158 while (fgets(buf, sizeof (buf), f) != NULL) {  // scan each "key : value" line of /proc/cpuinfo
159 if (p = strchr(buf, ':')) {  // NOTE(review): assignment (not ==) in the condition is intentional
160 long v = strtol(p+1, NULL, 0);  // base 0: accepts both decimal and 0x-prefixed hex values
161 if (strncmp(buf, "CPU implementer", sizeof "CPU implementer" - 1) == 0) {
162 _cpu = v;
163 cpu_lines++;  // count "CPU implementer" lines: >1 means the kernel listed each core separately
164 } else if (strncmp(buf, "CPU variant", sizeof "CPU variant" - 1) == 0) {
165 _variant = v;
166 } else if (strncmp(buf, "CPU part", sizeof "CPU part" - 1) == 0) {
// Keep the previously seen, different part number in _model2 so a
// heterogeneous (big.LITTLE) system records both core models.
167 if (_model != v) _model2 = _model;
168 _model = v;
169 } else if (strncmp(buf, "CPU revision", sizeof "CPU revision" - 1) == 0) {
170 _revision = v;
171 }
172 }
173 }
174 fclose(f);
175 }
176
177 // Enable vendor specific features
// Cavium with variant 0 requires DMB-based atomics.
178 if (_cpu == CPU_CAVIUM && _variant == 0) _features |= CPU_DMB_ATOMICS;
// Part 0xd07 is an A57 (per the comment below); 0xd03 is treated as the A53
// case (CPU_A53MAC). Both the current model and the remembered second model
// (_model2) are checked so big.LITTLE systems get the workaround.
179 if (_cpu == CPU_ARM && (_model == 0xd03 || _model2 == 0xd03)) _features |= CPU_A53MAC;
180 if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH;
181 // If an olde style /proc/cpuinfo (cpu_lines == 1) then if _model is an A57 (0xd07)
182 // we assume the worst and assume we could be on a big little system and have
183 // undisclosed A53 cores which we could be swapped to at any stage
184 if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
185
// Build the human-readable features string: "implementer:variant:part:revision"
// followed by the capabilities reported via the HWCAP auxiliary vector.
// NOTE(review): assumes buf is large enough for the formatted output plus all
// appended strings — confirm against buf's declaration (not visible here).
186 sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision);
187 if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2);  // second model only if one was seen
188 if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
189 if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
190 if (auxv & HWCAP_AES) strcat(buf, ", aes");
191 if (auxv & HWCAP_SHA1) strcat(buf, ", sha1");
192 if (auxv & HWCAP_SHA2) strcat(buf, ", sha256");
193 if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
194
195 _features_string = os::strdup(buf);
196
// Only derive UseCRC32 from HWCAP when the user did not set it explicitly.
197 if (FLAG_IS_DEFAULT(UseCRC32)) {
198 UseCRC32 = (auxv & HWCAP_CRC32) != 0;
|
158 while (fgets(buf, sizeof (buf), f) != NULL) {  // scan each "key : value" line of /proc/cpuinfo
159 if (p = strchr(buf, ':')) {  // NOTE(review): assignment (not ==) in the condition is intentional
160 long v = strtol(p+1, NULL, 0);  // base 0: accepts both decimal and 0x-prefixed hex values
161 if (strncmp(buf, "CPU implementer", sizeof "CPU implementer" - 1) == 0) {
162 _cpu = v;
163 cpu_lines++;  // count "CPU implementer" lines: >1 means the kernel listed each core separately
164 } else if (strncmp(buf, "CPU variant", sizeof "CPU variant" - 1) == 0) {
165 _variant = v;
166 } else if (strncmp(buf, "CPU part", sizeof "CPU part" - 1) == 0) {
// Keep the previously seen, different part number in _model2 so a
// heterogeneous (big.LITTLE) system records both core models.
167 if (_model != v) _model2 = _model;
168 _model = v;
169 } else if (strncmp(buf, "CPU revision", sizeof "CPU revision" - 1) == 0) {
170 _revision = v;
171 }
172 }
173 }
174 fclose(f);
175 }
176
177 // Enable vendor specific features
178 if (_cpu == CPU_CAVIUM) {
179 if (_variant == 0) _features |= CPU_DMB_ATOMICS;  // variant 0 requires DMB-based atomics
// Tune defaults for Cavium; FLAG_IS_DEFAULT guards ensure an explicit
// command-line setting always wins over these heuristics.
180 if (FLAG_IS_DEFAULT(AvoidUnalignedAccesses)) {
181 FLAG_SET_DEFAULT(AvoidUnalignedAccesses, true);
182 }
183 if (FLAG_IS_DEFAULT(UseSIMDForMemoryOps)) {
// SIMD memory ops are enabled only on later revisions (variant > 0).
184 FLAG_SET_DEFAULT(UseSIMDForMemoryOps, (_variant > 0));
185 }
186 }
// Part 0xd07 is an A57 (per the comment below); 0xd03 is treated as the A53
// case (CPU_A53MAC). Both the current model and the remembered second model
// (_model2) are checked so big.LITTLE systems get the workaround.
187 if (_cpu == CPU_ARM && (_model == 0xd03 || _model2 == 0xd03)) _features |= CPU_A53MAC;
188 if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH;
189 // If an olde style /proc/cpuinfo (cpu_lines == 1) then if _model is an A57 (0xd07)
190 // we assume the worst and assume we could be on a big little system and have
191 // undisclosed A53 cores which we could be swapped to at any stage
192 if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
193
// Build the human-readable features string: "implementer:variant:part:revision"
// followed by the capabilities reported via the HWCAP auxiliary vector.
// NOTE(review): assumes buf is large enough for the formatted output plus all
// appended strings — confirm against buf's declaration (not visible here).
194 sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision);
195 if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2);  // second model only if one was seen
196 if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
197 if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
198 if (auxv & HWCAP_AES) strcat(buf, ", aes");
199 if (auxv & HWCAP_SHA1) strcat(buf, ", sha1");
200 if (auxv & HWCAP_SHA2) strcat(buf, ", sha256");
201 if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
202
203 _features_string = os::strdup(buf);
204
// Only derive UseCRC32 from HWCAP when the user did not set it explicitly.
205 if (FLAG_IS_DEFAULT(UseCRC32)) {
206 UseCRC32 = (auxv & HWCAP_CRC32) != 0;
|