228 }
229
230 // Cortex A73
231 if (_cpu == CPU_ARM && (_model == 0xd09 || _model2 == 0xd09)) {
232 if (FLAG_IS_DEFAULT(SoftwarePrefetchHintDistance)) {
233 FLAG_SET_DEFAULT(SoftwarePrefetchHintDistance, -1); // -1 disables software prefetch hints on A73
234 }
235 // A73 is faster with a short loop that is easy for speculative execution
236 if (FLAG_IS_DEFAULT(UseSimpleArrayEquals)) {
237 FLAG_SET_DEFAULT(UseSimpleArrayEquals, true);
238 }
239 }
240
241 // Neoverse N1
242 if (_cpu == CPU_ARM && (_model == 0xd0c || _model2 == 0xd0c)) {
243 if (FLAG_IS_DEFAULT(UseSIMDForMemoryOps)) {
244 FLAG_SET_DEFAULT(UseSIMDForMemoryOps, true);
245 }
246 }
247
248 if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH; // Cortex-A57 benefits from prefetch before stxr
249 // If this is an old-style /proc/cpuinfo (cpu_lines == 1) and _model is an A57 (0xd07),
250 // assume the worst: we could be on a big.LITTLE system with undisclosed A53 cores
251 // that we could be swapped to at any stage, so conservatively set CPU_A53MAC.
252 if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
253
254 sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision); // id string: cpu:variant:model:revision
255 if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2); // append the second model id when one was reported
256 if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
257 if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
258 if (auxv & HWCAP_AES) strcat(buf, ", aes");
259 if (auxv & HWCAP_SHA1) strcat(buf, ", sha1");
260 if (auxv & HWCAP_SHA2) strcat(buf, ", sha256");
261 if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
262
263 _features_string = os::strdup(buf); // persist a heap copy of the assembled feature string
264
265 // Default UseCRC32 from the CRC32 hwcap unless the user set it explicitly.
266 if (FLAG_IS_DEFAULT(UseCRC32)) {
267 UseCRC32 = (auxv & HWCAP_CRC32) != 0;
268 }
228 }
229
230 // Cortex A73
231 if (_cpu == CPU_ARM && (_model == 0xd09 || _model2 == 0xd09)) {
232 if (FLAG_IS_DEFAULT(SoftwarePrefetchHintDistance)) {
233 FLAG_SET_DEFAULT(SoftwarePrefetchHintDistance, -1); // -1 disables software prefetch hints on A73
234 }
235 // A73 is faster with a short loop that is easy for speculative execution
236 if (FLAG_IS_DEFAULT(UseSimpleArrayEquals)) {
237 FLAG_SET_DEFAULT(UseSimpleArrayEquals, true);
238 }
239 }
240
241 // Neoverse N1
242 if (_cpu == CPU_ARM && (_model == 0xd0c || _model2 == 0xd0c)) {
243 if (FLAG_IS_DEFAULT(UseSIMDForMemoryOps)) {
244 FLAG_SET_DEFAULT(UseSIMDForMemoryOps, true);
245 }
246 }
247
248 // Enable the Signum intrinsic by default on all aarch64 (CPU_ARM) implementations.
249 if (_cpu == CPU_ARM) {
250 if (FLAG_IS_DEFAULT(UseSignumIntrinsic)) {
251 FLAG_SET_DEFAULT(UseSignumIntrinsic, true);
252 }
253 }
254
255 if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH; // Cortex-A57 benefits from prefetch before stxr
256 // If this is an old-style /proc/cpuinfo (cpu_lines == 1) and _model is an A57 (0xd07),
257 // assume the worst: we could be on a big.LITTLE system with undisclosed A53 cores
258 // that we could be swapped to at any stage, so conservatively set CPU_A53MAC.
259 if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
260
261 sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision); // id string: cpu:variant:model:revision
262 if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2); // append the second model id when one was reported
263 if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
264 if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
265 if (auxv & HWCAP_AES) strcat(buf, ", aes");
266 if (auxv & HWCAP_SHA1) strcat(buf, ", sha1");
267 if (auxv & HWCAP_SHA2) strcat(buf, ", sha256");
268 if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
269
270 _features_string = os::strdup(buf); // persist a heap copy of the assembled feature string
271
272 // Default UseCRC32 from the CRC32 hwcap unless the user set it explicitly.
273 if (FLAG_IS_DEFAULT(UseCRC32)) {
274 UseCRC32 = (auxv & HWCAP_CRC32) != 0;
275 }