< prev index next >

src/hotspot/cpu/aarch64/vm_version_aarch64.cpp

Print this page




 174         } else if (strncmp(buf, "CPU part", sizeof "CPU part" - 1) == 0) {
 175           if (_model != v)  _model2 = _model;
 176           _model = v;
 177         } else if (strncmp(buf, "CPU revision", sizeof "CPU revision" - 1) == 0) {
 178           _revision = v;
 179         }
 180       }
 181     }
 182     fclose(f);
 183   }
 184 
 185   // Enable vendor specific features
 186   if (_cpu == CPU_CAVIUM) {
 187     if (_variant == 0) _features |= CPU_DMB_ATOMICS;
 188     if (FLAG_IS_DEFAULT(AvoidUnalignedAccesses)) {
 189       FLAG_SET_DEFAULT(AvoidUnalignedAccesses, true);
 190     }
 191     if (FLAG_IS_DEFAULT(UseSIMDForMemoryOps)) {
 192       FLAG_SET_DEFAULT(UseSIMDForMemoryOps, (_variant > 0));
 193     }



 194   }
 195   if (_cpu == CPU_ARM && (_model == 0xd03 || _model2 == 0xd03)) _features |= CPU_A53MAC;
 196   if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH;
 197   // If an old style /proc/cpuinfo (cpu_lines == 1) then if _model is an A57 (0xd07)
 198   // we assume the worst: we could be on a big.LITTLE system with
 199   // undisclosed A53 cores which we could be swapped to at any stage
 200   if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
 201 
 202   sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision);
 203   if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2);
 204   if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
 205   if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
 206   if (auxv & HWCAP_AES)   strcat(buf, ", aes");
 207   if (auxv & HWCAP_SHA1)  strcat(buf, ", sha1");
 208   if (auxv & HWCAP_SHA2)  strcat(buf, ", sha256");
 209   if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
 210 
 211   _features_string = os::strdup(buf);
 212 
 213   if (FLAG_IS_DEFAULT(UseCRC32)) {




 174         } else if (strncmp(buf, "CPU part", sizeof "CPU part" - 1) == 0) {
 175           if (_model != v)  _model2 = _model;
 176           _model = v;
 177         } else if (strncmp(buf, "CPU revision", sizeof "CPU revision" - 1) == 0) {
 178           _revision = v;
 179         }
 180       }
 181     }
 182     fclose(f);
 183   }
 184 
 185   // Enable vendor specific features
 186   if (_cpu == CPU_CAVIUM) {
 187     if (_variant == 0) _features |= CPU_DMB_ATOMICS;
 188     if (FLAG_IS_DEFAULT(AvoidUnalignedAccesses)) {
 189       FLAG_SET_DEFAULT(AvoidUnalignedAccesses, true);
 190     }
 191     if (FLAG_IS_DEFAULT(UseSIMDForMemoryOps)) {
 192       FLAG_SET_DEFAULT(UseSIMDForMemoryOps, (_variant > 0));
 193     }
 194     if ((_model == 0x0a1 || _model2 == 0x0a1) && FLAG_IS_DEFAULT(UseSIMDForArrayEquals)) {
 195         UseSIMDForArrayEquals = false; // ThunderX T88 is slow with SIMD
 196     }
 197   }
 198   if (_cpu == CPU_ARM && (_model == 0xd03 || _model2 == 0xd03)) _features |= CPU_A53MAC;
 199   if (_cpu == CPU_ARM && (_model == 0xd07 || _model2 == 0xd07)) _features |= CPU_STXR_PREFETCH;
 200   // If an old style /proc/cpuinfo (cpu_lines == 1) then if _model is an A57 (0xd07)
 201   // we assume the worst: we could be on a big.LITTLE system with
 202   // undisclosed A53 cores which we could be swapped to at any stage
 203   if (_cpu == CPU_ARM && cpu_lines == 1 && _model == 0xd07) _features |= CPU_A53MAC;
 204 
 205   sprintf(buf, "0x%02x:0x%x:0x%03x:%d", _cpu, _variant, _model, _revision);
 206   if (_model2) sprintf(buf+strlen(buf), "(0x%03x)", _model2);
 207   if (auxv & HWCAP_ASIMD) strcat(buf, ", simd");
 208   if (auxv & HWCAP_CRC32) strcat(buf, ", crc");
 209   if (auxv & HWCAP_AES)   strcat(buf, ", aes");
 210   if (auxv & HWCAP_SHA1)  strcat(buf, ", sha1");
 211   if (auxv & HWCAP_SHA2)  strcat(buf, ", sha256");
 212   if (auxv & HWCAP_ATOMICS) strcat(buf, ", lse");
 213 
 214   _features_string = os::strdup(buf);
 215 
 216   if (FLAG_IS_DEFAULT(UseCRC32)) {


< prev index next >