src/share/vm/opto/c2compiler.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File 8130832-review Sdiff src/share/vm/opto

src/share/vm/opto/c2compiler.cpp

Print this page




  62   // Check that runtime and architecture description agree on callee-saved-floats
  63   bool callee_saved_floats = false;
  64   for( OptoReg::Name i=OptoReg::Name(0); i<OptoReg::Name(_last_Mach_Reg); i = OptoReg::add(i,1) ) {
  65     // Is there a callee-saved float or double?
  66     if( register_save_policy[i] == 'E' /* callee-saved */ &&
  67        (register_save_type[i] == Op_RegF || register_save_type[i] == Op_RegD) ) {
  68       callee_saved_floats = true;
  69     }
  70   }
  71 
  72   DEBUG_ONLY( Node::init_NodeProperty(); )
  73 
  74   Compile::pd_compiler2_init();
  75 
  76   CompilerThread* thread = CompilerThread::current();
  77 
  78   HandleMark handle_mark(thread);
  79   return OptoRuntime::generate(thread->env());
  80 }
  81 
  82 
  83 void C2Compiler::initialize() {
  84   // The first compiler thread that gets here will initialize the
  85   // small amount of global state (and runtime stubs) that C2 needs.
  86 
  87   // There is a race possible once at startup and then we're fine
  88 
  89   // Note that this is being called from a compiler thread not the
  90   // main startup thread.
  91   if (should_perform_init()) {
  92     bool successful = C2Compiler::init_c2_runtime();
  93     int new_state = (successful) ? initialized : failed;
  94     set_state(new_state);
  95   }
  96 }
  97 
  98 void C2Compiler::compile_method(ciEnv* env, ciMethod* target, int entry_bci) {
  99   assert(is_initialized(), "Compiler thread must be initialized");
 100 
 101   bool subsume_loads = SubsumeLoads;
 102   bool do_escape_analysis = DoEscapeAnalysis && !env->should_retain_local_variables();


 137     }
 138     if (StressRecompilation) {
 139       if (subsume_loads) {
 140         subsume_loads = false;
 141         continue;  // retry
 142       }
 143       if (do_escape_analysis) {
 144         do_escape_analysis = false;
 145         continue;  // retry
 146       }
 147     }
 148 
 149     // print inlining for last compilation only
 150     C.dump_print_inlining();
 151 
 152     // No retry; just break the loop.
 153     break;
 154   }
 155 }
 156 
 157 
 158 void C2Compiler::print_timers() {
 159   Compile::print_timers();






























































































































































































































































































































































 160 }
 161 
// Size (in bytes) requested for a C2 compilation's initial code buffer:
// the maximal instruction section plus the maximal relocation (locs)
// section plus the initial constant table capacity.
int C2Compiler::initial_code_buffer_size() {
  // This sizing is only meaningful with a segmented code cache.
  assert(SegmentedCodeCache, "Should be only used with a segmented code cache");
  return Compile::MAX_inst_size + Compile::MAX_locs_size + initial_const_capacity;
}


  62   // Check that runtime and architecture description agree on callee-saved-floats
  63   bool callee_saved_floats = false;
  64   for( OptoReg::Name i=OptoReg::Name(0); i<OptoReg::Name(_last_Mach_Reg); i = OptoReg::add(i,1) ) {
  65     // Is there a callee-saved float or double?
  66     if( register_save_policy[i] == 'E' /* callee-saved */ &&
  67        (register_save_type[i] == Op_RegF || register_save_type[i] == Op_RegD) ) {
  68       callee_saved_floats = true;
  69     }
  70   }
  71 
  72   DEBUG_ONLY( Node::init_NodeProperty(); )
  73 
  74   Compile::pd_compiler2_init();
  75 
  76   CompilerThread* thread = CompilerThread::current();
  77 
  78   HandleMark handle_mark(thread);
  79   return OptoRuntime::generate(thread->env());
  80 }
  81 

void C2Compiler::initialize() {
  // The first compiler thread that gets here will initialize the
  // small amount of global state (and runtime stubs) that C2 needs.

  // There is a race possible once at startup and then we're fine

  // Note that this is being called from a compiler thread not the
  // main startup thread.
  if (should_perform_init()) {
    bool successful = C2Compiler::init_c2_runtime();
    // Publish the outcome so subsequent compile requests can distinguish a
    // usable compiler from one whose runtime setup failed.
    int new_state = (successful) ? initialized : failed;
    set_state(new_state);
  }
}
  96 
  97 void C2Compiler::compile_method(ciEnv* env, ciMethod* target, int entry_bci) {
  98   assert(is_initialized(), "Compiler thread must be initialized");
  99 
 100   bool subsume_loads = SubsumeLoads;
 101   bool do_escape_analysis = DoEscapeAnalysis && !env->should_retain_local_variables();


 136     }
 137     if (StressRecompilation) {
 138       if (subsume_loads) {
 139         subsume_loads = false;
 140         continue;  // retry
 141       }
 142       if (do_escape_analysis) {
 143         do_escape_analysis = false;
 144         continue;  // retry
 145       }
 146     }
 147 
 148     // print inlining for last compilation only
 149     C.dump_print_inlining();
 150 
 151     // No retry; just break the loop.
 152     break;
 153   }
 154 }
 155 

// Forward the timer report to Compile, which owns the C2 phase timers.
void C2Compiler::print_timers() {
  Compile::print_timers();
}
 159 
// Returns true if C2 can use the intrinsic version of 'method' in the given
// compilation context: the intrinsic must be supported on this platform and
// must not be disabled by command-line flags, either globally or for
// 'compilation_context' specifically.
bool C2Compiler::is_intrinsic_available(methodHandle method, methodHandle compilation_context) {
  // Assume a non-virtual dispatch. A virtual dispatch is
  // possible for only a limited set of available intrinsics whereas
  // a non-virtual dispatch is possible for all available intrinsics.
  return is_intrinsic_supported(method, false) &&
         !vmIntrinsics::is_disabled_by_flags(method->intrinsic_id()) &&
         !is_intrinsic_disabled_by_flag(method, compilation_context);
}
 169 
// Returns true if C2 implements the intrinsic identified by 'method' on the
// current platform for the requested dispatch kind ('is_virtual').  Support
// is determined purely by the intrinsic ID and platform capabilities
// (Matcher rules, stub availability) -- command-line disabling is checked
// separately by is_intrinsic_disabled_by_flag().
bool C2Compiler::is_intrinsic_supported(methodHandle method, bool is_virtual) {
  vmIntrinsics::ID id = method->intrinsic_id();
  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");

  // Only IDs in the compiler-inline range can be intrinsified by C2.
  if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::LAST_COMPILER_INLINE) {
    return false;
  }

  // Only Object.hashCode and Object.clone intrinsics implement also a virtual
  // dispatch because calling both methods is expensive but both methods are
  // frequently overridden. All other intrinsics implement only a non-virtual
  // dispatch.
  if (is_virtual) {
    switch (id) {
    case vmIntrinsics::_hashCode:
    case vmIntrinsics::_clone:
      break;
    default:
      return false;
    }
  }

  // Platform checks: an intrinsic that lowers to a specific ideal node needs
  // the matcher to support that node; stub-backed intrinsics need the stub.
  // Cases that 'break' without a check are unconditionally supported.
  switch (id) {
  case vmIntrinsics::_compareTo:
    if (!Matcher::match_rule_supported(Op_StrComp)) return false;
    break;
  case vmIntrinsics::_equals:
    if (!Matcher::match_rule_supported(Op_StrEquals)) return false;
    break;
  case vmIntrinsics::_equalsC:
    if (!Matcher::match_rule_supported(Op_AryEq)) return false;
    break;
  case vmIntrinsics::_copyMemory:
    if (StubRoutines::unsafe_arraycopy() == NULL) return false;
    break;
  case vmIntrinsics::_encodeISOArray:
    if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return false;
    break;
  case vmIntrinsics::_bitCount_i:
    if (!Matcher::match_rule_supported(Op_PopCountI)) return false;
    break;
  case vmIntrinsics::_bitCount_l:
    if (!Matcher::match_rule_supported(Op_PopCountL)) return false;
    break;
  case vmIntrinsics::_numberOfLeadingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return false;
    break;
  case vmIntrinsics::_numberOfLeadingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return false;
    break;
  case vmIntrinsics::_numberOfTrailingZeros_i:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return false;
    break;
  case vmIntrinsics::_numberOfTrailingZeros_l:
    if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return false;
    break;
  case vmIntrinsics::_reverseBytes_c:
    if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false;
    break;
  case vmIntrinsics::_reverseBytes_s:
    if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return false;
    break;
  case vmIntrinsics::_reverseBytes_i:
    if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return false;
    break;
  case vmIntrinsics::_reverseBytes_l:
    if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return false;
    break;
  case vmIntrinsics::_compareAndSwapObject:
#ifdef _LP64
    // On 64-bit, an uncompressed-oop CAS needs CompareAndSwapP support.
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return false;
#endif
    break;
  case vmIntrinsics::_compareAndSwapLong:
    if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return false;
    break;
  case vmIntrinsics::_getAndAddInt:
    if (!Matcher::match_rule_supported(Op_GetAndAddI)) return false;
    break;
  case vmIntrinsics::_getAndAddLong:
    if (!Matcher::match_rule_supported(Op_GetAndAddL)) return false;
    break;
  case vmIntrinsics::_getAndSetInt:
    if (!Matcher::match_rule_supported(Op_GetAndSetI)) return false;
    break;
  case vmIntrinsics::_getAndSetLong:
    if (!Matcher::match_rule_supported(Op_GetAndSetL)) return false;
    break;
  case vmIntrinsics::_getAndSetObject:
#ifdef _LP64
    // 64-bit: the required node depends on whether oops are compressed.
    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return false;
    if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return false;
    break;
#else
    if (!Matcher::match_rule_supported(Op_GetAndSetP)) return false;
    break;
#endif
  case vmIntrinsics::_incrementExactI:
  case vmIntrinsics::_addExactI:
    if (!Matcher::match_rule_supported(Op_OverflowAddI)) return false;
    break;
  case vmIntrinsics::_incrementExactL:
  case vmIntrinsics::_addExactL:
    if (!Matcher::match_rule_supported(Op_OverflowAddL)) return false;
    break;
  case vmIntrinsics::_decrementExactI:
  case vmIntrinsics::_subtractExactI:
    if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false;
    break;
  case vmIntrinsics::_decrementExactL:
  case vmIntrinsics::_subtractExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false;
    break;
  case vmIntrinsics::_negateExactI:
    // Negation is implemented as a subtraction from zero, hence OverflowSubI.
    if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false;
    break;
  case vmIntrinsics::_negateExactL:
    if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false;
    break;
  case vmIntrinsics::_multiplyExactI:
    if (!Matcher::match_rule_supported(Op_OverflowMulI)) return false;
    break;
  case vmIntrinsics::_multiplyExactL:
    if (!Matcher::match_rule_supported(Op_OverflowMulL)) return false;
    break;
  case vmIntrinsics::_getCallerClass:
    if (SystemDictionary::reflect_CallerSensitive_klass() == NULL) return false;
    break;
  case vmIntrinsics::_hashCode:
  case vmIntrinsics::_identityHashCode:
  case vmIntrinsics::_getClass:
  case vmIntrinsics::_dsin:
  case vmIntrinsics::_dcos:
  case vmIntrinsics::_dtan:
  case vmIntrinsics::_dabs:
  case vmIntrinsics::_datan2:
  case vmIntrinsics::_dsqrt:
  case vmIntrinsics::_dexp:
  case vmIntrinsics::_dlog:
  case vmIntrinsics::_dlog10:
  case vmIntrinsics::_dpow:
  case vmIntrinsics::_min:
  case vmIntrinsics::_max:
  case vmIntrinsics::_arraycopy:
  case vmIntrinsics::_indexOf:
  case vmIntrinsics::_getObject:
  case vmIntrinsics::_getBoolean:
  case vmIntrinsics::_getByte:
  case vmIntrinsics::_getShort:
  case vmIntrinsics::_getChar:
  case vmIntrinsics::_getInt:
  case vmIntrinsics::_getLong:
  case vmIntrinsics::_getFloat:
  case vmIntrinsics::_getDouble:
  case vmIntrinsics::_putObject:
  case vmIntrinsics::_putBoolean:
  case vmIntrinsics::_putByte:
  case vmIntrinsics::_putShort:
  case vmIntrinsics::_putChar:
  case vmIntrinsics::_putInt:
  case vmIntrinsics::_putLong:
  case vmIntrinsics::_putFloat:
  case vmIntrinsics::_putDouble:
  case vmIntrinsics::_getByte_raw:
  case vmIntrinsics::_getShort_raw:
  case vmIntrinsics::_getChar_raw:
  case vmIntrinsics::_getInt_raw:
  case vmIntrinsics::_getLong_raw:
  case vmIntrinsics::_getFloat_raw:
  case vmIntrinsics::_getDouble_raw:
  case vmIntrinsics::_getAddress_raw:
  case vmIntrinsics::_putByte_raw:
  case vmIntrinsics::_putShort_raw:
  case vmIntrinsics::_putChar_raw:
  case vmIntrinsics::_putInt_raw:
  case vmIntrinsics::_putLong_raw:
  case vmIntrinsics::_putFloat_raw:
  case vmIntrinsics::_putDouble_raw:
  case vmIntrinsics::_putAddress_raw:
  case vmIntrinsics::_getObjectVolatile:
  case vmIntrinsics::_getBooleanVolatile:
  case vmIntrinsics::_getByteVolatile:
  case vmIntrinsics::_getShortVolatile:
  case vmIntrinsics::_getCharVolatile:
  case vmIntrinsics::_getIntVolatile:
  case vmIntrinsics::_getLongVolatile:
  case vmIntrinsics::_getFloatVolatile:
  case vmIntrinsics::_getDoubleVolatile:
  case vmIntrinsics::_putObjectVolatile:
  case vmIntrinsics::_putBooleanVolatile:
  case vmIntrinsics::_putByteVolatile:
  case vmIntrinsics::_putShortVolatile:
  case vmIntrinsics::_putCharVolatile:
  case vmIntrinsics::_putIntVolatile:
  case vmIntrinsics::_putLongVolatile:
  case vmIntrinsics::_putFloatVolatile:
  case vmIntrinsics::_putDoubleVolatile:
  case vmIntrinsics::_getShortUnaligned:
  case vmIntrinsics::_getCharUnaligned:
  case vmIntrinsics::_getIntUnaligned:
  case vmIntrinsics::_getLongUnaligned:
  case vmIntrinsics::_putShortUnaligned:
  case vmIntrinsics::_putCharUnaligned:
  case vmIntrinsics::_putIntUnaligned:
  case vmIntrinsics::_putLongUnaligned:
  case vmIntrinsics::_compareAndSwapInt:
  case vmIntrinsics::_putOrderedObject:
  case vmIntrinsics::_putOrderedInt:
  case vmIntrinsics::_putOrderedLong:
  case vmIntrinsics::_loadFence:
  case vmIntrinsics::_storeFence:
  case vmIntrinsics::_fullFence:
  case vmIntrinsics::_currentThread:
  case vmIntrinsics::_isInterrupted:
#ifdef TRACE_HAVE_INTRINSICS
  case vmIntrinsics::_classID:
  case vmIntrinsics::_threadID:
  case vmIntrinsics::_counterTime:
#endif
  case vmIntrinsics::_currentTimeMillis:
  case vmIntrinsics::_nanoTime:
  case vmIntrinsics::_allocateInstance:
  case vmIntrinsics::_newArray:
  case vmIntrinsics::_getLength:
  case vmIntrinsics::_copyOf:
  case vmIntrinsics::_copyOfRange:
  case vmIntrinsics::_clone:
  case vmIntrinsics::_isAssignableFrom:
  case vmIntrinsics::_isInstance:
  case vmIntrinsics::_getModifiers:
  case vmIntrinsics::_isInterface:
  case vmIntrinsics::_isArray:
  case vmIntrinsics::_isPrimitive:
  case vmIntrinsics::_getSuperclass:
  case vmIntrinsics::_getClassAccessFlags:
  case vmIntrinsics::_floatToRawIntBits:
  case vmIntrinsics::_floatToIntBits:
  case vmIntrinsics::_intBitsToFloat:
  case vmIntrinsics::_doubleToRawLongBits:
  case vmIntrinsics::_doubleToLongBits:
  case vmIntrinsics::_longBitsToDouble:
  case vmIntrinsics::_Reference_get:
  case vmIntrinsics::_Class_cast:
  case vmIntrinsics::_aescrypt_encryptBlock:
  case vmIntrinsics::_aescrypt_decryptBlock:
  case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
  case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
  case vmIntrinsics::_sha_implCompress:
  case vmIntrinsics::_sha2_implCompress:
  case vmIntrinsics::_sha5_implCompress:
  case vmIntrinsics::_digestBase_implCompressMB:
  case vmIntrinsics::_multiplyToLen:
  case vmIntrinsics::_squareToLen:
  case vmIntrinsics::_mulAdd:
  case vmIntrinsics::_montgomeryMultiply:
  case vmIntrinsics::_montgomerySquare:
  case vmIntrinsics::_ghash_processBlocks:
  case vmIntrinsics::_updateCRC32:
  case vmIntrinsics::_updateBytesCRC32:
  case vmIntrinsics::_updateByteBufferCRC32:
  case vmIntrinsics::_updateBytesCRC32C:
  case vmIntrinsics::_updateDirectByteBufferCRC32C:
  case vmIntrinsics::_profileBoolean:
  case vmIntrinsics::_isCompileConstant:
    break;
  default:
    // Any intrinsic ID not listed above is not implemented by C2.
    return false;
  }
  return true;
}
 440 
 441 bool C2Compiler::is_intrinsic_disabled_by_flag(methodHandle method, methodHandle compilation_context) {
 442   vmIntrinsics::ID id = method->intrinsic_id();
 443   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 444 
 445   // Check if the intrinsic corresponding to 'method' has been disabled on
 446   // the command line by using the DisableIntrinsic flag (either globally
 447   // or on a per-method level, see src/share/vm/compiler/abstractCompiler.hpp
 448   // for details).
 449   // Usually, the compilation context is the caller of the method 'method'.
 450   // The only case when for a non-recursive method 'method' the compilation context
 451   // is not the caller of the 'method' (but it is the method itself) is
 452   // java.lang.ref.Referene::get.
 453   // For java.lang.ref.Reference::get, the intrinsic version is used
 454   // instead of the C2-compiled version so that the value in the referent
 455   // field can be registered by the G1 pre-barrier code. The intrinsified
 456   // version of Reference::get also adds a memory barrier to prevent
 457   // commoning reads from the referent field across safepoint since GC
 458   // can change the referent field's value. See Compile::Compile()
 459   // in src/share/vm/opto/compile.cpp for more details.
 460   ccstr disable_intr = NULL;
 461   if ((DisableIntrinsic[0] != '\0' && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
 462       (!compilation_context.is_null() &&
 463           CompilerOracle::has_option_value(compilation_context, "DisableIntrinsic", disable_intr) &&
 464           strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)
 465   ) {
 466     return true;
 467   }
 468 
 469   // -XX:-InlineNatives disables nearly all intrinsics except the ones listed in
 470   // the following switch statement.
 471   if (!InlineNatives) {
 472     switch (id) {
 473     case vmIntrinsics::_indexOf:
 474     case vmIntrinsics::_compareTo:
 475     case vmIntrinsics::_equals:
 476     case vmIntrinsics::_equalsC:
 477     case vmIntrinsics::_getAndAddInt:
 478     case vmIntrinsics::_getAndAddLong:
 479     case vmIntrinsics::_getAndSetInt:
 480     case vmIntrinsics::_getAndSetLong:
 481     case vmIntrinsics::_getAndSetObject:
 482     case vmIntrinsics::_loadFence:
 483     case vmIntrinsics::_storeFence:
 484     case vmIntrinsics::_fullFence:
 485     case vmIntrinsics::_Reference_get:
 486       return false;
 487       break;
 488     default:
 489       return true;
 490     }
 491   }
 492 
 493   if (!InlineUnsafeOps) {
 494     switch (id) {
 495     case vmIntrinsics::_loadFence:
 496     case vmIntrinsics::_storeFence:
 497     case vmIntrinsics::_fullFence:
 498     case vmIntrinsics::_compareAndSwapObject:
 499     case vmIntrinsics::_compareAndSwapLong:
 500     case vmIntrinsics::_compareAndSwapInt:
 501       return true;
 502     default:
 503       return false;
 504     }
 505   }
 506 
 507   return false;
 508 }
 509 
 510 int C2Compiler::initial_code_buffer_size() {
 511   assert(SegmentedCodeCache, "Should be only used with a segmented code cache");
 512   return Compile::MAX_inst_size + Compile::MAX_locs_size + initial_const_capacity;
 513 }
src/share/vm/opto/c2compiler.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File