src/share/vm/opto/library_call.cpp

Print this page
rev 3898 : 8005031: Some cleanup in c2 to prepare for incremental inlining support
Summary: collection of small changes to prepare for incremental inlining.
Reviewed-by:


 395     if (!Matcher::match_rule_supported(Op_PopCountL)) return NULL;
 396     break;
 397 
 398   case vmIntrinsics::_numberOfLeadingZeros_i:
 399     if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return NULL;
 400     break;
 401 
 402   case vmIntrinsics::_numberOfLeadingZeros_l:
 403     if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return NULL;
 404     break;
 405 
 406   case vmIntrinsics::_numberOfTrailingZeros_i:
 407     if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return NULL;
 408     break;
 409 
 410   case vmIntrinsics::_numberOfTrailingZeros_l:
 411     if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
 412     break;
 413 
 414   case vmIntrinsics::_reverseBytes_c:
 415     if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false;
 416     break;
 417   case vmIntrinsics::_reverseBytes_s:
 418     if (!Matcher::match_rule_supported(Op_ReverseBytesS))  return false;
 419     break;
 420   case vmIntrinsics::_reverseBytes_i:
 421     if (!Matcher::match_rule_supported(Op_ReverseBytesI))  return false;
 422     break;
 423   case vmIntrinsics::_reverseBytes_l:
 424     if (!Matcher::match_rule_supported(Op_ReverseBytesL))  return false;
 425     break;
 426 
 427   case vmIntrinsics::_Reference_get:
 428     // Use the intrinsic version of Reference.get() so that the value in
 429     // the referent field can be registered by the G1 pre-barrier code.
 430     // Also add memory barrier to prevent commoning reads from this field
 431     // across safepoints since GC can change its value.
 432     break;
 433 
 434   case vmIntrinsics::_compareAndSwapObject:
 435 #ifdef _LP64
 436     if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return NULL;
 437 #endif
 438     break;
 439 
 440   case vmIntrinsics::_compareAndSwapLong:
 441     if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return NULL;
 442     break;
 443 
 444   case vmIntrinsics::_getAndAddInt:


 519   // Nothing to do here.
 520 }
 521 
 // Entry point for expanding this library intrinsic at a call site.
 // Builds a LibraryCallKit over the caller's JVMState and attempts to
 // inline the intrinsic body.  On success, pushes the result, records
 // statistics/logging, and returns the resulting JVMState with pending
 // exceptions transferred; on bailout, records the failure and returns
 // NULL so the caller falls back to an ordinary call.
 522 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
 523   LibraryCallKit kit(jvms, this);
 524   Compile* C = kit.C;
 525   int nodes = C->unique();  // snapshot: log reports nodes added as C->unique() - nodes
 526 #ifndef PRODUCT
 527   if ((PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) && Verbose) {
 528     char buf[1000];
 529     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 530     tty->print_cr("Intrinsic %s", str);
 531   }
 532 #endif
 533   ciMethod* callee = kit.callee();
 534   const int bci    = kit.bci();
 535 
 536   // Try to inline the intrinsic.
 537   if (kit.try_to_inline()) {
 538     if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 539       CompileTask::print_inlining(callee, jvms->depth() - 1, bci, is_virtual() ? "(intrinsic, virtual)" : "(intrinsic)");
 540     }
 541     C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
 542     if (C->log()) {
 543       C->log()->elem("intrinsic id='%s'%s nodes='%d'",
 544                      vmIntrinsics::name_at(intrinsic_id()),
 545                      (is_virtual() ? " virtual='1'" : ""),
 546                      C->unique() - nodes);
 547     }
 548     // Push the result from the inlined method onto the stack.
 549     kit.push_result();
 550     return kit.transfer_exceptions_into_jvms();
 551   }
 552 
 553   // The intrinsic bailed out
 554   if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 555     if (jvms->has_method()) {
 556       // Not a root compile.
 557       const char* msg = is_virtual() ? "failed to inline (intrinsic, virtual)" : "failed to inline (intrinsic)";
 558       CompileTask::print_inlining(callee, jvms->depth() - 1, bci, msg);
 559     } else {
 560       // Root compile
 561       tty->print("Did not generate intrinsic %s%s at bci:%d in",
 562                vmIntrinsics::name_at(intrinsic_id()),
 563                (is_virtual() ? " (virtual)" : ""), bci);
 564     }
 565   }
 566   C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
 567   return NULL;
 568 }
 569 
 // Generates the guarding predicate for a predicted intrinsic.
 // Returns the slow-path control node produced by try_to_predicate().
 // NOTE(review): NULL is returned both when the predicate check folds
 // away (see comment at the return below) and when the intrinsic bails
 // out; callers cannot distinguish the two cases from the result alone.
 570 Node* LibraryIntrinsic::generate_predicate(JVMState* jvms) {
 571   LibraryCallKit kit(jvms, this);
 572   Compile* C = kit.C;
 573   int nodes = C->unique();  // snapshot: log reports nodes added as C->unique() - nodes
 574 #ifndef PRODUCT
 575   assert(is_predicted(), "sanity");  // only predicted intrinsics have predicates
 576   if ((PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) && Verbose) {
 577     char buf[1000];
 578     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 579     tty->print_cr("Predicate for intrinsic %s", str);
 580   }
 581 #endif
 582   ciMethod* callee = kit.callee();
 583   const int bci    = kit.bci();
 584 
 585   Node* slow_ctl = kit.try_to_predicate();
 586   if (!kit.failing()) {
 587     if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 588       CompileTask::print_inlining(callee, jvms->depth() - 1, bci, is_virtual() ? "(intrinsic, virtual)" : "(intrinsic)");
 589     }
 590     C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
 591     if (C->log()) {
 592       C->log()->elem("predicate_intrinsic id='%s'%s nodes='%d'",
 593                      vmIntrinsics::name_at(intrinsic_id()),
 594                      (is_virtual() ? " virtual='1'" : ""),
 595                      C->unique() - nodes);
 596     }
 597     return slow_ctl; // Could be NULL if the check folds.
 598   }
 599 
 600   // The intrinsic bailed out
 601   if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 602     if (jvms->has_method()) {
 603       // Not a root compile.
 604       const char* msg = "failed to generate predicate for intrinsic";
 605       CompileTask::print_inlining(kit.callee(), jvms->depth() - 1, bci, msg);
 606     } else {
 607       // Root compile
 608       tty->print("Did not generate predicate for intrinsic %s%s at bci:%d in",
 609                vmIntrinsics::name_at(intrinsic_id()),
 610                (is_virtual() ? " (virtual)" : ""), bci);
 611     }
 612   }
 613   C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
 614   return NULL;
 615 }
 616 
 617 bool LibraryCallKit::try_to_inline() {
 618   // Handle symbolic names for otherwise undistinguished boolean switches:
 619   const bool is_store       = true;
 620   const bool is_native_ptr  = true;
 621   const bool is_static      = true;
 622   const bool is_volatile    = true;
 623 
 624   if (!jvms()->has_method()) {
 625     // Root JVMState has a null method.
 626     assert(map()->memory()->Opcode() == Op_Parm, "");
 627     // Insert the memory aliasing node
 628     set_all_memory(reset_memory());


3302     _prim_1_path,               // {N,P} => false
3303     _ref_subtype_path,          // {N,N} & subtype check wins => true
3304     _both_ref_path,             // {N,N} & subtype check loses => false
3305     PATH_LIMIT
3306   };
3307 
3308   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
3309   Node*       phi    = new (C) PhiNode(region, TypeInt::BOOL);
3310   record_for_igvn(region);
3311 
3312   const TypePtr* adr_type = TypeRawPtr::BOTTOM;   // memory type of loads
3313   const TypeKlassPtr* kls_type = TypeKlassPtr::OBJECT_OR_NULL;
3314   int class_klass_offset = java_lang_Class::klass_offset_in_bytes();
3315 
3316   // First null-check both mirrors and load each mirror's klass metaobject.
3317   int which_arg;
3318   for (which_arg = 0; which_arg <= 1; which_arg++) {
3319     Node* arg = args[which_arg];
3320     arg = null_check(arg);
3321     if (stopped())  break;
3322     args[which_arg] = _gvn.transform(arg);
3323 
3324     Node* p = basic_plus_adr(arg, class_klass_offset);
3325     Node* kls = LoadKlassNode::make(_gvn, immutable_memory(), p, adr_type, kls_type);
3326     klasses[which_arg] = _gvn.transform(kls);
3327   }
3328 
3329   // Having loaded both klasses, test each for null.
3330   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3331   for (which_arg = 0; which_arg <= 1; which_arg++) {
3332     Node* kls = klasses[which_arg];
3333     Node* null_ctl = top();
3334     kls = null_check_oop(kls, &null_ctl, never_see_null);
3335     int prim_path = (which_arg == 0 ? _prim_0_path : _prim_1_path);
3336     region->init_req(prim_path, null_ctl);
3337     if (stopped())  break;
3338     klasses[which_arg] = kls;
3339   }
3340 
3341   if (!stopped()) {
3342     // now we have two reference types, in klasses[0..1]




 395     if (!Matcher::match_rule_supported(Op_PopCountL)) return NULL;
 396     break;
 397 
 398   case vmIntrinsics::_numberOfLeadingZeros_i:
 399     if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return NULL;
 400     break;
 401 
 402   case vmIntrinsics::_numberOfLeadingZeros_l:
 403     if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return NULL;
 404     break;
 405 
 406   case vmIntrinsics::_numberOfTrailingZeros_i:
 407     if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return NULL;
 408     break;
 409 
 410   case vmIntrinsics::_numberOfTrailingZeros_l:
 411     if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
 412     break;
 413 
 414   case vmIntrinsics::_reverseBytes_c:
 415     if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return NULL;
 416     break;
 417   case vmIntrinsics::_reverseBytes_s:
 418     if (!Matcher::match_rule_supported(Op_ReverseBytesS))  return NULL;
 419     break;
 420   case vmIntrinsics::_reverseBytes_i:
 421     if (!Matcher::match_rule_supported(Op_ReverseBytesI))  return NULL;
 422     break;
 423   case vmIntrinsics::_reverseBytes_l:
 424     if (!Matcher::match_rule_supported(Op_ReverseBytesL))  return NULL;
 425     break;
 426 
 427   case vmIntrinsics::_Reference_get:
 428     // Use the intrinsic version of Reference.get() so that the value in
 429     // the referent field can be registered by the G1 pre-barrier code.
 430     // Also add memory barrier to prevent commoning reads from this field
 431     // across safepoints since GC can change its value.
 432     break;
 433 
 434   case vmIntrinsics::_compareAndSwapObject:
 435 #ifdef _LP64
 436     if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return NULL;
 437 #endif
 438     break;
 439 
 440   case vmIntrinsics::_compareAndSwapLong:
 441     if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return NULL;
 442     break;
 443 
 444   case vmIntrinsics::_getAndAddInt:


 519   // Nothing to do here.
 520 }
 521 
 // Entry point for expanding this library intrinsic at a call site.
 // Builds a LibraryCallKit over the caller's JVMState and attempts to
 // inline the intrinsic body.  On success, pushes the result, records
 // statistics/logging, and returns the resulting JVMState with pending
 // exceptions transferred; on bailout, records the failure and returns
 // NULL so the caller falls back to an ordinary call.
 // This revision routes inlining messages through C->print_inlining()
 // rather than CompileTask::print_inlining().
 522 JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
 523   LibraryCallKit kit(jvms, this);
 524   Compile* C = kit.C;
 525   int nodes = C->unique();  // snapshot: log reports nodes added as C->unique() - nodes
 526 #ifndef PRODUCT
 527   if ((PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) && Verbose) {
 528     char buf[1000];
 529     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 530     tty->print_cr("Intrinsic %s", str);
 531   }
 532 #endif
 533   ciMethod* callee = kit.callee();
 534   const int bci    = kit.bci();
 535 
 536   // Try to inline the intrinsic.
 537   if (kit.try_to_inline()) {
 538     if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 539       C->print_inlining(callee, jvms->depth() - 1, bci, is_virtual() ? "(intrinsic, virtual)" : "(intrinsic)");
 540     }
 541     C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
 542     if (C->log()) {
 543       C->log()->elem("intrinsic id='%s'%s nodes='%d'",
 544                      vmIntrinsics::name_at(intrinsic_id()),
 545                      (is_virtual() ? " virtual='1'" : ""),
 546                      C->unique() - nodes);
 547     }
 548     // Push the result from the inlined method onto the stack.
 549     kit.push_result();
 550     return kit.transfer_exceptions_into_jvms();
 551   }
 552 
 553   // The intrinsic bailed out
 554   if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 555     if (jvms->has_method()) {
 556       // Not a root compile.
 557       const char* msg = is_virtual() ? "failed to inline (intrinsic, virtual)" : "failed to inline (intrinsic)";
 558       C->print_inlining(callee, jvms->depth() - 1, bci, msg);
 559     } else {
 560       // Root compile
 561       tty->print("Did not generate intrinsic %s%s at bci:%d in",
 562                vmIntrinsics::name_at(intrinsic_id()),
 563                (is_virtual() ? " (virtual)" : ""), bci);
 564     }
 565   }
 566   C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
 567   return NULL;
 568 }
 569 
 // Generates the guarding predicate for a predicted intrinsic.
 // Returns the slow-path control node produced by try_to_predicate().
 // NOTE(review): NULL is returned both when the predicate check folds
 // away (see comment at the return below) and when the intrinsic bails
 // out; callers cannot distinguish the two cases from the result alone.
 // This revision routes messages through C->print_inlining() /
 // C->print_inlining_stream() rather than CompileTask::print_inlining()
 // and tty; note the asymmetry with generate() above, where the root
 // compile message still goes to tty — presumably intentional, verify.
 570 Node* LibraryIntrinsic::generate_predicate(JVMState* jvms) {
 571   LibraryCallKit kit(jvms, this);
 572   Compile* C = kit.C;
 573   int nodes = C->unique();  // snapshot: log reports nodes added as C->unique() - nodes
 574 #ifndef PRODUCT
 575   assert(is_predicted(), "sanity");  // only predicted intrinsics have predicates
 576   if ((PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) && Verbose) {
 577     char buf[1000];
 578     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 579     tty->print_cr("Predicate for intrinsic %s", str);
 580   }
 581 #endif
 582   ciMethod* callee = kit.callee();
 583   const int bci    = kit.bci();
 584 
 585   Node* slow_ctl = kit.try_to_predicate();
 586   if (!kit.failing()) {
 587     if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 588       C->print_inlining(callee, jvms->depth() - 1, bci, is_virtual() ? "(intrinsic, virtual)" : "(intrinsic)");
 589     }
 590     C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
 591     if (C->log()) {
 592       C->log()->elem("predicate_intrinsic id='%s'%s nodes='%d'",
 593                      vmIntrinsics::name_at(intrinsic_id()),
 594                      (is_virtual() ? " virtual='1'" : ""),
 595                      C->unique() - nodes);
 596     }
 597     return slow_ctl; // Could be NULL if the check folds.
 598   }
 599 
 600   // The intrinsic bailed out
 601   if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 602     if (jvms->has_method()) {
 603       // Not a root compile.
 604       const char* msg = "failed to generate predicate for intrinsic";
 605       C->print_inlining(kit.callee(), jvms->depth() - 1, bci, msg);
 606     } else {
 607       // Root compile
 608       C->print_inlining_stream()->print("Did not generate predicate for intrinsic %s%s at bci:%d in",
 609                                         vmIntrinsics::name_at(intrinsic_id()),
 610                                         (is_virtual() ? " (virtual)" : ""), bci);
 611     }
 612   }
 613   C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
 614   return NULL;
 615 }
 616 
 617 bool LibraryCallKit::try_to_inline() {
 618   // Handle symbolic names for otherwise undistinguished boolean switches:
 619   const bool is_store       = true;
 620   const bool is_native_ptr  = true;
 621   const bool is_static      = true;
 622   const bool is_volatile    = true;
 623 
 624   if (!jvms()->has_method()) {
 625     // Root JVMState has a null method.
 626     assert(map()->memory()->Opcode() == Op_Parm, "");
 627     // Insert the memory aliasing node
 628     set_all_memory(reset_memory());


3302     _prim_1_path,               // {N,P} => false
3303     _ref_subtype_path,          // {N,N} & subtype check wins => true
3304     _both_ref_path,             // {N,N} & subtype check loses => false
3305     PATH_LIMIT
3306   };
3307 
3308   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
3309   Node*       phi    = new (C) PhiNode(region, TypeInt::BOOL);
3310   record_for_igvn(region);
3311 
3312   const TypePtr* adr_type = TypeRawPtr::BOTTOM;   // memory type of loads
3313   const TypeKlassPtr* kls_type = TypeKlassPtr::OBJECT_OR_NULL;
3314   int class_klass_offset = java_lang_Class::klass_offset_in_bytes();
3315 
3316   // First null-check both mirrors and load each mirror's klass metaobject.
3317   int which_arg;
3318   for (which_arg = 0; which_arg <= 1; which_arg++) {
3319     Node* arg = args[which_arg];
3320     arg = null_check(arg);
3321     if (stopped())  break;
3322     args[which_arg] = arg;
3323 
3324     Node* p = basic_plus_adr(arg, class_klass_offset);
3325     Node* kls = LoadKlassNode::make(_gvn, immutable_memory(), p, adr_type, kls_type);
3326     klasses[which_arg] = _gvn.transform(kls);
3327   }
3328 
3329   // Having loaded both klasses, test each for null.
3330   bool never_see_null = !too_many_traps(Deoptimization::Reason_null_check);
3331   for (which_arg = 0; which_arg <= 1; which_arg++) {
3332     Node* kls = klasses[which_arg];
3333     Node* null_ctl = top();
3334     kls = null_check_oop(kls, &null_ctl, never_see_null);
3335     int prim_path = (which_arg == 0 ? _prim_0_path : _prim_1_path);
3336     region->init_req(prim_path, null_ctl);
3337     if (stopped())  break;
3338     klasses[which_arg] = kls;
3339   }
3340 
3341   if (!stopped()) {
3342     // now we have two reference types, in klasses[0..1]