src/share/vm/opto/callGenerator.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/opto

src/share/vm/opto/callGenerator.cpp

Print this page
rev 6132 : 8007988: PrintInlining output is inconsistent with incremental inlining
Summary: fix duplicate and conflicting inlining output
Reviewed-by:
rev 6133 : 8005079: fix LogCompilation for incremental inlining
Summary: report late inlining as part of the rest of the inlining output
Reviewed-by:
rev 6134 : [mq]: logcompilation-reviews


 266 
 267 // Allow inlining decisions to be delayed
 268 class LateInlineCallGenerator : public DirectCallGenerator {
 269  protected:
 270   CallGenerator* _inline_cg;
 271 
 272   virtual bool do_late_inline_check(JVMState* jvms) { return true; }
 273 
 274  public:
 275   LateInlineCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
 276     DirectCallGenerator(method, true), _inline_cg(inline_cg) {}
 277 
 278   virtual bool is_late_inline() const { return true; }
 279 
 280   // Convert the CallStaticJava into an inline
 281   virtual void do_late_inline();
 282 
 283   virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
 284     Compile *C = Compile::current();
 285 


 286     // Record that this call site should be revisited once the main
 287     // parse is finished.
 288     if (!is_mh_late_inline()) {
 289       C->add_late_inline(this);
 290     }
 291 
 292     // Emit the CallStaticJava and request separate projections so
 293     // that the late inlining logic can distinguish between fall
 294     // through and exceptional uses of the memory and io projections
 295     // as is done for allocations and macro expansion.
 296     return DirectCallGenerator::generate(jvms, parent_parser);
 297   }
 298 
 299   virtual void print_inlining_late(const char* msg) {
 300     CallNode* call = call_node();
 301     Compile* C = Compile::current();
 302     C->print_inlining_assert_ready();
 303     C->print_inlining(method(), call->jvms()->depth()-1, call->jvms()->bci(), msg);
 304     C->print_inlining_move_to(this);
 305     C->print_inlining_update_delayed(this);


 351 
 352   uint nargs = method()->arg_size();
 353   // blow away old call arguments
 354   Node* top = C->top();
 355   for (uint i1 = 0; i1 < nargs; i1++) {
 356     map->set_req(TypeFunc::Parms + i1, top);
 357   }
 358   jvms->set_map(map);
 359 
 360   // Make enough space in the expression stack to transfer
 361   // the incoming arguments and return value.
 362   map->ensure_stack(jvms, jvms->method()->max_stack());
 363   for (uint i1 = 0; i1 < nargs; i1++) {
 364     map->set_argument(jvms, i1, call->in(TypeFunc::Parms + i1));
 365   }
 366 
 367   C->print_inlining_assert_ready();
 368 
 369   C->print_inlining_move_to(this);
 370 


 371   // This check is done here because for_method_handle_inline() method
 372   // needs jvms for inlined state.
 373   if (!do_late_inline_check(jvms)) {
 374     map->disconnect_inputs(NULL, C);
 375     return;
 376   }
 377 
 378   CompileLog* log = C->log();
 379   if (log != NULL) {
 380     log->head("late_inline method='%d'", log->identify(method()));
 381     JVMState* p = jvms;
 382     while (p != NULL) {
 383       log->elem("jvms bci='%d' method='%d'", p->bci(), log->identify(p->method()));
 384       p = p->caller();
 385     }
 386     log->tail("late_inline");
 387   }
 388 
 389   // Setup default node notes to be picked up by the inlining
 390   Node_Notes* old_nn = C->default_node_notes();
 391   if (old_nn != NULL) {
 392     Node_Notes* entry_nn = old_nn->clone(C);
 393     entry_nn->set_jvms(jvms);
 394     C->set_default_node_notes(entry_nn);
 395   }
 396 
 397   // Now perform the inlining using the synthesized JVMState
 398   JVMState* new_jvms = _inline_cg->generate(jvms, NULL);
 399   if (new_jvms == NULL)  return;  // no change
 400   if (C->failing())      return;
 401 
 402   // Capture any exceptional control flow
 403   GraphKit kit(new_jvms);
 404 
 405   // Find the result object
 406   Node* result = C->top();
 407   int   result_size = method()->return_type()->size();
 408   if (result_size != 0 && !kit.stopped()) {


 421   return new LateInlineCallGenerator(method, inline_cg);
 422 }
 423 
// Late inlining of method handle invokes (invokeBasic / linkTo*).  The
// MethodHandle or MemberName argument may only become a constant after
// more of the graph has been built, so the inlining decision is delayed
// and retried during incremental inlining.
class LateInlineMHCallGenerator : public LateInlineCallGenerator {
  ciMethod* _caller;       // Method containing the call site.
  int _attempt;            // Number of late inline attempts made so far.
  bool _input_not_const;   // MH/MemberName input was not constant at parse time.

  // Retry the inlining decision once a JVMState for the call is available.
  virtual bool do_late_inline_check(JVMState* jvms);
  virtual bool already_attempted() const { return _attempt > 0; }

 public:
  // _inline_cg starts out NULL: it is only installed when
  // do_late_inline_check() succeeds in building an inline generator.
  LateInlineMHCallGenerator(ciMethod* caller, ciMethod* callee, bool input_not_const) :
    LateInlineCallGenerator(callee, NULL), _caller(caller), _attempt(0), _input_not_const(input_not_const) {}

  virtual bool is_mh_late_inline() const { return true; }

  virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
    // Emit the placeholder call via the base class (which skips the
    // generic late-inline enqueue for MH late inlines).
    JVMState* new_jvms = LateInlineCallGenerator::generate(jvms, parent_parser);

    if (_input_not_const) {
      // inlining won't be possible so no need to enqueue right now.
      call_node()->set_generator(this);
    } else {
      Compile::current()->add_late_inline(this);
    }
    return new_jvms;
  }
};
 450 
 451 bool LateInlineMHCallGenerator::do_late_inline_check(JVMState* jvms) {
 452 
 453   CallGenerator* cg = for_method_handle_inline(jvms, _caller, method(), _input_not_const);
 454 
 455   Compile::current()->print_inlining_update_delayed(this);
 456 
 457   if (!_input_not_const) {
 458     _attempt++;
 459   }
 460 
 461   if (cg != NULL) {
 462     assert(!cg->is_late_inline() && cg->is_inline(), "we're doing late inlining");
 463     _inline_cg = cg;
 464     Compile::current()->dec_number_of_mh_late_inlines();
 465     return true;
 466   }
 467 
 468   call_node()->set_generator(this);
 469   return false;
 470 }
 471 
 472 CallGenerator* CallGenerator::for_mh_late_inline(ciMethod* caller, ciMethod* callee, bool input_not_const) {
 473   Compile::current()->inc_number_of_mh_late_inlines();
 474   CallGenerator* cg = new LateInlineMHCallGenerator(caller, callee, input_not_const);
 475   return cg;
 476 }
 477 
 478 class LateInlineStringCallGenerator : public LateInlineCallGenerator {
 479 
 480  public:
 481   LateInlineStringCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
 482     LateInlineCallGenerator(method, inline_cg) {}
 483 
 484   virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
 485     Compile *C = Compile::current();



 486     C->add_string_late_inline(this);
 487 
 488     JVMState* new_jvms =  DirectCallGenerator::generate(jvms, parent_parser);
 489     return new_jvms;
 490   }
 491 
 492   virtual bool is_string_late_inline() const { return true; }
 493 };
 494 
 495 CallGenerator* CallGenerator::for_string_late_inline(ciMethod* method, CallGenerator* inline_cg) {
 496   return new LateInlineStringCallGenerator(method, inline_cg);
 497 }
 498 
 499 class LateInlineBoxingCallGenerator : public LateInlineCallGenerator {
 500 
 501  public:
 502   LateInlineBoxingCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
 503     LateInlineCallGenerator(method, inline_cg) {}
 504 
 505   virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
 506     Compile *C = Compile::current();
 507 


 508     C->add_boxing_late_inline(this);
 509 
 510     JVMState* new_jvms =  DirectCallGenerator::generate(jvms, parent_parser);
 511     return new_jvms;
 512   }
 513 };
 514 
 515 CallGenerator* CallGenerator::for_boxing_late_inline(ciMethod* method, CallGenerator* inline_cg) {
 516   return new LateInlineBoxingCallGenerator(method, inline_cg);
 517 }
 518 
 519 //---------------------------WarmCallGenerator--------------------------------
 520 // Internal class which handles initial deferral of inlining decisions.
 521 class WarmCallGenerator : public CallGenerator {
 522   WarmCallInfo*   _call_info;
 523   CallGenerator*  _if_cold;
 524   CallGenerator*  _if_hot;
 525   bool            _is_virtual;   // caches virtuality of if_cold
 526   bool            _is_inline;    // caches inline-ness of if_hot
 527 


 769   vmIntrinsics::ID iid = callee->intrinsic_id();
 770   input_not_const = true;
 771   switch (iid) {
 772   case vmIntrinsics::_invokeBasic:
 773     {
 774       // Get MethodHandle receiver:
 775       Node* receiver = kit.argument(0);
 776       if (receiver->Opcode() == Op_ConP) {
 777         input_not_const = false;
 778         const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr();
 779         ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget();
 780         guarantee(!target->is_method_handle_intrinsic(), "should not happen");  // XXX remove
 781         const int vtable_index = Method::invalid_vtable_index;
 782         CallGenerator* cg = C->call_generator(target, vtable_index, false, jvms, true, PROB_ALWAYS, NULL, true, true);
 783         assert(cg == NULL || !cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
 784         if (cg != NULL && cg->is_inline())
 785           return cg;
 786       } else {
 787         const char* msg = "receiver not constant";
 788         if (PrintInlining)  C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg);

 789       }
 790     }
 791     break;
 792 
 793   case vmIntrinsics::_linkToVirtual:
 794   case vmIntrinsics::_linkToStatic:
 795   case vmIntrinsics::_linkToSpecial:
 796   case vmIntrinsics::_linkToInterface:
 797     {
 798       // Get MemberName argument:
 799       Node* member_name = kit.argument(callee->arg_size() - 1);
 800       if (member_name->Opcode() == Op_ConP) {
 801         input_not_const = false;
 802         const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr();
 803         ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget();
 804 
 805         // In lambda forms we erase signature types to avoid resolving issues
 806         // involving class loaders.  When we optimize a method handle invoke
 807         // to a direct call we must cast the receiver and arguments to its
 808         // actual types.


 841         ciKlass* speculative_receiver_type = NULL;
 842         if (is_virtual_or_interface) {
 843           ciInstanceKlass* klass = target->holder();
 844           Node*             receiver_node = kit.argument(0);
 845           const TypeOopPtr* receiver_type = gvn.type(receiver_node)->isa_oopptr();
 846           // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 847           target = C->optimize_virtual_call(caller, jvms->bci(), klass, target, receiver_type,
 848                                             is_virtual,
 849                                             call_does_dispatch, vtable_index);  // out-parameters
 850           // We lack profiling at this call but type speculation may
 851           // provide us with a type
 852           speculative_receiver_type = receiver_type->speculative_type();
 853         }
 854         CallGenerator* cg = C->call_generator(target, vtable_index, call_does_dispatch, jvms, true, PROB_ALWAYS, speculative_receiver_type, true, true);
 855         assert(cg == NULL || !cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
 856         if (cg != NULL && cg->is_inline())
 857           return cg;
 858       } else {
 859         const char* msg = "member_name not constant";
 860         if (PrintInlining)  C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg);

 861       }
 862     }
 863     break;
 864 
 865   default:
 866     fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
 867     break;
 868   }
 869   return NULL;
 870 }
 871 
 872 
 873 //------------------------PredictedIntrinsicGenerator------------------------------
 874 // Internal class which handles all predicted Intrinsic calls.
 875 class PredictedIntrinsicGenerator : public CallGenerator {
 876   CallGenerator* _intrinsic;
 877   CallGenerator* _cg;
 878 
 879 public:
 880   PredictedIntrinsicGenerator(CallGenerator* intrinsic,




 266 
 267 // Allow inlining decisions to be delayed
 268 class LateInlineCallGenerator : public DirectCallGenerator {
 269  protected:
 270   CallGenerator* _inline_cg;
 271 
 272   virtual bool do_late_inline_check(JVMState* jvms) { return true; }
 273 
 274  public:
 275   LateInlineCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
 276     DirectCallGenerator(method, true), _inline_cg(inline_cg) {}
 277 
 278   virtual bool is_late_inline() const { return true; }
 279 
 280   // Convert the CallStaticJava into an inline
 281   virtual void do_late_inline();
 282 
 283   virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
 284     Compile *C = Compile::current();
 285 
 286     C->log_inline_id((jlong)this);
 287 
 288     // Record that this call site should be revisited once the main
 289     // parse is finished.
 290     if (!is_mh_late_inline()) {
 291       C->add_late_inline(this);
 292     }
 293 
 294     // Emit the CallStaticJava and request separate projections so
 295     // that the late inlining logic can distinguish between fall
 296     // through and exceptional uses of the memory and io projections
 297     // as is done for allocations and macro expansion.
 298     return DirectCallGenerator::generate(jvms, parent_parser);
 299   }
 300 
 301   virtual void print_inlining_late(const char* msg) {
 302     CallNode* call = call_node();
 303     Compile* C = Compile::current();
 304     C->print_inlining_assert_ready();
 305     C->print_inlining(method(), call->jvms()->depth()-1, call->jvms()->bci(), msg);
 306     C->print_inlining_move_to(this);
 307     C->print_inlining_update_delayed(this);


 353 
 354   uint nargs = method()->arg_size();
 355   // blow away old call arguments
 356   Node* top = C->top();
 357   for (uint i1 = 0; i1 < nargs; i1++) {
 358     map->set_req(TypeFunc::Parms + i1, top);
 359   }
 360   jvms->set_map(map);
 361 
 362   // Make enough space in the expression stack to transfer
 363   // the incoming arguments and return value.
 364   map->ensure_stack(jvms, jvms->method()->max_stack());
 365   for (uint i1 = 0; i1 < nargs; i1++) {
 366     map->set_argument(jvms, i1, call->in(TypeFunc::Parms + i1));
 367   }
 368 
 369   C->print_inlining_assert_ready();
 370 
 371   C->print_inlining_move_to(this);
 372 
 373   C->log_late_inline(this);
 374 
 375   // This check is done here because for_method_handle_inline() method
 376   // needs jvms for inlined state.
 377   if (!do_late_inline_check(jvms)) {
 378     map->disconnect_inputs(NULL, C);
 379     return;
 380   }
 381 











 382   // Setup default node notes to be picked up by the inlining
 383   Node_Notes* old_nn = C->default_node_notes();
 384   if (old_nn != NULL) {
 385     Node_Notes* entry_nn = old_nn->clone(C);
 386     entry_nn->set_jvms(jvms);
 387     C->set_default_node_notes(entry_nn);
 388   }
 389 
 390   // Now perform the inlining using the synthesized JVMState
 391   JVMState* new_jvms = _inline_cg->generate(jvms, NULL);
 392   if (new_jvms == NULL)  return;  // no change
 393   if (C->failing())      return;
 394 
 395   // Capture any exceptional control flow
 396   GraphKit kit(new_jvms);
 397 
 398   // Find the result object
 399   Node* result = C->top();
 400   int   result_size = method()->return_type()->size();
 401   if (result_size != 0 && !kit.stopped()) {


 414   return new LateInlineCallGenerator(method, inline_cg);
 415 }
 416 
// Late inlining of method handle invokes (invokeBasic / linkTo*).  The
// MethodHandle or MemberName argument may only become a constant after
// more of the graph has been built, so the inlining decision is delayed
// and retried during incremental inlining.
class LateInlineMHCallGenerator : public LateInlineCallGenerator {
  ciMethod* _caller;       // Method containing the call site.
  int _attempt;            // Number of late inline attempts made so far.
  bool _input_not_const;   // MH/MemberName input was not constant at parse time.

  // Retry the inlining decision once a JVMState for the call is available.
  virtual bool do_late_inline_check(JVMState* jvms);
  virtual bool already_attempted() const { return _attempt > 0; }

 public:
  // _inline_cg starts out NULL: it is only installed when
  // do_late_inline_check() succeeds in building an inline generator.
  LateInlineMHCallGenerator(ciMethod* caller, ciMethod* callee, bool input_not_const) :
    LateInlineCallGenerator(callee, NULL), _caller(caller), _attempt(0), _input_not_const(input_not_const) {}

  virtual bool is_mh_late_inline() const { return true; }

  virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
    // Emit the placeholder call via the base class (which skips the
    // generic late-inline enqueue for MH late inlines).
    JVMState* new_jvms = LateInlineCallGenerator::generate(jvms, parent_parser);

    Compile* C = Compile::current();
    if (_input_not_const) {
      // inlining won't be possible so no need to enqueue right now.
      call_node()->set_generator(this);
    } else {
      C->add_late_inline(this);
    }
    return new_jvms;
  }
};
 444 
 445 bool LateInlineMHCallGenerator::do_late_inline_check(JVMState* jvms) {
 446 
 447   CallGenerator* cg = for_method_handle_inline(jvms, _caller, method(), _input_not_const);
 448 
 449   Compile::current()->print_inlining_update_delayed(this);
 450 
 451   if (!_input_not_const) {
 452     _attempt++;
 453   }
 454 
 455   if (cg != NULL) {
 456     assert(!cg->is_late_inline() && cg->is_inline(), "we're doing late inlining");
 457     _inline_cg = cg;
 458     Compile::current()->dec_number_of_mh_late_inlines();
 459     return true;
 460   }
 461 
 462   call_node()->set_generator(this);
 463   return false;
 464 }
 465 
 466 CallGenerator* CallGenerator::for_mh_late_inline(ciMethod* caller, ciMethod* callee, bool input_not_const) {
 467   Compile::current()->inc_number_of_mh_late_inlines();
 468   CallGenerator* cg = new LateInlineMHCallGenerator(caller, callee, input_not_const);
 469   return cg;
 470 }
 471 
 472 class LateInlineStringCallGenerator : public LateInlineCallGenerator {
 473 
 474  public:
 475   LateInlineStringCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
 476     LateInlineCallGenerator(method, inline_cg) {}
 477 
 478   virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
 479     Compile *C = Compile::current();
 480 
 481     C->log_inline_id((jlong)this);
 482 
 483     C->add_string_late_inline(this);
 484 
 485     JVMState* new_jvms =  DirectCallGenerator::generate(jvms, parent_parser);
 486     return new_jvms;
 487   }
 488 
 489   virtual bool is_string_late_inline() const { return true; }
 490 };
 491 
 492 CallGenerator* CallGenerator::for_string_late_inline(ciMethod* method, CallGenerator* inline_cg) {
 493   return new LateInlineStringCallGenerator(method, inline_cg);
 494 }
 495 
 496 class LateInlineBoxingCallGenerator : public LateInlineCallGenerator {
 497 
 498  public:
 499   LateInlineBoxingCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
 500     LateInlineCallGenerator(method, inline_cg) {}
 501 
 502   virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
 503     Compile *C = Compile::current();
 504 
 505     C->log_inline_id((jlong)this);
 506 
 507     C->add_boxing_late_inline(this);
 508 
 509     JVMState* new_jvms =  DirectCallGenerator::generate(jvms, parent_parser);
 510     return new_jvms;
 511   }
 512 };
 513 
 514 CallGenerator* CallGenerator::for_boxing_late_inline(ciMethod* method, CallGenerator* inline_cg) {
 515   return new LateInlineBoxingCallGenerator(method, inline_cg);
 516 }
 517 
 518 //---------------------------WarmCallGenerator--------------------------------
 519 // Internal class which handles initial deferral of inlining decisions.
 520 class WarmCallGenerator : public CallGenerator {
 521   WarmCallInfo*   _call_info;
 522   CallGenerator*  _if_cold;
 523   CallGenerator*  _if_hot;
 524   bool            _is_virtual;   // caches virtuality of if_cold
 525   bool            _is_inline;    // caches inline-ness of if_hot
 526 


 768   vmIntrinsics::ID iid = callee->intrinsic_id();
 769   input_not_const = true;
 770   switch (iid) {
 771   case vmIntrinsics::_invokeBasic:
 772     {
 773       // Get MethodHandle receiver:
 774       Node* receiver = kit.argument(0);
 775       if (receiver->Opcode() == Op_ConP) {
 776         input_not_const = false;
 777         const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr();
 778         ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget();
 779         guarantee(!target->is_method_handle_intrinsic(), "should not happen");  // XXX remove
 780         const int vtable_index = Method::invalid_vtable_index;
 781         CallGenerator* cg = C->call_generator(target, vtable_index, false, jvms, true, PROB_ALWAYS, NULL, true, true);
 782         assert(cg == NULL || !cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
 783         if (cg != NULL && cg->is_inline())
 784           return cg;
 785       } else {
 786         const char* msg = "receiver not constant";
 787         if (PrintInlining)  C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg);
 788         C->log_inline_failure(msg);
 789       }
 790     }
 791     break;
 792 
 793   case vmIntrinsics::_linkToVirtual:
 794   case vmIntrinsics::_linkToStatic:
 795   case vmIntrinsics::_linkToSpecial:
 796   case vmIntrinsics::_linkToInterface:
 797     {
 798       // Get MemberName argument:
 799       Node* member_name = kit.argument(callee->arg_size() - 1);
 800       if (member_name->Opcode() == Op_ConP) {
 801         input_not_const = false;
 802         const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr();
 803         ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget();
 804 
 805         // In lambda forms we erase signature types to avoid resolving issues
 806         // involving class loaders.  When we optimize a method handle invoke
 807         // to a direct call we must cast the receiver and arguments to its
 808         // actual types.


 841         ciKlass* speculative_receiver_type = NULL;
 842         if (is_virtual_or_interface) {
 843           ciInstanceKlass* klass = target->holder();
 844           Node*             receiver_node = kit.argument(0);
 845           const TypeOopPtr* receiver_type = gvn.type(receiver_node)->isa_oopptr();
 846           // call_does_dispatch and vtable_index are out-parameters.  They might be changed.
 847           target = C->optimize_virtual_call(caller, jvms->bci(), klass, target, receiver_type,
 848                                             is_virtual,
 849                                             call_does_dispatch, vtable_index);  // out-parameters
 850           // We lack profiling at this call but type speculation may
 851           // provide us with a type
 852           speculative_receiver_type = receiver_type->speculative_type();
 853         }
 854         CallGenerator* cg = C->call_generator(target, vtable_index, call_does_dispatch, jvms, true, PROB_ALWAYS, speculative_receiver_type, true, true);
 855         assert(cg == NULL || !cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here");
 856         if (cg != NULL && cg->is_inline())
 857           return cg;
 858       } else {
 859         const char* msg = "member_name not constant";
 860         if (PrintInlining)  C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg);
 861         C->log_inline_failure(msg);
 862       }
 863     }
 864     break;
 865 
 866   default:
 867     fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
 868     break;
 869   }
 870   return NULL;
 871 }
 872 
 873 
 874 //------------------------PredictedIntrinsicGenerator------------------------------
 875 // Internal class which handles all predicted Intrinsic calls.
 876 class PredictedIntrinsicGenerator : public CallGenerator {
 877   CallGenerator* _intrinsic;
 878   CallGenerator* _cg;
 879 
 880 public:
 881   PredictedIntrinsicGenerator(CallGenerator* intrinsic,


src/share/vm/opto/callGenerator.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File