< prev index next >

src/share/vm/opto/callGenerator.cpp

Print this page




 137   PhaseGVN& gvn = kit.gvn();
 138   bool is_static = method()->is_static();
 139   address target = is_static ? SharedRuntime::get_resolve_static_call_stub()
 140                              : SharedRuntime::get_resolve_opt_virtual_call_stub();
 141 
 142   if (kit.C->log() != NULL) {
 143     kit.C->log()->elem("direct_call bci='%d'", jvms->bci());
 144   }
 145 
 146   CallStaticJavaNode *call = new CallStaticJavaNode(kit.C, tf(), target, method(), kit.bci());
 147   if (is_inlined_mh_linker(jvms, method())) {
 148     // To be able to issue a direct call and skip a call to MH.linkTo*/invokeBasic adapter,
 149     // additional information about the method being invoked should be attached
 150     // to the call site to make resolution logic work
 151     // (see SharedRuntime::resolve_static_call_C).
 152     call->set_override_symbolic_info(true);
 153   }
 154   _call_node = call;  // Save the call node in case we need it later
 155   if (!is_static) {
 156     if (kit.argument(0)->is_ValueType()) {

 157       ValueTypeNode* vt = kit.argument(0)->as_ValueType();
 158       vt->store_to_memory(&kit);

 159     } else {
 160       // Make an explicit receiver null_check as part of this call.
 161       // Since we share a map with the caller, his JVMS gets adjusted.
 162       kit.null_check_receiver_before_call(method());
 163     }
 164     if (kit.stopped()) {
 165       // And dump it back to the caller, decorated with any exceptions:
 166       return kit.transfer_exceptions_into_jvms();
 167     }
 168     // Mark the call node as virtual, sort of:
 169     call->set_optimized_virtual(true);
 170     if (method()->is_method_handle_intrinsic() ||
 171         method()->is_compiled_lambda_form()) {
 172       call->set_method_handle_invoke(true);
 173     }
 174   }
 175   kit.set_arguments_for_java_call(call);
 176   kit.set_edges_for_java_call(call, false, _separate_io_proj);
 177   Node* ret = kit.set_results_for_java_call(call, _separate_io_proj);
 178   // Check if return value is a value type pointer


 353     C->print_inlining_update_delayed(this);
 354   }
 355 
       // Record the caller-assigned unique id for this call generator.
       // _unique_id is a field of the enclosing class (declared outside this view);
       // presumably used to correlate late-inline decisions in logs — TODO confirm.
 356   virtual void set_unique_id(jlong id) {
 357     _unique_id = id;
 358   }
 359 
       // Return the unique id previously stored via set_unique_id().
 360   virtual jlong unique_id() const {
 361     return _unique_id;
 362   }
 363 };
 364 
 365 void LateInlineCallGenerator::do_late_inline() {
 366   // Can't inline it
 367   CallStaticJavaNode* call = call_node();
 368   if (call == NULL || call->outcnt() == 0 ||
 369       call->in(0) == NULL || call->in(0)->is_top()) {
 370     return;
 371   }
 372 
 373   const TypeTuple *r = call->tf()->domain();




 374   for (int i1 = 0; i1 < method()->arg_size(); i1++) {
 375     if (call->in(TypeFunc::Parms + i1)->is_top() && r->field_at(TypeFunc::Parms + i1) != Type::HALF) {
 376       assert(Compile::current()->inlining_incrementally(), "shouldn't happen during parsing");
 377       return;
 378     }
 379   }
 380 
 381   if (call->in(TypeFunc::Memory)->is_top()) {
 382     assert(Compile::current()->inlining_incrementally(), "shouldn't happen during parsing");
 383     return;
 384   }
 385 
 386   Compile* C = Compile::current();
 387   // Remove inlined methods from Compiler's lists.
 388   if (call->is_macro()) {
 389     C->remove_macro_node(call);
 390   }
 391 
 392   // Make a clone of the JVMState that appropriate to use for driving a parse
 393   JVMState* old_jvms = call->jvms();




 137   PhaseGVN& gvn = kit.gvn();
 138   bool is_static = method()->is_static();
 139   address target = is_static ? SharedRuntime::get_resolve_static_call_stub()
 140                              : SharedRuntime::get_resolve_opt_virtual_call_stub();
 141 
 142   if (kit.C->log() != NULL) {
 143     kit.C->log()->elem("direct_call bci='%d'", jvms->bci());
 144   }
 145 
 146   CallStaticJavaNode *call = new CallStaticJavaNode(kit.C, tf(), target, method(), kit.bci());
 147   if (is_inlined_mh_linker(jvms, method())) {
 148     // To be able to issue a direct call and skip a call to MH.linkTo*/invokeBasic adapter,
 149     // additional information about the method being invoked should be attached
 150     // to the call site to make resolution logic work
 151     // (see SharedRuntime::resolve_static_call_C).
 152     call->set_override_symbolic_info(true);
 153   }
 154   _call_node = call;  // Save the call node in case we need it later
 155   if (!is_static) {
 156     if (kit.argument(0)->is_ValueType()) {
 157       if (!ValueTypePassFieldsAsArgs) {
 158         ValueTypeNode* vt = kit.argument(0)->as_ValueType();
 159         vt->store_to_memory(&kit);
 160       }
 161     } else {
 162       // Make an explicit receiver null_check as part of this call.
 163       // Since we share a map with the caller, his JVMS gets adjusted.
 164       kit.null_check_receiver_before_call(method());
 165     }
 166     if (kit.stopped()) {
 167       // And dump it back to the caller, decorated with any exceptions:
 168       return kit.transfer_exceptions_into_jvms();
 169     }
 170     // Mark the call node as virtual, sort of:
 171     call->set_optimized_virtual(true);
 172     if (method()->is_method_handle_intrinsic() ||
 173         method()->is_compiled_lambda_form()) {
 174       call->set_method_handle_invoke(true);
 175     }
 176   }
 177   kit.set_arguments_for_java_call(call);
 178   kit.set_edges_for_java_call(call, false, _separate_io_proj);
 179   Node* ret = kit.set_results_for_java_call(call, _separate_io_proj);
 180   // Check if return value is a value type pointer


 355     C->print_inlining_update_delayed(this);
 356   }
 357 
       // Record the caller-assigned unique id for this call generator.
       // _unique_id is a field of the enclosing class (declared outside this view);
       // presumably used to correlate late-inline decisions in logs — TODO confirm.
 358   virtual void set_unique_id(jlong id) {
 359     _unique_id = id;
 360   }
 361 
       // Return the unique id previously stored via set_unique_id().
 362   virtual jlong unique_id() const {
 363     return _unique_id;
 364   }
 365 };
 366 
 367 void LateInlineCallGenerator::do_late_inline() {
 368   // Can't inline it
 369   CallStaticJavaNode* call = call_node();
 370   if (call == NULL || call->outcnt() == 0 ||
 371       call->in(0) == NULL || call->in(0)->is_top()) {
 372     return;
 373   }
 374   
 375   // FIXME: late inlining of methods that take value type arguments is
 376   // broken: arguments at the call are set up so fields of value type
 377   // arguments are passed but code here expects a single argument per
 378   // value type (a ValueTypeNode) instead.
 379   const TypeTuple *r = call->tf()->domain_sig();
 380   for (int i1 = 0; i1 < method()->arg_size(); i1++) {
 381     if (call->in(TypeFunc::Parms + i1)->is_top() && r->field_at(TypeFunc::Parms + i1) != Type::HALF) {
 382       assert(Compile::current()->inlining_incrementally(), "shouldn't happen during parsing");
 383       return;
 384     }
 385   }
 386 
 387   if (call->in(TypeFunc::Memory)->is_top()) {
 388     assert(Compile::current()->inlining_incrementally(), "shouldn't happen during parsing");
 389     return;
 390   }
 391 
 392   Compile* C = Compile::current();
 393   // Remove inlined methods from Compiler's lists.
 394   if (call->is_macro()) {
 395     C->remove_macro_node(call);
 396   }
 397 
 398   // Make a clone of the JVMState that appropriate to use for driving a parse
 399   JVMState* old_jvms = call->jvms();


< prev index next >