src/share/vm/c1/c1_GraphBuilder.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/c1

src/share/vm/c1/c1_GraphBuilder.cpp

Print this page
rev 5403 : 8023657: New type profiling points: arguments to call
Summary: x86 interpreter and c1 type profiling for arguments at calls
Reviewed-by: kvn, twisti
rev 5404 : 8026054: New type profiling points: type of return values at calls
Summary: x86 interpreter and c1 type profiling for return values at calls
Reviewed-by:
rev 5405 : imported patch kvn
rev 5406 : imported patch twisti


1449       // Report exit from inline methods
1450       Values* args = new Values(1);
1451       args->push(append(new Constant(new MethodConstant(method()))));
1452       append(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args));
1453     }
1454 
1455     // If the inlined method is synchronized, the monitor must be
1456     // released before we jump to the continuation block.
1457     if (method()->is_synchronized()) {
1458       assert(state()->locks_size() == 1, "receiver must be locked here");
1459       monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
1460     }
1461 
1462     if (need_mem_bar) {
1463       append(new MemBar(lir_membar_storestore));
1464     }
1465 
1466     // State at end of inlined method is the state of the caller
1467     // without the method parameters on stack, including the
1468     // return value, if any, of the inlined method on operand stack.

1469     set_state(state()->caller_state()->copy_for_parsing());
1470     if (x != NULL) {
1471       state()->push(x->type(), x);












1472     }
1473     Goto* goto_callee = new Goto(continuation(), false);
1474 
1475     // See whether this is the first return; if so, store off some
1476     // of the state for later examination
1477     if (num_returns() == 0) {
1478       set_inline_cleanup_info();
1479     }
1480 
1481     // The current bci() is in the wrong scope, so use the bci() of
1482     // the continuation point.
1483     append_with_bci(goto_callee, scope_data()->continuation()->bci());
1484     incr_num_returns();
1485     return;
1486   }
1487 
1488   state()->truncate_stack(0);
1489   if (method()->is_synchronized()) {
1490     // perform the unlocking before exiting the method
1491     Value receiver;


1991       if (cha_monomorphic_target != NULL) {
1992         target_klass = cha_monomorphic_target->holder();
1993       } else if (exact_target != NULL) {
1994         target_klass = exact_target->holder();
1995       }
1996       profile_call(target, recv, target_klass, collect_args_for_profiling(args, false), false);
1997     }
1998   }
1999 
2000   Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
2001   // push result
2002   append_split(result);
2003 
2004   if (result_type != voidType) {
2005     if (method()->is_strict()) {
2006       push(result_type, round_fp(result));
2007     } else {
2008       push(result_type, result);
2009     }
2010   }



2011 }
2012 
2013 
// Parse a 'new' bytecode: resolve the target klass from the bytecode stream,
// build a NewInstance HIR node, and push the new object reference.
// klass_index: constant-pool index of the class (resolution goes through
// stream()->get_klass() here; the index parameter itself is not read below).
2014 void GraphBuilder::new_instance(int klass_index) {
2015   ValueStack* state_before = copy_state_exhandling();
2016   bool will_link;
2017   ciKlass* klass = stream()->get_klass(will_link);
2018   assert(klass->is_instance_klass(), "must be an instance klass");
2019   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before);
  // Record the allocation with the memory buffer (presumably so later loads
  // from this object can be optimized — confirm against MemoryBuffer).
2020   _memory->new_instance(new_instance);
2021   apush(append_split(new_instance));
2022 }
2023 
2024 
// Parse a 'newarray' bytecode: pop the array length (ipop), read the element
// BasicType from the bytecode operand, and push the new array reference.
2025 void GraphBuilder::new_type_array() {
2026   ValueStack* state_before = copy_state_exhandling();
2027   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2028 }
2029 
2030 


3539     if (callee != method()) {
3540       // Note that we'd collect profile data in this method if we wanted it.
3541       compilation()->set_would_profile(true);
3542       if (profile_calls()) {
3543         Value recv = NULL;
3544         if (has_receiver) {
3545           recv = args->at(0);
3546           null_check(recv);
3547         }
3548         profile_call(callee, recv, NULL, collect_args_for_profiling(args, true), true);
3549       }
3550     }
3551   }
3552 
3553   Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
3554                                     preserves_state, cantrap);
3555   // append instruction & push result
3556   Value value = append_split(result);
3557   if (result_type != voidType) push(result_type, value);
3558 




3559   // done
3560   return true;
3561 }
3562 
3563 
3564 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3565   // Introduce a new callee continuation point - all Ret instructions
3566   // will be replaced with Gotos to this point.
3567   BlockBegin* cont = block_at(next_bci());
3568   assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr");
3569 
3570   // Note: can not assign state to continuation yet, as we have to
3571   // pick up the state from the Ret instructions.
3572 
3573   // Push callee scope
3574   push_scope_for_jsr(cont, jsr_dest_bci);
3575 
3576   // Temporarily set up bytecode stream so we can append instructions
3577   // (only using the bci of this stream)
3578   scope_data()->set_stream(scope_data()->parent()->stream());


4293     Instruction* offset = args->at(2);
4294 #ifndef _LP64
4295     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4296 #endif
4297     Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4298     compilation()->set_has_unsafe_access(true);
4299     kill_all();
4300     push(op->type(), op);
4301   }
4302   return InlineUnsafeOps;
4303 }
4304 
4305 #ifndef PRODUCT
// Debug-only diagnostic: dump the graph builder's value map (presumably the
// value-numbering map — confirm against vmap()). Compiled out of PRODUCT builds.
4306 void GraphBuilder::print_stats() {
4307   vmap()->print();
4308 }
4309 #endif // PRODUCT
4310 
// Emit a ProfileCall instruction for a call site in the current method.
// The instruction captures: the calling method and current bci, the callee,
// the receiver value (may be NULL for static calls), the exact holder klass
// when known (known_holder, may be NULL), the argument values collected for
// type profiling (obj_args, may be NULL), and whether the call is inlined.
4311 void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
4312   append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));















4313 }
4314 
// Emit a ProfileInvoke instruction recording an invocation of 'callee' with
// the given value stack 'state' (used for invocation-count profiling).
4315 void GraphBuilder::profile_invocation(ciMethod* callee, ValueStack* state) {
4316   append(new ProfileInvoke(callee, state));
4317 }


1449       // Report exit from inline methods
1450       Values* args = new Values(1);
1451       args->push(append(new Constant(new MethodConstant(method()))));
1452       append(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args));
1453     }
1454 
1455     // If the inlined method is synchronized, the monitor must be
1456     // released before we jump to the continuation block.
1457     if (method()->is_synchronized()) {
1458       assert(state()->locks_size() == 1, "receiver must be locked here");
1459       monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
1460     }
1461 
1462     if (need_mem_bar) {
1463       append(new MemBar(lir_membar_storestore));
1464     }
1465 
1466     // State at end of inlined method is the state of the caller
1467     // without the method parameters on stack, including the
1468     // return value, if any, of the inlined method on operand stack.
1469     int invoke_bci = state()->caller_state()->bci();
1470     set_state(state()->caller_state()->copy_for_parsing());
1471     if (x != NULL) {
1472       state()->push(x->type(), x);
1473       if (profile_calls() && MethodData::profile_return() && x->type()->is_object_kind()) {
1474         ciMethod* caller = state()->scope()->method();
1475         ciMethodData* md = caller->method_data_or_null();
1476         ciProfileData* data = md->bci_to_data(invoke_bci);
1477         if (data->is_CallTypeData() || data->is_VirtualCallTypeData()) {
1478           bool has_return = data->is_CallTypeData() ? ((ciCallTypeData*)data)->has_return() : ((ciVirtualCallTypeData*)data)->has_return();
1479           // May not be true in case of an inlined call through a method handle intrinsic.
1480           if (has_return) {
1481             profile_return_type(x, method(), caller, invoke_bci);
1482           }
1483         }
1484       }
1485     }
1486     Goto* goto_callee = new Goto(continuation(), false);
1487 
1488     // See whether this is the first return; if so, store off some
1489     // of the state for later examination
1490     if (num_returns() == 0) {
1491       set_inline_cleanup_info();
1492     }
1493 
1494     // The current bci() is in the wrong scope, so use the bci() of
1495     // the continuation point.
1496     append_with_bci(goto_callee, scope_data()->continuation()->bci());
1497     incr_num_returns();
1498     return;
1499   }
1500 
1501   state()->truncate_stack(0);
1502   if (method()->is_synchronized()) {
1503     // perform the unlocking before exiting the method
1504     Value receiver;


2004       if (cha_monomorphic_target != NULL) {
2005         target_klass = cha_monomorphic_target->holder();
2006       } else if (exact_target != NULL) {
2007         target_klass = exact_target->holder();
2008       }
2009       profile_call(target, recv, target_klass, collect_args_for_profiling(args, false), false);
2010     }
2011   }
2012 
2013   Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
2014   // push result
2015   append_split(result);
2016 
2017   if (result_type != voidType) {
2018     if (method()->is_strict()) {
2019       push(result_type, round_fp(result));
2020     } else {
2021       push(result_type, result);
2022     }
2023   }
2024   if (profile_calls() && MethodData::profile_return() && result_type->is_object_kind()) {
2025     profile_return_type(result, target);
2026   }
2027 }
2028 
2029 
// Parse a 'new' bytecode: resolve the target klass from the bytecode stream,
// build a NewInstance HIR node, and push the new object reference.
// klass_index: constant-pool index of the class (resolution goes through
// stream()->get_klass() here; the index parameter itself is not read below).
2030 void GraphBuilder::new_instance(int klass_index) {
2031   ValueStack* state_before = copy_state_exhandling();
2032   bool will_link;
2033   ciKlass* klass = stream()->get_klass(will_link);
2034   assert(klass->is_instance_klass(), "must be an instance klass");
2035   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before);
  // Record the allocation with the memory buffer (presumably so later loads
  // from this object can be optimized — confirm against MemoryBuffer).
2036   _memory->new_instance(new_instance);
2037   apush(append_split(new_instance));
2038 }
2039 
2040 
// Parse a 'newarray' bytecode: pop the array length (ipop), read the element
// BasicType from the bytecode operand, and push the new array reference.
2041 void GraphBuilder::new_type_array() {
2042   ValueStack* state_before = copy_state_exhandling();
2043   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2044 }
2045 
2046 


3555     if (callee != method()) {
3556       // Note that we'd collect profile data in this method if we wanted it.
3557       compilation()->set_would_profile(true);
3558       if (profile_calls()) {
3559         Value recv = NULL;
3560         if (has_receiver) {
3561           recv = args->at(0);
3562           null_check(recv);
3563         }
3564         profile_call(callee, recv, NULL, collect_args_for_profiling(args, true), true);
3565       }
3566     }
3567   }
3568 
3569   Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
3570                                     preserves_state, cantrap);
3571   // append instruction & push result
3572   Value value = append_split(result);
3573   if (result_type != voidType) push(result_type, value);
3574 
3575   if (callee != method() && profile_calls() && MethodData::profile_return() && result_type->is_object_kind()) {
3576     profile_return_type(result, callee);
3577   }
3578 
3579   // done
3580   return true;
3581 }
3582 
3583 
3584 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
3585   // Introduce a new callee continuation point - all Ret instructions
3586   // will be replaced with Gotos to this point.
3587   BlockBegin* cont = block_at(next_bci());
3588   assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr");
3589 
3590   // Note: can not assign state to continuation yet, as we have to
3591   // pick up the state from the Ret instructions.
3592 
3593   // Push callee scope
3594   push_scope_for_jsr(cont, jsr_dest_bci);
3595 
3596   // Temporarily set up bytecode stream so we can append instructions
3597   // (only using the bci of this stream)
3598   scope_data()->set_stream(scope_data()->parent()->stream());


4313     Instruction* offset = args->at(2);
4314 #ifndef _LP64
4315     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
4316 #endif
4317     Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
4318     compilation()->set_has_unsafe_access(true);
4319     kill_all();
4320     push(op->type(), op);
4321   }
4322   return InlineUnsafeOps;
4323 }
4324 
4325 #ifndef PRODUCT
// Debug-only diagnostic: dump the graph builder's value map (presumably the
// value-numbering map — confirm against vmap()). Compiled out of PRODUCT builds.
4326 void GraphBuilder::print_stats() {
4327   vmap()->print();
4328 }
4329 #endif // PRODUCT
4330 
// Emit a ProfileCall instruction for a call site in the current method.
// The instruction captures: the calling method and current bci, the callee,
// the receiver value (may be NULL for static calls), the exact holder klass
// when known (known_holder, may be NULL), the argument values collected for
// type profiling (obj_args, may be NULL), and whether the call is inlined.
4331 void GraphBuilder::profile_call(ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) {
4332   append(new ProfileCall(method(), bci(), callee, recv, known_holder, obj_args, inlined));
4333 }
4334 
// Emit a ProfileReturnType instruction to record the type of the value
// returned by 'callee' at a given call site (JDK-8026054).
//   ret        - the returned value being profiled
//   callee     - the method whose return value is profiled
//   m          - method containing the call site; NULL means "current method"
//   invoke_bci - bci of the invoke in 'm'; negative means "current bci"
// m and invoke_bci must be both defaulted or both supplied (asserted below).
4335 void GraphBuilder::profile_return_type(Value ret, ciMethod* callee, ciMethod* m, int invoke_bci) {
4336   assert((m == NULL) == (invoke_bci < 0), "invalid method and invalid bci together");
4337   if (m == NULL) {
4338     m = method();
4339   }
4340   if (invoke_bci < 0) {
4341     invoke_bci = bci();
4342   }
  // NOTE(review): method_data_or_null() can by its name return NULL, and 'md'
  // is dereferenced unguarded below — callers presumably only reach here when
  // profiling is enabled and an MDO exists; confirm at the call sites.
4343   ciMethodData* md = m->method_data_or_null();
4344   ciProfileData* data = md->bci_to_data(invoke_bci);
  // Only profile when the MDO slot at this bci actually has return-type cells.
4345   if (data->is_CallTypeData() || data->is_VirtualCallTypeData()) {
4346     append(new ProfileReturnType(m , invoke_bci, callee, ret));
4347   }
4348 }
4349 
// Emit a ProfileInvoke instruction recording an invocation of 'callee' with
// the given value stack 'state' (used for invocation-count profiling).
4350 void GraphBuilder::profile_invocation(ciMethod* callee, ValueStack* state) {
4351   append(new ProfileInvoke(callee, state));
4352 }
src/share/vm/c1/c1_GraphBuilder.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File