
src/share/vm/c1/c1_LIRGenerator.cpp

rev 9088 : 8139040: Fix initializations before ShouldNotReachHere() etc. and enable -Wuninitialized on linux.
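
Background for the fix: ShouldNotReachHere() expands to a call into an ordinary reporting function, and (at least in product builds) the compiler cannot prove that the call never returns. A local that is assigned in every branch except the one ending in ShouldNotReachHere() therefore looks potentially uninitialized at its later use, which is exactly what -Wuninitialized flags. In this hunk the two affected locals are freq_log in increment_event_counter() and counter_holder in increment_event_counter_impl(). A minimal standalone sketch of the pattern and of the fix applied below (illustrative names and values, not HotSpot code):

#include <cstdio>
#include <cstdlib>

// Stand-in for ShouldNotReachHere(). In HotSpot the reporting function is
// defined in another translation unit and carries no noreturn annotation,
// so the compiler must assume it can return.
void should_not_reach_here() {
  std::fprintf(stderr, "should not reach here\n");
  std::abort();
}

int pick_freq_log(int level) {
  int freq_log = 0;           // the fix: every path now defines the value
  if (level == 1) {
    freq_log = 11;            // illustrative value
  } else if (level == 2) {
    freq_log = 10;            // illustrative value
  } else {
    should_not_reach_here();  // without the "= 0" above, -Wuninitialized
  }                           // warns that freq_log may be used
  return freq_log;            // uninitialized at this return
}

int main() {
  std::printf("%d\n", pick_freq_log(1));
  return 0;
}

--- old/src/share/vm/c1/c1_LIRGenerator.cpp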


3331     md->set_return_type(bci, exact);
3332   }
3333 }
3334 
3335 void LIRGenerator::do_ProfileInvoke(ProfileInvoke* x) {
3336   // We can safely ignore accessors here, since c2 will inline them anyway;
3337   // accessors are also always mature.
3338   if (!x->inlinee()->is_accessor()) {
3339     CodeEmitInfo* info = state_for(x, x->state(), true);
3340     // Notify the runtime only very infrequently, just to take care of counter overflows
3341     int freq_log = Tier23InlineeNotifyFreqLog;
3342     double scale;
3343     if (_method->has_option_value("CompileThresholdScaling", scale)) {
3344       freq_log = Arguments::scaled_freq_log(freq_log, scale);
3345     }
3346     increment_event_counter_impl(info, x->inlinee(), right_n_bits(freq_log), InvocationEntryBci, false, true);
3347   }
3348 }
3349 
3350 void LIRGenerator::increment_event_counter(CodeEmitInfo* info, int bci, bool backedge) {
3351   int freq_log;
3352   int level = compilation()->env()->comp_level();
3353   if (level == CompLevel_limited_profile) {
3354     freq_log = (backedge ? Tier2BackedgeNotifyFreqLog : Tier2InvokeNotifyFreqLog);
3355   } else if (level == CompLevel_full_profile) {
3356     freq_log = (backedge ? Tier3BackedgeNotifyFreqLog : Tier3InvokeNotifyFreqLog);
3357   } else {
3358     ShouldNotReachHere();
3359   }
3360   // Increment the appropriate invocation/backedge counter and notify the runtime.
3361   double scale;
3362   if (_method->has_option_value("CompileThresholdScaling", scale)) {
3363     freq_log = Arguments::scaled_freq_log(freq_log, scale);
3364   }
3365   increment_event_counter_impl(info, info->scope()->method(), right_n_bits(freq_log), bci, backedge, true);
3366 }
3367 
3368 void LIRGenerator::decrement_age(CodeEmitInfo* info) {
3369   ciMethod* method = info->scope()->method();
3370   MethodCounters* mc_adr = method->ensure_method_counters();
3371   if (mc_adr != NULL) {


3377     __ load(counter, result);
3378     __ sub(result, LIR_OprFact::intConst(1), result);
3379     __ store(result, counter);
3380     // DeoptimizeStub will reexecute from the current state in code info.
3381     CodeStub* deopt = new DeoptimizeStub(info, Deoptimization::Reason_tenured,
3382                                          Deoptimization::Action_make_not_entrant);
3383     __ cmp(lir_cond_lessEqual, result, LIR_OprFact::intConst(0));
3384     __ branch(lir_cond_lessEqual, T_INT, deopt);
3385   }
3386 }
3387 
3388 
3389 void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
3390                                                 ciMethod *method, int frequency,
3391                                                 int bci, bool backedge, bool notify) {
3392   assert(frequency == 0 || is_power_of_2(frequency + 1), "Frequency must be 2^n - 1 or 0");
3393   int level = _compilation->env()->comp_level();
3394   assert(level > CompLevel_simple, "Shouldn't be here");
3395 
3396   int offset = -1;
3397   LIR_Opr counter_holder;
3398   if (level == CompLevel_limited_profile) {
3399     MethodCounters* counters_adr = method->ensure_method_counters();
3400     if (counters_adr == NULL) {
3401       bailout("method counters allocation failed");
3402       return;
3403     }
3404     counter_holder = new_pointer_register();
3405     __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
3406     offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
3407                                  MethodCounters::invocation_counter_offset());
3408   } else if (level == CompLevel_full_profile) {
3409     counter_holder = new_register(T_METADATA);
3410     offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
3411                                  MethodData::invocation_counter_offset());
3412     ciMethodData* md = method->method_data_or_null();
3413     assert(md != NULL, "Sanity");
3414     __ metadata2reg(md->constant_encoding(), counter_holder);
3415   } else {
3416     ShouldNotReachHere();
3417   }
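
An aside on decrement_age() above: the lines the webrev elides (3372-3376) evidently set up the counter address and the result operand; the LIR that follows then implements a simple countdown that deoptimizes the method once its age counter reaches zero. Roughly, in plain C++ (a sketch of the emitted logic, not of the LIR API):

// Sketch of what the LIR emitted by decrement_age() does at run time:
// decrement the method's age counter and, once it drops to zero or below,
// branch to the DeoptimizeStub (Reason_tenured, Action_make_not_entrant).
void decrement_age_sketch(int* counter, void (*deoptimize)()) {
  int result = *counter;   // __ load(counter, result)
  result = result - 1;     // __ sub(result, LIR_OprFact::intConst(1), result)
  *counter = result;       // __ store(result, counter)
  if (result <= 0) {       // __ cmp(lir_cond_lessEqual, result, intConst(0))
    deoptimize();          // __ branch(lir_cond_lessEqual, T_INT, deopt)
  }
}

+++ new/src/share/vm/c1/c1_LIRGenerator.cpp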




3331     md->set_return_type(bci, exact);
3332   }
3333 }
3334 
3335 void LIRGenerator::do_ProfileInvoke(ProfileInvoke* x) {
3336   // We can safely ignore accessors here, since c2 will inline them anyway;
3337   // accessors are also always mature.
3338   if (!x->inlinee()->is_accessor()) {
3339     CodeEmitInfo* info = state_for(x, x->state(), true);
3340     // Notify the runtime only very infrequently, just to take care of counter overflows
3341     int freq_log = Tier23InlineeNotifyFreqLog;
3342     double scale;
3343     if (_method->has_option_value("CompileThresholdScaling", scale)) {
3344       freq_log = Arguments::scaled_freq_log(freq_log, scale);
3345     }
3346     increment_event_counter_impl(info, x->inlinee(), right_n_bits(freq_log), InvocationEntryBci, false, true);
3347   }
3348 }
3349 
3350 void LIRGenerator::increment_event_counter(CodeEmitInfo* info, int bci, bool backedge) {
3351   int freq_log = 0;
3352   int level = compilation()->env()->comp_level();
3353   if (level == CompLevel_limited_profile) {
3354     freq_log = (backedge ? Tier2BackedgeNotifyFreqLog : Tier2InvokeNotifyFreqLog);
3355   } else if (level == CompLevel_full_profile) {
3356     freq_log = (backedge ? Tier3BackedgeNotifyFreqLog : Tier3InvokeNotifyFreqLog);
3357   } else {
3358     ShouldNotReachHere();
3359   }
3360   // Increment the appropriate invocation/backedge counter and notify the runtime.
3361   double scale;
3362   if (_method->has_option_value("CompileThresholdScaling", scale)) {
3363     freq_log = Arguments::scaled_freq_log(freq_log, scale);
3364   }
3365   increment_event_counter_impl(info, info->scope()->method(), right_n_bits(freq_log), bci, backedge, true);
3366 }
3367 
3368 void LIRGenerator::decrement_age(CodeEmitInfo* info) {
3369   ciMethod* method = info->scope()->method();
3370   MethodCounters* mc_adr = method->ensure_method_counters();
3371   if (mc_adr != NULL) {


3377     __ load(counter, result);
3378     __ sub(result, LIR_OprFact::intConst(1), result);
3379     __ store(result, counter);
3380     // DeoptimizeStub will reexecute from the current state in code info.
3381     CodeStub* deopt = new DeoptimizeStub(info, Deoptimization::Reason_tenured,
3382                                          Deoptimization::Action_make_not_entrant);
3383     __ cmp(lir_cond_lessEqual, result, LIR_OprFact::intConst(0));
3384     __ branch(lir_cond_lessEqual, T_INT, deopt);
3385   }
3386 }
3387 
3388 
3389 void LIRGenerator::increment_event_counter_impl(CodeEmitInfo* info,
3390                                                 ciMethod *method, int frequency,
3391                                                 int bci, bool backedge, bool notify) {
3392   assert(frequency == 0 || is_power_of_2(frequency + 1), "Frequency must be 2^n - 1 or 0");
3393   int level = _compilation->env()->comp_level();
3394   assert(level > CompLevel_simple, "Shouldn't be here");
3395 
3396   int offset = -1;
3397   LIR_Opr counter_holder = NULL;
3398   if (level == CompLevel_limited_profile) {
3399     MethodCounters* counters_adr = method->ensure_method_counters();
3400     if (counters_adr == NULL) {
3401       bailout("method counters allocation failed");
3402       return;
3403     }
3404     counter_holder = new_pointer_register();
3405     __ move(LIR_OprFact::intptrConst(counters_adr), counter_holder);
3406     offset = in_bytes(backedge ? MethodCounters::backedge_counter_offset() :
3407                                  MethodCounters::invocation_counter_offset());
3408   } else if (level == CompLevel_full_profile) {
3409     counter_holder = new_register(T_METADATA);
3410     offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
3411                                  MethodData::invocation_counter_offset());
3412     ciMethodData* md = method->method_data_or_null();
3413     assert(md != NULL, "Sanity");
3414     __ metadata2reg(md->constant_encoding(), counter_holder);
3415   } else {
3416     ShouldNotReachHere();
3417   }
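
A note on the frequency argument: the callers pass right_n_bits(freq_log), i.e. a mask of the form 2^n - 1, which is why increment_event_counter_impl() asserts frequency == 0 || is_power_of_2(frequency + 1). When the CompileThresholdScaling option is set for the method (the has_option_value() check), Arguments::scaled_freq_log() first rescales freq_log, and the rescaled value still goes through right_n_bits(), so the invariant is preserved. A small self-contained check (demo names, not the HotSpot functions):

#include <cassert>

// Demo equivalents of HotSpot's right_n_bits() and is_power_of_2().
int right_n_bits_demo(int n)   { return (1 << n) - 1; }  // 2^n - 1
bool is_power_of_2_demo(int x) { return x > 0 && (x & (x - 1)) == 0; }

int main() {
  for (int freq_log = 0; freq_log <= 16; freq_log++) {
    int frequency = right_n_bits_demo(freq_log);  // 0, 1, 3, 7, 15, ...
    // Mirrors the assert at line 3392 above.
    assert(frequency == 0 || is_power_of_2_demo(frequency + 1));
  }
  return 0;
}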

