--- old/src/share/vm/code/dependencies.cpp	Thu Jan 21 15:37:31 2010
+++ new/src/share/vm/code/dependencies.cpp	Thu Jan 21 15:37:31 2010
@@ -843,7 +843,8 @@
   if (occasional_print || final_stats) {
     // Every now and then dump a little info about dependency searching.
     if (xtty != NULL) {
-      xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
+      ttyLocker ttyl;
+      xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
                  deps_find_witness_calls,
                  deps_find_witness_steps,
                  deps_find_witness_recursions,
@@ -850,6 +851,7 @@
                  deps_find_witness_singles);
     }
     if (final_stats || (TraceDependencies && WizardMode)) {
+      ttyLocker ttyl;
       tty->print_cr("Dependency check (find_witness) "
                     "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
                     deps_find_witness_calls,
--- old/src/share/vm/code/nmethod.cpp	Thu Jan 21 15:37:32 2010
+++ new/src/share/vm/code/nmethod.cpp	Thu Jan 21 15:37:32 2010
@@ -1113,7 +1113,6 @@
     if (_method->code() == this) {
       _method->clear_code(); // Break a cycle
     }
-    inc_decompile_count();     // Last chance to make a mark on the MDO
     _method = NULL;            // Clear the method of this dead nmethod
   }
   // Make the class unloaded - i.e., change state and notify sweeper
@@ -1173,15 +1172,17 @@
 bool nmethod::make_not_entrant_or_zombie(unsigned int state) {
   assert(state == zombie || state == not_entrant, "must be zombie or not_entrant");

-  // If the method is already zombie there is nothing to do
-  if (is_zombie()) {
-    return false;
-  }
+  bool was_alive = false;

   // Make sure the nmethod is not flushed in case of a safepoint in code below.
   nmethodLocker nml(this);

   {
+    // If the method is already zombie there is nothing to do
+    if (is_zombie()) {
+      return false;
+    }
+
     // invalidate osr nmethod before acquiring the patching lock since
     // they both acquire leaf locks and we don't want a deadlock.
     // This logic is equivalent to the logic below for patching the
@@ -1219,6 +1220,8 @@
       assert(state == not_entrant, "other cases may need to be handled differently");
     }

+    was_alive = is_in_use(); // Read state under lock
+
     // Change state
     flags.state = state;

@@ -1245,8 +1248,11 @@
       mark_as_seen_on_stack();
     }

-    // It's a true state change, so mark the method as decompiled.
-    inc_decompile_count();
+    if (was_alive) {
+      // It's a true state change, so mark the method as decompiled.
+      // Do it only for transition from alive.
+      inc_decompile_count();
+    }

   // zombie only - if a JVMTI agent has enabled the CompiledMethodUnload event
   // and it hasn't already been reported for this nmethod then report it now.
--- old/src/share/vm/oops/methodDataOop.hpp	Thu Jan 21 15:37:32 2010
+++ new/src/share/vm/oops/methodDataOop.hpp	Thu Jan 21 15:37:32 2010
@@ -1391,6 +1391,9 @@
   }
   void inc_decompile_count() {
     _nof_decompiles += 1;
+    if (decompile_count() > (uint)PerMethodRecompilationCutoff) {
+      method()->set_not_compilable();
+    }
   }

   // Support for code generation
--- old/src/share/vm/oops/methodOop.cpp	Thu Jan 21 15:37:33 2010
+++ new/src/share/vm/oops/methodOop.cpp	Thu Jan 21 15:37:33 2010
@@ -575,12 +575,6 @@
     return true;
   }

-  methodDataOop mdo = method_data();
-  if (mdo != NULL
-      && (uint)mdo->decompile_count() > (uint)PerMethodRecompilationCutoff) {
-    // Since (uint)-1 is large, -1 really means 'no cutoff'.
-    return true;
-  }
 #ifdef COMPILER2
   if (is_tier1_compile(comp_level)) {
     if (is_not_tier1_compilable()) {
@@ -594,6 +588,15 @@

 // call this when compiler finds that this method is not compilable
 void methodOopDesc::set_not_compilable(int comp_level) {
+  if (PrintCompilation) {
+    ttyLocker ttyl;
+    tty->print("made not compilable ");
+    this->print_short_name(tty);
+    int size = this->code_size();
+    if (size > 0)
+      tty->print(" (%d bytes)", size);
+    tty->cr();
+  }
   if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {
     ttyLocker ttyl;
     xtty->begin_elem("make_not_compilable thread='%d'", (int) os::current_thread_id());
--- old/src/share/vm/opto/doCall.cpp	Thu Jan 21 15:37:33 2010
+++ new/src/share/vm/opto/doCall.cpp	Thu Jan 21 15:37:33 2010
@@ -182,26 +182,16 @@
         }
       }
       CallGenerator* miss_cg;
+      Deoptimization::DeoptReason reason = (profile.morphism() == 2) ?
+                                    Deoptimization::Reason_bimorphic :
+                                    Deoptimization::Reason_class_check;
       if (( profile.morphism() == 1 ||
            (profile.morphism() == 2 && next_hit_cg != NULL) ) &&
-
-          !too_many_traps(Deoptimization::Reason_class_check)
-
-           // Check only total number of traps per method to allow
-           // the transition from monomorphic to bimorphic case between
-           // compilations without falling into virtual call.
-           // A monomorphic case may have the class_check trap flag is set
-           // due to the time gap between the uncommon trap processing
-           // when flags are set in MDO and the call site bytecode execution
-           // in Interpreter when MDO counters are updated.
-           // There was also class_check trap in monomorphic case due to
-           // the bug 6225440.
-
+          !too_many_traps(jvms->method(), jvms->bci(), reason)
          ) {
         // Generate uncommon trap for class check failure path
        // in case of monomorphic or bimorphic virtual call site.
-        miss_cg = CallGenerator::for_uncommon_trap(call_method,
-                    Deoptimization::Reason_class_check,
+        miss_cg = CallGenerator::for_uncommon_trap(call_method, reason,
                     Deoptimization::Action_maybe_recompile);
       } else {
         // Generate virtual call for class check failure path
--- old/src/share/vm/runtime/deoptimization.cpp	Thu Jan 21 15:37:34 2010
+++ new/src/share/vm/runtime/deoptimization.cpp	Thu Jan 21 15:37:34 2010
@@ -1332,6 +1332,7 @@
   // Whether the interpreter is producing MDO data or not, we also need
   // to use the MDO to detect hot deoptimization points and control
   // aggressive optimization.
+  bool inc_recompile_count = false;
   if (ProfileTraps && update_trap_state && trap_mdo.not_null()) {
     assert(trap_mdo() == get_method_data(thread, trap_method, false), "sanity");
     uint this_trap_count = 0;
@@ -1374,24 +1375,23 @@
       // Detect repeated recompilation at the same BCI, and enforce a limit.
       if (make_not_entrant && maybe_prior_recompile) {
         // More than one recompile at this point.
-        trap_mdo->inc_overflow_recompile_count();
-        if (maybe_prior_trap
-            && ((uint)trap_mdo->overflow_recompile_count()
-                > (uint)PerBytecodeRecompilationCutoff)) {
-          // Give up on the method containing the bad BCI.
-          if (trap_method() == nm->method()) {
-            make_not_compilable = true;
-          } else {
-            trap_method->set_not_compilable();
-            // But give grace to the enclosing nm->method().
-          }
-        }
+        inc_recompile_count = maybe_prior_trap;
       }
     } else {
-      // For reasons which are not recorded per-bytecode, we simply
-      // force recompiles unconditionally.
-      // (Note that PerMethodRecompilationCutoff is enforced elsewhere.)
-      make_not_entrant = true;
+      if (reason == Reason_bimorphic) {
+        // This isn't recorded per bci because of MDO limitations
+        // but lets piggy back the invalidation on the
+        // Reason_class_check count.
+        uint prior_trap_count = trap_mdo->trap_count(Reason_bimorphic);
+        if (prior_trap_count >= (uint)PerBytecodeTrapLimit) {
+          make_not_entrant = true;
+        }
+      } else {
+        // For reasons which are not recorded per-bytecode, we simply
+        // force recompiles unconditionally.
+        // (Note that PerMethodRecompilationCutoff is enforced elsewhere.)
+        make_not_entrant = true;
+      }
     }

     // Go back to the compiler if there are too many traps in this method.
@@ -1408,7 +1408,12 @@
     // Here's more hysteresis: If there has been a recompile at
     // this trap point already, run the method in the interpreter
     // for a while to exercise it more thoroughly.
-    if (make_not_entrant && maybe_prior_recompile && maybe_prior_trap) {
+    // Also do this to collect more data for transition from monomorphic
+    // case to bimorphic. But recompile method immediately if it is already
+    // compiled with bimorphic code.
+    if (make_not_entrant && maybe_prior_trap &&
+        ((maybe_prior_recompile && reason != Reason_bimorphic) ||
+         (UseBimorphicInlining && reason == Reason_class_check))) {
       reset_counters = true;
     }

@@ -1423,6 +1428,27 @@

     // Take requested actions on the method:

+    // Recompile
+    if (make_not_entrant) {
+      if (!nm->make_not_entrant()) {
+        return; // the call did not change nmethod's state
+      }
+    }
+
+    if (inc_recompile_count) {
+      trap_mdo->inc_overflow_recompile_count();
+      if ((uint)trap_mdo->overflow_recompile_count() >
+          (uint)PerBytecodeRecompilationCutoff) {
+        // Give up on the method containing the bad BCI.
+        if (trap_method() == nm->method()) {
+          make_not_compilable = true;
+        } else {
+          trap_method->set_not_compilable();
+          // But give grace to the enclosing nm->method().
+        }
+      }
+    }
+
     // Reset invocation counters
     if (reset_counters) {
       if (nm->is_osr_method())
@@ -1431,13 +1457,8 @@
         reset_invocation_counter(trap_scope);
     }

-    // Recompile
-    if (make_not_entrant) {
-      nm->make_not_entrant();
-    }
-
     // Give up compiling
-    if (make_not_compilable) {
+    if (make_not_compilable && !nm->method()->is_not_compilable()) {
       assert(make_not_entrant, "consistent");
       nm->method()->set_not_compilable();
     }
@@ -1510,9 +1531,11 @@
     if (tstate1 != tstate0)
       pdata->set_trap_state(tstate1);
   } else {
-    if (LogCompilation && xtty != NULL)
+    if (LogCompilation && xtty != NULL) {
+      ttyLocker ttyl;
       // Missing MDP? Leave a small complaint in the log.
       xtty->elem("missing_mdp bci='%d'", trap_bci);
+    }
   }
 }

@@ -1673,7 +1696,8 @@
   "constraint",
   "div0_check",
   "age",
-  "predicate"
+  "predicate",
+  "bimorphic"
 };
 const char* Deoptimization::_trap_action_name[Action_LIMIT] = {
   // Note: Keep this in sync. with enum DeoptAction.
--- old/src/share/vm/runtime/deoptimization.hpp	Thu Jan 21 15:37:34 2010
+++ new/src/share/vm/runtime/deoptimization.hpp	Thu Jan 21 15:37:34 2010
@@ -47,6 +47,7 @@
     Reason_div0_check,            // a null_check due to division by zero
     Reason_age,                   // nmethod too old; tier threshold reached
     Reason_predicate,             // compiler generated predicate failed
+    Reason_bimorphic,             // failure of bimorphic inlining
     Reason_LIMIT,
     // Note: Keep this enum in sync. with _trap_reason_name.
     Reason_RECORDED_LIMIT = Reason_unloaded   // some are not recorded per bc
--- old/src/share/vm/runtime/globals.hpp	Thu Jan 21 15:37:35 2010
+++ new/src/share/vm/runtime/globals.hpp	Thu Jan 21 15:37:35 2010
@@ -2861,7 +2861,7 @@
   product(intx, PerMethodRecompilationCutoff, 400,                          \
           "After recompiling N times, stay in the interpreter (-1=>'Inf')") \
                                                                             \
-  product(intx, PerBytecodeRecompilationCutoff, 100,                        \
+  product(intx, PerBytecodeRecompilationCutoff, 200,                        \
           "Per-BCI limit on repeated recompilation (-1=>'Inf')")            \
                                                                             \
   product(intx, PerMethodTrapLimit, 100,                                    \