# HG changeset patch
# User shade
# Date 1548866969 -3600
#      Wed Jan 30 17:49:29 2019 +0100
# Node ID d3b5481f474af7f9eeba5f5b6ac84b7ab9cb3ab6
# Parent  c88533ce800a6fec3e60f47f75c0c1a89b1a0b65
8218031: Zero broken after JDK-8217922 (Compiler dead code removal)
Reviewed-by: thartmann, sgehwolf, shade
Contributed-by: Ao Qi

diff --git a/src/hotspot/share/interpreter/invocationCounter.cpp b/src/hotspot/share/interpreter/invocationCounter.cpp
--- a/src/hotspot/share/interpreter/invocationCounter.cpp
+++ b/src/hotspot/share/interpreter/invocationCounter.cpp
@@ -79,6 +79,10 @@
 
 int                       InvocationCounter::_init  [InvocationCounter::number_of_states];
 InvocationCounter::Action InvocationCounter::_action[InvocationCounter::number_of_states];
+#ifdef CC_INTERP
+int InvocationCounter::InterpreterInvocationLimit;
+int InvocationCounter::InterpreterBackwardBranchLimit;
+#endif
 
 const char* InvocationCounter::state_as_string(State state) {
   switch (state) {
@@ -132,6 +136,22 @@
   guarantee((int)number_of_states <= (int)state_limit, "adjust number_of_state_bits");
   def(wait_for_nothing, 0, do_nothing);
   def(wait_for_compile, 0, do_decay);
+
+#ifdef CC_INTERP
+  InterpreterInvocationLimit = CompileThreshold << number_of_noncount_bits;
+
+  // When methodData is collected, the backward branch limit is compared against a
+  // methodData counter, rather than an InvocationCounter. In the former case, we
+  // don't need the shift by number_of_noncount_bits, but we do need to adjust
+  // the factor by which we scale the threshold.
+  if (ProfileInterpreter) {
+    InterpreterBackwardBranchLimit = (int)((int64_t)CompileThreshold * (OnStackReplacePercentage - InterpreterProfilePercentage) / 100);
+  } else {
+    InterpreterBackwardBranchLimit = (int)(((int64_t)CompileThreshold * OnStackReplacePercentage / 100) << number_of_noncount_bits);
+  }
+
+  assert(0 <= InterpreterBackwardBranchLimit, "OSR threshold should be non-negative");
+#endif
 }
 
 void invocationCounter_init() {
diff --git a/src/hotspot/share/interpreter/invocationCounter.hpp b/src/hotspot/share/interpreter/invocationCounter.hpp
--- a/src/hotspot/share/interpreter/invocationCounter.hpp
+++ b/src/hotspot/share/interpreter/invocationCounter.hpp
@@ -92,6 +92,9 @@
   int count() const { return _counter >> number_of_noncount_bits; }
 
 #ifdef CC_INTERP
+  static int InterpreterInvocationLimit;        // CompileThreshold scaled for interpreter use
+  static int InterpreterBackwardBranchLimit;    // A separate threshold for on stack replacement
+
   // Test counter using scaled limits like the asm interpreter would do rather than doing
   // the shifts to normalize the counter.
   // Checks sum of invocation_counter and backedge_counter as the template interpreter does.
@@ -103,11 +106,6 @@
     return (_counter & count_mask) + (back_edge_count->_counter & count_mask) >=
            (unsigned int) InterpreterBackwardBranchLimit;
   }
-  // Do this just like asm interpreter does for max speed.
-  bool reached_ProfileLimit(InvocationCounter *back_edge_count) const {
-    return (_counter & count_mask) + (back_edge_count->_counter & count_mask) >=
-           (unsigned int) InterpreterProfileLimit;
-  }
 #endif // CC_INTERP
 
   void increment() { _counter += count_increment; }