51 // The counter is reset by the sweeper and is decremented by some of the compiled
52 // code. The counter values are interpreted as follows:
// 1. (HotMethodDetectionLimit..INT_MAX] - initial value, no counters inserted
54 // 2. [1..HotMethodDetectionLimit) - the method is warm, the counter is used
55 // to figure out which methods can be flushed.
56 // 3. (INT_MIN..0] - method is hot and will deopt and get
57 // recompiled without the counters
58 int _nmethod_age;
59 int _interpreter_invocation_limit; // per-method InterpreterInvocationLimit
60 int _interpreter_backward_branch_limit; // per-method InterpreterBackwardBranchLimit
61 int _interpreter_profile_limit; // per-method InterpreterProfileLimit
62 int _invoke_mask; // per-method Tier0InvokeNotifyFreqLog
63 int _backedge_mask; // per-method Tier0BackedgeNotifyFreqLog
64 #ifdef TIERED
65 float _rate; // Events (invocation and backedge counter increments) per millisecond
66 jlong _prev_time; // Previous time the rate was acquired
67 u1 _highest_comp_level; // Highest compile level this method has ever seen.
68 u1 _highest_osr_comp_level; // Same for OSR level
69 #endif
70
71 MethodCounters(methodHandle mh) :
72 #if INCLUDE_AOT
73 _method(mh()),
74 #endif
75 _nmethod_age(INT_MAX)
76 #ifdef TIERED
77 , _rate(0),
78 _prev_time(0),
79 _highest_comp_level(0),
80 _highest_osr_comp_level(0)
81 #endif
82 {
83 set_interpreter_invocation_count(0);
84 set_interpreter_throwout_count(0);
85 JVMTI_ONLY(clear_number_of_breakpoints());
86 invocation_counter()->init();
87 backedge_counter()->init();
88
89 if (StressCodeAging) {
90 set_nmethod_age(HotMethodDetectionLimit);
91 }
95 CompilerOracle::has_option_value(mh, "CompileThresholdScaling", scale);
96
97 int compile_threshold = Arguments::scaled_compile_threshold(CompileThreshold, scale);
98 _interpreter_invocation_limit = compile_threshold << InvocationCounter::count_shift;
99 if (ProfileInterpreter) {
100 // If interpreter profiling is enabled, the backward branch limit
101 // is compared against the method data counter rather than an invocation
102 // counter, therefore no shifting of bits is required.
103 _interpreter_backward_branch_limit = (compile_threshold * (OnStackReplacePercentage - InterpreterProfilePercentage)) / 100;
104 } else {
105 _interpreter_backward_branch_limit = ((compile_threshold * OnStackReplacePercentage) / 100) << InvocationCounter::count_shift;
106 }
107 _interpreter_profile_limit = ((compile_threshold * InterpreterProfilePercentage) / 100) << InvocationCounter::count_shift;
108 _invoke_mask = right_n_bits(Arguments::scaled_freq_log(Tier0InvokeNotifyFreqLog, scale)) << InvocationCounter::count_shift;
109 _backedge_mask = right_n_bits(Arguments::scaled_freq_log(Tier0BackedgeNotifyFreqLog, scale)) << InvocationCounter::count_shift;
110 }
111
112 public:
// Virtual type test: identifies this metadata object as a MethodCounters
// (presumably overriding a base-class query that returns false — the base
// declaration is outside this chunk; TODO confirm).
virtual bool is_methodCounters() const volatile { return true; }
114
115 static MethodCounters* allocate(methodHandle mh, TRAPS);
116
// Intentional no-op: nothing out-of-line to free here — the fields visible
// in this chunk are all embedded scalars (no separately allocated contents).
void deallocate_contents(ClassLoaderData* loader_data) {}
118
119 AOT_ONLY(Method* method() const { return _method; })
120
121 static int size() {
122 return align_up((int)sizeof(MethodCounters), wordSize) / wordSize;
123 }
124
125 void clear_counters();
126
127 #if defined(COMPILER2) || INCLUDE_JVMCI
128
// Returns the raw interpreter invocation count
// (field declared earlier in the class, outside this chunk;
// only compiled in with COMPILER2 or JVMCI).
int interpreter_invocation_count() {
  return _interpreter_invocation_count;
}
// Overwrites the interpreter invocation count (e.g. reset to 0 by the
// constructor above).
void set_interpreter_invocation_count(int count) {
  _interpreter_invocation_count = count;
}
135 int increment_interpreter_invocation_count() {
|
51 // The counter is reset by the sweeper and is decremented by some of the compiled
52 // code. The counter values are interpreted as follows:
// 1. (HotMethodDetectionLimit..INT_MAX] - initial value, no counters inserted
54 // 2. [1..HotMethodDetectionLimit) - the method is warm, the counter is used
55 // to figure out which methods can be flushed.
56 // 3. (INT_MIN..0] - method is hot and will deopt and get
57 // recompiled without the counters
58 int _nmethod_age;
59 int _interpreter_invocation_limit; // per-method InterpreterInvocationLimit
60 int _interpreter_backward_branch_limit; // per-method InterpreterBackwardBranchLimit
61 int _interpreter_profile_limit; // per-method InterpreterProfileLimit
62 int _invoke_mask; // per-method Tier0InvokeNotifyFreqLog
63 int _backedge_mask; // per-method Tier0BackedgeNotifyFreqLog
64 #ifdef TIERED
65 float _rate; // Events (invocation and backedge counter increments) per millisecond
66 jlong _prev_time; // Previous time the rate was acquired
67 u1 _highest_comp_level; // Highest compile level this method has ever seen.
68 u1 _highest_osr_comp_level; // Same for OSR level
69 #endif
70
71 MethodCounters(const methodHandle& mh) :
72 #if INCLUDE_AOT
73 _method(mh()),
74 #endif
75 _nmethod_age(INT_MAX)
76 #ifdef TIERED
77 , _rate(0),
78 _prev_time(0),
79 _highest_comp_level(0),
80 _highest_osr_comp_level(0)
81 #endif
82 {
83 set_interpreter_invocation_count(0);
84 set_interpreter_throwout_count(0);
85 JVMTI_ONLY(clear_number_of_breakpoints());
86 invocation_counter()->init();
87 backedge_counter()->init();
88
89 if (StressCodeAging) {
90 set_nmethod_age(HotMethodDetectionLimit);
91 }
95 CompilerOracle::has_option_value(mh, "CompileThresholdScaling", scale);
96
97 int compile_threshold = Arguments::scaled_compile_threshold(CompileThreshold, scale);
98 _interpreter_invocation_limit = compile_threshold << InvocationCounter::count_shift;
99 if (ProfileInterpreter) {
100 // If interpreter profiling is enabled, the backward branch limit
101 // is compared against the method data counter rather than an invocation
102 // counter, therefore no shifting of bits is required.
103 _interpreter_backward_branch_limit = (compile_threshold * (OnStackReplacePercentage - InterpreterProfilePercentage)) / 100;
104 } else {
105 _interpreter_backward_branch_limit = ((compile_threshold * OnStackReplacePercentage) / 100) << InvocationCounter::count_shift;
106 }
107 _interpreter_profile_limit = ((compile_threshold * InterpreterProfilePercentage) / 100) << InvocationCounter::count_shift;
108 _invoke_mask = right_n_bits(Arguments::scaled_freq_log(Tier0InvokeNotifyFreqLog, scale)) << InvocationCounter::count_shift;
109 _backedge_mask = right_n_bits(Arguments::scaled_freq_log(Tier0BackedgeNotifyFreqLog, scale)) << InvocationCounter::count_shift;
110 }
111
112 public:
// Virtual type test: identifies this metadata object as a MethodCounters
// (presumably overriding a base-class query that returns false — the base
// declaration is outside this chunk; TODO confirm).
virtual bool is_methodCounters() const volatile { return true; }
114
115 static MethodCounters* allocate(const methodHandle& mh, TRAPS);
116
// Intentional no-op: nothing out-of-line to free here — the fields visible
// in this chunk are all embedded scalars (no separately allocated contents).
void deallocate_contents(ClassLoaderData* loader_data) {}
118
119 AOT_ONLY(Method* method() const { return _method; })
120
121 static int size() {
122 return align_up((int)sizeof(MethodCounters), wordSize) / wordSize;
123 }
124
125 void clear_counters();
126
127 #if defined(COMPILER2) || INCLUDE_JVMCI
128
// Returns the raw interpreter invocation count
// (field declared earlier in the class, outside this chunk;
// only compiled in with COMPILER2 or JVMCI).
int interpreter_invocation_count() {
  return _interpreter_invocation_count;
}
// Overwrites the interpreter invocation count (e.g. reset to 0 by the
// constructor above).
void set_interpreter_invocation_count(int count) {
  _interpreter_invocation_count = count;
}
135 int increment_interpreter_invocation_count() {
|