80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 // note: can have vtables with >2**16 elements (because of inheritance)
82 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
83
84 // Flags
85 enum Flags {
86 _caller_sensitive = 1 << 0,
87 _force_inline = 1 << 1,
88 _dont_inline = 1 << 2,
89 _hidden = 1 << 3,
90 _has_injected_profile = 1 << 4,
91 _running_emcp = 1 << 5,
92 _intrinsic_candidate = 1 << 6,
93 _reserved_stack_access = 1 << 7
94 };
95 mutable u2 _flags; // bitset of the Flags enum above; mutable — NOTE(review): presumably so lazily-computed flags can be set on const receivers; confirm
96
97 JFR_ONLY(DEFINE_TRACE_FLAG;)
98
99 #ifndef PRODUCT
100 int _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging)
101 #endif
102 // Entry point for calling both from and to the interpreter.
103 address _i2i_entry; // All-args-on-stack calling convention
104 // Entry point for calling from compiled code, to compiled code if it exists
105 // or else the interpreter.
106 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
107 // The entry point for calling both from and to compiled code is
108 // "_code->entry_point()". Because of tiered compilation and de-opt, this
109 // field can come and go. It can transition from NULL to not-null at any
110 // time (whenever a compile completes). It can transition from not-null to
111 // NULL only at safepoints (because of a de-opt).
112 CompiledMethod* volatile _code; // Points to the corresponding piece of native code
113 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
114
115 #if INCLUDE_AOT && defined(TIERED)
116 CompiledMethod* _aot_code; // only present in AOT + tiered builds (see #if guard)
117 #endif
118
119 // Constructor
120 Method(ConstMethod* xconst, AccessFlags access_flags);
430
431 static MethodCounters* build_method_counters(Method* m, TRAPS); // creates the MethodCounters for m; TRAPS — NOTE(review): presumably can throw on allocation failure; confirm
432
433 int interpreter_invocation_count() {
434 // Under tiered compilation a single unified invocation counter is used.
435 if (TieredCompilation) return invocation_count();
436 // Otherwise the counter lives in the (lazily allocated) MethodCounters.
437 MethodCounters* counters = method_counters();
438 if (counters == NULL) return 0;
439 return counters->interpreter_invocation_count();
440 }
441 #if COMPILER2_OR_JVMCI
442 int increment_interpreter_invocation_count(TRAPS) {
443 if (TieredCompilation) ShouldNotReachHere(); // only valid in non-tiered mode
444 MethodCounters* counters = get_method_counters(CHECK_0); // NOTE(review): presumably may allocate and throw; CHECK_0 returns 0 on pending exception
445 return (counters != NULL) ? counters->increment_interpreter_invocation_count() : 0;
446 }
447 #endif
448
449 #ifndef PRODUCT
450 int compiled_invocation_count() const { return _compiled_invocation_count; } // NOTE(review): 32-bit counter can wrap for very hot methods; consider widening to int64_t
451 void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }
452 #else
453 // for PrintMethodData in a product build
454 int compiled_invocation_count() const { return 0; } // counter is not maintained in PRODUCT builds
455 #endif // not PRODUCT
456
457 // Clear (non-shared space) pointers which could not be relevant
458 // if this (shared) method were mapped into another JVM.
459 void remove_unshareable_info();
460
461 // nmethod/verified compiler entry
462 address verified_code_entry();
463 bool check_code() const; // Not inline to avoid circular ref
464 CompiledMethod* volatile code() const; // current nmethod; may be NULL (see _code field comments)
465 void clear_code(bool acquire_lock = true); // Clear out any compiled code
466 static void set_code(const methodHandle& mh, CompiledMethod* code);
467 void set_adapter_entry(AdapterHandlerEntry* adapter) { // delegates: adapter state is stored on the ConstMethod
468 constMethod()->set_adapter_entry(adapter);
469 }
470 void update_adapter_trampoline(AdapterHandlerEntry* adapter) { // delegates to ConstMethod as above
471 constMethod()->update_adapter_trampoline(adapter);
472 }
473
474 address get_i2c_entry();
|
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 // note: can have vtables with >2**16 elements (because of inheritance)
82 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
83
84 // Flags
85 enum Flags {
86 _caller_sensitive = 1 << 0,
87 _force_inline = 1 << 1,
88 _dont_inline = 1 << 2,
89 _hidden = 1 << 3,
90 _has_injected_profile = 1 << 4,
91 _running_emcp = 1 << 5,
92 _intrinsic_candidate = 1 << 6,
93 _reserved_stack_access = 1 << 7
94 };
95 mutable u2 _flags; // bitset of the Flags enum above; mutable — NOTE(review): presumably so lazily-computed flags can be set on const receivers; confirm
96
97 JFR_ONLY(DEFINE_TRACE_FLAG;)
98
99 #ifndef PRODUCT
100 int64_t _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging); 64-bit so very hot methods cannot wrap the counter
101 #endif
102 // Entry point for calling both from and to the interpreter.
103 address _i2i_entry; // All-args-on-stack calling convention
104 // Entry point for calling from compiled code, to compiled code if it exists
105 // or else the interpreter.
106 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
107 // The entry point for calling both from and to compiled code is
108 // "_code->entry_point()". Because of tiered compilation and de-opt, this
109 // field can come and go. It can transition from NULL to not-null at any
110 // time (whenever a compile completes). It can transition from not-null to
111 // NULL only at safepoints (because of a de-opt).
112 CompiledMethod* volatile _code; // Points to the corresponding piece of native code
113 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
114
115 #if INCLUDE_AOT && defined(TIERED)
116 CompiledMethod* _aot_code; // only present in AOT + tiered builds (see #if guard)
117 #endif
118
119 // Constructor
120 Method(ConstMethod* xconst, AccessFlags access_flags);
430
431 static MethodCounters* build_method_counters(Method* m, TRAPS); // creates the MethodCounters for m; TRAPS — NOTE(review): presumably can throw on allocation failure; confirm
432
433 int interpreter_invocation_count() {
434 // Under tiered compilation a single unified invocation counter is used.
435 if (TieredCompilation) return invocation_count();
436 // Otherwise the counter lives in the (lazily allocated) MethodCounters.
437 MethodCounters* counters = method_counters();
438 if (counters == NULL) return 0;
439 return counters->interpreter_invocation_count();
440 }
441 #if COMPILER2_OR_JVMCI
442 int increment_interpreter_invocation_count(TRAPS) {
443 if (TieredCompilation) ShouldNotReachHere(); // only valid in non-tiered mode
444 MethodCounters* counters = get_method_counters(CHECK_0); // NOTE(review): presumably may allocate and throw; CHECK_0 returns 0 on pending exception
445 return (counters != NULL) ? counters->increment_interpreter_invocation_count() : 0;
446 }
447 #endif
448
449 #ifndef PRODUCT
450 int64_t compiled_invocation_count() const { return _compiled_invocation_count;} // 64-bit: very hot methods cannot wrap this counter
451 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; } // explicit widening of the 32-bit argument
452 #else
453 // for PrintMethodData in a product build
454 int64_t compiled_invocation_count() const { return 0; } // counter is not maintained in PRODUCT builds
455 #endif // not PRODUCT
456
457 // Clear (non-shared space) pointers which could not be relevant
458 // if this (shared) method were mapped into another JVM.
459 void remove_unshareable_info();
460
461 // nmethod/verified compiler entry
462 address verified_code_entry();
463 bool check_code() const; // Not inline to avoid circular ref
464 CompiledMethod* volatile code() const; // current nmethod; may be NULL (see _code field comments)
465 void clear_code(bool acquire_lock = true); // Clear out any compiled code
466 static void set_code(const methodHandle& mh, CompiledMethod* code);
467 void set_adapter_entry(AdapterHandlerEntry* adapter) { // delegates: adapter state is stored on the ConstMethod
468 constMethod()->set_adapter_entry(adapter);
469 }
470 void update_adapter_trampoline(AdapterHandlerEntry* adapter) { // delegates to ConstMethod as above
471 constMethod()->update_adapter_trampoline(adapter);
472 }
473
474 address get_i2c_entry();
|