73 private:
74 // If you add a new field that points to any metaspace object, you
75 // must add this field to Method::metaspace_pointers_do().
76 ConstMethod* _constMethod; // Method read-only data.
77 MethodData* _method_data;
78 MethodCounters* _method_counters;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 // note: can have vtables with >2**16 elements (because of inheritance)
82 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
83
84 // Flags: single-bit values packed into the mutable u2 _flags field declared below.
85 enum Flags {
86 _caller_sensitive = 1 << 0,
87 _force_inline = 1 << 1,
88 _dont_inline = 1 << 2,
89 _hidden = 1 << 3,
90 _has_injected_profile = 1 << 4, // accessed via has_injected_profile()/set_has_injected_profile()
91 _running_emcp = 1 << 5,
92 _intrinsic_candidate = 1 << 6, // accessed via set_intrinsic_candidate()
93 _reserved_stack_access = 1 << 7 // accessed via has_reserved_stack_access()/set_has_reserved_stack_access()
94 };
95 mutable u2 _flags;
96
97 JFR_ONLY(DEFINE_TRACE_FLAG;)
98
99 #ifndef PRODUCT
100 int _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging)
101 #endif
102 // Entry point for calling both from and to the interpreter.
103 address _i2i_entry; // All-args-on-stack calling convention
104 // Entry point for calling from compiled code, to compiled code if it exists
105 // or else the interpreter.
106 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
107 // The entry point for calling both from and to compiled code is
108 // "_code->entry_point()". Because of tiered compilation and de-opt, this
109 // field can come and go. It can transition from NULL to not-null at any
110 // time (whenever a compile completes). It can transition from not-null to
111 // NULL only at safepoints (because of a de-opt).
112 CompiledMethod* volatile _code; // Points to the corresponding piece of native code
113 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
678 bool has_aot_code() const { return aot_code() != NULL; }
679 #endif
680
681 // sizing
682 static int header_size() { // size of the fixed Method header, expressed in words
683 return align_up((int)sizeof(Method), wordSize) / wordSize; // round sizeof(Method) up to a whole number of words
684 }
685 static int size(bool is_native); // total size in words; presumably larger for natives (trailing native slots, see native_function_offset) -- confirm in the .cpp
686 int size() const { return method_size(); }
687 #if INCLUDE_SERVICES
688 void collect_statistics(KlassSizeStats *sz) const;
689 #endif
690 void log_touched(TRAPS);
691 static void print_touched_methods(outputStream* out);
692
693 // interpreter support: byte offsets of fields, for use by generated code
694 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
695 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
696 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
697 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
698 static ByteSize method_data_offset() {
699 return byte_offset_of(Method, _method_data);
700 }
701 static ByteSize method_counters_offset() {
702 return byte_offset_of(Method, _method_counters);
703 }
704 #ifndef PRODUCT
705 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
706 #endif // not PRODUCT
707 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); } // first word immediately past the Method object
708 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
709 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
710 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); } // second word past the Method object
711 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); } // itable index shares storage with _vtable_index
712
713 // for code generation: raw int offsets (not ByteSize wrappers)
714 static int method_data_offset_in_bytes() { return offset_of(Method, _method_data); }
715 static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); }
716 static int intrinsic_id_size_in_bytes() { return sizeof(u2); } // matches the u2 _intrinsic_id field
717
878 return (_flags & _intrinsic_candidate) != 0;
879 }
880 void set_intrinsic_candidate(bool x) { // set or clear the _intrinsic_candidate bit
881 _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
882 }
883
884 bool has_injected_profile() { // true if the _has_injected_profile bit is set
885 return (_flags & _has_injected_profile) != 0;
886 }
887 void set_has_injected_profile(bool x) { // set or clear the _has_injected_profile bit
888 _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
889 }
890
891 bool has_reserved_stack_access() { // true if the _reserved_stack_access bit is set
892 return (_flags & _reserved_stack_access) != 0;
893 }
894
895 void set_has_reserved_stack_access(bool x) { // set or clear the _reserved_stack_access bit
896 _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
897 }
898
899 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
900
901 ConstMethod::MethodType method_type() const { // delegates to the read-only ConstMethod
902 return _constMethod->method_type();
903 }
904 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
905
906 // On-stack replacement support
907 bool has_osr_nmethod(int level, bool match_level) { // any OSR nmethod at the method-entry BCI for this compile level?
908 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
909 }
910
911 int mark_osr_nmethods() { // delegates to the holder klass; return value semantics defined there -- see InstanceKlass
912 return method_holder()->mark_osr_nmethods(this);
913 }
914
915 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) { // OSR nmethod at a specific BCI, or NULL
916 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
917 }
|
73 private:
74 // If you add a new field that points to any metaspace object, you
75 // must add this field to Method::metaspace_pointers_do().
76 ConstMethod* _constMethod; // Method read-only data.
77 MethodData* _method_data;
78 MethodCounters* _method_counters;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 // note: can have vtables with >2**16 elements (because of inheritance)
82 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
83
84 // Flags: single-bit values packed into the mutable u2 _flags field declared below.
85 enum Flags {
86 _caller_sensitive = 1 << 0,
87 _force_inline = 1 << 1,
88 _dont_inline = 1 << 2,
89 _hidden = 1 << 3,
90 _has_injected_profile = 1 << 4,
91 _running_emcp = 1 << 5,
92 _intrinsic_candidate = 1 << 6,
93 _reserved_stack_access = 1 << 7,
94 _known_not_returning_vt= 1 << 8, // <- See byte_value_for_known_not_returning_vt()
95 _known_returning_vt = 1 << 9, // <- for these 2 bits.
96 _unused_bits_mask = 0xfc00 // bits 10..15; must track the highest used bit (asserted in byte_value_for_known_not_returning_vt())
97 };
98 mutable u2 _flags;
99
100 JFR_ONLY(DEFINE_TRACE_FLAG;)
101
102 #ifndef PRODUCT
103 int _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging)
104 #endif
105 // Entry point for calling both from and to the interpreter.
106 address _i2i_entry; // All-args-on-stack calling convention
107 // Entry point for calling from compiled code, to compiled code if it exists
108 // or else the interpreter.
109 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
110 // The entry point for calling both from and to compiled code is
111 // "_code->entry_point()". Because of tiered compilation and de-opt, this
112 // field can come and go. It can transition from NULL to not-null at any
113 // time (whenever a compile completes). It can transition from not-null to
114 // NULL only at safepoints (because of a de-opt).
115 CompiledMethod* volatile _code; // Points to the corresponding piece of native code
116 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
681 bool has_aot_code() const { return aot_code() != NULL; }
682 #endif
683
684 // sizing
685 static int header_size() { // size of the fixed Method header, expressed in words
686 return align_up((int)sizeof(Method), wordSize) / wordSize; // round sizeof(Method) up to a whole number of words
687 }
688 static int size(bool is_native); // total size in words; presumably larger for natives (trailing native slots, see native_function_offset) -- confirm in the .cpp
689 int size() const { return method_size(); }
690 #if INCLUDE_SERVICES
691 void collect_statistics(KlassSizeStats *sz) const;
692 #endif
693 void log_touched(TRAPS);
694 static void print_touched_methods(outputStream* out);
695
696 // interpreter support: byte offsets of fields, for use by generated code
697 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
698 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
699 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
700 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
701 static ByteSize flags_offset() { return byte_offset_of(Method, _flags); } // lets generated code test _flags bits directly (see byte_value_for_known_not_returning_vt)
702 static ByteSize method_data_offset() {
703 return byte_offset_of(Method, _method_data);
704 }
705 static ByteSize method_counters_offset() {
706 return byte_offset_of(Method, _method_counters);
707 }
708 #ifndef PRODUCT
709 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
710 #endif // not PRODUCT
711 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); } // first word immediately past the Method object
712 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
713 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
714 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); } // second word past the Method object
715 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); } // itable index shares storage with _vtable_index
716
717 // for code generation: raw int offsets (not ByteSize wrappers)
718 static int method_data_offset_in_bytes() { return offset_of(Method, _method_data); }
719 static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); }
720 static int intrinsic_id_size_in_bytes() { return sizeof(u2); } // matches the u2 _intrinsic_id field
721
882 return (_flags & _intrinsic_candidate) != 0;
883 }
884 void set_intrinsic_candidate(bool x) { // set or clear the _intrinsic_candidate bit
885 _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
886 }
887
888 bool has_injected_profile() { // true if the _has_injected_profile bit is set
889 return (_flags & _has_injected_profile) != 0;
890 }
891 void set_has_injected_profile(bool x) { // set or clear the _has_injected_profile bit
892 _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
893 }
894
895 bool has_reserved_stack_access() { // true if the _reserved_stack_access bit is set
896 return (_flags & _reserved_stack_access) != 0;
897 }
898
899 void set_has_reserved_stack_access(bool x) { // set or clear the _reserved_stack_access bit
900 _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
901 }
902
903 static int byte_value_for_known_not_returning_vt() {
904 // The higher byte of Method::_flags has only the
905 // _known_not_returning_vt and _known_returning_vt bits, and all other bits
906 // are zero, so we can test for is_known_not_returning_vt() in the interpreter
907 // by essentially comparing (_flags >> 8) == Method::byte_value_for_known_not_returning_vt()
908 assert(_unused_bits_mask == 0xfc00, "must be"); // NOTE(review): constant condition -- a compile-time STATIC_ASSERT would catch new flag bits at build time; confirm project convention
909 return (_known_not_returning_vt >> 8); // == 1: only bit 8 within the high byte
910 }
911
912 bool is_known_not_returning_vt() { // true if the _known_not_returning_vt bit is set
913 return (_flags & _known_not_returning_vt) != 0;
914 }
915
916 void set_known_not_returning_vt() { // one-way: sets the bit, never cleared here
917 _flags |= _known_not_returning_vt;
918 }
919
920 bool is_known_returning_vt() { // true if the _known_returning_vt bit is set
921 return (_flags & _known_returning_vt) != 0;
922 }
923
924 void set_known_returning_vt() { // one-way: sets the bit, never cleared here
925 _flags |= _known_returning_vt;
926 }
927
928 void check_returning_vt(TRAPS); // defined out of line; can throw (TRAPS) -- presumably decides and sets one of the two _known_*_vt bits; confirm in method.cpp
929
930 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
931
932 ConstMethod::MethodType method_type() const { // delegates to the read-only ConstMethod
933 return _constMethod->method_type();
934 }
935 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
936
937 // On-stack replacement support
938 bool has_osr_nmethod(int level, bool match_level) { // any OSR nmethod at the method-entry BCI for this compile level?
939 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
940 }
941
942 int mark_osr_nmethods() { // delegates to the holder klass; return value semantics defined there -- see InstanceKlass
943 return method_holder()->mark_osr_nmethods(this);
944 }
945
946 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) { // OSR nmethod at a specific BCI, or NULL
947 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
948 }
|