 private:
  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod* _constMethod; // Method read-only data.
  MethodData* _method_data; // NOTE(review): presumably lazily-allocated profiling data — confirm
  MethodCounters* _method_counters;
  AccessFlags _access_flags; // Access flags
  int _vtable_index; // vtable index of this method (see VtableIndexFlag)
  // note: can have vtables with >2**16 elements (because of inheritance)
  u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)

  // Flags
  // Bit positions within _flags below; read/written by the accessor pairs
  // later in this class (e.g. intrinsic_candidate()/set_intrinsic_candidate()).
  enum Flags {
    _caller_sensitive = 1 << 0,
    _force_inline = 1 << 1,
    _dont_inline = 1 << 2,
    _hidden = 1 << 3,
    _has_injected_profile = 1 << 4,
    _running_emcp = 1 << 5,
    _intrinsic_candidate = 1 << 6,
    _reserved_stack_access = 1 << 7
  };
  mutable u2 _flags; // bit set drawn from the Flags enum above

  JFR_ONLY(DEFINE_TRACE_FLAG;)

#ifndef PRODUCT
  int _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging)
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry; // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from NULL to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // NULL only at safepoints (because of a de-opt).
  CompiledMethod* volatile _code; // Points to the corresponding piece of native code
  volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry

#if INCLUDE_AOT && defined(TIERED)
  CompiledMethod* _aot_code; // AOT-compiled code for this method, if any (see has_aot_code())
#endif

  // Constructor
  Method(ConstMethod* xconst, AccessFlags access_flags);
 public:

  static Method* allocate(ClassLoaderData* loader_data,
                          int byte_code_size,
                          AccessFlags access_flags,
                          InlineTableSizes* sizes,
                          ConstMethod::MethodType method_type,
                          TRAPS);

  // CDS and vtbl checking can create an empty Method to get vtbl pointer.
  Method(){}

  // Identifies this metadata object as a Method; always true here.
  bool is_method() const volatile { return true; }
432 if (TieredCompilation) ShouldNotReachHere();
433 MethodCounters* mcs = get_method_counters(CHECK_0);
434 return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();
435 }
436 #endif
437
#ifndef PRODUCT
  int compiled_invocation_count() const { return _compiled_invocation_count; }
  void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }
#else
  // for PrintMethodData in a product build
  int compiled_invocation_count() const { return 0; }
#endif // not PRODUCT

  // Clear (non-shared space) pointers which could not be relevant
  // if this (shared) method were mapped into another JVM.
  void remove_unshareable_info();

  // nmethod/verified compiler entry
  address verified_code_entry();
  bool check_code() const; // Not inline to avoid circular ref
  CompiledMethod* volatile code() const;
  void clear_code(bool acquire_lock = true); // Clear out any compiled code
  static void set_code(const methodHandle& mh, CompiledMethod* code);
  // The adapter entry is owned by the read-only ConstMethod; these two
  // mutators simply forward to it.
  void set_adapter_entry(AdapterHandlerEntry* adapter) {
    constMethod()->set_adapter_entry(adapter);
  }
  void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
    constMethod()->update_adapter_trampoline(adapter);
  }

  address get_i2c_entry();
  address get_c2i_entry();
  address get_c2i_unverified_entry();
  AdapterHandlerEntry* adapter() const {
    return constMethod()->adapter();
  }
  // setup entry points
  void link_method(const methodHandle& method, TRAPS);
  // clear entry points. Used by sharing code during dump time
557
  // localvariable table (stored in the read-only ConstMethod)
  bool has_localvariable_table() const
  { return constMethod()->has_localvariable_table(); }
  int localvariable_table_length() const
  { return constMethod()->localvariable_table_length(); }
  LocalVariableTableElement* localvariable_table_start() const
  { return constMethod()->localvariable_table_start(); }

  // line number table (bci -> line), stored compressed in ConstMethod
  bool has_linenumber_table() const
  { return constMethod()->has_linenumber_table(); }
  u_char* compressed_linenumber_table() const
  { return constMethod()->compressed_linenumber_table(); }

  // method holder (the Klass* holding this method)
  InstanceKlass* method_holder() const { return constants()->pool_holder(); }

  void compute_size_of_parameters(Thread *thread); // word size of parameters (receiver if any + arguments)
  Symbol* klass_name() const; // returns the name of the method holder
  BasicType result_type() const; // type of the method result
  bool is_returning_oop() const { BasicType r = result_type(); return (r == T_OBJECT || r == T_ARRAY); }
  bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }

  // Checked exceptions thrown by this method (resolved to mirrors)
  objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }

  // Access flags — thin queries over _access_flags.
  bool is_public() const { return access_flags().is_public(); }
  bool is_private() const { return access_flags().is_private(); }
  bool is_protected() const { return access_flags().is_protected(); }
  bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
  bool is_static() const { return access_flags().is_static(); }
  bool is_final() const { return access_flags().is_final(); }
  bool is_synchronized() const { return access_flags().is_synchronized();}
  bool is_native() const { return access_flags().is_native(); }
  bool is_abstract() const { return access_flags().is_abstract(); }
  bool is_strict() const { return access_flags().is_strict(); }
  bool is_synthetic() const { return access_flags().is_synthetic(); }

  // returns true if contains only return operation
  bool is_empty_method() const;
#ifdef TIERED
  bool has_aot_code() const { return aot_code() != NULL; }
#endif

  // sizing
  // Size of the fixed Method header, in words, rounded up to a whole word.
  static int header_size() {
    return align_up((int)sizeof(Method), wordSize) / wordSize;
  }
  static int size(bool is_native);
  int size() const { return method_size(); }
#if INCLUDE_SERVICES
  void collect_statistics(KlassSizeStats *sz) const;
#endif
  void log_touched(TRAPS);
  static void print_touched_methods(outputStream* out);

  // interpreter support
  // Byte offsets of fields, for use by generated (assembly) code.
  static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
  static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
  static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
  static ByteSize code_offset() { return byte_offset_of(Method, _code); }
  static ByteSize method_data_offset() {
    return byte_offset_of(Method, _method_data);
  }
  static ByteSize method_counters_offset() {
    return byte_offset_of(Method, _method_counters);
  }
#ifndef PRODUCT
  static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
#endif // not PRODUCT
  // The native function and signature handler are not declared fields;
  // they live immediately past the end of the Method object, hence the
  // sizeof(Method)-based offsets.
  static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
  static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
  static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
  static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
  static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }

  // for code generation
  static int method_data_offset_in_bytes() { return offset_of(Method, _method_data); }
  static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); }
  static int intrinsic_id_size_in_bytes() { return sizeof(u2); }

  // Static methods that are used to implement member methods where an exposed this pointer
  // is needed due to possible GCs
  static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPS);

  // Returns the byte code index from the byte code pointer
  int bci_from(address bcp) const;
  address bcp_from(int bci) const;
  address bcp_from(address bcp) const;
  int validate_bci_from_bcp(address bcp) const;
  int validate_bci(int bci) const;

  // Returns the line number for a bci if debugging information for the method is provided,
  // -1 is returned otherwise.
  int line_number_from_bci(int bci) const;

  // Reflection support
  bool is_overridden_in(Klass* k) const;

  // Stack walking support
  bool is_ignored_by_security_stack_walk() const;
  // _flags bit accessors: each getter tests one bit of _flags and each
  // setter sets or clears that same bit (see the Flags enum above).
  bool intrinsic_candidate() {
    return (_flags & _intrinsic_candidate) != 0;
  }
  void set_intrinsic_candidate(bool x) {
    _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
  }

  bool has_injected_profile() {
    return (_flags & _has_injected_profile) != 0;
  }
  void set_has_injected_profile(bool x) {
    _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
  }

  bool has_reserved_stack_access() {
    return (_flags & _reserved_stack_access) != 0;
  }

  void set_has_reserved_stack_access(bool x) {
    _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
  }

  JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)

  // Method kind, delegated to the read-only ConstMethod.
  ConstMethod::MethodType method_type() const {
    return _constMethod->method_type();
  }
  bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }

  // On-stack replacement support
  // InvocationEntryBci queries for an OSR nmethod at the method entry.
  bool has_osr_nmethod(int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
  }

  int mark_osr_nmethods() {
    return method_holder()->mark_osr_nmethods(this);
  }
903
904 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
905 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
|
 private:
  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod* _constMethod; // Method read-only data.
  MethodData* _method_data; // NOTE(review): presumably lazily-allocated profiling data — confirm
  MethodCounters* _method_counters;
  AccessFlags _access_flags; // Access flags
  int _vtable_index; // vtable index of this method (see VtableIndexFlag)
  // note: can have vtables with >2**16 elements (because of inheritance)
  u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)

  // Flags
  // Bit positions within _flags below; read/written by the accessor pairs
  // later in this class. The value-type bits (_scalarized_args,
  // _needs_stack_repair) still fit in the u2 _flags word.
  enum Flags {
    _caller_sensitive = 1 << 0,
    _force_inline = 1 << 1,
    _dont_inline = 1 << 2,
    _hidden = 1 << 3,
    _has_injected_profile = 1 << 4,
    _running_emcp = 1 << 5,
    _intrinsic_candidate = 1 << 6,
    _reserved_stack_access = 1 << 7,
    _scalarized_args = 1 << 8,
    _needs_stack_repair = 1 << 9
  };
  mutable u2 _flags; // bit set drawn from the Flags enum above

  JFR_ONLY(DEFINE_TRACE_FLAG;)

#ifndef PRODUCT
  int _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging)
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry; // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry; // Cache of: _code ? _code->verified_entry_point() : _adapter->c2i_entry()
  volatile address _from_compiled_value_ro_entry; // Cache of: _code ? _code->verified_value_ro_entry_point() : _adapter->c2i_value_ro_entry()
  volatile address _from_compiled_value_entry; // Cache of: _code ? _code->verified_value_entry_point() : _adapter->c2i_value_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from NULL to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // NULL only at safepoints (because of a de-opt).
  // NOTE(review): the caches above reference verified_*entry_point();
  // confirm this comment's entry_point() wording is still accurate.
  CompiledMethod* volatile _code; // Points to the corresponding piece of native code
  volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
  int _max_vt_buffer; // max number of VT buffer chunk to use before recycling

#if INCLUDE_AOT && defined(TIERED)
  CompiledMethod* _aot_code; // AOT-compiled code for this method, if any (see has_aot_code())
#endif

  // Constructor
  Method(ConstMethod* xconst, AccessFlags access_flags);
 public:

  static Method* allocate(ClassLoaderData* loader_data,
                          int byte_code_size,
                          AccessFlags access_flags,
                          InlineTableSizes* sizes,
                          ConstMethod::MethodType method_type,
                          TRAPS);

  // CDS and vtbl checking can create an empty Method to get vtbl pointer.
  Method(){}

  // Identifies this metadata object as a Method; always true here.
  bool is_method() const volatile { return true; }
437 if (TieredCompilation) ShouldNotReachHere();
438 MethodCounters* mcs = get_method_counters(CHECK_0);
439 return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();
440 }
441 #endif
442
#ifndef PRODUCT
  int compiled_invocation_count() const { return _compiled_invocation_count; }
  void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }
#else
  // for PrintMethodData in a product build
  int compiled_invocation_count() const { return 0; }
#endif // not PRODUCT

  // Clear (non-shared space) pointers which could not be relevant
  // if this (shared) method were mapped into another JVM.
  void remove_unshareable_info();

  // nmethod/verified compiler entry
  address verified_code_entry();
  // Value-type variant: entry that receives the receiver as an oop ("ro").
  // NOTE(review): semantics inferred from the _from_compiled_value_ro_entry
  // cache above — confirm.
  address verified_value_ro_code_entry();
  bool check_code() const; // Not inline to avoid circular ref
  CompiledMethod* volatile code() const;
  void clear_code(bool acquire_lock = true); // Clear out any compiled code
  static void set_code(const methodHandle& mh, CompiledMethod* code);
  // The adapter entry is owned by the read-only ConstMethod; these two
  // mutators simply forward to it.
  void set_adapter_entry(AdapterHandlerEntry* adapter) {
    constMethod()->set_adapter_entry(adapter);
  }
  void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
    constMethod()->update_adapter_trampoline(adapter);
  }

  address get_i2c_entry();
  address get_c2i_entry();
  address get_c2i_unverified_entry();
  AdapterHandlerEntry* adapter() const {
    return constMethod()->adapter();
  }
  // setup entry points
  void link_method(const methodHandle& method, TRAPS);
  // clear entry points. Used by sharing code during dump time
563
  // localvariable table (stored in the read-only ConstMethod)
  bool has_localvariable_table() const
  { return constMethod()->has_localvariable_table(); }
  int localvariable_table_length() const
  { return constMethod()->localvariable_table_length(); }
  LocalVariableTableElement* localvariable_table_start() const
  { return constMethod()->localvariable_table_start(); }

  // line number table (bci -> line), stored compressed in ConstMethod
  bool has_linenumber_table() const
  { return constMethod()->has_linenumber_table(); }
  u_char* compressed_linenumber_table() const
  { return constMethod()->compressed_linenumber_table(); }

  // method holder (the Klass* holding this method)
  InstanceKlass* method_holder() const { return constants()->pool_holder(); }

  void compute_size_of_parameters(Thread *thread); // word size of parameters (receiver if any + arguments)
  Symbol* klass_name() const; // returns the name of the method holder
  BasicType result_type() const; // type of the method result
  // "may": T_VALUETYPE results are included — NOTE(review): presumably a
  // value-type result may be returned as a heap oop; confirm scalarized cases.
  bool may_return_oop() const { BasicType r = result_type(); return (r == T_OBJECT || r == T_ARRAY || r == T_VALUETYPE); }
#ifdef ASSERT
  ValueKlass* returned_value_type(Thread* thread) const;
#endif

  // Checked exceptions thrown by this method (resolved to mirrors)
  objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }

  // Access flags — thin queries over _access_flags.
  bool is_public() const { return access_flags().is_public(); }
  bool is_private() const { return access_flags().is_private(); }
  bool is_protected() const { return access_flags().is_protected(); }
  bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
  bool is_static() const { return access_flags().is_static(); }
  bool is_final() const { return access_flags().is_final(); }
  bool is_synchronized() const { return access_flags().is_synchronized();}
  bool is_native() const { return access_flags().is_native(); }
  bool is_abstract() const { return access_flags().is_abstract(); }
  bool is_strict() const { return access_flags().is_strict(); }
  bool is_synthetic() const { return access_flags().is_synthetic(); }

  // returns true if contains only return operation
  bool is_empty_method() const;
606
#ifdef TIERED
  bool has_aot_code() const { return aot_code() != NULL; }
#endif

  // sizing
  // Size of the fixed Method header, in words, rounded up to a whole word.
  static int header_size() {
    return align_up((int)sizeof(Method), wordSize) / wordSize;
  }
  static int size(bool is_native);
  int size() const { return method_size(); }
#if INCLUDE_SERVICES
  void collect_statistics(KlassSizeStats *sz) const;
#endif
  void log_touched(TRAPS);
  static void print_touched_methods(outputStream* out);

  // interpreter support
  // Byte offsets of fields, for use by generated (assembly) code.
  static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
  static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
  static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
  static ByteSize from_compiled_value_offset() { return byte_offset_of(Method, _from_compiled_value_entry); }
  static ByteSize from_compiled_value_ro_offset(){ return byte_offset_of(Method, _from_compiled_value_ro_entry); }
  static ByteSize code_offset() { return byte_offset_of(Method, _code); }
  static ByteSize flags_offset() { return byte_offset_of(Method, _flags); }
  static ByteSize method_data_offset() {
    return byte_offset_of(Method, _method_data);
  }
  static ByteSize method_counters_offset() {
    return byte_offset_of(Method, _method_counters);
  }
#ifndef PRODUCT
  static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
#endif // not PRODUCT
  // The native function and signature handler are not declared fields;
  // they live immediately past the end of the Method object, hence the
  // sizeof(Method)-based offsets.
  static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
  static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
  static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
  static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
  static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }

  // for code generation
  static int method_data_offset_in_bytes() { return offset_of(Method, _method_data); }
  static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); }
  static int intrinsic_id_size_in_bytes() { return sizeof(u2); }

  static ByteSize max_vt_buffer_offset() { return byte_offset_of(Method, _max_vt_buffer); }

  // Static methods that are used to implement member methods where an exposed this pointer
  // is needed due to possible GCs
  static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPS);

  // Returns the byte code index from the byte code pointer
  int bci_from(address bcp) const;
  address bcp_from(int bci) const;
  address bcp_from(address bcp) const;
  int validate_bci_from_bcp(address bcp) const;
  int validate_bci(int bci) const;

  // Returns the line number for a bci if debugging information for the method is provided,
  // -1 is returned otherwise.
  int line_number_from_bci(int bci) const;

  // Reflection support
  bool is_overridden_in(Klass* k) const;

  // Stack walking support
  bool is_ignored_by_security_stack_walk() const;
  // _flags bit accessors: each getter tests one bit of _flags and each
  // setter sets or clears that same bit (see the Flags enum above).
  bool intrinsic_candidate() {
    return (_flags & _intrinsic_candidate) != 0;
  }
  void set_intrinsic_candidate(bool x) {
    _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
  }

  bool has_injected_profile() {
    return (_flags & _has_injected_profile) != 0;
  }
  void set_has_injected_profile(bool x) {
    _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
  }

  bool has_reserved_stack_access() {
    return (_flags & _reserved_stack_access) != 0;
  }

  void set_has_reserved_stack_access(bool x) {
    _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
  }

  // Value-type support: whether value-type arguments are passed scalarized.
  // NOTE(review): semantics inferred from the flag name — confirm.
  bool has_scalarized_args() {
    return (_flags & _scalarized_args) != 0;
  }

  void set_has_scalarized_args(bool x) {
    _flags = x ? (_flags | _scalarized_args) : (_flags & ~_scalarized_args);
  }

  bool needs_stack_repair() {
    return (_flags & _needs_stack_repair) != 0;
  }

  void set_needs_stack_repair(bool x) {
    _flags = x ? (_flags | _needs_stack_repair) : (_flags & ~_needs_stack_repair);
  }

  JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)

  // Method kind, delegated to the read-only ConstMethod.
  ConstMethod::MethodType method_type() const {
    return _constMethod->method_type();
  }
  bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }

  // On-stack replacement support
  // InvocationEntryBci queries for an OSR nmethod at the method entry.
  bool has_osr_nmethod(int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
  }

  int mark_osr_nmethods() {
    return method_holder()->mark_osr_nmethods(this);
  }
932
933 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
934 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
|