47 // (required by the interpreter) 48 // 49 // Method embedded field layout (after declared fields): 50 // [EMBEDDED native_function (present only if native) ] 51 // [EMBEDDED signature_handler (present only if native) ] 52 53 class CheckedExceptionElement; 54 class LocalVariableTableElement; 55 class AdapterHandlerEntry; 56 class MethodData; 57 class MethodCounters; 58 class ConstMethod; 59 class InlineTableSizes; 60 class KlassSizeStats; 61 62 class Method : public Metadata { 63 friend class VMStructs; 64 friend class JVMCIVMStructs; 65 private: 66 ConstMethod* _constMethod; // Method read-only data. 67 #if defined(COMPILER2) || INCLUDE_JVMCI 68 MethodData* _method_data; 69 #endif 70 MethodCounters* _method_counters; 71 AccessFlags _access_flags; // Access flags 72 int _vtable_index; // vtable index of this method (see VtableIndexFlag) 73 // note: can have vtables with >2**16 elements (because of inheritance) 74 #ifdef CC_INTERP 75 int _result_index; // C++ interpreter needs for converting results to/from stack 76 #endif 77 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none) 78 79 // Flags 80 enum Flags { 81 _jfr_towrite = 1 << 0, 82 _caller_sensitive = 1 << 1, 83 _force_inline = 1 << 2, 84 _dont_inline = 1 << 3, 85 _hidden = 1 << 4, 86 _has_injected_profile = 1 << 5, 87 _running_emcp = 1 << 6, 88 _intrinsic_candidate = 1 << 7, 89 _reserved_stack_access = 1 << 8 304 { return constMethod()->has_exception_handler(); } 305 int exception_table_length() const 306 { return constMethod()->exception_table_length(); } 307 ExceptionTableElement* exception_table_start() const 308 { return constMethod()->exception_table_start(); } 309 310 // Finds the first entry point bci of an exception handler for an 311 // exception of klass ex_klass thrown at throw_bci. A value of NULL 312 // for ex_klass indicates that the exception klass is not known; in 313 // this case it matches any constraint class. Returns -1 if the 314 // exception cannot be handled in this method. 
The handler 315 // constraint classes are loaded if necessary. Note that this may 316 // throw an exception if loading of the constraint classes causes 317 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError. 318 // If an exception is thrown, returns the bci of the 319 // exception handler which caused the exception to be thrown, which 320 // is needed for proper retries. See, for example, 321 // InterpreterRuntime::exception_handler_for_exception. 322 static int fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS); 323 324 #if defined(COMPILER2) || INCLUDE_JVMCI 325 // method data access 326 MethodData* method_data() const { 327 return _method_data; 328 } 329 330 void set_method_data(MethodData* data) { 331 // The store into method must be released. On platforms without 332 // total store order (TSO) the reference may become visible before 333 // the initialization of data otherwise. 334 OrderAccess::release_store_ptr((volatile void *)&_method_data, data); 335 } 336 #else 337 MethodData* method_data() const { return NULL; } 338 void set_method_data(MethodData* data) { } 339 #endif 340 341 MethodCounters* method_counters() const { 342 return _method_counters; 343 } 344 345 void clear_method_counters() { 346 _method_counters = NULL; 347 } 348 349 bool init_method_counters(MethodCounters* counters) { 350 // Try to install a pointer to MethodCounters, return true on success. 351 return Atomic::cmpxchg_ptr(counters, (volatile void*)&_method_counters, NULL) == NULL; 352 } 353 354 #ifdef TIERED 355 // We are reusing interpreter_invocation_count as a holder for the previous event count! 356 // We can do that since interpreter_invocation_count is not used in tiered. 
357 int prev_event_count() const { 358 if (method_counters() == NULL) { 359 return 0; 629 bool has_compiled_code() const { return code() != NULL; } 630 631 // sizing 632 static int header_size() { return sizeof(Method)/wordSize; } 633 static int size(bool is_native); 634 int size() const { return method_size(); } 635 #if INCLUDE_SERVICES 636 void collect_statistics(KlassSizeStats *sz) const; 637 #endif 638 void log_touched(TRAPS); 639 static void print_touched_methods(outputStream* out); 640 641 // interpreter support 642 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); } 643 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); } 644 #ifdef CC_INTERP 645 static ByteSize result_index_offset() { return byte_offset_of(Method, _result_index ); } 646 #endif /* CC_INTERP */ 647 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); } 648 static ByteSize code_offset() { return byte_offset_of(Method, _code); } 649 #if defined(COMPILER2) || INCLUDE_JVMCI 650 static ByteSize method_data_offset() { 651 return byte_offset_of(Method, _method_data); 652 } 653 #else 654 static ByteSize method_data_offset() { 655 ShouldNotReachHere(); 656 return in_ByteSize(0); 657 } 658 #endif 659 static ByteSize method_counters_offset() { 660 return byte_offset_of(Method, _method_counters); 661 } 662 #ifndef PRODUCT 663 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); } 664 #endif // not PRODUCT 665 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); } 666 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); } 667 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); } 668 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); } 669 670 // for code generation 671 #if defined(COMPILER2) 
|| INCLUDE_JVMCI 672 static int method_data_offset_in_bytes() { return offset_of(Method, _method_data); } 673 #else 674 static int method_data_offset_in_bytes() { ShouldNotReachHere(); return 0; } 675 #endif 676 static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); } 677 static int intrinsic_id_size_in_bytes() { return sizeof(u2); } 678 679 // Static methods that are used to implement member methods where an exposed this pointer 680 // is needed due to possible GCs 681 static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPS); 682 683 // Returns the byte code index from the byte code pointer 684 int bci_from(address bcp) const; 685 address bcp_from(int bci) const; 686 address bcp_from(address bcp) const; 687 int validate_bci_from_bcp(address bcp) const; 688 int validate_bci(int bci) const; 689 690 // Returns the line number for a bci if debugging information for the method is provided, 691 // -1 is returned otherwise. 692 int line_number_from_bci(int bci) const; 693 694 // Reflection support 695 bool is_overridden_in(Klass* k) const; | 47 // (required by the interpreter) 48 // 49 // Method embedded field layout (after declared fields): 50 // [EMBEDDED native_function (present only if native) ] 51 // [EMBEDDED signature_handler (present only if native) ] 52 53 class CheckedExceptionElement; 54 class LocalVariableTableElement; 55 class AdapterHandlerEntry; 56 class MethodData; 57 class MethodCounters; 58 class ConstMethod; 59 class InlineTableSizes; 60 class KlassSizeStats; 61 62 class Method : public Metadata { 63 friend class VMStructs; 64 friend class JVMCIVMStructs; 65 private: 66 ConstMethod* _constMethod; // Method read-only data. 
67 MethodData* _method_data; 68 MethodCounters* _method_counters; 69 AccessFlags _access_flags; // Access flags 70 int _vtable_index; // vtable index of this method (see VtableIndexFlag) 71 // note: can have vtables with >2**16 elements (because of inheritance) 72 #ifdef CC_INTERP 73 int _result_index; // C++ interpreter needs for converting results to/from stack 74 #endif 75 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none) 76 77 // Flags 78 enum Flags { 79 _jfr_towrite = 1 << 0, 80 _caller_sensitive = 1 << 1, 81 _force_inline = 1 << 2, 82 _dont_inline = 1 << 3, 83 _hidden = 1 << 4, 84 _has_injected_profile = 1 << 5, 85 _running_emcp = 1 << 6, 86 _intrinsic_candidate = 1 << 7, 87 _reserved_stack_access = 1 << 8 302 { return constMethod()->has_exception_handler(); } 303 int exception_table_length() const 304 { return constMethod()->exception_table_length(); } 305 ExceptionTableElement* exception_table_start() const 306 { return constMethod()->exception_table_start(); } 307 308 // Finds the first entry point bci of an exception handler for an 309 // exception of klass ex_klass thrown at throw_bci. A value of NULL 310 // for ex_klass indicates that the exception klass is not known; in 311 // this case it matches any constraint class. Returns -1 if the 312 // exception cannot be handled in this method. The handler 313 // constraint classes are loaded if necessary. Note that this may 314 // throw an exception if loading of the constraint classes causes 315 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError. 316 // If an exception is thrown, returns the bci of the 317 // exception handler which caused the exception to be thrown, which 318 // is needed for proper retries. See, for example, 319 // InterpreterRuntime::exception_handler_for_exception. 
320 static int fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS); 321 322 // method data access 323 MethodData* method_data() const { 324 return _method_data; 325 } 326 327 void set_method_data(MethodData* data) { 328 // The store into method must be released. On platforms without 329 // total store order (TSO) the reference may become visible before 330 // the initialization of data otherwise. 331 OrderAccess::release_store_ptr((volatile void *)&_method_data, data); 332 } 333 334 MethodCounters* method_counters() const { 335 return _method_counters; 336 } 337 338 void clear_method_counters() { 339 _method_counters = NULL; 340 } 341 342 bool init_method_counters(MethodCounters* counters) { 343 // Try to install a pointer to MethodCounters, return true on success. 344 return Atomic::cmpxchg_ptr(counters, (volatile void*)&_method_counters, NULL) == NULL; 345 } 346 347 #ifdef TIERED 348 // We are reusing interpreter_invocation_count as a holder for the previous event count! 349 // We can do that since interpreter_invocation_count is not used in tiered. 
350 int prev_event_count() const { 351 if (method_counters() == NULL) { 352 return 0; 622 bool has_compiled_code() const { return code() != NULL; } 623 624 // sizing 625 static int header_size() { return sizeof(Method)/wordSize; } 626 static int size(bool is_native); 627 int size() const { return method_size(); } 628 #if INCLUDE_SERVICES 629 void collect_statistics(KlassSizeStats *sz) const; 630 #endif 631 void log_touched(TRAPS); 632 static void print_touched_methods(outputStream* out); 633 634 // interpreter support 635 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); } 636 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); } 637 #ifdef CC_INTERP 638 static ByteSize result_index_offset() { return byte_offset_of(Method, _result_index ); } 639 #endif /* CC_INTERP */ 640 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); } 641 static ByteSize code_offset() { return byte_offset_of(Method, _code); } 642 static ByteSize method_data_offset() { 643 return byte_offset_of(Method, _method_data); 644 } 645 static ByteSize method_counters_offset() { 646 return byte_offset_of(Method, _method_counters); 647 } 648 #ifndef PRODUCT 649 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); } 650 #endif // not PRODUCT 651 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); } 652 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); } 653 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); } 654 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); } 655 656 // for code generation 657 static int method_data_offset_in_bytes() { return offset_of(Method, _method_data); } 658 static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); } 659 static int 
intrinsic_id_size_in_bytes() { return sizeof(u2); } 660 661 // Static methods that are used to implement member methods where an exposed this pointer 662 // is needed due to possible GCs 663 static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPS); 664 665 // Returns the byte code index from the byte code pointer 666 int bci_from(address bcp) const; 667 address bcp_from(int bci) const; 668 address bcp_from(address bcp) const; 669 int validate_bci_from_bcp(address bcp) const; 670 int validate_bci(int bci) const; 671 672 // Returns the line number for a bci if debugging information for the method is provided, 673 // -1 is returned otherwise. 674 int line_number_from_bci(int bci) const; 675 676 // Reflection support 677 bool is_overridden_in(Klass* k) const; |