/*
 * Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OOPS_METHOD_HPP
#define SHARE_OOPS_METHOD_HPP

#include "classfile/vmSymbols.hpp"
#include "code/compressedStream.hpp"
#include "compiler/compilerDefinitions.hpp"
#include "compiler/oopMap.hpp"
#include "interpreter/invocationCounter.hpp"
#include "oops/annotations.hpp"
#include "oops/constantPool.hpp"
#include "oops/methodCounters.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/oop.hpp"
#include "oops/typeArrayOop.hpp"
#include "utilities/accessFlags.hpp"
#include "utilities/align.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_JFR
#include "jfr/support/jfrTraceIdExtension.hpp"
#endif


// A Method represents a Java method.
//
// Note that most applications load thousands of methods, so keeping the size of this
// class small has a big impact on footprint.
//
// Note that native_function and signature_handler have to be at fixed offsets
// (required by the interpreter)
//
// Method embedded field layout (after declared fields):
//   [EMBEDDED native_function    (present only if native) ]
//   [EMBEDDED signature_handler  (present only if native) ]
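//
// Editorial sketch of that embedded layout (illustrative, derived from
// method_size(), native_function_offset() and signature_handler_offset()
// below; not authoritative):
//
//   offset 0                        : Method fields (sizeof(Method) bytes)
//   offset sizeof(Method)           : native_function   (one word, native only)
//   offset sizeof(Method) + word    : signature_handler (one word, native only)
//
// which is why method_size() adds two extra words for native methods.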

class CheckedExceptionElement;
class LocalVariableTableElement;
class AdapterHandlerEntry;
class MethodData;
class MethodCounters;
class ConstMethod;
class InlineTableSizes;
class CompiledMethod;
class InterpreterOopMap;

class Method : public Metadata {
 friend class VMStructs;
 friend class JVMCIVMStructs;
 private:
  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod*      _constMethod;                // Method read-only data.

  MethodData*       _method_data;
  MethodCounters*   _method_counters;
  AccessFlags       _access_flags;               // Access flags
  int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
                                                 // note: can have vtables with >2**16 elements (because of inheritance)
  u2                _intrinsic_id;               // vmSymbols::intrinsic_id (0 == _none)

  // Flags
  enum Flags {
    _caller_sensitive      = 1 << 0,
    _force_inline          = 1 << 1,
    _dont_inline           = 1 << 2,
    _hidden                = 1 << 3,
    _has_injected_profile  = 1 << 4,
    _running_emcp          = 1 << 5,
    _intrinsic_candidate   = 1 << 6,
    _reserved_stack_access = 1 << 7
  };
  mutable u2 _flags;

  JFR_ONLY(DEFINE_TRACE_FLAG;)

#ifndef PRODUCT
  int               _compiled_invocation_count;  // Number of nmethod invocations so far (for perf. debugging)
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry;           // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry;         // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from NULL to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // NULL only at safepoints (because of a de-opt).
  CompiledMethod* volatile _code;                // Points to the corresponding piece of native code
  volatile address _from_interpreted_entry;      // Cache of _code ? _adapter->i2c_entry() : _i2i_entry

#if INCLUDE_AOT && defined(TIERED)
  CompiledMethod* _aot_code;
#endif
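
  // Editorial summary of the entry-point caches above (illustrative, not
  // authoritative):
  //   interpreted caller -> this method : _from_interpreted_entry
  //       (== _adapter->i2c_entry() while compiled code exists, else _i2i_entry)
  //   compiled caller    -> this method : _from_compiled_entry
  //       (== _code->entry_point() while compiled code exists, else
  //        _adapter->c2i_entry())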

  // Constructor
  Method(ConstMethod* xconst, AccessFlags access_flags);
 public:

  static Method* allocate(ClassLoaderData* loader_data,
                          int byte_code_size,
                          AccessFlags access_flags,
                          InlineTableSizes* sizes,
                          ConstMethod::MethodType method_type,
                          TRAPS);

  // CDS and vtbl checking can create an empty Method to get vtbl pointer.
  Method(){}

  bool is_method() const volatile { return true; }

  void restore_unshareable_info(TRAPS);

  // accessors for instance variables

  ConstMethod* constMethod() const             { return _constMethod; }
  void set_constMethod(ConstMethod* xconst)    { _constMethod = xconst; }


  static address make_adapters(const methodHandle& mh, TRAPS);
  address from_compiled_entry() const;
  address from_compiled_entry_no_trampoline() const;
  address from_interpreted_entry() const;

  // access flag
  AccessFlags access_flags() const               { return _access_flags;  }
  void set_access_flags(AccessFlags flags)       { _access_flags = flags; }

  // name
  Symbol* name() const                           { return constants()->symbol_at(name_index()); }
  int name_index() const                         { return constMethod()->name_index(); }
  void set_name_index(int index)                 { constMethod()->set_name_index(index); }

  // signature
  Symbol* signature() const                      { return constants()->symbol_at(signature_index()); }
  int signature_index() const                    { return constMethod()->signature_index(); }
  void set_signature_index(int index)            { constMethod()->set_signature_index(index); }

  // generics support
  Symbol* generic_signature() const              { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }
  int generic_signature_index() const            { return constMethod()->generic_signature_index(); }
  void set_generic_signature_index(int index)    { constMethod()->set_generic_signature_index(index); }

  // annotations support
  AnnotationArray* annotations() const {
    return constMethod()->method_annotations();
  }
  AnnotationArray* parameter_annotations() const {
    return constMethod()->parameter_annotations();
  }
  AnnotationArray* annotation_default() const {
    return constMethod()->default_annotations();
  }
  AnnotationArray* type_annotations() const {
    return constMethod()->type_annotations();
  }

  // Helper routine: get klass name + "." + method name + signature as
  // C string, for the purpose of providing more useful
  // fatal error handling. The string is allocated in resource
  // area if a buffer is not provided by the caller.
  char* name_and_sig_as_C_string() const;
  char* name_and_sig_as_C_string(char* buf, int size) const;

  // Static routines for the situations where we don't have a Method*
  static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature);
  static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size);

  // Get return type + klass name + "." + method name + ( parameters types )
  // as a C string or print it to an outputStream.
  // This is to be used to assemble strings passed to Java, so that
  // the text more resembles Java code. Used in exception messages.
  // Memory is allocated in the resource area; the caller needs
  // a ResourceMark.
  const char* external_name() const;
  void print_external_name(outputStream *os) const;

  static const char* external_name(Klass* klass, Symbol* method_name, Symbol* signature);
  static void print_external_name(outputStream *os, Klass* klass, Symbol* method_name, Symbol* signature);

  Bytecodes::Code java_code_at(int bci) const {
    return Bytecodes::java_code_at(this, bcp_from(bci));
  }
  Bytecodes::Code code_at(int bci) const {
    return Bytecodes::code_at(this, bcp_from(bci));
  }

  // JVMTI breakpoints
#if !INCLUDE_JVMTI
  Bytecodes::Code orig_bytecode_at(int bci) const {
    ShouldNotReachHere();
    return Bytecodes::_shouldnotreachhere;
  }
  void set_orig_bytecode_at(int bci, Bytecodes::Code code) {
    ShouldNotReachHere();
  };
  u2 number_of_breakpoints() const { return 0; }
#else // !INCLUDE_JVMTI
  Bytecodes::Code orig_bytecode_at(int bci) const;
  void set_orig_bytecode_at(int bci, Bytecodes::Code code);
  void set_breakpoint(int bci);
  void clear_breakpoint(int bci);
  void clear_all_breakpoints();
  // Tracking number of breakpoints, for fullspeed debugging.
  // Only mutated by VM thread.
  u2 number_of_breakpoints() const {
    MethodCounters* mcs = method_counters();
    if (mcs == NULL) {
      return 0;
    } else {
      return mcs->number_of_breakpoints();
    }
  }
  void incr_number_of_breakpoints(TRAPS) {
    MethodCounters* mcs = get_method_counters(CHECK);
    if (mcs != NULL) {
      mcs->incr_number_of_breakpoints();
    }
  }
  void decr_number_of_breakpoints(TRAPS) {
    MethodCounters* mcs = get_method_counters(CHECK);
    if (mcs != NULL) {
      mcs->decr_number_of_breakpoints();
    }
  }
  // Initialization only
  void clear_number_of_breakpoints() {
    MethodCounters* mcs = method_counters();
    if (mcs != NULL) {
      mcs->clear_number_of_breakpoints();
    }
  }
#endif // !INCLUDE_JVMTI

  // index into InstanceKlass methods() array
  // note: also used by jfr
  u2 method_idnum() const                { return constMethod()->method_idnum(); }
  void set_method_idnum(u2 idnum)        { constMethod()->set_method_idnum(idnum); }

  u2 orig_method_idnum() const           { return constMethod()->orig_method_idnum(); }
  void set_orig_method_idnum(u2 idnum)   { constMethod()->set_orig_method_idnum(idnum); }

  // code size
  int code_size() const                  { return constMethod()->code_size(); }

  // method size in words
  int method_size() const                { return sizeof(Method)/wordSize + ( is_native() ? 2 : 0 ); }

  // constant pool for Klass* holding this method
  ConstantPool* constants() const        { return constMethod()->constants(); }
  void set_constants(ConstantPool* c)    { constMethod()->set_constants(c); }

  // max stack
  // return original max stack size for method verification
  int  verifier_max_stack() const        { return constMethod()->max_stack(); }
  int  max_stack() const                 { return constMethod()->max_stack() + extra_stack_entries(); }
  void set_max_stack(int size)           { constMethod()->set_max_stack(size); }

  // max locals
  int  max_locals() const                { return constMethod()->max_locals(); }
  void set_max_locals(int size)          { constMethod()->set_max_locals(size); }

  int highest_comp_level() const;
  void set_highest_comp_level(int level);
  int highest_osr_comp_level() const;
  void set_highest_osr_comp_level(int level);

#if COMPILER2_OR_JVMCI
  // Count of times method was exited via exception while interpreting
  void interpreter_throwout_increment(TRAPS) {
    MethodCounters* mcs = get_method_counters(CHECK);
    if (mcs != NULL) {
      mcs->interpreter_throwout_increment();
    }
  }
#endif

  int interpreter_throwout_count() const {
    MethodCounters* mcs = method_counters();
    if (mcs == NULL) {
      return 0;
    } else {
      return mcs->interpreter_throwout_count();
    }
  }

  // Derive stuff from the signature at load time.
  void compute_from_signature(Symbol* sig);

  // size of parameters (receiver if any + arguments)
  int size_of_parameters() const         { return constMethod()->size_of_parameters(); }
  void set_size_of_parameters(int size)  { constMethod()->set_size_of_parameters(size); }
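
  // Worked example (editorial, illustrative only): for a non-static method
  // with signature (IJLjava/lang/String;)V, size_of_parameters() is
  //   1 (receiver) + 1 (int) + 2 (long) + 1 (oop) == 5 words.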

  bool has_stackmap_table() const {
    return constMethod()->has_stackmap_table();
  }

  Array<u1>* stackmap_data() const {
    return constMethod()->stackmap_data();
  }

  void set_stackmap_data(Array<u1>* sd) {
    constMethod()->set_stackmap_data(sd);
  }

  // exception handler table
  bool has_exception_handler() const
                             { return constMethod()->has_exception_handler(); }
  int exception_table_length() const
                             { return constMethod()->exception_table_length(); }
  ExceptionTableElement* exception_table_start() const
                             { return constMethod()->exception_table_start(); }

  // Finds the first entry point bci of an exception handler for an
  // exception of klass ex_klass thrown at throw_bci. A value of NULL
  // for ex_klass indicates that the exception klass is not known; in
  // this case it matches any constraint class. Returns -1 if the
  // exception cannot be handled in this method. The handler
  // constraint classes are loaded if necessary. Note that this may
  // throw an exception if loading of the constraint classes causes
  // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
  // If an exception is thrown, returns the bci of the
  // exception handler which caused the exception to be thrown, which
  // is needed for proper retries. See, for example,
  // InterpreterRuntime::exception_handler_for_exception.
  static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);

  static bool register_native(Klass* k,
                              Symbol* name,
                              Symbol* signature,
                              address entry,
                              TRAPS);

  // method data access
  MethodData* method_data() const {
    return _method_data;
  }

  void set_method_data(MethodData* data);

  MethodCounters* method_counters() const {
    return _method_counters;
  }

  void clear_method_counters() {
    _method_counters = NULL;
  }

  bool init_method_counters(MethodCounters* counters);

#ifdef TIERED
  // We are reusing interpreter_invocation_count as a holder for the previous event count!
  // We can do that since interpreter_invocation_count is not used in tiered.
  int prev_event_count() const {
    if (method_counters() == NULL) {
      return 0;
    } else {
      return method_counters()->interpreter_invocation_count();
    }
  }
  void set_prev_event_count(int count) {
    MethodCounters* mcs = method_counters();
    if (mcs != NULL) {
      mcs->set_interpreter_invocation_count(count);
    }
  }
  jlong prev_time() const {
    MethodCounters* mcs = method_counters();
    return mcs == NULL ? 0 : mcs->prev_time();
  }
  void set_prev_time(jlong time) {
    MethodCounters* mcs = method_counters();
    if (mcs != NULL) {
      mcs->set_prev_time(time);
    }
  }
  float rate() const {
    MethodCounters* mcs = method_counters();
    return mcs == NULL ? 0 : mcs->rate();
  }
  void set_rate(float rate) {
    MethodCounters* mcs = method_counters();
    if (mcs != NULL) {
      mcs->set_rate(rate);
    }
  }

#if INCLUDE_AOT
  void set_aot_code(CompiledMethod* aot_code) {
    _aot_code = aot_code;
  }

  CompiledMethod* aot_code() const {
    return _aot_code;
  }
#else
  CompiledMethod* aot_code() const { return NULL; }
#endif // INCLUDE_AOT
#endif // TIERED

  int nmethod_age() const {
    if (method_counters() == NULL) {
      return INT_MAX;
    } else {
      return method_counters()->nmethod_age();
    }
  }

  int invocation_count();
  int backedge_count();

  bool was_executed_more_than(int n);
  bool was_never_executed()        { return !was_executed_more_than(0); }

  static void build_interpreter_method_data(const methodHandle& method, TRAPS);

  static MethodCounters* build_method_counters(Method* m, TRAPS);

  int interpreter_invocation_count() {
    if (TieredCompilation) {
      return invocation_count();
    } else {
      MethodCounters* mcs = method_counters();
      return (mcs == NULL) ? 0 : mcs->interpreter_invocation_count();
    }
  }
#if COMPILER2_OR_JVMCI
  int increment_interpreter_invocation_count(TRAPS) {
    if (TieredCompilation) ShouldNotReachHere();
    MethodCounters* mcs = get_method_counters(CHECK_0);
    return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();
  }
#endif

#ifndef PRODUCT
  int  compiled_invocation_count() const        { return _compiled_invocation_count; }
  void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }
#else
  // for PrintMethodData in a product build
  int  compiled_invocation_count() const        { return 0; }
#endif // not PRODUCT

  // Clear (non-shared space) pointers which could not be relevant
  // if this (shared) method were mapped into another JVM.
  void remove_unshareable_info();

  // nmethod/verified compiler entry
  address verified_code_entry();
  bool check_code() const;      // Not inline to avoid circular ref
  CompiledMethod* volatile code() const;

  // Locks CompiledMethod_lock if not held.
  void unlink_code(CompiledMethod *compare);
  // Locks CompiledMethod_lock if not held.
  void unlink_code();

 private:
  // Either called with CompiledMethod_lock held or from constructor.
  void clear_code();

 public:
  static void set_code(const methodHandle& mh, CompiledMethod* code);
  void set_adapter_entry(AdapterHandlerEntry* adapter) {
    constMethod()->set_adapter_entry(adapter);
  }
  void set_adapter_trampoline(AdapterHandlerEntry** trampoline) {
    constMethod()->set_adapter_trampoline(trampoline);
  }
  void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
    constMethod()->update_adapter_trampoline(adapter);
  }
  void set_from_compiled_entry(address entry) {
    _from_compiled_entry = entry;
  }

  address get_i2c_entry();
  address get_c2i_entry();
  address get_c2i_unverified_entry();
  address get_c2i_no_clinit_check_entry();
  AdapterHandlerEntry* adapter() const {
    return constMethod()->adapter();
  }
  // setup entry points
  void link_method(const methodHandle& method, TRAPS);
  // clear entry points. Used by sharing code during dump time
  void unlink_method() NOT_CDS_RETURN;

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodType; }

  // vtable index
  enum VtableIndexFlag {
    // Valid vtable indexes are non-negative (>= 0).
    // These few negative values are used as sentinels.
    itable_index_max        = -10, // first itable index, growing downward
    pending_itable_index    = -9,  // itable index will be assigned
    invalid_vtable_index    = -4,  // distinct from any valid vtable index
    garbage_vtable_index    = -3,  // not yet linked; no vtable layout yet
    nonvirtual_vtable_index = -2   // there is no need for vtable dispatch
    // 6330203 Note: Do not use -1, which was overloaded with many meanings.
  };
  DEBUG_ONLY(bool valid_vtable_index() const { return _vtable_index >= nonvirtual_vtable_index; })
  bool has_vtable_index() const              { return _vtable_index >= 0; }
  int  vtable_index() const                  { return _vtable_index; }
  void set_vtable_index(int index);
  DEBUG_ONLY(bool valid_itable_index() const { return _vtable_index <= pending_itable_index; })
  bool has_itable_index() const              { return _vtable_index <= itable_index_max; }
  int  itable_index() const                  { assert(valid_itable_index(), "");
                                               return itable_index_max - _vtable_index; }
  void set_itable_index(int index);
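
  // Worked example (editorial, derived from itable_index() above): itable
  // indexes grow downward from itable_index_max, so itable index i is stored
  // as _vtable_index == itable_index_max - i. For i == 2:
  //   _vtable_index  == -10 - 2     == -12
  //   itable_index() == -10 - (-12) ==   2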

  // interpreter entry
  address interpreter_entry() const    { return _i2i_entry; }
  // Only used at initialization time, so we can set _i2i_entry and _from_interpreted_entry
  void set_interpreter_entry(address entry) {
    assert(!is_shared(),
           "shared method's interpreter entry should not be changed at run time");
    if (_i2i_entry != entry) {
      _i2i_entry = entry;
    }
    if (_from_interpreted_entry != entry) {
      _from_interpreted_entry = entry;
    }
  }

  // native function (used for native methods only)
  enum {
    native_bind_event_is_interesting = true
  };
  address native_function() const      { return *(native_function_addr()); }

  // Must specify a real function (not NULL).
  // Use clear_native_function() to unregister.
  void set_native_function(address function, bool post_event_flag);
  bool has_native_function() const;
  void clear_native_function();

  // signature handler (used for native methods only)
  address signature_handler() const    { return *(signature_handler_addr()); }
  void set_signature_handler(address handler);

  // Interpreter oopmap support
  void mask_for(int bci, InterpreterOopMap* mask);

  // operations on invocation counter
  void print_invocation_count();

  // byte codes
  void    set_code(address code)       { return constMethod()->set_code(code); }
  address code_base() const            { return constMethod()->code_base(); }
  bool    contains(address bcp) const  { return constMethod()->contains(bcp); }

  // prints byte codes
  void print_codes() const             { print_codes_on(tty); }
  void print_codes_on(outputStream* st) const;
  void print_codes_on(int from, int to, outputStream* st) const;

  // method parameters
  bool has_method_parameters() const
                         { return constMethod()->has_method_parameters(); }
  int method_parameters_length() const
                         { return constMethod()->method_parameters_length(); }
  MethodParametersElement* method_parameters_start() const
                         { return constMethod()->method_parameters_start(); }

  // checked exceptions
  int checked_exceptions_length() const
                         { return constMethod()->checked_exceptions_length(); }
  CheckedExceptionElement* checked_exceptions_start() const
                         { return constMethod()->checked_exceptions_start(); }

  // localvariable table
  bool has_localvariable_table() const
                         { return constMethod()->has_localvariable_table(); }
  int localvariable_table_length() const
                         { return constMethod()->localvariable_table_length(); }
  LocalVariableTableElement* localvariable_table_start() const
                         { return constMethod()->localvariable_table_start(); }

  bool has_linenumber_table() const
                         { return constMethod()->has_linenumber_table(); }
  u_char* compressed_linenumber_table() const
                         { return constMethod()->compressed_linenumber_table(); }

  // method holder (the Klass* holding this method)
  InstanceKlass* method_holder() const { return constants()->pool_holder(); }

  Symbol* klass_name() const;          // returns the name of the method holder
  BasicType result_type() const        { return constMethod()->result_type(); }
  bool is_returning_oop() const        { BasicType r = result_type(); return is_reference_type(r); }
  bool is_returning_fp() const         { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }

  // Checked exceptions thrown by this method (resolved to mirrors)
  objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }

  // Access flags
  bool is_public() const               { return access_flags().is_public();       }
  bool is_private() const              { return access_flags().is_private();      }
  bool is_protected() const            { return access_flags().is_protected();    }
  bool is_package_private() const      { return !is_public() && !is_private() && !is_protected(); }
  bool is_static() const               { return access_flags().is_static();       }
  bool is_final() const                { return access_flags().is_final();        }
  bool is_synchronized() const         { return access_flags().is_synchronized(); }
  bool is_native() const               { return access_flags().is_native();       }
  bool is_abstract() const             { return access_flags().is_abstract();     }
  bool is_strict() const               { return access_flags().is_strict();       }
  bool is_synthetic() const            { return access_flags().is_synthetic();    }

  // returns true if contains only return operation
  bool is_empty_method() const;

  // returns true if this is a vanilla constructor
  bool is_vanilla_constructor() const;

  // checks method and its method holder
  bool is_final_method() const;
  bool is_final_method(AccessFlags class_access_flags) const;
  // interface method declared with 'default' - excludes private interface methods
  bool is_default_method() const;

  // true if method needs no dynamic dispatch (final and/or no vtable entry)
  bool can_be_statically_bound() const;
  bool can_be_statically_bound(InstanceKlass* context) const;
  bool can_be_statically_bound(AccessFlags class_access_flags) const;

  // returns true if the method has any backward branches.
  bool has_loops() {
    return access_flags().loops_flag_init() ? access_flags().has_loops() : compute_has_loops_flag();
  };

  bool compute_has_loops_flag();

  bool has_jsrs() {
    return access_flags().has_jsrs();
  };
  void set_has_jsrs() {
    _access_flags.set_has_jsrs();
  }

  // returns true if the method has any monitors.
  bool has_monitors() const            { return is_synchronized() || access_flags().has_monitor_bytecodes(); }
  bool has_monitor_bytecodes() const   { return access_flags().has_monitor_bytecodes(); }

  void set_has_monitor_bytecodes()     { _access_flags.set_has_monitor_bytecodes(); }

  // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit
  // bytecodes properly nest in the method. It might return false, even though they actually nest
  // properly, since the info has not been computed yet.
  bool guaranteed_monitor_matching() const       { return access_flags().is_monitor_matching(); }
  void set_guaranteed_monitor_matching()         { _access_flags.set_monitor_matching(); }

  // returns true if the method is an accessor function (setter/getter).
  bool is_accessor() const;

  // returns true if the method is a getter
  bool is_getter() const;

  // returns true if the method is a setter
  bool is_setter() const;

  // returns true if the method does nothing but return a constant of primitive type
  bool is_constant_getter() const;

  // returns true if the method is an initializer (<init> or <clinit>).
  bool is_initializer() const;

  // returns true if the method is static OR if the classfile version < 51
  bool has_valid_initializer_flags() const;

  // returns true if the method name is <clinit> and the method has
  // valid static initializer flags.
  bool is_static_initializer() const;

  // returns true if the method name is <init>
  bool is_object_initializer() const;

  // compiled code support
  // NOTE: code() is inherently racy as deopt can be clearing code
  // simultaneously. Use with caution.
  bool has_compiled_code() const;

#ifdef TIERED
  bool has_aot_code() const            { return aot_code() != NULL; }
#endif

  bool needs_clinit_barrier() const;

  // sizing
  static int header_size() {
    return align_up((int)sizeof(Method), wordSize) / wordSize;
  }
  static int size(bool is_native);
  int size() const                     { return method_size(); }
  void log_touched(TRAPS);
  static void print_touched_methods(outputStream* out);

  // interpreter support
  static ByteSize const_offset()           { return byte_offset_of(Method, _constMethod        ); }
  static ByteSize access_flags_offset()    { return byte_offset_of(Method, _access_flags       ); }
  static ByteSize from_compiled_offset()   { return byte_offset_of(Method, _from_compiled_entry); }
  static ByteSize code_offset()            { return byte_offset_of(Method, _code); }
  static ByteSize method_data_offset() {
    return byte_offset_of(Method, _method_data);
  }
  static ByteSize method_counters_offset() {
    return byte_offset_of(Method, _method_counters);
  }
#ifndef PRODUCT
  static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
#endif // not PRODUCT
  static ByteSize native_function_offset()  { return in_ByteSize(sizeof(Method)); }
  static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
  static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
  static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
  static ByteSize itable_index_offset()     { return byte_offset_of(Method, _vtable_index ); }

  // for code generation
  static int method_data_offset_in_bytes()  { return offset_of(Method, _method_data); }
  static int intrinsic_id_offset_in_bytes() { return offset_of(Method, _intrinsic_id); }
  static int intrinsic_id_size_in_bytes()   { return sizeof(u2); }

  // Static methods that are used to implement member methods where an exposed this pointer
  // is needed due to possible GCs
  static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPS);

  // Returns the byte code index from the byte code pointer
  int     bci_from(address bcp) const;
  address bcp_from(int bci) const;
  address bcp_from(address bcp) const;
  int validate_bci_from_bcp(address bcp) const;
  int validate_bci(int bci) const;
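
  // Editorial note (illustrative, not authoritative): for a bcp inside this
  // method's bytecodes the conversions above are simple pointer arithmetic,
  //   bci_from(bcp) == bcp - code_base()   and   bcp_from(bci) == code_base() + bci,
  // plus debug-mode validation.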

  // Returns the line number for a bci if debugging information for the method is provided;
  // -1 is returned otherwise.
  int line_number_from_bci(int bci) const;

  // Reflection support
  bool is_overridden_in(Klass* k) const;

  // Stack walking support
  bool is_ignored_by_security_stack_walk() const;

  // JSR 292 support
  bool is_method_handle_intrinsic() const;   // MethodHandles::is_signature_polymorphic_intrinsic(intrinsic_id)
  bool is_compiled_lambda_form() const;      // intrinsic_id() == vmIntrinsics::_compiledLambdaForm
  bool has_member_arg() const;               // intrinsic_id() == vmIntrinsics::_linkToSpecial, etc.
  static methodHandle make_method_handle_intrinsic(vmIntrinsics::ID iid, // _invokeBasic, _linkToVirtual
                                                   Symbol* signature,    // anything at all
                                                   TRAPS);
  static Klass* check_non_bcp_klass(Klass* klass);

  enum {
    // How many extra stack entries for invokedynamic
    extra_stack_entries_for_jsr292 = 1
  };

  // this operates only on invoke methods:
  // presize interpreter frames for extra interpreter stack entries, if needed
  // Account for the extra appendix argument for invokehandle/invokedynamic
  static int extra_stack_entries() { return extra_stack_entries_for_jsr292; }
  static int extra_stack_words();  // = extra_stack_entries() * Interpreter::stackElementSize

  // RedefineClasses() support:
  bool is_old() const                  { return access_flags().is_old(); }
  void set_is_old()                    { _access_flags.set_is_old(); }
  bool is_obsolete() const             { return access_flags().is_obsolete(); }
  void set_is_obsolete()               { _access_flags.set_is_obsolete(); }
  bool is_deleted() const              { return access_flags().is_deleted(); }
  void set_is_deleted()                { _access_flags.set_is_deleted(); }

  bool is_running_emcp() const {
    // EMCP methods are old but not obsolete or deleted. Equivalent
    // Modulo Constant Pool means the method is equivalent except
    // the constant pool and instructions that access the constant
    // pool might be different.
    // If a breakpoint is set in a redefined method, its EMCP methods that are
    // still running must have a breakpoint also.
    return (_flags & _running_emcp) != 0;
  }

  void set_running_emcp(bool x) {
    _flags = x ? (_flags | _running_emcp) : (_flags & ~_running_emcp);
  }

  bool on_stack() const                { return access_flags().on_stack(); }
  void set_on_stack(const bool value);

  // see the definition in Method*.cpp for the gory details
  bool should_not_be_cached() const;

  // JVMTI Native method prefixing support:
  bool is_prefixed_native() const      { return access_flags().is_prefixed_native(); }
  void set_is_prefixed_native()        { _access_flags.set_is_prefixed_native(); }

  // Rewriting support
  static methodHandle clone_with_new_data(const methodHandle& m, u_char* new_code, int new_code_length,
                                          u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS);

  // jmethodID handling
  // Because the useful life-span of a jmethodID cannot be determined,
  // once created they are never reclaimed. The methods to which they refer,
  // however, can be GC'ed away if the class is unloaded or if the method is
  // made obsolete or deleted -- in these cases, the jmethodID
  // refers to NULL (as is the case for any weak reference).
  static jmethodID make_jmethod_id(ClassLoaderData* loader_data, Method* mh);
  static void destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID mid);

  // Ensure there is enough capacity in the internal tracking data
  // structures to hold the number of jmethodIDs you plan to generate.
  // This saves substantial time doing allocations.
  static void ensure_jmethod_ids(ClassLoaderData* loader_data, int capacity);

  // Use resolve_jmethod_id() in situations where the caller is expected
  // to provide a valid jmethodID; the only sanity checks are in asserts;
  // result guaranteed not to be NULL.
  inline static Method* resolve_jmethod_id(jmethodID mid) {
    assert(mid != NULL, "JNI method id should not be null");
    return *((Method**)mid);
  }
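
  // Editorial sketch of typical jmethodID round-tripping (illustrative only;
  // checked_resolve_jmethod_id() is declared just below):
  //   jmethodID jmid = method->jmethod_id();                     // allocate if needed
  //   Method*   m    = Method::checked_resolve_jmethod_id(jmid); // NULL if stale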

  // Use checked_resolve_jmethod_id() in situations where the caller
  // should provide a valid jmethodID, but might not. NULL is returned
  // when the jmethodID does not refer to a valid method.
  static Method* checked_resolve_jmethod_id(jmethodID mid);

  static void change_method_associated_with_jmethod_id(jmethodID old_jmid_ptr, Method* new_method);
  static bool is_method_id(jmethodID mid);

  // Clear methods
  static void clear_jmethod_ids(ClassLoaderData* loader_data);
  static void print_jmethod_ids(const ClassLoaderData* loader_data, outputStream* out) PRODUCT_RETURN;

  // Get this method's jmethodID -- allocate if it doesn't exist
  jmethodID jmethod_id();

  // Lookup the jmethodID for this method. Return NULL if not found.
  // NOTE that this function can be called from a signal handler
  // (see AsyncGetCallTrace support for Forte Analyzer) and this
  // needs to be async-safe. No allocation should be done and
  // so handles are not used to avoid deadlock.
  jmethodID find_jmethod_id_or_null()  { return method_holder()->jmethod_id_or_null(this); }

  // Support for inlining of intrinsic methods
  vmIntrinsics::ID intrinsic_id() const          { return (vmIntrinsics::ID) _intrinsic_id; }
  void     set_intrinsic_id(vmIntrinsics::ID id) { _intrinsic_id = (u2) id; }

  // Helper routines for intrinsic_id() and vmIntrinsics::method().
  void init_intrinsic_id();     // updates from _none if a match
  static vmSymbols::SID klass_id_for_intrinsics(const Klass* holder);

  bool caller_sensitive() {
    return (_flags & _caller_sensitive) != 0;
  }
  void set_caller_sensitive(bool x) {
    _flags = x ? (_flags | _caller_sensitive) : (_flags & ~_caller_sensitive);
  }

  bool force_inline() {
    return (_flags & _force_inline) != 0;
  }
  void set_force_inline(bool x) {
    _flags = x ? (_flags | _force_inline) : (_flags & ~_force_inline);
  }

  bool dont_inline() {
    return (_flags & _dont_inline) != 0;
  }
  void set_dont_inline(bool x) {
    _flags = x ? (_flags | _dont_inline) : (_flags & ~_dont_inline);
  }

  bool is_hidden() {
    return (_flags & _hidden) != 0;
  }
  void set_hidden(bool x) {
    _flags = x ? (_flags | _hidden) : (_flags & ~_hidden);
  }

  bool intrinsic_candidate() {
    return (_flags & _intrinsic_candidate) != 0;
  }
  void set_intrinsic_candidate(bool x) {
    _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
  }

  bool has_injected_profile() {
    return (_flags & _has_injected_profile) != 0;
  }
  void set_has_injected_profile(bool x) {
    _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
  }

  bool has_reserved_stack_access() {
    return (_flags & _reserved_stack_access) != 0;
  }

  void set_has_reserved_stack_access(bool x) {
    _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
  }

  JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)

  ConstMethod::MethodType method_type() const {
    return _constMethod->method_type();
  }
  bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }

  // On-stack replacement support
  bool has_osr_nmethod(int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
  }

  int mark_osr_nmethods() {
    return method_holder()->mark_osr_nmethods(this);
  }

  nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
  }

  // Find if klass for method is loaded
  bool is_klass_loaded_by_klass_index(int klass_index) const;
  bool is_klass_loaded(int refinfo_index, bool must_be_resolved = false) const;

  // Indicates whether compilation failed earlier for this method, or
  // whether it is not compilable for another reason like having a
  // breakpoint set in it.
  bool is_not_compilable(int comp_level = CompLevel_any) const;
  void set_not_compilable(const char* reason, int comp_level = CompLevel_all, bool report = true);
  void set_not_compilable_quietly(const char* reason, int comp_level = CompLevel_all) {
    set_not_compilable(reason, comp_level, false);
  }
  bool is_not_osr_compilable(int comp_level = CompLevel_any) const;
  void set_not_osr_compilable(const char* reason, int comp_level = CompLevel_all, bool report = true);
  void set_not_osr_compilable_quietly(const char* reason, int comp_level = CompLevel_all) {
    set_not_osr_compilable(reason, comp_level, false);
  }
  bool is_always_compilable() const;

 private:
  void print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason);

 public:
  MethodCounters* get_method_counters(TRAPS) {
    if (_method_counters == NULL) {
      build_method_counters(this, CHECK_AND_CLEAR_NULL);
    }
    return _method_counters;
  }

  bool is_not_c1_compilable() const    { return access_flags().is_not_c1_compilable(); }
  void set_not_c1_compilable()         { _access_flags.set_not_c1_compilable(); }
  void clear_not_c1_compilable()       { _access_flags.clear_not_c1_compilable(); }
  bool is_not_c2_compilable() const    { return access_flags().is_not_c2_compilable(); }
  void set_not_c2_compilable()         { _access_flags.set_not_c2_compilable(); }
  void clear_not_c2_compilable()       { _access_flags.clear_not_c2_compilable(); }

  bool is_not_c1_osr_compilable() const { return is_not_c1_compilable(); }   // don't waste an accessFlags bit
  void set_not_c1_osr_compilable()      { set_not_c1_compilable(); }         // don't waste an accessFlags bit
  void clear_not_c1_osr_compilable()    { clear_not_c1_compilable(); }       // don't waste an accessFlags bit
  bool is_not_c2_osr_compilable() const { return access_flags().is_not_c2_osr_compilable(); }
  void set_not_c2_osr_compilable()      { _access_flags.set_not_c2_osr_compilable(); }
  void clear_not_c2_osr_compilable()    { _access_flags.clear_not_c2_osr_compilable(); }

  // Background compilation support
  bool queued_for_compilation() const  { return access_flags().queued_for_compilation(); }
  void set_queued_for_compilation()    { _access_flags.set_queued_for_compilation(); }
  void clear_queued_for_compilation()  { _access_flags.clear_queued_for_compilation(); }

  // Resolve all classes in signature, return 'true' if successful
  static bool load_signature_classes(const methodHandle& m, TRAPS);

  // Returns true if not all classes referenced in the signature, including the
  // return type, have been loaded
  static bool has_unloaded_classes_in_signature(const methodHandle& m, TRAPS);

  // Printing
  void print_short_name(outputStream* st = tty); // prints as klassname::methodname; Exposed so field engineers can debug VM
#if INCLUDE_JVMTI
  void print_name(outputStream* st = tty); // prints as "virtual void foo(int)"; exposed for TraceRedefineClasses
#else
  void print_name(outputStream* st = tty) PRODUCT_RETURN; // prints as "virtual void foo(int)"
#endif

  typedef int (*method_comparator_func)(Method* a, Method* b);

  // Helper routine used for method sorting
  static void sort_methods(Array<Method*>* methods, bool set_idnums = true, method_comparator_func func = NULL);

  // Deallocation function for redefine classes or if an error occurs
  void deallocate_contents(ClassLoaderData* loader_data);

  void release_C_heap_structures();

  Method* get_new_method() const {
    InstanceKlass* holder = method_holder();
    Method* new_method = holder->method_with_idnum(orig_method_idnum());

    assert(new_method != NULL, "method_with_idnum() should not be NULL");
    assert(this != new_method, "sanity check");
    return new_method;
  }

  // Printing
#ifndef PRODUCT
  void print_on(outputStream* st) const;
#endif
  void print_value_on(outputStream* st) const;
  void print_linkage_flags(outputStream* st) PRODUCT_RETURN;

  const char* internal_name() const { return "{method}"; }

  // Check for valid method pointer
  static bool has_method_vptr(const void* ptr);
  static bool is_valid_method(const Method* m);

  // Verify
  void verify() { verify_on(tty); }
  void verify_on(outputStream* st);

 private:

  // Inlined elements
  address* native_function_addr() const   { assert(is_native(), "must be native"); return (address*) (this+1); }
  address* signature_handler_addr() const { return native_function_addr() + 1; }
};


// Utility class for compressing line number tables

class CompressedLineNumberWriteStream: public CompressedWriteStream {
 private:
  int _bci;
  int _line;
 public:
  // Constructor
  CompressedLineNumberWriteStream(int initial_size) : CompressedWriteStream(initial_size), _bci(0), _line(0) {}
  CompressedLineNumberWriteStream(u_char* buffer, int initial_size) : CompressedWriteStream(buffer, initial_size), _bci(0), _line(0) {}

  // Write (bci, line number) pair to stream
  void write_pair_regular(int bci_delta, int line_delta);

  // If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
  // we save it as one byte, otherwise we write a 0xFF escape character
  // and use regular compression. 0x0 is used as end-of-stream terminator.
  void write_pair_inline(int bci, int line);
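
  // Worked example (editorial; the exact packing lives in the .cpp): when both
  // deltas fit, the single byte is (bci_delta << 3) | line_delta, so
  // bci_delta == 6 and line_delta == 2 encode as (6 << 3) | 2 == 0x32.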

  void write_pair(int bci, int line);

  // Write end-of-stream marker
  void write_terminator() { write_byte(0); }
};


// Utility class for decompressing line number tables

class CompressedLineNumberReadStream: public CompressedReadStream {
 private:
  int _bci;
  int _line;
 public:
  // Constructor
  CompressedLineNumberReadStream(u_char* buffer);
  // Read (bci, line number) pair from stream. Returns false at end-of-stream.
  bool read_pair();
  // Accessing bci and line number (after calling read_pair)
  int bci() const  { return _bci; }
  int line() const { return _line; }
};


#if INCLUDE_JVMTI

/// Fast Breakpoints.

// If this structure gets more complicated (because bpts get numerous),
// move it into its own header.

// There is presently no provision for concurrent access
// to breakpoint lists, which is only OK for JVMTI because
// breakpoints are written only at safepoints, and are read
// concurrently only outside of safepoints.

class BreakpointInfo : public CHeapObj<mtClass> {
  friend class VMStructs;
 private:
  Bytecodes::Code  _orig_bytecode;
  int              _bci;
  u2               _name_index;       // of method
  u2               _signature_index;  // of method
  BreakpointInfo*  _next;             // simple storage allocation

 public:
  BreakpointInfo(Method* m, int bci);

  // accessors
  Bytecodes::Code orig_bytecode()                     { return _orig_bytecode; }
  void        set_orig_bytecode(Bytecodes::Code code) { _orig_bytecode = code; }
  int         bci()                                   { return _bci; }

  BreakpointInfo* next() const             { return _next; }
  void        set_next(BreakpointInfo* n)  { _next = n; }

  // helps for searchers
  bool match(const Method* m, int bci) {
    return bci == _bci && match(m);
  }

  bool match(const Method* m) {
    return _name_index == m->name_index() &&
      _signature_index == m->signature_index();
  }

  void set(Method* method);
  void clear(Method* method);
};

#endif // INCLUDE_JVMTI

// Utility class for accessing exception handlers
class ExceptionTable : public StackObj {
 private:
  ExceptionTableElement* _table;
  u2  _length;

 public:
  ExceptionTable(const Method* m) {
    if (m->has_exception_handler()) {
      _table = m->exception_table_start();
      _length = m->exception_table_length();
    } else {
      _table = NULL;
      _length = 0;
    }
  }

  int length() const {
    return _length;
  }

  u2 start_pc(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].start_pc;
  }

  void set_start_pc(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].start_pc = value;
  }

  u2 end_pc(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].end_pc;
  }

  void set_end_pc(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].end_pc = value;
  }

  u2 handler_pc(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].handler_pc;
  }

  void set_handler_pc(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].handler_pc = value;
  }

  u2 catch_type_index(int idx) const {
    assert(idx < _length, "out of bounds");
    return _table[idx].catch_type_index;
  }

  void set_catch_type_index(int idx, u2 value) {
    assert(idx < _length, "out of bounds");
    _table[idx].catch_type_index = value;
  }
};
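
// Editorial usage sketch (illustrative; 'method' and 'bci' are hypothetical
// variables of the caller):
//   ExceptionTable table(method);
//   for (int i = 0; i < table.length(); i++) {
//     if (bci >= table.start_pc(i) && bci < table.end_pc(i)) {
//       // candidate handler at table.handler_pc(i);
//       // table.catch_type_index(i) == 0 means catch-all
//     }
//   }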

#endif // SHARE_OOPS_METHOD_HPP