119 int byte_code_size,
120 AccessFlags access_flags,
121 InlineTableSizes* sizes,
122 ConstMethod::MethodType method_type,
123 TRAPS);
124
125 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
126 Method(){}
127
// Compile-time constant; the 'volatile' qualifier allows the call on a
// volatile Method* — presumably used by vtable/CDS verification code (see
// the constructor comment above); confirm at call sites.
128 bool is_method() const volatile { return true; }
129
// Re-creates pointers that remove_unshareable_info() (declared later in this
// class) cleared before the method was archived; may throw (TRAPS).
130 void restore_unshareable_info(TRAPS);
131
132 // accessors for instance variables
133
// The immutable part of the method lives in a separate ConstMethod object.
134 ConstMethod* constMethod() const { return _constMethod; }
135 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
136
137
138 static address make_adapters(const methodHandle& mh, TRAPS);
139 volatile address from_compiled_entry() const { return (address)OrderAccess::load_ptr_acquire(&_from_compiled_entry); }
140 volatile address from_compiled_entry_no_trampoline() const;
141 volatile address from_interpreted_entry() const{ return (address)OrderAccess::load_ptr_acquire(&_from_interpreted_entry); }
142
143 // access flag
144 AccessFlags access_flags() const { return _access_flags; }
145 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
146
147 // name
// Name and signature are Symbols stored in the constant pool; the Method
// itself keeps only pool indices, held by ConstMethod.
148 Symbol* name() const { return constants()->symbol_at(name_index()); }
149 int name_index() const { return constMethod()->name_index(); }
150 void set_name_index(int index) { constMethod()->set_name_index(index); }
151
152 // signature
153 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
154 int signature_index() const { return constMethod()->signature_index(); }
155 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
156
157 // generics support
// Index 0 means "no generic signature attribute": NULL is returned instead
// of a constant-pool lookup.
158 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }
159 int generic_signature_index() const { return constMethod()->generic_signature_index(); }
160 void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }
161
320 // this case it matches any constraint class. Returns -1 if the
321 // exception cannot be handled in this method. The handler
322 // constraint classes are loaded if necessary. Note that this may
323 // throw an exception if loading of the constraint classes causes
324 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
325 // If an exception is thrown, returns the bci of the
326 // exception handler which caused the exception to be thrown, which
327 // is needed for proper retries. See, for example,
328 // InterpreterRuntime::exception_handler_for_exception.
329 static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
330
331 // method data access
// Plain read of the profiling-data pointer; NULL until a MethodData has been
// installed via set_method_data() (which publishes it with a release store).
332 MethodData* method_data() const {
333 return _method_data;
334 }
335
336 void set_method_data(MethodData* data) {
337 // The store into method must be released. On platforms without
338 // total store order (TSO) the reference may become visible before
339 // the initialization of data otherwise.
340 OrderAccess::release_store_ptr((volatile void *)&_method_data, data);
341 }
342
// May be NULL: counters are installed lazily (see init_method_counters below).
343 MethodCounters* method_counters() const {
344 return _method_counters;
345 }
346
// Drops the pointer only; ownership/freeing of the MethodCounters object is
// handled elsewhere — confirm at call sites.
347 void clear_method_counters() {
348 _method_counters = NULL;
349 }
350
351 bool init_method_counters(MethodCounters* counters) {
352 // Try to install a pointer to MethodCounters, return true on success.
353 return Atomic::cmpxchg_ptr(counters, (volatile void*)&_method_counters, NULL) == NULL;
354 }
355
356 #ifdef TIERED
357 // We are reusing interpreter_invocation_count as a holder for the previous event count!
358 // We can do that since interpreter_invocation_count is not used in tiered.
359 int prev_event_count() const {
360 if (method_counters() == NULL) {
361 return 0;
362 } else {
363 return method_counters()->interpreter_invocation_count();
364 }
365 }
366 void set_prev_event_count(int count) {
367 MethodCounters* mcs = method_counters();
368 if (mcs != NULL) {
369 mcs->set_interpreter_invocation_count(count);
370 }
371 }
372 jlong prev_time() const {
373 MethodCounters* mcs = method_counters();
374 return mcs == NULL ? 0 : mcs->prev_time();
435 MethodCounters* mcs = get_method_counters(CHECK_0);
436 return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();
437 }
438 #endif
439
440 #ifndef PRODUCT
// The real counter exists only in non-product builds; the PRODUCT variant
// below always reads as 0.
441 int compiled_invocation_count() const { return _compiled_invocation_count; }
442 void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }
443 #else
444 // for PrintMethodData in a product build
445 int compiled_invocation_count() const { return 0; }
446 #endif // not PRODUCT
447
448 // Clear (non-shared space) pointers which could not be relevant
449 // if this (shared) method were mapped into another JVM.
450 void remove_unshareable_info();
451
452 // nmethod/verified compiler entry
453 address verified_code_entry();
// Deliberately out of line; used by the assert in code() below.
454 bool check_code() const; // Not inline to avoid circular ref
455 CompiledMethod* volatile code() const { assert( check_code(), "" ); return (CompiledMethod *)OrderAccess::load_ptr_acquire(&_code); }
456 void clear_code(bool acquire_lock = true); // Clear out any compiled code
457 static void set_code(const methodHandle& mh, CompiledMethod* code);
// Adapter bookkeeping is delegated to the ConstMethod.
458 void set_adapter_entry(AdapterHandlerEntry* adapter) {
459 constMethod()->set_adapter_entry(adapter);
460 }
461 void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
462 constMethod()->update_adapter_trampoline(adapter);
463 }
464
// i2c/c2i: interpreter-to-compiled and compiled-to-interpreter adapter
// entries — presumably fetched from the adapter(); confirm in the definitions.
465 address get_i2c_entry();
466 address get_c2i_entry();
467 address get_c2i_unverified_entry();
468 AdapterHandlerEntry* adapter() const {
469 return constMethod()->adapter();
470 }
471 // setup entry points
472 void link_method(const methodHandle& method, TRAPS);
473 // clear entry points. Used by sharing code during dump time
474 void unlink_method() NOT_CDS_RETURN;
475
|
119 int byte_code_size,
120 AccessFlags access_flags,
121 InlineTableSizes* sizes,
122 ConstMethod::MethodType method_type,
123 TRAPS);
124
// NOTE(review): everything from here down duplicates the region above —
// this looks like a before/after diff paste (the copy above uses the old
// *_ptr Atomic/OrderAccess API, this copy the new templates); confirm which
// version is authoritative.
125 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
126 Method(){}
127
// Compile-time constant; 'volatile' allows the call on a volatile Method*.
128 bool is_method() const volatile { return true; }
129
// Re-creates pointers cleared by remove_unshareable_info(); may throw (TRAPS).
130 void restore_unshareable_info(TRAPS);
131
132 // accessors for instance variables
133
// The immutable part of the method lives in a separate ConstMethod object.
134 ConstMethod* constMethod() const { return _constMethod; }
135 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
136
137
138 static address make_adapters(const methodHandle& mh, TRAPS);
// Entry points are published concurrently; acquire loads pair with the
// releasing stores done at publication (type-safe template form).
139 volatile address from_compiled_entry() const { return OrderAccess::load_acquire(&_from_compiled_entry); }
140 volatile address from_compiled_entry_no_trampoline() const;
141 volatile address from_interpreted_entry() const{ return OrderAccess::load_acquire(&_from_interpreted_entry); }
142
143 // access flag
144 AccessFlags access_flags() const { return _access_flags; }
145 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
146
147 // name
// Name and signature are Symbols in the constant pool; only indices are
// stored, held by ConstMethod.
148 Symbol* name() const { return constants()->symbol_at(name_index()); }
149 int name_index() const { return constMethod()->name_index(); }
150 void set_name_index(int index) { constMethod()->set_name_index(index); }
151
152 // signature
153 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
154 int signature_index() const { return constMethod()->signature_index(); }
155 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
156
157 // generics support
// Index 0 means "no generic signature attribute": NULL, no pool lookup.
158 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }
159 int generic_signature_index() const { return constMethod()->generic_signature_index(); }
160 void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }
161
320 // this case it matches any constraint class. Returns -1 if the
321 // exception cannot be handled in this method. The handler
322 // constraint classes are loaded if necessary. Note that this may
323 // throw an exception if loading of the constraint classes causes
324 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
325 // If an exception is thrown, returns the bci of the
326 // exception handler which caused the exception to be thrown, which
327 // is needed for proper retries. See, for example,
328 // InterpreterRuntime::exception_handler_for_exception.
329 static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
330
331 // method data access
// Plain read; NULL until set_method_data() publishes a MethodData below.
332 MethodData* method_data() const {
333 return _method_data;
334 }
335
336 void set_method_data(MethodData* data) {
337 // The store into method must be released. On platforms without
338 // total store order (TSO) the reference may become visible before
339 // the initialization of data otherwise.
340 OrderAccess::release_store(&_method_data, data);
341 }
342
// May be NULL: counters are installed lazily via init_method_counters().
343 MethodCounters* method_counters() const {
344 return _method_counters;
345 }
346
347 void clear_method_counters() {
348 _method_counters = NULL;
349 }
350
// Atomically installs the counters, returning true for the winning thread —
// presumably defined out of line to avoid including atomic.hpp here; confirm
// against the .cpp.
351 bool init_method_counters(MethodCounters* counters);
352
353 #ifdef TIERED
354 // We are reusing interpreter_invocation_count as a holder for the previous event count!
355 // We can do that since interpreter_invocation_count is not used in tiered.
// Both accessors tolerate a method with no MethodCounters installed yet.
356 int prev_event_count() const {
357 if (method_counters() == NULL) {
358 return 0;
359 } else {
360 return method_counters()->interpreter_invocation_count();
361 }
362 }
363 void set_prev_event_count(int count) {
364 MethodCounters* mcs = method_counters();
365 if (mcs != NULL) {
366 mcs->set_interpreter_invocation_count(count);
367 }
368 }
369 jlong prev_time() const {
370 MethodCounters* mcs = method_counters();
371 return mcs == NULL ? 0 : mcs->prev_time();
432 MethodCounters* mcs = get_method_counters(CHECK_0);
433 return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();
434 }
435 #endif
436
437 #ifndef PRODUCT
// The real counter exists only in non-product builds; the PRODUCT variant
// below always reads as 0.
438 int compiled_invocation_count() const { return _compiled_invocation_count; }
439 void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }
440 #else
441 // for PrintMethodData in a product build
442 int compiled_invocation_count() const { return 0; }
443 #endif // not PRODUCT
444
445 // Clear (non-shared space) pointers which could not be relevant
446 // if this (shared) method were mapped into another JVM.
447 void remove_unshareable_info();
448
449 // nmethod/verified compiler entry
450 address verified_code_entry();
// Deliberately out of line; used by the assert in code() below.
451 bool check_code() const; // Not inline to avoid circular ref
// Acquire load pairs with the releasing publication of installed code.
452 CompiledMethod* volatile code() const { assert( check_code(), "" ); return OrderAccess::load_acquire(&_code); }
453 void clear_code(bool acquire_lock = true); // Clear out any compiled code
454 static void set_code(const methodHandle& mh, CompiledMethod* code);
// Adapter bookkeeping is delegated to the ConstMethod.
455 void set_adapter_entry(AdapterHandlerEntry* adapter) {
456 constMethod()->set_adapter_entry(adapter);
457 }
458 void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
459 constMethod()->update_adapter_trampoline(adapter);
460 }
461
// i2c/c2i: interpreter-to-compiled and compiled-to-interpreter adapter
// entries — presumably fetched from the adapter(); confirm in the definitions.
462 address get_i2c_entry();
463 address get_c2i_entry();
464 address get_c2i_unverified_entry();
465 AdapterHandlerEntry* adapter() const {
466 return constMethod()->adapter();
467 }
468 // setup entry points
469 void link_method(const methodHandle& method, TRAPS);
470 // clear entry points. Used by sharing code during dump time
471 void unlink_method() NOT_CDS_RETURN;
472
|