
  jbyte _scavenge_root_state;

#if INCLUDE_RTM_OPT
  // RTM state at compile time. Used during deoptimization to decide
  // whether to restart collecting RTM locking abort statistics.
  RTMState _rtm_state;
#endif

  // Nmethod Flushing lock. If non-zero, then the nmethod is not removed
  // and is not made into a zombie. However, once the nmethod is made into
  // a zombie, it will be locked one final time if CompiledMethodUnload
  // event processing needs to be done.
  volatile jint _lock_count;
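  // A minimal sketch (not part of this class) of how such a lock count is
  // typically kept balanced: a scoped guard bumps the count on entry and
  // drops it on exit, so the nmethod cannot be flushed while the guard is
  // live. The guard class name is hypothetical; only _lock_count is real.
  //
  //   class nmethodFlushGuard : public StackObj {   // hypothetical helper
  //     nmethod* _nm;
  //    public:
  //     nmethodFlushGuard(nmethod* nm) : _nm(nm) { Atomic::inc(&nm->_lock_count); }
  //     ~nmethodFlushGuard()                      { Atomic::dec(&_nm->_lock_count); }
  //   };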

  // not_entrant method removal. Each mark_sweep pass will update
  // this mark to the current sweep invocation count if it is seen on the
  // stack. A not_entrant method can be removed when there are no
  // more activations, i.e., when the _stack_traversal_mark is less than
  // the current sweep traversal index.
  volatile jlong _stack_traversal_mark;
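  // A minimal sketch (not part of this class) of the reclamation test the
  // comment above describes; current_traversal is a hypothetical stand-in
  // for the sweeper's global traversal count:
  //
  //   bool is_reclaimable = nm->is_not_entrant() &&
  //                         nm->stack_traversal_mark() < current_traversal;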

  // The _hotness_counter indicates the hotness of a method. The higher
  // the value the hotter the method. The hotness counter of an nmethod is
  // set to [(ReservedCodeCacheSize / (1024 * 1024)) * 2] each time the method
  // is active while stack scanning (mark_active_nmethods()). The hotness
  // counter is decreased (by 1) while sweeping.
  int _hotness_counter;
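  // A minimal sketch (not part of this class) of the two updates the comment
  // above describes: stack scanning resets the counter to its cap, sweeping
  // decays it by one. ReservedCodeCacheSize is the real flag; the wrapper
  // functions and the hotness accessors are assumed for illustration.
  //
  //   void on_stack_scan(nmethod* nm) {
  //     nm->set_hotness_counter((ReservedCodeCacheSize / (1024 * 1024)) * 2);
  //   }
  //   void on_sweep(nmethod* nm) {
  //     nm->set_hotness_counter(nm->hotness_counter() - 1);
  //   }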

  // These are used for compiled synchronized native methods to
  // locate the owner and stack slot for the BasicLock so that we can
  // properly revoke the bias of the owner if necessary. They are
  // needed because there is no debug information for compiled native
  // wrappers and the oop maps are insufficient to allow
  // frame::retrieve_receiver() to work. Currently they are expected
  // to be byte offsets from the Java stack pointer for maximum code
  // sharing between platforms. Note that currently biased locking
  // will never cause Class instances to be biased but this code
  // handles the static synchronized case as well.
  // JVMTI's GetLocalInstance() also uses these offsets to find the receiver
  // for non-static native wrapper frames.
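  // A minimal sketch (not part of this class) of how such a byte offset can
  // be applied to a frame to recover the receiver; the helper name and the
  // way the offset is passed in are assumptions for illustration:
  //
  //   oop fetch_native_receiver(const frame& fr, ByteSize receiver_sp_offset) {
  //     oop* slot = (oop*)((address)fr.unextended_sp() + in_bytes(receiver_sp_offset));
  //     return *slot;
  //   }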

  // ...

  // Scavengable oop support
  bool on_scavenge_root_list() const { return (_scavenge_root_state & 1) != 0; }
 protected:
  enum { sl_on_list = 0x01, sl_marked = 0x10 };
  void set_on_scavenge_root_list() { _scavenge_root_state = sl_on_list; }
  void clear_on_scavenge_root_list() { _scavenge_root_state = 0; }
  // assertion-checking and pruning logic uses the bits of _scavenge_root_state
#ifndef PRODUCT
  void set_scavenge_root_marked() { _scavenge_root_state |= sl_marked; }
  void clear_scavenge_root_marked() { _scavenge_root_state &= ~sl_marked; }
  bool scavenge_root_not_marked() { return (_scavenge_root_state & ~sl_on_list) == 0; }
  // N.B. there is no positive marked query, and we only use the not_marked query for asserts.
#endif //PRODUCT
  nmethod* scavenge_root_link() const { return _scavenge_root_link; }
  void set_scavenge_root_link(nmethod *n) { _scavenge_root_link = n; }
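  // A minimal sketch (not part of this class) of how the state byte and the
  // link field combine into an intrusive singly linked list of scavengable
  // nmethods; list_head is a hypothetical stand-in for the code cache's list:
  //
  //   for (nmethod* cur = list_head; cur != NULL; cur = cur->scavenge_root_link()) {
  //     assert(cur->on_scavenge_root_list(), "every element must have its bit set");
  //     // ... visit scavengable oops in cur ...
  //   }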

 public:

  // Sweeper support
  jlong stack_traversal_mark() { return OrderAccess::load_acquire(&_stack_traversal_mark); }
  void set_stack_traversal_mark(jlong l) { OrderAccess::release_store(&_stack_traversal_mark, l); }
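  // A minimal sketch (not part of this class) of why the acquire/release pair
  // matters: the stack-scanning thread publishes the new mark with a release
  // store, and the sweeper reads it with an acquire load before deciding
  // whether the nmethod still has activations. current_traversal is a
  // hypothetical stand-in for the sweeper's global traversal count.
  //
  //   // stack-scanning thread, nmethod seen in a frame:
  //   nm->set_stack_traversal_mark(current_traversal);
  //
  //   // sweeper thread, later:
  //   if (nm->stack_traversal_mark() == current_traversal) {
  //     // seen on some stack during this traversal; do not reclaim yet
  //   }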

  // implicit exceptions support
  address continuation_for_implicit_exception(address pc);

  // On-stack replacement support
  int osr_entry_bci() const { assert(is_osr_method(), "wrong kind of nmethod"); return _entry_bci; }
  address osr_entry() const { assert(is_osr_method(), "wrong kind of nmethod"); return _osr_entry_point; }
  void invalidate_osr_method();
  nmethod* osr_link() const { return _osr_link; }
  void set_osr_link(nmethod *n) { _osr_link = n; }
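  // A minimal sketch (not part of this class) of how _osr_link is typically
  // used: the OSR nmethods compiled for a method are chained through
  // osr_link(), and a lookup walks the chain matching the requested bci.
  // The helper name and the head parameter are assumptions for illustration.
  //
  //   nmethod* find_osr_nmethod(nmethod* head, int bci) {
  //     for (nmethod* nm = head; nm != NULL; nm = nm->osr_link()) {
  //       if (nm->osr_entry_bci() == bci && nm->is_in_use()) {
  //         return nm;
  //       }
  //     }
  //     return NULL;
  //   }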

  // Verify calls to dead methods have been cleaned.
  void verify_clean_inline_caches();

  // Unlink and deallocate this nmethod. Only the NMethodSweeper class is
  // expected to use this; NMethodSweeper is not expected to use any other
  // private methods/data in this class.

 protected:
  void flush();