src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
Webrev sdiff for change 8047290 (SUPERfinal) — src/share/vm/gc_implementation/g1. Navigation: Index | Unified diffs | Context diffs | Sdiffs | Patch | New | Old | Previous File | Next File

src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp

Print this page




4914 
4915       for (int i = 0; i < num_claimed_nmethods; i++) {
4916         clean_nmethod(claimed_nmethods[i]);
4917       }
4918     }
4919 
4920     // The nmethod cleaning helps out and does the CodeCache part of MetadataOnStackMark.
4921     // Need to retire the buffers now that this thread has stopped cleaning nmethods.
4922     MetadataOnStackMark::retire_buffer_for_thread(Thread::current());
4923   }
4924 
4925   void work_second_pass(uint worker_id) { // worker_id is unused in this pass
4926     nmethod* nm;
4927     // Take care of postponed nmethods: claim_postponed_nmethod() hands this worker one nmethod per call, NULL once the postponed set is drained.
4928     while ((nm = claim_postponed_nmethod()) != NULL) {
4929       clean_nmethod_postponed(nm);
4930     }
4931   }
4932 };
4933 
4934 Monitor* G1CodeCacheUnloadingTask::_lock = new Monitor(Mutex::leaf, "Code Cache Unload lock"); // leaf-ranked static Monitor shared by all G1CodeCacheUnloadingTask instances (old side of diff: no safepoint-check argument)
4935 
4936 class G1KlassCleaningTask : public StackObj {
4937   BoolObjectClosure*                      _is_alive;
4938   volatile jint                           _clean_klass_tree_claimed;
4939   ClassLoaderDataGraphKlassIteratorAtomic _klass_iterator;
4940 
4941  public:
4942   G1KlassCleaningTask(BoolObjectClosure* is_alive) : // is_alive presumably identifies still-live objects during unloading — confirm at call site
4943       _is_alive(is_alive),
4944       _clean_klass_tree_claimed(0), // 0 = clean-klass-tree subtask unclaimed; CAS'd 0->1 in claim_clean_klass_tree_task()
4945       _klass_iterator() {
4946   }
4947 
4948  private:
4949   bool claim_clean_klass_tree_task() {
4950     if (_clean_klass_tree_claimed) {
4951       return false;
4952     }
4953 
4954     return Atomic::cmpxchg(1, (jint*)&_clean_klass_tree_claimed, 0) == 0;




4914 
4915       for (int i = 0; i < num_claimed_nmethods; i++) {
4916         clean_nmethod(claimed_nmethods[i]);
4917       }
4918     }
4919 
4920     // The nmethod cleaning helps out and does the CodeCache part of MetadataOnStackMark.
4921     // Need to retire the buffers now that this thread has stopped cleaning nmethods.
4922     MetadataOnStackMark::retire_buffer_for_thread(Thread::current());
4923   }
4924 
4925   void work_second_pass(uint worker_id) { // worker_id is unused in this pass
4926     nmethod* nm;
4927     // Take care of postponed nmethods: claim_postponed_nmethod() hands this worker one nmethod per call, NULL once the postponed set is drained.
4928     while ((nm = claim_postponed_nmethod()) != NULL) {
4929       clean_nmethod_postponed(nm);
4930     }
4931   }
4932 };
4933 
4934 Monitor* G1CodeCacheUnloadingTask::_lock = new Monitor(Mutex::leaf, "Code Cache Unload lock", false, Monitor::_safepoint_check_never); // new side of diff: explicitly declares this lock is never acquired with a safepoint check (per the _safepoint_check_never flag)
4935 
4936 class G1KlassCleaningTask : public StackObj {
4937   BoolObjectClosure*                      _is_alive;
4938   volatile jint                           _clean_klass_tree_claimed;
4939   ClassLoaderDataGraphKlassIteratorAtomic _klass_iterator;
4940 
4941  public:
4942   G1KlassCleaningTask(BoolObjectClosure* is_alive) : // is_alive presumably identifies still-live objects during unloading — confirm at call site
4943       _is_alive(is_alive),
4944       _clean_klass_tree_claimed(0), // 0 = clean-klass-tree subtask unclaimed; CAS'd 0->1 in claim_clean_klass_tree_task()
4945       _klass_iterator() {
4946   }
4947 
4948  private:
4949   bool claim_clean_klass_tree_task() {
4950     if (_clean_klass_tree_claimed) {
4951       return false;
4952     }
4953 
4954     return Atomic::cmpxchg(1, (jint*)&_clean_klass_tree_claimed, 0) == 0;


src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File