< prev index next >

src/share/vm/gc/shared/genCollectedHeap.cpp

Print this page




 544     MemoryService::track_memory_usage();
 545 
 546     gc_epilogue(complete);
 547 
 548     if (must_restore_marks_for_biased_locking) {
 549       BiasedLocking::restore_marks();
 550     }
 551   }
 552 
 553   print_heap_after_gc();
 554 
 555 #ifdef TRACESPINNING
 556   ParallelTaskTerminator::print_termination_counts();
 557 #endif
 558 }
 559 
// Slow-path entry for an allocation that failed in the generations:
// delegates entirely to the collector policy, which may trigger a GC
// and retry.  Returns NULL if the request still cannot be satisfied.
 560 HeapWord* GenCollectedHeap::satisfy_failed_allocation(size_t size, bool is_tlab) {
 561   return collector_policy()->satisfy_failed_allocation(size, is_tlab);
 562 }
 563 
 564 void GenCollectedHeap::set_par_threads(uint t) {
 565   assert(t == 0 || !UseSerialGC, "Cannot have parallel threads");
 566   CollectedHeap::set_par_threads(t);
 567   set_n_termination(t);
 568 }
 569 
// Sets the thread count used by _process_strong_tasks for task
// claiming/termination; must match the number of parallel workers.
 570 void GenCollectedHeap::set_n_termination(uint t) {
 571   _process_strong_tasks->set_n_threads(t);
 572 }
 573 
 574 #ifdef ASSERT
 575 class AssertNonScavengableClosure: public OopClosure {
 576 public:
 577   virtual void do_oop(oop* p) {
 578     assert(!GenCollectedHeap::heap()->is_in_partial_collection(*p),
 579       "Referent should not be scavengable.");  }
 580   virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
 581 };
 582 static AssertNonScavengableClosure assert_is_non_scavengable_closure;
 583 #endif
 584 
// Process the heap's strong (and optionally weak) roots.  Root groups
// are partitioned among GC workers via _process_strong_tasks claiming:
// a worker processes a group only if it claims the task first.
 585 void GenCollectedHeap::process_roots(bool activate_scope,
 586                                      ScanningOption so,
 587                                      OopClosure* strong_roots,
 588                                      OopClosure* weak_roots,
 589                                      CLDClosure* strong_cld_closure,
 590                                      CLDClosure* weak_cld_closure,
 591                                      CodeBlobClosure* code_roots) {
 592   StrongRootsScope srs(activate_scope);
 593 
 594   // General roots.
 595   assert(Threads::thread_claim_parity() != 0, "must have called prologue code");
 596   assert(code_roots != NULL, "code root closure should always be set");
 597   // _n_termination for _process_strong_tasks should be set up upstream
 598   // in a method not running in a GC worker.  Otherwise the GC worker
 599   // could be trying to change the termination condition while the task
 600   // is executing in another GC worker.
 601 
 602   if (!_process_strong_tasks->is_task_claimed(GCH_PS_ClassLoaderDataGraph_oops_do)) {
 603     ClassLoaderDataGraph::roots_cld_do(strong_cld_closure, weak_cld_closure);
 604   }
 605 
 606   // Some CLDs contained in the thread frames should be considered strong.
 607   // Don't process them if they will be processed during the ClassLoaderDataGraph phase.
 608   CLDClosure* roots_from_clds_p = (strong_cld_closure != weak_cld_closure) ? strong_cld_closure : NULL;
 609   // Only process code roots from thread stacks if we aren't visiting the entire CodeCache anyway
 610   CodeBlobClosure* roots_from_code_p = (so & SO_AllCodeCache) ? NULL : code_roots;
 611 
 612   bool is_par = n_par_threads() > 0;
 613   Threads::possibly_parallel_oops_do(is_par, strong_roots, roots_from_clds_p, roots_from_code_p);
 614 
 615   if (!_process_strong_tasks->is_task_claimed(GCH_PS_Universe_oops_do)) {
 616     Universe::oops_do(strong_roots);
 617   }
 618   // Global (strong) JNI handles
 619   if (!_process_strong_tasks->is_task_claimed(GCH_PS_JNIHandles_oops_do)) {
 620     JNIHandles::oops_do(strong_roots);
 621   }
 622 
 623   if (!_process_strong_tasks->is_task_claimed(GCH_PS_ObjectSynchronizer_oops_do)) {
 624     ObjectSynchronizer::oops_do(strong_roots);
 625   }
 626   if (!_process_strong_tasks->is_task_claimed(GCH_PS_FlatProfiler_oops_do)) {
 627     FlatProfiler::oops_do(strong_roots);
 628   }
 629   if (!_process_strong_tasks->is_task_claimed(GCH_PS_Management_oops_do)) {
 630     Management::oops_do(strong_roots);
 631   }
 632   if (!_process_strong_tasks->is_task_claimed(GCH_PS_jvmti_oops_do)) {


// NOTE(review): source lines 633-651 are elided by this diff view.
 652       assert(code_roots != NULL, "must supply closure for code cache");
 653 
 654       // We only visit parts of the CodeCache when scavenging.
 655       CodeCache::scavenge_root_nmethods_do(code_roots);
 656     }
 657     if (so & SO_AllCodeCache) {
 658       assert(code_roots != NULL, "must supply closure for code cache");
 659 
 660       // CMSCollector uses this to do intermediate-strength collections.
 661       // We scan the entire code cache, since CodeCache::do_unloading is not called.
 662       CodeCache::blobs_do(code_roots);
 663     }
 664     // Verify that the code cache contents are not subject to
 665     // movement by a scavenging collection.
 666     DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
 667     DEBUG_ONLY(CodeCache::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
 668   }
 669 
 670 }
 671 
 672 void GenCollectedHeap::gen_process_roots(int level,

 673                                          bool younger_gens_as_roots,
 674                                          bool activate_scope,
 675                                          ScanningOption so,
 676                                          bool only_strong_roots,
 677                                          OopsInGenClosure* not_older_gens,
 678                                          OopsInGenClosure* older_gens,
 679                                          CLDClosure* cld_closure) {
 680   const bool is_adjust_phase = !only_strong_roots && !younger_gens_as_roots;
 681 
 682   bool is_moving_collection = false;
 683   if (level == 0 || is_adjust_phase) {
 684     // young collections are always moving
 685     is_moving_collection = true;
 686   }
 687 
 688   MarkingCodeBlobClosure mark_code_closure(not_older_gens, is_moving_collection);
 689   OopsInGenClosure* weak_roots = only_strong_roots ? NULL : not_older_gens;
 690   CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
 691 
 692   process_roots(activate_scope, so,
 693                 not_older_gens, weak_roots,
 694                 cld_closure, weak_cld_closure,
 695                 &mark_code_closure);
 696 
 697   if (younger_gens_as_roots) {
 698     if (!_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
 699       if (level == 1) {
 700         not_older_gens->set_generation(_young_gen);
 701         _young_gen->oop_iterate(not_older_gens);
 702       }
 703       not_older_gens->reset_generation();
 704     }
 705   }
 706   // When collection is parallel, all threads get to cooperate to do
 707   // older-gen scanning.
 708   if (level == 0) {
 709     older_gens->set_generation(_old_gen);
 710     rem_set()->younger_refs_iterate(_old_gen, older_gens);
 711     older_gens->reset_generation();
 712   }
 713 
 714   _process_strong_tasks->all_tasks_completed();
 715 }
 716 
 717 
 718 class AlwaysTrueClosure: public BoolObjectClosure {
 719 public:
 720   bool do_object_b(oop p) { return true; }
 721 };
 722 static AlwaysTrueClosure always_true;
 723 
// Apply root_closure to the heap's weak roots: all weak JNI handles
// (always_true reports every referent live, so none are skipped) and
// the weak oops held by each generation's reference processor.
 724 void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
 725   JNIHandles::weak_oops_do(&always_true, root_closure);
 726   _young_gen->ref_processor()->weak_oops_do(root_closure);
 727   _old_gen->ref_processor()->weak_oops_do(root_closure);
 728 }
 729 
 730 #define GCH_SINCE_SAVE_MARKS_ITERATE_DEFN(OopClosureType, nv_suffix)    \
 731 void GenCollectedHeap::                                                 \
 732 oop_since_save_marks_iterate(int level,                                 \
 733                              OopClosureType* cur,                       \
 734                              OopClosureType* older) {                   \




 544     MemoryService::track_memory_usage();
 545 
 546     gc_epilogue(complete);
 547 
 548     if (must_restore_marks_for_biased_locking) {
 549       BiasedLocking::restore_marks();
 550     }
 551   }
 552 
 553   print_heap_after_gc();
 554 
 555 #ifdef TRACESPINNING
 556   ParallelTaskTerminator::print_termination_counts();
 557 #endif
 558 }
 559 
// Slow-path entry for an allocation that failed in the generations:
// delegates entirely to the collector policy, which may trigger a GC
// and retry.  Returns NULL if the request still cannot be satisfied.
 560 HeapWord* GenCollectedHeap::satisfy_failed_allocation(size_t size, bool is_tlab) {
 561   return collector_policy()->satisfy_failed_allocation(size, is_tlab);
 562 }
 563 










 564 #ifdef ASSERT
 565 class AssertNonScavengableClosure: public OopClosure {
 566 public:
 567   virtual void do_oop(oop* p) {
 568     assert(!GenCollectedHeap::heap()->is_in_partial_collection(*p),
 569       "Referent should not be scavengable.");  }
 570   virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
 571 };
 572 static AssertNonScavengableClosure assert_is_non_scavengable_closure;
 573 #endif
 574 
// Process the heap's strong (and optionally weak) roots within the given
// StrongRootsScope.  Root groups are partitioned among GC workers via
// _process_strong_tasks claiming: a worker processes a group only if it
// claims the task first.
 575 void GenCollectedHeap::process_roots(StrongRootsScope* scope,
 576                                      ScanningOption so,
 577                                      OopClosure* strong_roots,
 578                                      OopClosure* weak_roots,
 579                                      CLDClosure* strong_cld_closure,
 580                                      CLDClosure* weak_cld_closure,
 581                                      CodeBlobClosure* code_roots) {


 582   // General roots.
 583   assert(Threads::thread_claim_parity() != 0, "must have called prologue code");
 584   assert(code_roots != NULL, "code root closure should always be set");
 585   // _n_termination for _process_strong_tasks should be set up upstream
 586   // in a method not running in a GC worker.  Otherwise the GC worker
 587   // could be trying to change the termination condition while the task
 588   // is executing in another GC worker.
 589 
 590   if (!_process_strong_tasks->is_task_claimed(GCH_PS_ClassLoaderDataGraph_oops_do)) {
 591     ClassLoaderDataGraph::roots_cld_do(strong_cld_closure, weak_cld_closure);
 592   }
 593 
 594   // Some CLDs contained in the thread frames should be considered strong.
 595   // Don't process them if they will be processed during the ClassLoaderDataGraph phase.
 596   CLDClosure* roots_from_clds_p = (strong_cld_closure != weak_cld_closure) ? strong_cld_closure : NULL;
 597   // Only process code roots from thread stacks if we aren't visiting the entire CodeCache anyway
 598   CodeBlobClosure* roots_from_code_p = (so & SO_AllCodeCache) ? NULL : code_roots;
 599 
 600   bool is_par = scope->n_threads() > 1;
 601   Threads::possibly_parallel_oops_do(is_par, strong_roots, roots_from_clds_p, roots_from_code_p);
 602 
 603   if (!_process_strong_tasks->is_task_claimed(GCH_PS_Universe_oops_do)) {
 604     Universe::oops_do(strong_roots);
 605   }
 606   // Global (strong) JNI handles
 607   if (!_process_strong_tasks->is_task_claimed(GCH_PS_JNIHandles_oops_do)) {
 608     JNIHandles::oops_do(strong_roots);
 609   }
 610 
 611   if (!_process_strong_tasks->is_task_claimed(GCH_PS_ObjectSynchronizer_oops_do)) {
 612     ObjectSynchronizer::oops_do(strong_roots);
 613   }
 614   if (!_process_strong_tasks->is_task_claimed(GCH_PS_FlatProfiler_oops_do)) {
 615     FlatProfiler::oops_do(strong_roots);
 616   }
 617   if (!_process_strong_tasks->is_task_claimed(GCH_PS_Management_oops_do)) {
 618     Management::oops_do(strong_roots);
 619   }
 620   if (!_process_strong_tasks->is_task_claimed(GCH_PS_jvmti_oops_do)) {


// NOTE(review): source lines 621-639 are elided by this diff view.
 640       assert(code_roots != NULL, "must supply closure for code cache");
 641 
 642       // We only visit parts of the CodeCache when scavenging.
 643       CodeCache::scavenge_root_nmethods_do(code_roots);
 644     }
 645     if (so & SO_AllCodeCache) {
 646       assert(code_roots != NULL, "must supply closure for code cache");
 647 
 648       // CMSCollector uses this to do intermediate-strength collections.
 649       // We scan the entire code cache, since CodeCache::do_unloading is not called.
 650       CodeCache::blobs_do(code_roots);
 651     }
 652     // Verify that the code cache contents are not subject to
 653     // movement by a scavenging collection.
 654     DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
 655     DEBUG_ONLY(CodeCache::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
 656   }
 657 
 658 }
 659 
 660 void GenCollectedHeap::gen_process_roots(StrongRootsScope* scope,
 661                                          int level,
 662                                          bool younger_gens_as_roots,

 663                                          ScanningOption so,
 664                                          bool only_strong_roots,
 665                                          OopsInGenClosure* not_older_gens,
 666                                          OopsInGenClosure* older_gens,
 667                                          CLDClosure* cld_closure) {
 668   const bool is_adjust_phase = !only_strong_roots && !younger_gens_as_roots;
 669 
 670   bool is_moving_collection = false;
 671   if (level == 0 || is_adjust_phase) {
 672     // young collections are always moving
 673     is_moving_collection = true;
 674   }
 675 
 676   MarkingCodeBlobClosure mark_code_closure(not_older_gens, is_moving_collection);
 677   OopsInGenClosure* weak_roots = only_strong_roots ? NULL : not_older_gens;
 678   CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
 679 
 680   process_roots(scope, so,
 681                 not_older_gens, weak_roots,
 682                 cld_closure, weak_cld_closure,
 683                 &mark_code_closure);
 684 
 685   if (younger_gens_as_roots) {
 686     if (!_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
 687       if (level == 1) {
 688         not_older_gens->set_generation(_young_gen);
 689         _young_gen->oop_iterate(not_older_gens);
 690       }
 691       not_older_gens->reset_generation();
 692     }
 693   }
 694   // When collection is parallel, all threads get to cooperate to do
 695   // older-gen scanning.
 696   if (level == 0) {
 697     older_gens->set_generation(_old_gen);
 698     rem_set()->younger_refs_iterate(_old_gen, older_gens, scope->n_threads());
 699     older_gens->reset_generation();
 700   }
 701 
 702   _process_strong_tasks->all_tasks_completed(scope->n_threads());
 703 }
 704 
 705 
 706 class AlwaysTrueClosure: public BoolObjectClosure {
 707 public:
 708   bool do_object_b(oop p) { return true; }
 709 };
 710 static AlwaysTrueClosure always_true;
 711 
// Apply root_closure to the heap's weak roots: all weak JNI handles
// (always_true reports every referent live, so none are skipped) and
// the weak oops held by each generation's reference processor.
 712 void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
 713   JNIHandles::weak_oops_do(&always_true, root_closure);
 714   _young_gen->ref_processor()->weak_oops_do(root_closure);
 715   _old_gen->ref_processor()->weak_oops_do(root_closure);
 716 }
 717 
 718 #define GCH_SINCE_SAVE_MARKS_ITERATE_DEFN(OopClosureType, nv_suffix)    \
 719 void GenCollectedHeap::                                                 \
 720 oop_since_save_marks_iterate(int level,                                 \
 721                              OopClosureType* cur,                       \
 722                              OopClosureType* older) {                   \


< prev index next >