610 KlassScanClosure klass_scan_closure(&fsc_with_no_gc_barrier,
611 gch->rem_set()->klass_rem_set());
612 CLDToKlassAndOopClosure cld_scan_closure(&klass_scan_closure,
613 &fsc_with_no_gc_barrier,
614 false);
615
616 set_promo_failure_scan_stack_closure(&fsc_with_no_gc_barrier);
617 FastEvacuateFollowersClosure evacuate_followers(gch,
618 &fsc_with_no_gc_barrier,
619 &fsc_with_gc_barrier);
620
621 assert(gch->no_allocs_since_save_marks(),
622 "save marks have not been newly set.");
623
624 {
625 // DefNew needs to run with n_threads == 0, to make sure the serial
626 // version of the card table scanning code is used.
627 // See: CardTableModRefBSForCTRS::non_clean_card_iterate_possibly_parallel.
628 StrongRootsScope srs(0);
629
630 gch->gen_process_roots(&srs,
631 GenCollectedHeap::YoungGen,
632 true, // Process younger gens, if any,
633 // as strong roots.
634 GenCollectedHeap::SO_ScavengeCodeCache,
635 GenCollectedHeap::StrongAndWeakRoots,
636 &fsc_with_no_gc_barrier,
637 &fsc_with_gc_barrier,
638 &cld_scan_closure);
639 }
640
641 // "evacuate followers".
642 evacuate_followers.do_void();
643
644 FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
645 ReferenceProcessor* rp = ref_processor();
646 rp->setup_policy(clear_all_soft_refs);
647 const ReferenceProcessorStats& stats =
648 rp->process_discovered_references(&is_alive, &keep_alive, &evacuate_followers,
649 NULL, _gc_timer);
650 gc_tracer.report_gc_reference_stats(stats);
651
652 if (!_promotion_failed) {
653 // Swap the survivor spaces.
654 eden()->clear(SpaceDecorator::Mangle);
655 from()->clear(SpaceDecorator::Mangle);
656 if (ZapUnusedHeapArea) {
657 // This is now done here because of the piece-meal mangling which
658 // can check for valid mangling at intermediate points in the
834 }
835
836
837 void DefNewGeneration::reset_saved_marks() {
838 eden()->reset_saved_mark();
839 to()->reset_saved_mark();
840 from()->reset_saved_mark();
841 }
842
843
844 bool DefNewGeneration::no_allocs_since_save_marks() {
845 assert(eden()->saved_mark_at_top(), "Violated spec - alloc in eden");
846 assert(from()->saved_mark_at_top(), "Violated spec - alloc in from");
847 return to()->saved_mark_at_top();
848 }
849
// For each (closure type, suffix) pair supplied by
// ALL_SINCE_SAVE_MARKS_CLOSURES, this macro generates a definition of
// DefNewGeneration::oop_since_save_marks_iterate<suffix>.  Each generated
// method: (1) points the closure at this generation, (2) iterates each
// space (eden, to, from) over the oops allocated since the last saved
// mark, (3) clears the closure's generation again, and (4) re-saves the
// marks so a subsequent call starts from the new top.  The statement order
// matters: set_generation must precede the iteration and save_marks must
// come last.
// NOTE(review): nv_suffix is presumably the non-virtual-dispatch variant
// suffix used elsewhere in HotSpot's specialized closures — confirm against
// the ALL_SINCE_SAVE_MARKS_CLOSURES definition.
// (Comments only here: // comments cannot go inside the macro body because
// a trailing backslash inside one would splice away the continuation.)
850 #define DefNew_SINCE_SAVE_MARKS_DEFN(OopClosureType, nv_suffix) \
851 \
852 void DefNewGeneration:: \
853 oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl) { \
854   cl->set_generation(this); \
855   eden()->oop_since_save_marks_iterate##nv_suffix(cl); \
856   to()->oop_since_save_marks_iterate##nv_suffix(cl); \
857   from()->oop_since_save_marks_iterate##nv_suffix(cl); \
858   cl->reset_generation(); \
859   save_marks(); \
860 }

861

// Instantiate the method family for every registered closure type.
862 ALL_SINCE_SAVE_MARKS_CLOSURES(DefNew_SINCE_SAVE_MARKS_DEFN)

863

864 #undef DefNew_SINCE_SAVE_MARKS_DEFN
865
866 void DefNewGeneration::contribute_scratch(ScratchBlock*& list, Generation* requestor,
867 size_t max_alloc_words) {
868 if (requestor == this || _promotion_failed) {
869 return;
870 }
871 assert(GenCollectedHeap::heap()->is_old_gen(requestor), "We should not call our own generation");
872
873 /* $$$ Assert this? "trace" is a "MarkSweep" function so that's not appropriate.
874 if (to_space->top() > to_space->bottom()) {
875 trace("to_space not empty when contribute_scratch called");
876 }
877 */
878
|
610 KlassScanClosure klass_scan_closure(&fsc_with_no_gc_barrier,
611 gch->rem_set()->klass_rem_set());
612 CLDToKlassAndOopClosure cld_scan_closure(&klass_scan_closure,
613 &fsc_with_no_gc_barrier,
614 false);
615
616 set_promo_failure_scan_stack_closure(&fsc_with_no_gc_barrier);
617 FastEvacuateFollowersClosure evacuate_followers(gch,
618 &fsc_with_no_gc_barrier,
619 &fsc_with_gc_barrier);
620
621 assert(gch->no_allocs_since_save_marks(),
622 "save marks have not been newly set.");
623
624 {
625 // DefNew needs to run with n_threads == 0, to make sure the serial
626 // version of the card table scanning code is used.
627 // See: CardTableModRefBSForCTRS::non_clean_card_iterate_possibly_parallel.
628 StrongRootsScope srs(0);
629
630 gch->young_process_roots(&srs,
631 &fsc_with_no_gc_barrier,
632 &fsc_with_gc_barrier,
633 &cld_scan_closure);
634 }
635
636 // "evacuate followers".
637 evacuate_followers.do_void();
638
639 FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
640 ReferenceProcessor* rp = ref_processor();
641 rp->setup_policy(clear_all_soft_refs);
642 const ReferenceProcessorStats& stats =
643 rp->process_discovered_references(&is_alive, &keep_alive, &evacuate_followers,
644 NULL, _gc_timer);
645 gc_tracer.report_gc_reference_stats(stats);
646
647 if (!_promotion_failed) {
648 // Swap the survivor spaces.
649 eden()->clear(SpaceDecorator::Mangle);
650 from()->clear(SpaceDecorator::Mangle);
651 if (ZapUnusedHeapArea) {
652 // This is now done here because of the piece-meal mangling which
653 // can check for valid mangling at intermediate points in the
829 }
830
831
832 void DefNewGeneration::reset_saved_marks() {
833 eden()->reset_saved_mark();
834 to()->reset_saved_mark();
835 from()->reset_saved_mark();
836 }
837
838
839 bool DefNewGeneration::no_allocs_since_save_marks() {
840 assert(eden()->saved_mark_at_top(), "Violated spec - alloc in eden");
841 assert(from()->saved_mark_at_top(), "Violated spec - alloc in from");
842 return to()->saved_mark_at_top();
843 }
844
// For each (closure type, suffix) pair supplied by
// ALL_SINCE_SAVE_MARKS_CLOSURES, this macro generates a definition of
// DefNewGeneration::oop_since_save_marks_iterate<suffix>.  Each generated
// method: (1) asserts (debug-only) that the closure is already associated
// with this generation, (2) iterates each space (eden, to, from) over the
// oops allocated since the last saved mark, and (3) re-saves the marks so
// a subsequent call starts from the new top.  save_marks() must come last.
// NOTE(review): nv_suffix is presumably the non-virtual-dispatch variant
// suffix used elsewhere in HotSpot's specialized closures — confirm against
// the ALL_SINCE_SAVE_MARKS_CLOSURES definition.
// (Comments only here: // comments cannot go inside the macro body because
// a trailing backslash inside one would splice away the continuation.)
845 #define DefNew_SINCE_SAVE_MARKS_DEFN(OopClosureType, nv_suffix) \
846 \
847 void DefNewGeneration:: \
848 oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl) { \
849   cl->assert_generation(this); \
850   eden()->oop_since_save_marks_iterate##nv_suffix(cl); \
851   to()->oop_since_save_marks_iterate##nv_suffix(cl); \
852   from()->oop_since_save_marks_iterate##nv_suffix(cl); \
853   save_marks(); \
854 }

855

// Instantiate the method family for every registered closure type.
856 ALL_SINCE_SAVE_MARKS_CLOSURES(DefNew_SINCE_SAVE_MARKS_DEFN)

857

858 #undef DefNew_SINCE_SAVE_MARKS_DEFN
859
860 void DefNewGeneration::contribute_scratch(ScratchBlock*& list, Generation* requestor,
861 size_t max_alloc_words) {
862 if (requestor == this || _promotion_failed) {
863 return;
864 }
865 assert(GenCollectedHeap::heap()->is_old_gen(requestor), "We should not call our own generation");
866
867 /* $$$ Assert this? "trace" is a "MarkSweep" function so that's not appropriate.
868 if (to_space->top() > to_space->bottom()) {
869 trace("to_space not empty when contribute_scratch called");
870 }
871 */
872
|