32 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
33 #include "gc_implementation/g1/g1CollectorPolicy.hpp"
34 #include "gc_implementation/g1/g1ErgoVerbose.hpp"
35 #include "gc_implementation/g1/g1EvacFailure.hpp"
36 #include "gc_implementation/g1/g1GCPhaseTimes.hpp"
37 #include "gc_implementation/g1/g1Log.hpp"
38 #include "gc_implementation/g1/g1MarkSweep.hpp"
39 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
40 #include "gc_implementation/g1/g1RemSet.inline.hpp"
41 #include "gc_implementation/g1/heapRegion.inline.hpp"
42 #include "gc_implementation/g1/heapRegionRemSet.hpp"
43 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
44 #include "gc_implementation/g1/vm_operations_g1.hpp"
45 #include "gc_implementation/shared/isGCActiveMark.hpp"
46 #include "memory/gcLocker.inline.hpp"
47 #include "memory/genOopClosures.inline.hpp"
48 #include "memory/generationSpec.hpp"
49 #include "memory/referenceProcessor.hpp"
50 #include "oops/oop.inline.hpp"
51 #include "oops/oop.pcgc.inline.hpp"
52 #include "runtime/aprofiler.hpp"
53 #include "runtime/vmThread.hpp"
54
// Size threshold (in heap words) above which an allocation is treated as
// "humongous". Initialized to 0 here; presumably computed during heap
// initialization from the region size -- TODO confirm (not visible in this chunk).
size_t G1CollectedHeap::_humongous_object_threshold_in_words = 0;
56
57 // turn it on so that the contents of the young list (scan-only /
58 // to-be-collected) are printed at "strategic" points before / during
59 // / after the collection --- this is useful for debugging
60 #define YOUNG_LIST_VERBOSE 0
61 // CURRENT STATUS
62 // This file is under construction. Search for "FIXME".
63
64 // INVARIANTS/NOTES
65 //
66 // All allocation activity covered by the G1CollectedHeap interface is
67 // serialized by acquiring the HeapLock. This happens in mem_allocate
68 // and allocate_new_tlab, which are the "entry" points to the
69 // allocation code from the rest of the JVM. (Note that this does not
70 // apply to TLAB allocation, which is not part of this interface: it
71 // is done by clients of this interface.)
72
2582
2583 // Iterates an ObjectClosure over all objects within a HeapRegion.
2584
2585 class IterateObjectClosureRegionClosure: public HeapRegionClosure {
2586 ObjectClosure* _cl;
2587 public:
2588 IterateObjectClosureRegionClosure(ObjectClosure* cl) : _cl(cl) {}
2589 bool doHeapRegion(HeapRegion* r) {
2590 if (! r->continuesHumongous()) {
2591 r->object_iterate(_cl);
2592 }
2593 return false;
2594 }
2595 };
2596
2597 void G1CollectedHeap::object_iterate(ObjectClosure* cl) {
2598 IterateObjectClosureRegionClosure blk(cl);
2599 heap_region_iterate(&blk);
2600 }
2601
2602 void G1CollectedHeap::object_iterate_since_last_GC(ObjectClosure* cl) {
2603 // FIXME: is this right?
2604 guarantee(false, "object_iterate_since_last_GC not supported by G1 heap");
2605 }
2606
2607 // Calls a SpaceClosure on a HeapRegion.
2608
2609 class SpaceClosureRegionClosure: public HeapRegionClosure {
2610 SpaceClosure* _cl;
2611 public:
2612 SpaceClosureRegionClosure(SpaceClosure* cl) : _cl(cl) {}
2613 bool doHeapRegion(HeapRegion* r) {
2614 _cl->do_space(r);
2615 return false;
2616 }
2617 };
2618
2619 void G1CollectedHeap::space_iterate(SpaceClosure* cl) {
2620 SpaceClosureRegionClosure blk(cl);
2621 heap_region_iterate(&blk);
2622 }
2623
2624 void G1CollectedHeap::heap_region_iterate(HeapRegionClosure* cl) const {
2625 _hrs.iterate(cl);
2626 }
3515 void G1CollectedHeap::print_cset_rsets() {
3516 PrintRSetsClosure cl("Printing CSet RSets");
3517 collection_set_iterate(&cl);
3518 }
3519
3520 void G1CollectedHeap::print_all_rsets() {
3521 PrintRSetsClosure cl("Printing All RSets");;
3522 heap_region_iterate(&cl);
3523 }
3524 #endif // PRODUCT
3525
3526 G1CollectedHeap* G1CollectedHeap::heap() {
3527 assert(_sh->kind() == CollectedHeap::G1CollectedHeap,
3528 "not a garbage-first heap");
3529 return _g1h;
3530 }
3531
3532 void G1CollectedHeap::gc_prologue(bool full /* Ignored */) {
3533 // always_do_update_barrier = false;
3534 assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
3535 // Call allocation profiler
3536 AllocationProfiler::iterate_since_last_gc();
3537 // Fill TLAB's and such
3538 ensure_parsability(true);
3539 }
3540
3541 void G1CollectedHeap::gc_epilogue(bool full /* Ignored */) {
3542 // FIXME: what is this about?
3543 // I'm ignoring the "fill_newgen()" call if "alloc_event_enabled"
3544 // is set.
3545 COMPILER2_PRESENT(assert(DerivedPointerTable::is_empty(),
3546 "derived pointer present"));
3547 // always_do_update_barrier = true;
3548
3549 // We have just completed a GC. Update the soft reference
3550 // policy with the new heap occupancy
3551 Universe::update_heap_info_at_gc();
3552 }
3553
3554 HeapWord* G1CollectedHeap::do_collection_pause(size_t word_size,
3555 unsigned int gc_count_before,
3556 bool* succeeded) {
|
32 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
33 #include "gc_implementation/g1/g1CollectorPolicy.hpp"
34 #include "gc_implementation/g1/g1ErgoVerbose.hpp"
35 #include "gc_implementation/g1/g1EvacFailure.hpp"
36 #include "gc_implementation/g1/g1GCPhaseTimes.hpp"
37 #include "gc_implementation/g1/g1Log.hpp"
38 #include "gc_implementation/g1/g1MarkSweep.hpp"
39 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
40 #include "gc_implementation/g1/g1RemSet.inline.hpp"
41 #include "gc_implementation/g1/heapRegion.inline.hpp"
42 #include "gc_implementation/g1/heapRegionRemSet.hpp"
43 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
44 #include "gc_implementation/g1/vm_operations_g1.hpp"
45 #include "gc_implementation/shared/isGCActiveMark.hpp"
46 #include "memory/gcLocker.inline.hpp"
47 #include "memory/genOopClosures.inline.hpp"
48 #include "memory/generationSpec.hpp"
49 #include "memory/referenceProcessor.hpp"
50 #include "oops/oop.inline.hpp"
51 #include "oops/oop.pcgc.inline.hpp"
52 #include "runtime/vmThread.hpp"
53
// Size threshold (in heap words) above which an allocation is treated as
// "humongous". Initialized to 0 here; presumably computed during heap
// initialization from the region size -- TODO confirm (not visible in this chunk).
size_t G1CollectedHeap::_humongous_object_threshold_in_words = 0;
55
56 // turn it on so that the contents of the young list (scan-only /
57 // to-be-collected) are printed at "strategic" points before / during
58 // / after the collection --- this is useful for debugging
59 #define YOUNG_LIST_VERBOSE 0
60 // CURRENT STATUS
61 // This file is under construction. Search for "FIXME".
62
63 // INVARIANTS/NOTES
64 //
65 // All allocation activity covered by the G1CollectedHeap interface is
66 // serialized by acquiring the HeapLock. This happens in mem_allocate
67 // and allocate_new_tlab, which are the "entry" points to the
68 // allocation code from the rest of the JVM. (Note that this does not
69 // apply to TLAB allocation, which is not part of this interface: it
70 // is done by clients of this interface.)
71
2581
2582 // Iterates an ObjectClosure over all objects within a HeapRegion.
2583
2584 class IterateObjectClosureRegionClosure: public HeapRegionClosure {
2585 ObjectClosure* _cl;
2586 public:
2587 IterateObjectClosureRegionClosure(ObjectClosure* cl) : _cl(cl) {}
2588 bool doHeapRegion(HeapRegion* r) {
2589 if (! r->continuesHumongous()) {
2590 r->object_iterate(_cl);
2591 }
2592 return false;
2593 }
2594 };
2595
2596 void G1CollectedHeap::object_iterate(ObjectClosure* cl) {
2597 IterateObjectClosureRegionClosure blk(cl);
2598 heap_region_iterate(&blk);
2599 }
2600
2601 // Calls a SpaceClosure on a HeapRegion.
2602
2603 class SpaceClosureRegionClosure: public HeapRegionClosure {
2604 SpaceClosure* _cl;
2605 public:
2606 SpaceClosureRegionClosure(SpaceClosure* cl) : _cl(cl) {}
2607 bool doHeapRegion(HeapRegion* r) {
2608 _cl->do_space(r);
2609 return false;
2610 }
2611 };
2612
2613 void G1CollectedHeap::space_iterate(SpaceClosure* cl) {
2614 SpaceClosureRegionClosure blk(cl);
2615 heap_region_iterate(&blk);
2616 }
2617
2618 void G1CollectedHeap::heap_region_iterate(HeapRegionClosure* cl) const {
2619 _hrs.iterate(cl);
2620 }
3509 void G1CollectedHeap::print_cset_rsets() {
3510 PrintRSetsClosure cl("Printing CSet RSets");
3511 collection_set_iterate(&cl);
3512 }
3513
3514 void G1CollectedHeap::print_all_rsets() {
3515 PrintRSetsClosure cl("Printing All RSets");;
3516 heap_region_iterate(&cl);
3517 }
3518 #endif // PRODUCT
3519
3520 G1CollectedHeap* G1CollectedHeap::heap() {
3521 assert(_sh->kind() == CollectedHeap::G1CollectedHeap,
3522 "not a garbage-first heap");
3523 return _g1h;
3524 }
3525
3526 void G1CollectedHeap::gc_prologue(bool full /* Ignored */) {
3527 // always_do_update_barrier = false;
3528 assert(InlineCacheBuffer::is_empty(), "should have cleaned up ICBuffer");
3529 // Fill TLAB's and such
3530 ensure_parsability(true);
3531 }
3532
3533 void G1CollectedHeap::gc_epilogue(bool full /* Ignored */) {
3534 // FIXME: what is this about?
3535 // I'm ignoring the "fill_newgen()" call if "alloc_event_enabled"
3536 // is set.
3537 COMPILER2_PRESENT(assert(DerivedPointerTable::is_empty(),
3538 "derived pointer present"));
3539 // always_do_update_barrier = true;
3540
3541 // We have just completed a GC. Update the soft reference
3542 // policy with the new heap occupancy
3543 Universe::update_heap_info_at_gc();
3544 }
3545
3546 HeapWord* G1CollectedHeap::do_collection_pause(size_t word_size,
3547 unsigned int gc_count_before,
3548 bool* succeeded) {
|