810 prev_p = p;
811 p += obj_size;
812 }
813 }
814
815 void HeapRegion::verify_rem_set() const {
816 bool failures = false;
817 verify_rem_set(VerifyOption_G1UsePrevMarking, &failures);
818 guarantee(!failures, "HeapRegion RemSet verification failed");
819 }
820
// Plan full-GC compaction for this region: install forwarding pointers
// for live objects relative to the compact point *cp.
// NOTE(review): scan_and_forward is presumably the CompactibleSpace
// template helper shared with space.cpp — confirm against its header.
821 void HeapRegion::prepare_for_compaction(CompactPoint* cp) {
822 scan_and_forward(this, cp);
823 }
824
825 // G1OffsetTableContigSpace code; copied from space.cpp. Hope this can go
826 // away eventually.
827
// Reset this space to empty: allocation top returns to bottom, the
// concurrent scan limit is reset, and the block offset table state is
// reinitialized. mangle_space is forwarded to the superclass clear.
828 void G1ContiguousSpace::clear(bool mangle_space) {
829 set_top(bottom());
// Reset the limit concurrent readers may scan to (see scan_top()).
830 _scan_top = bottom();
831 CompactibleSpace::clear(mangle_space);
832 reset_bot();
833 }
834
835 #ifndef PRODUCT
// Debug-only: overwrite the unused part of the space with the mangle
// pattern so stray accesses to unallocated heap are easier to spot.
836 void G1ContiguousSpace::mangle_unused_area() {
// No incremental variant here — always mangle the full unused range.
837 mangle_unused_area_complete();
838 }
839
// Everything in [top(), end()) is unallocated, so mangle that range.
840 void G1ContiguousSpace::mangle_unused_area_complete() {
841 SpaceMangler::mangle_region(MemRegion(top(), end()));
842 }
843 #endif
844
// Print a one-line summary of this space's extents:
// [bottom, top, BOT threshold, end).
845 void G1ContiguousSpace::print() const {
846 print_short();
// Third value is the block-offset-table update threshold.
847 tty->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", "
848 INTPTR_FORMAT ", " INTPTR_FORMAT ")",
849 p2i(bottom()), p2i(top()), p2i(_bot_part.threshold()), p2i(end()));
850 }
851
// Delegate to the block-offset-table part: (re)establish its update
// threshold for this space and return the new threshold address.
852 HeapWord* G1ContiguousSpace::initialize_threshold() {
853 return _bot_part.initialize_threshold();
854 }
855
// Called when an allocation [start, end) crosses the BOT threshold:
// record the block in the offset table and return the next threshold.
856 HeapWord* G1ContiguousSpace::cross_threshold(HeapWord* start,
857 HeapWord* end) {
858 _bot_part.alloc_block(start, end);
859 return _bot_part.threshold();
860 }
861
// Return the maximum address up to which a concurrent reader may
// safely scan this space. This is lock-free and pairs with
// record_timestamp(): do not reorder the reads below.
862 HeapWord* G1ContiguousSpace::scan_top() const {
863 G1CollectedHeap* g1h = G1CollectedHeap::heap();
864 HeapWord* local_top = top();
// The loadload barrier ensures top() is read BEFORE _gc_time_stamp.
865 OrderAccess::loadload();
866 const unsigned local_time_stamp = _gc_time_stamp;
867 assert(local_time_stamp <= g1h->get_gc_time_stamp(), "invariant");
868 if (local_time_stamp < g1h->get_gc_time_stamp()) {
// Time stamp not yet updated for the current GC: the top we read was
// taken before the stamp could change, so it is a safe scan limit.
869 return local_top;
870 } else {
// Stamp is current: record_timestamp() has run, use the published
// _scan_top as the limit instead of top.
871 return _scan_top;
872 }
873 }
874
// Bring this space's GC time stamp up to the heap's current stamp.
// Publishing the stamp switches concurrent readers (scan_top()) from
// using top() to using _scan_top as their scan limit.
875 void G1ContiguousSpace::record_timestamp() {
876 G1CollectedHeap* g1h = G1CollectedHeap::heap();
877 uint curr_gc_time_stamp = g1h->get_gc_time_stamp();
878
879 if (_gc_time_stamp < curr_gc_time_stamp) {
880 // Setting the time stamp here tells concurrent readers to look at
881 // scan_top to know the maximum allowed address to look at.
882
883 // scan_top should be bottom for all regions except for the
884 // retained old alloc region which should have scan_top == top
885 HeapWord* st = _scan_top;
886 guarantee(st == _bottom || st == _top, "invariant");
887
888 _gc_time_stamp = curr_gc_time_stamp;
889 }
890 }
891
// Mark this space as a retained alloc region: publish the current top
// as the limit concurrent scanners may use for the next GC.
892 void G1ContiguousSpace::record_retained_region() {
893 // scan_top is the maximum address where it's safe for the next gc to
894 // scan this region.
895 _scan_top = top();
896 }
897
// The "safe" variant simply delegates — NOTE(review): presumably this
// space is always parseable up to top(), making the plain iteration
// already safe; confirm against the Space base-class contract.
898 void G1ContiguousSpace::safe_object_iterate(ObjectClosure* blk) {
899 object_iterate(blk);
900 }
901
902 void G1ContiguousSpace::object_iterate(ObjectClosure* blk) {
903 HeapWord* p = bottom();
904 while (p < top()) {
905 if (block_is_obj(p)) {
906 blk->do_object(oop(p));
907 }
908 p += block_size(p);
909 }
910 }
911
// Construct a space bound to the given block offset table with a
// zeroed GC time stamp. _top/_scan_top are established later in
// initialize(); the lock name is kept for historical reasons.
912 G1ContiguousSpace::G1ContiguousSpace(G1BlockOffsetTable* bot) :
913 _bot_part(bot, this),
914 _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true),
915 _gc_time_stamp(0)
916 {
917 }
918
// Bind this space to memory region mr and reset allocation state:
// top and the concurrent scan limit start at bottom, the saved mark
// is cleared, and the block offset table is reset.
919 void G1ContiguousSpace::initialize(MemRegion mr, bool clear_space, bool mangle_space) {
920 CompactibleSpace::initialize(mr, clear_space, mangle_space);
921 _top = bottom();
922 _scan_top = bottom();
923 set_saved_mark_word(NULL);
924 reset_bot();
925 }
926
|
810 prev_p = p;
811 p += obj_size;
812 }
813 }
814
815 void HeapRegion::verify_rem_set() const {
816 bool failures = false;
817 verify_rem_set(VerifyOption_G1UsePrevMarking, &failures);
818 guarantee(!failures, "HeapRegion RemSet verification failed");
819 }
820
// Plan full-GC compaction for this region: install forwarding pointers
// for live objects relative to the compact point *cp.
// NOTE(review): scan_and_forward is presumably the CompactibleSpace
// template helper shared with space.cpp — confirm against its header.
821 void HeapRegion::prepare_for_compaction(CompactPoint* cp) {
822 scan_and_forward(this, cp);
823 }
824
825 // G1OffsetTableContigSpace code; copied from space.cpp. Hope this can go
826 // away eventually.
827
// Reset this space to empty: allocation top returns to bottom and the
// block offset table state is reinitialized. mangle_space is forwarded
// to the superclass clear.
828 void G1ContiguousSpace::clear(bool mangle_space) {
829 set_top(bottom());
830 CompactibleSpace::clear(mangle_space);
831 reset_bot();
832 }
833
834 #ifndef PRODUCT
// Debug-only: overwrite the unused part of the space with the mangle
// pattern so stray accesses to unallocated heap are easier to spot.
835 void G1ContiguousSpace::mangle_unused_area() {
// No incremental variant here — always mangle the full unused range.
836 mangle_unused_area_complete();
837 }
838
// Everything in [top(), end()) is unallocated, so mangle that range.
839 void G1ContiguousSpace::mangle_unused_area_complete() {
840 SpaceMangler::mangle_region(MemRegion(top(), end()));
841 }
842 #endif
843
// Print a one-line summary of this space's extents:
// [bottom, top, BOT threshold, end).
844 void G1ContiguousSpace::print() const {
845 print_short();
// Third value is the block-offset-table update threshold.
846 tty->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", "
847 INTPTR_FORMAT ", " INTPTR_FORMAT ")",
848 p2i(bottom()), p2i(top()), p2i(_bot_part.threshold()), p2i(end()));
849 }
850
// Delegate to the block-offset-table part: (re)establish its update
// threshold for this space and return the new threshold address.
851 HeapWord* G1ContiguousSpace::initialize_threshold() {
852 return _bot_part.initialize_threshold();
853 }
854
// Called when an allocation [start, end) crosses the BOT threshold:
// record the block in the offset table and return the next threshold.
855 HeapWord* G1ContiguousSpace::cross_threshold(HeapWord* start,
856 HeapWord* end) {
857 _bot_part.alloc_block(start, end);
858 return _bot_part.threshold();
859 }
860
// Bring this space's GC time stamp up to the heap's current stamp.
// The stamp only moves forward; no-op when already current.
861 void G1ContiguousSpace::record_timestamp() {
862 G1CollectedHeap* g1h = G1CollectedHeap::heap();
863 uint curr_gc_time_stamp = g1h->get_gc_time_stamp();
864
865 if (_gc_time_stamp < curr_gc_time_stamp) {
866 _gc_time_stamp = curr_gc_time_stamp;
867 }
868 }
869
// The "safe" variant simply delegates — NOTE(review): presumably this
// space is always parseable up to top(), making the plain iteration
// already safe; confirm against the Space base-class contract.
870 void G1ContiguousSpace::safe_object_iterate(ObjectClosure* blk) {
871 object_iterate(blk);
872 }
873
874 void G1ContiguousSpace::object_iterate(ObjectClosure* blk) {
875 HeapWord* p = bottom();
876 while (p < top()) {
877 if (block_is_obj(p)) {
878 blk->do_object(oop(p));
879 }
880 p += block_size(p);
881 }
882 }
883
// Construct a space bound to the given block offset table with a
// zeroed GC time stamp. _top is established later in initialize();
// the lock name is kept for historical reasons.
884 G1ContiguousSpace::G1ContiguousSpace(G1BlockOffsetTable* bot) :
885 _bot_part(bot, this),
886 _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true),
887 _gc_time_stamp(0)
888 {
889 }
890
// Bind this space to memory region mr and reset allocation state:
// top starts at bottom, the saved mark is cleared, and the block
// offset table is reset.
891 void G1ContiguousSpace::initialize(MemRegion mr, bool clear_space, bool mangle_space) {
892 CompactibleSpace::initialize(mr, clear_space, mangle_space);
893 _top = bottom();
894 set_saved_mark_word(NULL);
895 reset_bot();
896 }
897
|