111 // saved mark. Otherwise, the "obj_allocated_since_save_marks" method of
112 // the space must distinguish between objects in the region allocated before
113 // and after the call to save marks.
114 MemRegion used_region_at_save_marks() const {
115 return MemRegion(bottom(), saved_mark_word());
116 }
117
118 // Initialization.
119 // "initialize" should be called once on a space, before it is used for
120 // any purpose. The "mr" argument gives the bounds of the space, and
121 // the "clear_space" argument should be true unless the memory in "mr" is
122 // known to be zeroed.
123 virtual void initialize(MemRegion mr, bool clear_space, bool mangle_space);
124
125 // The "clear" method must be called on a region that may have
126 // had allocation performed in it, but is now to be considered empty.
127 virtual void clear(bool mangle_space);
128
129 // For detecting GC bugs. Should only be called at GC boundaries, since
130 // some unused space may be used as scratch space during GCs.
131 // Default implementation does nothing. We also call this when expanding
132 // a space to satisfy an allocation request. See bug #4668531
133 virtual void mangle_unused_area() {}
134 virtual void mangle_unused_area_complete() {}
135 virtual void mangle_region(MemRegion mr) {}
136
137 // Testers
138 bool is_empty() const { return used() == 0; }
139 bool not_empty() const { return used() > 0; }
140
141 // Returns true iff the space contains the
142 // given address as part of an allocated object. For
143 // certain kinds of spaces, this might be a potentially
144 // expensive operation. To prevent performance problems
145 // on account of its inadvertent use in product JVMs,
146 // we restrict its use to assertion checks only.
147 bool is_in(const void* p) const {
148 return used_region().contains(p);
149 }
150
151 // Returns true iff the reserved memory of the space contains the
152 // given address.
153 bool is_in_reserved(const void* p) const { return _bottom <= p && p < _end; }
154
155 // Returns true iff the given block is not allocated.
542 void reset_saved_mark() { _saved_mark_word = bottom(); }
543
544 WaterMark bottom_mark() { return WaterMark(this, bottom()); }
545 WaterMark top_mark() { return WaterMark(this, top()); }
546 WaterMark saved_mark() { return WaterMark(this, saved_mark_word()); }
547 bool saved_mark_at_top() const { return saved_mark_word() == top(); }
548
549 // In debug mode mangle (write it with a particular bit
550 // pattern) the unused part of a space.
551
552 // Used to save an address in a space for later use during mangling.
553 void set_top_for_allocations(HeapWord* v) PRODUCT_RETURN;
554 // Used to save the space's current top for later use during mangling.
555 void set_top_for_allocations() PRODUCT_RETURN;
556
557 // Mangle regions in the space from the current top up to the
558 // previously mangled part of the space.
559 void mangle_unused_area() PRODUCT_RETURN;
560 // Mangle [top, end)
561 void mangle_unused_area_complete() PRODUCT_RETURN;
562 // Mangle the given MemRegion.
563 void mangle_region(MemRegion mr) PRODUCT_RETURN;
564
565 // Do some sparse checking on the area that should have been mangled.
566 void check_mangled_unused_area(HeapWord* limit) PRODUCT_RETURN;
567 // Check the complete area that should have been mangled.
568 // This check may be a no-op, depending on the macro DEBUG_MANGLING.
569 void check_mangled_unused_area_complete() PRODUCT_RETURN;
570
571 // Size computations: sizes in bytes.
572 size_t capacity() const { return byte_size(bottom(), end()); }
573 size_t used() const { return byte_size(bottom(), top()); }
574 size_t free() const { return byte_size(top(), end()); }
575
// Returns true iff the given block is not allocated.
576 virtual bool is_free_block(const HeapWord* p) const;
577
578 // In a contiguous space we have a more obvious bound on what parts
579 // contain objects.
580 MemRegion used_region() const { return MemRegion(bottom(), top()); }
581
582 // Allocation (return NULL if full)
583 virtual HeapWord* allocate(size_t word_size);
|
111 // saved mark. Otherwise, the "obj_allocated_since_save_marks" method of
112 // the space must distinguish between objects in the region allocated before
113 // and after the call to save marks.
114 MemRegion used_region_at_save_marks() const {
115 return MemRegion(bottom(), saved_mark_word());
116 }
117
118 // Initialization.
119 // "initialize" should be called once on a space, before it is used for
120 // any purpose. The "mr" argument gives the bounds of the space, and
121 // the "clear_space" argument should be true unless the memory in "mr" is
122 // known to be zeroed.
123 virtual void initialize(MemRegion mr, bool clear_space, bool mangle_space);
124
125 // The "clear" method must be called on a region that may have
126 // had allocation performed in it, but is now to be considered empty.
127 virtual void clear(bool mangle_space);
128
129 // For detecting GC bugs. Should only be called at GC boundaries, since
130 // some unused space may be used as scratch space during GCs.
131 // We also call this when expanding a space to satisfy an allocation
132 // request. See bug #4668531
133 virtual void mangle_unused_area() = 0;
134 virtual void mangle_unused_area_complete() = 0;
135
136 // Testers
137 bool is_empty() const { return used() == 0; }
138 bool not_empty() const { return used() > 0; }
139
140 // Returns true iff the space contains the
141 // given address as part of an allocated object. For
142 // certain kinds of spaces, this might be a potentially
143 // expensive operation. To prevent performance problems
144 // on account of its inadvertent use in product JVMs,
145 // we restrict its use to assertion checks only.
146 bool is_in(const void* p) const {
147 return used_region().contains(p);
148 }
149
150 // Returns true iff the reserved memory of the space contains the
151 // given address.
152 bool is_in_reserved(const void* p) const { return _bottom <= p && p < _end; }
153
154 // Returns true iff the given block is not allocated.
541 void reset_saved_mark() { _saved_mark_word = bottom(); }
542
543 WaterMark bottom_mark() { return WaterMark(this, bottom()); }
544 WaterMark top_mark() { return WaterMark(this, top()); }
545 WaterMark saved_mark() { return WaterMark(this, saved_mark_word()); }
546 bool saved_mark_at_top() const { return saved_mark_word() == top(); }
547
548 // In debug mode mangle (write it with a particular bit
549 // pattern) the unused part of a space.
550
551 // Used to save an address in a space for later use during mangling.
552 void set_top_for_allocations(HeapWord* v) PRODUCT_RETURN;
553 // Used to save the space's current top for later use during mangling.
554 void set_top_for_allocations() PRODUCT_RETURN;
555
556 // Mangle regions in the space from the current top up to the
557 // previously mangled part of the space.
558 void mangle_unused_area() PRODUCT_RETURN;
559 // Mangle [top, end)
560 void mangle_unused_area_complete() PRODUCT_RETURN;
561
562 // Do some sparse checking on the area that should have been mangled.
563 void check_mangled_unused_area(HeapWord* limit) PRODUCT_RETURN;
564 // Check the complete area that should have been mangled.
565 // This check may be a no-op, depending on the macro DEBUG_MANGLING.
566 void check_mangled_unused_area_complete() PRODUCT_RETURN;
567
568 // Size computations: sizes in bytes.
569 size_t capacity() const { return byte_size(bottom(), end()); }
570 size_t used() const { return byte_size(bottom(), top()); }
571 size_t free() const { return byte_size(top(), end()); }
572
// Returns true iff the given block is not allocated.
573 virtual bool is_free_block(const HeapWord* p) const;
574
575 // In a contiguous space we have a more obvious bound on what parts
576 // contain objects.
577 MemRegion used_region() const { return MemRegion(bottom(), top()); }
578
579 // Allocation (return NULL if full)
580 virtual HeapWord* allocate(size_t word_size);
|