178 }
179
// Called when a GC copying phase finishes filling this region. Maintains the
// next top-at-mark-start (_next_top_at_mark_start): during an initial-mark
// pause it is pulled down from end() to the current top(); presumably so the
// objects copied in during this pause are below the next TAMS — confirm
// against note_start_of_copying(), which is referenced below but not visible
// in this chunk.
180 inline void HeapRegion::note_end_of_copying(bool during_initial_mark) {
181 if (is_survivor()) {
182 // This is how we always allocate survivors.
// Survivor allocation keeps the next TAMS at bottom(); nothing to update.
183 assert(_next_top_at_mark_start == bottom(), "invariant");
184 } else {
185 if (during_initial_mark) {
186 // See the comment for note_start_of_copying() for the details
187 // on this.
188 assert(_next_top_at_mark_start == end(), "pre-condition");
189 _next_top_at_mark_start = top();
190 } else {
191 // See the comment for note_start_of_copying() for the details
192 // on this.
193 assert(top() >= _next_top_at_mark_start, "invariant");
194 }
195 }
196 }
197
198 #endif // SHARE_VM_GC_IMPLEMENTATION_G1_HEAPREGION_INLINE_HPP
|
178 }
179
180 inline void HeapRegion::note_end_of_copying(bool during_initial_mark) {
181 if (is_survivor()) {
182 // This is how we always allocate survivors.
183 assert(_next_top_at_mark_start == bottom(), "invariant");
184 } else {
185 if (during_initial_mark) {
186 // See the comment for note_start_of_copying() for the details
187 // on this.
188 assert(_next_top_at_mark_start == end(), "pre-condition");
189 _next_top_at_mark_start = top();
190 } else {
191 // See the comment for note_start_of_copying() for the details
192 // on this.
193 assert(top() >= _next_top_at_mark_start, "invariant");
194 }
195 }
196 }
197
198 inline bool HeapRegion::in_collection_set() const {
199 return G1CollectedHeap::heap()->is_in_cset(this);
200 }
201
202 #endif // SHARE_VM_GC_IMPLEMENTATION_G1_HEAPREGION_INLINE_HPP
|