1124
1125 // Return "TRUE" iff the given object address is within the collection
1126 // set. Assumes that the reference points into the heap.
// Three overloads: by region, by object reference, and by raw heap address.
1127 inline bool is_in_cset(const HeapRegion *hr);
1128 inline bool is_in_cset(oop obj);
1129 inline bool is_in_cset(HeapWord* addr);
1130
// Combined test for "needs GC processing": in the collection set, or in a
// humongous region. NOTE(review): exact semantics live in the inline
// implementation file — confirm there.
1131 inline bool is_in_cset_or_humongous(const oop obj);
1132
1133 private:
1134 // This array is used for a quick test on whether a reference points into
1135 // the collection set or not. Each of the array's elements denotes whether the
1136 // corresponding region is in the collection set or not.
// Backing store for the region_attr() accessors below; indexed by region.
1137 G1HeapRegionAttrBiasedMappedArray _region_attr;
1138
1139 public:
1140
// Look up the per-region attribute, either for an arbitrary heap address
// or for a region index.
1141 inline G1HeapRegionAttr region_attr(const void* obj) const;
1142 inline G1HeapRegionAttr region_attr(uint idx) const;
1143
1144 MemRegion reserved_region() const {
1145 return _reserved;
1146 }
1147
1148 HeapWord* base() const {
1149 return _reserved.start();
1150 }
1151
1152 bool is_in_reserved(const void* addr) const {
1153 return _reserved.contains(addr);
1154 }
1155
1156 G1HotCardCache* hot_card_cache() const { return _hot_card_cache; }
1157
1158 G1CardTable* card_table() const {
1159 return _card_table;
1160 }
1161
1162 // Iteration functions.
1163
// Parallel flavor of object iteration; the claimer presumably partitions
// heap regions among worker threads — confirm against HeapRegionClaimer.
1164 void object_iterate_parallel(ObjectClosure* cl, uint worker_id, HeapRegionClaimer* claimer);
1165
1166 // Iterate over all objects, calling "cl.do_object" on each.
1167 virtual void object_iterate(ObjectClosure* cl);
1168
// Factory for an iterator driving parallel object iteration with the
// given number of threads.
1169 virtual ParallelObjectIterator* parallel_object_iterator(uint thread_num);
1170
1171 // Keep alive an object that was loaded with AS_NO_KEEPALIVE.
1172 virtual void keep_alive(oop obj);
1173
1264 static bool is_humongous(size_t word_size) {
1265 // Note this has to be strictly greater-than as the TLABs
1266 // are capped at the humongous threshold and we want to
1267 // ensure that we don't try to allocate a TLAB as
1268 // humongous and that we don't allocate a humongous
1269 // object in a TLAB.
1270 return word_size > _humongous_object_threshold_in_words;
1271 }
1272
1273 // Returns the humongous threshold for a specific region size
1274 static size_t humongous_threshold_for(size_t region_size) {
1275 return (region_size / 2);
1276 }
1277
1278 // Returns the number of regions the humongous object of the given word size
1279 // requires.
1280 static size_t humongous_obj_size_in_regions(size_t word_size);
1281
1282 // Returns the maximum heap capacity.
1283 virtual size_t max_capacity() const;
1284
1285 // Return the size of reserved memory. Returns different value than max_capacity() when AllocateOldGenAt is used.
1286 virtual size_t max_reserved_capacity() const;
1287
1288 Tickspan time_since_last_collection() const { return Ticks::now() - _collection_pause_end; }
1289
1290 // Convenience function to be used in situations where the heap type can be
1291 // asserted to be this type.
1292 static G1CollectedHeap* heap() {
1293 return named_heap<G1CollectedHeap>(CollectedHeap::G1);
1294 }
1295
// NOTE(review): the "_locked" suffix suggests a lock must be held by the
// caller — confirm which lock at the definition site.
1296 void set_region_short_lived_locked(HeapRegion* hr);
1297 // add appropriate methods for any other surv rate groups
1298
1299 const G1SurvivorRegions* survivor() const { return &_survivor; }
1300
// Region counts and used-byte totals for the eden and survivor sets; the
// node_index overloads report counts per node (presumably NUMA node —
// confirm against G1EdenRegions/G1SurvivorRegions).
1301 uint eden_regions_count() const { return _eden.length(); }
1302 uint eden_regions_count(uint node_index) const { return _eden.regions_on_node(node_index); }
1303 uint survivor_regions_count() const { return _survivor.length(); }
1304 uint survivor_regions_count(uint node_index) const { return _survivor.regions_on_node(node_index); }
1305 size_t eden_regions_used_bytes() const { return _eden.used_bytes(); }
1306 size_t survivor_regions_used_bytes() const { return _survivor.used_bytes(); }
|
1124
1125 // Return "TRUE" iff the given object address is within the collection
1126 // set. Assumes that the reference points into the heap.
// Overloads for a region, an object reference, and a raw heap address.
1127 inline bool is_in_cset(const HeapRegion *hr);
1128 inline bool is_in_cset(oop obj);
1129 inline bool is_in_cset(HeapWord* addr);
1130
// Combined test: in the collection set, or in a humongous region.
// NOTE(review): precise semantics are in the inline implementation —
// confirm there.
1131 inline bool is_in_cset_or_humongous(const oop obj);
1132
1133 private:
1134 // This array is used for a quick test on whether a reference points into
1135 // the collection set or not. Each of the array's elements denotes whether the
1136 // corresponding region is in the collection set or not.
// Backing store for the region_attr() accessors; one attribute per region.
1137 G1HeapRegionAttrBiasedMappedArray _region_attr;
1138
1139 public:
1140
// Per-region attribute lookup, by heap address or by region index.
1141 inline G1HeapRegionAttr region_attr(const void* obj) const;
1142 inline G1HeapRegionAttr region_attr(uint idx) const;
1143
1144 MemRegion reserved() const {
1145 return _hrm->reserved();
1146 }
1147
1148 bool is_in_reserved(const void* addr) const {
1149 return reserved().contains(addr);
1150 }
1151
1152 G1HotCardCache* hot_card_cache() const { return _hot_card_cache; }
1153
1154 G1CardTable* card_table() const {
1155 return _card_table;
1156 }
1157
1158 // Iteration functions.
1159
// Parallel object iteration; the claimer presumably hands out regions to
// worker threads — confirm against HeapRegionClaimer.
1160 void object_iterate_parallel(ObjectClosure* cl, uint worker_id, HeapRegionClaimer* claimer);
1161
1162 // Iterate over all objects, calling "cl.do_object" on each.
1163 virtual void object_iterate(ObjectClosure* cl);
1164
// Factory for an iterator used to drive parallel object iteration.
1165 virtual ParallelObjectIterator* parallel_object_iterator(uint thread_num);
1166
1167 // Keep alive an object that was loaded with AS_NO_KEEPALIVE.
1168 virtual void keep_alive(oop obj);
1169
1260 static bool is_humongous(size_t word_size) {
1261 // Note this has to be strictly greater-than as the TLABs
1262 // are capped at the humongous threshold and we want to
1263 // ensure that we don't try to allocate a TLAB as
1264 // humongous and that we don't allocate a humongous
1265 // object in a TLAB.
1266 return word_size > _humongous_object_threshold_in_words;
1267 }
1268
1269 // Returns the humongous threshold for a specific region size
1270 static size_t humongous_threshold_for(size_t region_size) {
1271 return (region_size / 2);
1272 }
1273
1274 // Returns the number of regions the humongous object of the given word size
1275 // requires.
1276 static size_t humongous_obj_size_in_regions(size_t word_size);
1277
1278 // Returns the maximum heap capacity.
1279 virtual size_t max_capacity() const;
1280
1281 Tickspan time_since_last_collection() const { return Ticks::now() - _collection_pause_end; }
1282
1283 // Convenience function to be used in situations where the heap type can be
1284 // asserted to be this type.
1285 static G1CollectedHeap* heap() {
1286 return named_heap<G1CollectedHeap>(CollectedHeap::G1);
1287 }
1288
// NOTE(review): the "_locked" suffix suggests a lock must be held by the
// caller — confirm which lock at the definition site.
1289 void set_region_short_lived_locked(HeapRegion* hr);
1290 // add appropriate methods for any other surv rate groups
1291
1292 const G1SurvivorRegions* survivor() const { return &_survivor; }
1293
// Region counts and used-byte totals for eden and survivor sets; the
// node_index overloads report counts per node (presumably NUMA node —
// confirm against G1EdenRegions/G1SurvivorRegions).
1294 uint eden_regions_count() const { return _eden.length(); }
1295 uint eden_regions_count(uint node_index) const { return _eden.regions_on_node(node_index); }
1296 uint survivor_regions_count() const { return _survivor.length(); }
1297 uint survivor_regions_count(uint node_index) const { return _survivor.regions_on_node(node_index); }
1298 size_t eden_regions_used_bytes() const { return _eden.used_bytes(); }
1299 size_t survivor_regions_used_bytes() const { return _survivor.used_bytes(); }
|