src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.hpp

1183 
1184   // Public accessors
1185   static elapsedTimer* accumulated_time() { return &_accumulated_time; }
1186   static unsigned int total_invocations() { return _total_invocations; }
1187   static CollectorCounters* counters()    { return _counters; }
1188 
1189   // Used to add tasks
1190   static GCTaskManager* const gc_task_manager();
1191   static Klass* updated_int_array_klass_obj() {
1192     return _updated_int_array_klass_obj;
1193   }
1194 
1195   // Marking support
1196   static inline bool mark_obj(oop obj);
1197   static inline bool is_marked(oop obj);
1198   // Check mark and maybe push on marking stack
1199   template <class T> static inline void mark_and_push(ParCompactionManager* cm,
1200                                                       T* p);
1201   template <class T> static inline void adjust_pointer(T* p);
1202 
1203   static void follow_klass(ParCompactionManager* cm, Klass* klass);
1204   static void adjust_klass(ParCompactionManager* cm, Klass* klass);
1205 
1206   static void follow_class_loader(ParCompactionManager* cm,
1207                                   ClassLoaderData* klass);
1208   static void adjust_class_loader(ParCompactionManager* cm,
1209                                   ClassLoaderData* klass);
1210 
1211   // Compaction support.
1212   // Return true if p is in the range [beg_addr, end_addr).
1213   static inline bool is_in(HeapWord* p, HeapWord* beg_addr, HeapWord* end_addr);
1214   static inline bool is_in(oop* p, HeapWord* beg_addr, HeapWord* end_addr);
1215 
1216   // Convenience wrappers for per-space data kept in _space_info.
1217   static inline MutableSpace*     space(SpaceId space_id);
1218   static inline HeapWord*         new_top(SpaceId space_id);
1219   static inline HeapWord*         dense_prefix(SpaceId space_id);
1220   static inline ObjectStartArray* start_array(SpaceId space_id);
1221 
1222   // Move and update the live objects in the specified space.
1223   static void move_and_update(ParCompactionManager* cm, SpaceId space_id);
1224 
1225   // Process the end of the given region range in the dense prefix.
1226   // This includes saving any object not updated.
1227   static void dense_prefix_regions_epilogue(ParCompactionManager* cm,
1228                                             size_t region_start_index,
1229                                             size_t region_end_index,


1361       cm->push(obj);
1362     }
1363   }
1364 }
1365 
1366 template <class T>
1367 inline void PSParallelCompact::adjust_pointer(T* p) {
1368   T heap_oop = oopDesc::load_heap_oop(p);
1369   if (!oopDesc::is_null(heap_oop)) {
1370     oop obj     = oopDesc::decode_heap_oop_not_null(heap_oop);
1371     oop new_obj = (oop)summary_data().calc_new_pointer(obj);
1372     assert(new_obj != NULL,                    // is forwarding ptr?
1373            "should be forwarded");
1374     // Just always do the update unconditionally?
1375     if (new_obj != NULL) {
1376       assert(Universe::heap()->is_in_reserved(new_obj),
1377              "should be in object space");
1378       oopDesc::encode_store_heap_oop_not_null(p, new_obj);
1379     }
1380   }
1381 }
1382 
1383 template <class T>
1384 inline void PSParallelCompact::KeepAliveClosure::do_oop_work(T* p) {
1385   mark_and_push(_compaction_manager, p);
1386 }
1387 
1388 inline bool PSParallelCompact::print_phases() {
1389   return _print_phases;
1390 }
1391 
1392 inline double PSParallelCompact::normal_distribution(double density) {
1393   assert(_dwl_initialized, "uninitialized");
1394   const double squared_term = (density - _dwl_mean) / _dwl_std_dev;
1395   return _dwl_first_term * exp(-0.5 * squared_term * squared_term);
1396 }
1397 
1398 inline bool
1399 PSParallelCompact::dead_space_crosses_boundary(const RegionData* region,
1400                                                idx_t bit)




1183 
1184   // Public accessors
1185   static elapsedTimer* accumulated_time() { return &_accumulated_time; }
1186   static unsigned int total_invocations() { return _total_invocations; }
1187   static CollectorCounters* counters()    { return _counters; }
1188 
1189   // Used to add tasks
1190   static GCTaskManager* const gc_task_manager();
1191   static Klass* updated_int_array_klass_obj() {
1192     return _updated_int_array_klass_obj;
1193   }
1194 
1195   // Marking support
1196   static inline bool mark_obj(oop obj);
1197   static inline bool is_marked(oop obj);
1198   // Check mark and maybe push on marking stack
1199   template <class T> static inline void mark_and_push(ParCompactionManager* cm,
1200                                                       T* p);
1201   template <class T> static inline void adjust_pointer(T* p);
1202 
1203   static inline void follow_klass(ParCompactionManager* cm, Klass* klass);

1204 
1205   static void follow_class_loader(ParCompactionManager* cm,
1206                                   ClassLoaderData* klass);


1207 
1208   // Compaction support.
1209   // Return true if p is in the range [beg_addr, end_addr).
1210   static inline bool is_in(HeapWord* p, HeapWord* beg_addr, HeapWord* end_addr);
1211   static inline bool is_in(oop* p, HeapWord* beg_addr, HeapWord* end_addr);
1212 
1213   // Convenience wrappers for per-space data kept in _space_info.
1214   static inline MutableSpace*     space(SpaceId space_id);
1215   static inline HeapWord*         new_top(SpaceId space_id);
1216   static inline HeapWord*         dense_prefix(SpaceId space_id);
1217   static inline ObjectStartArray* start_array(SpaceId space_id);
1218 
1219   // Move and update the live objects in the specified space.
1220   static void move_and_update(ParCompactionManager* cm, SpaceId space_id);
1221 
1222   // Process the end of the given region range in the dense prefix.
1223   // This includes saving any object not updated.
1224   static void dense_prefix_regions_epilogue(ParCompactionManager* cm,
1225                                             size_t region_start_index,
1226                                             size_t region_end_index,


1358       cm->push(obj);
1359     }
1360   }
1361 }
1362 
1363 template <class T>
1364 inline void PSParallelCompact::adjust_pointer(T* p) {
1365   T heap_oop = oopDesc::load_heap_oop(p);
1366   if (!oopDesc::is_null(heap_oop)) {
1367     oop obj     = oopDesc::decode_heap_oop_not_null(heap_oop);
1368     oop new_obj = (oop)summary_data().calc_new_pointer(obj);
1369     assert(new_obj != NULL,                    // is forwarding ptr?
1370            "should be forwarded");
1371     // Just always do the update unconditionally?
1372     if (new_obj != NULL) {
1373       assert(Universe::heap()->is_in_reserved(new_obj),
1374              "should be in object space");
1375       oopDesc::encode_store_heap_oop_not_null(p, new_obj);
1376     }
1377   }
1378 }
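For context, a minimal sketch (not part of this change) of how the adjust_pointer template is commonly driven: an OopClosure whose two do_oop overloads forward full-width and compressed oops to it. The closure name below is hypothetical.

  // Assumes memory/iterator.hpp (OopClosure) and this header are included.
  class AdjustPointerClosureSketch : public OopClosure {
   public:
    // Both overloads delegate to the template above, which looks up the
    // object's new location in the summary data and rewrites the slot.
    virtual void do_oop(oop* p)       { PSParallelCompact::adjust_pointer(p); }
    virtual void do_oop(narrowOop* p) { PSParallelCompact::adjust_pointer(p); }
  };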
1379 
1380 inline void PSParallelCompact::follow_klass(ParCompactionManager* cm, Klass* klass) {
1381   oop holder = klass->klass_holder();
1382   PSParallelCompact::mark_and_push(cm, &holder);
1383 }
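The new inline follow_klass above only marks and pushes the klass_holder() oop; keeping the holder alive is what prevents the Klass and its ClassLoaderData from being unloaded. A hypothetical sketch of how it might be applied during marking, via a KlassClosure (the closure name is illustrative only):

  // Assumes memory/iterator.hpp (KlassClosure) and this header are included.
  class FollowKlassClosureSketch : public KlassClosure {
    ParCompactionManager* _cm;
   public:
    FollowKlassClosureSketch(ParCompactionManager* cm) : _cm(cm) { }
    // Each visited Klass is followed by marking its holder oop.
    virtual void do_klass(Klass* klass) {
      PSParallelCompact::follow_klass(_cm, klass);
    }
  };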
1384 
1385 template <class T>
1386 inline void PSParallelCompact::KeepAliveClosure::do_oop_work(T* p) {
1387   mark_and_push(_compaction_manager, p);
1388 }
1389 
1390 inline bool PSParallelCompact::print_phases() {
1391   return _print_phases;
1392 }
1393 
1394 inline double PSParallelCompact::normal_distribution(double density) {
1395   assert(_dwl_initialized, "uninitialized");
1396   const double squared_term = (density - _dwl_mean) / _dwl_std_dev;
1397   return _dwl_first_term * exp(-0.5 * squared_term * squared_term);
1398 }
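normal_distribution evaluates a Gaussian density at the given heap density, with _dwl_first_term presumably caching the 1/(_dwl_std_dev * sqrt(2*pi)) normalization factor computed at initialization. A free-standing sketch of the same computation (the function name is hypothetical):

  #include <math.h>

  // Value at x of a normal (Gaussian) density with the given mean and
  // standard deviation: first_term * exp(-0.5 * ((x - mean)/std_dev)^2),
  // where first_term = 1 / (std_dev * sqrt(2 * pi)).
  inline double normal_density(double x, double mean, double std_dev) {
    const double pi         = 3.14159265358979323846;
    const double first_term = 1.0 / (std_dev * sqrt(2.0 * pi));
    const double z          = (x - mean) / std_dev;
    return first_term * exp(-0.5 * z * z);
  }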
1399 
1400 inline bool
1401 PSParallelCompact::dead_space_crosses_boundary(const RegionData* region,
1402                                                idx_t bit)