24
25 #include "precompiled.hpp"
26 #include "aot/aotLoader.hpp"
27 #include "classfile/classLoaderDataGraph.hpp"
28 #include "classfile/javaClasses.inline.hpp"
29 #include "classfile/stringTable.hpp"
30 #include "classfile/symbolTable.hpp"
31 #include "classfile/systemDictionary.hpp"
32 #include "code/codeCache.hpp"
33 #include "gc/parallel/gcTaskManager.hpp"
34 #include "gc/parallel/parallelScavengeHeap.inline.hpp"
35 #include "gc/parallel/parMarkBitMap.inline.hpp"
36 #include "gc/parallel/pcTasks.hpp"
37 #include "gc/parallel/psAdaptiveSizePolicy.hpp"
38 #include "gc/parallel/psCompactionManager.inline.hpp"
39 #include "gc/parallel/psOldGen.hpp"
40 #include "gc/parallel/psParallelCompact.inline.hpp"
41 #include "gc/parallel/psPromotionManager.inline.hpp"
42 #include "gc/parallel/psScavenge.hpp"
43 #include "gc/parallel/psYoungGen.hpp"
44 #include "gc/shared/gcCause.hpp"
45 #include "gc/shared/gcHeapSummary.hpp"
46 #include "gc/shared/gcId.hpp"
47 #include "gc/shared/gcLocker.hpp"
48 #include "gc/shared/gcTimer.hpp"
49 #include "gc/shared/gcTrace.hpp"
50 #include "gc/shared/gcTraceTime.inline.hpp"
51 #include "gc/shared/isGCActiveMark.hpp"
52 #include "gc/shared/referencePolicy.hpp"
53 #include "gc/shared/referenceProcessor.hpp"
54 #include "gc/shared/referenceProcessorPhaseTimes.hpp"
55 #include "gc/shared/spaceDecorator.hpp"
56 #include "gc/shared/weakProcessor.hpp"
57 #include "logging/log.hpp"
58 #include "memory/iterator.inline.hpp"
59 #include "memory/resourceArea.hpp"
60 #include "oops/access.inline.hpp"
61 #include "oops/instanceClassLoaderKlass.inline.hpp"
62 #include "oops/instanceKlass.inline.hpp"
63 #include "oops/instanceMirrorKlass.inline.hpp"
1480 //
1481 // In the 64-bit VM, each bit represents one 64-bit word:
1482 // +------------+
1483 // b) beg_bits: ... x x x | 0 || 0 | x x ...
1484 // end_bits: ... x x 1 | 0 || 0 | x x ...
1485 // +------------+
1486 // +-------+
1487 // c) beg_bits: ... x x | 0 0 | || 0 x x ...
1488 // end_bits: ... x 1 | 0 0 | || 0 x x ...
1489 // +-------+
1490 // +-----------+
1491 // d) beg_bits: ... x | 0 0 0 | || 0 x x ...
1492 // end_bits: ... 1 | 0 0 0 | || 0 x x ...
1493 // +-----------+
1494 // +-------+
1495 // e) beg_bits: ... 0 0 | 0 0 | || 0 x x ...
1496 // end_bits: ... 0 0 | 0 0 | || 0 x x ...
1497 // +-------+
1498
1499 // Initially assume case a, c or e will apply.
1500 size_t obj_len = CollectedHeap::min_fill_size();
1501 HeapWord* obj_beg = dense_prefix_end - obj_len;
1502
1503 #ifdef _LP64
1504 if (MinObjAlignment > 1) { // object alignment > heap word size
1505 // Cases a, c or e.
1506 } else if (_mark_bitmap.is_obj_end(dense_prefix_bit - 2)) {
1507 // Case b above.
1508 obj_beg = dense_prefix_end - 1;
1509 } else if (!_mark_bitmap.is_obj_end(dense_prefix_bit - 3) &&
1510 _mark_bitmap.is_obj_end(dense_prefix_bit - 4)) {
1511 // Case d above.
1512 obj_beg = dense_prefix_end - 3;
1513 obj_len = 3;
1514 }
1515 #endif // #ifdef _LP64
1516
1517 CollectedHeap::fill_with_object(obj_beg, obj_len);
1518 _mark_bitmap.mark_obj(obj_beg, obj_len);
1519 _summary_data.add_obj(obj_beg, obj_len);
1520 assert(start_array(id) != NULL, "sanity");
1521 start_array(id)->allocate_block(obj_beg);
1522 }
1523 }
1524
1525 void
1526 PSParallelCompact::summarize_space(SpaceId id, bool maximum_compaction)
1527 {
1528 assert(id < last_space_id, "id out of range");
1529 assert(_space_info[id].dense_prefix() == _space_info[id].space()->bottom(),
1530 "should have been reset in summarize_spaces_quick()");
1531
1532 const MutableSpace* space = _space_info[id].space();
1533 if (_space_info[id].new_top() != space->bottom()) {
1534 HeapWord* dense_prefix_end = compute_dense_prefix(id, maximum_compaction);
1535 _space_info[id].set_dense_prefix(dense_prefix_end);
1536
1537 #ifndef PRODUCT
2024 if (new_young_size < young_gen->min_gen_size()) {
2025 return false; // Respect young gen minimum size.
2026 }
2027
2028 log_trace(gc, ergo, heap)(" absorbing " SIZE_FORMAT "K: "
2029 "eden " SIZE_FORMAT "K->" SIZE_FORMAT "K "
2030 "from " SIZE_FORMAT "K, to " SIZE_FORMAT "K "
2031 "young_gen " SIZE_FORMAT "K->" SIZE_FORMAT "K ",
2032 absorb_size / K,
2033 eden_capacity / K, (eden_capacity - absorb_size) / K,
2034 young_gen->from_space()->used_in_bytes() / K,
2035 young_gen->to_space()->used_in_bytes() / K,
2036 young_gen->capacity_in_bytes() / K, new_young_size / K);
2037
2038 // Fill the unused part of the old gen.
2039 MutableSpace* const old_space = old_gen->object_space();
2040 HeapWord* const unused_start = old_space->top();
2041 size_t const unused_words = pointer_delta(old_space->end(), unused_start);
2042
2043 if (unused_words > 0) {
2044 if (unused_words < CollectedHeap::min_fill_size()) {
2045 return false; // If the old gen cannot be filled, must give up.
2046 }
2047 CollectedHeap::fill_with_objects(unused_start, unused_words);
2048 }
2049
2050 // Take the live data from eden and set both top and end in the old gen to
2051 // eden top. (Need to set end because reset_after_change() mangles the region
2052 // from end to virtual_space->high() in debug builds).
2053 HeapWord* const new_top = eden_space->top();
2054 old_gen->virtual_space()->expand_into(young_gen->virtual_space(),
2055 absorb_size);
2056 young_gen->reset_after_change();
2057 old_space->set_top(new_top);
2058 old_space->set_end(new_top);
2059 old_gen->reset_after_change();
2060
2061 // Update the object start array for the filler object and the data from eden.
2062 ObjectStartArray* const start_array = old_gen->start_array();
2063 for (HeapWord* p = unused_start; p < new_top; p += oop(p)->size()) {
2064 start_array->allocate_block(p);
2065 }
2066
2067 // Could update the promoted average here, but it is not typically updated at
3137 {
3138 }
3139
3140 // Updates the references in the object at addr to their new values; the
3141 // object's word count is not needed for the update itself.
ParMarkBitMapClosure::IterationStatus
3142 UpdateOnlyClosure::do_addr(HeapWord* addr, size_t words) {
3143   do_addr(addr); // delegate to the single-address overload; 'words' is unused
3144   return ParMarkBitMap::incomplete; // never abort the bitmap iteration early
3145 }
3146 // Closure that plugs dead (unmarked) ranges with filler objects and records
3147 FillClosure::FillClosure(ParCompactionManager* cm, PSParallelCompact::SpaceId space_id) :
3148   ParMarkBitMapClosure(PSParallelCompact::mark_bitmap(), cm),
3149   _start_array(PSParallelCompact::start_array(space_id)) // start array of the target space
3150 {
3151   assert(space_id == PSParallelCompact::old_space_id,
3152          "cannot use FillClosure in the young gen");
3153 }
3154
3155 ParMarkBitMapClosure::IterationStatus
3156 FillClosure::do_addr(HeapWord* addr, size_t size) {
3157   CollectedHeap::fill_with_objects(addr, size);          // plug the dead range with fillers
3158   HeapWord* cur = addr; HeapWord* const limit = addr + size;
3159   do {                                                   // at least one filler was created
3160     _start_array->allocate_block(cur);                   // record each filler's start
3161     cur += oop(cur)->size();
3162   } while (cur < limit);
3163   return ParMarkBitMap::incomplete;                      // always continue the iteration
3164 }
|
24
25 #include "precompiled.hpp"
26 #include "aot/aotLoader.hpp"
27 #include "classfile/classLoaderDataGraph.hpp"
28 #include "classfile/javaClasses.inline.hpp"
29 #include "classfile/stringTable.hpp"
30 #include "classfile/symbolTable.hpp"
31 #include "classfile/systemDictionary.hpp"
32 #include "code/codeCache.hpp"
33 #include "gc/parallel/gcTaskManager.hpp"
34 #include "gc/parallel/parallelScavengeHeap.inline.hpp"
35 #include "gc/parallel/parMarkBitMap.inline.hpp"
36 #include "gc/parallel/pcTasks.hpp"
37 #include "gc/parallel/psAdaptiveSizePolicy.hpp"
38 #include "gc/parallel/psCompactionManager.inline.hpp"
39 #include "gc/parallel/psOldGen.hpp"
40 #include "gc/parallel/psParallelCompact.inline.hpp"
41 #include "gc/parallel/psPromotionManager.inline.hpp"
42 #include "gc/parallel/psScavenge.hpp"
43 #include "gc/parallel/psYoungGen.hpp"
44 #include "gc/shared/fill.hpp"
45 #include "gc/shared/gcCause.hpp"
46 #include "gc/shared/gcHeapSummary.hpp"
47 #include "gc/shared/gcId.hpp"
48 #include "gc/shared/gcLocker.hpp"
49 #include "gc/shared/gcTimer.hpp"
50 #include "gc/shared/gcTrace.hpp"
51 #include "gc/shared/gcTraceTime.inline.hpp"
52 #include "gc/shared/isGCActiveMark.hpp"
53 #include "gc/shared/referencePolicy.hpp"
54 #include "gc/shared/referenceProcessor.hpp"
55 #include "gc/shared/referenceProcessorPhaseTimes.hpp"
56 #include "gc/shared/spaceDecorator.hpp"
57 #include "gc/shared/weakProcessor.hpp"
58 #include "logging/log.hpp"
59 #include "memory/iterator.inline.hpp"
60 #include "memory/resourceArea.hpp"
61 #include "oops/access.inline.hpp"
62 #include "oops/instanceClassLoaderKlass.inline.hpp"
63 #include "oops/instanceKlass.inline.hpp"
64 #include "oops/instanceMirrorKlass.inline.hpp"
1481 //
1482 // In the 64-bit VM, each bit represents one 64-bit word:
1483 // +------------+
1484 // b) beg_bits: ... x x x | 0 || 0 | x x ...
1485 // end_bits: ... x x 1 | 0 || 0 | x x ...
1486 // +------------+
1487 // +-------+
1488 // c) beg_bits: ... x x | 0 0 | || 0 x x ...
1489 // end_bits: ... x 1 | 0 0 | || 0 x x ...
1490 // +-------+
1491 // +-----------+
1492 // d) beg_bits: ... x | 0 0 0 | || 0 x x ...
1493 // end_bits: ... 1 | 0 0 0 | || 0 x x ...
1494 // +-----------+
1495 // +-------+
1496 // e) beg_bits: ... 0 0 | 0 0 | || 0 x x ...
1497 // end_bits: ... 0 0 | 0 0 | || 0 x x ...
1498 // +-------+
1499
1500 // Initially assume case a, c or e will apply.
1501 size_t obj_len = Fill::min_size();
1502 HeapWord* obj_beg = dense_prefix_end - obj_len;
1503
1504 #ifdef _LP64
1505 if (MinObjAlignment > 1) { // object alignment > heap word size
1506 // Cases a, c or e.
1507 } else if (_mark_bitmap.is_obj_end(dense_prefix_bit - 2)) {
1508 // Case b above.
1509 obj_beg = dense_prefix_end - 1;
1510 } else if (!_mark_bitmap.is_obj_end(dense_prefix_bit - 3) &&
1511 _mark_bitmap.is_obj_end(dense_prefix_bit - 4)) {
1512 // Case d above.
1513 obj_beg = dense_prefix_end - 3;
1514 obj_len = 3;
1515 }
1516 #endif // #ifdef _LP64
1517
1518 Fill::range(obj_beg, obj_len);
1519 _mark_bitmap.mark_obj(obj_beg, obj_len);
1520 _summary_data.add_obj(obj_beg, obj_len);
1521 assert(start_array(id) != NULL, "sanity");
1522 start_array(id)->allocate_block(obj_beg);
1523 }
1524 }
1525
1526 void
1527 PSParallelCompact::summarize_space(SpaceId id, bool maximum_compaction)
1528 {
1529 assert(id < last_space_id, "id out of range");
1530 assert(_space_info[id].dense_prefix() == _space_info[id].space()->bottom(),
1531 "should have been reset in summarize_spaces_quick()");
1532
1533 const MutableSpace* space = _space_info[id].space();
1534 if (_space_info[id].new_top() != space->bottom()) {
1535 HeapWord* dense_prefix_end = compute_dense_prefix(id, maximum_compaction);
1536 _space_info[id].set_dense_prefix(dense_prefix_end);
1537
1538 #ifndef PRODUCT
2025 if (new_young_size < young_gen->min_gen_size()) {
2026 return false; // Respect young gen minimum size.
2027 }
2028
2029 log_trace(gc, ergo, heap)(" absorbing " SIZE_FORMAT "K: "
2030 "eden " SIZE_FORMAT "K->" SIZE_FORMAT "K "
2031 "from " SIZE_FORMAT "K, to " SIZE_FORMAT "K "
2032 "young_gen " SIZE_FORMAT "K->" SIZE_FORMAT "K ",
2033 absorb_size / K,
2034 eden_capacity / K, (eden_capacity - absorb_size) / K,
2035 young_gen->from_space()->used_in_bytes() / K,
2036 young_gen->to_space()->used_in_bytes() / K,
2037 young_gen->capacity_in_bytes() / K, new_young_size / K);
2038
2039 // Fill the unused part of the old gen.
2040 MutableSpace* const old_space = old_gen->object_space();
2041 HeapWord* const unused_start = old_space->top();
2042 size_t const unused_words = pointer_delta(old_space->end(), unused_start);
2043
2044 if (unused_words > 0) {
2045 if (unused_words < Fill::min_size()) {
2046 return false; // If the old gen cannot be filled, must give up.
2047 }
2048 Fill::range(unused_start, unused_words);
2049 }
2050
2051 // Take the live data from eden and set both top and end in the old gen to
2052 // eden top. (Need to set end because reset_after_change() mangles the region
2053 // from end to virtual_space->high() in debug builds).
2054 HeapWord* const new_top = eden_space->top();
2055 old_gen->virtual_space()->expand_into(young_gen->virtual_space(),
2056 absorb_size);
2057 young_gen->reset_after_change();
2058 old_space->set_top(new_top);
2059 old_space->set_end(new_top);
2060 old_gen->reset_after_change();
2061
2062 // Update the object start array for the filler object and the data from eden.
2063 ObjectStartArray* const start_array = old_gen->start_array();
2064 for (HeapWord* p = unused_start; p < new_top; p += oop(p)->size()) {
2065 start_array->allocate_block(p);
2066 }
2067
2068 // Could update the promoted average here, but it is not typically updated at
3138 {
3139 }
3140
3141 // Updates the references in the object at addr to their new values; the
3142 // object's word count is not needed for the update itself.
ParMarkBitMapClosure::IterationStatus
3143 UpdateOnlyClosure::do_addr(HeapWord* addr, size_t words) {
3144   do_addr(addr); // delegate to the single-address overload; 'words' is unused
3145   return ParMarkBitMap::incomplete; // never abort the bitmap iteration early
3146 }
3147 // Closure that plugs dead (unmarked) ranges with filler objects and records
3148 FillClosure::FillClosure(ParCompactionManager* cm, PSParallelCompact::SpaceId space_id) :
3149   ParMarkBitMapClosure(PSParallelCompact::mark_bitmap(), cm),
3150   _start_array(PSParallelCompact::start_array(space_id)) // start array of the target space
3151 {
3152   assert(space_id == PSParallelCompact::old_space_id,
3153          "cannot use FillClosure in the young gen");
3154 }
3155
3156 ParMarkBitMapClosure::IterationStatus
3157 FillClosure::do_addr(HeapWord* addr, size_t size) {
3158   Fill::range(addr, size);                               // plug the dead range with fillers
3159   HeapWord* cur = addr; HeapWord* const limit = addr + size;
3160   do {                                                   // at least one filler was created
3161     _start_array->allocate_block(cur);                   // record each filler's start
3162     cur += oop(cur)->size();
3163   } while (cur < limit);
3164   return ParMarkBitMap::incomplete;                      // always continue the iteration
3165 }
|