
src/share/vm/gc_implementation/parallelScavenge/psYoungGen.cpp

rev 8068 : imported patch parallelscavenge_cleanup

*** 60,70 ****
    _reserved = MemRegion((HeapWord*)virtual_space()->low_boundary(),
                          (HeapWord*)virtual_space()->high_boundary());
  
    MemRegion cmr((HeapWord*)virtual_space()->low(),
                  (HeapWord*)virtual_space()->high());
!   Universe::heap()->barrier_set()->resize_covered_region(cmr);
  
    if (ZapUnusedHeapArea) {
      // Mangle newly committed space immediately because it
      // can be done here more simply that after the new
      // spaces have been computed.
--- 60,70 ----
    _reserved = MemRegion((HeapWord*)virtual_space()->low_boundary(),
                          (HeapWord*)virtual_space()->high_boundary());
  
    MemRegion cmr((HeapWord*)virtual_space()->low(),
                  (HeapWord*)virtual_space()->high());
!   ParallelScavengeHeap::heap()->barrier_set()->resize_covered_region(cmr);
  
    if (ZapUnusedHeapArea) {
      // Mangle newly committed space immediately because it
      // can be done here more simply that after the new
      // spaces have been computed.
*** 101,111 ****
    // Generation Counters - generation 0, 3 subspaces
    _gen_counters = new PSGenerationCounters("new", 0, 3, _min_gen_size,
                                             _max_gen_size, _virtual_space);
  
    // Compute maximum space sizes for performance counters
!   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
    size_t alignment = heap->space_alignment();
    size_t size = virtual_space()->reserved_size();
  
    size_t max_survivor_size;
    size_t max_eden_size;
--- 101,111 ----
    // Generation Counters - generation 0, 3 subspaces
    _gen_counters = new PSGenerationCounters("new", 0, 3, _min_gen_size,
                                             _max_gen_size, _virtual_space);
  
    // Compute maximum space sizes for performance counters
!   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
    size_t alignment = heap->space_alignment();
    size_t size = virtual_space()->reserved_size();
  
    size_t max_survivor_size;
    size_t max_eden_size;
*** 151,162 ****
  
    compute_initial_space_boundaries();
  }
  
  void PSYoungGen::compute_initial_space_boundaries() {
!   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
!   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
  
    // Compute sizes
    size_t alignment = heap->space_alignment();
    size_t size = virtual_space()->committed_size();
    assert(size >= 3 * alignment, "Young space is not large enough for eden + 2 survivors");
--- 151,161 ----
  
    compute_initial_space_boundaries();
  }
  
  void PSYoungGen::compute_initial_space_boundaries() {
!   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
  
    // Compute sizes
    size_t alignment = heap->space_alignment();
    size_t size = virtual_space()->committed_size();
    assert(size >= 3 * alignment, "Young space is not large enough for eden + 2 survivors");
*** 206,216 ****
    from_space()->initialize(from_mr, true, ZapUnusedHeapArea);
  }
  
  #ifndef PRODUCT
  void PSYoungGen::space_invariants() {
!   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
    const size_t alignment = heap->space_alignment();
  
    // Currently, our eden size cannot shrink to zero
    guarantee(eden_space()->capacity_in_bytes() >= alignment, "eden too small");
    guarantee(from_space()->capacity_in_bytes() >= alignment, "from too small");
--- 205,215 ----
    from_space()->initialize(from_mr, true, ZapUnusedHeapArea);
  }
  
  #ifndef PRODUCT
  void PSYoungGen::space_invariants() {
!   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
    const size_t alignment = heap->space_alignment();
  
    // Currently, our eden size cannot shrink to zero
    guarantee(eden_space()->capacity_in_bytes() >= alignment, "eden too small");
    guarantee(from_space()->capacity_in_bytes() >= alignment, "from too small");
*** 492,502 ****
    char* from_start = (char*)from_space()->bottom();
    char* from_end = (char*)from_space()->end();
    char* to_start = (char*)to_space()->bottom();
    char* to_end = (char*)to_space()->end();
  
!   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
    const size_t alignment = heap->space_alignment();
    const bool maintain_minimum =
      (requested_eden_size + 2 * requested_survivor_size) <= min_gen_size();
  
    bool eden_from_to_order = from_start < to_start;
--- 491,501 ----
    char* from_start = (char*)from_space()->bottom();
    char* from_end = (char*)from_space()->end();
    char* to_start = (char*)to_space()->bottom();
    char* to_end = (char*)to_space()->end();
  
!   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
    const size_t alignment = heap->space_alignment();
    const bool maintain_minimum =
      (requested_eden_size + 2 * requested_survivor_size) <= min_gen_size();
  
    bool eden_from_to_order = from_start < to_start;
*** 544,555 ****
        to_start = (char*)pointer_delta(to_end, (char*)requested_survivor_size,
                                        sizeof(char));
  
      // Does the optimal to-space overlap from-space?
      if (to_start < (char*)from_space()->end()) {
-       assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
-
        // Calculate the minimum offset possible for from_end
        size_t from_size = pointer_delta(from_space()->top(), from_start, sizeof(char));
  
        // Should we be in this method if from_space is empty? Why not the set_space method? FIX ME!
        if (from_size == 0) {
--- 543,552 ----
*** 706,718 ****
                               SpaceDecorator::DontMangle);
  
    assert(from_space()->top() == old_from_top, "from top changed!");
  
    if (PrintAdaptiveSizePolicy) {
!     ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
!     assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
!
      gclog_or_tty->print("AdaptiveSizePolicy::survivor space sizes: "
                          "collection: %d "
                          "(" SIZE_FORMAT ", " SIZE_FORMAT ") -> "
                          "(" SIZE_FORMAT ", " SIZE_FORMAT ") ",
                          heap->total_collections(),
--- 703,713 ----
                               SpaceDecorator::DontMangle);
  
    assert(from_space()->top() == old_from_top, "from top changed!");
  
    if (PrintAdaptiveSizePolicy) {
!     ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
      gclog_or_tty->print("AdaptiveSizePolicy::survivor space sizes: "
                          "collection: %d "
                          "(" SIZE_FORMAT ", " SIZE_FORMAT ") -> "
                          "(" SIZE_FORMAT ", " SIZE_FORMAT ") ",
                          heap->total_collections(),
*** 841,851 ****
  // This method assumes that from-space has live data and that
  // any shrinkage of the young gen is limited by location of
  // from-space.
  size_t PSYoungGen::available_to_live() {
    size_t delta_in_survivor = 0;
!   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
    const size_t space_alignment = heap->space_alignment();
    const size_t gen_alignment = heap->generation_alignment();
  
    MutableSpace* space_shrinking = NULL;
    if (from_space()->end() > to_space()->end()) {
--- 836,846 ----
  // This method assumes that from-space has live data and that
  // any shrinkage of the young gen is limited by location of
  // from-space.
  size_t PSYoungGen::available_to_live() {
    size_t delta_in_survivor = 0;
!   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
    const size_t space_alignment = heap->space_alignment();
    const size_t gen_alignment = heap->generation_alignment();
  
    MutableSpace* space_shrinking = NULL;
    if (from_space()->end() > to_space()->end()) {
*** 925,935 ****
           (eden_space()->bottom() < from_space()->bottom()),
           "Eden is assumed to be below the survivor spaces");
  
    MemRegion cmr((HeapWord*)virtual_space()->low(),
                  (HeapWord*)virtual_space()->high());
!   Universe::heap()->barrier_set()->resize_covered_region(cmr);
  
    space_invariants();
  }
  
  
--- 920,930 ----
           (eden_space()->bottom() < from_space()->bottom()),
           "Eden is assumed to be below the survivor spaces");
  
    MemRegion cmr((HeapWord*)virtual_space()->low(),
                  (HeapWord*)virtual_space()->high());
!   ParallelScavengeHeap::heap()->barrier_set()->resize_covered_region(cmr);
  
    space_invariants();
  }
  
  
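
Every hunk in this patch makes the same substitution: the open-coded (ParallelScavengeHeap*)Universe::heap() cast, together with its heap->kind() sanity assert where one was present, is replaced by the typed ParallelScavengeHeap::heap() accessor, which centralizes the cast and the kind check. As a rough sketch only (the real accessor is declared in parallelScavengeHeap.hpp and its exact body may differ), the pattern looks like this:

  // Sketch of a typed heap accessor; assumed shape, not the exact HotSpot code.
  ParallelScavengeHeap* ParallelScavengeHeap::heap() {
    CollectedHeap* heap = Universe::heap();
    assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Invalid heap kind");
    return (ParallelScavengeHeap*)heap;
  }

Call sites then collapse from a cast plus an assert to a single line:

  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();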