380 cp->space = cp->space->next_compaction_space();
381 if (cp->space == NULL) {
382 cp->gen = GenCollectedHeap::heap()->young_gen();
383 assert(cp->gen != NULL, "compaction must succeed");
384 cp->space = cp->gen->first_compaction_space();
385 assert(cp->space != NULL, "generation must have a first compaction space");
386 }
387 compact_top = cp->space->bottom();
388 cp->space->set_compaction_top(compact_top);
389 cp->threshold = cp->space->initialize_threshold();
390 compaction_max_size = pointer_delta(cp->space->end(), compact_top);
391 }
392
393 // store the forwarding pointer into the mark word
394 if ((HeapWord*)q != compact_top) {
395 q->forward_to(oop(compact_top));
396 assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
397 } else {
398 // if the object isn't moving we can just set the mark to the default
399 // mark and handle it specially later on.
400 q->init_mark();
401 assert(q->forwardee() == NULL, "should be forwarded to NULL");
402 }
403
404 compact_top += size;
405
406 // we need to update the offset table so that the beginnings of objects can be
407 // found during scavenge. Note that we are updating the offset table based on
408 // where the object will be once the compaction phase finishes.
409 if (compact_top > cp->threshold)
410 cp->threshold =
411 cp->space->cross_threshold(compact_top - size, compact_top);
412 return compact_top;
413 }
414
// Mark-compact phase 1 for a contiguous space: delegate to the shared
// scan_and_forward() driver, which walks the live objects and installs
// forwarding pointers. cp carries the destination space, compaction top,
// and offset-table threshold (see the forwarding code above).
void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  scan_and_forward(this, cp);
}
418
419 void CompactibleSpace::adjust_pointers() {
420 // Check first is there is any work to do.
678
679 void ContiguousSpace::allocate_temporary_filler(int factor) {
680 // allocate temporary type array decreasing free size with factor 'factor'
681 assert(factor >= 0, "just checking");
682 size_t size = pointer_delta(end(), top());
683
684 // if space is full, return
685 if (size == 0) return;
686
687 if (factor > 0) {
688 size -= size/factor;
689 }
690 size = align_object_size(size);
691
692 const size_t array_header_size = typeArrayOopDesc::header_size(T_INT);
693 if (size >= align_object_size(array_header_size)) {
694 size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
695 // allocate uninitialized int array
696 typeArrayOop t = (typeArrayOop) allocate(size);
697 assert(t != NULL, "allocation should succeed");
698 t->set_mark(markOopDesc::prototype());
699 t->set_klass(Universe::intArrayKlassObj());
700 t->set_length((int)length);
701 } else {
702 assert(size == CollectedHeap::min_fill_size(),
703 "size for smallest fake object doesn't match");
704 instanceOop obj = (instanceOop) allocate(size);
705 obj->set_mark(markOopDesc::prototype());
706 obj->set_klass_gap(0);
707 obj->set_klass(SystemDictionary::Object_klass());
708 }
709 }
710
// Reset the block-offset table's threshold and return it. Callers (e.g. the
// compaction code above) invoke cross_threshold() once an allocation's end
// passes the returned address.
HeapWord* OffsetTableContigSpace::initialize_threshold() {
  return _offsets.initialize_threshold();
}
714
// Record the block [start, end) in the block-offset table and return the
// table's updated threshold — the next address whose crossing requires
// another call to this function.
HeapWord* OffsetTableContigSpace::cross_threshold(HeapWord* start, HeapWord* end) {
  _offsets.alloc_block(start, end);
  return _offsets.threshold();
}
719
720 OffsetTableContigSpace::OffsetTableContigSpace(BlockOffsetSharedArray* sharedOffsetArray,
721 MemRegion mr) :
722 _offsets(sharedOffsetArray, mr),
723 _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true)
724 {
725 _offsets.set_contig_space(this);
|
380 cp->space = cp->space->next_compaction_space();
381 if (cp->space == NULL) {
382 cp->gen = GenCollectedHeap::heap()->young_gen();
383 assert(cp->gen != NULL, "compaction must succeed");
384 cp->space = cp->gen->first_compaction_space();
385 assert(cp->space != NULL, "generation must have a first compaction space");
386 }
387 compact_top = cp->space->bottom();
388 cp->space->set_compaction_top(compact_top);
389 cp->threshold = cp->space->initialize_threshold();
390 compaction_max_size = pointer_delta(cp->space->end(), compact_top);
391 }
392
393 // store the forwarding pointer into the mark word
394 if ((HeapWord*)q != compact_top) {
395 q->forward_to(oop(compact_top));
396 assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
397 } else {
398 // if the object isn't moving we can just set the mark to the default
399 // mark and handle it specially later on.
400 q->init_mark_raw();
401 assert(q->forwardee() == NULL, "should be forwarded to NULL");
402 }
403
404 compact_top += size;
405
406 // we need to update the offset table so that the beginnings of objects can be
407 // found during scavenge. Note that we are updating the offset table based on
408 // where the object will be once the compaction phase finishes.
409 if (compact_top > cp->threshold)
410 cp->threshold =
411 cp->space->cross_threshold(compact_top - size, compact_top);
412 return compact_top;
413 }
414
// Mark-compact phase 1 for a contiguous space: delegate to the shared
// scan_and_forward() driver, which walks the live objects and installs
// forwarding pointers. cp carries the destination space, compaction top,
// and offset-table threshold (see the forwarding code above).
void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  scan_and_forward(this, cp);
}
418
419 void CompactibleSpace::adjust_pointers() {
420 // Check first is there is any work to do.
678
679 void ContiguousSpace::allocate_temporary_filler(int factor) {
680 // allocate temporary type array decreasing free size with factor 'factor'
681 assert(factor >= 0, "just checking");
682 size_t size = pointer_delta(end(), top());
683
684 // if space is full, return
685 if (size == 0) return;
686
687 if (factor > 0) {
688 size -= size/factor;
689 }
690 size = align_object_size(size);
691
692 const size_t array_header_size = typeArrayOopDesc::header_size(T_INT);
693 if (size >= align_object_size(array_header_size)) {
694 size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
695 // allocate uninitialized int array
696 typeArrayOop t = (typeArrayOop) allocate(size);
697 assert(t != NULL, "allocation should succeed");
698 t->set_mark_raw(markOopDesc::prototype());
699 t->set_klass(Universe::intArrayKlassObj());
700 t->set_length((int)length);
701 } else {
702 assert(size == CollectedHeap::min_fill_size(),
703 "size for smallest fake object doesn't match");
704 instanceOop obj = (instanceOop) allocate(size);
705 obj->set_mark_raw(markOopDesc::prototype());
706 obj->set_klass_gap(0);
707 obj->set_klass(SystemDictionary::Object_klass());
708 }
709 }
710
// Reset the block-offset table's threshold and return it. Callers (e.g. the
// compaction code above) invoke cross_threshold() once an allocation's end
// passes the returned address.
HeapWord* OffsetTableContigSpace::initialize_threshold() {
  return _offsets.initialize_threshold();
}
714
// Record the block [start, end) in the block-offset table and return the
// table's updated threshold — the next address whose crossing requires
// another call to this function.
HeapWord* OffsetTableContigSpace::cross_threshold(HeapWord* start, HeapWord* end) {
  _offsets.alloc_block(start, end);
  return _offsets.threshold();
}
719
720 OffsetTableContigSpace::OffsetTableContigSpace(BlockOffsetSharedArray* sharedOffsetArray,
721 MemRegion mr) :
722 _offsets(sharedOffsetArray, mr),
723 _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true)
724 {
725 _offsets.set_contig_space(this);
|