< prev index next >

src/share/vm/gc/shared/collectedHeap.cpp

Print this page
rev 11777 : [mq]: gcinterface.patch


 282 HeapWord* CollectedHeap::allocate_from_tlab_slow(KlassHandle klass, Thread* thread, size_t size) {
 283 
 284   // Retain tlab and allocate object in shared space if
 285   // the amount free in the tlab is too large to discard.
 286   if (thread->tlab().free() > thread->tlab().refill_waste_limit()) {
 287     thread->tlab().record_slow_allocation(size);
 288     return NULL;
 289   }
 290 
 291   // Discard tlab and allocate a new one.
 292   // To minimize fragmentation, the last TLAB may be smaller than the rest.
 293   size_t new_tlab_size = thread->tlab().compute_size(size);
 294 
 295   thread->tlab().clear_before_allocation();
 296 
 297   if (new_tlab_size == 0) {
 298     return NULL;
 299   }
 300 
 301   // Allocate a new TLAB...
 302   HeapWord* obj = Universe::heap()->allocate_new_tlab(new_tlab_size);
 303   if (obj == NULL) {
 304     return NULL;
 305   }
 306 
 307   AllocTracer::send_allocation_in_new_tlab_event(klass, new_tlab_size * HeapWordSize, size * HeapWordSize);
 308 
 309   if (ZeroTLAB) {
 310     // ..and clear it.
 311     Copy::zero_to_words(obj, new_tlab_size);
 312   } else {
 313     // ...and zap just allocated object.
 314 #ifdef ASSERT
 315     // Skip mangling the space corresponding to the object header to
 316     // ensure that the returned space is not considered parsable by
 317     // any concurrent GC thread.
 318     size_t hdr_size = oopDesc::header_size();
 319     Copy::fill_to_words(obj + hdr_size, new_tlab_size - hdr_size, badHeapWordVal);
 320 #endif // ASSERT
 321   }
 322   thread->tlab().fill(obj, obj + size, new_tlab_size);


 424       assert(bs->has_write_region_opt(), "No write_region() on BarrierSet");
 425       bs->write_region(mr);
 426     }
 427   }
 428   return new_obj;
 429 }
 430 
// Size (in HeapWords) of the header of the int[] array used as a heap
// filler object, rounded up so the payload that follows is long-aligned.
size_t CollectedHeap::filler_array_hdr_size() {
  return size_t(align_object_offset(arrayOopDesc::header_size(T_INT))); // align to Long
}
 434 
// Smallest filler array that can be allocated: just the (aligned) header,
// rounded up to the minimum object alignment.
size_t CollectedHeap::filler_array_min_size() {
  return align_object_size(filler_array_hdr_size()); // align to MinObjAlignment
}
 438 
 439 #ifdef ASSERT
// Debug-only sanity checks for the fill_with_object/fill_with_objects
// entry points: the region must be big enough to hold a filler object,
// be a multiple of the minimum object alignment, and lie entirely
// within the reserved heap range.
void CollectedHeap::fill_args_check(HeapWord* start, size_t words)
{
  assert(words >= min_fill_size(), "too small to fill");
  assert(words % MinObjAlignment == 0, "unaligned size");
  assert(Universe::heap()->is_in_reserved(start), "not in heap");
  assert(Universe::heap()->is_in_reserved(start + words - 1), "not in heap");
}
 447 
// Debug-only: mangle the element area of a newly created filler array
// with a recognizable pattern so stale use of the space is easy to spot.
// The array header words are left intact.
void CollectedHeap::zap_filler_array(HeapWord* start, size_t words, bool zap)
{
  if (ZapFillerObjects && zap) {
    // Skip the header; only the payload is overwritten.
    Copy::fill_to_words(start + filler_array_hdr_size(),
                        words - filler_array_hdr_size(), 0XDEAFBABE);
  }
}
 455 #endif // ASSERT
 456 
 457 void
 458 CollectedHeap::fill_with_array(HeapWord* start, size_t words, bool zap)
 459 {
 460   assert(words >= filler_array_min_size(), "too small for an array");
 461   assert(words <= filler_array_max_size(), "too big for a single object");
 462 
 463   const size_t payload_size = words - filler_array_hdr_size();
 464   const size_t len = payload_size * HeapWordSize / sizeof(jint);
 465   assert((int)len >= 0, "size too large " SIZE_FORMAT " becomes %d", words, (int)len);


// Hook run before a full collection; forwards to full_gc_dump with
// "before" == true to select the pre-GC dump points.
void CollectedHeap::pre_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, true);
}
 592 
// Hook run after a full collection; forwards to full_gc_dump with
// "before" == false to select the post-GC dump points.
void CollectedHeap::post_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, false);
}
 596 
// Record the reserved [start, end) address range of the heap in _reserved.
// NOTE: the store order below is deliberate and must not be changed.
void CollectedHeap::initialize_reserved_region(HeapWord *start, HeapWord *end) {
  // It is important to do this in a way such that concurrent readers can't
  // temporarily think something is in the heap.  (Seen this happen in asserts.)
  _reserved.set_word_size(0);
  _reserved.set_start(start);
  _reserved.set_end(end);
}
 604 
 605 /////////////// Unit tests ///////////////
 606 
 607 #ifndef PRODUCT
 608 void CollectedHeap::test_is_in() {
 609   CollectedHeap* heap = Universe::heap();
 610 
 611   uintptr_t epsilon    = (uintptr_t) MinObjAlignment;
 612   uintptr_t heap_start = (uintptr_t) heap->_reserved.start();
 613   uintptr_t heap_end   = (uintptr_t) heap->_reserved.end();
 614 
 615   // Test that NULL is not in the heap.
 616   assert(!heap->is_in(NULL), "NULL is unexpectedly in the heap");
 617 
 618   // Test that a pointer to before the heap start is reported as outside the heap.
 619   assert(heap_start >= ((uintptr_t)NULL + epsilon), "sanity");
 620   void* before_heap = (void*)(heap_start - epsilon);
 621   assert(!heap->is_in(before_heap),
 622          "before_heap: " PTR_FORMAT " is unexpectedly in the heap", p2i(before_heap));
 623 
 624   // Test that a pointer to after the heap end is reported as outside the heap.
 625   assert(heap_end <= ((uintptr_t)-1 - epsilon), "sanity");
 626   void* after_heap = (void*)(heap_end + epsilon);
 627   assert(!heap->is_in(after_heap),
 628          "after_heap: " PTR_FORMAT " is unexpectedly in the heap", p2i(after_heap));
 629 }


 282 HeapWord* CollectedHeap::allocate_from_tlab_slow(KlassHandle klass, Thread* thread, size_t size) {
 283 
 284   // Retain tlab and allocate object in shared space if
 285   // the amount free in the tlab is too large to discard.
 286   if (thread->tlab().free() > thread->tlab().refill_waste_limit()) {
 287     thread->tlab().record_slow_allocation(size);
 288     return NULL;
 289   }
 290 
 291   // Discard tlab and allocate a new one.
 292   // To minimize fragmentation, the last TLAB may be smaller than the rest.
 293   size_t new_tlab_size = thread->tlab().compute_size(size);
 294 
 295   thread->tlab().clear_before_allocation();
 296 
 297   if (new_tlab_size == 0) {
 298     return NULL;
 299   }
 300 
 301   // Allocate a new TLAB...
 302   HeapWord* obj = GC::gc()->heap()->allocate_new_tlab(new_tlab_size);
 303   if (obj == NULL) {
 304     return NULL;
 305   }
 306 
 307   AllocTracer::send_allocation_in_new_tlab_event(klass, new_tlab_size * HeapWordSize, size * HeapWordSize);
 308 
 309   if (ZeroTLAB) {
 310     // ..and clear it.
 311     Copy::zero_to_words(obj, new_tlab_size);
 312   } else {
 313     // ...and zap just allocated object.
 314 #ifdef ASSERT
 315     // Skip mangling the space corresponding to the object header to
 316     // ensure that the returned space is not considered parsable by
 317     // any concurrent GC thread.
 318     size_t hdr_size = oopDesc::header_size();
 319     Copy::fill_to_words(obj + hdr_size, new_tlab_size - hdr_size, badHeapWordVal);
 320 #endif // ASSERT
 321   }
 322   thread->tlab().fill(obj, obj + size, new_tlab_size);


 424       assert(bs->has_write_region_opt(), "No write_region() on BarrierSet");
 425       bs->write_region(mr);
 426     }
 427   }
 428   return new_obj;
 429 }
 430 
// Size (in HeapWords) of the header of the int[] array used as a heap
// filler object, rounded up so the payload that follows is long-aligned.
size_t CollectedHeap::filler_array_hdr_size() {
  return size_t(align_object_offset(arrayOopDesc::header_size(T_INT))); // align to Long
}
 434 
// Smallest filler array that can be allocated: just the (aligned) header,
// rounded up to the minimum object alignment.
size_t CollectedHeap::filler_array_min_size() {
  return align_object_size(filler_array_hdr_size()); // align to MinObjAlignment
}
 438 
 439 #ifdef ASSERT
// Debug-only sanity checks for the fill_with_object/fill_with_objects
// entry points: the region must be big enough to hold a filler object,
// be a multiple of the minimum object alignment, and lie entirely
// within the reserved heap range.
void CollectedHeap::fill_args_check(HeapWord* start, size_t words)
{
  assert(words >= min_fill_size(), "too small to fill");
  assert(words % MinObjAlignment == 0, "unaligned size");
  assert(GC::gc()->heap()->is_in_reserved(start), "not in heap");
  assert(GC::gc()->heap()->is_in_reserved(start + words - 1), "not in heap");
}
 447 
// Debug-only: mangle the element area of a newly created filler array
// with a recognizable pattern so stale use of the space is easy to spot.
// The array header words are left intact.
void CollectedHeap::zap_filler_array(HeapWord* start, size_t words, bool zap)
{
  if (ZapFillerObjects && zap) {
    // Skip the header; only the payload is overwritten.
    Copy::fill_to_words(start + filler_array_hdr_size(),
                        words - filler_array_hdr_size(), 0XDEAFBABE);
  }
}
 455 #endif // ASSERT
 456 
 457 void
 458 CollectedHeap::fill_with_array(HeapWord* start, size_t words, bool zap)
 459 {
 460   assert(words >= filler_array_min_size(), "too small for an array");
 461   assert(words <= filler_array_max_size(), "too big for a single object");
 462 
 463   const size_t payload_size = words - filler_array_hdr_size();
 464   const size_t len = payload_size * HeapWordSize / sizeof(jint);
 465   assert((int)len >= 0, "size too large " SIZE_FORMAT " becomes %d", words, (int)len);


// Hook run before a full collection; forwards to full_gc_dump with
// "before" == true to select the pre-GC dump points.
void CollectedHeap::pre_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, true);
}
 592 
// Hook run after a full collection; forwards to full_gc_dump with
// "before" == false to select the post-GC dump points.
void CollectedHeap::post_full_gc_dump(GCTimer* timer) {
  full_gc_dump(timer, false);
}
 596 
// Record the reserved [start, end) address range of the heap in _reserved.
// NOTE: the store order below is deliberate and must not be changed.
void CollectedHeap::initialize_reserved_region(HeapWord *start, HeapWord *end) {
  // It is important to do this in a way such that concurrent readers can't
  // temporarily think something is in the heap.  (Seen this happen in asserts.)
  _reserved.set_word_size(0);
  _reserved.set_start(start);
  _reserved.set_end(end);
}
 604 
 605 /////////////// Unit tests ///////////////
 606 
 607 #ifndef PRODUCT
 608 void CollectedHeap::test_is_in() {
 609   CollectedHeap* heap = GC::gc()->heap();
 610 
 611   uintptr_t epsilon    = (uintptr_t) MinObjAlignment;
 612   uintptr_t heap_start = (uintptr_t) heap->_reserved.start();
 613   uintptr_t heap_end   = (uintptr_t) heap->_reserved.end();
 614 
 615   // Test that NULL is not in the heap.
 616   assert(!heap->is_in(NULL), "NULL is unexpectedly in the heap");
 617 
 618   // Test that a pointer to before the heap start is reported as outside the heap.
 619   assert(heap_start >= ((uintptr_t)NULL + epsilon), "sanity");
 620   void* before_heap = (void*)(heap_start - epsilon);
 621   assert(!heap->is_in(before_heap),
 622          "before_heap: " PTR_FORMAT " is unexpectedly in the heap", p2i(before_heap));
 623 
 624   // Test that a pointer to after the heap end is reported as outside the heap.
 625   assert(heap_end <= ((uintptr_t)-1 - epsilon), "sanity");
 626   void* after_heap = (void*)(heap_end + epsilon);
 627   assert(!heap->is_in(after_heap),
 628          "after_heap: " PTR_FORMAT " is unexpectedly in the heap", p2i(after_heap));
 629 }
< prev index next >