
src/hotspot/share/gc/shared/collectedHeap.cpp

tracing breakage fix
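
The two code blocks below are the old and new versions of the affected region of collectedHeap.cpp (old first, new second); check_for_valid_allocation_state and allocate_from_tlab_slow are unchanged context, the change is in obj_allocate_raw. The old slow path returned the result of Universe::heap()->mem_allocate() directly, so an allocation that fell outside the TLAB was never reported to AllocTracer and the owning thread's allocated-bytes counter was not updated. The new version keeps the result, returns early on failure, checks the heap words and pending-exception state, calls incr_allocated_bytes(), and only then sends the outside-TLAB allocation event. The standalone sketch below is only a minimal model of that control flow: the Mock* stubs, the HeapWord typedef and the main() driver are illustrative placeholders, not HotSpot code; only the identifiers that also appear in the diff are real.

#include <cstddef>
#include <cstdio>

typedef unsigned long HeapWord;                      // stand-in for HotSpot's HeapWord
static const size_t HeapWordSize = sizeof(HeapWord);

// Illustrative stubs; the real Thread, AllocTracer and heap live in the VM.
struct MockThread {
  size_t allocated_bytes;
  void incr_allocated_bytes(size_t bytes) { allocated_bytes += bytes; }
};

struct MockTracer {
  static void send_allocation_outside_tlab(HeapWord* obj, size_t bytes) {
    std::printf("outside-TLAB allocation: %p, %zu bytes\n", (void*)obj, bytes);
  }
};

struct MockHeap {
  HeapWord* allocate_from_tlab(size_t) { return NULL; }       // force the slow path
  HeapWord* mem_allocate(size_t size) { return new HeapWord[size]; }

  // Mirrors the fixed obj_allocate_raw: the event is sent only after a
  // successful allocation outside the TLAB, just before the result is returned.
  HeapWord* obj_allocate_raw(MockThread* thread, size_t size) {
    HeapWord* result = allocate_from_tlab(size);
    if (result != NULL) {
      return result;                                 // TLAB hit: no outside-TLAB event
    }
    result = mem_allocate(size);
    if (result == NULL) {
      return result;                                 // allocation failed: nothing to trace
    }
    thread->incr_allocated_bytes(size * HeapWordSize);
    MockTracer::send_allocation_outside_tlab(result, size * HeapWordSize);
    return result;
  }
};

int main() {
  MockHeap heap;
  MockThread thread = { 0 };
  HeapWord* obj = heap.obj_allocate_raw(&thread, 8);
  std::printf("thread allocated bytes: %zu\n", thread.allocated_bytes);
  delete[] obj;
  return 0;
}
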

349 void CollectedHeap::check_for_valid_allocation_state() {                                                                             
350   Thread *thread = Thread::current();                                                                                                
351   // How to choose between a pending exception and a potential                                                                       
352   // OutOfMemoryError?  Don't allow pending exceptions.                                                                              
353   // This is a VM policy failure, so how do we exhaustively test it?                                                                 
354   assert(!thread->has_pending_exception(),                                                                                           
355          "shouldn't be allocating with pending exception");                                                                          
356   if (StrictSafepointChecks) {                                                                                                       
357     assert(thread->allow_allocation(),                                                                                               
358            "Allocation done by thread for which allocation is blocked "                                                              
359            "by No_Allocation_Verifier!");                                                                                            
360     // Allocation of an oop can always invoke a safepoint,                                                                           
361     // hence, the true argument                                                                                                      
362     thread->check_for_valid_safepoint_state(true);                                                                                   
363   }                                                                                                                                  
364 }                                                                                                                                    
365 #endif                                                                                                                               
366 
367 HeapWord* CollectedHeap::obj_allocate_raw(Klass* klass, size_t size,                                                                 
368                                           bool* gc_overhead_limit_was_exceeded, TRAPS) {                                             
369   if (UseTLAB) {                                                                                                                     
370     HeapWord* result = allocate_from_tlab(klass, size, THREAD);                                                                      
371     if (result != NULL) {                                                                                                            
372       return result;                                                                                                                 
373     }                                                                                                                                
374   }                                                                                                                                  
375   return Universe::heap()->mem_allocate(size, gc_overhead_limit_was_exceeded);                                                       
376 }                                                                                                                                    
377 
378 HeapWord* CollectedHeap::allocate_from_tlab_slow(Klass* klass, size_t size, TRAPS) {                                                 
379   ThreadLocalAllocBuffer& tlab = THREAD->tlab();                                                                                     
380 
381   // Retain tlab and allocate object in shared space if                                                                              
382   // the amount free in the tlab is too large to discard.                                                                            
383   if (tlab.free() > tlab.refill_waste_limit()) {                                                                                     
384     tlab.record_slow_allocation(size);                                                                                               
385     return NULL;                                                                                                                     
386   }                                                                                                                                  
387 
388   // Discard tlab and allocate a new one.                                                                                            
389   // To minimize fragmentation, the last TLAB may be smaller than the rest.                                                          
390   size_t new_tlab_size = tlab.compute_size(size);                                                                                    
391 
392   tlab.clear_before_allocation();                                                                                                    
393 
394   if (new_tlab_size == 0) {                                                                                                          

349 void CollectedHeap::check_for_valid_allocation_state() {
350   Thread *thread = Thread::current();
351   // How to choose between a pending exception and a potential
352   // OutOfMemoryError?  Don't allow pending exceptions.
353   // This is a VM policy failure, so how do we exhaustively test it?
354   assert(!thread->has_pending_exception(),
355          "shouldn't be allocating with pending exception");
356   if (StrictSafepointChecks) {
357     assert(thread->allow_allocation(),
358            "Allocation done by thread for which allocation is blocked "
359            "by No_Allocation_Verifier!");
360     // Allocation of an oop can always invoke a safepoint,
361     // hence, the true argument
362     thread->check_for_valid_safepoint_state(true);
363   }
364 }
365 #endif
366 
367 HeapWord* CollectedHeap::obj_allocate_raw(Klass* klass, size_t size,
368                                           bool* gc_overhead_limit_was_exceeded, TRAPS) {
369   HeapWord* result = NULL;
370   if (UseTLAB) {
371     result = allocate_from_tlab(klass, size, THREAD);
372     if (result != NULL) {
373       return result;
374     }
375   }
376   result = Universe::heap()->mem_allocate(size, gc_overhead_limit_was_exceeded);
377   if (result == NULL) {
378     return result;
379   }
380 
381   NOT_PRODUCT(Universe::heap()->check_for_non_bad_heap_word_value(result, size));
382   assert(!HAS_PENDING_EXCEPTION,
383          "Unexpected exception, will result in uninitialized storage");
384   THREAD->incr_allocated_bytes(size * HeapWordSize);
385 
386   AllocTracer::send_allocation_outside_tlab(klass, result, size * HeapWordSize, THREAD);
387   return result;
388 }
389 
390 HeapWord* CollectedHeap::allocate_from_tlab_slow(Klass* klass, size_t size, TRAPS) {
391   ThreadLocalAllocBuffer& tlab = THREAD->tlab();
392 
393   // Retain tlab and allocate object in shared space if
394   // the amount free in the tlab is too large to discard.
395   if (tlab.free() > tlab.refill_waste_limit()) {
396     tlab.record_slow_allocation(size);
397     return NULL;
398   }
399 
400   // Discard tlab and allocate a new one.
401   // To minimize fragmentation, the last TLAB may be smaller than the rest.
402   size_t new_tlab_size = tlab.compute_size(size);
403 
404   tlab.clear_before_allocation();
405 
406   if (new_tlab_size == 0) {