
src/hotspot/share/gc/shared/memAllocator.cpp

rev 52050 : 8211980: Remove ThreadHeapSampler enable/disable/enabled methods
Summary:
Reviewed-by:

old/src/hotspot/share/gc/shared/memAllocator.cpp:

 170   // How to choose between a pending exception and a potential
 171   // OutOfMemoryError?  Don't allow pending exceptions.
 172   // This is a VM policy failure, so how do we exhaustively test it?
 173   assert(!_thread->has_pending_exception(),
 174          "shouldn't be allocating with pending exception");
 175   if (StrictSafepointChecks) {
 176     assert(_thread->allow_allocation(),
 177            "Allocation done by thread for which allocation is blocked "
 178            "by No_Allocation_Verifier!");
 179     // Allocation of an oop can always invoke a safepoint,
 180     // hence, the true argument
 181     _thread->check_for_valid_safepoint_state(true);
 182   }
 183 }
 184 #endif
 185 
 186 void MemAllocator::Allocation::notify_allocation_jvmti_sampler() {
 187   // support for JVMTI VMObjectAlloc event (no-op if not enabled)
 188   JvmtiExport::vm_object_alloc_event_collector(obj());
 189 
 190   if (!ThreadHeapSampler::enabled()) {
 191     // Sampling disabled
 192     return;
 193   }
 194 
 195   if (!_allocated_outside_tlab && _allocated_tlab_size == 0 && !_tlab_end_reset_for_sample) {
 196     // Sample if it's a non-TLAB allocation, or a TLAB allocation that either refills the TLAB
 197     // or expands it due to taking a sampler induced slow path.
 198     return;
 199   }
 200 
 201   assert(JavaThread::current()->heap_sampler().add_sampling_collector(),
 202          "Should never return false.");
 203 
 204   // Only check if the sampler could actually sample something in this path.
 205   assert(!JvmtiExport::should_post_sampled_object_alloc() ||
 206          !JvmtiSampledObjectAllocEventCollector::object_alloc_is_safe_to_sample() ||
 207          _thread->heap_sampler().sampling_collector_present(),
 208          "Sampling collector not present.");
 209 
 210   if (JvmtiExport::should_post_sampled_object_alloc()) {
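
The triple-negative guard above is easy to misread. As a minimal, hypothetical C++ sketch (not the actual MemAllocator code; the Allocation fields are passed as plain parameters here), the decision reduces to: an allocation is a sampling candidate only if it bypassed the TLAB, caused a TLAB refill, or hit a TLAB end that had been reset for a sample point.

#include <cstddef>

// Hypothetical standalone restatement of the early-return guard in
// notify_allocation_jvmti_sampler(): plain in-TLAB fast-path allocations
// are never sampled.
static bool is_sampling_candidate(bool allocated_outside_tlab,
                                  size_t allocated_tlab_size,
                                  bool tlab_end_reset_for_sample) {
  return allocated_outside_tlab        // allocation went directly to the shared heap
      || allocated_tlab_size != 0      // allocation forced a TLAB refill
      || tlab_end_reset_for_sample;    // sampler-induced slow path restored the TLAB end
}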


 276   return mem;
 277 }
 278 
 279 HeapWord* MemAllocator::allocate_inside_tlab(Allocation& allocation) const {
 280   assert(UseTLAB, "should use UseTLAB");
 281 
 282   // Try allocating from an existing TLAB.
 283   HeapWord* mem = _thread->tlab().allocate(_word_size);
 284   if (mem != NULL) {
 285     return mem;
 286   }
 287 
 288   // Try refilling the TLAB and allocating the object in it.
 289   return allocate_inside_tlab_slow(allocation);
 290 }
 291 
 292 HeapWord* MemAllocator::allocate_inside_tlab_slow(Allocation& allocation) const {
 293   HeapWord* mem = NULL;
 294   ThreadLocalAllocBuffer& tlab = _thread->tlab();
 295 
 296   if (ThreadHeapSampler::enabled()) {
 297     // Try to allocate the sampled object from TLAB, it is possible a sample
 298     // point was put and the TLAB still has space.
 299     tlab.set_back_allocation_end();
 300     mem = tlab.allocate(_word_size);
 301     if (mem != NULL) {
 302       allocation._tlab_end_reset_for_sample = true;
 303       return mem;
 304     }
 305   }
 306 
 307   // Retain tlab and allocate object in shared space if
 308   // the amount free in the tlab is too large to discard.
 309   if (tlab.free() > tlab.refill_waste_limit()) {
 310     tlab.record_slow_allocation(_word_size);
 311     return NULL;
 312   }
 313 
 314   // Discard tlab and allocate a new one.
 315   // To minimize fragmentation, the last TLAB may be smaller than the rest.
 316   size_t new_tlab_size = tlab.compute_size(_word_size);
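
The retain-versus-discard choice in the slow path above is a single comparison against the refill waste limit. A hedged, self-contained sketch with toy names (not the HotSpot ThreadLocalAllocBuffer API) of that decision:

#include <cstddef>

enum class SlowPathAction { allocate_in_shared_heap, refill_tlab };

// Toy model of the heuristic above: if the space still free in the current TLAB
// exceeds the refill waste limit, keep the TLAB and satisfy this one request from
// the shared heap; otherwise retire the TLAB and allocate a fresh, appropriately
// sized one.
static SlowPathAction choose_slow_path(size_t tlab_free_words,
                                       size_t refill_waste_limit_words) {
  if (tlab_free_words > refill_waste_limit_words) {
    return SlowPathAction::allocate_in_shared_heap;  // too much space left to discard
  }
  return SlowPathAction::refill_tlab;                // acceptable waste; get a new TLAB
}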

new/src/hotspot/share/gc/shared/memAllocator.cpp:

 170   // How to choose between a pending exception and a potential
 171   // OutOfMemoryError?  Don't allow pending exceptions.
 172   // This is a VM policy failure, so how do we exhaustively test it?
 173   assert(!_thread->has_pending_exception(),
 174          "shouldn't be allocating with pending exception");
 175   if (StrictSafepointChecks) {
 176     assert(_thread->allow_allocation(),
 177            "Allocation done by thread for which allocation is blocked "
 178            "by No_Allocation_Verifier!");
 179     // Allocation of an oop can always invoke a safepoint,
 180     // hence, the true argument
 181     _thread->check_for_valid_safepoint_state(true);
 182   }
 183 }
 184 #endif
 185 
 186 void MemAllocator::Allocation::notify_allocation_jvmti_sampler() {
 187   // support for JVMTI VMObjectAlloc event (no-op if not enabled)
 188   JvmtiExport::vm_object_alloc_event_collector(obj());
 189 
 190   if (!JvmtiExport::should_post_sampled_object_alloc()) {
 191     // Sampling disabled
 192     return;
 193   }
 194 
 195   if (!_allocated_outside_tlab && _allocated_tlab_size == 0 && !_tlab_end_reset_for_sample) {
 196     // Sample if it's a non-TLAB allocation, or a TLAB allocation that either refills the TLAB
 197     // or expands it due to taking a sampler induced slow path.
 198     return;
 199   }
 200 
 201   assert(JavaThread::current()->heap_sampler().add_sampling_collector(),
 202          "Should never return false.");
 203 
 204   // Only check if the sampler could actually sample something in this path.
 205   assert(!JvmtiExport::should_post_sampled_object_alloc() ||
 206          !JvmtiSampledObjectAllocEventCollector::object_alloc_is_safe_to_sample() ||
 207          _thread->heap_sampler().sampling_collector_present(),
 208          "Sampling collector not present.");
 209 
 210   if (JvmtiExport::should_post_sampled_object_alloc()) {
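
This hunk differs from the old version above only in its early-out guard: with the ThreadHeapSampler enable/disable/enabled methods removed, whether the JVMTI SampledObjectAlloc event is enabled becomes the sole switch for the sampler path. A hypothetical, self-contained sketch (toy type, not HotSpot code) of the shape of that change:

// Toy stand-in for the real JvmtiExport query; illustrative only.
struct ToyJvmtiExport {
  static bool sampled_object_alloc_enabled;
  static bool should_post_sampled_object_alloc() { return sampled_object_alloc_enabled; }
};
bool ToyJvmtiExport::sampled_object_alloc_enabled = false;

static bool sampling_enabled() {
  // Before this change, a separate ThreadHeapSampler::enabled() flag was consulted here;
  // after it, the JVMTI event switch alone decides whether allocations may be sampled.
  return ToyJvmtiExport::should_post_sampled_object_alloc();
}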


 276   return mem;
 277 }
 278 
 279 HeapWord* MemAllocator::allocate_inside_tlab(Allocation& allocation) const {
 280   assert(UseTLAB, "should use UseTLAB");
 281 
 282   // Try allocating from an existing TLAB.
 283   HeapWord* mem = _thread->tlab().allocate(_word_size);
 284   if (mem != NULL) {
 285     return mem;
 286   }
 287 
 288   // Try refilling the TLAB and allocating the object in it.
 289   return allocate_inside_tlab_slow(allocation);
 290 }
 291 
 292 HeapWord* MemAllocator::allocate_inside_tlab_slow(Allocation& allocation) const {
 293   HeapWord* mem = NULL;
 294   ThreadLocalAllocBuffer& tlab = _thread->tlab();
 295 
 296   if (JvmtiExport::should_post_sampled_object_alloc()) {
 297     // Try to allocate the sampled object from TLAB, it is possible a sample
 298     // point was put and the TLAB still has space.
 299     tlab.set_back_allocation_end();
 300     mem = tlab.allocate(_word_size);
 301     if (mem != NULL) {
 302       allocation._tlab_end_reset_for_sample = true;
 303       return mem;
 304     }
 305   }
 306 
 307   // Retain tlab and allocate object in shared space if
 308   // the amount free in the tlab is too large to discard.
 309   if (tlab.free() > tlab.refill_waste_limit()) {
 310     tlab.record_slow_allocation(_word_size);
 311     return NULL;
 312   }
 313 
 314   // Discard tlab and allocate a new one.
 315   // To minimize fragmentation, the last TLAB may be smaller than the rest.
 316   size_t new_tlab_size = tlab.compute_size(_word_size);
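
The set_back_allocation_end() call at the top of allocate_inside_tlab_slow() is only meaningful given the sampler's bookkeeping: while sampling is armed, the TLAB's visible end is held at the next sample point so an allocation crossing it takes the slow path, and restoring the real end may reveal enough room for the current request. A toy, hypothetical model (not the ThreadLocalAllocBuffer API) of that interaction:

#include <cstddef>

// Toy TLAB: allocate() checks against 'visible_end', which starts at the
// sampler-installed sample point; 'hard_end' is the real end of the buffer.
struct ToyTlab {
  size_t top         = 0;    // words already handed out
  size_t hard_end    = 256;  // actual end of the buffer
  size_t visible_end = 64;   // lowered to the next sample point while sampling is armed

  void set_back_allocation_end() { visible_end = hard_end; }  // drop the sample limit

  bool allocate(size_t words) {
    if (top + words > visible_end) {
      return false;          // caller must take the slow path
    }
    top += words;
    return true;
  }
};

// Usage mirroring the slow path above: a request that fails against the sample
// point can still fit once the real end is restored, which is exactly the case
// where _tlab_end_reset_for_sample gets set.
static bool slow_path_retry(ToyTlab& tlab, size_t words) {
  tlab.set_back_allocation_end();
  return tlab.allocate(words);
}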

