@@ -210,52 +210,52 @@
 
     _thread->heap_sampler().check_for_sampling(obj_h(), size_in_bytes, bytes_since_last);
   }
 
   if (_tlab_end_reset_for_sample || _allocated_tlab_size != 0) {
     // Tell tlab to forget bytes_since_last if we passed it to the heap sampler.
     _thread->tlab().set_sample_end(bytes_since_last != 0);
   }
 }
 
 void MemAllocator::Allocation::notify_allocation_low_memory_detector() {
   // support low memory notifications (no-op if not enabled)
   LowMemoryDetector::detect_low_memory_for_collected_pools();
 }
 
 void MemAllocator::Allocation::notify_allocation_jfr_sampler() {
   HeapWord* mem = (HeapWord*)obj();
   size_t size_in_bytes = _allocator._word_size * HeapWordSize;
 
   if (_allocated_outside_tlab) {
-    AllocTracer::send_allocation_outside_tlab(_allocator._klass, mem, size_in_bytes, _thread);
+    AllocTracer::send_allocation_outside_tlab(obj()->klass(), mem, size_in_bytes, _thread);
   } else if (_allocated_tlab_size != 0) {
     // TLAB was refilled
-    AllocTracer::send_allocation_in_new_tlab(_allocator._klass, mem, _allocated_tlab_size * HeapWordSize,
+    AllocTracer::send_allocation_in_new_tlab(obj()->klass(), mem, _allocated_tlab_size * HeapWordSize,
                                              size_in_bytes, _thread);
   }
 }
 
 void MemAllocator::Allocation::notify_allocation_dtrace_sampler() {
   if (DTraceAllocProbes) {
     // support for Dtrace object alloc event (no-op most of the time)
-    Klass* klass = _allocator._klass;
+    Klass* klass = obj()->klass();
     size_t word_size = _allocator._word_size;
     if (klass != NULL && klass->name() != NULL) {
       SharedRuntime::dtrace_object_alloc(obj(), (int)word_size);
     }
   }
 }
 
 void MemAllocator::Allocation::notify_allocation() {
   notify_allocation_low_memory_detector();
   notify_allocation_jfr_sampler();
   notify_allocation_dtrace_sampler();
   notify_allocation_jvmti_sampler();
 }
 
 HeapWord* MemAllocator::allocate_outside_tlab(Allocation& allocation) const {
   allocation._allocated_outside_tlab = true;
   HeapWord* mem = Universe::heap()->mem_allocate(_word_size, &allocation._overhead_limit_exceeded);
   if (mem == NULL) {
     return mem;
   }