--- old/src/hotspot/share/gc/shared/collectedHeap.inline.hpp	2018-02-05 14:55:36.210517207 -0800
+++ new/src/hotspot/share/gc/shared/collectedHeap.inline.hpp	2018-02-05 14:55:35.950518120 -0800
@@ -158,7 +158,9 @@
   AllocTracer::send_allocation_outside_tlab(klass, result, size * HeapWordSize, THREAD);
 
-  THREAD->tlab().handle_sample(THREAD, result, size);
+  if (UseTLAB) {
+    THREAD->tlab().handle_sample(THREAD, result, size);
+  }
   return result;
 }
--- old/src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp	2018-02-05 14:55:37.102514073 -0800
+++ new/src/hotspot/share/gc/shared/threadLocalAllocBuffer.cpp	2018-02-05 14:55:36.842514987 -0800
@@ -420,6 +420,7 @@
   return _actual_end + alignment_reserve();
 }
 
+
 GlobalTLABStats::GlobalTLABStats() :
   _allocating_threads_avg(TLABAllocationWeight) {
--- old/src/hotspot/share/prims/jvmti.xml	2018-02-05 14:55:38.050510743 -0800
+++ new/src/hotspot/share/prims/jvmti.xml	2018-02-05 14:55:37.774511712 -0800
@@ -10354,7 +10354,7 @@
-
+
       Can sample the heap.
       If this capability is enabled then the heap sampling methods can be called.
@@ -11539,7 +11539,7 @@
-
+
       jvmtiFrameInfo
@@ -11560,7 +11560,7 @@
-
+
       jvmtiStackTrace
@@ -11578,7 +11578,7 @@
-
+
@@ -11620,7 +11620,7 @@
-
+
       Start Heap Sampling
       Start the heap sampler in the JVM. The function provides, via its argument, the sampling
@@ -11658,7 +11658,7 @@
-
+
       Stop Heap Sampling
       Stop the heap sampler in the JVM.
@@ -11681,7 +11681,7 @@
-
+
       Get Live Traces
       Get Live Heap Sampled traces. The fields of the
@@ -11710,7 +11710,7 @@
-
+
       Get Garbage Traces
      Get the recent garbage heap sampled traces. The fields of the
@@ -11735,7 +11735,7 @@
-
+
       Get Frequent Garbage Traces
       Get the frequent garbage heap sampled traces. The fields of the
@@ -11760,7 +11760,7 @@
-
+
       Get Live Traces
       Get the cached sampled traces: the traces are the ones that were collected during the last
@@ -11786,7 +11786,7 @@
-
+
       Release traces provided by the heap monitoring
       Release traces provided by any of the trace retrieval methods.
@@ -11807,7 +11807,7 @@
-
+
       Get the heap sampling statistics
      Returns a to understand the heap sampling behavior and current
--- old/src/hotspot/share/runtime/heapMonitoring.cpp	2018-02-05 14:55:39.258506499 -0800
+++ new/src/hotspot/share/runtime/heapMonitoring.cpp	2018-02-05 14:55:38.986507454 -0800
@@ -27,6 +27,7 @@
 #include "gc/shared/collectedHeap.hpp"
 #include "memory/universe.hpp"
 #include "runtime/heapMonitoring.hpp"
+#include "runtime/orderAccess.inline.hpp"
 #include "runtime/vframe.hpp"
 
 static const int MaxStackDepth = 1024;
@@ -59,9 +60,23 @@
 struct StackTraceDataWithOop : public StackTraceData {
   oop obj;
 
-  StackTraceDataWithOop(jvmtiStackTrace* t, oop o) : StackTraceData(t), obj(o) {}
+  StackTraceDataWithOop(jvmtiStackTrace* t, oop o) : StackTraceData(t) {
+    store_oop(o);
+  }
 
   StackTraceDataWithOop() : StackTraceData(), obj(NULL) {}
+
+  oop load_oop() {
+    return RootAccess::oop_load(&obj);
+  }
+
+  void store_oop(oop value) {
+    RootAccess::oop_store(&obj, value);
+  }
+
+  void clear_oop() {
+    store_oop(reinterpret_cast<oop>(NULL));
+  }
 };
 
 // Fixed size buffer for holding garbage traces.
@@ -242,7 +257,9 @@
     _stats.sample_rate_count++;
   }
 
-  bool initialized() { return _initialized; }
+  bool initialized() {
+    return OrderAccess::load_acquire(&_initialized) != 0;
+  }
 
  private:
   // The traces currently sampled.
@@ -264,7 +281,7 @@
   int _max_gc_storage;
 
   static StackTraceStorage* internal_storage;
-  volatile bool _initialized;
+  int _initialized;
 
   // Support functions and classes for copying data to the external
   // world.
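
The hunks above replace StackTraceStorage's plain "volatile bool _initialized" with an int that is written through OrderAccess::release_store and read through OrderAccess::load_acquire via the new initialized() accessor (the matching stores appear in the hunks below). As a reading aid only, here is a minimal standalone sketch of the same release/acquire publication idiom, written with std::atomic so it compiles outside the HotSpot tree; the SampleStorage class, its _capacity field, and allocate_storage are invented for illustration and are not code from this patch.

#include <atomic>
#include <cassert>

class SampleStorage {
 public:
  SampleStorage() : _capacity(0), _initialized(0) {}

  // Writer: set up all fields first, then publish with a release store so a
  // reader that observes _initialized == 1 also observes the fields.
  void allocate_storage(int capacity) {
    if (initialized()) return;   // another thread already got through
    _capacity = capacity;
    _initialized.store(1, std::memory_order_release);
  }

  // Reader: the acquire load pairs with the release store above.
  bool initialized() const {
    return _initialized.load(std::memory_order_acquire) != 0;
  }

  int capacity() const {
    assert(initialized());
    return _capacity;
  }

 private:
  int _capacity;
  std::atomic<int> _initialized;   // illustrative stand-in for the patched flag
};

The point of the change is the same in both forms: the flag is no longer just "volatile", it orders the initialization writes before the flag becomes visible to other threads.
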
@@ -333,7 +350,7 @@
   _recent_garbage_traces = NULL;
   _frequent_garbage_traces = NULL;
   _max_gc_storage = 0;
-  _initialized = false;
+  OrderAccess::release_store(&_initialized, 0);
 }
 
 void StackTraceStorage::free_garbage() {
@@ -380,7 +397,7 @@
 }
 
 void StackTraceStorage::free_storage() {
-  if (!_initialized) {
+  if (!initialized()) {
     return;
   }
 
@@ -400,7 +417,7 @@
 
 void StackTraceStorage::allocate_storage(int max_gc_storage) {
   // In case multiple threads got locked and then 1 by 1 got through.
-  if (_initialized) {
+  if (initialized()) {
     return;
   }
 
@@ -413,7 +430,7 @@
   _frequent_garbage_traces = new FrequentGarbageTraces(max_gc_storage);
 
   _max_gc_storage = max_gc_storage;
-  _initialized = true;
+  OrderAccess::release_store(&_initialized, 1);
 }
 
 void StackTraceStorage::add_trace(jvmtiStackTrace* trace, oop o) {
@@ -421,7 +438,7 @@
   // Last minute check on initialization here in case:
   // Between the moment object_alloc_do_sample's check for initialization
   // and now, there was a stop() that deleted the data.
-  if (_initialized) {
+  if (initialized()) {
     StackTraceDataWithOop new_data(trace, o);
     _stats.sample_count++;
     _stats.stack_depth_accumulation += trace->frame_count;
@@ -432,30 +449,28 @@
 void StackTraceStorage::weak_oops_do(BoolObjectClosure* is_alive,
                                      OopClosure* f) {
   size_t count = 0;
-  if (_initialized) {
+  if (initialized()) {
     int len = _allocated_traces->length();
 
     _traces_on_last_full_gc->clear();
 
     // Compact the oop traces.  Moves the live oops to the beginning of the
     // growable array, potentially overwriting the dead ones.
-    int curr_pos = 0;
     for (int i = 0; i < len; i++) {
       StackTraceDataWithOop &trace = _allocated_traces->at(i);
-      oop value = RootAccess::oop_load(
-          &trace.obj);
+      oop value = trace.load_oop();
       if (is_alive->do_object_b(value)) {
         // Update the oop to point to the new object if it is still alive.
         f->do_oop(&(trace.obj));
 
         // Copy the old trace, if it is still live.
-        _allocated_traces->at_put(curr_pos++, trace);
+        _allocated_traces->at_put(count++, trace);
 
         // Store the live trace in a cache, to be served up on /heapz.
         _traces_on_last_full_gc->append(trace);
-
-        count++;
       } else {
+        trace.clear_oop();
+
         // If the old trace is no longer live, add it to the list of
         // recently collected garbage.
         store_garbage_trace(trace);
@@ -465,12 +480,12 @@
     // Zero out remaining array elements.  Even though the call to trunc_to
     // below truncates these values, zeroing them out is good practice.
     StackTraceDataWithOop zero_trace;
-    for (int i = curr_pos; i < len; i++) {
+    for (int i = count; i < len; i++) {
       _allocated_traces->at_put(i, zero_trace);
     }
 
     // Set the array's length to the number of live elements.
-    _allocated_traces->trunc_to(curr_pos);
+    _allocated_traces->trunc_to(count);
   }
 
   log_develop_trace(gc, ref)("Clearing HeapMonitoring weak reference (" INT64_FORMAT ")", count);
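
The reworked weak_oops_do above drops the separate curr_pos cursor and compacts the live traces in place using the single count index, then truncates the array to that count. A small self-contained sketch of that compaction pattern follows; Entry, is_live, and std::vector stand in for StackTraceDataWithOop, the is_alive closure, and the GrowableArray, and are illustrative only, not code from this patch.

#include <cstdio>
#include <vector>

struct Entry {
  int id;
  bool live;
};

static bool is_live(const Entry& e) { return e.live; }

// Moves live entries to the front of the array with one write cursor and
// drops the dead tail, analogous to at_put(count++, ...) + trunc_to(count).
static size_t compact_live(std::vector<Entry>& entries) {
  size_t count = 0;
  for (size_t i = 0; i < entries.size(); i++) {
    if (is_live(entries[i])) {
      entries[count++] = entries[i];   // keep the live entry, shifted forward
    }
    // dead entries are overwritten by later live ones or cut off below
  }
  entries.resize(count);               // analogous to trunc_to(count)
  return count;
}

int main() {
  std::vector<Entry> v = {{1, true}, {2, false}, {3, true}};
  size_t live = compact_live(v);
  std::printf("kept %zu live entries\n", live);
  return 0;
}

Reusing count as both the write cursor and the logged statistic removes the redundant bookkeeping that the old curr_pos/count pair carried.
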