// NMT disabled: arena creation is not tracked; no-op stub.
static inline void record_new_arena(MEMFLAGS flag) { }
// NMT disabled: arena destruction is not tracked; no-op stub.
static inline void record_arena_free(MEMFLAGS flag) { }
// NMT disabled: arena size changes are not tracked; no-op stub.
static inline void record_arena_size_change(int diff, MEMFLAGS flag) { }
// NMT disabled: virtual memory reservations are not tracked; no-op stub.
static inline void record_virtual_memory_reserve(void* addr, size_t size, const NativeCallStack& stack,
  MEMFLAGS flag = mtNone) { }
// NMT disabled: combined reserve+commit operations are not tracked; no-op stub.
static inline void record_virtual_memory_reserve_and_commit(void* addr, size_t size,
  const NativeCallStack& stack, MEMFLAGS flag = mtNone) { }
// NMT disabled: virtual memory commits are not tracked; no-op stub.
static inline void record_virtual_memory_commit(void* addr, size_t size, const NativeCallStack& stack) { }
// NMT disabled: hand back a default-constructed (no-op) Tracker for uncommit operations.
static inline Tracker get_virtual_memory_uncommit_tracker() { return Tracker(); }
67 static inline Tracker get_virtual_memory_release_tracker() { }
// NMT disabled: memory-type tagging of regions is not tracked; no-op stub.
static inline void record_virtual_memory_type(void* addr, MEMFLAGS flag) { }
// NMT disabled: thread stack registration is not tracked; no-op stub.
static inline void record_thread_stack(void* addr, size_t size) { }
// NMT disabled: thread stack release is not tracked; no-op stub.
static inline void release_thread_stack(void* addr, size_t size) { }
71
// NMT disabled: no final tracking report is produced; no-op stub.
static void final_report(outputStream*) { }
// NMT disabled: no error-time tracking report is produced; no-op stub.
static void error_report(outputStream*) { }
74 };
75
76 #else
77
78 #include "runtime/atomic.hpp"
79 #include "runtime/threadCritical.hpp"
80 #include "services/mallocTracker.hpp"
81 #include "services/virtualMemoryTracker.hpp"
82
83 extern volatile bool NMT_stack_walkable;
84
85 #define CURRENT_PC ((MemTracker::tracking_level() == NMT_detail && NMT_stack_walkable) ? \
86 NativeCallStack(0, true) : NativeCallStack::empty_stack())
87 #define CALLER_PC ((MemTracker::tracking_level() == NMT_detail && NMT_stack_walkable) ? \
88 NativeCallStack(1, true) : NativeCallStack::empty_stack())
89
90 class MemBaseline;
91 class Mutex;
92
// Tracker guards the 'release' semantics of a virtual memory operation: it prevents
// another thread from obtaining and recording the same region that has just been
// 'released' by the current thread before the release itself can be recorded.
96 class Tracker : public StackObj {
97 public:
98 enum TrackerType {
|
// NMT disabled: arena creation is not tracked; no-op stub.
static inline void record_new_arena(MEMFLAGS flag) { }
// NMT disabled: arena destruction is not tracked; no-op stub.
static inline void record_arena_free(MEMFLAGS flag) { }
// NMT disabled: arena size changes are not tracked; no-op stub.
static inline void record_arena_size_change(int diff, MEMFLAGS flag) { }
// NMT disabled: virtual memory reservations are not tracked; no-op stub.
static inline void record_virtual_memory_reserve(void* addr, size_t size, const NativeCallStack& stack,
  MEMFLAGS flag = mtNone) { }
// NMT disabled: combined reserve+commit operations are not tracked; no-op stub.
static inline void record_virtual_memory_reserve_and_commit(void* addr, size_t size,
  const NativeCallStack& stack, MEMFLAGS flag = mtNone) { }
// NMT disabled: virtual memory commits are not tracked; no-op stub.
static inline void record_virtual_memory_commit(void* addr, size_t size, const NativeCallStack& stack) { }
// NMT disabled: hand back a default-constructed (no-op) Tracker for uncommit operations.
static inline Tracker get_virtual_memory_uncommit_tracker() { return Tracker(); }
67 static inline Tracker get_virtual_memory_release_tracker() { }
// NMT disabled: memory-type tagging of regions is not tracked; no-op stub.
static inline void record_virtual_memory_type(void* addr, MEMFLAGS flag) { }
// NMT disabled: thread stack registration is not tracked; no-op stub.
static inline void record_thread_stack(void* addr, size_t size) { }
// NMT disabled: thread stack release is not tracked; no-op stub.
static inline void release_thread_stack(void* addr, size_t size) { }
71
// NMT disabled: no final tracking report is produced; no-op stub.
static void final_report(outputStream*) { }
// NMT disabled: no error-time tracking report is produced; no-op stub.
static void error_report(outputStream*) { }
74 };
75
76 #else
77
78 #include "runtime/atomic.inline.hpp"
79 #include "runtime/threadCritical.hpp"
80 #include "services/mallocTracker.hpp"
81 #include "services/virtualMemoryTracker.hpp"
82
83 extern volatile bool NMT_stack_walkable;
84
85 #define CURRENT_PC ((MemTracker::tracking_level() == NMT_detail && NMT_stack_walkable) ? \
86 NativeCallStack(0, true) : NativeCallStack::empty_stack())
87 #define CALLER_PC ((MemTracker::tracking_level() == NMT_detail && NMT_stack_walkable) ? \
88 NativeCallStack(1, true) : NativeCallStack::empty_stack())
89
90 class MemBaseline;
91 class Mutex;
92
// Tracker guards the 'release' semantics of a virtual memory operation: it prevents
// another thread from obtaining and recording the same region that has just been
// 'released' by the current thread before the release itself can be recorded.
96 class Tracker : public StackObj {
97 public:
98 enum TrackerType {
|