28 #include "runtime/atomic.hpp"
29 #include "runtime/os.hpp"
30 #include "runtime/threadCritical.hpp"
31 #include "services/memTracker.hpp"
32 #include "services/threadStackTracker.hpp"
33 #include "services/virtualMemoryTracker.hpp"
34
35 size_t VirtualMemorySummary::_snapshot[CALC_OBJ_SIZE_IN_TYPE(VirtualMemorySnapshot, size_t)];
36
37 void VirtualMemorySummary::initialize() {
38 assert(sizeof(_snapshot) >= sizeof(VirtualMemorySnapshot), "Sanity Check");
39 // Use placement operator new to initialize static data area.
40 ::new ((void*)_snapshot) VirtualMemorySnapshot();
41 }
42
43 void VirtualMemorySummary::snapshot(VirtualMemorySnapshot* s) {
44 // Only if thread stack is backed by virtual memory
45 if (ThreadStackTracker::track_as_vm()) {
46 // Snapshot current thread stacks
47 VirtualMemoryTracker::snapshot_thread_stacks();
48 as_snapshot()->copy_to(s);
49 }
50 }
51
52 SortedLinkedList<ReservedMemoryRegion, compare_reserved_region_base>* VirtualMemoryTracker::_reserved_regions;
53
54 int compare_committed_region(const CommittedMemoryRegion& r1, const CommittedMemoryRegion& r2) {
55 return r1.compare(r2);
56 }
57
58 int compare_reserved_region_base(const ReservedMemoryRegion& r1, const ReservedMemoryRegion& r2) {
59 return r1.compare(r2);
60 }
61
62 static bool is_mergeable_with(CommittedMemoryRegion* rgn, address addr, size_t size, const NativeCallStack& stack) {
63 return rgn->adjacent_to(addr, size) && rgn->call_stack()->equals(stack);
64 }
65
66 static bool is_same_as(CommittedMemoryRegion* rgn, address addr, size_t size, const NativeCallStack& stack) {
67 // It would have made sense to use rgn->equals(...), but equals returns true for overlapping regions.
68 return rgn->same_region(addr, size) && rgn->call_stack()->equals(stack);
69 }
|
28 #include "runtime/atomic.hpp"
29 #include "runtime/os.hpp"
30 #include "runtime/threadCritical.hpp"
31 #include "services/memTracker.hpp"
32 #include "services/threadStackTracker.hpp"
33 #include "services/virtualMemoryTracker.hpp"
34
35 size_t VirtualMemorySummary::_snapshot[CALC_OBJ_SIZE_IN_TYPE(VirtualMemorySnapshot, size_t)];
36
37 void VirtualMemorySummary::initialize() {
38 assert(sizeof(_snapshot) >= sizeof(VirtualMemorySnapshot), "Sanity Check");
39 // Use placement operator new to initialize static data area.
40 ::new ((void*)_snapshot) VirtualMemorySnapshot();
41 }
42
43 void VirtualMemorySummary::snapshot(VirtualMemorySnapshot* s) {
44 // Only if thread stack is backed by virtual memory
45 if (ThreadStackTracker::track_as_vm()) {
46 // Snapshot current thread stacks
47 VirtualMemoryTracker::snapshot_thread_stacks();
48 }
49 as_snapshot()->copy_to(s);
50 }
51
52 SortedLinkedList<ReservedMemoryRegion, compare_reserved_region_base>* VirtualMemoryTracker::_reserved_regions;
53
54 int compare_committed_region(const CommittedMemoryRegion& r1, const CommittedMemoryRegion& r2) {
55 return r1.compare(r2);
56 }
57
58 int compare_reserved_region_base(const ReservedMemoryRegion& r1, const ReservedMemoryRegion& r2) {
59 return r1.compare(r2);
60 }
61
62 static bool is_mergeable_with(CommittedMemoryRegion* rgn, address addr, size_t size, const NativeCallStack& stack) {
63 return rgn->adjacent_to(addr, size) && rgn->call_stack()->equals(stack);
64 }
65
66 static bool is_same_as(CommittedMemoryRegion* rgn, address addr, size_t size, const NativeCallStack& stack) {
67 // It would have made sense to use rgn->equals(...), but equals returns true for overlapping regions.
68 return rgn->same_region(addr, size) && rgn->call_stack()->equals(stack);
69 }
|