41 // locating a method given an address of an instruction.
42
43 class OopClosure;
44 class DepChange;
45
46 class CodeCache : AllStatic {
47 friend class VMStructs;
48 private:
49 // CodeHeap is malloc()'ed at startup and never deleted during shutdown,
50 // so that the generated assembly code is always there when it's needed.
51 // This may cause memory leak, but is necessary, for now. See 4423824,
52 // 4422213 or 4436291 for details.
53 static CodeHeap * _heap;
54 static int _number_of_blobs;
55 static int _number_of_adapters;
56 static int _number_of_nmethods;
57 static int _number_of_nmethods_with_dependencies;
58 static bool _needs_cache_clean;
59 static nmethod* _scavenge_root_nmethods; // linked via nm->scavenge_root_link()
60
61 static void verify_if_often() PRODUCT_RETURN;
62
63 static void mark_scavenge_root_nmethods() PRODUCT_RETURN;
64 static void verify_perm_nmethods(CodeBlobClosure* f_or_null) PRODUCT_RETURN;
65
66 static int _codemem_full_count;
67
68 public:
69
70 // Initialization
71 static void initialize();
72
73 static void report_codemem_full();
74
75 // Allocation/administration
76 static CodeBlob* allocate(int size, bool is_critical = false); // allocates a new CodeBlob
77 static void commit(CodeBlob* cb); // called when the allocated CodeBlob has been filled
78 static int alignment_unit(); // guaranteed alignment of all CodeBlobs
79 static int alignment_offset(); // guaranteed offset of first CodeBlob byte within alignment unit (i.e., allocation header)
80 static void free(CodeBlob* cb); // frees a CodeBlob
81 static void flush(); // flushes all CodeBlobs
82 static bool contains(void *p); // returns whether p is included
83 static void blobs_do(void f(CodeBlob* cb)); // iterates over all CodeBlobs
84 static void blobs_do(CodeBlobClosure* f); // iterates over all CodeBlobs
85 static void nmethods_do(void f(nmethod* nm)); // iterates over all nmethods
86 static void alive_nmethods_do(void f(nmethod* nm)); // iterates over all alive nmethods
87
88 // Lookup
89 static CodeBlob* find_blob(void* start);
90 static nmethod* find_nmethod(void* start);
91
92 // Lookup that does not fail if you lookup a zombie method (if you call this, be sure to know
93 // what you are doing)
94 static CodeBlob* find_blob_unsafe(void* start) {
95 // NMT can walk the stack before code cache is created
96 if (_heap == NULL) return NULL;
97
98 CodeBlob* result = (CodeBlob*)_heap->find_start(start);
99 // this assert is too strong because the heap code will return the
100 // heapblock containing start. That block can often be larger than
101 // the codeBlob itself. If you look up an address that is within
133 // If "unloading_occurred" is true, then unloads (i.e., breaks root links
134 // to) any unmarked codeBlobs in the cache. Sets "marked_for_unloading"
135 // to "true" iff some code got unloaded.
136 static void do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred);
137 static void oops_do(OopClosure* f) {
138 CodeBlobToOopClosure oopc(f, /*do_marking=*/ false);
139 blobs_do(&oopc);
140 }
141 static void asserted_non_scavengable_nmethods_do(CodeBlobClosure* f = NULL) PRODUCT_RETURN;
142 static void scavenge_root_nmethods_do(CodeBlobClosure* f);
143
// Accessors for the head of the scavenge-root nmethod list (linked via
// nm->scavenge_root_link(), per the _scavenge_root_nmethods declaration).
144 static nmethod* scavenge_root_nmethods() { return _scavenge_root_nmethods; }
145 static void set_scavenge_root_nmethods(nmethod* nm) { _scavenge_root_nmethods = nm; }
146 static void add_scavenge_root_nmethod(nmethod* nm);
147 static void drop_scavenge_root_nmethod(nmethod* nm);
148 static void prune_scavenge_root_nmethods();
149
150 // Printing/debugging
151 static void print(); // prints summary
152 static void print_internals();
153 static void verify(); // verifies the code cache
154 static void print_trace(const char* event, CodeBlob* cb, int size = 0) PRODUCT_RETURN;
155 static void print_summary(outputStream* st, bool detailed = true); // Prints a summary of the code cache usage
156 static void log_state(outputStream* st);
157
158 // The full limits of the codeCache
// Thin forwarders to the underlying CodeHeap's boundary/high queries,
// cast to address for callers.
159 static address low_bound() { return (address) _heap->low_boundary(); }
160 static address high_bound() { return (address) _heap->high_boundary(); }
161 static address high() { return (address) _heap->high(); }
162
163 // Profiling
164 static address first_address(); // first address used for CodeBlobs
165 static address last_address(); // last address used for CodeBlobs
// Capacity queries forwarded verbatim to the CodeHeap.
166 static size_t capacity() { return _heap->capacity(); }
167 static size_t max_capacity() { return _heap->max_capacity(); }
168 static size_t unallocated_capacity() { return _heap->unallocated_capacity(); }
169 static double reverse_free_ratio();
170
// Query and set the pending cache-clean flag (_needs_cache_clean).
171 static bool needs_cache_clean() { return _needs_cache_clean; }
172 static void set_needs_cache_clean(bool v) { _needs_cache_clean = v; }
|
41 // locating a method given an address of an instruction.
42
43 class OopClosure;
44 class DepChange;
45
46 class CodeCache : AllStatic {
47 friend class VMStructs;
48 private:
49 // CodeHeap is malloc()'ed at startup and never deleted during shutdown,
50 // so that the generated assembly code is always there when it's needed.
51 // This may cause memory leak, but is necessary, for now. See 4423824,
52 // 4422213 or 4436291 for details.
53 static CodeHeap * _heap;
54 static int _number_of_blobs;
55 static int _number_of_adapters;
56 static int _number_of_nmethods;
57 static int _number_of_nmethods_with_dependencies;
58 static bool _needs_cache_clean;
59 static nmethod* _scavenge_root_nmethods; // linked via nm->scavenge_root_link()
60
61 static void mark_scavenge_root_nmethods() PRODUCT_RETURN;
62 static void verify_perm_nmethods(CodeBlobClosure* f_or_null) PRODUCT_RETURN;
63
64 static int _codemem_full_count;
// Private forwarders exposing CodeHeap allocation statistics.
65 static size_t bytes_allocated_in_freelist() { return _heap->allocated_in_freelist(); }
66 static int allocated_segments() { return _heap->allocated_segments(); }
67 static size_t freelist_length() { return _heap->freelist_length(); }
68
69 public:
70
71 // Initialization
72 static void initialize();
73
74 static void report_codemem_full();
75
76 // Allocation/administration
77 static CodeBlob* allocate(int size, bool is_critical = false); // allocates a new CodeBlob
78 static void commit(CodeBlob* cb); // called when the allocated CodeBlob has been filled
79 static int alignment_unit(); // guaranteed alignment of all CodeBlobs
80 static int alignment_offset(); // guaranteed offset of first CodeBlob byte within alignment unit (i.e., allocation header)
81 static void free(CodeBlob* cb); // frees a CodeBlob
82 static bool contains(void *p); // returns whether p is included
83 static void blobs_do(void f(CodeBlob* cb)); // iterates over all CodeBlobs
84 static void blobs_do(CodeBlobClosure* f); // iterates over all CodeBlobs
85 static void nmethods_do(void f(nmethod* nm)); // iterates over all nmethods
86 static void alive_nmethods_do(void f(nmethod* nm)); // iterates over all alive nmethods
87
88 // Lookup
89 static CodeBlob* find_blob(void* start);
90 static nmethod* find_nmethod(void* start);
91
92 // Lookup that does not fail if you lookup a zombie method (if you call this, be sure to know
93 // what you are doing)
94 static CodeBlob* find_blob_unsafe(void* start) {
95 // NMT can walk the stack before code cache is created
96 if (_heap == NULL) return NULL;
97
98 CodeBlob* result = (CodeBlob*)_heap->find_start(start);
99 // this assert is too strong because the heap code will return the
100 // heapblock containing start. That block can often be larger than
101 // the codeBlob itself. If you look up an address that is within
133 // If "unloading_occurred" is true, then unloads (i.e., breaks root links
134 // to) any unmarked codeBlobs in the cache. Sets "marked_for_unloading"
135 // to "true" iff some code got unloaded.
136 static void do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred);
137 static void oops_do(OopClosure* f) {
138 CodeBlobToOopClosure oopc(f, /*do_marking=*/ false);
139 blobs_do(&oopc);
140 }
141 static void asserted_non_scavengable_nmethods_do(CodeBlobClosure* f = NULL) PRODUCT_RETURN;
142 static void scavenge_root_nmethods_do(CodeBlobClosure* f);
143
// Accessors for the head of the scavenge-root nmethod list (linked via
// nm->scavenge_root_link(), per the _scavenge_root_nmethods declaration).
144 static nmethod* scavenge_root_nmethods() { return _scavenge_root_nmethods; }
145 static void set_scavenge_root_nmethods(nmethod* nm) { _scavenge_root_nmethods = nm; }
146 static void add_scavenge_root_nmethod(nmethod* nm);
147 static void drop_scavenge_root_nmethod(nmethod* nm);
148 static void prune_scavenge_root_nmethods();
149
150 // Printing/debugging
151 static void print(); // prints summary
152 static void print_internals();
153 static void print_memory_overhead();
154 static void verify(); // verifies the code cache
155 static void print_trace(const char* event, CodeBlob* cb, int size = 0) PRODUCT_RETURN;
156 static void print_summary(outputStream* st, bool detailed = true); // Prints a summary of the code cache usage
157 static void log_state(outputStream* st);
158
159 // The full limits of the codeCache
// Thin forwarders to the underlying CodeHeap's boundary/high queries,
// cast to address for callers.
160 static address low_bound() { return (address) _heap->low_boundary(); }
161 static address high_bound() { return (address) _heap->high_boundary(); }
162 static address high() { return (address) _heap->high(); }
163
164 // Profiling
165 static address first_address(); // first address used for CodeBlobs
166 static address last_address(); // last address used for CodeBlobs
// Capacity queries forwarded verbatim to the CodeHeap.
167 static size_t capacity() { return _heap->capacity(); }
168 static size_t max_capacity() { return _heap->max_capacity(); }
169 static size_t unallocated_capacity() { return _heap->unallocated_capacity(); }
170 static double reverse_free_ratio();
171
// Query and set the pending cache-clean flag (_needs_cache_clean).
172 static bool needs_cache_clean() { return _needs_cache_clean; }
173 static void set_needs_cache_clean(bool v) { _needs_cache_clean = v; }
|