11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24 #ifndef SHARE_MEMORY_METASPACE_HPP
25 #define SHARE_MEMORY_METASPACE_HPP
26
27 #include "memory/allocation.hpp"
28 #include "memory/memRegion.hpp"
29 #include "memory/metaspaceChunkFreeListSummary.hpp"
30 #include "memory/virtualspace.hpp"
31 #include "memory/metaspace/metaspaceSizesSnapshot.hpp"
32 #include "runtime/globals.hpp"
33 #include "utilities/exceptions.hpp"
34
35 // Metaspace
36 //
37 // Metaspaces are Arenas for the VM's metadata.
38 // They are allocated one per class loader object, and one for the null
39 // bootstrap class loader
40 //
41 // block X ---+ +-------------------+
42 // | | Virtualspace |
43 // | | |
44 // | | |
45 // | |-------------------|
46 // | || Chunk |
47 // | || |
48 // | ||---------- |
49 // +------>||| block 0 | |
50 // ||---------- |
51 // ||| block 1 | |
52 // ||---------- |
53 // || |
54 // |-------------------|
55 // | |
56 // | |
57 // +-------------------+
58 //
59
// Forward declarations of VM-level types used by the metaspace interface.
class ClassLoaderData;
class MetaspaceTracer;
class Mutex;
class outputStream;

class CollectedHeap;

// Forward declarations of metaspace-internal implementation types
// (defined in memory/metaspace/).
namespace metaspace {
  class ChunkManager;
  class ClassLoaderMetaspaceStatistics;
  class Metablock;
  class Metachunk;
  class PrintCLDMetaspaceInfoClosure;
  class SpaceManager;
  class VirtualSpaceList;
  class VirtualSpaceNode;
}
77
78 // Metaspaces each have a SpaceManager and allocations
79 // are done by the SpaceManager. Allocations are done
80 // out of the current Metachunk. When the current Metachunk
81 // is exhausted, the SpaceManager gets a new one from
82 // the current VirtualSpace. When the VirtualSpace is exhausted
83 // the SpaceManager gets a new one. The SpaceManager
84 // also manages freelists of available Chunks.
85 //
86 // Currently the space manager maintains the list of
87 // virtual spaces and the list of chunks in use. Its
88 // allocate() method returns a block for use as a
89 // quantum of metadata.
90
91 // Namespace for important central static functions
92 // (auxiliary stuff goes into MetaspaceUtils)
93 class Metaspace : public AllStatic {
94
95 friend class MetaspaceShared;
96
97 public:
98 enum MetadataType {
99 ClassType,
100 NonClassType,
101 MetadataTypeCount
102 };
103 enum MetaspaceType {
104 ZeroMetaspaceType = 0,
105 StandardMetaspaceType = ZeroMetaspaceType,
106 BootMetaspaceType = StandardMetaspaceType + 1,
107 ClassMirrorHolderMetaspaceType = BootMetaspaceType + 1,
108 ReflectionMetaspaceType = ClassMirrorHolderMetaspaceType + 1,
109 MetaspaceTypeCount
110 };
111
112 private:
113
114 // Align up the word size to the allocation word size
115 static size_t align_word_size_up(size_t);
116
117 // Aligned size of the metaspace.
118 static size_t _compressed_class_space_size;
119
120 static size_t compressed_class_space_size() {
121 return _compressed_class_space_size;
122 }
123
124 static void set_compressed_class_space_size(size_t size) {
125 _compressed_class_space_size = size;
126 }
127
128 static size_t _first_chunk_word_size;
129 static size_t _first_class_chunk_word_size;
130
131 static size_t _commit_alignment;
132 static size_t _reserve_alignment;
133 DEBUG_ONLY(static bool _frozen;)
134
135 // Virtual Space lists for both classes and other metadata
136 static metaspace::VirtualSpaceList* _space_list;
137 static metaspace::VirtualSpaceList* _class_space_list;
138
139 static metaspace::ChunkManager* _chunk_manager_metadata;
140 static metaspace::ChunkManager* _chunk_manager_class;
141
142 static const MetaspaceTracer* _tracer;
143
144 static bool _initialized;
145
146 public:
147 static metaspace::VirtualSpaceList* space_list() { return _space_list; }
148 static metaspace::VirtualSpaceList* class_space_list() { return _class_space_list; }
149 static metaspace::VirtualSpaceList* get_space_list(MetadataType mdtype) {
150 assert(mdtype != MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
151 return mdtype == ClassType ? class_space_list() : space_list();
152 }
153
154 static metaspace::ChunkManager* chunk_manager_metadata() { return _chunk_manager_metadata; }
155 static metaspace::ChunkManager* chunk_manager_class() { return _chunk_manager_class; }
156 static metaspace::ChunkManager* get_chunk_manager(MetadataType mdtype) {
157 assert(mdtype != MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
158 return mdtype == ClassType ? chunk_manager_class() : chunk_manager_metadata();
159 }
160
161 // convenience function
162 static metaspace::ChunkManager* get_chunk_manager(bool is_class) {
163 return is_class ? chunk_manager_class() : chunk_manager_metadata();
164 }
165
166 static const MetaspaceTracer* tracer() { return _tracer; }
167 static void freeze() {
168 assert(DumpSharedSpaces, "sanity");
169 DEBUG_ONLY(_frozen = true;)
170 }
171 static void assert_not_frozen() {
172 assert(!_frozen, "sanity");
173 }
174
175 private:
176
177 #ifdef _LP64
178
179 // Reserve a range of memory at an address suitable for en/decoding narrow
180 // Klass pointers (see: CompressedClassPointers::is_valid_base()).
181 // The returned address shall both be suitable as a compressed class pointers
182 // base, and aligned to Metaspace::reserve_alignment (which is equal to or a
183 // multiple of allocation granularity).
184 // On error, returns an unreserved space.
185 static ReservedSpace reserve_address_space_for_compressed_classes(size_t size);
186
187 // Given a prereserved space, use that to set up the compressed class space list.
188 static void initialize_class_space(ReservedSpace rs);
189
190 // Returns true if class space has been setup (initialize_class_space).
191 static bool class_space_is_initialized() { return _class_space_list != NULL; }
192
193 #endif
194
195 public:
196
197 static void ergo_initialize();
198 static void global_initialize();
199 static void post_initialize();
200
201 static void verify_global_initialization();
202
203 static size_t first_chunk_word_size() { return _first_chunk_word_size; }
204 static size_t first_class_chunk_word_size() { return _first_class_chunk_word_size; }
205
206 static size_t reserve_alignment() { return _reserve_alignment; }
207 static size_t reserve_alignment_words() { return _reserve_alignment / BytesPerWord; }
208 static size_t commit_alignment() { return _commit_alignment; }
209 static size_t commit_alignment_words() { return _commit_alignment / BytesPerWord; }
210
211 static MetaWord* allocate(ClassLoaderData* loader_data, size_t word_size,
212 MetaspaceObj::Type type, TRAPS);
213
214 static bool contains(const void* ptr);
215 static bool contains_non_shared(const void* ptr);
216
217 // Free empty virtualspaces
218 static void purge(MetadataType mdtype);
219 static void purge();
220
221 static void report_metadata_oome(ClassLoaderData* loader_data, size_t word_size,
222 MetaspaceObj::Type type, MetadataType mdtype, TRAPS);
223
224 static const char* metadata_type_name(Metaspace::MetadataType mdtype);
225
226 static void print_compressed_class_space(outputStream* st) NOT_LP64({});
227
228 // Return TRUE only if UseCompressedClassPointers is True.
229 static bool using_class_space() {
230 return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers);
231 }
232
233 static bool is_class_space_allocation(MetadataType mdType) {
234 return mdType == ClassType && using_class_space();
235 }
236
237 static bool initialized() { return _initialized; }
238
239 };
240
241 // Manages the metaspace portion belonging to a class loader
242 class ClassLoaderMetaspace : public CHeapObj<mtClass> {
243 friend class CollectedHeap; // For expand_and_allocate()
244 friend class ZCollectedHeap; // For expand_and_allocate()
245 friend class ShenandoahHeap; // For expand_and_allocate()
246 friend class Metaspace;
247 friend class MetaspaceUtils;
248 friend class metaspace::PrintCLDMetaspaceInfoClosure;
249 friend class VM_CollectForMetadataAllocation; // For expand_and_allocate()
250
251 private:
252
253 void initialize(Mutex* lock, Metaspace::MetaspaceType type);
254
255 // Initialize the first chunk for a Metaspace. Used for
256 // special cases such as the boot class loader, reflection
257 // class loader and hidden class loader.
258 void initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype);
259 metaspace::Metachunk* get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype);
260
261 const Metaspace::MetaspaceType _space_type;
262 Mutex* const _lock;
263 metaspace::SpaceManager* _vsm;
264 metaspace::SpaceManager* _class_vsm;
265
266 metaspace::SpaceManager* vsm() const { return _vsm; }
267 metaspace::SpaceManager* class_vsm() const { return _class_vsm; }
268 metaspace::SpaceManager* get_space_manager(Metaspace::MetadataType mdtype) {
269 assert(mdtype != Metaspace::MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
270 return mdtype == Metaspace::ClassType ? class_vsm() : vsm();
271 }
272
273 Mutex* lock() const { return _lock; }
274
275 MetaWord* expand_and_allocate(size_t size, Metaspace::MetadataType mdtype);
276
277 size_t class_chunk_size(size_t word_size);
278
279 // Adds to the given statistic object. Must be locked with CLD metaspace lock.
280 void add_to_statistics_locked(metaspace::ClassLoaderMetaspaceStatistics* out) const;
281
282 Metaspace::MetaspaceType space_type() const { return _space_type; }
283
284 public:
285
286 ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type);
287 ~ClassLoaderMetaspace();
288
289 // Allocate space for metadata of type mdtype. This is space
290 // within a Metachunk and is used by
291 // allocate(ClassLoaderData*, size_t, bool, MetadataType, TRAPS)
292 MetaWord* allocate(size_t word_size, Metaspace::MetadataType mdtype);
293
294 size_t allocated_blocks_bytes() const;
295 size_t allocated_chunks_bytes() const;
296
297 void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
298
299 void print_on(outputStream* st) const;
300 // Debugging support
301 void verify();
302
303 // Adds to the given statistic object. Will lock with CLD metaspace lock.
304 void add_to_statistics(metaspace::ClassLoaderMetaspaceStatistics* out) const;
305
306 }; // ClassLoaderMetaspace
307
// Auxiliary statistics/reporting functions for the metaspace subsystem.
class MetaspaceUtils : AllStatic {

  // Spacemanager updates running counters.
  friend class metaspace::SpaceManager;

  // Special access for error reporting (checks without locks).
  friend class oopDesc;
  friend class Klass;

  // Running counters for statistics concerning in-use chunks.
  // Note: capacity = used + free + waste + overhead. Note that we do not
  // count free and waste. Their sum can be deduced from the three other values.
  // For more details, one should call print_report() from within a safe point.
  static size_t _capacity_words [Metaspace:: MetadataTypeCount];
  static size_t _overhead_words [Metaspace:: MetadataTypeCount];
  static volatile size_t _used_words [Metaspace:: MetadataTypeCount];

  // Atomically decrement or increment in-use statistic counters
  static void dec_capacity(Metaspace::MetadataType mdtype, size_t words);
  static void inc_capacity(Metaspace::MetadataType mdtype, size_t words);
  static void dec_used(Metaspace::MetadataType mdtype, size_t words);
  static void inc_used(Metaspace::MetadataType mdtype, size_t words);
  static void dec_overhead(Metaspace::MetadataType mdtype, size_t words);
  static void inc_overhead(Metaspace::MetadataType mdtype, size_t words);


  // Getters for the in-use counters.
  static size_t capacity_words(Metaspace::MetadataType mdtype) { return _capacity_words[mdtype]; }
  static size_t used_words(Metaspace::MetadataType mdtype) { return _used_words[mdtype]; }
  static size_t overhead_words(Metaspace::MetadataType mdtype) { return _overhead_words[mdtype]; }

  // Sum of words, for the given type, in chunks sitting on free lists.
  static size_t free_chunks_total_words(Metaspace::MetadataType mdtype);

  // Helper for print_xx_report.
  static void print_vs(outputStream* out, size_t scale);

 public:

  // Collect used metaspace statistics. This involves walking the CLDG. The resulting
  // output will be the accumulated values for all live metaspaces.
  // Note: method does not do any locking.
  static void collect_statistics(metaspace::ClassLoaderMetaspaceStatistics* out);

  // Used by MetaspaceCounters
  static size_t free_chunks_total_words();
  static size_t free_chunks_total_bytes();
  static size_t free_chunks_total_bytes(Metaspace::MetadataType mdtype);

  // Capacity summed over both metadata types.
  static size_t capacity_words() {
    return capacity_words(Metaspace::NonClassType) +
           capacity_words(Metaspace::ClassType);
  }
  static size_t capacity_bytes(Metaspace::MetadataType mdtype) {
    return capacity_words(mdtype) * BytesPerWord;
  }
  static size_t capacity_bytes() {
    return capacity_words() * BytesPerWord;
  }

  // Used words summed over both metadata types.
  static size_t used_words() {
    return used_words(Metaspace::NonClassType) +
           used_words(Metaspace::ClassType);
  }
  static size_t used_bytes(Metaspace::MetadataType mdtype) {
    return used_words(mdtype) * BytesPerWord;
  }
  static size_t used_bytes() {
    return used_words() * BytesPerWord;
  }

  // Space committed but yet unclaimed by any class loader.
  static size_t free_in_vs_bytes();
  static size_t free_in_vs_bytes(Metaspace::MetadataType mdtype);

  static size_t reserved_bytes(Metaspace::MetadataType mdtype);
  static size_t reserved_bytes() {
    return reserved_bytes(Metaspace::ClassType) +
           reserved_bytes(Metaspace::NonClassType);
  }

  static size_t committed_bytes(Metaspace::MetadataType mdtype);
  static size_t committed_bytes() {
    return committed_bytes(Metaspace::ClassType) +
           committed_bytes(Metaspace::NonClassType);
  }

  static size_t min_chunk_size_words();

  // Flags for print_report().
  enum ReportFlag {
    // Show usage by class loader.
    rf_show_loaders = (1 << 0),
    // Breaks report down by chunk type (small, medium, ...).
    rf_break_down_by_chunktype = (1 << 1),
    // Breaks report down by space type (hidden, reflection, ...).
    rf_break_down_by_spacetype = (1 << 2),
    // Print details about the underlying virtual spaces.
    rf_show_vslist = (1 << 3),
    // Print metaspace map.
    rf_show_vsmap = (1 << 4),
    // If show_loaders: show loaded classes for each loader.
    rf_show_classes = (1 << 5)
  };

  // This will print out a basic metaspace usage report but
  // unlike print_report() is guaranteed not to lock or to walk the CLDG.
  static void print_basic_report(outputStream* st, size_t scale);

  // Prints a report about the current metaspace state.
  // Optional parts can be enabled via flags.
  // Function will walk the CLDG and will lock the expand lock; if that is not
  // convenient, use print_basic_report() instead.
  static void print_report(outputStream* out, size_t scale = 0, int flags = 0);

  static bool has_chunk_free_list(Metaspace::MetadataType mdtype);
  static MetaspaceChunkFreeListSummary chunk_free_list_summary(Metaspace::MetadataType mdtype);

  // Log change in used metadata.
  static void print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values);
  static void print_on(outputStream * out);

  // Prints an ASCII representation of the given space.
  static void print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype);

  static void dump(outputStream* out);
  static void verify_free_chunks();
  // Check internal counters (capacity, used).
  static void verify_metrics();
};
437
// Metaspaces are deallocated when their class loaders are GC'ed.
// This class implements a policy for inducing GC's to recover
// Metaspaces.
441
442 class MetaspaceGC : AllStatic {
443
444 // The current high-water-mark for inducing a GC.
445 // When committed memory of all metaspaces reaches this value,
446 // a GC is induced and the value is increased. Size is in bytes.
447 static volatile size_t _capacity_until_GC;
448 static uint _shrink_factor;
449
450 static size_t shrink_factor() { return _shrink_factor; }
451 void set_shrink_factor(uint v) { _shrink_factor = v; }
452
453 public:
454
455 static void initialize();
456 static void post_initialize();
457
458 static size_t capacity_until_GC();
459 static bool inc_capacity_until_GC(size_t v,
460 size_t* new_cap_until_GC = NULL,
461 size_t* old_cap_until_GC = NULL,
462 bool* can_retry = NULL);
463 static size_t dec_capacity_until_GC(size_t v);
464
465 // The amount to increase the high-water-mark (_capacity_until_GC)
466 static size_t delta_capacity_until_GC(size_t bytes);
467
468 // Tells if we have can expand metaspace without hitting set limits.
469 static bool can_expand(size_t words, bool is_class);
470
471 // Returns amount that we can expand without hitting a GC,
472 // measured in words.
473 static size_t allowed_expansion();
474
475 // Calculate the new high-water mark at which to induce
476 // a GC.
477 static void compute_new_size();
478 };
479
480 #endif // SHARE_MEMORY_METASPACE_HPP
|
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24 #ifndef SHARE_MEMORY_METASPACE_HPP
25 #define SHARE_MEMORY_METASPACE_HPP
26
27 #include "memory/allocation.hpp"
28 #include "memory/memRegion.hpp"
29 #include "memory/metaspaceChunkFreeListSummary.hpp"
30 #include "memory/virtualspace.hpp"
31 #include "runtime/globals.hpp"
32 #include "utilities/exceptions.hpp"
33 #include "utilities/globalDefinitions.hpp"
34
// Forward declarations of VM-level types used by the metaspace interface.
class ClassLoaderData;
class MetaspaceShared;
class MetaspaceTracer;
class Mutex;
class outputStream;

// Forward declarations of metaspace-internal implementation types
// (defined in memory/metaspace/).
namespace metaspace {
  class MetaspaceArena;
  class MetaspaceSizesSnapshot;
  struct ClmsStats;
}
46
47 ////////////////// Metaspace ///////////////////////
48
49 // Namespace for important central static functions
50 // (auxiliary stuff goes into MetaspaceUtils)
class Metaspace : public AllStatic {

  friend class MetaspaceShared;

 public:
  // Distinguishes allocations from the (compressed) class space from
  // allocations from the non-class metadata space.
  enum MetadataType {
    ClassType,
    NonClassType,
    MetadataTypeCount
  };
  // Flavors of metaspace, one per kind of owning loader (standard, boot,
  // hidden-class holder, reflection).
  enum MetaspaceType {
    ZeroMetaspaceType = 0,
    StandardMetaspaceType = ZeroMetaspaceType,
    BootMetaspaceType = StandardMetaspaceType + 1,
    ClassMirrorHolderMetaspaceType = BootMetaspaceType + 1,
    ReflectionMetaspaceType = ClassMirrorHolderMetaspaceType + 1,
    MetaspaceTypeCount
  };

 private:

  // Debug-only flag set by freeze(); checked via assert_not_frozen().
  DEBUG_ONLY(static bool _frozen;)

  static const MetaspaceTracer* _tracer;

  // See initialized().
  static bool _initialized;

 public:

  static const MetaspaceTracer* tracer() { return _tracer; }
  // Marks metaspace as frozen (CDS dump time only). In debug builds further
  // mutation is then caught by assert_not_frozen().
  static void freeze() {
    assert(DumpSharedSpaces, "sanity");
    DEBUG_ONLY(_frozen = true;)
  }
  static void assert_not_frozen() {
    assert(!_frozen, "sanity");
  }

 private:

#ifdef _LP64

  // Reserve a range of memory at an address suitable for en/decoding narrow
  // Klass pointers (see: CompressedClassPointers::is_valid_base()).
  // The returned address shall both be suitable as a compressed class pointers
  // base, and aligned to Metaspace::reserve_alignment (which is equal to or a
  // multiple of allocation granularity).
  // On error, returns an unreserved space.
  static ReservedSpace reserve_address_space_for_compressed_classes(size_t size);

  // Given a prereserved space, use that to set up the compressed class space list.
  static void initialize_class_space(ReservedSpace rs);

  // Returns true if class space has been setup (initialize_class_space).
  static bool class_space_is_initialized();

#endif

 public:

  static void ergo_initialize();
  static void global_initialize();
  static void post_initialize();

  // Alignment, in bytes, of metaspace mappings
  static size_t reserve_alignment()       { return reserve_alignment_words() * BytesPerWord; }
  // Alignment, in words, of metaspace mappings
  static size_t reserve_alignment_words();

  // The granularity at which Metaspace is committed and uncommitted.
  // (Todo: Why does this have to be exposed?)
  static size_t commit_alignment()        { return commit_alignment_words() * BytesPerWord; }
  static size_t commit_alignment_words();

  // The largest possible single allocation
  static size_t max_allocation_word_size();

  // Allocate word_size words of metadata of the given type on behalf of
  // loader_data (TRAPS: may throw on allocation failure).
  static MetaWord* allocate(ClassLoaderData* loader_data, size_t word_size,
                            MetaspaceObj::Type type, TRAPS);

  static bool contains(const void* ptr);
  static bool contains_non_shared(const void* ptr);

  // Free empty virtualspaces
  static void purge();

  static void report_metadata_oome(ClassLoaderData* loader_data, size_t word_size,
                                   MetaspaceObj::Type type, Metaspace::MetadataType mdtype, TRAPS);

  static const char* metadata_type_name(Metaspace::MetadataType mdtype);

  static void print_compressed_class_space(outputStream* st) NOT_LP64({});

  // Return TRUE only if UseCompressedClassPointers is True.
  static bool using_class_space() {
    return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers);
  }

  static bool is_class_space_allocation(MetadataType mdType) {
    return mdType == ClassType && using_class_space();
  }

  static bool initialized();

};
156
157 // ClassLoaderMetaspace is an inbetween-object between a CLD and its MetaspaceArena(s).
158 //
// A CLD owns one MetaspaceArena if compressed class space is off, two if it is on
160 // (one for allocations of Klass* structures from class space, one for the rest from
161 // non-class space).
162 //
163 // ClassLoaderMetaspace only exists to hide this logic from upper layers:
164 //
165 // +------+ +----------------------+ +-------------------+
166 // | CLD | ---> | ClassLoaderMetaspace | ----> | (non class) Arena |
167 // +------+ +----------------------+ | +-------------------+ allocation top
168 // | | v
169 // | + chunk -- chunk ... -- chunk
170 // |
171 // | +-------------------+
172 // +--> | (class) Arena |
173 // +-------------------+
174 // |
175 // + chunk ... chunk
176 // ^
177 // alloc top
178 //
class ClassLoaderMetaspace : public CHeapObj<mtClass> {

  // A reference to an outside lock, held by the CLD.
  Mutex* const _lock;

  const Metaspace::MetaspaceType _space_type;

  // Arena for allocations from non-class metaspace
  //  (resp. for all allocations if -XX:-UseCompressedClassPointers).
  metaspace::MetaspaceArena* _non_class_space_arena;

  // Arena for allocations from class space
  //  (NULL if -XX:-UseCompressedClassPointers).
  metaspace::MetaspaceArena* _class_space_arena;

  Mutex* lock() const { return _lock; }
  metaspace::MetaspaceArena* non_class_space_arena() const { return _non_class_space_arena; }
  metaspace::MetaspaceArena* class_space_arena() const { return _class_space_arena; }

  // Returns the arena responsible for the given space kind
  // (class resp. non-class).
  metaspace::MetaspaceArena* get_arena(bool is_class) {
    return is_class ? class_space_arena() : non_class_space_arena();
  }

 public:

  ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType space_type);

  ~ClassLoaderMetaspace();

  Metaspace::MetaspaceType space_type() const { return _space_type; }

  // Allocate word_size words from Metaspace.
  MetaWord* allocate(size_t word_size, Metaspace::MetadataType mdType);

  // Attempt to expand the GC threshold to be good for at least another word_size words
  // and allocate. Returns NULL if failure. Used during Metaspace GC.
  MetaWord* expand_and_allocate(size_t word_size, Metaspace::MetadataType mdType);

  // Prematurely returns a metaspace allocation to the _block_freelists
  // because it is not needed anymore.
  void deallocate(MetaWord* ptr, size_t word_size, bool is_class);

  // Update statistics. This walks all in-use chunks.
  void add_to_statistics(metaspace::ClmsStats* out) const;

  DEBUG_ONLY(void verify() const;)

  // This only exists for JFR and jcmd VM.classloader_stats. We may want to
  // change this. Capacity as a stat is of questionable use since it may
  // contain committed and uncommitted areas. For now we do this to maintain
  // backward compatibility with JFR.
  void calculate_jfr_stats(size_t* p_used_bytes, size_t* p_capacity_bytes) const;

}; // end: ClassLoaderMetaspace
233
234
235 ////////////////// MetaspaceGC ///////////////////////
236
// Metaspaces are deallocated when their class loaders are GC'ed.
// This class implements a policy for inducing GC's to recover
// Metaspaces.
240
241 class MetaspaceGCThresholdUpdater : public AllStatic {
242 public:
243 enum Type {
244 ComputeNewSize,
245 ExpandAndAllocate,
246 Last
247 };
248
249 static const char* to_string(MetaspaceGCThresholdUpdater::Type updater) {
250 switch (updater) {
251 case ComputeNewSize:
252 return "compute_new_size";
253 case ExpandAndAllocate:
254 return "expand_and_allocate";
255 default:
256 assert(false, "Got bad updater: %d", (int) updater);
257 return NULL;
258 };
259 }
260 };
261
262 class MetaspaceGC : public AllStatic {
263
264 // The current high-water-mark for inducing a GC.
265 // When committed memory of all metaspaces reaches this value,
266 // a GC is induced and the value is increased. Size is in bytes.
267 static volatile size_t _capacity_until_GC;
268 static uint _shrink_factor;
269
270 static size_t shrink_factor() { return _shrink_factor; }
271 void set_shrink_factor(uint v) { _shrink_factor = v; }
272
273 public:
274
275 static void initialize();
276 static void post_initialize();
277
278 static size_t capacity_until_GC();
279 static bool inc_capacity_until_GC(size_t v,
280 size_t* new_cap_until_GC = NULL,
281 size_t* old_cap_until_GC = NULL,
282 bool* can_retry = NULL);
283 static size_t dec_capacity_until_GC(size_t v);
284
285 // The amount to increase the high-water-mark (_capacity_until_GC)
286 static size_t delta_capacity_until_GC(size_t bytes);
287
288 // Tells if we have can expand metaspace without hitting set limits.
289 static bool can_expand(size_t words, bool is_class);
290
291 // Returns amount that we can expand without hitting a GC,
292 // measured in words.
293 static size_t allowed_expansion();
294
295 // Calculate the new high-water mark at which to induce
296 // a GC.
297 static void compute_new_size();
298 };
299
300
301
302
// Auxiliary statistics/reporting functions for the metaspace subsystem.
class MetaspaceUtils : AllStatic {
 public:

  // Committed space actually in use by Metadata
  static size_t used_words();
  static size_t used_words(Metaspace::MetadataType mdtype);

  // Space committed for Metaspace
  static size_t committed_words();
  static size_t committed_words(Metaspace::MetadataType mdtype);

  // Space reserved for Metaspace
  static size_t reserved_words();
  static size_t reserved_words(Metaspace::MetadataType mdtype);

  // _bytes() variants for convenience...
  static size_t used_bytes() { return used_words() * BytesPerWord; }
  static size_t used_bytes(Metaspace::MetadataType mdtype) { return used_words(mdtype) * BytesPerWord; }
  static size_t committed_bytes() { return committed_words() * BytesPerWord; }
  static size_t committed_bytes(Metaspace::MetadataType mdtype) { return committed_words(mdtype) * BytesPerWord; }
  static size_t reserved_bytes() { return reserved_words() * BytesPerWord; }
  static size_t reserved_bytes(Metaspace::MetadataType mdtype) { return reserved_words(mdtype) * BytesPerWord; }

  // (See JDK-8251342). Implement or Consolidate.
  // Currently a stub: returns an all-zero summary regardless of mdtype.
  static MetaspaceChunkFreeListSummary chunk_free_list_summary(Metaspace::MetadataType mdtype) {
    return MetaspaceChunkFreeListSummary(0,0,0,0,0,0,0,0);
  }

  // Log change in used metadata.
  static void print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values);

  // This will print out a basic metaspace usage report but
  // unlike print_report() is guaranteed not to lock or to walk the CLDG.
  static void print_basic_report(outputStream* st, size_t scale = 0);

  // Prints a report about the current metaspace state.
  // Function will walk the CLDG and will lock the expand lock; if that is not
  // convenient, use print_basic_report() instead.
  static void print_report(outputStream* out, size_t scale = 0);

  static void print_on(outputStream * out);

  DEBUG_ONLY(static void verify();)

};
348
349 #endif // SHARE_MEMORY_METASPACE_HPP
|