1 /* 2 * Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#ifndef SHARE_MEMORY_METASPACE_HPP
#define SHARE_MEMORY_METASPACE_HPP

#include "memory/allocation.hpp"
#include "memory/memRegion.hpp"
#include "memory/metaspaceChunkFreeListSummary.hpp"
#include "memory/virtualspace.hpp"
#include "runtime/globals.hpp"
#include "utilities/exceptions.hpp"
#include "utilities/globalDefinitions.hpp"

class ClassLoaderData;
class MetaspaceShared;
class MetaspaceTracer;
class Mutex;
class outputStream;

namespace metaspace {
  class MetaspaceArena;
  class MetaspaceSizesSnapshot;
  struct clms_stats_t;
}

////////////////// Metaspace ///////////////////////

// Namespace for important central static functions
// (auxiliary stuff goes into MetaspaceUtils)
class Metaspace : public AllStatic {

  friend class MetaspaceShared;

public:

  // Distinguishes allocations from class space (used for Klass structures when
  // compressed class pointers are enabled) from allocations out of the general
  // non-class metaspace.
  enum MetadataType {
    ClassType,
    NonClassType,
    MetadataTypeCount
  };

  // Hint about the expected allocation profile of the owning class loader;
  // used when sizing the loader's arenas.
  enum MetaspaceType {
    ZeroMetaspaceType = 0,
    StandardMetaspaceType = ZeroMetaspaceType,
    BootMetaspaceType = StandardMetaspaceType + 1,
    ClassMirrorHolderMetaspaceType = BootMetaspaceType + 1,
    ReflectionMetaspaceType = ClassMirrorHolderMetaspaceType + 1,
    MetaspaceTypeCount
  };

private:

  // Debug-only: set by freeze(); once set, assert_not_frozen() will fire on
  // attempts to modify metaspace.
  DEBUG_ONLY(static bool _frozen;)

  static const MetaspaceTracer* _tracer;

  // True once global initialization has completed (see initialized()).
  static bool _initialized;

public:

  static const MetaspaceTracer* tracer() { return _tracer; }

  // Marks metaspace as frozen. Only valid while dumping shared spaces
  // (CDS archive creation); afterwards metaspace must not be modified.
  static void freeze() {
    assert(DumpSharedSpaces, "sanity");
    DEBUG_ONLY(_frozen = true;)
  }

  static void assert_not_frozen() {
    assert(!_frozen, "sanity");
  }

private:

#ifdef _LP64

  // Reserve a range of memory at an address suitable for en/decoding narrow
  // Klass pointers (see: CompressedClassPointers::is_valid_base()).
  // The returned address shall both be suitable as a compressed class pointers
  //  base, and aligned to Metaspace::reserve_alignment (which is equal to or a
  //  multiple of allocation granularity).
  // On error, returns an unreserved space.
  static ReservedSpace reserve_address_space_for_compressed_classes(size_t size);

  // Given a prereserved space, use that to set up the compressed class space list.
  static void initialize_class_space(ReservedSpace rs);

  // Returns true if class space has been setup (initialize_class_space).
  static bool class_space_is_initialized();

#endif

public:

  static void ergo_initialize();
  static void global_initialize();
  static void post_initialize();

  // Alignment, in bytes, of metaspace mappings
  static size_t reserve_alignment()       { return reserve_alignment_words() * BytesPerWord; }
  // Alignment, in words, of metaspace mappings
  static size_t reserve_alignment_words();

  // The granularity at which Metaspace is committed and uncommitted.
  // (Todo: Why does this have to be exposed?)
  static size_t commit_alignment()        { return commit_alignment_words() * BytesPerWord; }
  static size_t commit_alignment_words();

  // The largest possible single allocation
  static size_t max_allocation_word_size();

  static MetaWord* allocate(ClassLoaderData* loader_data, size_t word_size,
                            MetaspaceObj::Type type, TRAPS);

  static bool contains(const void* ptr);
  static bool contains_non_shared(const void* ptr);

  // Free empty virtualspaces
  static void purge();

  static void report_metadata_oome(ClassLoaderData* loader_data, size_t word_size,
                                   MetaspaceObj::Type type, Metaspace::MetadataType mdtype, TRAPS);

  static void print_compressed_class_space(outputStream* st) NOT_LP64({});

  // Return TRUE only if UseCompressedClassPointers is True.
  static bool using_class_space() {
    return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers);
  }

  static bool initialized();

};

// ClassLoaderMetaspace is an in-between object between a CLD and its MetaspaceArena(s).
//
// A CLD owns one MetaspaceArena if compressed class space is off, two if it is on
// (one for allocations of Klass* structures from class space, one for the rest from
// non-class space).
//
// ClassLoaderMetaspace only exists to hide this logic from upper layers:
//
// +------+       +----------------------+       +-------------------+
// | CLD  | --->  | ClassLoaderMetaspace | ----> | (non class) Arena |
// +------+       +----------------------+   |   +-------------------+     allocation top
//                                           |        |                         v
//                                           |        + chunk -- chunk ... -- chunk
//                                           |
//                                           |   +-------------------+
//                                           +-> | (class) Arena     |
//                                               +-------------------+
//                                                    |
//                                                    + chunk ... chunk
//                                                                  ^
//                                                                  alloc top
//
class ClassLoaderMetaspace : public CHeapObj<mtClass> {

  // A reference to an outside lock, held by the CLD.
  Mutex* const _lock;

  const Metaspace::MetaspaceType _space_type;

  // Arena for allocations from non-class metaspace
  //  (resp. for all allocations if -XX:-UseCompressedClassPointers).
  metaspace::MetaspaceArena* _non_class_space_arena;

  // Arena for allocations from class space
  //  (NULL if -XX:-UseCompressedClassPointers).
  metaspace::MetaspaceArena* _class_space_arena;

  Mutex* lock() const                                        { return _lock; }
  metaspace::MetaspaceArena* non_class_space_arena() const   { return _non_class_space_arena; }
  metaspace::MetaspaceArena* class_space_arena() const       { return _class_space_arena; }

  // Returns the arena responsible for the given metadata type.
  metaspace::MetaspaceArena* get_arena(bool is_class) {
    return is_class ? class_space_arena() : non_class_space_arena();
  }

public:

  ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType space_type);

  ~ClassLoaderMetaspace();

  Metaspace::MetaspaceType space_type() const { return _space_type; }

  // Allocate word_size words from Metaspace.
  MetaWord* allocate(size_t word_size, Metaspace::MetadataType mdType);

  // Attempt to expand the GC threshold to be good for at least another word_size words
  // and allocate. Returns NULL if failure. Used during Metaspace GC.
  MetaWord* expand_and_allocate(size_t word_size, Metaspace::MetadataType mdType);

  // Prematurely returns a metaspace allocation to the _block_freelists
  // because it is not needed anymore.
  void deallocate(MetaWord* ptr, size_t word_size, bool is_class);

  // Update statistics. This walks all in-use chunks.
  void add_to_statistics(metaspace::clms_stats_t* out) const;

  DEBUG_ONLY(void verify() const;)

  // This only exists for JFR and jcmd VM.classloader_stats. We may want to
  // change this. Capacity as a stat is of questionable use since it may
  // contain committed and uncommitted areas. For now we do this to maintain
  // backward compatibility with JFR.
  void calculate_jfr_stats(size_t* p_used_bytes, size_t* p_capacity_bytes) const;

}; // end: ClassLoaderMetaspace


////////////////// MetaspaceGC ///////////////////////

// Metaspaces are deallocated when their class loader is GC'ed.
// This class implements a policy for inducing GCs to recover
// Metaspaces.
234 235 class MetaspaceGCThresholdUpdater : public AllStatic { 236 public: 237 enum Type { 238 ComputeNewSize, 239 ExpandAndAllocate, 240 Last 241 }; 242 243 static const char* to_string(MetaspaceGCThresholdUpdater::Type updater) { 244 switch (updater) { 245 case ComputeNewSize: 246 return "compute_new_size"; 247 case ExpandAndAllocate: 248 return "expand_and_allocate"; 249 default: 250 assert(false, "Got bad updater: %d", (int) updater); 251 return NULL; 252 }; 253 } 254 }; 255 256 class MetaspaceGC : public AllStatic { 257 258 // The current high-water-mark for inducing a GC. 259 // When committed memory of all metaspaces reaches this value, 260 // a GC is induced and the value is increased. Size is in bytes. 261 static volatile size_t _capacity_until_GC; 262 static uint _shrink_factor; 263 264 static size_t shrink_factor() { return _shrink_factor; } 265 void set_shrink_factor(uint v) { _shrink_factor = v; } 266 267 public: 268 269 static void initialize(); 270 static void post_initialize(); 271 272 static size_t capacity_until_GC(); 273 static bool inc_capacity_until_GC(size_t v, 274 size_t* new_cap_until_GC = NULL, 275 size_t* old_cap_until_GC = NULL, 276 bool* can_retry = NULL); 277 static size_t dec_capacity_until_GC(size_t v); 278 279 // The amount to increase the high-water-mark (_capacity_until_GC) 280 static size_t delta_capacity_until_GC(size_t bytes); 281 282 // Tells if we have can expand metaspace without hitting set limits. 283 static bool can_expand(size_t words, bool is_class); 284 285 // Returns amount that we can expand without hitting a GC, 286 // measured in words. 287 static size_t allowed_expansion(); 288 289 // Calculate the new high-water mark at which to induce 290 // a GC. 
291 static void compute_new_size(); 292 }; 293 294 295 296 297 class MetaspaceUtils : AllStatic { 298 public: 299 300 // Committed space actually in use by Metadata 301 static size_t used_words(); 302 static size_t used_words(Metaspace::MetadataType mdtype); 303 304 // Space committed for Metaspace 305 static size_t committed_words(); 306 static size_t committed_words(Metaspace::MetadataType mdtype); 307 308 // Space reserved for Metaspace 309 static size_t reserved_words(); 310 static size_t reserved_words(Metaspace::MetadataType mdtype); 311 312 // _bytes() variants for convenience... 313 static size_t used_bytes() { return used_words() * BytesPerWord; } 314 static size_t used_bytes(Metaspace::MetadataType mdtype) { return used_words(mdtype) * BytesPerWord; } 315 static size_t committed_bytes() { return committed_words() * BytesPerWord; } 316 static size_t committed_bytes(Metaspace::MetadataType mdtype) { return committed_words(mdtype) * BytesPerWord; } 317 static size_t reserved_bytes() { return reserved_words() * BytesPerWord; } 318 static size_t reserved_bytes(Metaspace::MetadataType mdtype) { return reserved_words(mdtype) * BytesPerWord; } 319 320 // (See JDK-8251342). Implement or Consolidate. 321 static MetaspaceChunkFreeListSummary chunk_free_list_summary(Metaspace::MetadataType mdtype) { 322 return MetaspaceChunkFreeListSummary(0,0,0,0,0,0,0,0); 323 } 324 325 // Log change in used metadata. 326 static void print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values); 327 328 // This will print out a basic metaspace usage report but 329 // unlike print_report() is guaranteed not to lock or to walk the CLDG. 330 static void print_basic_report(outputStream* st, size_t scale = 0); 331 332 // Prints a report about the current metaspace state. 333 // Function will walk the CLDG and will lock the expand lock; if that is not 334 // convenient, use print_basic_report() instead. 
335 static void print_report(outputStream* out, size_t scale = 0); 336 337 static void print_on(outputStream * out); 338 339 DEBUG_ONLY(static void verify(bool slow);) 340 341 }; 342 343 #endif // SHARE_MEMORY_METASPACE_HPP