/*
 * Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
#ifndef SHARE_MEMORY_METASPACE_HPP
#define SHARE_MEMORY_METASPACE_HPP

#include "memory/allocation.hpp"
#include "memory/metaspace/metaspaceEnums.hpp"
#include "memory/metaspaceChunkFreeListSummary.hpp"
#include "memory/virtualspace.hpp"
#include "utilities/exceptions.hpp"
#include "utilities/globalDefinitions.hpp"

// Metaspace
//
// Metaspaces are Arenas for the VM's metadata.
// They are allocated one per class loader object, and one for the null
// bootstrap class loader.
//
//    block X ---+       +-------------------+
//               |       |  Virtualspace     |
//               |       |                   |
//               |       |                   |
//               |       |-------------------|
//               |       || Chunk            |
//               |       ||                  |
//               |       ||----------        |
//               +------>||| block 0 |       |
//                       ||----------        |
//                       ||| block 1 |       |
//                       ||----------        |
//                       ||                  |
//                       |-------------------|
//                       |                   |
//                       |                   |
//                       +-------------------+
//

class ClassLoaderData;
class MetaspaceShared;
class MetaspaceTracer;
class outputStream;


namespace metaspace {
class MetaspaceSizesSnapshot;
}

////////////////// Metaspace ///////////////////////

// Metaspaces each have a SpaceManager and allocations
// are done by the SpaceManager.  Allocations are done
// out of the current Metachunk.  When the current Metachunk
// is exhausted, the SpaceManager gets a new one from
// the current VirtualSpace.  When the VirtualSpace is exhausted,
// the SpaceManager gets a new one.  The SpaceManager
// also manages freelists of available Chunks.
//
// Currently the space manager maintains the list of
// virtual spaces and the list of chunks in use.  Its
// allocate() method returns a block for use as a
// quantum of metadata.
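//
// A minimal usage sketch (the variable names below are illustrative, not taken
// from this file): callers obtain metadata blocks through Metaspace::allocate(),
// declared further down, which may trigger a GC and, if the request still
// cannot be satisfied, reports a metadata OOME:
//
//   MetaWord* p = Metaspace::allocate(loader_data, word_size,
//                                     MetaspaceObj::ClassType, THREAD);
//   if (p == NULL) {
//     // allocation failed; an exception is pending on THREAD
//   }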

// Namespace for important central static functions
// (auxiliary stuff goes into MetaspaceUtils)
class Metaspace : public AllStatic {

  friend class MetaspaceShared;

  // Base and size of the compressed class space.
  static MetaWord* _compressed_class_space_base;
  static size_t _compressed_class_space_size;

  static size_t _commit_alignment;
  static size_t _reserve_alignment;
  DEBUG_ONLY(static bool   _frozen;)

  static const MetaspaceTracer* _tracer;

  static bool _initialized;

  static MetaWord* compressed_class_space_base()              { return _compressed_class_space_base; }
  static size_t compressed_class_space_size()                 { return _compressed_class_space_size; }

public:

  static const MetaspaceTracer* tracer() { return _tracer; }
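  // freeze() is expected to be called by MetaspaceShared once the CDS dump has
  // collected all metadata; afterwards assert_not_frozen() guards operations
  // that would still mutate Metaspace (debug-only bookkeeping).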
  static void freeze() {
    assert(DumpSharedSpaces, "sanity");
    DEBUG_ONLY(_frozen = true;)
  }
  static void assert_not_frozen() {
    assert(!_frozen, "sanity");
  }
#ifdef _LP64
  static void allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base);
#endif

 private:

#ifdef _LP64
  static void set_narrow_klass_base_and_shift(address metaspace_base, address cds_base);

  // Returns true if CDS can be used with the metaspace allocated at the specified address.
  static bool can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base);

  static void initialize_class_space(ReservedSpace rs);
#endif

 public:

  static void ergo_initialize();
  static void global_initialize();
  static void post_initialize();

  // The alignment at which Metaspace mappings are reserved.
  static size_t reserve_alignment()       { return _reserve_alignment; }
  static size_t reserve_alignment_words() { return _reserve_alignment / BytesPerWord; }

  // The granularity at which Metaspace is committed and uncommitted.
  static size_t commit_alignment()        { return _commit_alignment; }
  static size_t commit_words()            { return _commit_alignment / BytesPerWord; }

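  // Allocates word_size words of metadata on behalf of loader_data. On failure
  // this is expected to attempt to reclaim or expand metaspace and, if the
  // request still cannot be satisfied, to report a metadata OOME
  // (see report_metadata_oome() below) and return NULL.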
  static MetaWord* allocate(ClassLoaderData* loader_data, size_t word_size,
                            MetaspaceObj::Type type, TRAPS);

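  // Pointer classification: contains() answers whether ptr lies within any
  // metaspace mapping; contains_non_shared() is expected to exclude the
  // shared (CDS) metaspace.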
  static bool contains(const void* ptr);
  static bool contains_non_shared(const void* ptr);

  // Free empty virtual spaces.
  static void purge();

  static void report_metadata_oome(ClassLoaderData* loader_data, size_t word_size,
                                   MetaspaceObj::Type type, metaspace::MetadataType mdtype, TRAPS);

  static void print_compressed_class_space(outputStream* st, const char* requested_addr = 0) NOT_LP64({});

  // Returns true only if UseCompressedClassPointers is true.
  static bool using_class_space() {
    return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers);
  }

  static bool initialized() { return _initialized; }

};

////////////////// MetaspaceGC ///////////////////////

// Metaspaces are deallocated when their class loaders are GC'ed.
// This class implements a policy for inducing GC's to recover
// Metaspaces.

class MetaspaceGCThresholdUpdater : public AllStatic {
 public:
  enum Type {
    ComputeNewSize,
    ExpandAndAllocate,
    Last
  };

  static const char* to_string(MetaspaceGCThresholdUpdater::Type updater) {
    switch (updater) {
      case ComputeNewSize:
        return "compute_new_size";
      case ExpandAndAllocate:
        return "expand_and_allocate";
      default:
        assert(false, "Got bad updater: %d", (int) updater);
        return NULL;
    }
  }
};

class MetaspaceGC : public AllStatic {

  // The current high-water-mark for inducing a GC.
  // When committed memory of all metaspaces reaches this value,
  // a GC is induced and the value is increased. Size is in bytes.
  static volatile size_t _capacity_until_GC;

  // For a CMS collection, signal that a concurrent collection should
  // be started.
  static bool _should_concurrent_collect;

  static uint _shrink_factor;

  static uint shrink_factor() { return _shrink_factor; }
  static void set_shrink_factor(uint v) { _shrink_factor = v; }

 public:

  static void initialize();
  static void post_initialize();

  static size_t capacity_until_GC();
  static bool inc_capacity_until_GC(size_t v,
                                    size_t* new_cap_until_GC = NULL,
                                    size_t* old_cap_until_GC = NULL,
                                    bool* can_retry = NULL);
  static size_t dec_capacity_until_GC(size_t v);
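
  // A minimal usage sketch (variable names are illustrative): before expanding
  // metaspace by delta_bytes, a caller would typically try to raise the
  // high-water-mark first, e.g.
  //
  //   size_t new_cap = 0, old_cap = 0;
  //   bool can_retry = true;
  //   if (MetaspaceGC::inc_capacity_until_GC(delta_bytes, &new_cap, &old_cap, &can_retry)) {
  //     // HWM raised successfully; proceed with committing more memory
  //   }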

  static bool should_concurrent_collect() { return _should_concurrent_collect; }
  static void set_should_concurrent_collect(bool v) {
    _should_concurrent_collect = v;
  }

  // The amount to increase the high-water-mark (_capacity_until_GC).
  static size_t delta_capacity_until_GC(size_t bytes);

  // Tells whether we can expand metaspace without hitting set limits.
  static bool can_expand(size_t words, bool is_class);

  // Returns the amount that we can expand without hitting a GC,
  // measured in words.
  static size_t allowed_expansion();

  // Calculate the new high-water mark at which to induce
  // a GC.
  static void compute_new_size();
};


class MetaspaceUtils : AllStatic {
public:

  // Committed space actually in use by Metadata
  static size_t used_words();
  static size_t used_words(metaspace::MetadataType mdtype);

  // Space committed for Metaspace
  static size_t committed_words();
  static size_t committed_words(metaspace::MetadataType mdtype);

  // Space reserved for Metaspace
  static size_t reserved_words();
  static size_t reserved_words(metaspace::MetadataType mdtype);

  // _bytes() variants for convenience...
  static size_t used_bytes()                                    { return used_words() * BytesPerWord; }
  static size_t used_bytes(metaspace::MetadataType mdtype)      { return used_words(mdtype) * BytesPerWord; }
  static size_t committed_bytes()                               { return committed_words() * BytesPerWord; }
  static size_t committed_bytes(metaspace::MetadataType mdtype) { return committed_words(mdtype) * BytesPerWord; }
  static size_t reserved_bytes()                                { return reserved_words() * BytesPerWord; }
  static size_t reserved_bytes(metaspace::MetadataType mdtype)  { return reserved_words(mdtype) * BytesPerWord; }
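
  // A minimal usage sketch (log tag and format choices are illustrative):
  //
  //   log_info(metaspace)("used " SIZE_FORMAT "K, committed " SIZE_FORMAT "K, reserved " SIZE_FORMAT "K",
  //                       MetaspaceUtils::used_bytes() / K,
  //                       MetaspaceUtils::committed_bytes() / K,
  //                       MetaspaceUtils::reserved_bytes() / K);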

  // Todo. Consolidate.
  // Committed space in freelists
  static size_t free_chunks_total_words(metaspace::MetadataType mdtype);

  // Todo. Implement or Consolidate.
  static MetaspaceChunkFreeListSummary chunk_free_list_summary(metaspace::MetadataType mdtype) {
    return MetaspaceChunkFreeListSummary(0,0,0,0,0,0,0,0);
  }

  // Log change in used metadata.
  static void print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values);

  // Prints an ASCII representation of the given space.
  static void print_metaspace_map(outputStream* out, metaspace::MetadataType mdtype);

  // This will print out a basic metaspace usage report but,
  // unlike print_full_report(), is guaranteed not to lock or to walk the CLDG.
  static void print_basic_report(outputStream* st, size_t scale = 0);

  // Prints a report about the current metaspace state.
  // This function will walk the CLDG and will lock the expand lock; if that is not
  // convenient, use print_basic_report() instead.
  static void print_full_report(outputStream* out, size_t scale = 0);
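
  // A minimal usage sketch: print the lock-free summary to the default output
  // stream (scale = 0 is assumed to let the report choose display units):
  //
  //   MetaspaceUtils::print_basic_report(tty, 0);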

  static void print_on(outputStream* out);

  DEBUG_ONLY(static void verify(bool slow);)

};

#endif // SHARE_MEMORY_METASPACE_HPP