
src/hotspot/share/memory/metaspace.hpp

rev 60538 : imported patch jep387-core.patch

@@ -26,77 +26,35 @@
 
 #include "memory/allocation.hpp"
 #include "memory/memRegion.hpp"
 #include "memory/metaspaceChunkFreeListSummary.hpp"
 #include "memory/virtualspace.hpp"
-#include "memory/metaspace/metaspaceSizesSnapshot.hpp"
 #include "runtime/globals.hpp"
 #include "utilities/exceptions.hpp"
-
-// Metaspace
-//
-// Metaspaces are Arenas for the VM's metadata.
-// They are allocated one per class loader object, and one for the null
-// bootstrap class loader
-//
-//    block X ---+       +-------------------+
-//               |       |  Virtualspace     |
-//               |       |                   |
-//               |       |                   |
-//               |       |-------------------|
-//               |       || Chunk            |
-//               |       ||                  |
-//               |       ||----------        |
-//               +------>||| block 0 |       |
-//                       ||----------        |
-//                       ||| block 1 |       |
-//                       ||----------        |
-//                       ||                  |
-//                       |-------------------|
-//                       |                   |
-//                       |                   |
-//                       +-------------------+
-//
+#include "utilities/globalDefinitions.hpp"
 
 class ClassLoaderData;
+class MetaspaceShared;
 class MetaspaceTracer;
 class Mutex;
 class outputStream;
 
-class CollectedHeap;
-
 namespace metaspace {
-  class ChunkManager;
-  class ClassLoaderMetaspaceStatistics;
-  class Metablock;
-  class Metachunk;
-  class PrintCLDMetaspaceInfoClosure;
-  class SpaceManager;
-  class VirtualSpaceList;
-  class VirtualSpaceNode;
+  class MetaspaceArena;
+  class MetaspaceSizesSnapshot;
+  struct clms_stats_t;
 }
 
-// Metaspaces each have a  SpaceManager and allocations
-// are done by the SpaceManager.  Allocations are done
-// out of the current Metachunk.  When the current Metachunk
-// is exhausted, the SpaceManager gets a new one from
-// the current VirtualSpace.  When the VirtualSpace is exhausted
-// the SpaceManager gets a new one.  The SpaceManager
-// also manages freelists of available Chunks.
-//
-// Currently the space manager maintains the list of
-// virtual spaces and the list of chunks in use.  Its
-// allocate() method returns a block for use as a
-// quantum of metadata.
+////////////////// Metaspace ///////////////////////
 
 // Namespace for important central static functions
 // (auxiliary stuff goes into MetaspaceUtils)
 class Metaspace : public AllStatic {
 
   friend class MetaspaceShared;
 
- public:
+public:
   enum MetadataType {
     ClassType,
     NonClassType,
     MetadataTypeCount
   };

@@ -107,63 +65,19 @@
     ClassMirrorHolderMetaspaceType = BootMetaspaceType + 1,
     ReflectionMetaspaceType = ClassMirrorHolderMetaspaceType + 1,
     MetaspaceTypeCount
   };
 
- private:
-
-  // Align up the word size to the allocation word size
-  static size_t align_word_size_up(size_t);
-
-  // Aligned size of the metaspace.
-  static size_t _compressed_class_space_size;
-
-  static size_t compressed_class_space_size() {
-    return _compressed_class_space_size;
-  }
-
-  static void set_compressed_class_space_size(size_t size) {
-    _compressed_class_space_size = size;
-  }
-
-  static size_t _first_chunk_word_size;
-  static size_t _first_class_chunk_word_size;
+private:
 
-  static size_t _commit_alignment;
-  static size_t _reserve_alignment;
   DEBUG_ONLY(static bool   _frozen;)
 
-  // Virtual Space lists for both classes and other metadata
-  static metaspace::VirtualSpaceList* _space_list;
-  static metaspace::VirtualSpaceList* _class_space_list;
-
-  static metaspace::ChunkManager* _chunk_manager_metadata;
-  static metaspace::ChunkManager* _chunk_manager_class;
-
   static const MetaspaceTracer* _tracer;
 
   static bool _initialized;
 
- public:
-  static metaspace::VirtualSpaceList* space_list()       { return _space_list; }
-  static metaspace::VirtualSpaceList* class_space_list() { return _class_space_list; }
-  static metaspace::VirtualSpaceList* get_space_list(MetadataType mdtype) {
-    assert(mdtype != MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
-    return mdtype == ClassType ? class_space_list() : space_list();
-  }
-
-  static metaspace::ChunkManager* chunk_manager_metadata() { return _chunk_manager_metadata; }
-  static metaspace::ChunkManager* chunk_manager_class()    { return _chunk_manager_class; }
-  static metaspace::ChunkManager* get_chunk_manager(MetadataType mdtype) {
-    assert(mdtype != MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
-    return mdtype == ClassType ? chunk_manager_class() : chunk_manager_metadata();
-  }
-
-  // convenience function
-  static metaspace::ChunkManager* get_chunk_manager(bool is_class) {
-    return is_class ? chunk_manager_class() : chunk_manager_metadata();
-  }
+public:
 
   static const MetaspaceTracer* tracer() { return _tracer; }
   static void freeze() {
     assert(DumpSharedSpaces, "sanity");
     DEBUG_ONLY(_frozen = true;)

@@ -186,262 +100,162 @@
 
   // Given a prereserved space, use that to set up the compressed class space list.
   static void initialize_class_space(ReservedSpace rs);
 
   // Returns true if class space has been setup (initialize_class_space).
-  static bool class_space_is_initialized() { return _class_space_list != NULL; }
+  static bool class_space_is_initialized();
 
 #endif
 
  public:
 
   static void ergo_initialize();
   static void global_initialize();
   static void post_initialize();
 
-  static void verify_global_initialization();
-
-  static size_t first_chunk_word_size() { return _first_chunk_word_size; }
-  static size_t first_class_chunk_word_size() { return _first_class_chunk_word_size; }
+  // Alignment, in bytes, of metaspace mappings
+  static size_t reserve_alignment()       { return reserve_alignment_words() * BytesPerWord; }
+  // Alignment, in words, of metaspace mappings
+  static size_t reserve_alignment_words();
+
+  // The granularity at which Metaspace is committed and uncommitted.
+  // (Todo: Why does this have to be exposed?)
+  static size_t commit_alignment()        { return commit_alignment_words() * BytesPerWord; }
+  static size_t commit_alignment_words();
 
-  static size_t reserve_alignment()       { return _reserve_alignment; }
-  static size_t reserve_alignment_words() { return _reserve_alignment / BytesPerWord; }
-  static size_t commit_alignment()        { return _commit_alignment; }
-  static size_t commit_alignment_words()  { return _commit_alignment / BytesPerWord; }
+  // The largest possible single allocation
+  static size_t max_allocation_word_size();
 
   static MetaWord* allocate(ClassLoaderData* loader_data, size_t word_size,
                             MetaspaceObj::Type type, TRAPS);
 
   static bool contains(const void* ptr);
   static bool contains_non_shared(const void* ptr);
 
   // Free empty virtualspaces
-  static void purge(MetadataType mdtype);
   static void purge();
 
   static void report_metadata_oome(ClassLoaderData* loader_data, size_t word_size,
-                                   MetaspaceObj::Type type, MetadataType mdtype, TRAPS);
-
-  static const char* metadata_type_name(Metaspace::MetadataType mdtype);
+                                   MetaspaceObj::Type type, Metaspace::MetadataType mdtype, TRAPS);
 
   static void print_compressed_class_space(outputStream* st) NOT_LP64({});
 
   // Return TRUE only if UseCompressedClassPointers is True.
   static bool using_class_space() {
     return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers);
   }
 
-  static bool is_class_space_allocation(MetadataType mdType) {
-    return mdType == ClassType && using_class_space();
-  }
-
-  static bool initialized() { return _initialized; }
+  static bool initialized();
 
 };
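
For orientation, a minimal sketch (illustrative only, not part of this patch) of how
metadata might be requested through this central entry point. The caller context, the
loader_data and word_size variables, the MetaspaceObj::SymbolType tag and the use of
CHECK_NULL are assumptions:

    // Request word_size words of metadata on behalf of a class loader.
    // On failure, allocate() attempts GC/expansion and ultimately reports
    // a metadata OOME through the TRAPS mechanism.
    MetaWord* p = Metaspace::allocate(loader_data, word_size,
                                      MetaspaceObj::SymbolType, CHECK_NULL);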
 
-// Manages the metaspace portion belonging to a class loader
+// A ClassLoaderMetaspace sits between a CLD and its MetaspaceArena(s).
+//
+// A CLD owns one MetaspaceArena if compressed class space is off, and two if it is on
+// (one for allocations of Klass* structures from class space, one for everything else
+//  from non-class space).
+//
+// ClassLoaderMetaspace only exists to hide this logic from upper layers:
+//
+// +------+       +----------------------+       +-------------------+
+// | CLD  | --->  | ClassLoaderMetaspace | ----> | (non class) Arena |
+// +------+       +----------------------+  |    +-------------------+     allocation top
+//                                          |       |                        v
+//                                          |       + chunk -- chunk ... -- chunk
+//                                          |
+//                                          |    +-------------------+
+//                                          +--> | (class) Arena     |
+//                                               +-------------------+
+//                                                  |
+//                                                  + chunk ... chunk
+//                                                               ^
+//                                                               alloc top
+//
 class ClassLoaderMetaspace : public CHeapObj<mtClass> {
-  friend class CollectedHeap; // For expand_and_allocate()
-  friend class ZCollectedHeap; // For expand_and_allocate()
-  friend class ShenandoahHeap; // For expand_and_allocate()
-  friend class Metaspace;
-  friend class MetaspaceUtils;
-  friend class metaspace::PrintCLDMetaspaceInfoClosure;
-  friend class VM_CollectForMetadataAllocation; // For expand_and_allocate()
 
- private:
-
-  void initialize(Mutex* lock, Metaspace::MetaspaceType type);
-
-  // Initialize the first chunk for a Metaspace.  Used for
-  // special cases such as the boot class loader, reflection
-  // class loader and hidden class loader.
-  void initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype);
-  metaspace::Metachunk* get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype);
+  // A reference to an outside lock, held by the CLD.
+  Mutex* const _lock;
 
   const Metaspace::MetaspaceType _space_type;
-  Mutex* const  _lock;
-  metaspace::SpaceManager* _vsm;
-  metaspace::SpaceManager* _class_vsm;
 
-  metaspace::SpaceManager* vsm() const { return _vsm; }
-  metaspace::SpaceManager* class_vsm() const { return _class_vsm; }
-  metaspace::SpaceManager* get_space_manager(Metaspace::MetadataType mdtype) {
-    assert(mdtype != Metaspace::MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
-    return mdtype == Metaspace::ClassType ? class_vsm() : vsm();
-  }
+  // Arena for allocations from non-class metaspace
+  //  (or, if -XX:-UseCompressedClassPointers, for all allocations).
+  metaspace::MetaspaceArena* _non_class_space_arena;
+
+  // Arena for allocations from class space
+  //  (NULL if -XX:-UseCompressedClassPointers).
+  metaspace::MetaspaceArena* _class_space_arena;
 
   Mutex* lock() const { return _lock; }
+  metaspace::MetaspaceArena* non_class_space_arena() const   { return _non_class_space_arena; }
+  metaspace::MetaspaceArena* class_space_arena() const       { return _class_space_arena; }
 
-  MetaWord* expand_and_allocate(size_t size, Metaspace::MetadataType mdtype);
-
-  size_t class_chunk_size(size_t word_size);
+  metaspace::MetaspaceArena* get_arena(bool is_class) {
+    return is_class ? class_space_arena() : non_class_space_arena();
+  }
 
-  // Adds to the given statistic object. Must be locked with CLD metaspace lock.
-  void add_to_statistics_locked(metaspace::ClassLoaderMetaspaceStatistics* out) const;
+public:
 
-  Metaspace::MetaspaceType space_type() const { return _space_type; }
+  ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType space_type);
 
- public:
-
-  ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type);
   ~ClassLoaderMetaspace();
 
-  // Allocate space for metadata of type mdtype. This is space
-  // within a Metachunk and is used by
-  //   allocate(ClassLoaderData*, size_t, bool, MetadataType, TRAPS)
-  MetaWord* allocate(size_t word_size, Metaspace::MetadataType mdtype);
-
-  size_t allocated_blocks_bytes() const;
-  size_t allocated_chunks_bytes() const;
-
-  void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
-
-  void print_on(outputStream* st) const;
-  // Debugging support
-  void verify();
-
-  // Adds to the given statistic object. Will lock with CLD metaspace lock.
-  void add_to_statistics(metaspace::ClassLoaderMetaspaceStatistics* out) const;
-
-}; // ClassLoaderMetaspace
-
-class MetaspaceUtils : AllStatic {
+  Metaspace::MetaspaceType space_type() const { return _space_type; }
 
-  // Spacemanager updates running counters.
-  friend class metaspace::SpaceManager;
+  // Allocate word_size words from Metaspace.
+  MetaWord* allocate(size_t word_size, Metaspace::MetadataType mdType);
 
-  // Special access for error reporting (checks without locks).
-  friend class oopDesc;
-  friend class Klass;
-
-  // Running counters for statistics concerning in-use chunks.
-  // Note: capacity = used + free + waste + overhead. Note that we do not
-  // count free and waste. Their sum can be deduces from the three other values.
-  // For more details, one should call print_report() from within a safe point.
-  static size_t _capacity_words [Metaspace:: MetadataTypeCount];
-  static size_t _overhead_words [Metaspace:: MetadataTypeCount];
-  static volatile size_t _used_words [Metaspace:: MetadataTypeCount];
-
-  // Atomically decrement or increment in-use statistic counters
-  static void dec_capacity(Metaspace::MetadataType mdtype, size_t words);
-  static void inc_capacity(Metaspace::MetadataType mdtype, size_t words);
-  static void dec_used(Metaspace::MetadataType mdtype, size_t words);
-  static void inc_used(Metaspace::MetadataType mdtype, size_t words);
-  static void dec_overhead(Metaspace::MetadataType mdtype, size_t words);
-  static void inc_overhead(Metaspace::MetadataType mdtype, size_t words);
-
-
-  // Getters for the in-use counters.
-  static size_t capacity_words(Metaspace::MetadataType mdtype)        { return _capacity_words[mdtype]; }
-  static size_t used_words(Metaspace::MetadataType mdtype)            { return _used_words[mdtype]; }
-  static size_t overhead_words(Metaspace::MetadataType mdtype)        { return _overhead_words[mdtype]; }
+  // Attempt to raise the GC threshold enough to accommodate at least another word_size
+  // words, then allocate. Returns NULL on failure. Used during Metaspace GC.
+  MetaWord* expand_and_allocate(size_t word_size, Metaspace::MetadataType mdType);
 
-  static size_t free_chunks_total_words(Metaspace::MetadataType mdtype);
+  // Prematurely returns a metaspace allocation to the _block_freelists
+  // because it is not needed anymore.
+  void deallocate(MetaWord* ptr, size_t word_size, bool is_class);
 
-  // Helper for print_xx_report.
-  static void print_vs(outputStream* out, size_t scale);
+  // Update statistics. This walks all in-use chunks.
+  void add_to_statistics(metaspace::clms_stats_t* out) const;
 
-public:
+  DEBUG_ONLY(void verify() const;)
 
-  // Collect used metaspace statistics. This involves walking the CLDG. The resulting
-  // output will be the accumulated values for all live metaspaces.
-  // Note: method does not do any locking.
-  static void collect_statistics(metaspace::ClassLoaderMetaspaceStatistics* out);
-
-  // Used by MetaspaceCounters
-  static size_t free_chunks_total_words();
-  static size_t free_chunks_total_bytes();
-  static size_t free_chunks_total_bytes(Metaspace::MetadataType mdtype);
-
-  static size_t capacity_words() {
-    return capacity_words(Metaspace::NonClassType) +
-           capacity_words(Metaspace::ClassType);
-  }
-  static size_t capacity_bytes(Metaspace::MetadataType mdtype) {
-    return capacity_words(mdtype) * BytesPerWord;
-  }
-  static size_t capacity_bytes() {
-    return capacity_words() * BytesPerWord;
-  }
+  // This only exists for JFR and jcmd VM.classloader_stats. We may want to
+  //  change this. Capacity as a stat is of questionable use since it may
+  //  contain committed and uncommitted areas. For now we do this to maintain
+  //  backward compatibility with JFR.
+  void calculate_jfr_stats(size_t* p_used_bytes, size_t* p_capacity_bytes) const;
 
-  static size_t used_words() {
-    return used_words(Metaspace::NonClassType) +
-           used_words(Metaspace::ClassType);
-  }
-  static size_t used_bytes(Metaspace::MetadataType mdtype) {
-    return used_words(mdtype) * BytesPerWord;
-  }
-  static size_t used_bytes() {
-    return used_words() * BytesPerWord;
-  }
+}; // end: ClassLoaderMetaspace
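
A minimal usage sketch (illustrative only, not part of this patch), assuming a
CLD-side caller that owns a metaspace_lock Mutex and some word_size; the chosen
MetaspaceType and MetadataType values are just examples:

    // One ClassLoaderMetaspace per class loader, guarded by the loader's lock.
    ClassLoaderMetaspace* clms =
        new ClassLoaderMetaspace(metaspace_lock, Metaspace::ReflectionMetaspaceType);
    MetaWord* p = clms->allocate(word_size, Metaspace::NonClassType);
    // If the block becomes unneeded before the loader dies, hand it back early:
    clms->deallocate(p, word_size, /*is_class*/ false);
    // Deleting the ClassLoaderMetaspace releases its arena(s) and their chunks.
    delete clms;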
 
-  // Space committed but yet unclaimed by any class loader.
-  static size_t free_in_vs_bytes();
-  static size_t free_in_vs_bytes(Metaspace::MetadataType mdtype);
-
-  static size_t reserved_bytes(Metaspace::MetadataType mdtype);
-  static size_t reserved_bytes() {
-    return reserved_bytes(Metaspace::ClassType) +
-           reserved_bytes(Metaspace::NonClassType);
-  }
 
-  static size_t committed_bytes(Metaspace::MetadataType mdtype);
-  static size_t committed_bytes() {
-    return committed_bytes(Metaspace::ClassType) +
-           committed_bytes(Metaspace::NonClassType);
-  }
+////////////////// MetaspaceGC ///////////////////////
 
-  static size_t min_chunk_size_words();
+// Metaspaces are deallocated when their class loaders are GC'ed.
+// This class implements a policy for inducing GCs to recover
+// Metaspaces.
 
-  // Flags for print_report().
-  enum ReportFlag {
-    // Show usage by class loader.
-    rf_show_loaders                 = (1 << 0),
-    // Breaks report down by chunk type (small, medium, ...).
-    rf_break_down_by_chunktype      = (1 << 1),
-    // Breaks report down by space type (hidden, reflection, ...).
-    rf_break_down_by_spacetype      = (1 << 2),
-    // Print details about the underlying virtual spaces.
-    rf_show_vslist                  = (1 << 3),
-    // Print metaspace map.
-    rf_show_vsmap                   = (1 << 4),
-    // If show_loaders: show loaded classes for each loader.
-    rf_show_classes                 = (1 << 5)
+class MetaspaceGCThresholdUpdater : public AllStatic {
+ public:
+  enum Type {
+    ComputeNewSize,
+    ExpandAndAllocate,
+    Last
   };
 
-  // This will print out a basic metaspace usage report but
-  // unlike print_report() is guaranteed not to lock or to walk the CLDG.
-  static void print_basic_report(outputStream* st, size_t scale);
-
-  // Prints a report about the current metaspace state.
-  // Optional parts can be enabled via flags.
-  // Function will walk the CLDG and will lock the expand lock; if that is not
-  // convenient, use print_basic_report() instead.
-  static void print_report(outputStream* out, size_t scale = 0, int flags = 0);
-
-  static bool has_chunk_free_list(Metaspace::MetadataType mdtype);
-  static MetaspaceChunkFreeListSummary chunk_free_list_summary(Metaspace::MetadataType mdtype);
-
-  // Log change in used metadata.
-  static void print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values);
-  static void print_on(outputStream * out);
-
-  // Prints an ASCII representation of the given space.
-  static void print_metaspace_map(outputStream* out, Metaspace::MetadataType mdtype);
-
-  static void dump(outputStream* out);
-  static void verify_free_chunks();
-  // Check internal counters (capacity, used).
-  static void verify_metrics();
+  static const char* to_string(MetaspaceGCThresholdUpdater::Type updater) {
+    switch (updater) {
+      case ComputeNewSize:
+        return "compute_new_size";
+      case ExpandAndAllocate:
+        return "expand_and_allocate";
+      default:
+        assert(false, "Got bad updater: %d", (int) updater);
+        return NULL;
+    };
+  }
 };
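
Illustrative only (the log tag set and the surrounding code are assumptions): the
updater name is intended for tracing which policy raised the GC threshold, e.g.:

    log_trace(gc, metaspace)("Threshold raised by %s",
        MetaspaceGCThresholdUpdater::to_string(MetaspaceGCThresholdUpdater::ExpandAndAllocate));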
 
-// Metaspace are deallocated when their class loader are GC'ed.
-// This class implements a policy for inducing GC's to recover
-// Metaspaces.
-
-class MetaspaceGC : AllStatic {
+class MetaspaceGC : public AllStatic {
 
   // The current high-water-mark for inducing a GC.
   // When committed memory of all metaspaces reaches this value,
   // a GC is induced and the value is increased. Size is in bytes.
   static volatile size_t _capacity_until_GC;

@@ -475,6 +289,55 @@
   // Calculate the new high-water mark at which to induce
   // a GC.
   static void compute_new_size();
 };
 
+
+
+
+class MetaspaceUtils : AllStatic {
+public:
+
+  // Committed space actually in use by Metadata
+  static size_t used_words();
+  static size_t used_words(Metaspace::MetadataType mdtype);
+
+  // Space committed for Metaspace
+  static size_t committed_words();
+  static size_t committed_words(Metaspace::MetadataType mdtype);
+
+  // Space reserved for Metaspace
+  static size_t reserved_words();
+  static size_t reserved_words(Metaspace::MetadataType mdtype);
+
+  // _bytes() variants for convenience...
+  static size_t used_bytes()                                    { return used_words() * BytesPerWord; }
+  static size_t used_bytes(Metaspace::MetadataType mdtype)      { return used_words(mdtype) * BytesPerWord; }
+  static size_t committed_bytes()                               { return committed_words() * BytesPerWord; }
+  static size_t committed_bytes(Metaspace::MetadataType mdtype) { return committed_words(mdtype) * BytesPerWord; }
+  static size_t reserved_bytes()                                { return reserved_words() * BytesPerWord; }
+  static size_t reserved_bytes(Metaspace::MetadataType mdtype)  { return reserved_words(mdtype) * BytesPerWord; }
+
+  // (See JDK-8251342.) Implement or consolidate.
+  static MetaspaceChunkFreeListSummary chunk_free_list_summary(Metaspace::MetadataType mdtype) {
+    return MetaspaceChunkFreeListSummary(0,0,0,0,0,0,0,0);
+  }
+
+  // Log change in used metadata.
+  static void print_metaspace_change(const metaspace::MetaspaceSizesSnapshot& pre_meta_values);
+
+  // Prints a basic metaspace usage report. Unlike print_report(), this is
+  // guaranteed not to lock or to walk the CLDG.
+  static void print_basic_report(outputStream* st, size_t scale = 0);
+
+  // Prints a report about the current metaspace state.
+  // Function will walk the CLDG and will lock the expand lock; if that is not
+  // convenient, use print_basic_report() instead.
+  static void print_report(outputStream* out, size_t scale = 0);
+
+  static void print_on(outputStream * out);
+
+  DEBUG_ONLY(static void verify(bool slow);)
+
+};
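
A minimal sketch (illustrative only, not part of this patch) of querying these
counters, e.g. from monitoring or logging code; the output stream and the choice
of scale are assumptions:

    outputStream* st = tty;
    st->print_cr("Metaspace: used " SIZE_FORMAT "K, committed " SIZE_FORMAT "K, "
                 "reserved " SIZE_FORMAT "K",
                 MetaspaceUtils::used_bytes() / K,
                 MetaspaceUtils::committed_bytes() / K,
                 MetaspaceUtils::reserved_bytes() / K);
    // Or use the ready-made summary, which neither locks nor walks the CLDG:
    MetaspaceUtils::print_basic_report(st, K);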
+
 #endif // SHARE_MEMORY_METASPACE_HPP