1 /*
   2  * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 #ifndef SHARE_VM_MEMORY_METASPACE_HPP
  25 #define SHARE_VM_MEMORY_METASPACE_HPP
  26 
  27 #include "memory/allocation.hpp"
  28 #include "memory/memRegion.hpp"
  29 #include "runtime/virtualspace.hpp"
  30 #include "utilities/exceptions.hpp"
  31 
  32 // Metaspace
  33 //
  34 // Metaspaces are Arenas for the VM's metadata.
  35 // They are allocated one per class loader object, and one for the null
  36 // bootstrap class loader
  37 // Eventually for bootstrap loader we'll have a read-only section and read-write
  38 // to write for DumpSharedSpaces and read for UseSharedSpaces
  39 //
  40 //    block X ---+       +-------------------+
  41 //               |       |  Virtualspace     |
  42 //               |       |                   |
  43 //               |       |                   |
  44 //               |       |-------------------|
  45 //               |       || Chunk            |
  46 //               |       ||                  |
  47 //               |       ||----------        |
  48 //               +------>||| block 0 |       |
  49 //                       ||----------        |
  50 //                       ||| block 1 |       |
  51 //                       ||----------        |
  52 //                       ||                  |
  53 //                       |-------------------|
  54 //                       |                   |
  55 //                       |                   |
  56 //                       +-------------------+
  57 //
  58 

  59 class ClassLoaderData;
  60 class Metablock;

  61 class MetaWord;
  62 class Mutex;
  63 class outputStream;
  64 class SpaceManager;

  65 
  66 // Metaspaces each have a  SpaceManager and allocations
  67 // are done by the SpaceManager.  Allocations are done
  68 // out of the current Metachunk.  When the current Metachunk
  69 // is exhausted, the SpaceManager gets a new one from
  70 // the current VirtualSpace.  When the VirtualSpace is exhausted
  71 // the SpaceManager gets a new one.  The SpaceManager
  72 // also manages freelists of available Chunks.
  73 //
  74 // Currently the space manager maintains the list of
  75 // virtual spaces and the list of chunks in use.  Its
  76 // allocate() method returns a block for use as a
  77 // quantum of metadata.
  78 
  79 class VirtualSpaceList;
  80 
  81 class Metaspace : public CHeapObj<mtClass> {
  82   friend class VMStructs;
  83   friend class SpaceManager;
  84   friend class VM_CollectForMetadataAllocation;
  85   friend class MetaspaceGC;
  86   friend class MetaspaceAux;
  87 
  88  public:
  89   enum MetadataType {ClassType = 0,
  90                      NonClassType = ClassType + 1,
  91                      MetadataTypeCount = ClassType + 2
  92   };
  93   enum MetaspaceType {
  94     StandardMetaspaceType,
  95     BootMetaspaceType,
  96     ROMetaspaceType,
  97     ReadWriteMetaspaceType,
  98     AnonymousMetaspaceType,
  99     ReflectionMetaspaceType
 100   };
 101 
 102  private:
 103   void initialize(Mutex* lock, MetaspaceType type);
 104 




 105   // Align up the word size to the allocation word size
 106   static size_t align_word_size_up(size_t);
 107 
 108   // Aligned size of the metaspace.
 109   static size_t _class_metaspace_size;
 110 
 111   static size_t class_metaspace_size() {
 112     return _class_metaspace_size;
 113   }
 114   static void set_class_metaspace_size(size_t metaspace_size) {
 115     _class_metaspace_size = metaspace_size;
 116   }
 117 
 118   static size_t _first_chunk_word_size;
 119   static size_t _first_class_chunk_word_size;
 120 
 121   SpaceManager* _vsm;
 122   SpaceManager* vsm() const { return _vsm; }
 123 
 124   SpaceManager* _class_vsm;
 125   SpaceManager* class_vsm() const { return _class_vsm; }
 126 
 127   // Allocate space for metadata of type mdtype. This is space
 128   // within a Metachunk and is used by
 129   //   allocate(ClassLoaderData*, size_t, bool, MetadataType, TRAPS)
 130   // which returns a Metablock.
 131   MetaWord* allocate(size_t word_size, MetadataType mdtype);
 132 
 133   // Virtual Space lists for both classes and other metadata
 134   static VirtualSpaceList* _space_list;
 135   static VirtualSpaceList* _class_space_list;
 136 



 137   static VirtualSpaceList* space_list()       { return _space_list; }
 138   static VirtualSpaceList* class_space_list() { return _class_space_list; }
 139   static VirtualSpaceList* get_space_list(MetadataType mdtype) {
 140     assert(mdtype != MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
 141     return mdtype == ClassType ? class_space_list() : space_list();
 142   }
 143 







 144   // This is used by DumpSharedSpaces only, where only _vsm is used. So we will
 145   // maintain a single list for now.
 146   void record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size);
 147 
 148 #ifdef _LP64
 149   static void set_narrow_klass_base_and_shift(address metaspace_base, address cds_base);
 150 
 151   // Returns true if can use CDS with metaspace allocated as specified address.
 152   static bool can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base);
 153 
 154   static void allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base);
 155 
 156   static void initialize_class_space(ReservedSpace rs);
 157 #endif
 158 
 159   class AllocRecord : public CHeapObj<mtClass> {
 160   public:
 161     AllocRecord(address ptr, MetaspaceObj::Type type, int byte_size)
 162       : _next(NULL), _ptr(ptr), _type(type), _byte_size(byte_size) {}
 163     AllocRecord *_next;
 164     address _ptr;
 165     MetaspaceObj::Type _type;
 166     int _byte_size;
 167   };
 168 
 169   AllocRecord * _alloc_record_head;
 170   AllocRecord * _alloc_record_tail;
 171 
 172  public:
 173 
 174   Metaspace(Mutex* lock, MetaspaceType type);
 175   ~Metaspace();
 176 
 177   // Initialize globals for Metaspace
 178   static void global_initialize();
 179 
 180   static size_t first_chunk_word_size() { return _first_chunk_word_size; }
 181   static size_t first_class_chunk_word_size() { return _first_class_chunk_word_size; }
 182 
 183   char*  bottom() const;
 184   size_t used_words_slow(MetadataType mdtype) const;
 185   size_t free_words_slow(MetadataType mdtype) const;
 186   size_t capacity_words_slow(MetadataType mdtype) const;
 187 
 188   size_t used_bytes_slow(MetadataType mdtype) const;
 189   size_t capacity_bytes_slow(MetadataType mdtype) const;
 190 
 191   static Metablock* allocate(ClassLoaderData* loader_data, size_t word_size,
 192                              bool read_only, MetaspaceObj::Type type, TRAPS);
 193   void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
 194 
 195   MetaWord* expand_and_allocate(size_t size,
 196                                 MetadataType mdtype);
 197 
 198   static bool contains(const void *ptr);
 199   void dump(outputStream* const out) const;
 200 
 201   // Free empty virtualspaces

 202   static void purge();
 203 
 204   void print_on(outputStream* st) const;
 205   // Debugging support
 206   void verify();
 207 
 208   class AllocRecordClosure :  public StackObj {
 209   public:
 210     virtual void doit(address ptr, MetaspaceObj::Type type, int byte_size) = 0;
 211   };
 212 
 213   void iterate(AllocRecordClosure *closure);
 214 
 215   // Return TRUE only if UseCompressedClassPointers is True and DumpSharedSpaces is False.
 216   static bool using_class_space() {
 217     return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers && !DumpSharedSpaces);
 218   }
 219 
 220 };
 221 
// Statistics/accounting facade over all Metaspaces. Maintains running
// totals of capacity and used words per MetadataType so that callers
// (e.g. MetaspaceCounters) need not walk the classloader data graph.
class MetaspaceAux : AllStatic {
  static size_t free_chunks_total_words(Metaspace::MetadataType mdtype);

  // These methods iterate over the classloader data graph
  // for the given Metaspace type.  These are slow.
  static size_t used_bytes_slow(Metaspace::MetadataType mdtype);
  static size_t free_bytes_slow(Metaspace::MetadataType mdtype);
  static size_t capacity_bytes_slow(Metaspace::MetadataType mdtype);
  static size_t capacity_bytes_slow();

  // Running sum of space in all Metachunks that has been
  // allocated to a Metaspace.  This is used instead of
  // iterating over all the classloaders. One for each
  // type of Metadata
  static size_t _allocated_capacity_words[Metaspace:: MetadataTypeCount];
  // Running sum of space in all Metachunks that is currently
  // being used for metadata. One for each
  // type of Metadata.
  static size_t _allocated_used_words[Metaspace:: MetadataTypeCount];

 public:
  // Decrement and increment _allocated_capacity_words
  static void dec_capacity(Metaspace::MetadataType type, size_t words);
  static void inc_capacity(Metaspace::MetadataType type, size_t words);

  // Decrement and increment _allocated_used_words
  static void dec_used(Metaspace::MetadataType type, size_t words);
  static void inc_used(Metaspace::MetadataType type, size_t words);

  // Total of space allocated to metadata in all Metaspaces.
  // This sums the space used in each Metachunk by
  // iterating over the classloader data graph
  static size_t used_bytes_slow() {
    return used_bytes_slow(Metaspace::ClassType) +
           used_bytes_slow(Metaspace::NonClassType);
  }

  // Used by MetaspaceCounters
  static size_t free_chunks_total_words();
  static size_t free_chunks_total_bytes();
  static size_t free_chunks_total_bytes(Metaspace::MetadataType mdtype);

  // Fast accessors backed by the running sums above (words and bytes,
  // per type and totalled over both types).
  static size_t allocated_capacity_words(Metaspace::MetadataType mdtype) {
    return _allocated_capacity_words[mdtype];
  }
  static size_t allocated_capacity_words() {
    return allocated_capacity_words(Metaspace::NonClassType) +
           allocated_capacity_words(Metaspace::ClassType);
  }
  static size_t allocated_capacity_bytes(Metaspace::MetadataType mdtype) {
    return allocated_capacity_words(mdtype) * BytesPerWord;
  }
  static size_t allocated_capacity_bytes() {
    return allocated_capacity_words() * BytesPerWord;
  }

  static size_t allocated_used_words(Metaspace::MetadataType mdtype) {
    return _allocated_used_words[mdtype];
  }
  static size_t allocated_used_words() {
    return allocated_used_words(Metaspace::NonClassType) +
           allocated_used_words(Metaspace::ClassType);
  }
  static size_t allocated_used_bytes(Metaspace::MetadataType mdtype) {
    return allocated_used_words(mdtype) * BytesPerWord;
  }
  static size_t allocated_used_bytes() {
    return allocated_used_words() * BytesPerWord;
  }

  static size_t free_bytes();
  static size_t free_bytes(Metaspace::MetadataType mdtype);

  // Reserved and committed sizes of the underlying virtual spaces.
  static size_t reserved_bytes(Metaspace::MetadataType mdtype);
  static size_t reserved_bytes() {
    return reserved_bytes(Metaspace::ClassType) +
           reserved_bytes(Metaspace::NonClassType);
  }

  static size_t committed_bytes(Metaspace::MetadataType mdtype);
  static size_t committed_bytes() {
    return committed_bytes(Metaspace::ClassType) +
           committed_bytes(Metaspace::NonClassType);
  }

  static size_t min_chunk_size_words();
  static size_t min_chunk_size_bytes() {
    return min_chunk_size_words() * BytesPerWord;
  }

  // Print change in used metadata.
  static void print_metaspace_change(size_t prev_metadata_used);
  static void print_on(outputStream * out);
  static void print_on(outputStream * out, Metaspace::MetadataType mdtype);

  static void print_class_waste(outputStream* out);
  static void print_waste(outputStream* out);
  static void dump(outputStream* out);
  static void verify_free_chunks();
  // Checks that the values returned by allocated_capacity_bytes() and
  // capacity_bytes_slow() are the same.
  static void verify_capacity();
  static void verify_used();
  static void verify_metrics();
};
 327 
// Metaspaces are deallocated when their class loader is GC'ed.
// This class implements a policy for inducing GC's to recover
// Metaspaces.
 331 
 332 class MetaspaceGC : AllStatic {
 333 
 334   // The current high-water-mark for inducing a GC.  When
 335   // the capacity of all space in the virtual lists reaches this value,
 336   // a GC is induced and the value is increased.  This should be changed
 337   // to the space actually used for allocations to avoid affects of
 338   // fragmentation losses to partially used chunks.  Size is in words.
 339   static size_t _capacity_until_GC;
 340 
 341   // After a GC is done any allocation that fails should try to expand
 342   // the capacity of the Metaspaces.  This flag is set during attempts
 343   // to allocate in the VMGCOperation that does the GC.
 344   static bool _expand_after_GC;
 345 
 346   // For a CMS collection, signal that a concurrent collection should
 347   // be started.
 348   static bool _should_concurrent_collect;
 349 
 350   static uint _shrink_factor;
 351 
 352   static void set_capacity_until_GC(size_t v) { _capacity_until_GC = v; }
 353 
 354   static size_t shrink_factor() { return _shrink_factor; }
 355   void set_shrink_factor(uint v) { _shrink_factor = v; }
 356 
 357  public:
 358 
 359   static size_t capacity_until_GC() { return _capacity_until_GC; }
 360   static void inc_capacity_until_GC(size_t v) { _capacity_until_GC += v; }
 361   static void dec_capacity_until_GC(size_t v) {
 362     _capacity_until_GC = _capacity_until_GC > v ? _capacity_until_GC - v : 0;
 363   }
 364   static bool expand_after_GC()           { return _expand_after_GC; }
 365   static void set_expand_after_GC(bool v) { _expand_after_GC = v; }
 366 
 367   static bool should_concurrent_collect() { return _should_concurrent_collect; }
 368   static void set_should_concurrent_collect(bool v) {
 369     _should_concurrent_collect = v;
 370   }
 371 
 372   // The amount to increase the high-water-mark (_capacity_until_GC)
 373   static size_t delta_capacity_until_GC(size_t word_size);
 374 
 375   // It is expected that this will be called when the current capacity
 376   // has been used and a GC should be considered.
 377   static bool should_expand(VirtualSpaceList* vsl, size_t word_size);
 378 
 379   // Calculate the new high-water mark at which to induce
 380   // a GC.
 381   static void compute_new_size();
 382 };
 383 
 384 #endif // SHARE_VM_MEMORY_METASPACE_HPP
--- EOF ---