/*
 * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_MEMORY_METASPACE_CHUNKTREE_HPP
#define SHARE_MEMORY_METASPACE_CHUNKTREE_HPP

#include "memory/allocation.hpp"
#include "memory/metaspace/abstractPool.hpp"
#include "memory/metaspace/chunkLevel.hpp"
#include "memory/metaspace/metachunk.hpp"

namespace metaspace {

// Chunks live in a binary tree: inner nodes (btnode_t) have exactly two
// children, and the chunks themselves form the leaves. The root of a tree is
// either a single unsplit root chunk or an inner node.
//

class ChunkClosure {
 public:
  // Return false to cancel traversal.
  virtual bool do_chunk(Metachunk* chunk) = 0;
};
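
// A minimal usage sketch (illustrative only, not part of the API): a closure
// that counts the chunks visited by iterate_chunks(). The name
// CountingChunkClosure is made up for this example.
//
//   struct CountingChunkClosure : public ChunkClosure {
//     int count;
//     CountingChunkClosure() : count(0) {}
//     // Never cancel traversal; just count each visited chunk.
//     bool do_chunk(Metachunk* chunk) { count++; return true; }
//   };
//
//   // CountingChunkClosure cc;
//   // tree.iterate_chunks(&cc);  // afterwards, cc.count holds the chunk count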


class ChunkTree : public CHeapObj<mtClass> {

  typedef u2 ref_t;

  // Root is either a reference to a chunk (in that case, a root chunk of max. size)
  // or to a node.
  ref_t _root;

  struct btnode_t {

    ref_t parent;
    ref_t child[2];

  };

  typedef AbstractPool<btnode_t, ref_t> NodePoolType;
  typedef AbstractPool<Metachunk, ref_t> ChunkPoolType;
  NodePoolType _nodePool;
  ChunkPoolType _chunkPool;

  // The highest bit of a reference encodes the type:
  // bit 15:  0 - reference is a btnode_t
  //          1 - reference is a chunk
  // This also means a reference has to get by with 15 bits, which covers 32K,
  // enough for both chunk headers and nodes within one root chunk area.
  static const u2 highest_possible_index_value = 0x7FFF;

  static u2 get_raw_index_from_reference(ref_t ref)     { return ref & 0x7FFF; }

  static bool reference_is_node(ref_t ref)              { return (ref & 0x8000) == 0; }
  static ref_t make_node_reference(u2 raw_idx) {
    assert(raw_idx <= highest_possible_index_value, "invalid index");
    return raw_idx;
  }

  static bool reference_is_chunk(ref_t ref)             { return (ref & 0x8000) != 0; }
  static ref_t make_chunk_reference(u2 raw_idx) {
    assert(raw_idx <= highest_possible_index_value, "invalid index");
    return 0x8000 | raw_idx;
  }
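
  // Worked example (illustrative): for raw index 0x0005,
  // make_node_reference() yields 0x0005 (bit 15 clear) and
  // make_chunk_reference() yields 0x8005 (bit 15 set);
  // get_raw_index_from_reference() recovers 0x0005 from either.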

#ifdef ASSERT
  void check_is_valid_node_ref(ref_t ref) const;
  void check_is_valid_chunk_ref(ref_t ref) const;
#endif

  // Given a reference we assume to be a node, resolve it to the node pointer.
  // Returns NULL if the reference is not a node ref but a chunk ref.
  btnode_t* resolve_reference_to_node_or_null(ref_t ref) const {
    if (reference_is_node(ref)) {
      return _nodePool.elem_at_index(get_raw_index_from_reference(ref));
    }
    return NULL;
  }

  // Given a reference we know to be a node, resolve it to the node pointer.
  btnode_t* resolve_reference_to_node(ref_t ref) const {
    btnode_t* n = resolve_reference_to_node_or_null(ref);
    assert(n != NULL, "Not a node ref.");
    return n;
  }

  // Allocate a new node from the node pool. The node is uninitialized.
  // Returns a pointer to the new node.
  btnode_t* allocate_new_node() {
    return _nodePool.allocate_element();
  }

  // Given a node pointer, return its correctly encoded reference.
  ref_t encode_reference_for_node(const btnode_t* n) const {
    const u2 raw_idx = _nodePool.index_for_elem(n);
    return make_node_reference(raw_idx);
  }

  // Release a node to the pool.
  void release_node(btnode_t* n) {
    _nodePool.return_element(n);
  }

  // Given a reference we assume to be a chunk, resolve it to the chunk pointer.
  // Returns NULL if the reference is not a chunk reference (but a node reference).
  Metachunk* resolve_reference_to_chunk_or_null(ref_t ref) const {
    if (reference_is_chunk(ref)) {
      return _chunkPool.elem_at_index(get_raw_index_from_reference(ref));
    }
    return NULL;
  }

  // Given a reference we know to be a chunk, resolve it to the chunk pointer.
  Metachunk* resolve_reference_to_chunk(ref_t ref) const {
    Metachunk* c = resolve_reference_to_chunk_or_null(ref);
    assert(c != NULL, "Not a chunk ref.");
    return c;
  }

  // Allocate a new chunk header from the chunk pool. The header is uninitialized.
  // Returns a pointer to the new chunk header.
  Metachunk* allocate_new_chunk() {
    return _chunkPool.allocate_element();
  }

  // Given a chunk pointer, return its correctly encoded reference.
  ref_t encode_reference_for_chunk(Metachunk* c) const {
    const u2 raw_idx = _chunkPool.index_for_elem(c);
    return make_chunk_reference(raw_idx);
  }

  // Release a chunk to the pool.
  void release_chunk(Metachunk* c) {
    _chunkPool.return_element(c);
  }
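
  // Round-trip sketch (illustrative): for a chunk c allocated from this pool,
  //
  //   ref_t r = encode_reference_for_chunk(c);
  //   assert(resolve_reference_to_chunk(r) == c, "round trip");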

  //// Helpers for tree traversal ////

  class ConstChunkClosure;
  bool iterate_chunks_helper(ref_t ref, ChunkClosure* cc) const;

#ifdef ASSERT
  // Verify a live node (one which lives in the tree).
  void verify_node(const btnode_t* n) const;
  // Helper for verify()
  void verify_helper(bool slow, ref_t ref, const MetaWord* p, int* num_chunks, int* num_nodes) const;
#endif

  // Given a chunk c, split it once.
  //
  // The original chunk must not be part of a freelist.
  //
  // Returns a pointer to the result chunk; returns the splintered-off chunk
  // in the splinters array.
  //
  // Returns NULL if the chunk cannot be split any further.
  Metachunk* split_once(Metachunk* c, Metachunk* splinters[chklvl::NUM_CHUNK_LEVELS]);

  // Given a chunk, attempt to merge it with its sibling if it is free.
  // Returns a pointer to the result chunk if successful, NULL otherwise.
  //
  // Returns the number of merged chunks, by chunk level, in the num_merged
  // array. These numbers include the original chunk.
  //
  // !!! Please note that if this method returns a non-NULL value, the
  // original chunk will be invalid and should not be accessed anymore! !!!
  Metachunk* merge_once(Metachunk* c, int num_merged[chklvl::NUM_CHUNK_LEVELS]);

public:

  ChunkTree();
  virtual ~ChunkTree() {}

  // Initialize: allocate a root node and a root chunk header; return the
  // root chunk header. It will be partly initialized.
  // Note: this just allocates a memory-less header; memory itself is allocated inside VirtualSpaceNode.
  Metachunk* alloc_root_chunk_header();

  // Given a chunk c, split it recursively until you get a chunk of the given target_level.
  //
  // The original chunk must not be part of a freelist.
  //
  // Returns a pointer to the result chunk; returns the split-off chunks in
  // the splinters array.
  //
  // Returns NULL if the chunk cannot be split at least once.
  Metachunk* split(chklvl_t target_level, Metachunk* c, Metachunk* splinters[chklvl::NUM_CHUNK_LEVELS]);

  // Given a chunk, attempt to merge it recursively with its neighboring chunks.
  //
  // If successful (merged at least once), returns the address of
  // the merged chunk; NULL otherwise.
  //
  // The merged chunks are removed from their freelist; the number of merged chunks is
  // returned, split by level, in the num_merged array. Note that these numbers do not
  // include the original chunk.
  //
  // !!! Please note that if this method returns a non-NULL value, the
  // original chunk will be invalid and should not be accessed anymore! !!!
  Metachunk* merge(Metachunk* c, int num_merged[chklvl::NUM_CHUNK_LEVELS]);
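
  // A usage sketch (illustrative only; assumes a ChunkTree tree and a chunk c
  // obtained from it that is not on any freelist):
  //
  //   Metachunk* splinters[chklvl::NUM_CHUNK_LEVELS] = { NULL };
  //   Metachunk* result = tree.split(target_level, c, splinters);
  //   // On success, result is a chunk of target_level and splinters holds
  //   // the split-off chunks; on failure, result is NULL.
  //
  //   int num_merged[chklvl::NUM_CHUNK_LEVELS] = { 0 };
  //   Metachunk* merged = tree.merge(c, num_merged);
  //   if (merged != NULL) {
  //     // c is invalid now; continue with merged.
  //   }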

  //// tree traversal ////

  // Iterate over all chunks in this tree. Returns true for complete traversal,
  // false if traversal was cancelled.
  bool iterate_chunks(ChunkClosure* cc) const;


  //// Debug stuff ////

  // Verify tree. If base != NULL, it should point to the location assumed
  // to be the base of the first chunk.
  DEBUG_ONLY(void verify(bool slow, const MetaWord* base) const;)

  // Returns the footprint of this tree, in words.
  size_t memory_footprint_words() const;


};


///////////////////////
// A C-heap-allocated array of chunk trees. Used to describe fragmentation over a range of multiple root chunks.
class ChunkTreeArray {

  const MetaWord* const _base;
  const size_t _word_size;

  ChunkTree** _arr;
  int _num;

#ifdef ASSERT
  void check_pointer(const MetaWord* p) const {
    assert(p >= _base && p < _base + _word_size, "Invalid pointer");
  }
#endif

  int index_by_address(const MetaWord* p) const {
    DEBUG_ONLY(check_pointer(p);)
    return (p - _base) / chklvl::MAX_CHUNK_WORD_SIZE;
  }
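
  // Worked example (illustrative): with _base = B and root chunk size
  // S = chklvl::MAX_CHUNK_WORD_SIZE, a pointer p in [B + S, B + 2 * S)
  // yields (p - B) / S = 1, i.e. the second root chunk area in the range.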

public:

  // Create an array of ChunkTree objects, all initialized to NULL, covering
  // a given memory range. The memory range must be aligned to the size of root chunks.
  ChunkTreeArray(const MetaWord* base, size_t word_size);

  ~ChunkTreeArray();

  // Given a memory address into the range the trees cover, return the corresponding
  // tree. If none exists at this position, create it.
  ChunkTree* get_tree_by_address(const MetaWord* p) const {
    assert(p >= _base && p < _base + _word_size, "Invalid pointer");
    const int idx = index_by_address(p);
    assert(idx >= 0 && idx < _num, "Invalid index");
    if (_arr[idx] == NULL) {
      _arr[idx] = new ChunkTree();
    }
    return _arr[idx];
  }

  // Iterate over all chunks in all trees. Returns true for complete traversal,
  // false if traversal was cancelled.
  bool iterate_chunks(ChunkClosure* cc) const;

  DEBUG_ONLY(void verify(bool slow) const;)

  // Returns the footprint of all trees in this array, in words.
  size_t memory_footprint_words() const;

};


} // namespace metaspace

#endif // SHARE_MEMORY_METASPACE_CHUNKTREE_HPP