/*
 * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_MEMORY_METASPACE_CHUNKTREE_HPP
#define SHARE_MEMORY_METASPACE_CHUNKTREE_HPP

#include "memory/metaspace/abstractPool.hpp"
#include "memory/metaspace/chunkLevel.hpp"
#include "memory/metaspace/metachunk.hpp"

namespace metaspace {

// Chunks live in a binary tree.
//
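// Illustrative sketch (an assumption drawn from the split/merge operations
// below, not a diagram from the original): each tree spans one root chunk
// area; inner nodes (btnode_t) mark split points, and leaves are the actual
// chunks, each covering half of its parent's area:
//
//           [root chunk area]
//              (btnode_t)
//               /      \
//        (btnode_t)    chunk (1/2 size)
//         /      \
//   chunk (1/4)  chunk (1/4)
//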
class ChunkClosure {
 public:
  // Return false to cancel traversal.
  virtual bool do_chunk(Metachunk* chunk) = 0;
};
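
// Example of a concrete closure (an illustrative sketch, not part of this
// header's API): counts the chunks visited via iterate_chunks(), see below.
//
//   class ChunkCounter : public ChunkClosure {
//    public:
//     int _count;
//     ChunkCounter() : _count(0) {}
//     bool do_chunk(Metachunk* chunk) { _count ++; return true; } // never cancel
//   };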

class ChunkTree {

  typedef u2 ref_t;

  struct btnode_t {

    ref_t parent;
    ref_t child[2];

  };

  // Fully expanded (max fragmentation, 4MB root chunk split into 4096 1K chunks): 4096 headers, 4095 nodes.
  // Typical expansion (4MB root chunk split into 64 64K chunks): 64 headers, 63 nodes.
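  //
  // (A full binary tree over N leaf chunks has N - 1 inner nodes; a 4MB root
  //  chunk yields at most 4MB / 1K = 4096 leaves, hence the 4096 header cap
  //  and 4095 node cap in the pool typedefs below.)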

  typedef AbstractPool<btnode_t, ref_t, 1024, 1024, 4095> NodePoolType;
  typedef AbstractPool<Metachunk, ref_t, 64, 256, 4096> ChunkPoolType;
  NodePoolType _nodePool;
  ChunkPoolType _chunkPool;

  // The upper two bits of a reference encode its type:
  // bits 15..14:  00 - reference is a btnode_t
  //               10 - reference is a free chunk
  //               11 - reference is a chunk in use.
  // This leaves 14 bits for the index itself, which covers 16K - enough for both
  // chunk headers and nodes within one root chunk area.
  static const u2 highest_possible_index = (1 << 14) - 1;
  static const u2 node_marker = 0;
  static const u2 free_chunk_marker = 2;
  static const u2 used_chunk_marker = 3;

  static u2 get_raw_index_from_reference(ref_t ref)     { return 0x3FFF & ref; }
  static u2 get_info_from_reference(ref_t ref)          { return (ref >> 14) & 0x03; }

  static u2 encode_reference(u2 raw_idx, u2 info) {
    assert(raw_idx <= highest_possible_index, "invalid index");
    return (info << 14) | raw_idx;
  }
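
  // Worked example (values chosen for illustration): encoding raw index 5 as
  // a used chunk gives
  //   encode_reference(5, used_chunk_marker) == (3 << 14) | 5 == 0xC005
  // and decoding recovers both parts:
  //   get_raw_index_from_reference(0xC005) == 0x3FFF & 0xC005 == 5
  //   get_info_from_reference(0xC005)      == (0xC005 >> 14) & 0x03 == used_chunk_marker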

#ifdef ASSERT
  static bool reference_is_node(ref_t ref)        { return get_info_from_reference(ref) == node_marker; }
  static bool reference_is_chunk(ref_t ref)       { u2 i = get_info_from_reference(ref); return i == free_chunk_marker || i == used_chunk_marker; }
  static bool reference_is_used_chunk(ref_t ref)  { return get_info_from_reference(ref) == used_chunk_marker; }

  // These need the member pools to resolve references, hence non-static.
  void check_is_valid_node_ref(ref_t ref) const   { assert(resolve_reference_to_node(ref) != NULL, "invalid node ref"); }
  void check_is_valid_chunk_ref(ref_t ref) const  { assert(resolve_reference_to_chunk(ref) != NULL, "invalid chunk ref"); }
  void check_is_valid_ref(ref_t ref) const;
#endif

  static bool reference_is_free_chunk(ref_t ref)  { return get_info_from_reference(ref) == free_chunk_marker; }

  // Given a reference we know to be a node, resolve it to the node pointer.
  btnode_t* resolve_reference_to_node(ref_t ref) const {
    assert(reference_is_node(ref), "Not a node ref");
    return _nodePool.elem_at_index(get_raw_index_from_reference(ref));
  }

  // Allocate a new node from the pool. The node is uninitialized.
  // Returns a pointer to the node.
  btnode_t* allocate_new_node() {
    return _nodePool.allocate_element();
  }

  // Given a node pointer, return its correctly encoded reference.
  ref_t encode_reference_for_node(const btnode_t* n) const {
    const u2 raw_idx = _nodePool.index_for_elem(n);
    return encode_reference(raw_idx, node_marker);
  }

  // Release a node to the pool.
  void release_node(btnode_t* n) {
    _nodePool.return_element(n);
  }

  // Given a reference we know to be a chunk, resolve it to the chunk pointer.
  Metachunk* resolve_reference_to_chunk(ref_t ref) const {
    assert(reference_is_chunk(ref), "Not a chunk ref");
    return _chunkPool.elem_at_index(get_raw_index_from_reference(ref));
  }

  // Allocate a new chunk header from the pool. The header is uninitialized.
  // Returns a pointer to the chunk header.
  Metachunk* allocate_new_chunk() {
    return _chunkPool.allocate_element();
  }

  // Given a chunk pointer, return its correctly encoded reference.
  ref_t encode_reference_for_chunk(Metachunk* c, bool is_free) const {
    const u2 raw_idx = _chunkPool.index_for_elem(c);
    return encode_reference(raw_idx, is_free ? free_chunk_marker : used_chunk_marker);
  }

  // Release a chunk to the pool.
  void release_chunk(Metachunk* c) {
    _chunkPool.return_element(c);
  }

  //// Helpers for tree traversal ////

  class ConstNodeClosure;
  bool iterate_nodes_helper(ref_t ref, ConstNodeClosure* nc) const;

  class ConstChunkClosure;
  bool iterate_chunks_helper(ref_t ref, ChunkClosure* cc) const;

  //////

  // The root reference: it either encodes a chunk - in that case the tree
  // consists of a single, unsplit root chunk of maximum size - or a node, if
  // the root chunk has been split.
  ref_t _root;

#ifdef ASSERT
  // Verify a live node (one which lives in the tree).
  static void verify_node(const btnode_t* n);
  // Helper for verify()
  void verify_helper(bool slow, ref_t ref, const MetaWord* p, int* num_chunks, int* num_nodes) const;
#endif

  // Given a chunk c, split it once.
  //
  // The original chunk must not be part of a freelist.
  //
  // Returns a pointer to the result chunk; updates the splinters array to return the
  // splintered-off chunk.
  //
  // Returns NULL if the chunk cannot be split any further.
  Metachunk* split_once(Metachunk* c, Metachunk* splinters[chklvl::NUM_CHUNK_LEVELS]);

  // Given a chunk, attempt to merge it with its sibling if that sibling is free.
  // Returns a pointer to the result chunk if successful, NULL otherwise.
  //
  // Returns the number of merged chunks, by chunk level, in the num_merged array.
  // These numbers include the original chunk.
  //
  // !!! Please note that if this method returns a non-NULL value, the
  // original chunk will be invalid and should not be accessed anymore! !!!
  Metachunk* merge_once(Metachunk* c, int num_merged[chklvl::NUM_CHUNK_LEVELS]);

public:

  ChunkTree() : _root(0) {}

  // Initialize: allocate a root node and a root chunk header; return the
  // root chunk header. It will be partly initialized.
  // Note: this just allocates a memory-less header; memory itself is allocated inside VirtualSpaceNode.
  Metachunk* alloc_root_chunk_header();

  // Given a chunk c, split it recursively until you get a chunk of the given target_level.
  //
  // The original chunk must not be part of a freelist.
  //
  // Returns a pointer to the result chunk; returns split-off chunks in the splinters array.
  //
  // Returns NULL if the chunk cannot be split at least once.
  Metachunk* split(chklvl_t target_level, Metachunk* c, Metachunk* splinters[chklvl::NUM_CHUNK_LEVELS]);
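
  // Illustrative example (assuming 4MB root chunks halved per level, as in the
  // sizing comment above): splitting a 4MB root chunk down to a 512K chunk
  // (three levels down) keeps one 512K result chunk and leaves one splinter
  // per level passed in the splinters array: a 2MB, a 1MB and a 512K chunk.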

  // Given a chunk, attempt to merge it recursively with its neighboring chunks.
  //
  // If successful (merged at least once), returns the address of
  // the merged chunk; NULL otherwise.
  //
  // The merged chunks are removed from their freelist; the number of merged chunks is
  // returned, split by level, in the num_merged array. Note that these numbers do not
  // include the original chunk.
  //
  // !!! Please note that if this method returns a non-NULL value, the
  // original chunk will be invalid and should not be accessed anymore! !!!
  Metachunk* merge(Metachunk* c, int num_merged[chklvl::NUM_CHUNK_LEVELS]);
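
  // Illustrative example (sizes assumed as above): a free 1K chunk whose 1K
  // buddy is also free merges into a 2K chunk; if that 2K chunk's buddy is
  // free as well, merging continues upward and yields a 4K chunk. The number
  // of chunks consumed at each level is reported via num_merged.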

  //// tree traversal ////

  // Iterate over all chunks in this tree. Returns true for complete traversal,
  // false if traversal was cancelled.
  bool iterate_chunks(ChunkClosure* cc) const;


  //// Debug stuff ////

  // Verify the tree. If base != NULL, it should point to the location assumed
  // to be the base of the first chunk.
  DEBUG_ONLY(void verify(bool slow, const MetaWord* base) const;)

};


///////////////////////
// A C-heap allocated array of chunk trees. Used to describe fragmentation over a range
// of multiple root chunks.
class ChunkTreeArray {

  const MetaWord* const _base;
  const size_t _word_size;

  ChunkTree** _arr;
  int _num;

#ifdef ASSERT
  void check_pointer(const MetaWord* p) const {
    assert(p >= _base && p < _base + _word_size, "Invalid pointer");
  }
#endif

  int index_by_address(const MetaWord* p) const {
    DEBUG_ONLY(check_pointer(p);)
    return (int)((p - _base) / chklvl::MAX_CHUNK_WORD_SIZE);
  }
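
  // For example (illustrative, assuming 4MB root chunks on a 64-bit VM, i.e.
  // MAX_CHUNK_WORD_SIZE of 512K words): a pointer one and a half root chunk
  // areas beyond _base yields (1.5 * 512K) / 512K = index 1.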

public:

  // Create an array of ChunkTree objects, all initialized to NULL, covering
  // a given memory range. The memory range must be aligned to the size of root chunks.
  ChunkTreeArray(const MetaWord* base, size_t word_size);

  ~ChunkTreeArray();

  // Given a memory address into the range the trees cover, return the corresponding
  // tree. If none existed at this position, create it.
  ChunkTree* get_tree_by_address(const MetaWord* p) const {
    DEBUG_ONLY(check_pointer(p);)
    const int idx = index_by_address(p);
    assert(idx >= 0 && idx < _num, "Invalid index");
    if (_arr[idx] == NULL) {
      _arr[idx] = new ChunkTree();
    }
    return _arr[idx];
  }

  // Iterate over all chunks in all trees. Returns true for complete traversal,
  // false if traversal was cancelled.
  bool iterate_chunks(ChunkClosure* cc) const;

  DEBUG_ONLY(void verify(bool slow) const;)

};


} // namespace metaspace

#endif // SHARE_MEMORY_METASPACE_CHUNKTREE_HPP