/* * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ #include "precompiled.hpp" //#include "logging/log.hpp" #include "memory/metaspace/chunkTree.hpp" #include "memory/metaspace/metachunk.hpp" #include "utilities/debug.hpp" #include "utilities/globalDefinitions.hpp" namespace metaspace { // Our binary tree has 13 levels (number of chunk levels). // Since the leafs of this tree are Metachunk structures, // we need nodes (btnode_t) for the non-leaf nodes and // Metachunk structures for the leaf nodes. Thus, a fully // expanded binary tree (representing a 4MB section fragmented // into 4096 1K chunks) would need 4095 nodes and 4096 metachunk structures. 
static const u2 nodepool_max_capacity = 4095; static const u2 nodepool_capacity_increase = 300; static const u2 chunkpool_max_capacity = 4096; static const u2 chunkpool_capacity_increase = 300; ChunkTree::ChunkTree() : _root((ref_t)0), _nodePool("node pool", nodepool_max_capacity, nodepool_capacity_increase), _chunkPool("chunk pool", chunkpool_max_capacity, chunkpool_capacity_increase) {} #ifdef ASSERT void ChunkTree::check_is_valid_ref(ref_t ref) { const u2 raw_idx = get_raw_index_from_reference(ref); const u2 info = get_info_from_reference(ref); switch (info) { case node_marker: assert(_nodePool.is_valid_index(raw_idx), "Invalid node ref: %u", (unsigned)ref); break; case free_chunk_marker: case used_chunk_marker: assert(_chunkPool.is_valid_index(raw_idx), "Invalid chunk ref: %u", (unsigned)ref); break; default: ShouldNotReachHere(); } } void ChunkTree::verify_node(const btnode_t* n) const { const ref_t node_ref = encode_reference_for_node(n); // Check parent. const ref_t parent_ref = n->parent; if (parent_ref == 0) { assert(node_ref == _root, "This should be the root node"); } else { btnode_t* parent_node = resolve_reference_to_node(parent_ref); assert(parent_node->child[0] == node_ref || parent_node->child[1] == node_ref, "Incorrect parent backlink."); } // Check children. for (int i = 0; i < 2; i ++) { const ref_t child_ref = n->child[i]; assert(child_ref != 0, "child ref null"); // May be either a chunk or another node. 
if (reference_is_chunk(child_ref)) { Metachunk* c = resolve_reference_to_chunk(child_ref); assert(c->tree_node_ref() == node_ref, "Incorrect parent backlink."); if (reference_is_free_chunk(child_ref)) { assert(c->is_free(), "free info mismatch"); } else { assert(c->is_in_use(), "free info mismatch"); } } else { // This chunk is another node btnode_t* n = resolve_reference_to_node(child_ref); assert(n->parent == node_ref, "Incorrect parent backlink."); } } } #endif // Initialize: allocate a root node and a root chunk header; return the // root chunk header. It will be partly initialized. // Note: this just allocates a memory-less header; memory itself is allocated inside VirtualSpaceNode. Metachunk* ChunkTree::alloc_root_chunk_header() { assert(_root == 0, "already have a root"); Metachunk* c = _chunkPool.allocate_element(); c->wipe(); c->set_level(chklvl::ROOT_CHUNK_LEVEL); const ref_t root_node_ref = encode_reference_for_chunk(c, true); _root = root_node_ref; return c; } // Given a chunk c, split it once. // // The original chunk must not be part of a freelist. // // Returns pointer to the result chunk; updates the splinters array to return the splintered off chunk. // // Returns NULL if chunk cannot be split any further. Metachunk* ChunkTree::split_once(Metachunk* c, Metachunk* splinters[chklvl::NUM_CHUNK_LEVELS]) { assert(c->is_free(), "Can only split free chunks."); if (c->level() == chklvl::HIGHEST_CHUNK_LEVEL) { return NULL; } const ref_t orig_chunk_ref = encode_reference_for_chunk(c, true); // Split chunk into two chunks. 
Metachunk* leader = c; Metachunk* follower = allocate_new_chunk(); follower->wipe(); const chklvl_t new_level = c->level() + 1; const size_t new_word_size = chklvl::word_size_for_level(new_level); const size_t old_committed_words = c->committed_words(); assert(new_word_size == c->word_size() / 2, "Sanity"); assert(old_committed_words <= c->word_size(), "Sanity"); leader->set_level(new_level); follower->set_level(new_level); follower->set_base(c->base() + new_word_size); // Carry over commit boundary to new chunk; this is an optimization // to avoid too frequent commit requests. if (old_committed_words >= new_word_size) { leader->set_committed_words(new_word_size); follower->set_committed_words(old_committed_words - new_word_size); } else { leader->set_committed_words(old_committed_words); follower->set_committed_words(0); } // Create a new parent node for the two sibling chunks. btnode_t* const new_parent = allocate_new_node(); const ref_t new_parent_ref = encode_reference_for_node(new_parent); // Replace the reference to the original chunk in the parent node with // the reference to the parent node. if (_root == orig_chunk_ref) { assert(new_level == chklvl::ROOT_CHUNK_LEVEL + 1, "Original chunk should have been root chunk."); assert(c->tree_node_ref() == 0, "Original chunk should not have a parent."); _root = new_parent_ref; } else { const ref_t old_parent_ref = c->tree_node_ref(); btnode_t* const old_parent = resolve_reference_to_node(old_parent_ref); assert(old_parent != NULL, "Original chunk should have a parent."); if (old_parent->child[0] == orig_chunk_ref) { old_parent->child[0] = new_parent_ref; } else if (old_parent->child[1] == orig_chunk_ref) { old_parent->child[1] = new_parent_ref; } else { ShouldNotReachHere(); } } // Tell the two new chunks who their parent is. 
c->set_tree_node_ref(new_parent_ref); follower->set_tree_node_ref(new_parent_ref); // Remember the splintered off chunk in the splinters array assert(splinters[new_level] == NULL, "Sanity"); splinters[new_level] = follower; return c; } // Given a chunk, attempt to merge it with its sibling if it is free. // Returns pointer to the result chunk if successful, NULL otherwise. // // Returns number of merged chunks, by chunk level, in num_merged array. These numbers // includes the original chunk. // // !!! Please note that if this method returns a non-NULL value, the // original chunk will be invalid and should not be accessed anymore! !!! Metachunk* ChunkTree::merge_once(Metachunk* c, int num_merged[chklvl::NUM_CHUNK_LEVELS]) { assert(c->is_free(), "Only call for free chunks."); const chklvl_t orig_level = c->level(); // If chunk is already a root chunk, we cannot merge any further. if (orig_level == chklvl::ROOT_CHUNK_LEVEL) { return NULL; } const ref_t orig_chunk_ref = encode_reference_for_chunk(c, true); // Get parent node and parent ref. const ref_t parent_ref = c->tree_node_ref(); btnode_t* const parent = resolve_reference_to_node(parent_ref); assert(parent != NULL, "Chunk should have a parent."); // Since we left for root chunks already // Check if sibling are free and not splintered. if (reference_is_free_chunk(parent->child[0]) == false || reference_is_free_chunk(parent->child[1]) == false) { return NULL; } // Merge chunks. Metachunk* c1 = resolve_reference_to_chunk(parent->child[0]); Metachunk* c2 = resolve_reference_to_chunk(parent->child[1]); // At this point, both metachunks should be free and not splintered. assert(c1->is_free() && c2->is_free() && c1->level() == c2->level(), "Sanity"); // Find out who is leader. Let the leader live on. Metachunk* leader = c1; Metachunk* follower = c2; if (c1->base() > c2->base()) { leader = c2; follower = c1; } // Chunk memory should be adjacent to each other. 
assert(leader->base() + leader->word_size() == follower->base(), "Wrong geometry"); leader->set_level(orig_level - 1); // Carry over committed words. size_t new_committed_words = leader->committed_words(); if (leader->is_fully_committed()) { new_committed_words += follower->committed_words(); } assert(new_committed_words <= leader->word_size(), "Sanity"); leader->set_committed_words(new_committed_words); const ref_t leader_ref = encode_reference_for_chunk(leader, true); // Re-hang new merged chunk in tree one level up. const ref_t grand_parent_ref = parent->parent; if (grand_parent_ref == 0) { // Seems old parent node was root. That means we should have a root chunk now. assert(leader->level() == chklvl::ROOT_CHUNK_LEVEL, "Sanity"); // Which we just hang under _root... _root = leader_ref; } else { btnode_t* grand_parent = resolve_reference_to_node(grand_parent_ref); if (grand_parent->child[0] == parent_ref) { grand_parent->child[0] = leader_ref; } else if (grand_parent->child[1] == parent_ref) { grand_parent->child[1] = leader_ref; } else { ShouldNotReachHere(); } } // Remove the follower from its free list follower->remove_from_list(); // Adjust stats num_merged[orig_level] ++; // Release the superfluous chunk header, and the old parent node. release_chunk(follower); release_node(parent); return leader; } // Given a chunk c, split it recursively until you get a chunk of the given target_level. // // The original chunk must not be part of a freelist. // // Returns pointer to the result chunk; returns split off chunks in splinters array. // // Returns NULL if chunk cannot be split at least once. 
Metachunk* ChunkTree::split(chklvl_t target_level, Metachunk* c, Metachunk* splinters[chklvl::NUM_CHUNK_LEVELS]) { DEBUG_ONLY(chklvl::check_valid_level(target_level)); assert(target_level > c->level(), "Wrong target level"); Metachunk* c_last = split_once(c, splinters); while (c_last != NULL && c_last->level() < target_level) { c_last = split_once(c, splinters); } assert(c != NULL, "Sanity"); return c; } // Given a chunk, attempt to merge it recursively with its neighboring chunks. // // If successful (merged at least once), returns address of // the merged chunk; NULL otherwise. // // The merged chunks are removed from their freelist; the number of merged chunks is // returned, split by level, in num_merged array. Note that these numbers does not // include the original chunk. // // !!! Please note that if this method returns a non-NULL value, the // original chunk will be invalid and should not be accessed anymore! !!! Metachunk* ChunkTree::merge(Metachunk* c, int num_merged[chklvl::NUM_CHUNK_LEVELS]) { DEBUG_ONLY(c->verify(false);) assert(c->is_free(), "Only merge free chunks."); assert(c->level() > chklvl::ROOT_CHUNK_LEVEL, "Do not merge root chunks"); // Original chunk should be outside the chunk manager freelist before attempting to merge. 
assert(c->prev() == NULL && c->next() == NULL, "Remove chunk from freelist before merging."); Metachunk* c_last = merge_once(c, num_merged); while (c_last != NULL && c_last->level() > chklvl::ROOT_CHUNK_LEVEL) { c_last = merge_once(c, num_merged); } return c_last; } //// tree traversal //// bool ChunkTree::iterate_chunks_helper(ref_t ref, ChunkClosure* cc) const { if (reference_is_chunk(ref)) { Metachunk* const c = resolve_reference_to_chunk(ref); return cc->do_chunk(c); } else { btnode_t* const n = resolve_reference_to_node(ref); assert(n->child[0] != 0 && n->child[1] != 0, "nodes have always children"); return iterate_chunks_helper(n->child[0], cc) && iterate_chunks_helper(n->child[1], cc); } return true; } // Iterate over all nodes in this tree. Returns true for complete traversal, // false if traversal was cancelled. bool ChunkTree::iterate_chunks(ChunkClosure* cc) const { if (_root == 0) { return true; } return iterate_chunks_helper(_root, cc); } #ifdef ASSERT // Helper for verify() void ChunkTree::verify_helper(bool slow, ref_t ref, const MetaWord* p, int* num_chunks, int* num_nodes) const { if (reference_is_node(ref)) { // A node const btnode_t* const n = resolve_reference_to_node(ref); verify_node(n); (*num_nodes) ++; // Verify childs recursively. verify_helper(slow, n->child[0], p, num_chunks, num_nodes); verify_helper(slow, n->child[1], p, num_chunks, num_nodes); } else { // A chunk. const Metachunk* const c = resolve_reference_to_chunk(ref); assert(c->is_free() == reference_is_free_chunk(ref), "free info mismatch"); // Chunks in the tree should be ordered by base address of the range // they represent, and there should be no address holes. 
if (p == NULL) { p = c->base() + c->word_size(); } else { assert(c->base() == p, "Chunk base address mismatch."); } c->verify(slow); (*num_chunks) ++; } } void ChunkTree::verify(bool slow, const MetaWord* base) const { int num_chunks = 0; int num_nodes = 0; verify_helper(slow, _root, base, &num_chunks, &num_nodes); // Number of chunks in the tree should be equal to the number of chunks // taken from the pool; same for nodes. assert(num_chunks == _chunkPool.used(), "chunk count mismatch"); assert(num_nodes == _nodePool.used(), "node count mismatch"); } #endif // ASSERT // Returns the footprint of this tree, in words. size_t ChunkTree::memory_footprint_words() const { return _nodePool.memory_footprint_words() + _chunkPool.memory_footprint_words() + (sizeof(this) / BytesPerWord); } //// ChunkTreeArray /////////////777 // Create an array of ChunkTree objects, all initialized to NULL, covering // a given memory range. Memory range must be aligned to size of root chunks. ChunkTreeArray::ChunkTreeArray(const MetaWord* base, size_t word_size) : _base(base), _word_size(word_size), _arr(NULL), _num(0) { assert(is_aligned(_word_size, chklvl::MAX_CHUNK_WORD_SIZE), "not aligned"); _num = _word_size / chklvl::MAX_CHUNK_WORD_SIZE; _arr = NEW_C_HEAP_ARRAY(ChunkTree*, _num, mtInternal); for (int i = 0; i < _num; i ++) { _arr[i] = NULL; } } ChunkTreeArray::~ChunkTreeArray() { FREE_C_HEAP_ARRAY(ChunkTree*, _arr); } // Iterate over all nodes in all trees. Returns true for complete traversal, // false if traversal was cancelled. bool ChunkTreeArray::iterate_chunks(ChunkClosure* cc) const { for (int i = 0; i < _num; i ++) { if (_arr[i] != NULL) { if (_arr[i]->iterate_chunks(cc) == false) { return false; } } } return true; } #ifdef ASSERT void ChunkTreeArray::verify(bool slow) const { for (int i = 0; i < _num; i ++) { if (_arr[i] != NULL) { _arr[i]->verify(slow, _base); } } } #endif // Returns the footprint of all trees in this array, in words. 
size_t ChunkTreeArray::memory_footprint_words() const {
  // Sum the footprints of all trees present in the array; empty slots add nothing.
  size_t sum = 0;
  for (int i = 0; i < _num; i ++) {
    const ChunkTree* const tree = _arr[i];
    if (tree != NULL) {
      sum += tree->memory_footprint_words();
    }
  }
  return sum;
}

} // namespace metaspace