diff --git a/src/hotspot/share/memory/metaspace.cpp b/src/hotspot/share/memory/metaspace.cpp
index 2c42c013560..74b5d1a0b30 100644
--- a/src/hotspot/share/memory/metaspace.cpp
+++ b/src/hotspot/share/memory/metaspace.cpp
@@ -867,8 +867,7 @@ void Metaspace::post_initialize() {
 }
 
 size_t Metaspace::max_allocation_word_size() {
-  const size_t max_overhead_words = metaspace::get_raw_word_size_for_requested_word_size(1);
-  return metaspace::chunklevel::MAX_CHUNK_WORD_SIZE - max_overhead_words;
+  return metaspace::chunklevel::MAX_CHUNK_WORD_SIZE;
 }
 
 // This version of Metaspace::allocate does not throw OOM but simply returns NULL, and
diff --git a/src/hotspot/share/memory/metaspace/allocationGuard.hpp b/src/hotspot/share/memory/metaspace/allocationGuard.hpp
deleted file mode 100644
index 0125c23ed61..00000000000
--- a/src/hotspot/share/memory/metaspace/allocationGuard.hpp
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
- * Copyright (c) 2020 SAP SE. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- *
- */
-
-#ifndef SHARE_MEMORY_METASPACE_ALLOCATIONGUARD_HPP
-#define SHARE_MEMORY_METASPACE_ALLOCATIONGUARD_HPP
-
-#include "memory/allocation.hpp"
-#include "memory/metaspace/chunklevel.hpp"
-#include "utilities/globalDefinitions.hpp"
-
-// In Debug builds, Metadata in Metaspace can be optionally guarded - enclosed in canaries -
-// to detect memory overwriters.
-//
-// These canaries are periodically checked, e.g. when the Metaspace is purged in a context
-// of a GC.
-
-// The canaries precede any allocated block...
-//
-// +---------------+
-// |  'METAMETA'   |
-// +---------------+
-// |  block size   |
-// +---------------+
-// |  block...     |
-// .               .
-// .               .
-// .               .
-// |               |
-// +---------------+
-// .               .
-// +---------------+
-// |  'METAMETA'   |
-// +---------------+
-// |  block size   |
-// +---------------+
-// |  block...     |
-
-// ... and since the blocks are allocated via pointer bump and closely follow each other,
-// one block's prefix is its predecessor's suffix, so apart from the last block all
-// blocks have an overwriter canary on both ends.
-//
-// Note: this feature is only available in debug, and is activated using
-// -XX:+MetaspaceGuardAllocations. When active, it disables deallocation handling - since
-// freeblock handling in the freeblock lists would get too complex - so one may run leaks
-// in deallocation-heavy scenarios (e.g. lots of class redefinitions).
-//
-
-namespace metaspace {
-
-#ifdef ASSERT
-
-struct Prefix {
-  static const uintx EyeCatcher =
-    NOT_LP64(0x77698465) LP64_ONLY(0x7769846577698465ULL); // "META" resp "METAMETA"
-
-  const uintx _mark;
-  const size_t _word_size; // raw word size including prefix
-  // MetaWord payload [0]; // varsized (but unfortunately not all our compilers understand that)
-
-  Prefix(size_t word_size) :
-    _mark(EyeCatcher),
-    _word_size(word_size)
-  {}
-
-  MetaWord* payload() const {
-    return (MetaWord*)(this + 1);
-  }
-
-  bool is_valid() const {
-    return _mark == EyeCatcher && _word_size > 0 && _word_size < chunklevel::MAX_CHUNK_WORD_SIZE;
-  }
-
-};
-
-// The prefix structure must be aligned to MetaWord size.
-STATIC_ASSERT((sizeof(Prefix) & WordAlignmentMask) == 0);
-
-inline size_t prefix_size() {
-  return sizeof(Prefix);
-}
-
-// Given a pointer to a memory area, establish the prefix at the start of that area and
-// return the starting pointer to the payload.
-inline MetaWord* establish_prefix(MetaWord* p_raw, size_t raw_word_size) {
-  const Prefix* pp = new(p_raw)Prefix(raw_word_size);
-  return pp->payload();
-}
-
-#endif
-
-} // namespace metaspace
-
-#endif // SHARE_MEMORY_METASPACE_ALLOCATIONGUARD_HPP
diff --git a/src/hotspot/share/memory/metaspace/metaspaceArena.cpp b/src/hotspot/share/memory/metaspace/metaspaceArena.cpp
index ae058fe2fdf..7d9441956d7 100644
--- a/src/hotspot/share/memory/metaspace/metaspaceArena.cpp
+++ b/src/hotspot/share/memory/metaspace/metaspaceArena.cpp
@@ -26,7 +26,6 @@
 #include "precompiled.hpp"
 #include "logging/log.hpp"
 #include "logging/logStream.hpp"
-#include "memory/metaspace/allocationGuard.hpp"
 #include "memory/metaspace/chunkManager.hpp"
 #include "memory/metaspace/counters.hpp"
 #include "memory/metaspace/freeBlocks.hpp"
@@ -123,6 +122,9 @@ MetaspaceArena::MetaspaceArena(ChunkManager* chunk_manager, const ArenaGrowthPol
   _fbl(NULL),
   _total_used_words_counter(total_used_words_counter),
   _name(name)
+#ifdef ASSERT
+  , _first_fence(NULL)
+#endif
 {
   UL(debug, ": born.");
 
@@ -229,28 +231,56 @@ MetaWord* MetaspaceArena::allocate(size_t requested_word_size) {
   MetaWord* p = NULL;
   const size_t raw_word_size = get_raw_word_size_for_requested_word_size(requested_word_size);
 
-  // 1) Attempt to allocate from the free blocks list
-  // (Note: to reduce complexity, deallocation handling is disabled if allocation guards
-  //  are enabled, see Settings::ergo_initialize())
+  // Before bothering the arena proper, attempt to re-use a block from the free blocks list
   if (Settings::handle_deallocations() && _fbl != NULL && !_fbl->is_empty()) {
     p = _fbl->remove_block(raw_word_size);
     if (p != NULL) {
       DEBUG_ONLY(InternalStats::inc_num_allocs_from_deallocated_blocks();)
       UL2(trace, "taken from fbl (now: %d, " SIZE_FORMAT ").",
           _fbl->count(), _fbl->total_size());
-      // Note: Space which is kept in the freeblock dictionary still counts as used as far
-      // as statistics go; therefore we skip the epilogue in this function to avoid double
-      // accounting.
+      // Note: free blocks in the freeblock dictionary still count as "used" as far as statistics go;
+      // therefore we have no need to adjust any usage counters (see epilogue of allocate_inner())
+      // and can just return here.
       return p;
     }
   }
 
+  // Primary allocation
+  p = allocate_inner(requested_word_size);
+
+#ifdef ASSERT
+  // Fence allocation
+  if (p != NULL && Settings::use_allocation_guard()) {
+    STATIC_ASSERT(is_aligned(sizeof(Fence), BytesPerWord));
+    MetaWord* guard = allocate_inner(sizeof(Fence) / BytesPerWord);
+    if (guard != NULL) {
+      // Ignore allocation errors for the fence to keep coding simple. If this
+      // happens (e.g. because right at this time we hit the Metaspace GC threshold)
+      // we miss adding this one fence. Not a big deal. Note that this would
+      // be pretty rare. Chances are much higher that the primary allocation above
+      // would have failed already.
+      Fence* f = new(guard) Fence(_first_fence);
+      _first_fence = f;
+    }
+  }
+#endif // ASSERT
+
+  return p;
+}
+
+// Allocate from the arena proper, once dictionary allocations and fencing are sorted out.
+MetaWord* MetaspaceArena::allocate_inner(size_t requested_word_size) {
+
+  assert_lock_strong(lock());
+
+  const size_t raw_word_size = get_raw_word_size_for_requested_word_size(requested_word_size);
+  MetaWord* p = NULL;
   bool current_chunk_too_small = false;
   bool commit_failure = false;
 
   if (current_chunk() != NULL) {
 
-    // 2) Attempt to satisfy the allocation from the current chunk.
+    // Attempt to satisfy the allocation from the current chunk.
     // If the current chunk is too small to hold the requested size, attempt to enlarge it.
     // If that fails, retire the chunk.
 
@@ -311,13 +341,6 @@ MetaWord* MetaspaceArena::allocate(size_t requested_word_size) {
     }
   }
 
-#ifdef ASSERT
-  // When using allocation guards, establish a prefix.
-  if (p != NULL && Settings::use_allocation_guard()) {
-    p = establish_prefix(p, raw_word_size);
-  }
-#endif
-
   if (p == NULL) {
     InternalStats::inc_num_allocs_failed_limit();
   } else {
@@ -425,36 +448,15 @@ void MetaspaceArena::verify_locked() const {
   }
 }
 
+void MetaspaceArena::Fence::verify() const {
+  assert(_eye1 == EyeCatcher && _eye2 == EyeCatcher,
+         "Metaspace corruption: fence block at " PTR_FORMAT " broken.", p2i(this));
+}
+
 void MetaspaceArena::verify_allocation_guards() const {
   assert(Settings::use_allocation_guard(), "Don't call with guards disabled.");
-
-  // Verify canaries of all allocations.
-  // (We can walk all allocations since at the start of a chunk an allocation
-  //  must be present, and the allocation header contains its size, so we can
-  //  find the next one).
-  for (const Metachunk* c = _chunks.first(); c != NULL; c = c->next()) {
-    const Prefix* first_broken_block = NULL;
-    int num_broken_blocks = 0;
-    const MetaWord* p = c->base();
-    while (p < c->top()) {
-      const Prefix* pp = (const Prefix*)p;
-      if (!pp->is_valid()) {
-        UL2(error, "Corrupt block at " PTR_FORMAT " (chunk: " METACHUNK_FORMAT ").",
-            p2i(pp), METACHUNK_FORMAT_ARGS(c));
-        if (first_broken_block == NULL) {
-          first_broken_block = pp;
-        }
-        num_broken_blocks ++;
-      }
-      p += pp->_word_size;
-    }
-    // After examining all blocks in a chunk, assert if any of those blocks
-    // was found to be corrupted.
-    if (first_broken_block != NULL) {
-      assert(false, "Corrupt block: found at least %d corrupt metaspace block(s) - "
-             "first corrupted block at " PTR_FORMAT ".",
-             num_broken_blocks, p2i(first_broken_block));
-    }
+  for (const Fence* f = _first_fence; f != NULL; f = f->next()) {
+    f->verify();
   }
 }
 
diff --git a/src/hotspot/share/memory/metaspace/metaspaceArena.hpp b/src/hotspot/share/memory/metaspace/metaspaceArena.hpp
index 1edbc8997b9..0093e7d15fa 100644
--- a/src/hotspot/share/memory/metaspace/metaspaceArena.hpp
+++ b/src/hotspot/share/memory/metaspace/metaspaceArena.hpp
@@ -107,6 +107,27 @@ class MetaspaceArena : public CHeapObj<mtClass> {
   // A name for purely debugging/logging purposes.
   const char* const _name;
 
+#ifdef ASSERT
+  // Allocation guards: When active, arena allocations are interleaved with
+  // fence allocations. An overwritten fence indicates a buffer overrun in either
+  // the preceding or the following user block. All fences are linked together;
+  // validating the fences just means walking that linked list.
+  // Note that for the Arena, fence blocks are just another form of user blocks.
+  class Fence {
+    static const uintx EyeCatcher =
+      NOT_LP64(0x77698465) LP64_ONLY(0x7769846577698465ULL); // "META" resp "METAMETA"
+    // Two eyecatchers to easily spot a corrupted _next pointer
+    const uintx _eye1;
+    const Fence* const _next;
+    const uintx _eye2;
+  public:
+    Fence(const Fence* next) : _eye1(EyeCatcher), _next(next), _eye2(EyeCatcher) {}
+    const Fence* next() const { return _next; }
+    void verify() const;
+  };
+  const Fence* _first_fence;
+#endif // ASSERT
+
   Mutex* lock() const { return _lock; }
 
   ChunkManager* chunk_manager() const { return _chunk_manager; }
 
@@ -138,6 +159,9 @@ class MetaspaceArena : public CHeapObj<mtClass> {
   // from this arena.
   DEBUG_ONLY(bool is_valid_area(MetaWord* p, size_t word_size) const;)
 
+  // Allocate from the arena proper, once dictionary allocations and fencing are sorted out.
+  MetaWord* allocate_inner(size_t word_size);
+
 public:
 
   MetaspaceArena(ChunkManager* chunk_manager, const ArenaGrowthPolicy* growth_policy,
diff --git a/src/hotspot/share/memory/metaspace/metaspaceCommon.cpp b/src/hotspot/share/memory/metaspace/metaspaceCommon.cpp
index 61d364b7e33..00c9bb640ff 100644
--- a/src/hotspot/share/memory/metaspace/metaspaceCommon.cpp
+++ b/src/hotspot/share/memory/metaspace/metaspaceCommon.cpp
@@ -24,7 +24,6 @@
  */
 
 #include "precompiled.hpp"
-#include "memory/metaspace/allocationGuard.hpp"
 #include "memory/metaspace/freeBlocks.hpp"
 #include "memory/metaspace/metaspaceCommon.hpp"
 #include "memory/metaspace/metaspaceSettings.hpp"
@@ -182,12 +181,6 @@ size_t get_raw_word_size_for_requested_word_size(size_t word_size) {
   // Metaspace allocations are aligned to word size.
   byte_size = align_up(byte_size, AllocationAlignmentByteSize);
 
-  // If we guard allocations, we need additional space for a prefix.
-#ifdef ASSERT
-  if (Settings::use_allocation_guard()) {
-    byte_size += align_up(prefix_size(), AllocationAlignmentByteSize);
-  }
-#endif
   size_t raw_word_size = byte_size / BytesPerWord;
   assert(raw_word_size * BytesPerWord == byte_size, "Sanity");
   return raw_word_size;
diff --git a/src/hotspot/share/memory/metaspace/metaspaceSettings.cpp b/src/hotspot/share/memory/metaspace/metaspaceSettings.cpp
index ffc8630e9be..c8b80b78ac0 100644
--- a/src/hotspot/share/memory/metaspace/metaspaceSettings.cpp
+++ b/src/hotspot/share/memory/metaspace/metaspaceSettings.cpp
@@ -84,11 +84,6 @@ void Settings::ergo_initialize() {
   // Deallocations can be manually switched off to aid error analysis, since this removes one layer of complexity
   // from allocation.
   _handle_deallocations = MetaspaceHandleDeallocations;
-
-  // We also switch it off automatically if we use allocation guards. This is to keep prefix handling in MetaspaceArena simple.
-  if (_use_allocation_guard) {
-    _handle_deallocations = false;
-  }
 #endif
   LogStream ls(Log(metaspace)::info());
   Settings::print_on(&ls);
diff --git a/test/hotspot/gtest/metaspace/test_allocationGuard.cpp b/test/hotspot/gtest/metaspace/test_allocationGuard.cpp
index 76eca074002..9b0eb3d69a5 100644
--- a/test/hotspot/gtest/metaspace/test_allocationGuard.cpp
+++ b/test/hotspot/gtest/metaspace/test_allocationGuard.cpp
@@ -44,13 +44,13 @@ using metaspace::Settings;
 // Note: We use TEST_VM_ASSERT_MSG. However, an assert is only triggered if allocation
 // guards are enabled; if guards are disabled for the gtests, this test would fail.
 // So for that case, we trigger a fake assert.
-TEST_VM_ASSERT_MSG(metaspace, test_overwriter, ".*failed: Corrupt block") {
+TEST_VM_ASSERT_MSG(metaspace, test_overwriter, ".*Metaspace corruption.*") {
   if (Settings::use_allocation_guard()) {
     MetaspaceGtestContext context;
     MetaspaceTestArena* arena = context.create_arena(Metaspace::StandardMetaspaceType);
 
     // We allocate two blocks. We then write over the end of the first block, which
-    // should corrupt the eyecatcher at the start of the second block.
+    // should corrupt the fence between the two blocks.
     // Note: there is of course no guarantee that blocks allocated sequentially are neighbors;
     // but in this case (clean standard-sized test arena and very small allocations) it can
     // be safely assumed).
@@ -59,10 +59,9 @@ TEST_VM_ASSERT_MSG(metaspace, test_overwriter, ".*failed: Corrupt block") {
     p1[8] = (MetaWord)0x9345; // Overwriter
     // Now we delete the arena (as happens during class unloading); this will check all
     // block canaries and should trigger an assert (see MetaspaceArena::verify_allocation_guards()).
-    tty->print_cr("Death test, please ignore the following \"Corrupt block\" printout.");
     delete arena;
   } else {
-    assert(false, "Corrupt block fake message to satisfy tests");
+    assert(false, "Metaspace corruption - please ignore this, fake message to satisfy tests");
  }
 }
 
diff --git a/test/hotspot/gtest/metaspace/test_metaspace_misc.cpp b/test/hotspot/gtest/metaspace/test_metaspace_misc.cpp
index e749c84c88e..dba19bd60ab 100644
--- a/test/hotspot/gtest/metaspace/test_metaspace_misc.cpp
+++ b/test/hotspot/gtest/metaspace/test_metaspace_misc.cpp
@@ -55,11 +55,12 @@ TEST_VM(metaspace, misc_sizes) {
 
 TEST_VM(metaspace, misc_max_alloc_size) {
 
-  // Make sure we can allocate what we promise to allocate
+  // Make sure we can allocate what we promise to allocate...
   const size_t sz = Metaspace::max_allocation_word_size();
   ClassLoaderData* cld = ClassLoaderData::the_null_class_loader_data();
   MetaWord* p = cld->metaspace_non_null()->allocate(sz, Metaspace::NonClassType);
   ASSERT_NOT_NULL(p);
+  // And also, successfully deallocate it.
   cld->metaspace_non_null()->deallocate(p, sz, false);
 }
 
diff --git a/test/hotspot/gtest/metaspace/test_metaspacearena_stress.cpp b/test/hotspot/gtest/metaspace/test_metaspacearena_stress.cpp
index 17839b44080..373e9e8d874 100644
--- a/test/hotspot/gtest/metaspace/test_metaspacearena_stress.cpp
+++ b/test/hotspot/gtest/metaspace/test_metaspacearena_stress.cpp
@@ -28,6 +28,7 @@
 #include "memory/metaspace/counters.hpp"
 #include "memory/metaspace/metaspaceArena.hpp"
 #include "memory/metaspace/metaspaceArenaGrowthPolicy.hpp"
+#include "memory/metaspace/metaspaceSettings.hpp"
 #include "memory/metaspace/metaspaceStatistics.hpp"
 #include "runtime/mutexLocker.hpp"
 #include "utilities/debug.hpp"
@@ -111,13 +112,15 @@ class MetaspaceArenaTestBed : public CHeapObj {
     //  - alignment/padding of allocations
     //  - inside used counter contains blocks in free list
     //  - free block list splinter threshold
+    //  - if +MetaspaceGuardAllocations, guard costs
 
     // Since what we deallocated may have been given back to us in a following allocation,
    // we only know fore sure we allocated what we did not give back.
     const size_t at_least_allocated = _alloc_count.total_size() - _dealloc_count.total_size();
 
     // At most we allocated this:
-    const size_t max_word_overhead_per_alloc = 4;
+    const size_t max_word_overhead_per_alloc =
+        4 + (metaspace::Settings::use_allocation_guard() ? 4 : 0);
     const size_t at_most_allocated = _alloc_count.total_size() + max_word_overhead_per_alloc * _alloc_count.count();
 
     ASSERT_LE(at_least_allocated, in_use_stats._used_words - stats._free_blocks_word_size);
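
Note (illustration only, not part of the patch): the standalone sketch below shows the guard scheme this change switches to, with all names made up for the example rather than taken from HotSpot. In debug builds, every user allocation is followed by a small fence block carrying eyecatcher words and a pointer to the previously planted fence; verifying the guards is then a single walk of that fence list, instead of parsing a size-carrying prefix in front of every block in every chunk as the removed allocationGuard.hpp scheme did. Because ordinary blocks no longer carry a prefix, deallocation handling does not have to be switched off while guards are active (see the metaspaceSettings.cpp hunk above).

// Standalone demo of a fence-style allocation guard (assumed, simplified names;
// not the HotSpot implementation).
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <new>

namespace demo {

struct Fence {
  static const uintptr_t kEyeCatcher = 0x4d455441; // "META"
  uintptr_t eye1;
  const Fence* next;   // previously planted fence, or nullptr
  uintptr_t eye2;
  explicit Fence(const Fence* n) : eye1(kEyeCatcher), next(n), eye2(kEyeCatcher) {}
  bool is_intact() const { return eye1 == kEyeCatcher && eye2 == kEyeCatcher; }
};

// A trivial bump allocator standing in for the arena.
class Arena {
  alignas(alignof(Fence)) char _buf[4096];
  size_t _top = 0;
  const Fence* _first_fence = nullptr;

public:
  void* allocate(size_t bytes) {
    // Round every block up so a Fence can be placed at any block start.
    bytes = (bytes + alignof(Fence) - 1) & ~(alignof(Fence) - 1);
    if (_top + bytes > sizeof(_buf)) return nullptr;
    void* p = _buf + _top;
    _top += bytes;
    return p;
  }

  // Allocate a user block, then plant a fence directly behind it and chain it in.
  void* allocate_guarded(size_t bytes) {
    void* p = allocate(bytes);
    if (p != nullptr) {
      void* g = allocate(sizeof(Fence));
      if (g != nullptr) {                 // if this fails we merely miss one fence
        _first_fence = new (g) Fence(_first_fence);
      }
    }
    return p;
  }

  // Walk the fence list; an overwritten eyecatcher means a neighboring
  // user block overran its bounds.
  int broken_fences() const {
    int broken = 0;
    for (const Fence* f = _first_fence; f != nullptr; f = f->next) {
      if (!f->is_intact()) broken++;
    }
    return broken;
  }
};

} // namespace demo

int main() {
  demo::Arena arena;
  char* p1 = static_cast<char*>(arena.allocate_guarded(32));
  (void)arena.allocate_guarded(32);
  assert(arena.broken_fences() == 0);
  memset(p1, 0xAA, 40);  // overrun by 8 bytes: clobbers the fence planted behind p1
  printf("broken fences: %d\n", arena.broken_fences());  // prints 1
  return 0;
}

Built with a plain C++11-or-later compiler, the program reports one broken fence after the deliberate overrun, mirroring what MetaspaceArena::Fence::verify() asserts on in the patch.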