# HG changeset patch # User simonis # Date 1504277135 -7200 # Fri Sep 01 16:45:35 2017 +0200 # Node ID f10d44020023a25f78044de56c16033d330b4847 # Parent de57f3540d9aea407f123abb1b44a9054c77b027 8166317: InterpreterCodeSize should be computed Reviewed-by: kvn diff --git a/src/share/vm/code/codeBlob.hpp b/src/share/vm/code/codeBlob.hpp --- a/src/share/vm/code/codeBlob.hpp +++ b/src/share/vm/code/codeBlob.hpp @@ -72,6 +72,7 @@ friend class VMStructs; friend class JVMCIVMStructs; friend class CodeCacheDumper; + friend class CodeCache; protected: diff --git a/src/share/vm/code/codeCache.cpp b/src/share/vm/code/codeCache.cpp --- a/src/share/vm/code/codeCache.cpp +++ b/src/share/vm/code/codeCache.cpp @@ -569,6 +569,20 @@ assert(heap->blob_count() >= 0, "sanity check"); } +void CodeCache::free_unused_tail(CodeBlob* cb, size_t used) { + assert_locked_or_safepoint(CodeCache_lock); + guarantee(cb->is_buffer_blob() && strncmp("Interpreter", cb->name(), 11) == 0, "Only possible for interpreter!"); + print_trace("free_unused_tail", cb); + + // Get heap for given CodeBlob and deallocate its unused tail + get_code_heap(cb)->deallocate_tail(cb, used); + // Adjust the sizes of the CodeBlob + cb->_size = used; + cb->_data_offset = used; + cb->_code_end = (address)cb + used; + cb->_data_end = (address)cb + used; +} + void CodeCache::commit(CodeBlob* cb) { // this is called by nmethod::nmethod, which must already own CodeCache_lock assert_locked_or_safepoint(CodeCache_lock); diff --git a/src/share/vm/code/codeCache.hpp b/src/share/vm/code/codeCache.hpp --- a/src/share/vm/code/codeCache.hpp +++ b/src/share/vm/code/codeCache.hpp @@ -143,6 +143,7 @@ static int alignment_unit(); // guaranteed alignment of all CodeBlobs static int alignment_offset(); // guaranteed offset of first CodeBlob byte within alignment unit (i.e., allocation header) static void free(CodeBlob* cb); // frees a CodeBlob + static void free_unused_tail(CodeBlob* cb, size_t used); // frees the unused tail of a CodeBlob 
(only used by TemplateInterpreter::initialize()) static bool contains(void *p); // returns whether p is included static bool contains(nmethod* nm); // returns whether nm is included static void blobs_do(void f(CodeBlob* cb)); // iterates over all CodeBlobs diff --git a/src/share/vm/code/stubs.cpp b/src/share/vm/code/stubs.cpp --- a/src/share/vm/code/stubs.cpp +++ b/src/share/vm/code/stubs.cpp @@ -24,6 +24,7 @@ #include "precompiled.hpp" #include "code/codeBlob.hpp" +#include "code/codeCache.hpp" #include "code/stubs.hpp" #include "memory/allocation.inline.hpp" #include "oops/oop.inline.hpp" @@ -89,6 +90,16 @@ Unimplemented(); } +void StubQueue::deallocate_unused_tail() { + CodeBlob* blob = CodeCache::find_blob((void*)_stub_buffer); + // We also have to account for the extra space (i.e. header) used by the CodeBlob + // which provides our memory (see BufferBlob::create() in codeBlob.cpp). + size_t header_size = CodeBlob::align_code_offset(blob->header_size()); + CodeCache::free_unused_tail(blob, header_size + used_space()); + // Update the limits to the new, trimmed CodeBlob size + _buffer_size = blob->content_size(); + _buffer_limit = blob->content_size(); +} Stub* StubQueue::stub_containing(address pc) const { if (contains(pc)) { diff --git a/src/share/vm/code/stubs.hpp b/src/share/vm/code/stubs.hpp --- a/src/share/vm/code/stubs.hpp +++ b/src/share/vm/code/stubs.hpp @@ -201,6 +201,8 @@ void remove_first(int n); // remove the first n stubs in the queue void remove_all(); // remove all stubs in the queue + void deallocate_unused_tail(); // deallocate the unused tail of the underlying CodeBlob + // only used from TemplateInterpreter::initialize() // Iteration static void queues_do(void f(StubQueue* s)); // call f with each StubQueue void stubs_do(void f(Stub* s)); // call f with all stubs diff --git a/src/share/vm/interpreter/templateInterpreter.cpp b/src/share/vm/interpreter/templateInterpreter.cpp --- a/src/share/vm/interpreter/templateInterpreter.cpp +++ 
b/src/share/vm/interpreter/templateInterpreter.cpp @@ -54,6 +54,8 @@ _code = new StubQueue(new InterpreterCodeletInterface, code_size, NULL, "Interpreter"); TemplateInterpreterGenerator g(_code); + // Free the unused memory not occupied by the interpreter and the stubs + _code->deallocate_unused_tail(); } if (PrintInterpreter) { diff --git a/src/share/vm/memory/heap.cpp b/src/share/vm/memory/heap.cpp --- a/src/share/vm/memory/heap.cpp +++ b/src/share/vm/memory/heap.cpp @@ -222,6 +222,20 @@ } } +void CodeHeap::deallocate_tail(void* p, size_t used_size) { + assert(p == find_start(p), "illegal deallocation"); + // Find start of HeapBlock + HeapBlock* b = (((HeapBlock *)p) - 1); + assert(b->allocated_space() == p, "sanity check"); + size_t used_number_of_segments = size_to_segments(used_size + header_size()); + size_t actual_number_of_segments = b->length(); + guarantee(used_number_of_segments <= actual_number_of_segments, "Must be!"); + guarantee(b == block_at(_next_segment - actual_number_of_segments), "Intermediate allocation!"); + size_t number_of_segments_to_deallocate = actual_number_of_segments - used_number_of_segments; + _next_segment -= number_of_segments_to_deallocate; + mark_segmap_as_free(_next_segment, _next_segment + number_of_segments_to_deallocate); + b->initialize(used_number_of_segments); +} void CodeHeap::deallocate(void* p) { assert(p == find_start(p), "illegal deallocation"); diff --git a/src/share/vm/memory/heap.hpp b/src/share/vm/memory/heap.hpp --- a/src/share/vm/memory/heap.hpp +++ b/src/share/vm/memory/heap.hpp @@ -147,6 +147,12 @@ // Memory allocation void* allocate (size_t size); // Allocate 'size' bytes in the code cache or return NULL void deallocate(void* p); // Deallocate memory + // Free the tail of segments allocated by the last call to 'allocate()' which exceed 'used_size'. + // ATTENTION: this is only safe to use if there was no other call to 'allocate()' after + // 'p' was allocated. 
Only intended for freeing memory which would otherwise be wasted after the interpreter generation because we don't know the interpreter size beforehand and we also can't easily relocate the interpreter to a new location. void deallocate_tail(void* p, size_t used_size); // Attributes char* low_boundary() const { return _memory.low_boundary(); }