--- old/src/share/vm/code/codeCache.cpp	2016-11-03 14:16:20.000000000 -0700
+++ new/src/share/vm/code/codeCache.cpp	2016-11-03 14:16:20.000000000 -0700
@@ -23,6 +23,7 @@
  */
 
 #include "precompiled.hpp"
+#include "aot/aotLoader.hpp"
 #include "code/codeBlob.hpp"
 #include "code/codeCache.hpp"
 #include "code/compiledIC.hpp"
@@ -128,6 +129,8 @@
 
 // Iterate over all CodeHeaps
 #define FOR_ALL_HEAPS(heap) for (GrowableArrayIterator<CodeHeap*> heap = _heaps->begin(); heap != _heaps->end(); ++heap)
+#define FOR_ALL_NMETHOD_HEAPS(heap) for (GrowableArrayIterator<CodeHeap*> heap = _nmethod_heaps->begin(); heap != _nmethod_heaps->end(); ++heap)
+
 // Iterate over all CodeBlobs (cb) on the given CodeHeap
 #define FOR_ALL_BLOBS(cb, heap) for (CodeBlob* cb = first_blob(heap); cb != NULL; cb = next_blob(heap, cb))
@@ -139,6 +142,8 @@
 
 // Initialize array of CodeHeaps
 GrowableArray<CodeHeap*>* CodeCache::_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (CodeBlobType::All, true);
+GrowableArray<CodeHeap*>* CodeCache::_compiled_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (CodeBlobType::All, true);
+GrowableArray<CodeHeap*>* CodeCache::_nmethod_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (CodeBlobType::All, true);
 
 void CodeCache::check_heap_sizes(size_t non_nmethod_size, size_t profiled_size, size_t non_profiled_size, size_t cache_size, bool all_set) {
   size_t total_size = non_nmethod_size + profiled_size + non_profiled_size;
@@ -365,6 +370,28 @@
   return NULL;
 }
 
+int CodeCache::code_heap_compare(CodeHeap* const &lhs, CodeHeap* const &rhs) {
+  if (lhs->code_blob_type() == rhs->code_blob_type()) {
+    return (lhs > rhs) ? 1 : ((lhs < rhs) ? -1 : 0);
+  } else {
+    return lhs->code_blob_type() - rhs->code_blob_type();
+  }
+}
+
+void CodeCache::add_heap(CodeHeap* heap) {
+  assert(!Universe::is_fully_initialized(), "late heap addition?");
+
+  _heaps->insert_sorted<code_heap_compare>(heap);
+
+  int type = heap->code_blob_type();
+  if (code_blob_type_accepts_compiled(type)) {
+    _compiled_heaps->insert_sorted<code_heap_compare>(heap);
+  }
+  if (code_blob_type_accepts_nmethod(type)) {
+    _nmethod_heaps->insert_sorted<code_heap_compare>(heap);
+  }
+}
+
 void CodeCache::add_heap(ReservedSpace rs, const char* name, int code_blob_type) {
   // Check if heap is needed
   if (!heap_available(code_blob_type)) {
@@ -373,7 +400,7 @@
 
   // Create CodeHeap
   CodeHeap* heap = new CodeHeap(name, code_blob_type);
-  _heaps->append(heap);
+  add_heap(heap);
 
   // Reserve Space
   size_t size_initial = MIN2(InitialCodeCacheSize, rs.size());
@@ -389,7 +416,7 @@
 CodeHeap* CodeCache::get_code_heap(const CodeBlob* cb) {
   assert(cb != NULL, "CodeBlob is null");
   FOR_ALL_HEAPS(heap) {
-    if ((*heap)->contains(cb)) {
+    if ((*heap)->contains(cb->code_begin())) {
       return *heap;
     }
   }
@@ -426,10 +453,6 @@
   return (CodeBlob*)heap->next(cb);
 }
 
-CodeBlob* CodeCache::next_blob(CodeBlob* cb) {
-  return next_blob(get_code_heap(cb), cb);
-}
-
 /**
  * Do not seize the CodeCache lock here--if the caller has not
  * already done so, we are going to lose bigtime, since the code
@@ -494,7 +517,7 @@
   }
   if (PrintCodeCacheExtension) {
     ResourceMark rm;
-    if (_heaps->length() >= 1) {
+    if (_nmethod_heaps->length() >= 1) {
       tty->print("%s", heap->name());
     } else {
       tty->print("CodeCache");
@@ -559,6 +582,10 @@
   return false;
 }
 
+bool CodeCache::contains(nmethod *nm) {
+  return contains((void *)nm);
+}
+
 // This method is safe to call without holding the CodeCache_lock, as long as a dead CodeBlob is not
 // looked up (i.e., one that has been marked for deletion). It only depends on the _segmap to contain
 // valid indices, which it will always do, as long as the CodeBlob is not in the process of being recycled.
@@ -575,8 +602,8 @@
   // NMT can walk the stack before code cache is created
   if (_heaps != NULL && !_heaps->is_empty()) {
     FOR_ALL_HEAPS(heap) {
-      CodeBlob* result = (CodeBlob*) (*heap)->find_start(start);
-      if (result != NULL && result->blob_contains((address)start)) {
+      CodeBlob* result = (*heap)->find_blob_unsafe(start);
+      if (result != NULL) {
         return result;
       }
     }
@@ -592,7 +619,7 @@
 
 void CodeCache::blobs_do(void f(CodeBlob* nm)) {
   assert_locked_or_safepoint(CodeCache_lock);
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
    FOR_ALL_BLOBS(cb, *heap) {
      f(cb);
    }
@@ -613,6 +640,7 @@
   while(iter.next_alive()) {
     iter.method()->metadata_do(f);
   }
+  AOTLoader::metadata_do(f);
 }
 
 int CodeCache::alignment_unit() {
@@ -634,11 +662,10 @@
 
 void CodeCache::blobs_do(CodeBlobClosure* f) {
   assert_locked_or_safepoint(CodeCache_lock);
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     FOR_ALL_BLOBS(cb, *heap) {
       if (cb->is_alive()) {
         f->do_code_blob(cb);
-
 #ifdef ASSERT
         if (cb->is_nmethod())
           ((nmethod*)cb)->verify_scavenge_root_oops();
@@ -835,13 +862,12 @@
   int count = 0;
   FOR_ALL_HEAPS(heap) {
     FOR_ALL_BLOBS(cb, *heap) {
-      if (cb->is_nmethod()) {
-        nmethod* nm = (nmethod*)cb;
+      CompiledMethod *nm = cb->as_compiled_method_or_null();
+      if (nm != NULL) {
        count += nm->verify_icholder_relocations();
      }
    }
  }
-
  assert(count + InlineCacheBuffer::pending_icholder_count() + CompiledICHolder::live_not_claimed_count()
         == CompiledICHolder::live_count(), "must agree");
 #endif
@@ -902,7 +928,7 @@
 
 int CodeCache::nmethod_count() {
   int count = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     count += (*heap)->nmethod_count();
   }
   return count;
@@ -933,7 +959,7 @@
 
 size_t CodeCache::capacity() {
   size_t cap = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     cap += (*heap)->capacity();
   }
   return cap;
@@ -946,7 +972,7 @@
 
 size_t CodeCache::unallocated_capacity() {
   size_t unallocated_cap = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     unallocated_cap += (*heap)->unallocated_capacity();
   }
   return unallocated_cap;
@@ -954,7 +980,7 @@
 
 size_t CodeCache::max_capacity() {
   size_t max_cap = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     max_cap += (*heap)->max_capacity();
   }
   return max_cap;
@@ -980,7 +1006,7 @@
 
 size_t CodeCache::bytes_allocated_in_freelists() {
   size_t allocated_bytes = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     allocated_bytes += (*heap)->allocated_in_freelist();
   }
   return allocated_bytes;
@@ -988,7 +1014,7 @@
 
 int CodeCache::allocated_segments() {
   int number_of_segments = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     number_of_segments += (*heap)->allocated_segments();
   }
   return number_of_segments;
@@ -996,7 +1022,7 @@
 
 size_t CodeCache::freelists_length() {
   size_t length = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     length += (*heap)->freelist_length();
   }
   return length;
@@ -1039,6 +1065,8 @@
 
 void codeCache_init() {
   CodeCache::initialize();
+  // Load AOT libraries and add AOT code heaps.
+  AOTLoader::initialize();
 }
 
 //------------------------------------------------------------------------------------------------
@@ -1102,6 +1130,15 @@
   return (CompiledMethod*)cb;
 }
 
+bool CodeCache::is_far_target(address target) {
+#if INCLUDE_AOT
+  return NativeCall::is_far_call(_low_bound,  target) ||
+         NativeCall::is_far_call(_high_bound, target);
+#else
+  return false;
+#endif
+}
+
 #ifdef HOTSWAP
 int CodeCache::mark_for_evol_deoptimization(instanceKlassHandle dependee) {
   MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
@@ -1204,7 +1241,7 @@
 void CodeCache::flush_evol_dependents_on(instanceKlassHandle ev_k_h) {
   // --- Compile_lock is not held. However we are at a safepoint.
   assert_locked_or_safepoint(Compile_lock);
-  if (number_of_nmethods_with_dependencies() == 0) return;
+  if (number_of_nmethods_with_dependencies() == 0 && !UseAOT) return;
 
   // CodeCache can only be updated by a thread_in_VM and they will all be
   // stopped during the safepoint so CodeCache will be safe to update without
@@ -1316,7 +1353,7 @@
 
 void CodeCache::print_memory_overhead() {
   size_t wasted_bytes = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     CodeHeap* curr_heap = *heap;
     for (CodeBlob* cb = (CodeBlob*)curr_heap->first(); cb != NULL; cb = (CodeBlob*)curr_heap->next(cb)) {
       HeapBlock* heap_block = ((HeapBlock*)cb) - 1;
@@ -1362,8 +1399,8 @@
   ResourceMark rm;
   int i = 0;
 
-  FOR_ALL_HEAPS(heap) {
-    if ((_heaps->length() >= 1) && Verbose) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
+    if ((_nmethod_heaps->length() >= 1) && Verbose) {
       tty->print_cr("-- %s --", (*heap)->name());
     }
     FOR_ALL_BLOBS(cb, *heap) {
@@ -1459,7 +1496,7 @@
   CodeBlob_sizes live;
   CodeBlob_sizes dead;
 
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     FOR_ALL_BLOBS(cb, *heap) {
       if (!cb->is_alive()) {
         dead.add(cb);
@@ -1485,7 +1522,7 @@
   int number_of_blobs = 0;
   int number_of_oop_maps = 0;
   int map_size = 0;
-  FOR_ALL_HEAPS(heap) {
+  FOR_ALL_NMETHOD_HEAPS(heap) {
     FOR_ALL_BLOBS(cb, *heap) {
       if (cb->is_alive()) {
         number_of_blobs++;
@@ -1568,35 +1605,3 @@
             unallocated_capacity());
 }
 
-// Initialize iterator to given compiled method
-void CompiledMethodIterator::initialize(CompiledMethod* cm) {
-  _code_blob = (CodeBlob*)cm;
-  if (!SegmentedCodeCache) {
-    // Iterate over all CodeBlobs
-    _code_blob_type = CodeBlobType::All;
-  } else if (cm != NULL) {
-    _code_blob_type = CodeCache::get_code_blob_type(cm);
-  } else {
-    // Only iterate over method code heaps, starting with non-profiled
-    _code_blob_type = CodeBlobType::MethodNonProfiled;
-  }
-}
-
-// Advance iterator to the next compiled method in the current code heap
-bool CompiledMethodIterator::next_compiled_method() {
-  // Get first method CodeBlob
-  if (_code_blob == NULL) {
-    _code_blob = CodeCache::first_blob(_code_blob_type);
-    if (_code_blob == NULL) {
-      return false;
-    } else if (_code_blob->is_nmethod()) {
-      return true;
-    }
-  }
-  // Search for next method CodeBlob
-  _code_blob = CodeCache::next_blob(_code_blob);
-  while (_code_blob != NULL && !_code_blob->is_compiled()) {
-    _code_blob = CodeCache::next_blob(_code_blob);
-  }
-  return _code_blob != NULL;
-}
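
Note on the new heap bookkeeping: add_heap() keeps _heaps, _compiled_heaps, and _nmethod_heaps sorted via insert_sorted<code_heap_compare>, ordering heaps first by code blob type and breaking ties by heap address so the ordering is total. The standalone C++ sketch below illustrates that comparator's effect; it is not HotSpot code: FakeHeap and its type values are made-up stand-ins, and std::sort stands in for GrowableArray::insert_sorted.

#include <algorithm>
#include <cstdio>
#include <vector>

// Minimal stand-in for CodeHeap: only what the comparator consults.
struct FakeHeap {
  int code_blob_type;   // hypothetical values: 0 = non-nmethod, 1 = profiled, 2 = non-profiled
  const char* name;
};

// Same shape as CodeCache::code_heap_compare: primary key is the code
// blob type; the address comparison breaks ties so the order is total.
static int heap_compare(FakeHeap* const& lhs, FakeHeap* const& rhs) {
  if (lhs->code_blob_type == rhs->code_blob_type) {
    return (lhs > rhs) ? 1 : ((lhs < rhs) ? -1 : 0);
  }
  return lhs->code_blob_type - rhs->code_blob_type;
}

int main() {
  FakeHeap a{2, "non-profiled nmethods"};
  FakeHeap b{0, "non-nmethods"};
  FakeHeap c{1, "profiled nmethods"};
  std::vector<FakeHeap*> heaps{&a, &b, &c};
  // insert_sorted keeps the growable array ordered on every insert;
  // sorting once with the same comparator yields the same final order.
  std::sort(heaps.begin(), heaps.end(),
            [](FakeHeap* l, FakeHeap* r) { return heap_compare(l, r) < 0; });
  for (FakeHeap* h : heaps) {
    std::printf("%d: %s\n", h->code_blob_type, h->name);
  }
  return 0;
}

Keeping the lists pre-sorted and pre-filtered at add_heap() time is what lets FOR_ALL_NMETHOD_HEAPS visit method heaps grouped by type without re-filtering on every traversal.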
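Note on CodeCache::is_far_target(): with INCLUDE_AOT, compiled code may live outside the JIT code cache's reserved range, so a call site must know whether a branch target could be out of near-call reach. Checking the target against both _low_bound and _high_bound is conservative: if the target is near both ends of the code cache, it is near every call site between them. The sketch below only illustrates that idea, assuming an x86-64-style near call with a signed 32-bit displacement; is_far_call here is a hypothetical stand-in, not HotSpot's NativeCall API.

#include <cassert>
#include <cstdint>

// A near call encodes a signed 32-bit displacement, so a target is
// "far" from a given site when the distance does not fit in 32 bits.
static bool is_far_call(uint64_t from, uint64_t to) {
  int64_t disp = (int64_t)to - (int64_t)from;
  return disp != (int64_t)(int32_t)disp;
}

// Mirrors the shape of CodeCache::is_far_target: a target that is far
// from either bound of [low_bound, high_bound] may be unreachable by a
// near call from some blob inside that range.
static bool is_far_target(uint64_t low_bound, uint64_t high_bound,
                          uint64_t target) {
  return is_far_call(low_bound, target) || is_far_call(high_bound, target);
}

int main() {
  const uint64_t low  = 0x100000000ull;   // cache starts at 4 GiB
  const uint64_t high = 0x140000000ull;   // cache ends at 5 GiB
  assert(!is_far_target(low, high, high + (1ull << 20)));  // 1 MiB past the end: near
  assert( is_far_target(low, high, high + (8ull << 30)));  // 8 GiB past the end: far
  return 0;
}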