--- old/src/share/vm/code/codeCache.cpp	2014-10-10 14:56:09.231266594 +0200
+++ new/src/share/vm/code/codeCache.cpp	2014-10-10 14:56:08.247266625 +0200
@@ -44,6 +44,7 @@
 #include "runtime/icache.hpp"
 #include "runtime/java.hpp"
 #include "runtime/mutexLocker.hpp"
+#include "runtime/sweeper.hpp"
 #include "runtime/compilationPolicy.hpp"
 #include "services/memoryService.hpp"
 #include "trace/tracing.hpp"
@@ -192,16 +193,16 @@
   }
 
   // Make sure we have enough space for VM internal code
-  uint min_code_cache_size = (CodeCacheMinimumUseSpace DEBUG_ONLY(* 3)) + CodeCacheMinimumFreeSpace;
+  uint min_code_cache_size = CodeCacheMinimumUseSpace DEBUG_ONLY(* 3);
   if (NonNMethodCodeHeapSize < (min_code_cache_size + code_buffers_size)) {
     vm_exit_during_initialization("Not enough space in non-nmethod code heap to run VM.");
   }
   guarantee(NonProfiledCodeHeapSize + ProfiledCodeHeapSize + NonNMethodCodeHeapSize <= ReservedCodeCacheSize, "Size check");
 
   // Align reserved sizes of CodeHeaps
-  size_t non_method_size   = ReservedCodeSpace::allocation_align_size_up(NonNMethodCodeHeapSize);
-  size_t profiled_size     = ReservedCodeSpace::allocation_align_size_up(ProfiledCodeHeapSize);
-  size_t non_profiled_size = ReservedCodeSpace::allocation_align_size_up(NonProfiledCodeHeapSize);
+  size_t non_method_size   = ReservedCodeSpace::allocation_align_size_up(NonNMethodCodeHeapSize);
+  size_t profiled_size     = ReservedCodeSpace::allocation_align_size_up(ProfiledCodeHeapSize);
+  size_t non_profiled_size = ReservedCodeSpace::allocation_align_size_up(NonProfiledCodeHeapSize);
 
   // Compute initial sizes of CodeHeaps
   size_t init_non_method_size = MIN2(InitialCodeCacheSize, non_method_size);
@@ -333,14 +334,18 @@
   return next_blob(get_code_heap(cb), cb);
 }
 
-CodeBlob* CodeCache::allocate(int size, int code_blob_type, bool is_critical) {
-  // Do not seize the CodeCache lock here--if the caller has not
-  // already done so, we are going to lose bigtime, since the code
-  // cache will contain a garbage CodeBlob until the caller can
-  // run the constructor for the CodeBlob subclass he is busy
-  // instantiating.
+/**
+ * Do not seize the CodeCache lock here--if the caller has not
+ * already done so, we are going to lose bigtime, since the code
+ * cache will contain a garbage CodeBlob until the caller can
+ * run the constructor for the CodeBlob subclass he is busy
+ * instantiating.
+ */
+CodeBlob* CodeCache::allocate(int size, int code_blob_type) {
+  // Possibly wakes up the sweeper thread.
+  NMethodSweeper::notify(code_blob_type);
   assert_locked_or_safepoint(CodeCache_lock);
-  assert(size > 0, "allocation request must be reasonable");
+  assert(size > 0, err_msg_res("Code cache allocation request must be > 0 but is %d", size));
   if (size <= 0) {
     return NULL;
   }
@@ -351,14 +356,18 @@
   assert(heap != NULL, "heap is null");
 
   while (true) {
-    cb = (CodeBlob*)heap->allocate(size, is_critical);
+    cb = (CodeBlob*)heap->allocate(size);
     if (cb != NULL) break;
     if (!heap->expand_by(CodeCacheExpansionSize)) {
       // Expansion failed
       if (SegmentedCodeCache && (code_blob_type == CodeBlobType::NonNMethod)) {
-        // Fallback solution: Store non-nmethod code in the non-profiled code heap
-        return allocate(size, CodeBlobType::MethodNonProfiled, is_critical);
+        // Fallback solution: Store non-nmethod code in the non-profiled code heap.
+        // Note that in the sweeper, we check the reverse_free_ratio of the non-profiled
+        // code heap and force stack scanning if less than 10% of the code heap is free.
+        return allocate(size, CodeBlobType::MethodNonProfiled);
       }
+      MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
+      CompileBroker::handle_full_code_cache(code_blob_type);
       return NULL;
     }
     if (PrintCodeCacheExtension) {
@@ -756,19 +765,6 @@
 }
 
 /**
- * Returns true if a CodeHeap is full and sets code_blob_type accordingly.
- */
-bool CodeCache::is_full(int* code_blob_type) {
-  FOR_ALL_HEAPS(heap) {
-    if ((*heap)->unallocated_capacity() < CodeCacheMinimumFreeSpace) {
-      *code_blob_type = (*heap)->code_blob_type();
-      return true;
-    }
-  }
-  return false;
-}
-
-/**
  * Returns the reverse free ratio. E.g., if 25% (1/4) of the code heap
  * is free, reverse_free_ratio() returns 4.
  */
@@ -777,8 +773,8 @@
   if (heap == NULL) {
     return 0;
   }
-  double unallocated_capacity = (double)(heap->unallocated_capacity() - CodeCacheMinimumFreeSpace);
-  double max_capacity = (double)heap->max_capacity();
+  double unallocated_capacity = (double)CodeCache::unallocated_capacity() + 1; // Avoid division by 0
+  double max_capacity = CodeCache::max_capacity();
   return max_capacity / unallocated_capacity;
 }
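For reference, the patched reverse_free_ratio() now divides the maximum capacity by the unallocated capacity plus one. The following is a minimal standalone C++ sketch of that arithmetic; it is not HotSpot code, the function name reverse_free_ratio_demo and the 128 MB capacity are hypothetical, and only the "+ 1" division-by-zero guard mirrors the hunk above.

// Standalone sketch (not HotSpot code) of the patched ratio arithmetic.
// reverse_free_ratio_demo and the capacity values are hypothetical.
#include <cstdio>
#include <cstddef>

static double reverse_free_ratio_demo(size_t unallocated, size_t max_capacity) {
  double unallocated_d = (double)unallocated + 1; // "+ 1" avoids division by 0
  return (double)max_capacity / unallocated_d;
}

int main() {
  const size_t max = 128u * 1024 * 1024;                    // assume a 128 MB code cache
  printf("%.2f\n", reverse_free_ratio_demo(max / 4, max));  // 25% free -> ~4.00
  printf("%.2f\n", reverse_free_ratio_demo(0, max));        // exhausted cache -> finite result
  return 0;
}

With 25% of the cache free the ratio comes out at about 4, matching the example in the doc comment, and a fully exhausted cache now yields a large but finite value instead of a division by zero.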