--- old/src/share/vm/code/nmethod.cpp	2014-09-03 12:15:30.259797425 +0200
+++ new/src/share/vm/code/nmethod.cpp	2014-09-03 12:15:29.879797414 +0200
@@ -500,7 +500,7 @@
     CodeOffsets offsets;
     offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
     offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
-    nm = new (native_nmethod_size) nmethod(method(), native_nmethod_size,
+    nm = new (native_nmethod_size, CompLevel_none) nmethod(method(), native_nmethod_size,
                                             compile_id, &offsets,
                                             code_buffer, frame_size,
                                             basic_lock_owner_sp_offset,
@@ -538,7 +538,7 @@
     offsets.set_value(CodeOffsets::Dtrace_trap, trap_offset);
     offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
 
-    nm = new (nmethod_size) nmethod(method(), nmethod_size,
+    nm = new (nmethod_size, CompLevel_none) nmethod(method(), nmethod_size,
                                     &offsets, code_buffer, frame_size);
 
     NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
@@ -586,7 +586,7 @@
       + round_to(nul_chk_table->size_in_bytes(), oopSize)
       + round_to(debug_info->data_size()       , oopSize);
 
-    nm = new (nmethod_size)
+    nm = new (nmethod_size, comp_level)
     nmethod(method(), nmethod_size, compile_id, entry_bci, offsets,
             orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
             oop_maps,
@@ -803,9 +803,11 @@
 }
 #endif // def HAVE_DTRACE_H
 
-void* nmethod::operator new(size_t size, int nmethod_size) throw() {
-  // Not critical, may return null if there is too little continuous memory
-  return CodeCache::allocate(nmethod_size);
+void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
+  // With a SegmentedCodeCache, nmethods are allocated on separate heaps and therefore do not share memory
+  // with critical CodeBlobs. We define the allocation as critical to make sure all code heap memory is used.
+  bool is_critical = SegmentedCodeCache;
+  return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level), is_critical);
 }
 
 nmethod::nmethod(
@@ -1532,7 +1534,7 @@
   Events::log(JavaThread::current(), "flushing nmethod " INTPTR_FORMAT, this);
   if (PrintMethodFlushing) {
     tty->print_cr("*flushing nmethod %3d/" INTPTR_FORMAT ". Live blobs:" UINT32_FORMAT "/Free CodeCache:" SIZE_FORMAT "Kb",
-                  _compile_id, this, CodeCache::nof_blobs(), CodeCache::unallocated_capacity()/1024);
+                  _compile_id, this, CodeCache::nof_blobs(), CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(_comp_level))/1024);
   }
 
   // We need to deallocate any ExceptionCache data.
@@ -1559,7 +1561,6 @@
   CodeCache::free(this);
 }
 
-//
 // Notify all classes this nmethod is dependent on that it is no
 // longer dependent. This should only be called in two situations.
@@ -2420,15 +2421,18 @@
   // Turn off dependency tracing while actually testing dependencies.
   NOT_PRODUCT( FlagSetting fs(TraceDependencies, false) );
 
   typedef ResourceHashtable<DependencySignature, int, &DependencySignature::hash,
                             &DependencySignature::equals> DepTable;
 
   DepTable* table = new DepTable();
 
   // Iterate over live nmethods and check dependencies of all nmethods that are not
   // marked for deoptimization. A particular dependency is only checked once.
-  for(nmethod* nm = CodeCache::alive_nmethod(CodeCache::first()); nm != NULL; nm = CodeCache::alive_nmethod(CodeCache::next(nm))) {
-    if (!nm->is_marked_for_deoptimization()) {
+  NMethodIterator iter;
+  while(iter.next()) {
+    nmethod* nm = iter.method();
+    // Only notify for live nmethods
+    if (nm->is_alive() && !nm->is_marked_for_deoptimization()) {
       for (Dependencies::DepStream deps(nm); deps.next(); ) {
         // Construct abstraction of a dependency.
         DependencySignature* current_sig = new DependencySignature(deps);
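
A note on the operator new change in the hunk at line 803: the added comp_level parameter is what routes each nmethod into the code heap for its compilation tier once the code cache is segmented. The sketch below is a minimal standalone C++ illustration of that placement-new pattern, not HotSpot code: CodeHeapKind, heap_for, and Blob are invented stand-ins for CodeBlobType, CodeCache::get_code_blob_type, and nmethod.

#include <cstddef>
#include <cstdio>
#include <cstdlib>

// Mirrors HotSpot's CompLevel values (0 = none ... 4 = full optimization).
enum CompLevel { CompLevel_none = 0, CompLevel_full_profile = 3, CompLevel_full_optimization = 4 };

// Invented stand-in for the code heap segments selected by get_code_blob_type().
enum CodeHeapKind { HeapNonNMethod, HeapProfiled, HeapNonProfiled };

// Simplified stand-in for CodeCache::get_code_blob_type(): profiled tiers
// (2 and 3) go to the profiled heap, tiers 0, 1 and 4 to the non-profiled one.
static CodeHeapKind heap_for(int comp_level) {
  return (comp_level == 2 || comp_level == CompLevel_full_profile) ? HeapProfiled : HeapNonProfiled;
}

struct Blob {
  // Extra placement parameters pick the backing heap, as the patch does with
  // new (nmethod_size, comp_level). Declared non-throwing, so returning NULL
  // makes the whole new-expression yield NULL without running a constructor.
  void* operator new(std::size_t /*size*/, std::size_t blob_size, int comp_level) throw () {
    std::printf("allocating %lu bytes from heap %d\n", (unsigned long)blob_size, (int)heap_for(comp_level));
    return std::malloc(blob_size);  // stand-in for CodeCache::allocate(...)
  }
  void operator delete(void* p) { std::free(p); }
  int comp_level;
};

int main() {
  Blob* b = new (sizeof(Blob) + 64, CompLevel_full_optimization) Blob();
  if (b != NULL) {  // same NULL check the patch's callers perform
    b->comp_level = CompLevel_full_optimization;
    delete b;
  }
  return 0;
}

The non-throwing throw () specification matters here: when such an allocation function returns NULL, the new-expression itself evaluates to NULL and no constructor runs, which is why the callers in the patch can simply test nm != NULL after allocation.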