src/share/vm/code/nmethod.cpp
Context diff for bug 8015774


*** 498,508 ****
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    int native_nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
    CodeOffsets offsets;
    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
!   nm = new (native_nmethod_size) nmethod(method(), native_nmethod_size,
        compile_id, &offsets,
        code_buffer, frame_size,
        basic_lock_owner_sp_offset,
        basic_lock_sp_offset, oop_maps);
    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_native_nmethod(nm));
--- 498,508 ----
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    int native_nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
    CodeOffsets offsets;
    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
!   nm = new (native_nmethod_size, CompLevel_none) nmethod(method(), native_nmethod_size,
        compile_id, &offsets,
        code_buffer, frame_size,
        basic_lock_owner_sp_offset,
        basic_lock_sp_offset, oop_maps);
    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_native_nmethod(nm));
*** 536,546 ****
    CodeOffsets offsets;
    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
    offsets.set_value(CodeOffsets::Dtrace_trap, trap_offset);
    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
!   nm = new (nmethod_size) nmethod(method(), nmethod_size,
        &offsets, code_buffer, frame_size);

    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
    if (PrintAssembly && nm != NULL) {
      Disassembler::decode(nm);
--- 536,546 ----
    CodeOffsets offsets;
    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
    offsets.set_value(CodeOffsets::Dtrace_trap, trap_offset);
    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
!   nm = new (nmethod_size, CompLevel_none) nmethod(method(), nmethod_size,
        &offsets, code_buffer, frame_size);

    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
    if (PrintAssembly && nm != NULL) {
      Disassembler::decode(nm);
*** 584,594 ****
      + round_to(dependencies->size_in_bytes() , oopSize)
      + round_to(handler_table->size_in_bytes(), oopSize)
      + round_to(nul_chk_table->size_in_bytes(), oopSize)
      + round_to(debug_info->data_size()       , oopSize);
!   nm = new (nmethod_size) nmethod(method(), nmethod_size, compile_id, entry_bci, offsets,
        orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
        oop_maps,
        handler_table,
        nul_chk_table,
--- 584,594 ----
      + round_to(dependencies->size_in_bytes() , oopSize)
      + round_to(handler_table->size_in_bytes(), oopSize)
      + round_to(nul_chk_table->size_in_bytes(), oopSize)
      + round_to(debug_info->data_size()       , oopSize);
!   nm = new (nmethod_size, comp_level) nmethod(method(), nmethod_size, compile_id, entry_bci, offsets,
        orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
        oop_maps,
        handler_table,
        nul_chk_table,
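The size computed above sums each section of the nmethod (dependencies, handler table, null-check table, debug info) after rounding it up to oopSize alignment. A minimal sketch of that rounding arithmetic, using stand-in names rather than HotSpot's round_to/oopSize:

  // Illustrative sketch only: the names below are stand-ins, not HotSpot API.
  #include <cstddef>
  #include <cstdio>

  // Round 'size' up to the next multiple of 'alignment' (a power of two).
  static size_t round_to_alignment(size_t size, size_t alignment) {
    return (size + alignment - 1) & ~(alignment - 1);
  }

  int main() {
    const size_t oop_size      = 8;   // stand-in for oopSize on a 64-bit VM
    size_t dependencies_bytes  = 52;  // hypothetical section sizes
    size_t handler_table_bytes = 30;
    size_t total = round_to_alignment(dependencies_bytes, oop_size)
                 + round_to_alignment(handler_table_bytes, oop_size);
    std::printf("total = %zu\n", total);  // 56 + 32 = 88
    return 0;
  }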
*** 801,813 ****
    }
   }
  }
  #endif // def HAVE_DTRACE_H

! void* nmethod::operator new(size_t size, int nmethod_size) throw() {
!   // Not critical, may return null if there is too little continuous memory
!   return CodeCache::allocate(nmethod_size);
  }

  nmethod::nmethod(
    Method* method,
    int nmethod_size,
--- 801,815 ----
    }
   }
  }
  #endif // def HAVE_DTRACE_H

! void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
!   // With a SegmentedCodeCache, nmethods are allocated on separate heaps and therefore do not share memory
!   // with critical CodeBlobs. We define the allocation as critical to make sure all code heap memory is used.
!   bool is_critical = SegmentedCodeCache;
!   return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level), is_critical);
  }

  nmethod::nmethod(
    Method* method,
    int nmethod_size,
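The overloaded operator new above is a placement-style allocator: the extra arguments carry the blob size and the compilation level, so CodeCache::allocate can place the nmethod in the code heap that matches its blob type. A minimal, self-contained sketch of that pattern, with hypothetical CodeArena/FakeNMethod names standing in for the real code cache plumbing:

  // Sketch only: extra arguments to operator new select where the object lands.
  #include <cstdlib>
  #include <new>

  enum CompLevel { CompLevel_none = 0, CompLevel_full_optimization = 4 };

  struct CodeArena {
    // Pick a per-level arena and hand out raw memory from it.
    static void* allocate(size_t bytes, int comp_level) {
      (void)comp_level;              // a real implementation selects a heap here
      return std::malloc(bytes);     // may return NULL if the arena is full
    }
  };

  struct FakeNMethod {
    // Mirrors the shape of new (nmethod_size, comp_level) nmethod(...);
    // HotSpot declares this throw(), here noexcept is used for the same effect.
    void* operator new(size_t /*size*/, int blob_size, int comp_level) noexcept {
      return CodeArena::allocate(blob_size, comp_level);
    }
    int level;
    explicit FakeNMethod(int l) : level(l) {}
  };

  int main() {
    FakeNMethod* nm = new (128, CompLevel_none) FakeNMethod(CompLevel_none);
    std::free(nm);  // sketch only; the VM frees through the code cache instead
    return 0;
  }

Because the allocation function is non-throwing, a NULL return makes the whole new-expression yield NULL and skips the constructor, which is why the call sites above guard with if (nm != NULL).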
*** 1530,1540 ****
  // completely deallocate this method
  Events::log(JavaThread::current(), "flushing nmethod " INTPTR_FORMAT, this);
  if (PrintMethodFlushing) {
    tty->print_cr("*flushing nmethod %3d/" INTPTR_FORMAT ". Live blobs:" UINT32_FORMAT "/Free CodeCache:" SIZE_FORMAT "Kb",
!     _compile_id, this, CodeCache::nof_blobs(), CodeCache::unallocated_capacity()/1024);
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
--- 1532,1542 ----
  // completely deallocate this method
  Events::log(JavaThread::current(), "flushing nmethod " INTPTR_FORMAT, this);
  if (PrintMethodFlushing) {
    tty->print_cr("*flushing nmethod %3d/" INTPTR_FORMAT ". Live blobs:" UINT32_FORMAT "/Free CodeCache:" SIZE_FORMAT "Kb",
!     _compile_id, this, CodeCache::nof_blobs(), CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(_comp_level))/1024);
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
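With a segmented code cache, free space is tracked per code heap, so the flushing message above reports the unallocated capacity of the heap this nmethod's compilation level maps to rather than one global number. A rough sketch of that per-heap lookup; the enum, array, and mapping below are simplified stand-ins (tiered levels 1-3 going to the profiled heap, everything else to the non-profiled one):

  // Illustrative only, not HotSpot's API.
  #include <cstddef>
  #include <cstdio>

  enum BlobType { NonNMethod = 0, ProfiledNMethod = 1, NonProfiledNMethod = 2, BlobTypeCount = 3 };

  static size_t g_free_bytes[BlobTypeCount] = { 4u * 1024 * 1024, 12u * 1024 * 1024, 20u * 1024 * 1024 };

  // Map a (simplified) compilation level to the heap it allocates from.
  static BlobType blob_type_for_level(int comp_level) {
    return (comp_level >= 1 && comp_level <= 3) ? ProfiledNMethod : NonProfiledNMethod;
  }

  static size_t unallocated_capacity(BlobType t) { return g_free_bytes[t]; }

  int main() {
    int comp_level = 4;  // hypothetical C2-compiled method
    std::printf("Free CodeCache: %zuKb\n",
                unallocated_capacity(blob_type_for_level(comp_level)) / 1024);
    return 0;
  }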
*** 1557,1567 ****
    ((CodeBlob*)(this))->flush();
    CodeCache::free(this);
  }

- //
  // Notify all classes this nmethod is dependent on that it is no
  // longer dependent. This should only be called in two situations.
  // First, when a nmethod transitions to a zombie all dependents need
  // to be clear. Since zombification happens at a safepoint there's no
--- 1559,1568 ----
*** 2425,2436 ****
    DepTable* table = new DepTable();

    // Iterate over live nmethods and check dependencies of all nmethods that are not
    // marked for deoptimization. A particular dependency is only checked once.
!   for(nmethod* nm = CodeCache::alive_nmethod(CodeCache::first()); nm != NULL; nm = CodeCache::alive_nmethod(CodeCache::next(nm))) {
!     if (!nm->is_marked_for_deoptimization()) {
        for (Dependencies::DepStream deps(nm); deps.next(); ) {
          // Construct abstraction of a dependency.
          DependencySignature* current_sig = new DependencySignature(deps);

          // Determine if dependency is already checked. table->put(...) returns
--- 2426,2440 ----
    DepTable* table = new DepTable();

    // Iterate over live nmethods and check dependencies of all nmethods that are not
    // marked for deoptimization. A particular dependency is only checked once.
!   NMethodIterator iter;
!   while(iter.next()) {
!     nmethod* nm = iter.method();
!     // Only notify for live nmethods
!     if (nm->is_alive() && !nm->is_marked_for_deoptimization()) {
        for (Dependencies::DepStream deps(nm); deps.next(); ) {
          // Construct abstraction of a dependency.
          DependencySignature* current_sig = new DependencySignature(deps);

          // Determine if dependency is already checked. table->put(...) returns
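The rewritten loop walks nmethods through an NMethodIterator instead of chaining CodeCache::first()/CodeCache::next()/alive_nmethod() calls, which keeps the traversal independent of whether nmethods live in one code heap or several. A rough sketch of that iterator pattern with simplified stand-in types; the real iterator walks the VM's code heaps, not a vector:

  // Sketch only: the iterator hides how nmethods are stored, the caller filters.
  #include <vector>

  struct Blob { bool is_nmethod; bool alive; bool marked_for_deopt; };

  class BlobIterator {
    const std::vector<Blob>* _blobs;
    size_t _pos;
    const Blob* _current = nullptr;
   public:
    explicit BlobIterator(const std::vector<Blob>& blobs) : _blobs(&blobs), _pos(0) {}
    // Advance to the next nmethod; return false when the walk is done.
    bool next() {
      while (_pos < _blobs->size()) {
        const Blob& b = (*_blobs)[_pos++];
        if (b.is_nmethod) { _current = &b; return true; }
      }
      return false;
    }
    const Blob* method() const { return _current; }
  };

  int count_checkable(const std::vector<Blob>& blobs) {
    int n = 0;
    BlobIterator iter(blobs);
    while (iter.next()) {
      const Blob* nm = iter.method();
      // Mirror of the hunk above: only consider live nmethods that are not
      // already marked for deoptimization.
      if (nm->alive && !nm->marked_for_deopt) n++;
    }
    return n;
  }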