--- old/src/cpu/aarch64/vm/sharedRuntime_aarch64.cpp 2016-02-18 15:25:00.841784912 -0800
+++ new/src/cpu/aarch64/vm/sharedRuntime_aarch64.cpp 2016-02-18 15:25:00.720776439 -0800
@@ -197,6 +197,16 @@
 bool SharedRuntime::is_wide_vector(int size) {
   return size > 8;
 }
+
+size_t SharedRuntime::trampoline_size() {
+  return 16;
+}
+
+void SharedRuntime::generate_trampoline(MacroAssembler *masm, address destination) {
+  __ mov(rscratch1, destination);
+  __ br(rscratch1);
+}
+
 // The java_calling_convention describes stack locations as ideal slots on
 // a frame with no abi restrictions. Since we must observe abi restrictions
 // (like the placement of the register window) the slots must be biased by
--- old/src/cpu/ppc/vm/sharedRuntime_ppc.cpp 2016-02-18 15:25:01.386823077 -0800
+++ new/src/cpu/ppc/vm/sharedRuntime_ppc.cpp 2016-02-18 15:25:01.249813483 -0800
@@ -482,6 +482,18 @@
   assert(size <= 8, "%d bytes vectors are not supported", size);
   return size > 8;
 }
+
+size_t SharedRuntime::trampoline_size() {
+  return Assembler::load_const_size + 8;
+}
+
+void SharedRuntime::generate_trampoline(MacroAssembler *masm, address destination) {
+  Register Rtemp = R12;
+  __ load_const(Rtemp, destination);
+  __ mtctr(Rtemp);
+  __ bctr();
+}
+
 #ifdef COMPILER2
 static int reg2slot(VMReg r) {
   return r->reg2stack() + SharedRuntime::out_preserve_stack_slots();
--- old/src/cpu/sparc/vm/sharedRuntime_sparc.cpp 2016-02-18 15:25:02.053869785 -0800
+++ new/src/cpu/sparc/vm/sharedRuntime_sparc.cpp 2016-02-18 15:25:01.885858020 -0800
@@ -323,6 +323,16 @@
   return size > 8;
 }
 
+size_t SharedRuntime::trampoline_size() {
+  return 40;
+}
+
+void SharedRuntime::generate_trampoline(MacroAssembler *masm, address destination) {
+  __ set((intptr_t)destination, G3_scratch);
+  __ JMP(G3_scratch, 0);
+  __ delayed()->nop();
+}
+
 // The java_calling_convention describes stack locations as ideal slots on
 // a frame with no abi restrictions. Since we must observe abi restrictions
 // (like the placement of the register window) the slots must be biased by
--- old/src/cpu/x86/vm/sharedRuntime_x86_32.cpp 2016-02-18 15:25:02.621909560 -0800
+++ new/src/cpu/x86/vm/sharedRuntime_x86_32.cpp 2016-02-18 15:25:02.492900527 -0800
@@ -339,6 +339,14 @@
   return size > 16;
 }
 
+size_t SharedRuntime::trampoline_size() {
+  return 16;
+}
+
+void SharedRuntime::generate_trampoline(MacroAssembler *masm, address destination) {
+  __ jump(RuntimeAddress(destination));
+}
+
 // The java_calling_convention describes stack locations as ideal slots on
 // a frame with no abi restrictions. Since we must observe abi restrictions
 // (like the placement of the register window) the slots must be biased by
--- old/src/cpu/x86/vm/sharedRuntime_x86_64.cpp 2016-02-18 15:25:03.229952136 -0800
+++ new/src/cpu/x86/vm/sharedRuntime_x86_64.cpp 2016-02-18 15:25:03.093942613 -0800
@@ -390,6 +390,14 @@
   return size > 16;
 }
 
+size_t SharedRuntime::trampoline_size() {
+  return 16;
+}
+
+void SharedRuntime::generate_trampoline(MacroAssembler *masm, address destination) {
+  __ jump(RuntimeAddress(destination));
+}
+
 // The java_calling_convention describes stack locations as ideal slots on
 // a frame with no abi restrictions. Since we must observe abi restrictions
 // (like the placement of the register window) the slots must be biased by
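
Each port now answers the same two questions for the CDS code: how many bytes one trampoline occupies (SharedRuntime::trampoline_size()) and how to emit a jump to an arbitrary destination into a caller-supplied buffer (SharedRuntime::generate_trampoline()). A minimal sketch of the calling pattern, matching how the shared code further down drives these hooks (the helper name emit_trampoline is illustrative only):

  // Emit a trampoline to `target` into `buf`, which must provide
  // SharedRuntime::trampoline_size() bytes, normally carved out of the
  // shared misc-code ("MC") region (see metaspaceShared.cpp below).
  static address emit_trampoline(address buf, address target) {
    CodeBuffer buffer(buf, (int)SharedRuntime::trampoline_size());
    MacroAssembler masm(&buffer);
    SharedRuntime::generate_trampoline(&masm, target);
    return buf;
  }
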
--- old/src/cpu/zero/vm/sharedRuntime_zero.cpp 2016-02-18 15:25:03.798991981 -0800
+++ new/src/cpu/zero/vm/sharedRuntime_zero.cpp 2016-02-18 15:25:03.659982249 -0800
@@ -132,6 +132,15 @@
   return generate_empty_runtime_stub("resolve_blob");
 }
 
+size_t SharedRuntime::trampoline_size() {
+  ShouldNotCallThis();
+  return 0;
+}
+
+void SharedRuntime::generate_trampoline(MacroAssembler *masm, address destination) {
+  ShouldNotCallThis();
+  return;
+}
 
 int SharedRuntime::c_calling_convention(const BasicType *sig_bt,
                                         VMRegPair *regs,
--- old/src/share/vm/interpreter/abstractInterpreter.cpp 2016-02-18 15:25:04.320028467 -0800
+++ new/src/share/vm/interpreter/abstractInterpreter.cpp 2016-02-18 15:25:04.173018173 -0800
@@ -25,6 +25,7 @@
 #include "precompiled.hpp"
 #include "asm/macroAssembler.hpp"
 #include "asm/macroAssembler.inline.hpp"
+#include "compiler/disassembler.hpp"
 #include "interpreter/bytecodeHistogram.hpp"
 #include "interpreter/bytecodeInterpreter.hpp"
 #include "interpreter/interpreter.hpp"
@@ -32,6 +33,9 @@
 #include "interpreter/interp_masm.hpp"
 #include "interpreter/templateTable.hpp"
 #include "memory/allocation.inline.hpp"
+#if INCLUDE_CDS
+#include "memory/metaspaceShared.hpp"
+#endif
 #include "memory/resourceArea.hpp"
 #include "oops/arrayOop.hpp"
 #include "oops/methodData.hpp"
@@ -93,6 +97,7 @@
 address    AbstractInterpreter::_native_entry_end                  = NULL;
 address    AbstractInterpreter::_slow_signature_handler;
 address    AbstractInterpreter::_entry_table      [AbstractInterpreter::number_of_method_entries];
+address    AbstractInterpreter::_cds_entry_table  [AbstractInterpreter::number_of_method_entries];
 address    AbstractInterpreter::_native_abi_to_tosca [AbstractInterpreter::number_of_result_handlers];
 
 //------------------------------------------------------------------------------------------------------------------------
@@ -204,14 +209,41 @@
   return zerolocals;
 }
 
+#if INCLUDE_CDS
+
+address AbstractInterpreter::get_trampoline_code_buffer(AbstractInterpreter::MethodKind kind) {
+  const size_t trampoline_size = SharedRuntime::trampoline_size();
+  address addr = MetaspaceShared::cds_i2i_entry_code_buffers((size_t)(AbstractInterpreter::number_of_method_entries) * trampoline_size);
+  addr += (size_t)(kind) * trampoline_size;
+
+  return addr;
+}
+
+void AbstractInterpreter::update_cds_entry_table(AbstractInterpreter::MethodKind kind) {
+  if (DumpSharedSpaces || UseSharedSpaces) {
+    address trampoline = get_trampoline_code_buffer(kind);
+    _cds_entry_table[kind] = trampoline;
+
+    CodeBuffer buffer(trampoline, (int)(SharedRuntime::trampoline_size()));
+    MacroAssembler _masm(&buffer);
+    SharedRuntime::generate_trampoline(&_masm, _entry_table[kind]);
+
+    if (PrintInterpreter) {
+      Disassembler::decode(buffer.insts_begin(), buffer.insts_end());
+    }
+  }
+}
+
+#endif
 
 void AbstractInterpreter::set_entry_for_kind(AbstractInterpreter::MethodKind kind, address entry) {
   assert(kind >= method_handle_invoke_FIRST &&
          kind <= method_handle_invoke_LAST, "late initialization only for MH entry points");
   assert(_entry_table[kind] == _entry_table[abstract], "previous value must be AME entry");
   _entry_table[kind] = entry;
-}
+  update_cds_entry_table(kind);
+}
 
 // Return true if the interpreter can prove that the given bytecode has
 // not yet been executed (in Java semantics, not in actual operation).
@@ -416,5 +448,6 @@
   for (int i = method_handle_invoke_FIRST; i <= method_handle_invoke_LAST; i++) {
     MethodKind kind = (MethodKind) i;
     _entry_table[kind] = _entry_table[Interpreter::abstract];
+    Interpreter::update_cds_entry_table(kind);
   }
 }
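
The interpreter entry points themselves are regenerated on every VM start, so their addresses cannot be stored in the archive. Instead each MethodKind owns a fixed slot of trampoline_size() bytes in the shared "MC" region, and update_cds_entry_table() (re)emits a jump from that slot to the current _entry_table[kind], both at dump time and, via the method_entry macro change below, at run time. A sketch of the slot arithmetic behind get_trampoline_code_buffer():

  // The MC buffer holds number_of_method_entries back-to-back trampolines,
  // so an archived Method's _i2i_entry is simply base + kind * size.
  size_t  sz   = SharedRuntime::trampoline_size();
  address base = MetaspaceShared::cds_i2i_entry_code_buffers(
                   (size_t)AbstractInterpreter::number_of_method_entries * sz);
  address slot = base + (size_t)kind * sz;   // == _cds_entry_table[kind]
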
--- old/src/share/vm/interpreter/abstractInterpreter.hpp 2016-02-18 15:25:04.828064039 -0800
+++ new/src/share/vm/interpreter/abstractInterpreter.hpp 2016-02-18 15:25:04.694054656 -0800
@@ -113,6 +113,7 @@
 
   // method entry points
   static address    _entry_table[number_of_method_entries];          // entry points for a given method
+  static address    _cds_entry_table[number_of_method_entries];      // entry points for methods in the CDS archive
   static address    _native_abi_to_tosca[number_of_result_handlers]; // for native method result handlers
   static address    _slow_signature_handler;                         // the native method generic (slow) signature handler
 
@@ -132,6 +133,17 @@
   static address    entry_for_kind(MethodKind k)                { assert(0 <= k && k < number_of_method_entries, "illegal kind"); return _entry_table[k]; }
   static address    entry_for_method(methodHandle m)            { return entry_for_kind(method_kind(m)); }
 
+  static address entry_for_cds_method(methodHandle m) {
+    MethodKind k = method_kind(m);
+    assert(0 <= k && k < number_of_method_entries, "illegal kind");
+    return _cds_entry_table[k];
+  }
+
+  // used by class data sharing
+  static void       update_cds_entry_table(MethodKind kind) NOT_CDS_RETURN;
+
+  static address    get_trampoline_code_buffer(AbstractInterpreter::MethodKind kind) NOT_CDS_RETURN_(0);
+
   // used for bootstrapping method handles:
   static void       set_entry_for_kind(MethodKind k, address e);
--- old/src/share/vm/interpreter/templateInterpreterGenerator.cpp 2016-02-18 15:25:05.389103324 -0800
+++ new/src/share/vm/interpreter/templateInterpreterGenerator.cpp 2016-02-18 15:25:05.244093170 -0800
@@ -212,6 +212,7 @@
 #define method_entry(kind)                                              \
   { CodeletMark cm(_masm, "method entry point (kind = " #kind ")"); \
     Interpreter::_entry_table[Interpreter::kind] = generate_method_entry(Interpreter::kind); \
+    Interpreter::update_cds_entry_table(Interpreter::kind); \
   }
 
   // all non-native method kinds
--- old/src/share/vm/memory/filemap.hpp 2016-02-18 15:25:05.969143940 -0800
+++ new/src/share/vm/memory/filemap.hpp 2016-02-18 15:25:05.778130565 -0800
@@ -101,6 +101,8 @@
     int     _narrow_klass_shift;      // save narrow klass base and shift
     address _narrow_klass_base;
     char*   _misc_data_patching_start;
+    address _cds_i2i_entry_code_buffers;
+    size_t  _cds_i2i_entry_code_buffers_size;
 
     struct space_info {
       int    _crc;           // crc checksum of the current space
@@ -189,6 +191,19 @@
   char* misc_data_patching_start()            { return _header->_misc_data_patching_start; }
   void set_misc_data_patching_start(char* p)  { _header->_misc_data_patching_start = p; }
 
+  address cds_i2i_entry_code_buffers() {
+    return _header->_cds_i2i_entry_code_buffers;
+  }
+  void set_cds_i2i_entry_code_buffers(address addr) {
+    _header->_cds_i2i_entry_code_buffers = addr;
+  }
+  size_t cds_i2i_entry_code_buffers_size() {
+    return _header->_cds_i2i_entry_code_buffers_size;
+  }
+  void set_cds_i2i_entry_code_buffers_size(size_t s) {
+    _header->_cds_i2i_entry_code_buffers_size = s;
+  }
+
   static FileMapInfo* current_info() {
     CDS_ONLY(return _current_info;)
     NOT_CDS(return NULL;)
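
For the run-time side to find the trampoline area again, its address and size are recorded in the archive header. Both halves of the round trip appear in the metaspaceShared.cpp hunks below; shown here side by side for orientation:

  // dump time, just before the regions are written out:
  mapinfo->set_cds_i2i_entry_code_buffers(MetaspaceShared::cds_i2i_entry_code_buffers());
  mapinfo->set_cds_i2i_entry_code_buffers_size(MetaspaceShared::cds_i2i_entry_code_buffers_size());

  // run time, in MetaspaceShared::initialize_shared_spaces():
  _cds_i2i_entry_code_buffers      = mapinfo->cds_i2i_entry_code_buffers();
  _cds_i2i_entry_code_buffers_size = mapinfo->cds_i2i_entry_code_buffers_size();
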
--- old/src/share/vm/memory/metaspaceShared.cpp 2016-02-18 15:25:06.466178744 -0800
+++ new/src/share/vm/memory/metaspaceShared.cpp 2016-02-18 15:25:06.322168661 -0800
@@ -57,6 +57,8 @@
 bool MetaspaceShared::_check_classes_made_progress;
 bool MetaspaceShared::_has_error_classes;
 bool MetaspaceShared::_archive_loading_failed = false;
+address MetaspaceShared::_cds_i2i_entry_code_buffers = NULL;
+size_t MetaspaceShared::_cds_i2i_entry_code_buffers_size = 0;
 SharedMiscRegion MetaspaceShared::_mc;
 SharedMiscRegion MetaspaceShared::_md;
 
@@ -127,6 +129,21 @@
   soc->do_tag(666);
 }
 
+address MetaspaceShared::cds_i2i_entry_code_buffers(size_t total_size) {
+  if (DumpSharedSpaces) {
+    if (_cds_i2i_entry_code_buffers == NULL) {
+      _cds_i2i_entry_code_buffers = (address)misc_code_space_alloc(total_size);
+      _cds_i2i_entry_code_buffers_size = total_size;
+    }
+  } else if (UseSharedSpaces) {
+    assert(_cds_i2i_entry_code_buffers != NULL, "must already been initialized");
+  } else {
+    return NULL;
+  }
+
+  assert(_cds_i2i_entry_code_buffers_size == total_size, "must not change");
+  return _cds_i2i_entry_code_buffers;
+}
 
 // CDS code for dumping shared archive.
 
@@ -673,6 +690,8 @@
   FileMapInfo* mapinfo = new FileMapInfo();
   mapinfo->populate_header(MetaspaceShared::max_alignment());
   mapinfo->set_misc_data_patching_start((char*)vtbl_list);
+  mapinfo->set_cds_i2i_entry_code_buffers(MetaspaceShared::cds_i2i_entry_code_buffers());
+  mapinfo->set_cds_i2i_entry_code_buffers_size(MetaspaceShared::cds_i2i_entry_code_buffers_size());
 
   for (int pass=1; pass<=2; pass++) {
     if (pass == 1) {
@@ -695,7 +714,7 @@
     mapinfo->write_region(MetaspaceShared::mc, _mc_vs.low(),
                           pointer_delta(mc_top, _mc_vs.low(), sizeof(char)),
                           SharedMiscCodeSize,
-                          true, true);
+                          false, true);
     mapinfo->write_string_regions(_string_regions);
   }
 
@@ -1047,6 +1066,8 @@
 
 void MetaspaceShared::initialize_shared_spaces() {
   FileMapInfo *mapinfo = FileMapInfo::current_info();
+  _cds_i2i_entry_code_buffers = mapinfo->cds_i2i_entry_code_buffers();
+  _cds_i2i_entry_code_buffers_size = mapinfo->cds_i2i_entry_code_buffers_size();
 
   char* buffer = mapinfo->misc_data_patching_start();
 
   // Skip over (reserve space for) a list of addresses of C++ vtables
--- old/src/share/vm/memory/metaspaceShared.hpp 2016-02-18 15:25:07.083221950 -0800
+++ new/src/share/vm/memory/metaspaceShared.hpp 2016-02-18 15:25:06.934211516 -0800
@@ -128,6 +128,8 @@
   static bool _check_classes_made_progress;
   static bool _has_error_classes;
   static bool _archive_loading_failed;
+  static address _cds_i2i_entry_code_buffers;
+  static size_t  _cds_i2i_entry_code_buffers_size;
 
   // Used only during dumping.
   static SharedMiscRegion _md;
@@ -217,6 +219,15 @@
   static char* misc_code_space_alloc(size_t num_bytes) {  return _mc.alloc(num_bytes); }
   static char* misc_data_space_alloc(size_t num_bytes) {  return _md.alloc(num_bytes); }
 
+  static address cds_i2i_entry_code_buffers(size_t total_size);
+
+  static address cds_i2i_entry_code_buffers() {
+    return _cds_i2i_entry_code_buffers;
+  }
+  static size_t cds_i2i_entry_code_buffers_size() {
+    return _cds_i2i_entry_code_buffers_size;
+  }
+
   static SharedMiscRegion* misc_code_region() {
     assert(DumpSharedSpaces, "used during dumping only");
     return &_mc;
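
MetaspaceShared::cds_i2i_entry_code_buffers(total_size) encodes a three-way contract: at dump time the first call allocates total_size bytes from the "MC" region and caches the address; at run time the address has already been restored from the FileMapInfo header; without CDS it returns NULL and callers skip trampoline setup. The write_region() change flips the read-only flag of the "MC" region to false, presumably so the archived trampolines can be re-emitted in place once the region is mapped. A caller-side sketch (illustrative, not part of the change):

  size_t total = (size_t)AbstractInterpreter::number_of_method_entries
                 * SharedRuntime::trampoline_size();
  address base = MetaspaceShared::cds_i2i_entry_code_buffers(total);
  if (base == NULL) {
    // neither dumping nor running from an archive: nothing to do
  }
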
--- old/src/share/vm/oops/constMethod.hpp 2016-02-18 15:25:07.589257384 -0800
+++ new/src/share/vm/oops/constMethod.hpp 2016-02-18 15:25:07.474249331 -0800
@@ -121,6 +121,7 @@
 };
 
 class KlassSizeStats;
+class AdapterHandlerEntry;
 
 // Class to collect the sizes of ConstMethod inline tables
 #define INLINE_TABLES_DO(do_element)            \
@@ -201,6 +202,12 @@
   // Raw stackmap data for the method
   Array<u1>*        _stackmap_data;
 
+  // Adapter blob (i2c/c2i) for this Method*. Set once when method is linked.
+  union {
+    AdapterHandlerEntry* _adapter;
+    AdapterHandlerEntry** _adapter_trampoline;
+  };
+
   int               _constMethod_size;
   u2                _flags;
 
@@ -276,6 +283,29 @@
   void copy_stackmap_data(ClassLoaderData* loader_data, u1* sd, int length, TRAPS);
   bool has_stackmap_table() const { return _stackmap_data != NULL; }
 
+  // adapter
+  void set_adapter_entry(AdapterHandlerEntry* adapter) {
+    assert(!is_shared(), "shared methods have fixed adapter_trampoline");
+    _adapter = adapter;
+  }
+  void set_adapter_trampoline(AdapterHandlerEntry** trampoline) {
+    assert(DumpSharedSpaces, "must be");
+    assert(*trampoline == NULL, "must be NULL during dump time, to be initialized at run time");
+    _adapter_trampoline = trampoline;
+  }
+  void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
+    assert(is_shared(), "must be");
+    *_adapter_trampoline = adapter;
+    assert(this->adapter() == adapter, "must be");
+  }
+  AdapterHandlerEntry* adapter() {
+    if (is_shared()) {
+      return *_adapter_trampoline;
+    } else {
+      return _adapter;
+    }
+  }
+
   void init_fingerprint() {
     const uint64_t initval = UCONST64(0x8000000000000000);
     _fingerprint = initval;
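
The union gives a ConstMethod two mutually exclusive lifecycles, and the asserts above keep them apart; the call sites are in the method.cpp and sharedRuntime.cpp hunks below. A sketch of the sequencing:

  // regular method:  link_method()/make_adapters() -> set_adapter_entry(entry)
  //                  adapter() == _adapter
  //
  // shared method:   dump time:  Method::unlink_method()
  //                     -> set_adapter_trampoline(slot)      // *slot is NULL in the archive
  //                  run time:   AdapterHandlerLibrary::get_adapter()
  //                     -> update_adapter_trampoline(entry)  // fills *slot on first use
  //                  adapter() == *_adapter_trampoline
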
--- old/src/share/vm/oops/method.cpp 2016-02-18 15:25:08.097292960 -0800
+++ new/src/share/vm/oops/method.cpp 2016-02-18 15:25:07.984285045 -0800
@@ -37,6 +37,7 @@
 #include "interpreter/oopMapCache.hpp"
 #include "memory/heapInspection.hpp"
 #include "memory/metadataFactory.hpp"
+#include "memory/metaspaceShared.hpp"
 #include "memory/oopFactory.hpp"
 #include "oops/constMethod.hpp"
 #include "oops/method.hpp"
@@ -121,18 +122,18 @@
 }
 
 address Method::get_i2c_entry() {
-  assert(_adapter != NULL, "must have");
-  return _adapter->get_i2c_entry();
+  assert(adapter() != NULL, "must have");
+  return adapter()->get_i2c_entry();
 }
 
 address Method::get_c2i_entry() {
-  assert(_adapter != NULL, "must have");
-  return _adapter->get_c2i_entry();
+  assert(adapter() != NULL, "must have");
+  return adapter()->get_c2i_entry();
 }
 
 address Method::get_c2i_unverified_entry() {
-  assert(_adapter != NULL, "must have");
-  return _adapter->get_c2i_unverified_entry();
+  assert(adapter() != NULL, "must have");
+  return adapter()->get_c2i_unverified_entry();
 }
 
 char* Method::name_and_sig_as_C_string() const {
@@ -890,10 +891,10 @@
 
   // this may be NULL if c2i adapters have not been made yet
   // Only should happen at allocate time.
-  if (_adapter == NULL) {
+  if (adapter() == NULL) {
     _from_compiled_entry = NULL;
   } else {
-    _from_compiled_entry = _adapter->get_c2i_entry();
+    _from_compiled_entry = adapter()->get_c2i_entry();
   }
   OrderAccess::storestore();
   _from_interpreted_entry = _i2i_entry;
@@ -904,15 +905,28 @@
 // Called by class data sharing to remove any entry points (which are not shared)
 void Method::unlink_method() {
   _code = NULL;
-  _i2i_entry = NULL;
-  _from_interpreted_entry = NULL;
+  if (!DumpSharedSpaces) {
+    _i2i_entry = NULL;
+    _from_interpreted_entry = NULL;
+  } else {
+    _i2i_entry = Interpreter::entry_for_cds_method(this);
+    _from_interpreted_entry = _i2i_entry;
+  }
+
   if (is_native()) {
     *native_function_addr() = NULL;
     set_signature_handler(NULL);
   }
   NOT_PRODUCT(set_compiled_invocation_count(0);)
-  _adapter = NULL;
-  _from_compiled_entry = NULL;
+  if (!DumpSharedSpaces) {
+    set_adapter_entry(NULL);
+    _from_compiled_entry = NULL;
+  } else {
+    CDSAdapterHandlerEntry* cds_adapter = (CDSAdapterHandlerEntry*)adapter();
+    constMethod()->set_adapter_trampoline(cds_adapter->get_adapter_trampoline());
+    _from_compiled_entry = cds_adapter->get_c2i_entry_trampoline();
+    assert(*((int*)_from_compiled_entry) == 0, "must be NULL during dump time, to be initialized at run time");
+  }
 
   // In case of DumpSharedSpaces, _method_data should always be NULL.
   //
@@ -931,14 +945,24 @@
 void Method::link_method(const methodHandle& h_method, TRAPS) {
   // If the code cache is full, we may reenter this function for the
   // leftover methods that weren't linked.
-  if (_i2i_entry != NULL) return;
+  if (is_shared()) {
+    if (adapter() != NULL) return;
+  } else {
+    if (_i2i_entry != NULL) return;
 
-  assert(_adapter == NULL, "init'd to NULL" );
+    assert(adapter() == NULL, "init'd to NULL" );
+  }
   assert( _code == NULL, "nothing compiled yet" );
 
   // Setup interpreter entrypoint
   assert(this == h_method(), "wrong h_method()" );
-  address entry = Interpreter::entry_for_method(h_method);
+  address entry;
+
+  if (this->is_shared()) {
+    entry = Interpreter::entry_for_cds_method(h_method);
+  } else {
+    entry = Interpreter::entry_for_method(h_method);
+  }
   assert(entry != NULL, "interpreter entry must be non-null");
   // Sets both _i2i_entry and _from_interpreted_entry
   set_interpreter_entry(entry);
@@ -973,8 +997,13 @@
     THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "Out of space in CodeCache for adapters");
   }
 
-  mh->set_adapter_entry(adapter);
-  mh->_from_compiled_entry = adapter->get_c2i_entry();
+  if (mh->is_shared()) {
+    assert(mh->adapter() == adapter, "must be");
+    assert(mh->_from_compiled_entry != NULL, "must be"); // FIXME, the instructions also not NULL
+  } else {
+    mh->set_adapter_entry(adapter);
+    mh->_from_compiled_entry = adapter->get_c2i_entry();
+  }
   return adapter->get_c2i_entry();
 }
 
@@ -990,6 +1019,14 @@
   }
 }
 
+volatile address Method::from_compiled_entry_no_trampoline() const {
+  nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
+  if (code) {
+    return code->verified_entry_point();
+  } else {
+    return adapter()->get_c2i_entry();
+  }
+}
 
 // The verified_code_entry() must be called when a invoke is resolved
 // on this method.
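
fixup_callers_callsite() (in the sharedRuntime.cpp hunk below) switches to the new from_compiled_entry_no_trampoline() accessor: for a shared method, _from_compiled_entry holds the archived c2i trampoline rather than the real destination, and when re-patching a compiled caller it is presumably the real target that should be compared and installed. A sketch of the distinction at run time:

  // for a shared Method* m:
  //   m->from_compiled_entry()               -> archived c2i trampoline in the "MC" region
  //   m->from_compiled_entry_no_trampoline() -> _code->verified_entry_point() if compiled,
  //                                             adapter()->get_c2i_entry()    otherwise
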
--- old/src/share/vm/oops/method.hpp 2016-02-18 15:25:08.636330703 -0800
+++ new/src/share/vm/oops/method.hpp 2016-02-18 15:25:08.522322720 -0800
@@ -93,8 +93,6 @@
 #endif
   // Entry point for calling both from and to the interpreter.
   address _i2i_entry;           // All-args-on-stack calling convention
-  // Adapter blob (i2c/c2i) for this Method*. Set once when method is linked.
-  AdapterHandlerEntry* _adapter;
   // Entry point for calling from compiled code, to compiled code if it exists
   // or else the interpreter.
   volatile address _from_compiled_entry;        // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
@@ -137,6 +135,7 @@
   static address make_adapters(methodHandle mh, TRAPS);
   volatile address from_compiled_entry() const   { return (address)OrderAccess::load_ptr_acquire(&_from_compiled_entry); }
+  volatile address from_compiled_entry_no_trampoline() const;
   volatile address from_interpreted_entry() const{ return (address)OrderAccess::load_ptr_acquire(&_from_interpreted_entry); }
 
   // access flag
@@ -431,11 +430,19 @@
   nmethod* volatile code() const                 { assert( check_code(), "" ); return (nmethod *)OrderAccess::load_ptr_acquire(&_code); }
   void clear_code();            // Clear out any compiled code
   static void set_code(methodHandle mh, nmethod* code);
-  void set_adapter_entry(AdapterHandlerEntry* adapter) {  _adapter = adapter; }
+  void set_adapter_entry(AdapterHandlerEntry* adapter) {
+    constMethod()->set_adapter_entry(adapter);
+  }
+  void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
+    constMethod()->update_adapter_trampoline(adapter);
+  }
+
   address get_i2c_entry();
   address get_c2i_entry();
   address get_c2i_unverified_entry();
-  AdapterHandlerEntry* adapter() {  return _adapter; }
+  AdapterHandlerEntry* adapter() const {
+    return constMethod()->adapter();
+  }
   // setup entry points
   void link_method(const methodHandle& method, TRAPS);
   // clear entry points. Used by sharing code
@@ -465,7 +472,14 @@
   // interpreter entry
   address interpreter_entry() const              { return _i2i_entry; }
   // Only used when first initialize so we can set _i2i_entry and _from_interpreted_entry
-  void set_interpreter_entry(address entry)      { _i2i_entry = entry;  _from_interpreted_entry = entry; }
+  void set_interpreter_entry(address entry) {
+    if (_i2i_entry != entry) {
+      _i2i_entry = entry;
+    }
+    if (_from_interpreted_entry != entry) {
+      _from_interpreted_entry = entry;
+    }
+  }
 
   // native function (used for native methods only)
   enum {
--- old/src/share/vm/runtime/sharedRuntime.cpp 2016-02-18 15:25:09.205370547 -0800
+++ new/src/share/vm/runtime/sharedRuntime.cpp 2016-02-18 15:25:09.036358714 -0800
@@ -39,6 +39,7 @@
 #include "interpreter/interpreterRuntime.hpp"
 #include "logging/log.hpp"
 #include "memory/universe.inline.hpp"
+#include "memory/metaspaceShared.hpp"
 #include "oops/oop.inline.hpp"
 #include "prims/forte.hpp"
 #include "prims/jvmtiExport.hpp"
@@ -1791,7 +1792,7 @@
 IRT_LEAF(void, SharedRuntime::fixup_callers_callsite(Method* method, address caller_pc))
   Method* moop(method);
 
-  address entry_point = moop->from_compiled_entry();
+  address entry_point = moop->from_compiled_entry_no_trampoline();
 
   // It's possible that deoptimization can occur at a call site which hasn't
   // been resolved yet, in which case this function will be called from
@@ -2324,12 +2325,15 @@
 
  public:
   AdapterHandlerTable()
-    : BasicHashtable<mtCode>(293, sizeof(AdapterHandlerEntry)) { }
+    : BasicHashtable<mtCode>(293, (DumpSharedSpaces ? sizeof(CDSAdapterHandlerEntry) : sizeof(AdapterHandlerEntry))) { }
 
   // Create a new entry suitable for insertion in the table
   AdapterHandlerEntry* new_entry(AdapterFingerPrint* fingerprint, address i2c_entry, address c2i_entry, address c2i_unverified_entry) {
     AdapterHandlerEntry* entry = (AdapterHandlerEntry*)BasicHashtable<mtCode>::new_entry(fingerprint->compute_hash());
     entry->init(fingerprint, i2c_entry, c2i_entry, c2i_unverified_entry);
+    if (DumpSharedSpaces) {
+      ((CDSAdapterHandlerEntry*)entry)->init();
+    }
     return entry;
   }
 
@@ -2492,6 +2496,28 @@
 }
 
 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
+  AdapterHandlerEntry* entry = get_adapter0(method);
+  if (method->is_shared()) {
+    MutexLocker mu(AdapterHandlerLibrary_lock);
+    if (method->adapter() == NULL) {
+      method->update_adapter_trampoline(entry);
+    }
+    address trampoline = method->from_compiled_entry();
+    if (*(int*)trampoline == 0) {
+      CodeBuffer buffer(trampoline, (int)SharedRuntime::trampoline_size());
+      MacroAssembler _masm(&buffer);
+      SharedRuntime::generate_trampoline(&_masm, entry->get_c2i_entry());
+
+      if (PrintInterpreter) {
+        Disassembler::decode(buffer.insts_begin(), buffer.insts_end());
+      }
+    }
+  }
+
+  return entry;
+}
+
+AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter0(const methodHandle& method) {
   // Use customized signature handler.  Need to lock around updates to
   // the AdapterHandlerTable (it is not safe for concurrent readers
   // and a single writer: this could be fixed if it becomes a
@@ -2508,7 +2534,7 @@
   // make sure data structure is initialized
   initialize();
 
-  if (CodeCacheExtensions::skip_compiler_support()) {
+  if (!DumpSharedSpaces && CodeCacheExtensions::skip_compiler_support()) {
     // adapters are useless and should not be used, including the
     // abstract_method_handler. However, some callers check that
     // an adapter was installed.
@@ -2990,6 +3016,17 @@
 }
 
+#if INCLUDE_CDS
+
+void CDSAdapterHandlerEntry::init() {
+  assert(DumpSharedSpaces, "used during dump time only");
+  _c2i_entry_trampoline = (address)MetaspaceShared::misc_code_space_alloc(SharedRuntime::trampoline_size());
+  _adapter_trampoline = (AdapterHandlerEntry**)MetaspaceShared::misc_data_space_alloc(sizeof(AdapterHandlerEntry*));
+};
+
+#endif // INCLUDE_CDS
+
+
 #ifndef PRODUCT
 
 void AdapterHandlerLibrary::print_statistics() {
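
The new get_adapter()/get_adapter0() split keeps adapter creation unchanged and adds a CDS-only epilogue: on first use of a shared method at run time it fills the archived *_adapter_trampoline slot and, if the trampoline bytes are still zero (as written at dump time, cf. the assert in Method::unlink_method()), materializes the jump to the real c2i entry. Dump time is handled by CDSAdapterHandlerEntry::init(), which only reserves the two slots; a sketch of the run-time patching this enables (names taken from this change):

  address trampoline = method->from_compiled_entry();   // fixed location in "MC"
  if (*(int*)trampoline == 0) {                          // not yet materialized this run
    CodeBuffer buffer(trampoline, (int)SharedRuntime::trampoline_size());
    MacroAssembler masm(&buffer);
    SharedRuntime::generate_trampoline(&masm, entry->get_c2i_entry());
  }
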
--- old/src/share/vm/runtime/sharedRuntime.hpp 2016-02-18 15:25:09.733407523 -0800
+++ new/src/share/vm/runtime/sharedRuntime.hpp 2016-02-18 15:25:09.606398628 -0800
@@ -398,6 +398,10 @@
   static void convert_ints_to_longints(int i2l_argcnt, int& in_args_count,
                                        BasicType*& in_sig_bt, VMRegPair*& in_regs);
 
+  static size_t trampoline_size();
+
+  static void generate_trampoline(MacroAssembler *masm, address destination);
+
   // Generate I2C and C2I adapters. These adapters are simple argument marshalling
   // blobs. Unlike adapters in the tiger and earlier releases the code in these
   // blobs does not create a new frame and are therefore virtually invisible
@@ -680,6 +684,17 @@
   void print_adapter_on(outputStream* st) const;
 };
 
+class CDSAdapterHandlerEntry: public AdapterHandlerEntry {
+  address               _c2i_entry_trampoline;   // allocated from shared spaces "MC" region
+  AdapterHandlerEntry** _adapter_trampoline;     // allocated from shared spaces "MD" region
+
+public:
+  address get_c2i_entry_trampoline()             const { return _c2i_entry_trampoline; }
+  AdapterHandlerEntry** get_adapter_trampoline() const { return _adapter_trampoline; }
+  void init() NOT_CDS_RETURN;
+};
+
+
 class AdapterHandlerLibrary: public AllStatic {
  private:
   static BufferBlob* _buffer; // the temporary code buffer in CodeCache
@@ -687,6 +702,7 @@
   static AdapterHandlerEntry* _abstract_method_handler;
   static BufferBlob* buffer_blob();
   static void initialize();
+  static AdapterHandlerEntry* get_adapter0(const methodHandle& method);
 
  public:
--- old/src/share/vm/runtime/vmStructs.cpp 2016-02-18 15:25:10.306447647 -0800
+++ new/src/share/vm/runtime/vmStructs.cpp 2016-02-18 15:25:10.182438964 -0800
@@ -399,7 +399,6 @@
   nonproduct_nonstatic_field(Method,    _compiled_invocation_count,  int)                    \
   volatile_nonstatic_field(Method,      _code,                       nmethod*)               \
   nonstatic_field(Method,               _i2i_entry,                  address)                \
-  nonstatic_field(Method,               _adapter,                    AdapterHandlerEntry*)   \
   volatile_nonstatic_field(Method,      _from_compiled_entry,        address)                \
   volatile_nonstatic_field(Method,      _from_interpreted_entry,     address)                \
   volatile_nonstatic_field(ConstMethod, _fingerprint,                uint64_t)               \