src/share/vm/interpreter/rewriter.cpp

*** 24,33 **** --- 24,34 ----
  #include "precompiled.hpp"
  #include "interpreter/bytecodes.hpp"
  #include "interpreter/interpreter.hpp"
  #include "interpreter/rewriter.hpp"
  #include "memory/gcLocker.hpp"
+ #include "memory/metaspaceShared.hpp"
  #include "memory/resourceArea.hpp"
  #include "oops/generateOopMap.hpp"
  #include "prims/methodHandles.hpp"
*** 140,149 **** --- 141,152 ----
  // Rewrite a classfile-order CP index into a native-order CPC index.
  void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
    address p = bcp + offset;
    if (!reverse) {
+     assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp),
+            "rewriting to _fast_xxxx for archived methods should only happen at dump time");
      int cp_index = Bytes::get_Java_u2(p);
      int cache_index = cp_entry_to_cp_cache(cp_index);
      Bytes::put_native_u2(p, cache_index);
      if (!_method_handle_invokers.is_empty())
        maybe_rewrite_invokehandle(p - 1, cp_index, cache_index, reverse);
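The assert added above recurs at every rewrite site in this change: methods stored in the CDS archive live in the read-only shared space, so their bytecodes may only be patched while the archive is being dumped (DumpSharedSpaces). A minimal sketch of that guard factored into one helper, assuming only the two symbols this change already uses (the helper name itself is hypothetical):

    // Hypothetical helper, not part of this change; it is the same guard
    // each rewrite site below asserts inline.
    // DumpSharedSpaces is true only while dumping the CDS archive;
    // MetaspaceShared::is_in_shared_space(p) reports whether p lies in the
    // mapped, read-only shared region.
    static inline void assert_rewritable(address bcp) {
      assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp),
             "archived method bytecodes may only be rewritten at dump time");
    }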
*** 163,172 **** --- 166,177 ----
  // InterfaceMethodrefs would resolve to the same thing so a new cpCache entry
  // is created for each one. This was added with lambda.
  void Rewriter::rewrite_invokespecial(address bcp, int offset, bool reverse, bool* invokespecial_error) {
    address p = bcp + offset;
    if (!reverse) {
+     assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp),
+            "rewriting to _fast_invokevfinal for archived methods should only happen at dump time");
      int cp_index = Bytes::get_Java_u2(p);
      if (_pool->tag_at(cp_index).is_interface_method()) {
        int cache_index = add_invokespecial_cp_cache_entry(cp_index);
        if (cache_index != (int)(jushort) cache_index) {
          *invokespecial_error = true;
*** 291,300 **** --- 296,307 ----
  // Rewrite some ldc bytecodes to _fast_aldc
  void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
                                   bool reverse) {
    if (!reverse) {
      assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
+     assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp),
+            "rewriting to _fast_aldc or _fast_aldc_w for archived methods should only happen at dump time");
      address p = bcp + offset;
      int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
      constantTag tag = _pool->tag_at(cp_index).value();
      if (tag.is_method_handle() || tag.is_method_type() || tag.is_string()) {
        int ref_index = cp_entry_to_resolved_references(cp_index);
*** 372,381 **** --- 379,390 ----
      assert(bc_length != 0, "impossible bytecode length");

      switch (c) {
        case Bytecodes::_lookupswitch   : {
  #ifndef CC_INTERP
+         assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp),
+                "rewriting to _fast_xxxswitch for archived methods should only happen at dump time");
          Bytecode_lookupswitch bc(method, bcp);
          (*bcp) = (
            bc.number_of_pairs() < BinarySwitchThreshold
            ? Bytecodes::_fast_linearswitch
            : Bytecodes::_fast_binaryswitch
*** 399,414 **** --- 408,428 ----
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_putstatic      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_invokevirtual  : // fall through
+       assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp),
+              "rewriting to _fast_getXXX/putXXX or _fast_invokeXXX for archived methods should"
+              " only happen at dump time");
      case Bytecodes::_invokestatic   :
      case Bytecodes::_invokeinterface:
      case Bytecodes::_invokehandle   : // if reverse=true
        rewrite_member_reference(bcp, prefix_length+1, reverse);
        break;
      case Bytecodes::_invokedynamic:
+       assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp),
+              "rewriting _invokedynamic for archived methods should only happen at dump time");
        rewrite_invokedynamic(bcp, prefix_length+1, reverse);
        break;
      case Bytecodes::_ldc:
      case Bytecodes::_fast_aldc:  // if reverse=true
        maybe_rewrite_ldc(bcp, prefix_length+1, false, reverse);