--- old/src/cpu/ppc/vm/templateTable_ppc_64.cpp 2015-03-20 13:40:12.480625610 -0700 +++ new/src/cpu/ppc/vm/templateTable_ppc_64.cpp 2015-03-20 13:40:12.365619843 -0700 @@ -438,6 +438,14 @@ } void TemplateTable::iload() { + iload_internal(); +} + +void TemplateTable::nofast_iload() { + iload_internal(may_not_rewrite); +} + +void TemplateTable::iload_internal(RewriteControl rc) { transition(vtos, itos); // Get the local value into tos @@ -446,7 +454,7 @@ // Rewrite iload,iload pair into fast_iload2 // iload,caload pair into fast_icaload - if (RewriteFrequentPairs) { + if (RewriteFrequentPairs && rc == may_rewrite) { Label Lrewrite, Ldone; Register Rnext_byte = R3_ARG1, Rrewrite_to = R6_ARG4, @@ -710,6 +718,14 @@ } void TemplateTable::aload_0() { + aload_0_internal(); +} + +void TemplateTable::nofast_aload_0() { + aload_0_internal(may_not_rewrite); +} + +void TemplateTable::aload_0_internal(RewriteControl rc) { transition(vtos, atos); // According to bytecode histograms, the pairs: // @@ -733,7 +749,7 @@ // These bytecodes with a small amount of code are most profitable // to rewrite. - if (RewriteFrequentPairs) { + if (RewriteFrequentPairs && rc == may_rewrite) { Label Lrewrite, Ldont_rewrite; Register Rnext_byte = R3_ARG1, @@ -2145,6 +2161,12 @@ __ get_cache_and_index_at_bcp(Rcache, 1, index_size); Label Lresolved, Ldone; + Bytecodes::Code code = bytecode(); + switch (code) { + case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break; + case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break; + } + assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); // We are resolved if the indices offset contains the current bytecode. #if defined(VM_LITTLE_ENDIAN) @@ -2153,11 +2175,11 @@ __ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + 7 - (byte_no + 1), Rcache); #endif // Acquire by cmp-br-isync (see below). - __ cmpdi(CCR0, Rscratch, (int)bytecode()); + __ cmpdi(CCR0, Rscratch, code); __ beq(CCR0, Lresolved); address entry = NULL; - switch (bytecode()) { + switch (code) { case Bytecodes::_getstatic : // fall through case Bytecodes::_putstatic : // fall through case Bytecodes::_getfield : // fall through @@ -2170,7 +2192,7 @@ case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break; default : ShouldNotReachHere(); break; } - __ li(R4_ARG2, (int)bytecode()); + __ li(R4_ARG2, code); __ call_VM(noreg, entry, R4_ARG2, true); // Update registers with resolved info. @@ -2351,7 +2373,7 @@ } // PPC64: implement volatile loads as fence-store-acquire. -void TemplateTable::getfield_or_static(int byte_no, bool is_static) { +void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) { transition(vtos, vtos); Label Lacquire, Lisync; @@ -2435,7 +2457,11 @@ branch_table[dtos] = __ pc(); // non-volatile_entry point __ lfdx(F15_ftos, Rclass_or_obj, Roffset); __ push(dtos); - if (!is_static) patch_bytecode(Bytecodes::_fast_dgetfield, Rbc, Rscratch); + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_dgetfield, Rbc, Rscratch); + } + { Label acquire_double; __ beq(CCR6, acquire_double); // Volatile? 
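The hunks that follow all repeat one guard: each field template patches itself into a _fast_ variant only when it was generated with may_rewrite, while the new nofast_* entry points reuse the same generator with may_not_rewrite. A minimal standalone C++ sketch of just that pattern is below; ToyTemplateTable, emit_load and record_patch are invented stand-ins, not HotSpot code.

#include <cstdio>

enum RewriteControl { may_rewrite, may_not_rewrite };

// Stand-ins for the real code generator and the bytecode patcher.
static void emit_load(const char* kind)  { std::printf("emit %s load\n", kind); }
static void record_patch(const char* bc) { std::printf("patch current bytecode to %s\n", bc); }

struct ToyTemplateTable {
  // The ordinary template may rewrite itself into a _fast_ form ...
  static void getfield(int byte_no)        { getfield_or_static(byte_no, false, may_rewrite); }
  // ... while the nofast twin runs the very same generator but never patches,
  // which is what keeps a CDS-archived (read-only) method body untouched.
  static void nofast_getfield(int byte_no) { getfield_or_static(byte_no, false, may_not_rewrite); }

  static void getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
    (void)byte_no;                 // unused in this sketch
    emit_load("field");
    if (!is_static && rc == may_rewrite) {
      record_patch("_fast_xgetfield");
    }
  }
};

int main() {
  ToyTemplateTable::getfield(1);         // emits the load, then records a patch
  ToyTemplateTable::nofast_getfield(1);  // emits the load only
  return 0;
}

Routing both entry points through one generator keeps the interpreted semantics identical; the only difference is whether the bytecode stream itself ever gets written to.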
@@ -2454,7 +2480,11 @@ branch_table[ftos] = __ pc(); // non-volatile_entry point __ lfsx(F15_ftos, Rclass_or_obj, Roffset); __ push(ftos); - if (!is_static) { patch_bytecode(Bytecodes::_fast_fgetfield, Rbc, Rscratch); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_fgetfield, Rbc, Rscratch); + } + { Label acquire_float; __ beq(CCR6, acquire_float); // Volatile? @@ -2473,7 +2503,11 @@ branch_table[itos] = __ pc(); // non-volatile_entry point __ lwax(R17_tos, Rclass_or_obj, Roffset); __ push(itos); - if (!is_static) patch_bytecode(Bytecodes::_fast_igetfield, Rbc, Rscratch); + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_igetfield, Rbc, Rscratch); + } + __ beq(CCR6, Lacquire); // Volatile? __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode())); @@ -2484,7 +2518,11 @@ branch_table[ltos] = __ pc(); // non-volatile_entry point __ ldx(R17_tos, Rclass_or_obj, Roffset); __ push(ltos); - if (!is_static) patch_bytecode(Bytecodes::_fast_lgetfield, Rbc, Rscratch); + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_lgetfield, Rbc, Rscratch); + } + __ beq(CCR6, Lacquire); // Volatile? __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode())); @@ -2496,7 +2534,11 @@ branch_table[btos] = __ pc(); // non-volatile_entry point __ lbzx(R17_tos, Rclass_or_obj, Roffset); __ extsb(R17_tos, R17_tos); __ push(btos); - if (!is_static) patch_bytecode(Bytecodes::_fast_bgetfield, Rbc, Rscratch); + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_bgetfield, Rbc, Rscratch); + } + __ beq(CCR6, Lacquire); // Volatile? __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode())); @@ -2507,7 +2549,11 @@ branch_table[ctos] = __ pc(); // non-volatile_entry point __ lhzx(R17_tos, Rclass_or_obj, Roffset); __ push(ctos); - if (!is_static) patch_bytecode(Bytecodes::_fast_cgetfield, Rbc, Rscratch); + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_cgetfield, Rbc, Rscratch); + } + __ beq(CCR6, Lacquire); // Volatile? __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode())); @@ -2518,7 +2564,11 @@ branch_table[stos] = __ pc(); // non-volatile_entry point __ lhax(R17_tos, Rclass_or_obj, Roffset); __ push(stos); - if (!is_static) patch_bytecode(Bytecodes::_fast_sgetfield, Rbc, Rscratch); + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_sgetfield, Rbc, Rscratch); + } + __ beq(CCR6, Lacquire); // Volatile? __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode())); @@ -2530,8 +2580,12 @@ __ load_heap_oop(R17_tos, (RegisterOrConstant)Roffset, Rclass_or_obj); __ verify_oop(R17_tos); __ push(atos); + //__ dcbt(R17_tos); // prefetch - if (!is_static) patch_bytecode(Bytecodes::_fast_agetfield, Rbc, Rscratch); + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_agetfield, Rbc, Rscratch); + } + __ beq(CCR6, Lacquire); // Volatile? __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode())); @@ -2554,6 +2608,10 @@ getfield_or_static(byte_no, false); } +void TemplateTable::nofast_getfield(int byte_no) { + getfield_or_static(byte_no, false, may_not_rewrite); +} + void TemplateTable::getstatic(int byte_no) { getfield_or_static(byte_no, true); } @@ -2644,7 +2702,7 @@ } // PPC64: implement volatile stores as release-store (return bytecode contains an additional release).
-void TemplateTable::putfield_or_static(int byte_no, bool is_static) { +void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) { Label Lvolatile; const Register Rcache = R5_ARG3, // Do not use ARG1/2 (causes trouble in jvmti_post_field_mod). @@ -2718,7 +2776,11 @@ __ pop(dtos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1. __ stfdx(F15_ftos, Rclass_or_obj, Roffset); - if (!is_static) { patch_bytecode(Bytecodes::_fast_dputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_dputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? } @@ -2732,7 +2794,11 @@ __ pop(ftos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1. __ stfsx(F15_ftos, Rclass_or_obj, Roffset); - if (!is_static) { patch_bytecode(Bytecodes::_fast_fputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_fputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? } @@ -2746,7 +2812,11 @@ __ pop(itos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1. __ stwx(R17_tos, Rclass_or_obj, Roffset); - if (!is_static) { patch_bytecode(Bytecodes::_fast_iputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_iputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? } @@ -2760,7 +2830,11 @@ __ pop(ltos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1. __ stdx(R17_tos, Rclass_or_obj, Roffset); - if (!is_static) { patch_bytecode(Bytecodes::_fast_lputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_lputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? } @@ -2774,7 +2848,11 @@ __ pop(btos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1. __ stbx(R17_tos, Rclass_or_obj, Roffset); - if (!is_static) { patch_bytecode(Bytecodes::_fast_bputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_bputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? } @@ -2788,7 +2866,11 @@ __ pop(ctos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.. __ sthx(R17_tos, Rclass_or_obj, Roffset); - if (!is_static) { patch_bytecode(Bytecodes::_fast_cputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_cputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? } @@ -2802,7 +2884,11 @@ __ pop(stos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1. 
__ sthx(R17_tos, Rclass_or_obj, Roffset); - if (!is_static) { patch_bytecode(Bytecodes::_fast_sputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_sputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? } @@ -2816,7 +2902,11 @@ __ pop(atos); if (!is_static) { pop_and_check_object(Rclass_or_obj); } // kills R11_scratch1 do_oop_store(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, Rscratch2, Rscratch3, _bs->kind(), false /* precise */, true /* check null */); - if (!is_static) { patch_bytecode(Bytecodes::_fast_aputfield, Rbc, Rscratch, true, byte_no); } + + if (!is_static && rc == may_rewrite) { + patch_bytecode(Bytecodes::_fast_aputfield, Rbc, Rscratch, true, byte_no); + } + if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ beq(CR_is_vol, Lvolatile); // Volatile? __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode())); @@ -2840,6 +2930,10 @@ putfield_or_static(byte_no, false); } +void TemplateTable::nofast_putfield(int byte_no) { + putfield_or_static(byte_no, false, may_not_rewrite); +} + void TemplateTable::putstatic(int byte_no) { putfield_or_static(byte_no, true); } --- old/src/cpu/sparc/vm/templateTable_sparc.cpp 2015-03-20 13:40:13.046653987 -0700 +++ new/src/cpu/sparc/vm/templateTable_sparc.cpp 2015-03-20 13:40:12.928648070 -0700 @@ -386,7 +386,6 @@ __ verify_oop(Otos_i); } - void TemplateTable::ldc2_w() { transition(vtos, vtos); Label Long, exit; @@ -431,22 +430,28 @@ __ bind(exit); } - void TemplateTable::locals_index(Register reg, int offset) { __ ldub( at_bcp(offset), reg ); } - void TemplateTable::locals_index_wide(Register reg) { // offset is 2, not 1, because Lbcp points to wide prefix code __ get_2_byte_integer_at_bcp(2, G4_scratch, reg, InterpreterMacroAssembler::Unsigned); } void TemplateTable::iload() { + iload_internal(); +} + +void TemplateTable::nofast_iload() { + iload_internal(may_not_rewrite); +} + +void TemplateTable::iload_internal(RewriteControl rc) { transition(vtos, itos); // Rewrite iload,iload pair into fast_iload2 // iload,caload pair into fast_icaload - if (RewriteFrequentPairs) { + if (RewriteFrequentPairs && rc == may_rewrite) { Label rewrite, done; // get next byte @@ -673,8 +678,15 @@ __ ld_ptr( Llocals, Interpreter::local_offset_in_bytes(n), Otos_i ); } - void TemplateTable::aload_0() { + aload_0_internal(); +} + +void TemplateTable::nofast_aload_0() { + aload_0_internal(may_not_rewrite); +} + +void TemplateTable::aload_0_internal(RewriteControl rc) { transition(vtos, atos); // According to bytecode histograms, the pairs: @@ -688,7 +700,7 @@ // bytecode into a pair bytecode; otherwise it rewrites the current // bytecode into _fast_aload_0 that doesn't do the pair check anymore. // - if (RewriteFrequentPairs) { + if (RewriteFrequentPairs && rc == may_rewrite) { Label rewrite, done; // get next byte @@ -732,7 +744,6 @@ } } - void TemplateTable::istore() { transition(itos, vtos); locals_index(G3_scratch); @@ -2046,16 +2057,23 @@ Register index, size_t index_size) { // Depends on cpCacheOop layout!
+ Label resolved; + Bytecodes::Code code = bytecode(); + switch (code) { + case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break; + case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break; + } - assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); - __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, Lbyte_code, byte_no, 1, index_size); - __ cmp(Lbyte_code, (int) bytecode()); // have we resolved this bytecode? - __ br(Assembler::equal, false, Assembler::pt, resolved); - __ delayed()->set((int)bytecode(), O1); + assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); + __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, Lbyte_code, byte_no, 1, index_size); + __ cmp(Lbyte_code, code); // have we resolved this bytecode? + __ br(Assembler::equal, false, Assembler::pt, resolved); + __ delayed()->set(code, O1); address entry; - switch (bytecode()) { + + switch (code) { case Bytecodes::_getstatic : // fall through case Bytecodes::_putstatic : // fall through case Bytecodes::_getfield : // fall through @@ -2067,7 +2085,7 @@ case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break; case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break; default: - fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode()))); + fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(code))); break; } // first time invocation - must resolve first @@ -2184,7 +2202,7 @@ } } -void TemplateTable::getfield_or_static(int byte_no, bool is_static) { +void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) { transition(vtos, vtos); Register Rcache = G3_scratch; @@ -2232,7 +2250,7 @@ __ load_heap_oop(Rclass, Roffset, Otos_i); __ verify_oop(Otos_i); __ push(atos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch); } __ ba(checkVolatile); @@ -2247,7 +2265,7 @@ // itos __ ld(Rclass, Roffset, Otos_i); __ push(itos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch); } __ ba(checkVolatile); @@ -2263,7 +2281,7 @@ // load must be atomic __ ld_long(Rclass, Roffset, Otos_l); __ push(ltos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch); } __ ba(checkVolatile); @@ -2278,7 +2296,7 @@ // btos __ ldsb(Rclass, Roffset, Otos_i); __ push(itos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch); } __ ba(checkVolatile); @@ -2293,7 +2311,7 @@ // ctos __ lduh(Rclass, Roffset, Otos_i); __ push(itos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch); } __ ba(checkVolatile); @@ -2308,7 +2326,7 @@ // stos __ ldsh(Rclass, Roffset, Otos_i); __ push(itos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch); } __ ba(checkVolatile); @@ -2324,7 +2342,7 @@ // ftos __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f); __ push(ftos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch); } __ ba(checkVolatile); @@ -2336,7 +2354,7 @@ // dtos __ ldf(FloatRegisterImpl::D, Rclass, Roffset, 
Ftos_d); __ push(dtos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_dgetfield, G3_scratch, G4_scratch); } @@ -2351,16 +2369,18 @@ __ bind(exit); } - void TemplateTable::getfield(int byte_no) { getfield_or_static(byte_no, false); } +void TemplateTable::nofast_getfield(int byte_no) { + getfield_or_static(byte_no, false, may_not_rewrite); +} + void TemplateTable::getstatic(int byte_no) { getfield_or_static(byte_no, true); } - void TemplateTable::fast_accessfield(TosState state) { transition(atos, state); Register Rcache = G3_scratch; @@ -2545,7 +2565,7 @@ __ verify_oop(r); } -void TemplateTable::putfield_or_static(int byte_no, bool is_static) { +void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) { transition(vtos, vtos); Register Rcache = G3_scratch; Register index = G4_scratch; @@ -2621,7 +2641,7 @@ __ pop_i(); pop_and_check_object(Rclass); __ st(Otos_i, Rclass, Roffset); - patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch, true, byte_no); + if (rc == may_rewrite) patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch, true, byte_no); __ ba(checkVolatile); __ delayed()->tst(Lscratch); } @@ -2637,7 +2657,7 @@ pop_and_check_object(Rclass); __ verify_oop(Otos_i); do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false); - patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch, true, byte_no); + if (rc == may_rewrite) patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch, true, byte_no); __ ba(checkVolatile); __ delayed()->tst(Lscratch); } @@ -2654,7 +2674,7 @@ __ pop_i(); if (!is_static) pop_and_check_object(Rclass); __ stb(Otos_i, Rclass, Roffset); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch, true, byte_no); } __ ba(checkVolatile); @@ -2671,7 +2691,7 @@ __ pop_l(); if (!is_static) pop_and_check_object(Rclass); __ st_long(Otos_l, Rclass, Roffset); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch, true, byte_no); } __ ba(checkVolatile); @@ -2688,7 +2708,7 @@ __ pop_i(); if (!is_static) pop_and_check_object(Rclass); __ sth(Otos_i, Rclass, Roffset); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch, true, byte_no); } __ ba(checkVolatile); @@ -2705,7 +2725,7 @@ __ pop_i(); if (!is_static) pop_and_check_object(Rclass); __ sth(Otos_i, Rclass, Roffset); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch, true, byte_no); } __ ba(checkVolatile); @@ -2722,7 +2742,7 @@ __ pop_f(); if (!is_static) pop_and_check_object(Rclass); __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch, true, byte_no); } __ ba(checkVolatile); @@ -2736,7 +2756,7 @@ __ pop_d(); if (!is_static) pop_and_check_object(Rclass); __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_dputfield, G3_scratch, G4_scratch, true, byte_no); } } @@ -2810,16 +2830,18 @@ } } - void TemplateTable::putfield(int byte_no) { putfield_or_static(byte_no, false); } +void TemplateTable::nofast_putfield(int byte_no) { + putfield_or_static(byte_no, false, may_not_rewrite); +} + 
void TemplateTable::putstatic(int byte_no) { putfield_or_static(byte_no, true); } - void TemplateTable::fast_xaccess(TosState state) { transition(vtos, state); Register Rcache = G3_scratch; @@ -2972,7 +2994,9 @@ __ br(Assembler::zero, false, Assembler::pt, notFinal); __ delayed()->and3(Rret, 0xFF, G4_scratch); // gets number of parameters - patch_bytecode(Bytecodes::_fast_invokevfinal, Rscratch, Rtemp); + if (RewriteBytecodes && !UseSharedSpaces) { + patch_bytecode(Bytecodes::_fast_invokevfinal, Rscratch, Rtemp); + } invokevfinal_helper(Rscratch, Rret); --- old/src/cpu/x86/vm/templateTable_x86.cpp 2015-03-20 13:40:13.766690084 -0700 +++ new/src/cpu/x86/vm/templateTable_x86.cpp 2015-03-20 13:40:13.646684068 -0700 @@ -544,8 +544,16 @@ } void TemplateTable::iload() { + iload_internal(); +} + +void TemplateTable::nofast_iload() { + iload_internal(may_not_rewrite); +} + +void TemplateTable::iload_internal(RewriteControl rc) { transition(vtos, itos); - if (RewriteFrequentPairs) { + if (RewriteFrequentPairs && rc == may_rewrite) { Label rewrite, done; const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); LP64_ONLY(assert(rbx != bc, "register damaged")); @@ -816,6 +824,14 @@ } void TemplateTable::aload_0() { + aload_0_internal(); +} + +void TemplateTable::nofast_aload_0() { + aload_0_internal(may_not_rewrite); +} + +void TemplateTable::aload_0_internal(RewriteControl rc) { transition(vtos, atos); // According to bytecode histograms, the pairs: // @@ -838,7 +854,7 @@ // aload_0, iload_1 // These bytecodes with a small amount of code are most profitable // to rewrite - if (RewriteFrequentPairs) { + if (RewriteFrequentPairs && rc == may_rewrite) { Label rewrite, done; const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); @@ -2492,14 +2508,21 @@ assert_different_registers(Rcache, index, temp); Label resolved; - assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); - __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size); - __ cmpl(temp, (int) bytecode()); // have we resolved this bytecode? - __ jcc(Assembler::equal, resolved); + + Bytecodes::Code code = bytecode(); + switch (code) { + case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break; + case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break; + } + + assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); + __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size); + __ cmpl(temp, code); // have we resolved this bytecode? 
+ __ jcc(Assembler::equal, resolved); // resolve first time through address entry; - switch (bytecode()) { + switch (code) { case Bytecodes::_getstatic : // fall through case Bytecodes::_putstatic : // fall through case Bytecodes::_getfield : // fall through @@ -2511,10 +2534,10 @@ case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break; case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break; default: - fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode()))); + fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(code))); break; } - __ movl(temp, (int)bytecode()); + __ movl(temp, code); __ call_VM(noreg, entry, temp); // Update registers with resolved info __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); @@ -2629,7 +2652,7 @@ __ verify_oop(r); } -void TemplateTable::getfield_or_static(int byte_no, bool is_static) { +void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) { transition(vtos, vtos); const Register cache = rcx; @@ -2661,7 +2684,7 @@ __ load_signed_byte(rax, field); __ push(btos); // Rewrite bytecode to be faster - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx); } __ jmp(Done); @@ -2672,7 +2695,7 @@ // atos __ load_heap_oop(rax, field); __ push(atos); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx); } __ jmp(Done); @@ -2684,7 +2707,7 @@ __ movl(rax, field); __ push(itos); // Rewrite bytecode to be faster - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx); } __ jmp(Done); @@ -2696,7 +2719,7 @@ __ load_unsigned_short(rax, field); __ push(ctos); // Rewrite bytecode to be faster - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx); } __ jmp(Done); @@ -2708,7 +2731,7 @@ __ load_signed_short(rax, field); __ push(stos); // Rewrite bytecode to be faster - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx); } __ jmp(Done); @@ -2732,7 +2755,7 @@ __ push(ltos); // Rewrite bytecode to be faster - LP64_ONLY(if (!is_static) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx)); + LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx)); __ jmp(Done); __ bind(notLong); @@ -2744,7 +2767,7 @@ NOT_LP64(__ fld_s(field)); __ push(ftos); // Rewrite bytecode to be faster - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx); } __ jmp(Done); @@ -2759,7 +2782,7 @@ NOT_LP64(__ fld_d(field)); __ push(dtos); // Rewrite bytecode to be faster - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx); } #ifdef ASSERT @@ -2780,6 +2803,10 @@ getfield_or_static(byte_no, false); } +void TemplateTable::nofast_getfield(int byte_no) { + getfield_or_static(byte_no, false, may_not_rewrite); +} + void TemplateTable::getstatic(int byte_no) { getfield_or_static(byte_no, true); } @@ -2871,7 +2898,7 @@ } } -void TemplateTable::putfield_or_static(int byte_no, bool is_static) { +void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) { transition(vtos, vtos); const Register cache = rcx; @@ -2912,7 +2939,7 @@ __ pop(btos); if (!is_static) 
pop_and_check_object(obj); __ movb(field, rax); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no); } __ jmp(Done); @@ -2928,7 +2955,7 @@ if (!is_static) pop_and_check_object(obj); // Store into the field do_oop_store(_masm, field, rax, _bs->kind(), false); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no); } __ jmp(Done); @@ -2943,7 +2970,7 @@ __ pop(itos); if (!is_static) pop_and_check_object(obj); __ movl(field, rax); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no); } __ jmp(Done); @@ -2958,7 +2985,7 @@ __ pop(ctos); if (!is_static) pop_and_check_object(obj); __ movw(field, rax); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no); } __ jmp(Done); @@ -2973,7 +3000,7 @@ __ pop(stos); if (!is_static) pop_and_check_object(obj); __ movw(field, rax); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no); } __ jmp(Done); @@ -2989,7 +3016,7 @@ __ pop(ltos); if (!is_static) pop_and_check_object(obj); __ movq(field, rax); - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no); } __ jmp(Done); @@ -3036,7 +3063,7 @@ if (!is_static) pop_and_check_object(obj); NOT_LP64( __ fstp_s(field);) LP64_ONLY( __ movflt(field, xmm0);) - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no); } __ jmp(Done); @@ -3054,7 +3081,7 @@ if (!is_static) pop_and_check_object(obj); NOT_LP64( __ fstp_d(field);) LP64_ONLY( __ movdbl(field, xmm0);) - if (!is_static) { + if (!is_static && rc == may_rewrite) { patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no); } } @@ -3080,6 +3107,10 @@ putfield_or_static(byte_no, false); } +void TemplateTable::nofast_putfield(int byte_no) { + putfield_or_static(byte_no, false, may_not_rewrite); +} + void TemplateTable::putstatic(int byte_no) { putfield_or_static(byte_no, true); } --- old/src/share/vm/interpreter/bytecodeInterpreter.cpp 2015-03-20 13:40:14.381720916 -0700 +++ new/src/share/vm/interpreter/bytecodeInterpreter.cpp 2015-03-20 13:40:14.261714900 -0700 @@ -576,10 +576,10 @@ /* 0xD8 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default, /* 0xDC */ &&opc_default, &&opc_default, &&opc_default, &&opc_default, -/* 0xE0 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default, -/* 0xE4 */ &&opc_default, &&opc_fast_aldc, &&opc_fast_aldc_w, &&opc_return_register_finalizer, -/* 0xE8 */ &&opc_invokehandle,&&opc_default, &&opc_default, &&opc_default, -/* 0xEC */ &&opc_default, &&opc_default, &&opc_default, &&opc_default, +/* 0xE0 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default, +/* 0xE4 */ &&opc_default, &&opc_fast_aldc, &&opc_fast_aldc_w, &&opc_return_register_finalizer, +/* 0xE8 */ &&opc_invokehandle,&&opc_nofast_getfield,&&opc_nofast_putfield, &&opc_nofast_aload_0, +/* 0xEC */ &&opc_nofast_iload,&&opc_default, &&opc_default, &&opc_default, /* 0xF0 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default, /* 0xF4 */ &&opc_default, &&opc_default, &&opc_default, &&opc_default, --- old/src/share/vm/interpreter/bytecodes.cpp 2015-03-20 13:40:15.155759721 -0700 +++ new/src/share/vm/interpreter/bytecodes.cpp 2015-03-20 
13:40:15.032753556 -0700 @@ -525,6 +525,12 @@ def(_fast_aldc , "fast_aldc" , "bj" , NULL , T_OBJECT, 1, true, _ldc ); def(_fast_aldc_w , "fast_aldc_w" , "bJJ" , NULL , T_OBJECT, 1, true, _ldc_w ); + def(_nofast_getfield , "nofast_getfield" , "bJJ" , NULL , T_ILLEGAL, 0, true, _getfield ); + def(_nofast_putfield , "nofast_putfield" , "bJJ" , NULL , T_ILLEGAL, -2, true, _putfield ); + + def(_nofast_aload_0 , "nofast_aload_0" , "b" , NULL , T_ILLEGAL, 1, true, _aload_0 ); + def(_nofast_iload , "nofast_iload" , "bi" , NULL , T_ILLEGAL, 1, false, _iload ); + def(_shouldnotreachhere , "_shouldnotreachhere" , "b" , NULL , T_VOID , 0, false); // compare can_trap information for each bytecode with the --- old/src/share/vm/interpreter/bytecodes.hpp 2015-03-20 13:40:15.766790353 -0700 +++ new/src/share/vm/interpreter/bytecodes.hpp 2015-03-20 13:40:15.636783837 -0700 @@ -285,6 +285,19 @@ // special handling of signature-polymorphic methods: _invokehandle , + // These bytecodes are rewritten at CDS dump time, so that we can prevent them from being + // rewritten at run time. This way, the ConstMethods can be placed in the CDS ReadOnly + // section, and RewriteBytecodes/RewriteFrequentPairs can rewrite non-CDS bytecodes + // at run time. + // _invokevirtual is also rewritten on sparc; that rewrite is disabled if UseSharedSpaces is turned on. + // + // Rewritten at CDS dump time to | Original bytecode + // ------------------------------+------------------ + _nofast_getfield , // <- _getfield + _nofast_putfield , // <- _putfield + _nofast_aload_0 , // <- _aload_0 + _nofast_iload , // <- _iload + _shouldnotreachhere, // For debugging --- old/src/share/vm/interpreter/interpreterRuntime.cpp 2015-03-20 13:40:16.301817177 -0700 +++ new/src/share/vm/interpreter/interpreterRuntime.cpp 2015-03-20 13:40:16.184811311 -0700 @@ -541,7 +541,8 @@ // resolve field fieldDescriptor info; constantPoolHandle pool(thread, method(thread)->constants()); - bool is_put = (bytecode == Bytecodes::_putfield || bytecode == Bytecodes::_putstatic); + bool is_put = (bytecode == Bytecodes::_putfield || bytecode == Bytecodes::_nofast_putfield || + bytecode == Bytecodes::_putstatic); bool is_static = (bytecode == Bytecodes::_getstatic || bytecode == Bytecodes::_putstatic); { --- old/src/share/vm/interpreter/linkResolver.cpp 2015-03-20 13:40:16.860845202 -0700 +++ new/src/share/vm/interpreter/linkResolver.cpp 2015-03-20 13:40:16.743839338 -0700 @@ -777,11 +777,11 @@ TRAPS) { assert(byte == Bytecodes::_getstatic || byte == Bytecodes::_putstatic || byte == Bytecodes::_getfield || byte == Bytecodes::_putfield || + byte == Bytecodes::_nofast_getfield || byte == Bytecodes::_nofast_putfield || (byte == Bytecodes::_nop && !check_access), "bad field access bytecode"); bool is_static = (byte == Bytecodes::_getstatic || byte == Bytecodes::_putstatic); - bool is_put = (byte == Bytecodes::_putfield || byte == Bytecodes::_putstatic); - + bool is_put = (byte == Bytecodes::_putfield || byte == Bytecodes::_putstatic || byte == Bytecodes::_nofast_putfield); // Check if there's a resolved klass containing the field if (resolved_klass.is_null()) { ResourceMark rm(THREAD); --- old/src/share/vm/interpreter/rewriter.cpp 2015-03-20 13:40:17.383871423 -0700 +++ new/src/share/vm/interpreter/rewriter.cpp 2015-03-20 13:40:17.246864554 -0700 @@ -26,6 +26,7 @@ #include "interpreter/bytecodes.hpp" #include "interpreter/interpreter.hpp" #include "interpreter/rewriter.hpp" +#include "memory/metaspaceShared.hpp" #include "memory/gcLocker.hpp" #include "memory/resourceArea.hpp" #include
"oops/generateOopMap.hpp" @@ -142,6 +143,8 @@ void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) { address p = bcp + offset; if (!reverse) { + assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp), + "rewirting to _fast_xxxx for archived methods should only happen at dump time"); int cp_index = Bytes::get_Java_u2(p); int cache_index = cp_entry_to_cp_cache(cp_index); Bytes::put_native_u2(p, cache_index); @@ -165,14 +168,16 @@ void Rewriter::rewrite_invokespecial(address bcp, int offset, bool reverse, bool* invokespecial_error) { address p = bcp + offset; if (!reverse) { + assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp), + "rewirting to _fast_invokevfinal for archived methods should only happen at dump time"); int cp_index = Bytes::get_Java_u2(p); if (_pool->tag_at(cp_index).is_interface_method()) { - int cache_index = add_invokespecial_cp_cache_entry(cp_index); - if (cache_index != (int)(jushort) cache_index) { - *invokespecial_error = true; - } - Bytes::put_native_u2(p, cache_index); - } else { + int cache_index = add_invokespecial_cp_cache_entry(cp_index); + if (cache_index != (int)(jushort) cache_index) { + *invokespecial_error = true; + } + Bytes::put_native_u2(p, cache_index); + } else { rewrite_member_reference(bcp, offset, reverse); } } else { @@ -293,6 +298,8 @@ bool reverse) { if (!reverse) { assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode"); + assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp), + "rewirting to _fast_aldc or _fast_aldc_w for archived methods should only happen at dump time"); address p = bcp + offset; int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p); constantTag tag = _pool->tag_at(cp_index).value(); @@ -374,6 +381,8 @@ switch (c) { case Bytecodes::_lookupswitch : { #ifndef CC_INTERP + assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp), + "rewirting to _fast_xxxswitch for archived methods should only happen at dump time"); Bytecode_lookupswitch bc(method, bcp); (*bcp) = ( bc.number_of_pairs() < BinarySwitchThreshold @@ -401,12 +410,17 @@ case Bytecodes::_getfield : // fall through case Bytecodes::_putfield : // fall through case Bytecodes::_invokevirtual : // fall through + assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp), + "rewirting to _fast_getXXX/putXXX or _fast_invokeXXX for archived methods should" + " only happen at dump time"); case Bytecodes::_invokestatic : case Bytecodes::_invokeinterface: case Bytecodes::_invokehandle : // if reverse=true rewrite_member_reference(bcp, prefix_length+1, reverse); break; case Bytecodes::_invokedynamic: + assert(DumpSharedSpaces || !MetaspaceShared::is_in_shared_space(bcp), + "rewirting _invoke_dynamic for archived methods should only happen at dump time"); rewrite_invokedynamic(bcp, prefix_length+1, reverse); break; case Bytecodes::_ldc: --- old/src/share/vm/interpreter/templateTable.cpp 2015-03-20 13:40:17.884896541 -0700 +++ new/src/share/vm/interpreter/templateTable.cpp 2015-03-20 13:40:17.769890775 -0700 @@ -517,6 +517,12 @@ def(Bytecodes::_invokehandle , ubcp|disp|clvm|____, vtos, vtos, invokehandle , f1_byte ); + def(Bytecodes::_nofast_getfield , ubcp|____|clvm|____, vtos, vtos, nofast_getfield , f1_byte ); + def(Bytecodes::_nofast_putfield , ubcp|____|clvm|____, vtos, vtos, nofast_putfield , f2_byte ); + + def(Bytecodes::_nofast_aload_0 , ____|____|clvm|____, vtos, atos, nofast_aload_0 , _ ); + def(Bytecodes::_nofast_iload , ubcp|____|clvm|____, vtos, 
itos, nofast_iload , _ ); + def(Bytecodes::_shouldnotreachhere , ____|____|____|____, vtos, vtos, shouldnotreachhere , _ ); // platform specific bytecodes pd_initialize(); --- old/src/share/vm/interpreter/templateTable.hpp 2015-03-20 13:40:18.429923865 -0700 +++ new/src/share/vm/interpreter/templateTable.hpp 2015-03-20 13:40:18.310917899 -0700 @@ -82,6 +82,7 @@ enum Operation { add, sub, mul, div, rem, _and, _or, _xor, shl, shr, ushr }; enum Condition { equal, not_equal, less, less_equal, greater, greater_equal }; enum CacheByte { f1_byte = 1, f2_byte = 2 }; // byte_no codes + enum RewriteControl { may_rewrite, may_not_rewrite }; // control for fast code under CDS private: static bool _is_initialized; // true if TemplateTable has been initialized @@ -165,7 +166,11 @@ static void dload(int n); static void aload(int n); static void aload_0(); - + static void nofast_aload_0(); + static void nofast_iload(); + static void iload_internal(RewriteControl rc = may_rewrite); + static void aload_0_internal(RewriteControl rc = may_rewrite); + static void istore(); static void lstore(); static void fstore(); @@ -279,10 +284,13 @@ static void invokehandle(int byte_no); static void fast_invokevfinal(int byte_no); - static void getfield_or_static(int byte_no, bool is_static); - static void putfield_or_static(int byte_no, bool is_static); + static void getfield_or_static(int byte_no, bool is_static, RewriteControl rc = may_rewrite); + static void putfield_or_static(int byte_no, bool is_static, RewriteControl rc = may_rewrite); + static void getfield(int byte_no); static void putfield(int byte_no); + static void nofast_getfield(int byte_no); + static void nofast_putfield(int byte_no); static void getstatic(int byte_no); static void putstatic(int byte_no); static void pop_and_check_object(Register obj); --- old/src/share/vm/memory/metaspaceShared.cpp 2015-03-20 13:40:18.984951690 -0700 +++ new/src/share/vm/memory/metaspaceShared.cpp 2015-03-20 13:40:18.868945874 -0700 @@ -30,6 +30,8 @@ #include "classfile/symbolTable.hpp" #include "classfile/systemDictionary.hpp" #include "code/codeCache.hpp" +#include "interpreter/bytecodes.hpp" +#include "interpreter/bytecodeStream.hpp" #include "memory/filemap.hpp" #include "memory/gcLocker.hpp" #include "memory/metaspace.hpp" @@ -104,15 +106,33 @@ } } -// Walk all methods in the class list and assign a fingerprint. -// so that this part of the ConstMethod* is read only. -static void calculate_fingerprints() { +static void rewrite_nofast_bytecode(Method* method) { + RawBytecodeStream bcs(method); + while (!bcs.is_last_bytecode()) { + Bytecodes::Code opcode = bcs.raw_next(); + switch (opcode) { + case Bytecodes::_getfield: *bcs.bcp() = Bytecodes::_nofast_getfield; break; + case Bytecodes::_putfield: *bcs.bcp() = Bytecodes::_nofast_putfield; break; + case Bytecodes::_aload_0: *bcs.bcp() = Bytecodes::_nofast_aload_0; break; + case Bytecodes::_iload: *bcs.bcp() = Bytecodes::_nofast_iload; break; + default: break; + } + } +} + +// Walk all methods in the class list to ensure that they won't be modified at +// run time. This includes: +// [1] Rewrite all bytecodes as needed, so that the ConstMethod* will not be modified +// at run time by RewriteBytecodes/RewriteFrequentPairs +// [2] Assign a fingerprint, so one doesn't need to be assigned at run-time. 
+static void rewrite_nofast_bytecodes_and_calculate_fingerprints() { for (int i = 0; i < _global_klass_objects->length(); i++) { Klass* k = _global_klass_objects->at(i); if (k->oop_is_instance()) { InstanceKlass* ik = InstanceKlass::cast(k); for (int i = 0; i < ik->methods()->length(); i++) { Method* m = ik->methods()->at(i); + rewrite_nofast_bytecode(m); Fingerprinter fp(m); // The side effect of this call sets method's fingerprint field. fp.fingerprint(); @@ -476,9 +496,10 @@ tty->print_cr(" type array classes = %5d", num_type_array); } - // Update all the fingerprints in the shared methods. - tty->print("Calculating fingerprints ... "); - calculate_fingerprints(); + + // Ensure the ConstMethods won't be modified at run-time + tty->print("Updating ConstMethods ... "); + rewrite_nofast_bytecodes_and_calculate_fingerprints(); tty->print_cr("done. "); // Remove all references outside the metadata --- old/src/share/vm/oops/constMethod.hpp 2015-03-20 13:40:19.666985884 -0700 +++ new/src/share/vm/oops/constMethod.hpp 2015-03-20 13:40:19.482976657 -0700 @@ -32,7 +32,6 @@ // processes in a read-only section with Class Data Sharing (CDS). It's important // that this class doesn't have virtual functions because the vptr cannot be shared // with CDS. -// (*)RewriteByteCodes and RewriteFrequentPairs is an exception but turned off in CDS // // Note that most applications load thousands of methods, so keeping the size of this // structure small has a big impact on footprint. --- old/src/share/vm/runtime/arguments.cpp 2015-03-20 13:40:20.218013508 -0700 +++ new/src/share/vm/runtime/arguments.cpp 2015-03-20 13:40:20.073006238 -0700 @@ -1900,15 +1900,8 @@ } } -// This must be called after ergonomics because we want bytecode rewriting -// if the server compiler is used, or if UseSharedSpaces is disabled. +// This must be called after ergonomics. void Arguments::set_bytecode_flags() { - // Better not attempt to store into a read-only space. - if (UseSharedSpaces) { - FLAG_SET_DEFAULT(RewriteBytecodes, false); - FLAG_SET_DEFAULT(RewriteFrequentPairs, false); - } - if (!RewriteBytecodes) { FLAG_SET_DEFAULT(RewriteFrequentPairs, false); }
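Taken together, the hunks above implement one mechanism: at CDS dump time every rewritable opcode in an archived method is flipped to a _nofast_ twin, and at run time that twin resolves exactly like its original but is never patched, so the ConstMethod can stay in the read-only region. Below is a self-contained sketch of that flow under simplifying assumptions: the opcode values are invented, and the method body is modeled as one opcode per vector element instead of being walked with HotSpot's RawBytecodeStream.

#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical encodings; the real values are VM-internal (see the bytecodes.hpp hunk above).
enum Opcode : uint8_t {
  op_iload = 0x15, op_aload_0 = 0x2a, op_getfield = 0xb4, op_putfield = 0xb5,
  op_nofast_getfield = 0xe9, op_nofast_putfield = 0xea,
  op_nofast_aload_0  = 0xeb, op_nofast_iload    = 0xec
};

// Dump time: flip rewritable opcodes to their nofast twins so the interpreter never
// patches the archived method body and the ConstMethod can be mapped read-only.
static void rewrite_nofast(std::vector<uint8_t>& opcodes) {
  for (uint8_t& op : opcodes) {
    switch (op) {
      case op_getfield: op = op_nofast_getfield; break;
      case op_putfield: op = op_nofast_putfield; break;
      case op_aload_0:  op = op_nofast_aload_0;  break;
      case op_iload:    op = op_nofast_iload;    break;
      default:          break;
    }
  }
}

// Resolution time: a nofast opcode is treated exactly like its original, which is what
// the small switch added to resolve_cache_and_index on the ppc, sparc and x86 ports does.
static Opcode original_of(Opcode code) {
  switch (code) {
    case op_nofast_getfield: return op_getfield;
    case op_nofast_putfield: return op_putfield;
    default:                 return code;
  }
}

int main() {
  std::vector<uint8_t> method = { op_aload_0, op_getfield, op_iload, op_putfield };
  rewrite_nofast(method);  // what the dump-time pass does to every archived method
  std::printf("first opcode is now 0x%02x\n", (unsigned)method[0]);                     // nofast_aload_0
  std::printf("second resolves as 0x%02x\n", (unsigned)original_of((Opcode)method[1])); // getfield
  return 0;
}

Note that the templates only patch themselves on the instance-field paths; getstatic and putstatic are never rewritten to _fast_ forms here, which is why they need no nofast twins.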