src/cpu/ppc/vm/templateTable_ppc_64.cpp
Print this page
*** 436,454 ****
void TemplateTable::locals_index(Register Rdst, int offset) {
__ lbz(Rdst, offset, R14_bcp);
}
void TemplateTable::iload() {
transition(vtos, itos);
// Get the local value into tos
const Register Rindex = R22_tmp2;
locals_index(Rindex);
// Rewrite iload,iload pair into fast_iload2
// iload,caload pair into fast_icaload
! if (RewriteFrequentPairs) {
Label Lrewrite, Ldone;
Register Rnext_byte = R3_ARG1,
Rrewrite_to = R6_ARG4,
Rscratch = R11_scratch1;
--- 436,462 ----
void TemplateTable::locals_index(Register Rdst, int offset) {
__ lbz(Rdst, offset, R14_bcp);
}
void TemplateTable::iload() {
+ iload_internal();
+ }
+
+ void TemplateTable::nofast_iload() {
+ iload_internal(MAY_NOT_REWRITE);
+ }
+
+ void TemplateTable::iload_internal(RewriteControl rc) {
transition(vtos, itos);
// Get the local value into tos
const Register Rindex = R22_tmp2;
locals_index(Rindex);
// Rewrite iload,iload pair into fast_iload2
// iload,caload pair into fast_icaload
! if (RewriteFrequentPairs && rc == MAY_REWRITE) {
Label Lrewrite, Ldone;
Register Rnext_byte = R3_ARG1,
Rrewrite_to = R6_ARG4,
Rscratch = R11_scratch1;
*** 708,717 ****
--- 716,733 ----
__ ld(R17_tos, Interpreter::local_offset_in_bytes(n), R18_locals);
}
void TemplateTable::aload_0() {
+ aload_0_internal();
+ }
+
+ void TemplateTable::nofast_aload_0() {
+ aload_0_internal(MAY_NOT_REWRITE);
+ }
+
+ void TemplateTable::aload_0_internal(RewriteControl rc) {
transition(vtos, atos);
// According to bytecode histograms, the pairs:
//
// _aload_0, _fast_igetfield
// _aload_0, _fast_agetfield
*** 731,741 ****
// aload_0, aload_1
// aload_0, iload_1
// These bytecodes with a small amount of code are most profitable
// to rewrite.
! if (RewriteFrequentPairs) {
Label Lrewrite, Ldont_rewrite;
Register Rnext_byte = R3_ARG1,
Rrewrite_to = R6_ARG4,
Rscratch = R11_scratch1;
--- 747,757 ----
// aload_0, aload_1
// aload_0, iload_1
// These bytecodes with a small amount of code are most profitable
// to rewrite.
! if (RewriteFrequentPairs && rc == MAY_REWRITE) {
Label Lrewrite, Ldont_rewrite;
Register Rnext_byte = R3_ARG1,
Rrewrite_to = R6_ARG4,
Rscratch = R11_scratch1;
*** 2143,2165 ****
void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register Rscratch, size_t index_size) {
__ get_cache_and_index_at_bcp(Rcache, 1, index_size);
Label Lresolved, Ldone;
assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
// We are resolved if the indices offset contains the current bytecode.
#if defined(VM_LITTLE_ENDIAN)
__ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + byte_no + 1, Rcache);
#else
__ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + 7 - (byte_no + 1), Rcache);
#endif
// Acquire by cmp-br-isync (see below).
! __ cmpdi(CCR0, Rscratch, (int)bytecode());
__ beq(CCR0, Lresolved);
address entry = NULL;
! switch (bytecode()) {
case Bytecodes::_getstatic : // fall through
case Bytecodes::_putstatic : // fall through
case Bytecodes::_getfield : // fall through
case Bytecodes::_putfield : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
case Bytecodes::_invokevirtual : // fall through
--- 2159,2188 ----
void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register Rscratch, size_t index_size) {
__ get_cache_and_index_at_bcp(Rcache, 1, index_size);
Label Lresolved, Ldone;
+ Bytecodes::Code code = bytecode();
+ switch (code) {
+ case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
+ case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
+ case Bytecodes::_nofast_invokevirtual: code = Bytecodes::_invokevirtual; break;
+ default: break;
+ }
+
assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
// We are resolved if the indices offset contains the current bytecode.
#if defined(VM_LITTLE_ENDIAN)
__ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + byte_no + 1, Rcache);
#else
__ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + 7 - (byte_no + 1), Rcache);
#endif
// Acquire by cmp-br-isync (see below).
! __ cmpdi(CCR0, Rscratch, code);
__ beq(CCR0, Lresolved);
address entry = NULL;
! switch (code) {
case Bytecodes::_getstatic : // fall through
case Bytecodes::_putstatic : // fall through
case Bytecodes::_getfield : // fall through
case Bytecodes::_putfield : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
case Bytecodes::_invokevirtual : // fall through
*** 2168,2178 ****
case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break;
case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
default : ShouldNotReachHere(); break;
}
! __ li(R4_ARG2, (int)bytecode());
__ call_VM(noreg, entry, R4_ARG2, true);
// Update registers with resolved info.
__ get_cache_and_index_at_bcp(Rcache, 1, index_size);
__ b(Ldone);
--- 2191,2201 ----
case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break;
case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
default : ShouldNotReachHere(); break;
}
! __ li(R4_ARG2, code);
__ call_VM(noreg, entry, R4_ARG2, true);
// Update registers with resolved info.
__ get_cache_and_index_at_bcp(Rcache, 1, index_size);
__ b(Ldone);
*** 2349,2359 ****
__ null_check_throw(Roop, -1, Rtmp);
__ verify_oop(Roop);
}
// PPC64: implement volatile loads as fence-store-acquire.
! void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
transition(vtos, vtos);
Label Lacquire, Lisync;
const Register Rcache = R3_ARG1,
--- 2372,2382 ----
__ null_check_throw(Roop, -1, Rtmp);
__ verify_oop(Roop);
}
// PPC64: implement volatile loads as fence-store-acquire.
! void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
transition(vtos, vtos);
Label Lacquire, Lisync;
const Register Rcache = R3_ARG1,
*** 2433,2443 ****
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[dtos] == 0, "can't compute twice");
branch_table[dtos] = __ pc(); // non-volatile_entry point
__ lfdx(F15_ftos, Rclass_or_obj, Roffset);
__ push(dtos);
! if (!is_static) patch_bytecode(Bytecodes::_fast_dgetfield, Rbc, Rscratch);
{
Label acquire_double;
__ beq(CCR6, acquire_double); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2456,2466 ----
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[dtos] == 0, "can't compute twice");
branch_table[dtos] = __ pc(); // non-volatile_entry point
__ lfdx(F15_ftos, Rclass_or_obj, Roffset);
__ push(dtos);
! if (!is_static && rc == MAY_REWRITE) patch_bytecode(Bytecodes::_fast_dgetfield, Rbc, Rscratch);
{
Label acquire_double;
__ beq(CCR6, acquire_double); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2452,2462 ****
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[ftos] == 0, "can't compute twice");
branch_table[ftos] = __ pc(); // non-volatile_entry point
__ lfsx(F15_ftos, Rclass_or_obj, Roffset);
__ push(ftos);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_fgetfield, Rbc, Rscratch); }
{
Label acquire_float;
__ beq(CCR6, acquire_float); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2475,2485 ----
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[ftos] == 0, "can't compute twice");
branch_table[ftos] = __ pc(); // non-volatile_entry point
__ lfsx(F15_ftos, Rclass_or_obj, Roffset);
__ push(ftos);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_fgetfield, Rbc, Rscratch); }
{
Label acquire_float;
__ beq(CCR6, acquire_float); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2471,2492 ****
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[itos] == 0, "can't compute twice");
branch_table[itos] = __ pc(); // non-volatile_entry point
__ lwax(R17_tos, Rclass_or_obj, Roffset);
__ push(itos);
! if (!is_static) patch_bytecode(Bytecodes::_fast_igetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lltos);
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[ltos] == 0, "can't compute twice");
branch_table[ltos] = __ pc(); // non-volatile_entry point
__ ldx(R17_tos, Rclass_or_obj, Roffset);
__ push(ltos);
! if (!is_static) patch_bytecode(Bytecodes::_fast_lgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lbtos);
--- 2494,2515 ----
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[itos] == 0, "can't compute twice");
branch_table[itos] = __ pc(); // non-volatile_entry point
__ lwax(R17_tos, Rclass_or_obj, Roffset);
__ push(itos);
! if (!is_static && rc == MAY_REWRITE) patch_bytecode(Bytecodes::_fast_igetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lltos);
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[ltos] == 0, "can't compute twice");
branch_table[ltos] = __ pc(); // non-volatile_entry point
__ ldx(R17_tos, Rclass_or_obj, Roffset);
__ push(ltos);
! if (!is_static && rc == MAY_REWRITE) patch_bytecode(Bytecodes::_fast_lgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lbtos);
*** 2494,2526 ****
assert(branch_table[btos] == 0, "can't compute twice");
branch_table[btos] = __ pc(); // non-volatile_entry point
__ lbzx(R17_tos, Rclass_or_obj, Roffset);
__ extsb(R17_tos, R17_tos);
__ push(btos);
! if (!is_static) patch_bytecode(Bytecodes::_fast_bgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lctos);
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[ctos] == 0, "can't compute twice");
branch_table[ctos] = __ pc(); // non-volatile_entry point
__ lhzx(R17_tos, Rclass_or_obj, Roffset);
__ push(ctos);
! if (!is_static) patch_bytecode(Bytecodes::_fast_cgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lstos);
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[stos] == 0, "can't compute twice");
branch_table[stos] = __ pc(); // non-volatile_entry point
__ lhax(R17_tos, Rclass_or_obj, Roffset);
__ push(stos);
! if (!is_static) patch_bytecode(Bytecodes::_fast_sgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Latos);
--- 2517,2549 ----
assert(branch_table[btos] == 0, "can't compute twice");
branch_table[btos] = __ pc(); // non-volatile_entry point
__ lbzx(R17_tos, Rclass_or_obj, Roffset);
__ extsb(R17_tos, R17_tos);
__ push(btos);
! if (!is_static && rc == MAY_REWRITE) patch_bytecode(Bytecodes::_fast_bgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lctos);
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[ctos] == 0, "can't compute twice");
branch_table[ctos] = __ pc(); // non-volatile_entry point
__ lhzx(R17_tos, Rclass_or_obj, Roffset);
__ push(ctos);
! if (!is_static && rc == MAY_REWRITE) patch_bytecode(Bytecodes::_fast_cgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Lstos);
__ fence(); // Volatile entry point (one instruction before non-volatile_entry point).
assert(branch_table[stos] == 0, "can't compute twice");
branch_table[stos] = __ pc(); // non-volatile_entry point
__ lhax(R17_tos, Rclass_or_obj, Roffset);
__ push(stos);
! if (!is_static && rc == MAY_REWRITE) patch_bytecode(Bytecodes::_fast_sgetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 28, 28); // Align load.
// __ bind(Latos);
*** 2529,2539 ****
branch_table[atos] = __ pc(); // non-volatile_entry point
__ load_heap_oop(R17_tos, (RegisterOrConstant)Roffset, Rclass_or_obj);
__ verify_oop(R17_tos);
__ push(atos);
//__ dcbt(R17_tos); // prefetch
! if (!is_static) patch_bytecode(Bytecodes::_fast_agetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 12);
__ bind(Lacquire);
--- 2552,2562 ----
branch_table[atos] = __ pc(); // non-volatile_entry point
__ load_heap_oop(R17_tos, (RegisterOrConstant)Roffset, Rclass_or_obj);
__ verify_oop(R17_tos);
__ push(atos);
//__ dcbt(R17_tos); // prefetch
! if (!is_static && rc == MAY_REWRITE) patch_bytecode(Bytecodes::_fast_agetfield, Rbc, Rscratch);
__ beq(CCR6, Lacquire); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 12);
__ bind(Lacquire);
*** 2552,2561 ****
--- 2575,2588 ----
void TemplateTable::getfield(int byte_no) {
getfield_or_static(byte_no, false);
}
+ void TemplateTable::nofast_getfield(int byte_no) {
+ getfield_or_static(byte_no, false, MAY_NOT_REWRITE);
+ }
+
void TemplateTable::getstatic(int byte_no) {
getfield_or_static(byte_no, true);
}
// The registers cache and index expected to be set before call.
*** 2642,2652 ****
__ bind(Lno_field_mod_post);
}
}
// PPC64: implement volatile stores as release-store (return bytecode contains an additional release).
! void TemplateTable::putfield_or_static(int byte_no, bool is_static) {
Label Lvolatile;
const Register Rcache = R5_ARG3, // Do not use ARG1/2 (causes trouble in jvmti_post_field_mod).
Rclass_or_obj = R31, // Needs to survive C call.
Roffset = R22_tmp2, // Needs to survive C call.
--- 2669,2679 ----
__ bind(Lno_field_mod_post);
}
}
// PPC64: implement volatile stores as release-store (return bytecode contains an additional release).
! void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
Label Lvolatile;
const Register Rcache = R5_ARG3, // Do not use ARG1/2 (causes trouble in jvmti_post_field_mod).
Rclass_or_obj = R31, // Needs to survive C call.
Roffset = R22_tmp2, // Needs to survive C call.
*** 2716,2726 ****
assert(branch_table[dtos] == 0, "can't compute twice");
branch_table[dtos] = __ pc(); // non-volatile_entry point
__ pop(dtos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stfdx(F15_ftos, Rclass_or_obj, Roffset);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_dputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2743,2753 ----
assert(branch_table[dtos] == 0, "can't compute twice");
branch_table[dtos] = __ pc(); // non-volatile_entry point
__ pop(dtos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stfdx(F15_ftos, Rclass_or_obj, Roffset);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_dputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2730,2740 ****
assert(branch_table[ftos] == 0, "can't compute twice");
branch_table[ftos] = __ pc(); // non-volatile_entry point
__ pop(ftos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stfsx(F15_ftos, Rclass_or_obj, Roffset);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_fputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2757,2767 ----
assert(branch_table[ftos] == 0, "can't compute twice");
branch_table[ftos] = __ pc(); // non-volatile_entry point
__ pop(ftos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stfsx(F15_ftos, Rclass_or_obj, Roffset);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_fputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2744,2754 ****
assert(branch_table[itos] == 0, "can't compute twice");
branch_table[itos] = __ pc(); // non-volatile_entry point
__ pop(itos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stwx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_iputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2771,2781 ----
assert(branch_table[itos] == 0, "can't compute twice");
branch_table[itos] = __ pc(); // non-volatile_entry point
__ pop(itos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stwx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_iputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2758,2768 ****
assert(branch_table[ltos] == 0, "can't compute twice");
branch_table[ltos] = __ pc(); // non-volatile_entry point
__ pop(ltos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stdx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_lputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2785,2795 ----
assert(branch_table[ltos] == 0, "can't compute twice");
branch_table[ltos] = __ pc(); // non-volatile_entry point
__ pop(ltos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stdx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_lputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2772,2782 ****
assert(branch_table[btos] == 0, "can't compute twice");
branch_table[btos] = __ pc(); // non-volatile_entry point
__ pop(btos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stbx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_bputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2799,2809 ----
assert(branch_table[btos] == 0, "can't compute twice");
branch_table[btos] = __ pc(); // non-volatile_entry point
__ pop(btos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ stbx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_bputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2786,2796 ****
assert(branch_table[ctos] == 0, "can't compute twice");
branch_table[ctos] = __ pc(); // non-volatile_entry point
__ pop(ctos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1..
__ sthx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_cputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2813,2823 ----
assert(branch_table[ctos] == 0, "can't compute twice");
branch_table[ctos] = __ pc(); // non-volatile_entry point
__ pop(ctos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1..
__ sthx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_cputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2800,2810 ****
assert(branch_table[stos] == 0, "can't compute twice");
branch_table[stos] = __ pc(); // non-volatile_entry point
__ pop(stos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ sthx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_sputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
--- 2827,2837 ----
assert(branch_table[stos] == 0, "can't compute twice");
branch_table[stos] = __ pc(); // non-volatile_entry point
__ pop(stos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
__ sthx(R17_tos, Rclass_or_obj, Roffset);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_sputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
}
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
*** 2814,2824 ****
assert(branch_table[atos] == 0, "can't compute twice");
branch_table[atos] = __ pc(); // non-volatile_entry point
__ pop(atos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // kills R11_scratch1
do_oop_store(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, Rscratch2, Rscratch3, _bs->kind(), false /* precise */, true /* check null */);
! if (!is_static) { patch_bytecode(Bytecodes::_fast_aputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 12);
--- 2841,2851 ----
assert(branch_table[atos] == 0, "can't compute twice");
branch_table[atos] = __ pc(); // non-volatile_entry point
__ pop(atos);
if (!is_static) { pop_and_check_object(Rclass_or_obj); } // kills R11_scratch1
do_oop_store(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, Rscratch2, Rscratch3, _bs->kind(), false /* precise */, true /* check null */);
! if (!is_static && rc == MAY_REWRITE) { patch_bytecode(Bytecodes::_fast_aputfield, Rbc, Rscratch, true, byte_no); }
if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
__ beq(CR_is_vol, Lvolatile); // Volatile?
__ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
__ align(32, 12);
*** 2838,2847 ****
--- 2865,2878 ----
void TemplateTable::putfield(int byte_no) {
putfield_or_static(byte_no, false);
}
+ void TemplateTable::nofast_putfield(int byte_no) {
+ putfield_or_static(byte_no, false, MAY_NOT_REWRITE);
+ }
+
void TemplateTable::putstatic(int byte_no) {
putfield_or_static(byte_no, true);
}
// See SPARC. On PPC64, we have a different jvmti_post_field_mod which does the job.
*** 3164,3174 ****
Register Rrecv, // If caller wants to see it.
Register Rflags, // If caller wants to test it.
Register Rscratch
) {
// Determine flags.
! const Bytecodes::Code code = bytecode();
const bool is_invokeinterface = code == Bytecodes::_invokeinterface;
const bool is_invokedynamic = code == Bytecodes::_invokedynamic;
const bool is_invokehandle = code == Bytecodes::_invokehandle;
const bool is_invokevirtual = code == Bytecodes::_invokevirtual;
const bool is_invokespecial = code == Bytecodes::_invokespecial;
--- 3195,3205 ----
Register Rrecv, // If caller wants to see it.
Register Rflags, // If caller wants to test it.
Register Rscratch
) {
// Determine flags.
! const Bytecodes::Code code = bytecode() == Bytecodes::_nofast_invokevirtual ? Bytecodes::_invokevirtual : bytecode();
const bool is_invokeinterface = code == Bytecodes::_invokeinterface;
const bool is_invokedynamic = code == Bytecodes::_invokedynamic;
const bool is_invokehandle = code == Bytecodes::_invokehandle;
const bool is_invokevirtual = code == Bytecodes::_invokevirtual;
const bool is_invokespecial = code == Bytecodes::_invokespecial;
*** 3237,3249 ****
// Argument and return type profiling.
__ profile_arguments_type(Rtarget_method, Rrecv_klass /* scratch1 */, Rtemp /* scratch2 */, true);
__ call_from_interpreter(Rtarget_method, Rret, Rrecv_klass /* scratch1 */, Rtemp /* scratch2 */);
}
- // Virtual or final call. Final calls are rewritten on the fly to run through "fast_finalcall" next time.
void TemplateTable::invokevirtual(int byte_no) {
! transition(vtos, vtos);
Register Rtable_addr = R11_scratch1,
Rret_type = R12_scratch2,
Rret_addr = R5_ARG3,
Rflags = R22_tmp2, // Should survive C call.
--- 3268,3287 ----
// Argument and return type profiling.
__ profile_arguments_type(Rtarget_method, Rrecv_klass /* scratch1 */, Rtemp /* scratch2 */, true);
__ call_from_interpreter(Rtarget_method, Rret, Rrecv_klass /* scratch1 */, Rtemp /* scratch2 */);
}
void TemplateTable::invokevirtual(int byte_no) {
! invokevirtual_internal(byte_no);
! }
!
! void TemplateTable::nofast_invokevirtual(int byte_no) {
! invokevirtual_internal(byte_no, MAY_NOT_REWRITE);
! }
!
! // Virtual or final call. Final calls are rewritten on the fly to run through "fast_finalcall" next time.
! void TemplateTable::invokevirtual_internal(int byte_no, RewriteControl rc) {
Register Rtable_addr = R11_scratch1,
Rret_type = R12_scratch2,
Rret_addr = R5_ARG3,
Rflags = R22_tmp2, // Should survive C call.
*** 3258,3268 ****
--- 3296,3308 ----
load_invoke_cp_cache_entry(byte_no, Rvtableindex_or_method, noreg, Rflags, /*virtual*/ true, false, false);
__ testbitdi(CCR0, R0, Rflags, ConstantPoolCacheEntry::is_vfinal_shift);
__ bfalse(CCR0, LnotFinal);
+ if (rc == MAY_REWRITE) {
patch_bytecode(Bytecodes::_fast_invokevfinal, Rnew_bc, R12_scratch2);
+ }
invokevfinal_helper(Rvtableindex_or_method, Rflags, R11_scratch1, R12_scratch2);
__ align(32, 12);
__ bind(LnotFinal);
// Load "this" pointer (receiver).