
src/hotspot/cpu/ppc/templateTable_ppc_64.cpp

rev 56016 : 8229422: Taskqueue: Outdated selection of weak memory model platforms
Reviewed-by:
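
Every hunk below is the same mechanical rename: the constant support_IRIW_for_not_multiple_copy_atomic_cpu becomes SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU, with no change to the generated PPC code. The guard appears in two shapes in this file; the following sketch is condensed from the hunks themselves for orientation (it is not part of the patch, and the definition site of the new name is assumed to live elsewhere in this change):

    // Shape 1 (loads): a CPU that is not multi-copy atomic needs a full
    // fence before a volatile load so that independent reads of independent
    // writes (IRIW) appear in a consistent order.
    if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
      __ fence();
    }

    // Shape 2 (stores): with the fence issued on the load side, the store
    // paths can skip the extra compare-and-branch on the volatile bit.
    if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
      __ beq(CR_is_vol, Lvolatile); // Volatile?
    }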

*** 2494,2504 ****
  #endif
  
    // Load from branch table and dispatch (volatile case: one instruction ahead).
    __ sldi(Rflags, Rflags, LogBytesPerWord);
    __ cmpwi(CCR6, Rscratch, 1); // Volatile?
!   if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
      __ sldi(Rscratch, Rscratch, exact_log2(BytesPerInstWord)); // Volatile ? size of 1 instruction : 0.
    }
    __ ldx(Rbtable, Rbtable, Rflags);
  
    // Get the obj from stack.
--- 2494,2504 ----
  #endif
  
    // Load from branch table and dispatch (volatile case: one instruction ahead).
    __ sldi(Rflags, Rflags, LogBytesPerWord);
    __ cmpwi(CCR6, Rscratch, 1); // Volatile?
!   if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
      __ sldi(Rscratch, Rscratch, exact_log2(BytesPerInstWord)); // Volatile ? size of 1 instruction : 0.
    }
    __ ldx(Rbtable, Rbtable, Rflags);
  
    // Get the obj from stack.
*** 2506,2516 ****
      pop_and_check_object(Rclass_or_obj); // Kills R11_scratch1.
    } else {
      __ verify_oop(Rclass_or_obj);
    }
  
!   if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
      __ subf(Rbtable, Rscratch, Rbtable); // Point to volatile/non-volatile entry point.
    }
    __ mtctr(Rbtable);
    __ bctr();
--- 2506,2516 ----
      pop_and_check_object(Rclass_or_obj); // Kills R11_scratch1.
    } else {
      __ verify_oop(Rclass_or_obj);
    }
  
!   if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
      __ subf(Rbtable, Rscratch, Rbtable); // Point to volatile/non-volatile entry point.
    }
    __ mtctr(Rbtable);
    __ bctr();
*** 2831,2841 ****
    __ bge(CCR0, LFlagInvalid);
  #endif
  
    // Load from branch table and dispatch (volatile case: one instruction ahead).
    __ sldi(Rflags, Rflags, LogBytesPerWord);
!   if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
      __ cmpwi(CR_is_vol, Rscratch, 1); // Volatile?
    }
    __ sldi(Rscratch, Rscratch, exact_log2(BytesPerInstWord)); // Volatile? size of instruction 1 : 0.
    __ ldx(Rbtable, Rbtable, Rflags);
--- 2831,2841 ----
    __ bge(CCR0, LFlagInvalid);
  #endif
  
    // Load from branch table and dispatch (volatile case: one instruction ahead).
    __ sldi(Rflags, Rflags, LogBytesPerWord);
!   if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
      __ cmpwi(CR_is_vol, Rscratch, 1); // Volatile?
    }
    __ sldi(Rscratch, Rscratch, exact_log2(BytesPerInstWord)); // Volatile? size of instruction 1 : 0.
    __ ldx(Rbtable, Rbtable, Rflags);
*** 2867,2877 ****
      }
      __ stfdx(F15_ftos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_dputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2867,2877 ----
      }
      __ stfdx(F15_ftos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_dputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2883,2893 ****
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stfsx(F15_ftos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_fputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2883,2893 ----
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stfsx(F15_ftos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_fputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2899,2909 ****
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stwx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_iputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2899,2909 ----
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stwx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_iputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2915,2925 ****
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stdx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_lputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2915,2925 ----
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stdx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_lputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2931,2941 ****
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stbx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_bputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2931,2941 ----
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ stbx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_bputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2948,2958 ****
      __ andi(R17_tos, R17_tos, 0x1);
      __ stbx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_zputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2948,2958 ----
      __ andi(R17_tos, R17_tos, 0x1);
      __ stbx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_zputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2964,2974 ****
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1..
      __ sthx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_cputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2964,2974 ----
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1..
      __ sthx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_cputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2980,2990 ****
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ sthx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_sputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
--- 2980,2990 ----
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // Kills R11_scratch1.
      __ sthx(R17_tos, Rclass_or_obj, Roffset);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_sputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
      }
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 28, 28); // Align pop.
*** 2996,3006 ****
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // kills R11_scratch1
      do_oop_store(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, Rscratch2, Rscratch3, IN_HEAP);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_aputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
        __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
        __ align(32, 12);
        __ bind(Lvolatile);
--- 2996,3006 ----
      if (!is_static) { pop_and_check_object(Rclass_or_obj); } // kills R11_scratch1
      do_oop_store(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, Rscratch2, Rscratch3, IN_HEAP);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_aputfield, Rbc, Rscratch, true, byte_no);
      }
!     if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
        __ beq(CR_is_vol, Lvolatile); // Volatile?
        __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
        __ align(32, 12);
        __ bind(Lvolatile);
*** 3054,3064 ****
    // Get the obj and the final store addr.
    pop_and_check_object(Rclass_or_obj); // Kills R11_scratch1.
  
    // Get volatile flag.
    __ rldicl_(Rscratch, Rflags, 64-ConstantPoolCacheEntry::is_volatile_shift, 63); // Extract volatile bit.
!   if (!support_IRIW_for_not_multiple_copy_atomic_cpu) { __ cmpdi(CR_is_vol, Rscratch, 1); }
    {
      Label LnotVolatile;
      __ beq(CCR0, LnotVolatile);
      __ release();
      __ align(32, 12);
--- 3054,3064 ----
    // Get the obj and the final store addr.
    pop_and_check_object(Rclass_or_obj); // Kills R11_scratch1.
  
    // Get volatile flag.
    __ rldicl_(Rscratch, Rflags, 64-ConstantPoolCacheEntry::is_volatile_shift, 63); // Extract volatile bit.
!   if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ cmpdi(CR_is_vol, Rscratch, 1); }
    {
      Label LnotVolatile;
      __ beq(CCR0, LnotVolatile);
      __ release();
      __ align(32, 12);
*** 3101,3111 ****
        break;
  
      default:
        ShouldNotReachHere();
    }
!   if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
      Label LVolatile;
      __ beq(CR_is_vol, LVolatile);
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 12);
--- 3101,3111 ----
        break;
  
      default:
        ShouldNotReachHere();
    }
!   if (!SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
      Label LVolatile;
      __ beq(CR_is_vol, LVolatile);
      __ dispatch_epilog(vtos, Bytecodes::length_for(bytecode()));
  
      __ align(32, 12);
*** 3146,3156 ****
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ twi_0(R17_tos);
        __ isync();
        break;
--- 3146,3156 ----
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ twi_0(R17_tos);
        __ isync();
        break;
*** 3159,3169 ****
      {
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
--- 3159,3169 ----
      {
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
*** 3171,3181 ****
      {
        __ ldx(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ ldx(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
--- 3171,3181 ----
      {
        __ ldx(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ ldx(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
*** 3184,3194 ****
        __ lbzx(R17_tos, Rclass_or_obj, Roffset);
        __ extsb(R17_tos, R17_tos);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lbzx(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ extsb(R17_tos, R17_tos);
        __ isync();
        break;
--- 3184,3194 ----
        __ lbzx(R17_tos, Rclass_or_obj, Roffset);
        __ extsb(R17_tos, R17_tos);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lbzx(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ extsb(R17_tos, R17_tos);
        __ isync();
        break;
*** 3197,3207 ****
      {
        __ lhzx(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lhzx(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
--- 3197,3207 ----
      {
        __ lhzx(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lhzx(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
*** 3209,3219 ****
      {
        __ lhax(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lhax(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
--- 3209,3219 ----
      {
        __ lhax(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lhax(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
*** 3222,3232 ****
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
        Label Ldummy;
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ fcmpu(CCR0, F15_ftos, F15_ftos); // Acquire by cmp-br-isync.
        __ bne_predict_not_taken(CCR0, Ldummy);
        __ bind(Ldummy);
        __ isync();
--- 3222,3232 ----
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
        Label Ldummy;
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ fcmpu(CCR0, F15_ftos, F15_ftos); // Acquire by cmp-br-isync.
        __ bne_predict_not_taken(CCR0, Ldummy);
        __ bind(Ldummy);
        __ isync();
*** 3237,3247 ****
        __ lfdx(F15_ftos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
        Label Ldummy;
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lfdx(F15_ftos, Rclass_or_obj, Roffset);
        __ fcmpu(CCR0, F15_ftos, F15_ftos); // Acquire by cmp-br-isync.
        __ bne_predict_not_taken(CCR0, Ldummy);
        __ bind(Ldummy);
        __ isync();
--- 3237,3247 ----
        __ lfdx(F15_ftos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()));
  
        __ bind(LisVolatile);
        Label Ldummy;
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lfdx(F15_ftos, Rclass_or_obj, Roffset);
        __ fcmpu(CCR0, F15_ftos, F15_ftos); // Acquire by cmp-br-isync.
        __ bne_predict_not_taken(CCR0, Ldummy);
        __ bind(Ldummy);
        __ isync();
*** 3286,3296 ****
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()) - 1); // Undo bcp increment.
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ twi_0(R17_tos);
        __ isync();
        break;
--- 3286,3296 ----
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()) - 1); // Undo bcp increment.
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        do_oop_load(_masm, Rclass_or_obj, Roffset, R17_tos, Rscratch, /* nv temp */ Rflags, IN_HEAP);
        __ verify_oop(R17_tos);
        __ twi_0(R17_tos);
        __ isync();
        break;
*** 3299,3309 ****
      {
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()) - 1); // Undo bcp increment.
  
        __ bind(LisVolatile);
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
--- 3299,3309 ----
      {
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()) - 1); // Undo bcp increment.
  
        __ bind(LisVolatile);
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lwax(R17_tos, Rclass_or_obj, Roffset);
        __ twi_0(R17_tos);
        __ isync();
        break;
      }
*** 3312,3322 ****
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()) - 1); // Undo bcp increment.
  
        __ bind(LisVolatile);
        Label Ldummy;
!       if (support_IRIW_for_not_multiple_copy_atomic_cpu) { __ fence(); }
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ fcmpu(CCR0, F15_ftos, F15_ftos); // Acquire by cmp-br-isync.
        __ bne_predict_not_taken(CCR0, Ldummy);
        __ bind(Ldummy);
        __ isync();
--- 3312,3322 ----
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ dispatch_epilog(state, Bytecodes::length_for(bytecode()) - 1); // Undo bcp increment.
  
        __ bind(LisVolatile);
        Label Ldummy;
!       if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) { __ fence(); }
        __ lfsx(F15_ftos, Rclass_or_obj, Roffset);
        __ fcmpu(CCR0, F15_ftos, F15_ftos); // Acquire by cmp-br-isync.
        __ bne_predict_not_taken(CCR0, Ldummy);
        __ bind(Ldummy);
        __ isync();