
src/hotspot/cpu/x86/templateTable_x86.cpp

3435         // Not flattenable case, covers non-flattenable values and objects
3436         pop_and_check_object(obj);
3437         // Store into the field
3438         do_oop_store(_masm, field, rax);
3439         __ bind(rewriteNotFlattenable);
3440         if (rc == may_rewrite) {
3441           patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3442         }
3443         __ jmp(Done);
3444         // Implementation of the flattenable semantics
3445         __ bind(isFlattenable);
3446         __ null_check(rax);
3447         __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3448         // Not flattened case
3449         pop_and_check_object(obj);
3450         // Store into the field
3451         do_oop_store(_masm, field, rax);
3452         __ jmp(rewriteFlattenable);
3453         __ bind(isFlattened);
3454         pop_and_check_object(obj);
3455         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3456                 rax, off, obj);
3457         __ bind(rewriteFlattenable);
3458         if (rc == may_rewrite) {
3459           patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3460         }
3461         __ jmp(Done);
3462       }
3463     }
3464   }
3465 
3466   __ bind(notObj);
3467   __ cmpl(flags, itos);
3468   __ jcc(Assembler::notEqual, notInt);
3469 
3470   // itos
3471   {
3472     __ pop(itos);
3473     if (!is_static) pop_and_check_object(obj);
3474     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3475     if (!is_static && rc == may_rewrite) {
3476       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
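
In the excerpt above, putfield for an object-typed field splits three ways: a non-flattenable field takes the ordinary oop store, a flattenable but non-flattened field takes the same oop store after a null check, and a flattened field hands the store to the runtime through InterpreterRuntime::write_flattened_value. Conceptually, storing into a flattened field means copying the value object's payload into the receiver at the field offset instead of writing a reference. A minimal stand-alone sketch of that idea follows; the struct layouts, names, and offsets are invented for illustration and are not HotSpot code.

#include <cstddef>
#include <cstdint>
#include <cstring>

// Hypothetical layouts: a value whose payload is embedded (flattened) in the
// enclosing object rather than referenced through a pointer.
struct Point { int32_t x; int32_t y; };           // stand-in for an inline/value type

struct Holder {                                    // stand-in for the receiver object
  int64_t header;                                  // pretend object header
  Point   p;                                       // flattened field: payload stored in place
};

// Models the effect of a flattened putfield: copy the source value's payload
// into the receiver at the field offset; no reference is written.
void write_flattened(Holder* obj, size_t field_offset, const Point* value) {
  std::memcpy(reinterpret_cast<char*>(obj) + field_offset, value, sizeof(Point));
}

int main() {
  Holder h{};
  Point v{1, 2};
  write_flattened(&h, offsetof(Holder, p), &v);    // h.p now holds a copy of v
  return h.p.x + h.p.y;                            // 3
}
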


3649   jvmti_post_fast_field_mod();
3650 
3651   // access constant pool cache
3652   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3653 
3654   // test for volatile with rdx, but rdx is the tos register for lputfield.
3655   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3656                        in_bytes(base +
3657                                 ConstantPoolCacheEntry::flags_offset())));
3658 
3659   // replace index with field offset from cache entry
3660   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3661                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3662 
3663   // [jk] not needed currently
3664   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3665   //                                              Assembler::StoreStore));
3666 
3667   Label notVolatile, Done;
3668   if (bytecode() == Bytecodes::_fast_qputfield) {
3669     __ movl(rscratch2, rdx);
3670   }
3671 
3672   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3673   __ andl(rdx, 0x1);
3674 
3675   // Get object from stack
3676   pop_and_check_object(rcx);
3677 
3678   // field address
3679   const Address field(rcx, rbx, Address::times_1);
3680 
3681   // Check for volatile store
3682   __ testl(rdx, rdx);
3683   __ jcc(Assembler::zero, notVolatile);
3684 
3685   fast_storefield_helper(field, rax);
3686   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3687                                                Assembler::StoreStore));
3688   __ jmp(Done);
3689   __ bind(notVolatile);
3690 
3691   fast_storefield_helper(field, rax);
3692 
3693   __ bind(Done);
3694 }
3695 
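fast_storefield above pulls the flags word out of the constant pool cache entry, isolates the is_volatile bit with a shift and a mask, and follows a volatile store with a StoreLoad|StoreStore barrier while the non-volatile path skips the fence. A rough C++ model of that ordering is sketched below; the bit position is assumed for illustration and a sequentially consistent fence stands in for the emitted barrier.

#include <atomic>
#include <cstdint>

constexpr int is_volatile_shift = 21;   // assumed bit position, for illustration only

// Models the decode-and-store sequence: extract the volatile bit from the
// packed cache-entry flags, perform the store, and fence only on the volatile path.
void store_int_field(int32_t* field_addr, int32_t value, uint32_t cache_flags) {
  bool is_volatile = ((cache_flags >> is_volatile_shift) & 0x1) != 0;
  *field_addr = value;                                   // the field store itself
  if (is_volatile) {
    // Stand-in for volatile_barrier(StoreLoad | StoreStore) emitted after the store.
    std::atomic_thread_fence(std::memory_order_seq_cst);
  }
}
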
3696 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3697 
3698   // access field
3699   switch (bytecode()) {
3700   case Bytecodes::_fast_qputfield:
3701     {
3702       Label isFlattened, done;
3703       __ null_check(rax);
3704       __ test_field_is_flattened(rscratch2, rscratch1, isFlattened);
3705       // Not flattened case
3706       do_oop_store(_masm, field, rax);
3707       __ jmp(done);
3708       __ bind(isFlattened);
3709       call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3710           rax, rbx, rcx);
3711       __ bind(done);
3712     }
3713     break;
3714   case Bytecodes::_fast_aputfield:
3715     {
3716       do_oop_store(_masm, field, rax);
3717     }
3718     break;
3719   case Bytecodes::_fast_lputfield:
3720 #ifdef _LP64
3721     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3722 #else
3723   __ stop("should not be rewritten");
3724 #endif
3725     break;
3726   case Bytecodes::_fast_iputfield:
3727     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3728     break;
3729   case Bytecodes::_fast_zputfield:
3730     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
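
fast_storefield_helper is reached only after putfield has been patched to one of the type-specific fast bytecodes, so it can simply switch on the current bytecode and emit a store of the matching width. The shape of that dispatch, modeled as plain C++ over a hypothetical type tag (the enum, the sizes, and the omission of GC barriers are all simplifications, not the HotSpot API):

#include <cstdint>
#include <cstring>

// Hypothetical tags mirroring the _fast_*putfield family handled by the switch.
enum class FieldKind { Bool, Int, Long, Oop };

// Models the helper's dispatch: one store per field kind, all writing through
// the same precomputed field address.
void fast_store(void* field_addr, FieldKind kind, int64_t value) {
  switch (kind) {
    case FieldKind::Bool: {
      uint8_t b = static_cast<uint8_t>(value & 1);       // booleans are masked to 0/1
      std::memcpy(field_addr, &b, sizeof(b));
      break;
    }
    case FieldKind::Int: {
      int32_t i = static_cast<int32_t>(value);
      std::memcpy(field_addr, &i, sizeof(i));
      break;
    }
    case FieldKind::Long:
      std::memcpy(field_addr, &value, sizeof(value));
      break;
    case FieldKind::Oop:
      // A real oop store also goes through the GC barrier set (do_oop_store);
      // that part is omitted here.
      std::memcpy(field_addr, &value, sizeof(void*));
      break;
  }
}
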




3435         // Not flattenable case, covers non-flattenable values and objects
3436         pop_and_check_object(obj);
3437         // Store into the field
3438         do_oop_store(_masm, field, rax);
3439         __ bind(rewriteNotFlattenable);
3440         if (rc == may_rewrite) {
3441           patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3442         }
3443         __ jmp(Done);
3444         // Implementation of the flattenable semantics
3445         __ bind(isFlattenable);
3446         __ null_check(rax);
3447         __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3448         // Not flattened case
3449         pop_and_check_object(obj);
3450         // Store into the field
3451         do_oop_store(_masm, field, rax);
3452         __ jmp(rewriteFlattenable);
3453         __ bind(isFlattened);
3454         pop_and_check_object(obj);
3455         assert_different_registers(rax, rdx, obj, off);
3456         __ load_klass(rdx, rax);
3457         __ data_for_oop(rax, rax, rdx);
3458         __ addptr(obj, off);
3459         __ access_value_copy(IN_HEAP, rax, obj, rdx);
3460         __ bind(rewriteFlattenable);
3461         if (rc == may_rewrite) {
3462           patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3463         }
3464         __ jmp(Done);
3465       }
3466     }
3467   }
3468 
3469   __ bind(notObj);
3470   __ cmpl(flags, itos);
3471   __ jcc(Assembler::notEqual, notInt);
3472 
3473   // itos
3474   {
3475     __ pop(itos);
3476     if (!is_static) pop_and_check_object(obj);
3477     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3478     if (!is_static && rc == may_rewrite) {
3479       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
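
In this version the flattened case of putfield no longer calls into the runtime: the generated code loads the value's klass, locates the value's payload with data_for_oop, adds the field offset to the receiver, and copies the payload with access_value_copy. A minimal sketch of that inline copy is given below; the metadata fields and sizes are assumed for illustration, and GC barriers on any contained oops are not shown.

#include <cstddef>
#include <cstring>

// Hypothetical value-class metadata: where the payload starts inside a value
// object and how many bytes it spans (in HotSpot this comes from the klass).
struct ValueKlassInfo {
  size_t first_field_offset;   // start of the payload inside the value object
  size_t payload_size;         // number of payload bytes to copy
};

// Models the inline flattened store: source = value oop plus payload offset,
// destination = receiver plus field offset, then a raw copy of the payload.
void inline_flattened_store(char* receiver, size_t field_offset,
                            const char* value_oop, const ValueKlassInfo& k) {
  const char* src = value_oop + k.first_field_offset;    // data_for_oop
  char* dst = receiver + field_offset;                    // addptr(obj, off) / lea(rcx, field)
  std::memcpy(dst, src, k.payload_size);                  // access_value_copy, barriers omitted
}
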


3652   jvmti_post_fast_field_mod();
3653 
3654   // access constant pool cache
3655   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3656 
3657   // test for volatile with rdx, but rdx is the tos register for lputfield.
3658   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3659                        in_bytes(base +
3660                                 ConstantPoolCacheEntry::flags_offset())));
3661 
3662   // replace index with field offset from cache entry
3663   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3664                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3665 
3666   // [jk] not needed currently
3667   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3668   //                                              Assembler::StoreStore));
3669 
3670   Label notVolatile, Done;
3671   if (bytecode() == Bytecodes::_fast_qputfield) {
3672     __ movl(rscratch2, rdx);  // saving flags for isFlattened test
3673   }
3674 
3675   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3676   __ andl(rdx, 0x1);
3677 
3678   // Get object from stack
3679   pop_and_check_object(rcx);
3680 
3681   // field address
3682   const Address field(rcx, rbx, Address::times_1);
3683 
3684   // Check for volatile store
3685   __ testl(rdx, rdx);
3686   __ jcc(Assembler::zero, notVolatile);
3687 
3688   if (bytecode() == Bytecodes::_fast_qputfield) {
3689     __ movl(rdx, rscratch2);  // restoring flags for isFlattened test
3690   }
3691   fast_storefield_helper(field, rax, rdx);
3692   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3693                                                Assembler::StoreStore));
3694   __ jmp(Done);
3695   __ bind(notVolatile);
3696 
3697   if (bytecode() == Bytecodes::_fast_qputfield) {
3698     __ movl(rdx, rscratch2);  // restoring flags for isFlattened test
3699   }
3700   fast_storefield_helper(field, rax, rdx);
3701 
3702   __ bind(Done);
3703 }
3704 
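Here fast_storefield saves the packed flags to rscratch2 before the shrl/andl sequence destroys rdx, then restores them into rdx right before each call to fast_storefield_helper, which still needs the flattened bit for _fast_qputfield. Both properties come from the same cache-entry flags word, as the sketch below models; the bit positions are assumed for illustration (the real shift constants are defined on ConstantPoolCacheEntry).

#include <cstdint>

// Assumed bit positions, for illustration only.
constexpr int is_volatile_shift  = 21;
constexpr int is_flattened_shift = 24;

struct FieldBits { bool is_volatile; bool is_flattened; };

// Both properties are decoded from the same flags word, which is why the
// generated code keeps a copy of the word across the first decode.
FieldBits decode_flags(uint32_t flags) {
  return { ((flags >> is_volatile_shift)  & 0x1) != 0,
           ((flags >> is_flattened_shift) & 0x1) != 0 };
}
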
3705 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3706 
3707   // access field
3708   switch (bytecode()) {
3709   case Bytecodes::_fast_qputfield:
3710     {
3711       Label isFlattened, done;
3712       __ null_check(rax);
3713       __ test_field_is_flattened(flags, rscratch1, isFlattened);
3714       // Not flattened case
3715       do_oop_store(_masm, field, rax);
3716       __ jmp(done);
3717       __ bind(isFlattened);
3718       // Flattened case
3719       __ load_klass(rdx, rax);
3720       __ data_for_oop(rax, rax, rdx);
3721       __ lea(rcx, field);
3722       __ access_value_copy(IN_HEAP, rax, rcx, rdx);
3723       __ bind(done);
3724     }
3725     break;
3726   case Bytecodes::_fast_aputfield:
3727     {
3728       do_oop_store(_masm, field, rax);
3729     }
3730     break;
3731   case Bytecodes::_fast_lputfield:
3732 #ifdef _LP64
3733     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3734 #else
3735   __ stop("should not be rewritten");
3736 #endif
3737     break;
3738   case Bytecodes::_fast_iputfield:
3739     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3740     break;
3741   case Bytecodes::_fast_zputfield:
3742     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);

