src/hotspot/cpu/x86/templateTable_x86.cpp (old version)

2697 // (2) A volatile read cannot let unrelated NON-volatile memory refs that
2698 //     happen after the read float up to before the read.  It's OK for
2699 //     non-volatile memory refs that happen before the volatile read to
2700 //     float down below it.
2701 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2702 //     memory refs that happen BEFORE the write float down to after the
2703 //     write.  It's OK for non-volatile memory refs that happen after the
2704 //     volatile write to float up before it.
2705 //
2706 // We only put in barriers around volatile refs (they are expensive),
2707 // not _between_ memory refs (that would require us to track the
2708 // flavor of the previous memory refs).  Requirements (2) and (3)
2709 // require some barriers before volatile stores and after volatile
2710 // loads.  These nearly cover requirement (1) but miss the
2711 // volatile-store-volatile-load case.  This final case is placed after
2712 // volatile-stores although it could just as well go before
2713 // volatile-loads.
2714 
2715 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
2716   // Helper function to insert a memory barrier
2717   if (!os::is_MP()) return;    // Not needed on a single CPU
2718   __ membar(order_constraint);
2719 }
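
To make the placement rules concrete, here is a minimal self-contained sketch, not HotSpot code, of where rules (1)-(3) put fences, expressed with C++11 atomics instead of Assembler::Membar_mask_bits; all names below are illustrative:

    #include <atomic>

    int plain_data;                // an "unrelated non-volatile" memory ref
    std::atomic<int> vol_field;    // stands in for a Java volatile field

    int volatile_read_sketch() {
      int v = vol_field.load(std::memory_order_relaxed);
      // Rule (2): later plain refs may not float above the volatile read.
      std::atomic_thread_fence(std::memory_order_acquire);  // ~ LoadLoad|LoadStore
      return v + plain_data;
    }

    void volatile_write_sketch(int v) {
      plain_data = v;
      // Rule (3): earlier plain refs may not float below the volatile write.
      std::atomic_thread_fence(std::memory_order_release);  // ~ LoadStore|StoreStore
      vol_field.store(v, std::memory_order_relaxed);
      // Rule (1) residue: the volatile-store/volatile-load case still needs a
      // full StoreLoad fence, placed after volatile stores as the text says.
      std::atomic_thread_fence(std::memory_order_seq_cst);  // ~ StoreLoad
    }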
2720 
2721 void TemplateTable::resolve_cache_and_index(int byte_no,
2722                                             Register Rcache,
2723                                             Register index,
2724                                             size_t index_size) {
2725   const Register temp = rbx;
2726   assert_different_registers(Rcache, index, temp);
2727 
2728   Label resolved;
2729 
2730   Bytecodes::Code code = bytecode();
2731   switch (code) {
2732   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2733   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2734   default: break;
2735   }
2736 
2737   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");


3476     __ testl(rcx, rcx);
3477     __ jcc(Assembler::zero, L1);
3478     // access constant pool cache entry
3479     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rcx, 1));
3480     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rcx, rdx, 1));
3481     __ verify_oop(rax);
3482     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3483     LP64_ONLY(__ mov(c_rarg1, rax));
3484     // c_rarg1: object pointer copied above
3485     // c_rarg2: cache entry pointer
3486     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3487     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3488     __ pop_ptr(rax); // restore object pointer
3489     __ bind(L1);
3490   }
3491 
3492   // access constant pool cache
3493   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3494   // replace index with field offset from cache entry
3495   // [jk] not needed currently
3496   // if (os::is_MP()) {
3497   //   __ movl(rdx, Address(rcx, rbx, Address::times_8,
3498   //                        in_bytes(ConstantPoolCache::base_offset() +
3499   //                                 ConstantPoolCacheEntry::flags_offset())));
3500   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3501   //   __ andl(rdx, 0x1);
3502   // }
3503   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3504                          in_bytes(ConstantPoolCache::base_offset() +
3505                                   ConstantPoolCacheEntry::f2_offset())));
3506 
3507   // rax: object
3508   __ verify_oop(rax);
3509   __ null_check(rax);
3510   Address field(rax, rbx, Address::times_1);
3511 
3512   // access field
3513   switch (bytecode()) {
3514   case Bytecodes::_fast_agetfield:
3515     do_oop_load(_masm, field, rax);
3516     __ verify_oop(rax);
3517     break;
3518   case Bytecodes::_fast_lgetfield:
3519 #ifdef _LP64
3520     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3521 #else
3522   __ stop("should not be rewritten");


3527     break;
3528   case Bytecodes::_fast_bgetfield:
3529     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3530     break;
3531   case Bytecodes::_fast_sgetfield:
3532     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3533     break;
3534   case Bytecodes::_fast_cgetfield:
3535     __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3536     break;
3537   case Bytecodes::_fast_fgetfield:
3538     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3539     break;
3540   case Bytecodes::_fast_dgetfield:
3541     __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
3542     break;
3543   default:
3544     ShouldNotReachHere();
3545   }
3546   // [jk] not needed currently
3547   // if (os::is_MP()) {
3548   //   Label notVolatile;
3549   //   __ testl(rdx, rdx);
3550   //   __ jcc(Assembler::zero, notVolatile);
3551   //   __ membar(Assembler::LoadLoad);
3552   //   __ bind(notVolatile);
3553   // }
3554 }
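
The Address(rax, rbx, Address::times_1) pattern above works because the f2 slot of the resolved cache entry caches the field's byte offset, so no further lookup happens at execution time. A minimal sketch with an assumed entry layout (not HotSpot's real ConstantPoolCacheEntry):

    #include <cstdint>

    // Assumed layout for illustration only; after resolution, f2 holds the
    // field's byte offset within the receiver object.
    struct CacheEntrySketch { intptr_t f1; intptr_t f2; intptr_t flags; };

    int32_t fast_igetfield_sketch(const char* obj, const CacheEntrySketch& e) {
      const char* field = obj + e.f2;                    // base + f2, times_1 scale
      return *reinterpret_cast<const int32_t*>(field);   // e.g. a T_INT load
    }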
3555 
3556 void TemplateTable::fast_xaccess(TosState state) {
3557   transition(vtos, state);
3558 
3559   // get receiver
3560   __ movptr(rax, aaddress(0));
3561   // access constant pool cache
3562   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3563   __ movptr(rbx,
3564             Address(rcx, rdx, Address::times_ptr,
3565                     in_bytes(ConstantPoolCache::base_offset() +
3566                              ConstantPoolCacheEntry::f2_offset())));
3567   // make sure exception is reported in correct bcp range (getfield is
3568   // next instruction)
3569   __ increment(rbcp);
3570   __ null_check(rax);
3571   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3572   switch (state) {
3573   case itos:
3574     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3575     break;
3576   case atos:
3577     do_oop_load(_masm, field, rax);
3578     __ verify_oop(rax);
3579     break;
3580   case ftos:
3581     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3582     break;
3583   default:
3584     ShouldNotReachHere();
3585   }
3586 
3587   // [jk] not needed currently
3588   // if (os::is_MP()) {
3589   //   Label notVolatile;
3590   //   __ movl(rdx, Address(rcx, rdx, Address::times_8,
3591   //                        in_bytes(ConstantPoolCache::base_offset() +
3592   //                                 ConstantPoolCacheEntry::flags_offset())));
3593   //   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3594   //   __ testl(rdx, 0x1);
3595   //   __ jcc(Assembler::zero, notVolatile);
3596   //   __ membar(Assembler::LoadLoad);
3597   //   __ bind(notVolatile);
3598   // }
3599 
3600   __ decrement(rbcp);
3601 }
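
The increment(rbcp)/decrement(rbcp) bracketing above exists because fast_xaccess handles a fused aload_0 + getfield: if the implicit null check traps, the exception must be attributed to the getfield, which is the next bytecode. A pseudo-interpreter sketch of the idea (hypothetical names, not HotSpot code):

    #include <stdexcept>

    // Bump the bytecode pointer so a null-receiver trap is reported at the
    // fused getfield's position, then restore it for normal dispatch.
    void fast_xaccess_bcp_sketch(int& bcp, const void* receiver) {
      ++bcp;                            // pretend we are at the getfield
      if (receiver == nullptr) {
        throw std::runtime_error("NPE attributed to the getfield bcp");
      }
      --bcp;                            // restore; dispatch continues as usual
    }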
3602 
3603 //-----------------------------------------------------------------------------
3604 // Calls
3605 
3606 void TemplateTable::count_calls(Register method, Register temp) {
3607   // implemented elsewhere
3608   ShouldNotReachHere();
3609 }
3610 
3611 void TemplateTable::prepare_invoke(int byte_no,
3612                                    Register method,  // linked method (or i-klass)
3613                                    Register index,   // itable index, MethodType, etc.
3614                                    Register recv,    // if caller wants to see it
3615                                    Register flags    // if caller wants to test it
3616                                    ) {
3617   // determine flags
3618   const Bytecodes::Code code = bytecode();


src/hotspot/cpu/x86/templateTable_x86.cpp (new version: os::is_MP() guards removed)

2697 // (2) A volatile read cannot let unrelated NON-volatile memory refs that
2698 //     happen after the read float up to before the read.  It's OK for
2699 //     non-volatile memory refs that happen before the volatile read to
2700 //     float down below it.
2701 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2702 //     memory refs that happen BEFORE the write float down to after the
2703 //     write.  It's OK for non-volatile memory refs that happen after the
2704 //     volatile write to float up before it.
2705 //
2706 // We only put in barriers around volatile refs (they are expensive),
2707 // not _between_ memory refs (that would require us to track the
2708 // flavor of the previous memory refs).  Requirements (2) and (3)
2709 // require some barriers before volatile stores and after volatile
2710 // loads.  These nearly cover requirement (1) but miss the
2711 // volatile-store-volatile-load case.  This final case is placed after
2712 // volatile-stores although it could just as well go before
2713 // volatile-loads.
2714 
2715 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
2716   // Helper function to insert a memory barrier

2717   __ membar(order_constraint);
2718 }
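
Note the os::is_MP() guard is gone: the barrier is now emitted unconditionally, which is safe since a fence on a uniprocessor is merely redundant work. On x86's TSO model only StoreLoad ordering costs an instruction anyway; a hedged sketch of that asymmetry (assumed codegen, not what MacroAssembler::membar literally emits):

    #include <atomic>

    // On x86, acquire/release fences are compiler-only barriers (no
    // instruction), while a seq_cst fence becomes a serializing instruction
    // (mfence or a locked RMW such as lock addl).
    void ordering_cost_sketch() {
      std::atomic_thread_fence(std::memory_order_acquire);  // free on x86
      std::atomic_thread_fence(std::memory_order_release);  // free on x86
      std::atomic_thread_fence(std::memory_order_seq_cst);  // mfence / lock addl
    }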
2719 
2720 void TemplateTable::resolve_cache_and_index(int byte_no,
2721                                             Register Rcache,
2722                                             Register index,
2723                                             size_t index_size) {
2724   const Register temp = rbx;
2725   assert_different_registers(Rcache, index, temp);
2726 
2727   Label resolved;
2728 
2729   Bytecodes::Code code = bytecode();
2730   switch (code) {
2731   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2732   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2733   default: break;
2734   }
2735 
2736   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");


3475     __ testl(rcx, rcx);
3476     __ jcc(Assembler::zero, L1);
3477     // access constant pool cache entry
3478     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rcx, 1));
3479     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rcx, rdx, 1));
3480     __ verify_oop(rax);
3481     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3482     LP64_ONLY(__ mov(c_rarg1, rax));
3483     // c_rarg1: object pointer copied above
3484     // c_rarg2: cache entry pointer
3485     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3486     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3487     __ pop_ptr(rax); // restore object pointer
3488     __ bind(L1);
3489   }
3490 
3491   // access constant pool cache
3492   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3493   // replace index with field offset from cache entry
3494   // [jk] not needed currently

3495   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3496   //                      in_bytes(ConstantPoolCache::base_offset() +
3497   //                               ConstantPoolCacheEntry::flags_offset())));
3498   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3499   // __ andl(rdx, 0x1);
3500   //
3501   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3502                          in_bytes(ConstantPoolCache::base_offset() +
3503                                   ConstantPoolCacheEntry::f2_offset())));
3504 
3505   // rax: object
3506   __ verify_oop(rax);
3507   __ null_check(rax);
3508   Address field(rax, rbx, Address::times_1);
3509 
3510   // access field
3511   switch (bytecode()) {
3512   case Bytecodes::_fast_agetfield:
3513     do_oop_load(_masm, field, rax);
3514     __ verify_oop(rax);
3515     break;
3516   case Bytecodes::_fast_lgetfield:
3517 #ifdef _LP64
3518     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3519 #else
3520   __ stop("should not be rewritten");


3525     break;
3526   case Bytecodes::_fast_bgetfield:
3527     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3528     break;
3529   case Bytecodes::_fast_sgetfield:
3530     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3531     break;
3532   case Bytecodes::_fast_cgetfield:
3533     __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3534     break;
3535   case Bytecodes::_fast_fgetfield:
3536     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3537     break;
3538   case Bytecodes::_fast_dgetfield:
3539     __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
3540     break;
3541   default:
3542     ShouldNotReachHere();
3543   }
3544   // [jk] not needed currently

3545   // Label notVolatile;
3546   // __ testl(rdx, rdx);
3547   // __ jcc(Assembler::zero, notVolatile);
3548   // __ membar(Assembler::LoadLoad);
3549   // __ bind(notVolatile);

3550 }
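
The "[jk] not needed currently" block above stays commented out because x86's TSO model already orders loads with respect to later loads, so a LoadLoad membar after a volatile load would emit no instruction. If it were ever revived, the guarded is-volatile test would reduce to a bit extract roughly like this sketch (shift value is a placeholder, not the real ConstantPoolCacheEntry::is_volatile_shift):

    #include <cstdint>

    constexpr int kIsVolatileShiftSketch = 21;  // placeholder constant

    // Extract the is_volatile bit from a cache entry's flags word.
    bool is_volatile_sketch(uint32_t flags) {
      return ((flags >> kIsVolatileShiftSketch) & 0x1u) != 0;
    }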
3551 
3552 void TemplateTable::fast_xaccess(TosState state) {
3553   transition(vtos, state);
3554 
3555   // get receiver
3556   __ movptr(rax, aaddress(0));
3557   // access constant pool cache
3558   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
3559   __ movptr(rbx,
3560             Address(rcx, rdx, Address::times_ptr,
3561                     in_bytes(ConstantPoolCache::base_offset() +
3562                              ConstantPoolCacheEntry::f2_offset())));
3563   // make sure exception is reported in correct bcp range (getfield is
3564   // next instruction)
3565   __ increment(rbcp);
3566   __ null_check(rax);
3567   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
3568   switch (state) {
3569   case itos:
3570     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3571     break;
3572   case atos:
3573     do_oop_load(_masm, field, rax);
3574     __ verify_oop(rax);
3575     break;
3576   case ftos:
3577     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3578     break;
3579   default:
3580     ShouldNotReachHere();
3581   }
3582 
3583   // [jk] not needed currently

3584   // Label notVolatile;
3585   // __ movl(rdx, Address(rcx, rdx, Address::times_8,
3586   //                      in_bytes(ConstantPoolCache::base_offset() +
3587   //                               ConstantPoolCacheEntry::flags_offset())));
3588   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3589   // __ testl(rdx, 0x1);
3590   // __ jcc(Assembler::zero, notVolatile);
3591   // __ membar(Assembler::LoadLoad);
3592   // __ bind(notVolatile);

3593 
3594   __ decrement(rbcp);
3595 }
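
A note on why get_cache_and_index_at_bcp is called with offset 2 here but 1 in fast_accessfield: the fused bytecode occupies the old aload_0 slot, so the getfield opcode sits at bcp + 1 and its two cache-index bytes start at bcp + 2. A sketch under that assumed stream layout (native little-endian index order assumed here):

    #include <cstdint>

    // bcp[0] = fused fast_xaccess opcode, bcp[1] = getfield opcode,
    // bcp[2..3] = constant pool cache index as rewritten by the interpreter.
    uint16_t cache_index_at_bcp2_sketch(const uint8_t* bcp) {
      return static_cast<uint16_t>(bcp[2] | (bcp[3] << 8));
    }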
3596 
3597 //-----------------------------------------------------------------------------
3598 // Calls
3599 
3600 void TemplateTable::count_calls(Register method, Register temp) {
3601   // implemented elsewhere
3602   ShouldNotReachHere();
3603 }
3604 
3605 void TemplateTable::prepare_invoke(int byte_no,
3606                                    Register method,  // linked method (or i-klass)
3607                                    Register index,   // itable index, MethodType, etc.
3608                                    Register recv,    // if caller wants to see it
3609                                    Register flags    // if caller wants to test it
3610                                    ) {
3611   // determine flags
3612   const Bytecodes::Code code = bytecode();

