< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.cpp

Print this page




6604 #if INCLUDE_ALL_GCS && defined(_LP64)
6605 
6606 void MacroAssembler::load_barrier(Register ref, Address ref_addr, bool expand_call, LoadBarrierOn on) {
6607   Label done;
6608   const Register resolved_ref_addr = rsi;
6609   assert_different_registers(ref, resolved_ref_addr);
6610 
6611   BLOCK_COMMENT("load_barrier {");
6612 
6613   // Save temp register
6614   push(resolved_ref_addr);
6615 
6616   // Resolve reference address now, ref_addr might use the same register as ref,
6617   // which means it gets killed when we write to ref.
6618   lea(resolved_ref_addr, ref_addr);
6619 
6620   // Load reference
6621   movptr(ref, Address(resolved_ref_addr, 0));
6622 
6623   // Check if mask is not bad, which includes an implicit null check.
6624   testptr(ref, ExternalAddress((address)&ZAddressBadMask));
6625   jcc(Assembler::zero, done);
6626 
6627   // Save live registers
6628   push(rax);
6629   push(rcx);
6630   push(rdx);
6631   push(rdi);
6632   push(r8);
6633   push(r9);
6634   push(r10);
6635   push(r11);
6636 
6637   // We may end up here from generate_native_wrapper, then the method may have
6638   // floats as arguments, and we must spill them before calling the VM runtime
6639   // leaf. From the interpreter all floats are passed on the stack.
6640   assert(Argument::n_float_register_parameters_j == 8, "Found %d float regs", Argument::n_float_register_parameters_j);
6641   int f_spill_size = Argument::n_float_register_parameters_j * wordSize * 2;
6642   subptr(rsp, f_spill_size);
6643   movdqu(Address(rsp, 14 * wordSize), xmm7);
6644   movdqu(Address(rsp, 12 * wordSize), xmm6);


6734 #endif
6735     movptr(dst, src);
6736 }
6737 
6738 // Doesn't do verification, generates fixed size code
6739 void MacroAssembler::load_heap_oop_not_null(Register dst, Address src) {
6740 #ifdef _LP64
6741   if (UseCompressedOops) {
6742     movl(dst, src);                // 32-bit load of the narrow oop
6743     decode_heap_oop_not_null(dst); // expand narrow oop to full pointer; skips the null check
6744   } else
6745 #endif
6746     movptr(dst, src);              // uncompressed oop: plain pointer-sized load
6747 }
6748 
6749 void MacroAssembler::store_heap_oop(Address dst, Register src) {
6750 #ifdef ASSERT
6751   if (VerifyOops && UseLoadBarrier) {
6752     // Check that none of the bad-mask bits are set in the oop being stored
6753     Label done;
6754     testptr(src, as_Address(ExternalAddress((address)&ZAddressBadMask)));
6755     jcc(Assembler::zero, done);    // zero => no bad bits set, oop looks healthy
6756     STOP("Writing broken oop");
6757     should_not_reach_here();
6758     bind(done);
6759   }
6760 #endif
6761 
6762 #ifdef _LP64
6763   if (UseCompressedOops) {
6764     assert(!dst.uses(src), "not enough registers"); // encode below clobbers src, so dst must not depend on it
6765     encode_heap_oop(src);          // compress src in place to a narrow oop
6766     movl(dst, src);                // 32-bit store of the narrow oop
6767   } else
6768 #endif
6769     movptr(dst, src);              // uncompressed oop: plain pointer-sized store
6770 }
6771 
6772 void MacroAssembler::cmp_heap_oop(Register src1, Address src2, Register tmp) {
6773   assert_different_registers(src1, tmp);
6774 #ifdef _LP64




6604 #if INCLUDE_ALL_GCS && defined(_LP64)
6605 
6606 void MacroAssembler::load_barrier(Register ref, Address ref_addr, bool expand_call, LoadBarrierOn on) {
6607   Label done;
6608   const Register resolved_ref_addr = rsi;
6609   assert_different_registers(ref, resolved_ref_addr);
6610 
6611   BLOCK_COMMENT("load_barrier {");
6612 
6613   // Save temp register
6614   push(resolved_ref_addr);
6615 
6616   // Resolve reference address now, ref_addr might use the same register as ref,
6617   // which means it gets killed when we write to ref.
6618   lea(resolved_ref_addr, ref_addr);
6619 
6620   // Load reference
6621   movptr(ref, Address(resolved_ref_addr, 0));
6622 
6623   // Check if mask is not bad, which includes an implicit null check.
6624   testptr(ref, Address(r15_thread, JavaThread::zaddress_bad_mask_offset()));
6625   jcc(Assembler::zero, done);
6626 
6627   // Save live registers
6628   push(rax);
6629   push(rcx);
6630   push(rdx);
6631   push(rdi);
6632   push(r8);
6633   push(r9);
6634   push(r10);
6635   push(r11);
6636 
6637   // We may end up here from generate_native_wrapper, then the method may have
6638   // floats as arguments, and we must spill them before calling the VM runtime
6639   // leaf. From the interpreter all floats are passed on the stack.
6640   assert(Argument::n_float_register_parameters_j == 8, "Found %d float regs", Argument::n_float_register_parameters_j);
6641   int f_spill_size = Argument::n_float_register_parameters_j * wordSize * 2;
6642   subptr(rsp, f_spill_size);
6643   movdqu(Address(rsp, 14 * wordSize), xmm7);
6644   movdqu(Address(rsp, 12 * wordSize), xmm6);


6734 #endif
6735     movptr(dst, src);
6736 }
6737 
6738 // Doesn't do verification, generates fixed size code
6739 void MacroAssembler::load_heap_oop_not_null(Register dst, Address src) {
6740 #ifdef _LP64
6741   if (UseCompressedOops) {
6742     movl(dst, src);                // 32-bit load of the narrow oop
6743     decode_heap_oop_not_null(dst); // expand narrow oop to full pointer; skips the null check
6744   } else
6745 #endif
6746     movptr(dst, src);              // uncompressed oop: plain pointer-sized load
6747 }
6748 
6749 void MacroAssembler::store_heap_oop(Address dst, Register src) {
6750 #ifdef ASSERT
6751   if (VerifyOops && UseLoadBarrier) {
6752     // Check that none of the bad-mask bits are set in the oop being stored.
6753     // The bad mask is read from the thread-local field on r15_thread here.
6754     Label done;
6755     testptr(src, Address(r15_thread, JavaThread::zaddress_bad_mask_offset()));
6756     jcc(Assembler::zero, done);    // zero => no bad bits set, oop looks healthy
6757     STOP("Writing broken oop");
6758     should_not_reach_here();
6759     bind(done);
6760   }
6761 #endif
6762 
6763 #ifdef _LP64
6764   if (UseCompressedOops) {
6765     assert(!dst.uses(src), "not enough registers"); // encode below clobbers src, so dst must not depend on it
6766     encode_heap_oop(src);          // compress src in place to a narrow oop
6767     movl(dst, src);                // 32-bit store of the narrow oop
6768   } else
6769 #endif
6770     movptr(dst, src);              // uncompressed oop: plain pointer-sized store
6771 }
6771 
6772 void MacroAssembler::cmp_heap_oop(Register src1, Address src2, Register tmp) {
6773   assert_different_registers(src1, tmp);
6774 #ifdef _LP64


< prev index next >