< prev index next >

src/hotspot/cpu/x86/x86_64.ad

Print this page




 890     st->print("# stack alignment check");
 891 #endif
 892   }
 893   if (C->stub_function() != NULL && BarrierSet::barrier_set()->barrier_set_nmethod() != NULL) {
 894     st->print("\n\t");
 895     st->print("cmpl    [r15_thread + #disarmed_offset], #disarmed_value\t");
 896     st->print("\n\t");
 897     st->print("je      fast_entry\t");
 898     st->print("\n\t");
 899     st->print("call    #nmethod_entry_barrier_stub\t");
 900     st->print("\n\tfast_entry:");
 901   }
 902   st->cr();
 903 }
 904 #endif
 905 
     // Emit the C2 method prologue.  verified_entry() is handed the frame
     // size, the bang size (0 when need_stack_bang() reports banging is not
     // required), and whether this is a stub compile (stub_function() !=
     // NULL); it presumably emits the stack bang and frame set-up code --
     // confirm against MacroAssembler::verified_entry.
 906 void MachPrologNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
 907   Compile* C = ra_->C;
 908   MacroAssembler _masm(&cbuf);  // required by the __ macro below
 909 
 910   int framesize = C->frame_size_in_bytes();
 911   int bangsize = C->bang_size_in_bytes();
 912 
 913   __ verified_entry(framesize, C->need_stack_bang(bangsize)?bangsize:0, false, C->stub_function() != NULL);
 914 
 915   C->set_frame_complete(cbuf.insts_size());  // record instruction offset at which the frame is complete
 916 
 917   if (C->has_mach_constant_base_node()) {
 918     // NOTE: We set the table base offset here because users might be
 919     // emitted before MachConstantBaseNode.
 920     Compile::ConstantTable& constant_table = C->constant_table();
 921     constant_table.set_table_base_offset(constant_table.calculate_table_base_offset());
 922   }
 923 }
 924 
     // Size in bytes of the emitted prologue.  Delegates to the generic
     // MachNode::size() measurement rather than computing it analytically.
 925 uint MachPrologNode::size(PhaseRegAlloc* ra_) const
 926 {
 927   return MachNode::size(ra_); // too many variables; just compute it
 928                               // the hard way
 929 }
 930 
 931 int MachPrologNode::reloc() const
 932 {
 933   return 0; // a large enough number


 967                    "# Safepoint: poll for GC");
 968     } else {
 969       st->print_cr("testl  rax, [rip + #offset_to_poll_page]\t"
 970                    "# Safepoint: poll for GC");
 971     }
 972   }
 973 }
 974 #endif
 975 
 976 void MachEpilogNode::emit(CodeBuffer& cbuf, PhaseRegAlloc* ra_) const
 977 {
 978   Compile* C = ra_->C;
 979   MacroAssembler _masm(&cbuf);
 980 
 981   if (generate_vzeroupper(C)) {
 982     // Clear upper bits of YMM registers when current compiled code uses
 983     // wide vectors to avoid AVX <-> SSE transition penalty during call.
 984     __ vzeroupper();
 985   }
 986 
 987   int framesize = C->frame_size_in_bytes();
 988   assert((framesize & (StackAlignmentInBytes-1)) == 0, "frame size not aligned");
 989   // Remove word for return adr already pushed
 990   // and RBP
 991   framesize -= 2*wordSize;
 992 
 993   // Note that VerifyStackAtCalls' Majik cookie does not change the frame size popped here
 994 
 995   if (framesize) {
 996     emit_opcode(cbuf, Assembler::REX_W);
 997     if (framesize < 0x80) {
 998       emit_opcode(cbuf, 0x83); // addq rsp, #framesize
 999       emit_rm(cbuf, 0x3, 0x00, RSP_enc);
1000       emit_d8(cbuf, framesize);
1001     } else {
1002       emit_opcode(cbuf, 0x81); // addq rsp, #framesize
1003       emit_rm(cbuf, 0x3, 0x00, RSP_enc);
1004       emit_d32(cbuf, framesize);
1005     }
1006   }
1007 
1008   // popq rbp
1009   emit_opcode(cbuf, 0x58 | RBP_enc);
1010 
1011   if (StackReservedPages > 0 && C->has_reserved_stack_access()) {
1012     __ reserved_stack_check();
1013   }
1014 
1015   if (do_polling() && C->is_method_compilation()) {
1016     MacroAssembler _masm(&cbuf);
1017     if (SafepointMechanism::uses_thread_local_poll()) {
1018       __ movq(rscratch1, Address(r15_thread, Thread::polling_page_offset()));
1019       __ relocate(relocInfo::poll_return_type);
1020       __ testl(rax, Address(rscratch1, 0));
1021     } else {
1022       AddressLiteral polling_page(os::get_polling_page(), relocInfo::poll_return_type);
1023       if (Assembler::is_polling_page_far()) {
1024         __ lea(rscratch1, polling_page);
1025         __ relocate(relocInfo::poll_return_type);
1026         __ testl(rax, Address(rscratch1, 0));
1027       } else {
1028         __ testl(rax, polling_page);
1029       }


1558   int reg = ra_->get_encode(this);
1559   if (offset >= 0x80) {
1560     emit_opcode(cbuf, reg < 8 ? Assembler::REX_W : Assembler::REX_WR);
1561     emit_opcode(cbuf, 0x8D); // LEA  reg,[SP+offset]
1562     emit_rm(cbuf, 0x2, reg & 7, 0x04);
1563     emit_rm(cbuf, 0x0, 0x04, RSP_enc);
1564     emit_d32(cbuf, offset);
1565   } else {
1566     emit_opcode(cbuf, reg < 8 ? Assembler::REX_W : Assembler::REX_WR);
1567     emit_opcode(cbuf, 0x8D); // LEA  reg,[SP+offset]
1568     emit_rm(cbuf, 0x1, reg & 7, 0x04);
1569     emit_rm(cbuf, 0x0, 0x04, RSP_enc);
1570     emit_d8(cbuf, offset);
1571   }
1572 }
1573 
     // Fixed size in bytes of the BoxLock LEA reg,[rsp+offset] emitted by
     // BoxLockNode::emit: REX.W(1) + opcode 8D(1) + ModRM(1) + SIB(1) +
     // disp8(1) = 5 when the offset fits in a signed byte, otherwise the
     // disp32 form gives 4 + 4 = 8.
1574 uint BoxLockNode::size(PhaseRegAlloc *ra_) const
1575 {
1576   int offset = ra_->reg2offset(in_RegMask(0).find_first_elem());
1577   return (offset < 0x80) ? 5 : 8; // REX.W + LEA: disp8 form is 5 bytes, disp32 form is 8





















1578 }
1579 
1580 //=============================================================================
1581 #ifndef PRODUCT
     // Pseudo-assembly listing (non-PRODUCT) of the unverified entry point:
     // load the receiver's klass (decoding it first when compressed class
     // pointers are in use), compare against the expected klass in rax, and
     // jump to the inline-cache miss stub on mismatch.
1582 void MachUEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
1583 {
1584   if (UseCompressedClassPointers) {
1585     st->print_cr("movl    rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
1586     st->print_cr("\tdecode_klass_not_null rscratch1, rscratch1");
1587     st->print_cr("\tcmpq    rax, rscratch1\t # Inline cache check");
1588   } else {
1589     st->print_cr("\tcmpq    rax, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t"
1590                  "# Inline cache check");
1591   }
1592   st->print_cr("\tjne     SharedRuntime::_ic_miss_stub");
1593   st->print_cr("\tnop\t# nops to align entry point");
1594 }
1595 #endif
1596 
1597 void MachUEPNode::emit(CodeBuffer& cbuf, PhaseRegAlloc* ra_) const




 890     st->print("# stack alignment check");
 891 #endif
 892   }
 893   if (C->stub_function() != NULL && BarrierSet::barrier_set()->barrier_set_nmethod() != NULL) {
 894     st->print("\n\t");
 895     st->print("cmpl    [r15_thread + #disarmed_offset], #disarmed_value\t");
 896     st->print("\n\t");
 897     st->print("je      fast_entry\t");
 898     st->print("\n\t");
 899     st->print("call    #nmethod_entry_barrier_stub\t");
 900     st->print("\n\tfast_entry:");
 901   }
 902   st->cr();
 903 }
 904 #endif
 905 
     // Emit the C2 method prologue.  Frame set-up (sizes, stack bang, stub
     // handling) is now computed inside MacroAssembler::verified_entry from
     // the Compile object -- confirm its exact contents in macroAssembler_x86.
 906 void MachPrologNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
 907   Compile* C = ra_->C;
 908   MacroAssembler _masm(&cbuf);  // required by the __ macro below
 909 
 910   __ verified_entry(C);
 911   __ bind(*_verified_entry);  // bind verified-entry label; presumably the target of MachVVEPNode's jmp -- confirm


 912 
 913   C->set_frame_complete(cbuf.insts_size());  // record instruction offset at which the frame is complete
 914 
 915   if (C->has_mach_constant_base_node()) {
 916     // NOTE: We set the table base offset here because users might be
 917     // emitted before MachConstantBaseNode.
 918     Compile::ConstantTable& constant_table = C->constant_table();
 919     constant_table.set_table_base_offset(constant_table.calculate_table_base_offset());
 920   }
 921 }
 922 
     // Size in bytes of the emitted prologue.  Delegates to the generic
     // MachNode::size() measurement rather than computing it analytically.
 923 uint MachPrologNode::size(PhaseRegAlloc* ra_) const
 924 {
 925   return MachNode::size(ra_); // too many variables; just compute it
 926                               // the hard way
 927 }
 928 
 929 int MachPrologNode::reloc() const
 930 {
 931   return 0; // a large enough number


 965                    "# Safepoint: poll for GC");
 966     } else {
 967       st->print_cr("testl  rax, [rip + #offset_to_poll_page]\t"
 968                    "# Safepoint: poll for GC");
 969     }
 970   }
 971 }
 972 #endif
 973 
 974 void MachEpilogNode::emit(CodeBuffer& cbuf, PhaseRegAlloc* ra_) const
 975 {
 976   Compile* C = ra_->C;
 977   MacroAssembler _masm(&cbuf);
 978 
 979   if (generate_vzeroupper(C)) {
 980     // Clear upper bits of YMM registers when current compiled code uses
 981     // wide vectors to avoid AVX <-> SSE transition penalty during call.
 982     __ vzeroupper();
 983   }
 984 
 985   __ restore_stack(C);



















 986 


 987 
 988   if (StackReservedPages > 0 && C->has_reserved_stack_access()) {
 989     __ reserved_stack_check();
 990   }
 991 
 992   if (do_polling() && C->is_method_compilation()) {
 993     MacroAssembler _masm(&cbuf);
 994     if (SafepointMechanism::uses_thread_local_poll()) {
 995       __ movq(rscratch1, Address(r15_thread, Thread::polling_page_offset()));
 996       __ relocate(relocInfo::poll_return_type);
 997       __ testl(rax, Address(rscratch1, 0));
 998     } else {
 999       AddressLiteral polling_page(os::get_polling_page(), relocInfo::poll_return_type);
1000       if (Assembler::is_polling_page_far()) {
1001         __ lea(rscratch1, polling_page);
1002         __ relocate(relocInfo::poll_return_type);
1003         __ testl(rax, Address(rscratch1, 0));
1004       } else {
1005         __ testl(rax, polling_page);
1006       }


1535   int reg = ra_->get_encode(this);
1536   if (offset >= 0x80) {
1537     emit_opcode(cbuf, reg < 8 ? Assembler::REX_W : Assembler::REX_WR);
1538     emit_opcode(cbuf, 0x8D); // LEA  reg,[SP+offset]
1539     emit_rm(cbuf, 0x2, reg & 7, 0x04);
1540     emit_rm(cbuf, 0x0, 0x04, RSP_enc);
1541     emit_d32(cbuf, offset);
1542   } else {
1543     emit_opcode(cbuf, reg < 8 ? Assembler::REX_W : Assembler::REX_WR);
1544     emit_opcode(cbuf, 0x8D); // LEA  reg,[SP+offset]
1545     emit_rm(cbuf, 0x1, reg & 7, 0x04);
1546     emit_rm(cbuf, 0x0, 0x04, RSP_enc);
1547     emit_d8(cbuf, offset);
1548   }
1549 }
1550 
     // Fixed size in bytes of the BoxLock LEA reg,[rsp+offset] emitted by
     // BoxLockNode::emit: REX.W(1) + opcode 8D(1) + ModRM(1) + SIB(1) +
     // disp8(1) = 5 when the offset fits in a signed byte, otherwise the
     // disp32 form gives 4 + 4 = 8.
1551 uint BoxLockNode::size(PhaseRegAlloc *ra_) const
1552 {
1553   int offset = ra_->reg2offset(in_RegMask(0).find_first_elem());
1554   return (offset < 0x80) ? 5 : 8; // REX.W + LEA: disp8 form is 5 bytes, disp32 form is 8
1555 }
1556 
1557 //=============================================================================
1558 #ifndef PRODUCT
     // Debug listing (non-PRODUCT) for the value-type entry point; prints
     // only the node name.
1559 void MachVVEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
1560 {
1561   st->print_cr("MachVVEPNode");
1562 }
1563 #endif
1564 
     // Value-type entry point: scalarize value-type arguments that the
     // caller passed as oops, then jump to the verified entry label
     // (presumably bound in MachPrologNode::emit -- confirm), skipping the
     // unverified inline-cache-check entry.
1565 void MachVVEPNode::emit(CodeBuffer& cbuf, PhaseRegAlloc* ra_) const
1566 {
1567   // Unpack all value type args passed as oop and then jump to
1568   // the verified entry point (skipping the unverified entry).
1569   MacroAssembler masm(&cbuf);
1570   masm.unpack_value_args(ra_->C);
1571   masm.jmp(*_verified_entry);
1572 }
1573 
     // Size in bytes of the emitted value-type entry code.  Delegates to the
     // generic MachNode::size() measurement rather than computing it analytically.
1574 uint MachVVEPNode::size(PhaseRegAlloc* ra_) const
1575 {
1576   return MachNode::size(ra_); // too many variables; just compute it the hard way
1577 }
1578 
1579 //=============================================================================
1580 #ifndef PRODUCT
     // Pseudo-assembly listing (non-PRODUCT) of the unverified entry point:
     // load the receiver's klass (decoding it first when compressed class
     // pointers are in use), compare against the expected klass in rax, and
     // jump to the inline-cache miss stub on mismatch.
1581 void MachUEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
1582 {
1583   if (UseCompressedClassPointers) {
1584     st->print_cr("movl    rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
1585     st->print_cr("\tdecode_klass_not_null rscratch1, rscratch1");
1586     st->print_cr("\tcmpq    rax, rscratch1\t # Inline cache check");
1587   } else {
1588     st->print_cr("\tcmpq    rax, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t"
1589                  "# Inline cache check");
1590   }
1591   st->print_cr("\tjne     SharedRuntime::_ic_miss_stub");
1592   st->print_cr("\tnop\t# nops to align entry point");
1593 }
1594 #endif
1595 
1596 void MachUEPNode::emit(CodeBuffer& cbuf, PhaseRegAlloc* ra_) const


< prev index next >