void BoxLockNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
  MacroAssembler _masm(&cbuf);
  int offset = ra_->reg2offset(in_RegMask(0).find_first_elem()) + STACK_BIAS;
  int reg = ra_->get_encode(this);

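  // SPARC add takes only a 13-bit signed immediate (-4096..4095), so larger
  // frame offsets must first be materialized in a scratch register (O7).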
  if (Assembler::is_simm13(offset)) {
    __ add(SP, offset, reg_to_register_object(reg));
  } else {
    __ set(offset, O7);
    __ add(SP, O7, reg_to_register_object(reg));
  }
}

uint BoxLockNode::size(PhaseRegAlloc *ra_) const {
  // BoxLockNode is not a MachNode, so we can't just call MachNode::size(ra_)
  assert(ra_ == ra_->C->regalloc(), "sanity");
  return ra_->C->scratch_emit_size(this);
}
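// scratch_emit_size() assembles the node into the compiler's scratch buffer
// and returns the number of bytes produced, so the reported size always
// matches what emit() above generates.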

//=============================================================================

// emit call stub, compiled java to interpreter
void emit_java_to_interp(CodeBuffer &cbuf) {

  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1
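  // Both instructions are placeholders: the metadata load and the jump are
  // rewritten later, through their NativeMovConstReg/NativeJump wrappers,
  // once the call site is bound to a concrete interpreter entry.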

  address mark = cbuf.insts_mark();  // get mark within main instrs section

  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(Compile::MAX_stubs_size);
  if (base == NULL) return;  // CodeBuffer::expand failed

  // static stub relocation stores the instruction address of the call
  __ relocate(static_stub_Relocation::spec(mark));

  __ set_metadata(NULL, reg_to_register_object(Matcher::inline_cache_reg_encode()));

  __ set_inst_mark();
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);

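  // SPARC delayed control transfer: the instruction after the jump executes
  // in the branch delay slot, so fill it with a nop.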
  __ delayed()->nop();

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
}

// size of call stub, compiled java to interpreter
uint size_java_to_interp() {
  // This doesn't need to be accurate but it must be larger than or equal to
  // the real size of the stub.
  return (NativeMovConstReg::instruction_size +  // sethi/setlo;
          NativeJump::instruction_size +         // sethi; jmp; nop
          (TraceJumps ? 20 * BytesPerInstWord : 0));
}
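// A rough breakdown of the bound: set_metadata expands to a NativeMovConstReg
// (sethi plus low-word arithmetic) and the JUMP/nop pair to a NativeJump;
// the TraceJumps debugging flag pads every jump with tracing instructions,
// hence the generous slack.
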
// relocation entries for call stub, compiled java to interpreter
uint reloc_java_to_interp() {
  // Only needs to be an upper bound: emit_java_to_interp adds 4 relocation
  // entries and Java_Static_Call adds 1 more.
  return 10;
}


//=============================================================================
#ifndef PRODUCT
void MachUEPNode::format(PhaseRegAlloc *ra_, outputStream *st) const {
  st->print_cr("\nUEP:");
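  // The unverified entry point re-checks the inline cache: load the receiver's
  // klass into G5, compare it with the expected klass the caller passed in G3,
  // and trap to the IC-miss handler if they differ.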
#ifdef _LP64
  if (UseCompressedKlassPointers) {
    assert(Universe::heap() != NULL, "java heap should be initialized");
    st->print_cr("\tLDUW [R_O0 + oopDesc::klass_offset_in_bytes],R_G5\t! Inline cache check - compressed klass");
    st->print_cr("\tSLL R_G5,3,R_G5");
    if (Universe::narrow_klass_base() != NULL)
      st->print_cr("\tADD R_G5,R_G6_heap_base,R_G5");
  } else {
    st->print_cr("\tLDX [R_O0 + oopDesc::klass_offset_in_bytes],R_G5\t! Inline cache check");
  }
  st->print_cr("\tCMP R_G5,R_G3");
  st->print   ("\tTne xcc,R_G0+ST_RESERVED_FOR_USER_0+2");
#else // _LP64
  st->print_cr("\tLDUW [R_O0 + oopDesc::klass_offset_in_bytes],R_G5\t! Inline cache check");
  st->print_cr("\tCMP R_G5,R_G3");
  st->print   ("\tTne icc,R_G0+ST_RESERVED_FOR_USER_0+2");
#endif // _LP64
}
#endif // !PRODUCT

//=============================================================================

enc_class Java_To_Runtime (method meth) %{    // CALL Java_To_Runtime
  // CALL directly to the runtime
  // The user of this is responsible for ensuring that R_L7 is empty (killed).
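  // preserve_g2 asks the call sequence to save and restore G2, the thread
  // register (G2_thread) on SPARC, around the runtime call.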
  emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type,
                  /*preserve_g2=*/true);
%}

enc_class preserve_SP %{
  MacroAssembler _masm(&cbuf);
  __ mov(SP, L7_mh_SP_save);
%}

enc_class restore_SP %{
  MacroAssembler _masm(&cbuf);
  __ mov(L7_mh_SP_save, SP);
%}
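// These two encodings bracket method-handle call sites: a method handle
// adapter may adjust SP, so it is stashed in L7_mh_SP_save before the call
// and restored afterwards, keeping the caller's frame layout intact.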

enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
  // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
  // who we intended to call.
  if (!_method) {
    emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
  } else if (_optimized_virtual) {
    emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
  } else {
    emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
  }
  if (_method) {  // Emit stub for static call.
    emit_java_to_interp(cbuf);
  }
%}
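// The relocation type picked above drives later patching: runtime_call sites
// (no _method) are never rebound, while static and optimized-virtual call
// sites get a companion java-to-interp stub so the runtime can lazily switch
// the call between interpreted and compiled entry points.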

enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
  MacroAssembler _masm(&cbuf);
  __ set_inst_mark();
  int vtable_index = this->_vtable_index;
  // MachCallDynamicJavaNode::ret_addr_offset uses this same test
  if (vtable_index < 0) {
    // must be invalid_vtable_index, not nonvirtual_vtable_index
    assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
    Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
    assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
    assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
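    // ic_call() emits the call with a virtual-call relocation so the inline
    // cache can move between the clean, monomorphic, and megamorphic states.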
    __ ic_call((address)$meth$$method);
  } else {
    assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
    // Just go thru the vtable
    // get receiver klass (receiver already checked for non-null)
    // If we end up going thru a c2i adapter, the interpreter expects the method in G5