3546 (_load_ic_node != NULL && _load_ic_node->_cbuf_insts_offset != -1),
3547 "must contain instruction offset");
3548 const int virtual_call_oop_addr_offset = _load_ic_hi_node != NULL
3549 ? _load_ic_hi_node->_cbuf_insts_offset
3550 : _load_ic_node->_cbuf_insts_offset;
3551 const address virtual_call_oop_addr = __ addr_at(virtual_call_oop_addr_offset);
3552 assert(MacroAssembler::is_load_const_from_method_toc_at(virtual_call_oop_addr),
3553 "should be load from TOC");
3554
3555 __ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr));
3556 }
3557
3558 // At this point I do not have the address of the trampoline stub,
3559 // and the entry point might be too far away for bl. Pc() serves
3560 // as dummy and bl will be patched later.
3561 __ bl((address) __ pc());
3562 %}
3563
3564 // postalloc expand emitter for virtual calls.
3565 enc_class postalloc_expand_java_dynamic_call_sched(method meth, iRegLdst toc) %{
// Postalloc-expand emitter for virtual calls: replaces this node by the
// nodes that load the inline cache (IC) into R19 plus a scheduled dynamic
// call node, so the IC load can be scheduled separately from the call.
3566 // Toc is in return address field, though not accessible via postalloc_expand
3567 // functionality.
// NOTE(review): 'toc' is declared here, but the tuple-create call below reads
// 'n_toc' -- confirm which identifier is actually in scope at this point.
3568 Node *toc = in(TypeFunc::ReturnAdr);
3569 
3570 // Create the nodes for loading the IC from the TOC.
// The IC constant starts out as Universe::non_oop_word() and is patched later.
3571 loadConLNodesTuple loadConLNodes_IC =
3572 loadConLNodesTuple_create(C, ra_, n_toc, new (C) immLOper((jlong)Universe::non_oop_word()),
3573 OptoReg::Name(R19_H_num), OptoReg::Name(R19_num));
3574 
3575 // Create the call node.
3576 CallDynamicJavaDirectSchedNode *call = new (C) CallDynamicJavaDirectSchedNode();
// Copy all attributes of the matched node over to the replacement node.
3577 call->_method_handle_invoke = _method_handle_invoke;
3578 call->_vtable_index = _vtable_index;
3579 call->_method = _method;
3580 call->_bci = _bci;
3581 call->_optimized_virtual = _optimized_virtual;
3582 call->_tf = _tf;
3583 call->_entry_point = _entry_point;
3584 call->_cnt = _cnt;
3585 call->_argsize = _argsize;
3586 call->_oop_map = _oop_map;
3587 call->_jvms = _jvms;
3588 call->_jvmadj = _jvmadj;
3589 call->_in_rms = _in_rms;
3590 call->_nesting = _nesting;
3591 
3592 // New call needs all inputs of old call.
3593 // Req...
3594 for (uint i = 0; i < req(); ++i) {
3595 if (i != TypeFunc::ReturnAdr) {
3596 call->add_req(in(i));
3597 } else {
3598 // The expanded node does not need toc any more.
3599 call->add_req(C->top());
3600 }
3601 }
3602 // ...as well as prec
3603 for (uint i = req(); i < len() ; ++i) {
3604 call->add_prec(in(i));
3605 }
3606 
3607 // The cache must come before the call, but it's not a req edge.
3608 // GL: actually it should be a req edge to express that the
3609 // register must be live in the Call. But as R19 is declared to be
3610 // the inline_cache_reg that's fine.
3611 call->add_prec(loadConLNodes_IC._last);
3612 // Remember nodes loading the inline cache into r19.
// The call node needs these to find the IC-load instruction offsets later.
3613 call->_load_ic_hi_node = loadConLNodes_IC._large_hi;
3614 call->_load_ic_node = loadConLNodes_IC._small;
3615 
3616 // Operands for new nodes.
3617 call->_opnds[0] = _opnds[0];
3618 call->_opnds[1] = _opnds[1];
3619 
3620 // Only the inline cache is associated with a register.
3621 assert(Matcher::inline_cache_reg() == OptoReg::Name(R19_num), "ic reg should be R19");
3622 
3623 // Push new nodes.
// _large_hi may be NULL depending on which constant-load variant was chosen.
3624 if (loadConLNodes_IC._large_hi) nodes->push(loadConLNodes_IC._large_hi);
3625 if (loadConLNodes_IC._last) nodes->push(loadConLNodes_IC._last);
3626 nodes->push(call);
3627 %}
3628
3629 // Compound version of call dynamic
3630 enc_class enc_java_dynamic_call(method meth, iRegLdst toc) %{
// Emitter for dynamic (virtual) Java calls. The emission code below is
// disabled (#if 0); the encoding currently ends in Unimplemented().
3631 // TODO: PPC port $archOpcode(ppc64Opcode_compound);
3632 MacroAssembler _masm(&cbuf);
3633 int start_offset = __ offset();
3634 
// Use the constant-table base when register-allocation info is available,
// otherwise fall back to the global TOC register.
3635 Register Rtoc = (ra_) ? $constanttablebase : R2_TOC;
3636 #if 0
// Disabled draft: inline-cache dispatch (_vtable_index < 0) vs. vtable
// dispatch, kept for reference until the encoding is finished.
3637 if (_vtable_index < 0) {
3638 // Must be invalid_vtable_index, not nonvirtual_vtable_index.
3639 assert(_vtable_index == Method::invalid_vtable_index, "correct sentinel value");
3640 Register ic_reg = as_Register(Matcher::inline_cache_reg_encode());
3641 AddressLiteral oop = __ allocate_metadata_address((Metadata *)Universe::non_oop_word());
3642 
3643 address virtual_call_oop_addr = __ pc();
3644 __ load_const_from_method_toc(ic_reg, oop, Rtoc);
3645 // CALL to fixup routine. Fixup routine uses ScopeDesc info
3646 // to determine who we intended to call.
3647 __ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr));
3648 emit_call_with_trampoline_stub(_masm, (address)$meth$$method, relocInfo::none);
3649 assert(((MachCallDynamicJavaNode*)this)->ret_addr_offset() == __ offset() - start_offset,
3650 "Fix constant in ret_addr_offset()");
3651 } else {
3652 assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
3653 // Go thru the vtable. Get receiver klass. Receiver already
3654 // checked for non-null. If we'll go thru a C2I adapter, the
3655 // interpreter expects method in R19_method.
3656 
3657 __ load_klass(R11_scratch1, R3);
3658 
3659 int entry_offset = InstanceKlass::vtable_start_offset() + _vtable_index * vtableEntry::size();
3660 int v_off = entry_offset * wordSize + vtableEntry::method_offset_in_bytes();
3661 __ li(R19_method, v_off);
3662 __ ldx(R19_method/*method oop*/, R19_method/*method offset*/, R11_scratch1/*class*/);
3663 // NOTE: for vtable dispatches, the vtable entry will never be
3664 // null. However it may very well end up in handle_wrong_method
3665 // if the method is abstract for the particular class.
3666 __ ld(R11_scratch1, in_bytes(Method::from_compiled_offset()), R19_method);
3667 // Call target. Either compiled code or C2I adapter.
3668 __ mtctr(R11_scratch1);
3669 __ bctrl();
3670 if (((MachCallDynamicJavaNode*)this)->ret_addr_offset() != __ offset() - start_offset) {
3671 tty->print(" %d, %d\n", ((MachCallDynamicJavaNode*)this)->ret_addr_offset(),__ offset() - start_offset);
3672 }
3673 assert(((MachCallDynamicJavaNode*)this)->ret_addr_offset() == __ offset() - start_offset,
3674 "Fix constant in ret_addr_offset()");
3675 }
3676 #endif
3677 Unimplemented(); // ret_addr_offset not yet fixed. Depends on compressed oops (load klass!).
3678 %}
3679
3680 // a runtime call
3681 enc_class enc_java_to_runtime_call (method meth) %{
// Emitter for calls from compiled Java code into the C++ runtime.
3682 // TODO: PPC port $archOpcode(ppc64Opcode_compound);
3683 
3684 MacroAssembler _masm(&cbuf);
3685 const address start_pc = __ pc();
3686 
3687 // The function we're going to call.
// If no target method address is given, fall back to a stack-allocated
// (empty) descriptor.
3688 FunctionDescriptor fdtemp;
3689 const FunctionDescriptor* fd = !($meth$$method) ? &fdtemp : (FunctionDescriptor*)$meth$$method;
3690 
3691 Register Rtoc = R12_scratch2;
3692 // Calculate the method's TOC.
3693 __ calculate_address_from_global_toc(Rtoc, __ method_toc());
3694 // Put entry, env, toc into the constant pool, this needs up to 3 constant
3695 // pool entries; call_c_using_toc will optimize the call.
3696 __ call_c_using_toc(fd, relocInfo::runtime_call_type, Rtoc);
// NOTE(review): the listing numbering jumps from 3696 to 3758 here, and the
// code below uses 'call', 'mtctr' and 'loadConLNodes_Entry/Env/Toc' without
// visible declarations -- lines appear to be missing from this excerpt;
// verify against the full file before relying on this block.
3758 call->_opnds[0] = _opnds[0];
3759 call->_opnds[1] = new (C) methodOper((intptr_t) fd->entry()); // may get set later
3760 
3761 // Make the new call node look like the old one.
3762 call->_name = _name;
3763 call->_tf = _tf;
3764 call->_entry_point = _entry_point;
3765 call->_cnt = _cnt;
3766 call->_argsize = _argsize;
3767 call->_oop_map = _oop_map;
3768 guarantee(!_jvms, "You must clone the jvms and adapt the offsets by fix_jvms().");
3769 call->_jvms = NULL;
3770 call->_jvmadj = _jvmadj;
3771 call->_in_rms = _in_rms;
3772 call->_nesting = _nesting;
3773 
3774 
3775 // New call needs all inputs of old call.
3776 // Req...
3777 for (uint i = 0; i < req(); ++i) {
3778 if (i != TypeFunc::ReturnAdr) {
3779 call->add_req(in(i));
3780 } else {
3781 // put the mtctr where ReturnAdr would be
3782 call->add_req(mtctr);
3783 }
3784 }
3785 
3786 // These must be required edges, as the registers are live up to
3787 // the call. Else the constants are handled as kills.
3788 call->add_req(loadConLNodes_Env._last);
3789 call->add_req(loadConLNodes_Toc._last);
3790 
3791 // ...as well as prec
3792 for (uint i = req(); i < len(); ++i) {
3793 call->add_prec(in(i));
3794 }
3795 
3796 // registers
3797 ra_->set1(mtctr->_idx, OptoReg::Name(SR_CTR_num));
3798 
3799 // Insert the new nodes.
// The _large_hi/_last members may be NULL depending on the load variant.
3800 if (loadConLNodes_Entry._large_hi) nodes->push(loadConLNodes_Entry._large_hi);
3801 if (loadConLNodes_Entry._last) nodes->push(loadConLNodes_Entry._last);
3802 if (loadConLNodes_Env._large_hi) nodes->push(loadConLNodes_Env._large_hi);
3803 if (loadConLNodes_Env._last) nodes->push(loadConLNodes_Env._last);
3804 if (loadConLNodes_Toc._large_hi) nodes->push(loadConLNodes_Toc._large_hi);
3805 if (loadConLNodes_Toc._last) nodes->push(loadConLNodes_Toc._last);
3806 nodes->push(mtctr);
3807 nodes->push(call);
3808 %}
3809 %}
3810
3811 //----------FRAME--------------------------------------------------------------
3812 // Definition of frame structure and management information.
3813
3814 frame %{
3815 // What direction does stack grow in (assumed to be same for native & Java).
3816 stack_direction(TOWARDS_LOW);
3817
3818 // These two registers define part of the calling convention between
3819 // compiled code and the interpreter.
3820
3821 // Inline Cache Register or methodOop for I2C.
3822 inline_cache_reg(R19); // R19_method
3823
3824 // Method Oop Register when calling interpreter.
3825 interpreter_method_oop_reg(R19); // R19_method
3826
3827 // Optional: name the operand used by cisc-spilling to access
3828 // [stack_pointer + offset].
3829 cisc_spilling_operand_name(indOffset);
3830
3831 // Number of stack slots consumed by a Monitor enter.
3832 sync_stack_slots((frame::jit_monitor_size / VMRegImpl::stack_slot_size));
3833
3834 // Compiled code's Frame Pointer.
3835 frame_pointer(R1); // R1_SP
3836
3837 // Interpreter stores its frame pointer in a register which is
3838 // stored to the stack by I2CAdaptors. I2CAdaptors convert from
3839 // interpreted java to compiled java.
3840 //
3841 // R14_state holds pointer to caller's cInterpreter.
6132 ins_num_consts(1);
6133 ins_field_const_toc_offset(int);
6134
6135 format %{ "ADDIS $dst, $toc, offset \t// load ptr $src from TOC (hi)" %}
6136 size(4);
6137 ins_encode( enc_load_long_constP_hi(dst, src, toc) );
6138 ins_pipe(pipe_class_default);
6139 %}
6140
6141 // Expand node for constant pool load: large offset.
6142 instruct loadConP_lo(iRegPdst dst, immP_NM src, iRegLdst base) %{
// Expand node for a constant-pool pointer load with a large (> 16-bit)
// offset: emits the low-offset LD that pairs with loadConP_hi's ADDIS.
6143 match(Set dst src);
6144 effect(TEMP base);
6145 
// Link to the hi node so the final TOC offset is known at emit time.
6146 ins_field_const_toc_offset_hi_node(loadConP_hiNode*);
6147 
6148 format %{ "LD $dst, offset, $base \t// load ptr $src from TOC (lo)" %}
6149 size(4);
6150 ins_encode %{
6151 // TODO: PPC port $archOpcode(ppc64Opcode_ld);
// During the scratch-emit sizing pass the hi node's offset is not yet
// known, so use 0; the emitted size (4) is the same either way. Compute
// the raw offset first and split out the low 16 bits at the use, matching
// the other definition of this instruct in the file.
6152 int offset = ra_->C->in_scratch_emit_size() ? 0 : _const_toc_offset_hi_node->_const_toc_offset;
6153 __ ld($dst$$Register, MacroAssembler::largeoffset_si16_si16_lo(offset), $base$$Register);
6154 %}
6155 ins_pipe(pipe_class_memory);
6156 %}
6157
6158 // Load pointer constant from constant table. Expand in case an
6159 // offset > 16 bit is needed.
6160 // Adlc adds toc node MachConstantTableBase.
6161 instruct loadConP_Ex(iRegPdst dst, immP src) %{
6162 match(Set dst src);
6163 ins_cost(MEMORY_REF_COST);
6164
6165 // This rule does not use "expand" because then
6166 // the result type is not known to be an Oop. An ADLC
6167 // enhancement will be needed to make that work - not worth it!
6168
6169 // If this instruction rematerializes, it prolongs the live range
6170 // of the toc node, causing illegal graphs.
6171 // assert(edge_from_to(_reg_node[reg_lo],def)) fails in verify_good_schedule().
6172 ins_cannot_rematerialize(true);
6173
|
3546 (_load_ic_node != NULL && _load_ic_node->_cbuf_insts_offset != -1),
3547 "must contain instruction offset");
3548 const int virtual_call_oop_addr_offset = _load_ic_hi_node != NULL
3549 ? _load_ic_hi_node->_cbuf_insts_offset
3550 : _load_ic_node->_cbuf_insts_offset;
3551 const address virtual_call_oop_addr = __ addr_at(virtual_call_oop_addr_offset);
3552 assert(MacroAssembler::is_load_const_from_method_toc_at(virtual_call_oop_addr),
3553 "should be load from TOC");
3554
3555 __ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr));
3556 }
3557
3558 // At this point I do not have the address of the trampoline stub,
3559 // and the entry point might be too far away for bl. Pc() serves
3560 // as dummy and bl will be patched later.
3561 __ bl((address) __ pc());
3562 %}
3563
3564 // postalloc expand emitter for virtual calls.
3565 enc_class postalloc_expand_java_dynamic_call_sched(method meth, iRegLdst toc) %{
// Postalloc-expand emitter for virtual calls: replaces this node by the
// nodes that load the inline cache (IC) into R19 plus a scheduled dynamic
// call node, so the IC load can be scheduled separately from the call.
3566 
3567 // Create the nodes for loading the IC from the TOC.
// The IC constant starts out as Universe::non_oop_word() and is patched later.
3568 loadConLNodesTuple loadConLNodes_IC =
3569 loadConLNodesTuple_create(C, ra_, n_toc, new (C) immLOper((jlong)Universe::non_oop_word()),
3570 OptoReg::Name(R19_H_num), OptoReg::Name(R19_num));
3571 
3572 // Create the call node.
3573 CallDynamicJavaDirectSchedNode *call = new (C) CallDynamicJavaDirectSchedNode();
// Copy all attributes of the matched node over to the replacement node.
3574 call->_method_handle_invoke = _method_handle_invoke;
3575 call->_vtable_index = _vtable_index;
3576 call->_method = _method;
3577 call->_bci = _bci;
3578 call->_optimized_virtual = _optimized_virtual;
3579 call->_tf = _tf;
3580 call->_entry_point = _entry_point;
3581 call->_cnt = _cnt;
3582 call->_argsize = _argsize;
3583 call->_oop_map = _oop_map;
3584 call->_jvms = _jvms;
3585 call->_jvmadj = _jvmadj;
3586 call->_in_rms = _in_rms;
3587 call->_nesting = _nesting;
3588 
3589 // New call needs all inputs of old call.
3590 // Req...
3591 for (uint i = 0; i < req(); ++i) {
3592 // The expanded node does not need toc any more.
3593 // Add the inline cache constant here instead. This expresses the
3594 // register of the inline cache must be live at the call.
3595 // Else we would have to adapt JVMState by -1.
3596 if (i == mach_constant_base_node_input()) {
3597 call->add_req(loadConLNodes_IC._last);
3598 } else {
3599 call->add_req(in(i));
3600 }
3601 }
3602 // ...as well as prec
3603 for (uint i = req(); i < len(); ++i) {
3604 call->add_prec(in(i));
3605 }
3606 
3607 // Remember nodes loading the inline cache into r19.
// The call node needs these to find the IC-load instruction offsets later.
3608 call->_load_ic_hi_node = loadConLNodes_IC._large_hi;
3609 call->_load_ic_node = loadConLNodes_IC._small;
3610 
3611 // Operands for new nodes.
3612 call->_opnds[0] = _opnds[0];
3613 call->_opnds[1] = _opnds[1];
3614 
3615 // Only the inline cache is associated with a register.
3616 assert(Matcher::inline_cache_reg() == OptoReg::Name(R19_num), "ic reg should be R19");
3617 
3618 // Push new nodes.
// _large_hi may be NULL depending on which constant-load variant was chosen.
3619 if (loadConLNodes_IC._large_hi) nodes->push(loadConLNodes_IC._large_hi);
3620 if (loadConLNodes_IC._last) nodes->push(loadConLNodes_IC._last);
3621 nodes->push(call);
3622 %}
3623
3624 // Compound version of call dynamic
3625 enc_class enc_java_dynamic_call(method meth, iRegLdst toc) %{
// Emitter for dynamic (virtual) Java calls. The emission code below is
// disabled (#if 0); the encoding currently ends in guarantee/Unimplemented.
3626 // TODO: PPC port $archOpcode(ppc64Opcode_compound);
3627 MacroAssembler _masm(&cbuf);
3628 int start_offset = __ offset();
3629 
// Use the constant-table base when register-allocation info is available,
// otherwise fall back to the global TOC register.
3630 Register Rtoc = (ra_) ? $constanttablebase : R2_TOC;
3631 #if 0
// Disabled draft: inline-cache dispatch (_vtable_index < 0) vs. vtable
// dispatch, kept for reference until the encoding is finished.
3632 if (_vtable_index < 0) {
3633 // Must be invalid_vtable_index, not nonvirtual_vtable_index.
3634 assert(_vtable_index == Method::invalid_vtable_index, "correct sentinel value");
3635 Register ic_reg = as_Register(Matcher::inline_cache_reg_encode());
3636 AddressLiteral meta = __ allocate_metadata_address((Metadata *)Universe::non_oop_word());
3637 
3638 address virtual_call_meta_addr = __ pc();
3639 __ load_const_from_method_toc(ic_reg, meta, Rtoc);
3640 // CALL to fixup routine. Fixup routine uses ScopeDesc info
3641 // to determine who we intended to call.
3642 __ relocate(virtual_call_Relocation::spec(virtual_call_meta_addr));
3643 emit_call_with_trampoline_stub(_masm, (address)$meth$$method, relocInfo::none);
3644 assert(((MachCallDynamicJavaNode*)this)->ret_addr_offset() == __ offset() - start_offset,
3645 "Fix constant in ret_addr_offset()");
3646 } else {
3647 assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
3648 // Go thru the vtable. Get receiver klass. Receiver already
3649 // checked for non-null. If we'll go thru a C2I adapter, the
3650 // interpreter expects method in R19_method.
3651 
3652 __ load_klass(R11_scratch1, R3);
3653 
3654 int entry_offset = InstanceKlass::vtable_start_offset() + _vtable_index * vtableEntry::size();
3655 int v_off = entry_offset * wordSize + vtableEntry::method_offset_in_bytes();
3656 __ li(R19_method, v_off);
3657 __ ldx(R19_method/*method oop*/, R19_method/*method offset*/, R11_scratch1/*class*/);
3658 // NOTE: for vtable dispatches, the vtable entry will never be
3659 // null. However it may very well end up in handle_wrong_method
3660 // if the method is abstract for the particular class.
3661 __ ld(R11_scratch1, in_bytes(Method::from_compiled_offset()), R19_method);
3662 // Call target. Either compiled code or C2I adapter.
3663 __ mtctr(R11_scratch1);
3664 __ bctrl();
3665 if (((MachCallDynamicJavaNode*)this)->ret_addr_offset() != __ offset() - start_offset) {
3666 tty->print(" %d, %d\n", ((MachCallDynamicJavaNode*)this)->ret_addr_offset(),__ offset() - start_offset);
3667 }
3668 assert(((MachCallDynamicJavaNode*)this)->ret_addr_offset() == __ offset() - start_offset,
3669 "Fix constant in ret_addr_offset()");
3670 }
3671 #endif
3672 guarantee(0, "Fix handling of toc edge: messes up derived/base pairs.");
3673 Unimplemented(); // ret_addr_offset not yet fixed. Depends on compressed oops (load klass!).
3674 %}
3675
3676 // a runtime call
3677 enc_class enc_java_to_runtime_call (method meth) %{
// Emitter for calls from compiled Java code into the C++ runtime.
3678 // TODO: PPC port $archOpcode(ppc64Opcode_compound);
3679 
3680 MacroAssembler _masm(&cbuf);
3681 const address start_pc = __ pc();
3682 
3683 // The function we're going to call.
// If no target method address is given, fall back to a stack-allocated
// (empty) descriptor.
3684 FunctionDescriptor fdtemp;
3685 const FunctionDescriptor* fd = !($meth$$method) ? &fdtemp : (FunctionDescriptor*)$meth$$method;
3686 
3687 Register Rtoc = R12_scratch2;
3688 // Calculate the method's TOC.
3689 __ calculate_address_from_global_toc(Rtoc, __ method_toc());
3690 // Put entry, env, toc into the constant pool, this needs up to 3 constant
3691 // pool entries; call_c_using_toc will optimize the call.
3692 __ call_c_using_toc(fd, relocInfo::runtime_call_type, Rtoc);
// NOTE(review): the listing numbering jumps from 3692 to 3754 here, and the
// code below uses 'call', 'mtctr' and 'loadConLNodes_Entry/Env/Toc' without
// visible declarations -- lines appear to be missing from this excerpt;
// verify against the full file before relying on this block.
3754 call->_opnds[0] = _opnds[0];
3755 call->_opnds[1] = new (C) methodOper((intptr_t) fd->entry()); // may get set later
3756 
3757 // Make the new call node look like the old one.
3758 call->_name = _name;
3759 call->_tf = _tf;
3760 call->_entry_point = _entry_point;
3761 call->_cnt = _cnt;
3762 call->_argsize = _argsize;
3763 call->_oop_map = _oop_map;
3764 guarantee(!_jvms, "You must clone the jvms and adapt the offsets by fix_jvms().");
3765 call->_jvms = NULL;
3766 call->_jvmadj = _jvmadj;
3767 call->_in_rms = _in_rms;
3768 call->_nesting = _nesting;
3769 
3770 
3771 // New call needs all inputs of old call.
3772 // Req...
3773 for (uint i = 0; i < req(); ++i) {
3774 if (i != mach_constant_base_node_input()) {
3775 call->add_req(in(i));
3776 }
3777 }
3778 
3779 // These must be required edges, as the registers are live up to
3780 // the call. Else the constants are handled as kills.
3781 call->add_req(mtctr);
3782 call->add_req(loadConLNodes_Env._last);
3783 call->add_req(loadConLNodes_Toc._last);
3784 
3785 // ...as well as prec
3786 for (uint i = req(); i < len(); ++i) {
3787 call->add_prec(in(i));
3788 }
3789 
3790 // registers
3791 ra_->set1(mtctr->_idx, OptoReg::Name(SR_CTR_num));
3792 
3793 // Insert the new nodes.
// The _large_hi/_last members may be NULL depending on the load variant.
3794 if (loadConLNodes_Entry._large_hi) nodes->push(loadConLNodes_Entry._large_hi);
3795 if (loadConLNodes_Entry._last) nodes->push(loadConLNodes_Entry._last);
3796 if (loadConLNodes_Env._large_hi) nodes->push(loadConLNodes_Env._large_hi);
3797 if (loadConLNodes_Env._last) nodes->push(loadConLNodes_Env._last);
3798 if (loadConLNodes_Toc._large_hi) nodes->push(loadConLNodes_Toc._large_hi);
3799 if (loadConLNodes_Toc._last) nodes->push(loadConLNodes_Toc._last);
3800 nodes->push(mtctr);
3801 nodes->push(call);
3802 %}
3803 %}
3804
3805 //----------FRAME--------------------------------------------------------------
3806 // Definition of frame structure and management information.
3807
3808 frame %{
3809 // What direction does stack grow in (assumed to be same for native & Java).
3810 stack_direction(TOWARDS_LOW);
3811
3812 // These two registers define part of the calling convention between
3813 // compiled code and the interpreter.
3814
3815 // Inline Cache Register or method for I2C.
3816 inline_cache_reg(R19); // R19_method
3817
3818 // Method Oop Register when calling interpreter.
3819 interpreter_method_oop_reg(R19); // R19_method
3820
3821 // Optional: name the operand used by cisc-spilling to access
3822 // [stack_pointer + offset].
3823 cisc_spilling_operand_name(indOffset);
3824
3825 // Number of stack slots consumed by a Monitor enter.
3826 sync_stack_slots((frame::jit_monitor_size / VMRegImpl::stack_slot_size));
3827
3828 // Compiled code's Frame Pointer.
3829 frame_pointer(R1); // R1_SP
3830
3831 // Interpreter stores its frame pointer in a register which is
3832 // stored to the stack by I2CAdaptors. I2CAdaptors convert from
3833 // interpreted java to compiled java.
3834 //
3835 // R14_state holds pointer to caller's cInterpreter.
6126 ins_num_consts(1);
6127 ins_field_const_toc_offset(int);
6128
6129 format %{ "ADDIS $dst, $toc, offset \t// load ptr $src from TOC (hi)" %}
6130 size(4);
6131 ins_encode( enc_load_long_constP_hi(dst, src, toc) );
6132 ins_pipe(pipe_class_default);
6133 %}
6134
6135 // Expand node for constant pool load: large offset.
6136 instruct loadConP_lo(iRegPdst dst, immP_NM src, iRegLdst base) %{
// Expand node for a constant-pool pointer load with a large (> 16-bit)
// offset: emits the low-offset LD that pairs with loadConP_hi's ADDIS.
6137 match(Set dst src);
6138 effect(TEMP base);
6139 
// Link to the hi node so the final TOC offset is known at emit time.
6140 ins_field_const_toc_offset_hi_node(loadConP_hiNode*);
6141 
6142 format %{ "LD $dst, offset, $base \t// load ptr $src from TOC (lo)" %}
6143 size(4);
6144 ins_encode %{
6145 // TODO: PPC port $archOpcode(ppc64Opcode_ld);
// During the scratch-emit sizing pass the hi node's offset is not yet
// known, so use 0; the emitted size (4) is the same either way.
6146 int offset = ra_->C->in_scratch_emit_size() ? 0 : _const_toc_offset_hi_node->_const_toc_offset;
6147 __ ld($dst$$Register, MacroAssembler::largeoffset_si16_si16_lo(offset), $base$$Register);
6148 %}
6149 ins_pipe(pipe_class_memory);
6150 %}
6151
6152 // Load pointer constant from constant table. Expand in case an
6153 // offset > 16 bit is needed.
6154 // Adlc adds toc node MachConstantTableBase.
6155 instruct loadConP_Ex(iRegPdst dst, immP src) %{
6156 match(Set dst src);
6157 ins_cost(MEMORY_REF_COST);
6158
6159 // This rule does not use "expand" because then
6160 // the result type is not known to be an Oop. An ADLC
6161 // enhancement will be needed to make that work - not worth it!
6162
6163 // If this instruction rematerializes, it prolongs the live range
6164 // of the toc node, causing illegal graphs.
6165 // assert(edge_from_to(_reg_node[reg_lo],def)) fails in verify_good_schedule().
6166 ins_cannot_rematerialize(true);
6167
|