< prev index next >

src/cpu/ppc/vm/ppc.ad

Print this page




1154 
1155 %} // end source_hpp
1156 
1157 source %{
1158 
1159 // Emit a trampoline stub for a call to a target which is too far away.
1160 //
1161 // code sequences:
1162 //
1163 // call-site:
1164 //   branch-and-link to <destination> or <trampoline stub>
1165 //
1166 // Related trampoline stub for this call-site in the stub section:
1167 //   load the call target from the constant pool
1168 //   branch via CTR (LR/link still points to the call-site above)
1169 
1170 void CallStubImpl::emit_trampoline_stub(MacroAssembler &_masm, int destination_toc_offset, int insts_call_instruction_offset) {
1171   // Start the stub.
1172   address stub = __ start_a_stub(Compile::MAX_stubs_size/2);
1173   if (stub == NULL) {
1174     Compile::current()->env()->record_out_of_memory_failure();
1175     return;
1176   }
1177 
1178   // For java_to_interp stubs we use R11_scratch1 as scratch register
1179   // and in call trampoline stubs we use R12_scratch2. This way we
1180   // can distinguish them (see is_NativeCallTrampolineStub_at()).
1181   Register reg_scratch = R12_scratch2;
1182 
1183   // Create a trampoline stub relocation which relates this trampoline stub
1184   // with the call instruction at insts_call_instruction_offset in the
1185   // instructions code-section.
1186   __ relocate(trampoline_stub_Relocation::spec(__ code()->insts()->start() + insts_call_instruction_offset));
1187   const int stub_start_offset = __ offset();
1188 
1189   // Now, create the trampoline stub's code:
1190   // - load the TOC
1191   // - load the call target from the constant pool
1192   // - call
1193   __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
1194   __ ld_largeoffset_unchecked(reg_scratch, destination_toc_offset, reg_scratch, false);


1232 // Emit a branch-and-link instruction that branches to a trampoline.
1233 // - Remember the offset of the branch-and-link instruction.
1234 // - Add a relocation at the branch-and-link instruction.
1235 // - Emit a branch-and-link.
1236 // - Remember the return pc offset.
1237 EmitCallOffsets emit_call_with_trampoline_stub(MacroAssembler &_masm, address entry_point, relocInfo::relocType rtype) {
1238   EmitCallOffsets offsets = { -1, -1 };
1239   const int start_offset = __ offset();
1240   offsets.insts_call_instruction_offset = __ offset();
1241 
1242   // No entry point given, use the current pc.
1243   if (entry_point == NULL) entry_point = __ pc();
1244 
1245   if (!Compile::current()->in_scratch_emit_size()) {
1246     // Put the entry point as a constant into the constant pool.
1247     const address entry_point_toc_addr   = __ address_constant(entry_point, RelocationHolder::none);
1248     const int     entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
1249 
1250     // Emit the trampoline stub which will be related to the branch-and-link below.
1251     CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
1252     if (Compile::current()->env()->failing()) { return offsets; } // Code cache may be full.
1253     __ relocate(rtype);
1254   }
1255 
1256   // Note: At this point we do not have the address of the trampoline
1257   // stub, and the entry point might be too far away for bl, so __ pc()
1258   // serves as dummy and the bl will be patched later.
1259   __ bl((address) __ pc());
1260 
1261   offsets.ret_addr_offset = __ offset() - start_offset;
1262 
1263   return offsets;
1264 }
1265 
1266 //=============================================================================
1267 
1268 // Factory for creating loadConL* nodes for large/small constant pool.
1269 
1270 static inline jlong replicate_immF(float con) {
1271   // Replicate float con 2 times and pack into vector.
1272   int val = *((int*)&con);


3471     address entry_point = (address)$meth$$method;
3472 
3473     if (!_method) {
3474       // A call to a runtime wrapper, e.g. new, new_typeArray_Java, uncommon_trap.
3475       emit_call_with_trampoline_stub(_masm, entry_point, relocInfo::runtime_call_type);
3476     } else {
3477       // Remember the offset not the address.
3478       const int start_offset = __ offset();
3479       // The trampoline stub.
3480       if (!Compile::current()->in_scratch_emit_size()) {
3481         // No entry point given, use the current pc.
3482         // Make sure the branch fits into the bl instruction's limited reach.
3483         if (entry_point == 0) entry_point = __ pc();
3484 
3485         // Put the entry point as a constant into the constant pool.
3486         const address entry_point_toc_addr   = __ address_constant(entry_point, RelocationHolder::none);
3487         const int     entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
3488 
3489         // Emit the trampoline stub which will be related to the branch-and-link below.
3490         CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
3491         if (Compile::current()->env()->failing()) { return; } // Code cache may be full.
3492         __ relocate(_optimized_virtual ?
3493                     relocInfo::opt_virtual_call_type : relocInfo::static_call_type);
3494       }
3495 
3496       // The real call.
3497       // Note: At this point we do not have the address of the trampoline
3498       // stub, and the entry point might be too far away for bl, so __ pc()
3499       // serves as dummy and the bl will be patched later.
3500       cbuf.set_insts_mark();
3501       __ bl(__ pc());  // Emits a relocation.
3502 
3503       // The stub for call to interpreter.
3504       CompiledStaticCall::emit_to_interp_stub(cbuf);




3505     }
3506   %}
3507 
3508   // Emit a method handle call.
3509   //
3510   // Method handle calls from compiled to compiled are going through a
3511   // c2i -> i2c adapter, extending the frame for their arguments. The
3512   // caller, however, returns directly to the compiled callee, which has
3513   // to cope with the extended frame. We restore the original frame by
3514   // loading the caller's sp and adding the calculated framesize.
3515   enc_class enc_java_handle_call(method meth) %{
3516     // TODO: PPC port $archOpcode(ppc64Opcode_compound);
3517 
3518     MacroAssembler _masm(&cbuf);
3519     address entry_point = (address)$meth$$method;
3520 
3521     // Remember the offset not the address.
3522     const int start_offset = __ offset();
3523     // The trampoline stub.
3524     if (!ra_->C->in_scratch_emit_size()) {


3529       // Put the entry point as a constant into the constant pool.
3530       const address entry_point_toc_addr   = __ address_constant(entry_point, RelocationHolder::none);
3531       const int     entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
3532 
3533       // Emit the trampoline stub which will be related to the branch-and-link below.
3534       CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
3535       if (ra_->C->env()->failing()) { return; } // Code cache may be full.
3536       assert(_optimized_virtual, "methodHandle call should be a virtual call");
3537       __ relocate(relocInfo::opt_virtual_call_type);
3538     }
3539 
3540     // The real call.
3541     // Note: At this point we do not have the address of the trampoline
3542     // stub, and the entry point might be too far away for bl, so __ pc()
3543     // serves as dummy and the bl will be patched later.
3544     cbuf.set_insts_mark();
3545     __ bl(__ pc());  // Emits a relocation.
3546 
3547     assert(_method, "execute next statement conditionally");
3548     // The stub for call to interpreter.
3549     CompiledStaticCall::emit_to_interp_stub(cbuf);




3550 
3551     // Restore original sp.
3552     __ ld(R11_scratch1, 0, R1_SP); // Load caller sp.
3553     const long framesize = ra_->C->frame_slots() << LogBytesPerInt;
3554     unsigned int bytes = (unsigned int)framesize;
3555     long offset = Assembler::align_addr(bytes, frame::alignment_in_bytes);
3556     if (Assembler::is_simm(-offset, 16)) {
3557       __ addi(R1_SP, R11_scratch1, -offset);
3558     } else {
3559       __ load_const_optimized(R12_scratch2, -offset);
3560       __ add(R1_SP, R11_scratch1, R12_scratch2);
3561     }
3562 #ifdef ASSERT
3563   __ ld(R12_scratch2, 0, R1_SP); // Load from unextended_sp.
3564   __ cmpd(CCR0, R11_scratch1, R12_scratch2);
3565   __ asm_assert_eq("backlink changed", 0x8000);
3566 #endif
3567     // If this assert fails, the backlink should be stored before unextending the frame.
3568 
3569     if (ra_->C->env()->failing()) {




1154 
1155 %} // end source_hpp
1156 
1157 source %{
1158 
1159 // Emit a trampoline stub for a call to a target which is too far away.
1160 //
1161 // code sequences:
1162 //
1163 // call-site:
1164 //   branch-and-link to <destination> or <trampoline stub>
1165 //
1166 // Related trampoline stub for this call-site in the stub section:
1167 //   load the call target from the constant pool
1168 //   branch via CTR (LR/link still points to the call-site above)
1169 
1170 void CallStubImpl::emit_trampoline_stub(MacroAssembler &_masm, int destination_toc_offset, int insts_call_instruction_offset) {
1171   // Start the stub.
1172   address stub = __ start_a_stub(Compile::MAX_stubs_size/2);
1173   if (stub == NULL) {
1174     ciEnv::current()->record_failure("CodeCache is full");
1175     return;
1176   }
1177 
1178   // For java_to_interp stubs we use R11_scratch1 as scratch register
1179   // and in call trampoline stubs we use R12_scratch2. This way we
1180   // can distinguish them (see is_NativeCallTrampolineStub_at()).
1181   Register reg_scratch = R12_scratch2;
1182 
1183   // Create a trampoline stub relocation which relates this trampoline stub
1184   // with the call instruction at insts_call_instruction_offset in the
1185   // instructions code-section.
1186   __ relocate(trampoline_stub_Relocation::spec(__ code()->insts()->start() + insts_call_instruction_offset));
1187   const int stub_start_offset = __ offset();
1188 
1189   // Now, create the trampoline stub's code:
1190   // - load the TOC
1191   // - load the call target from the constant pool
1192   // - call
1193   __ calculate_address_from_global_toc(reg_scratch, __ method_toc());
1194   __ ld_largeoffset_unchecked(reg_scratch, destination_toc_offset, reg_scratch, false);


1232 // Emit a branch-and-link instruction that branches to a trampoline.
1233 // - Remember the offset of the branch-and-link instruction.
1234 // - Add a relocation at the branch-and-link instruction.
1235 // - Emit a branch-and-link.
1236 // - Remember the return pc offset.
1237 EmitCallOffsets emit_call_with_trampoline_stub(MacroAssembler &_masm, address entry_point, relocInfo::relocType rtype) {
1238   EmitCallOffsets offsets = { -1, -1 };
1239   const int start_offset = __ offset();
1240   offsets.insts_call_instruction_offset = __ offset();
1241 
1242   // No entry point given, use the current pc.
1243   if (entry_point == NULL) entry_point = __ pc();
1244 
1245   if (!Compile::current()->in_scratch_emit_size()) {
1246     // Put the entry point as a constant into the constant pool.
1247     const address entry_point_toc_addr   = __ address_constant(entry_point, RelocationHolder::none);
1248     const int     entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
1249 
1250     // Emit the trampoline stub which will be related to the branch-and-link below.
1251     CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
1252     if (ciEnv::current()->failing()) { return offsets; } // Code cache may be full.
1253     __ relocate(rtype);
1254   }
1255 
1256   // Note: At this point we do not have the address of the trampoline
1257   // stub, and the entry point might be too far away for bl, so __ pc()
1258   // serves as dummy and the bl will be patched later.
1259   __ bl((address) __ pc());
1260 
1261   offsets.ret_addr_offset = __ offset() - start_offset;
1262 
1263   return offsets;
1264 }
1265 
1266 //=============================================================================
1267 
1268 // Factory for creating loadConL* nodes for large/small constant pool.
1269 
1270 static inline jlong replicate_immF(float con) {
1271   // Replicate float con 2 times and pack into vector.
1272   int val = *((int*)&con);


3471     address entry_point = (address)$meth$$method;
3472 
3473     if (!_method) {
3474       // A call to a runtime wrapper, e.g. new, new_typeArray_Java, uncommon_trap.
3475       emit_call_with_trampoline_stub(_masm, entry_point, relocInfo::runtime_call_type);
3476     } else {
3477       // Remember the offset not the address.
3478       const int start_offset = __ offset();
3479       // The trampoline stub.
3480       if (!Compile::current()->in_scratch_emit_size()) {
3481         // No entry point given, use the current pc.
3482         // Make sure the branch fits into the bl instruction's limited reach.
3483         if (entry_point == 0) entry_point = __ pc();
3484 
3485         // Put the entry point as a constant into the constant pool.
3486         const address entry_point_toc_addr   = __ address_constant(entry_point, RelocationHolder::none);
3487         const int     entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
3488 
3489         // Emit the trampoline stub which will be related to the branch-and-link below.
3490         CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
3491         if (ciEnv::current()->failing()) { return; } // Code cache may be full.
3492         __ relocate(_optimized_virtual ?
3493                     relocInfo::opt_virtual_call_type : relocInfo::static_call_type);
3494       }
3495 
3496       // The real call.
3497       // Note: At this point we do not have the address of the trampoline
3498       // stub, and the entry point might be too far away for bl, so __ pc()
3499       // serves as dummy and the bl will be patched later.
3500       cbuf.set_insts_mark();
3501       __ bl(__ pc());  // Emits a relocation.
3502 
3503       // The stub for call to interpreter.
3504       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
3505       if (stub == NULL) {
3506         ciEnv::current()->record_failure("CodeCache is full"); 
3507         return;
3508       }
3509     }
3510   %}
3511 
3512   // Emit a method handle call.
3513   //
3514   // Method handle calls from compiled to compiled are going through a
3515   // c2i -> i2c adapter, extending the frame for their arguments. The
3516   // caller, however, returns directly to the compiled callee, which has
3517   // to cope with the extended frame. We restore the original frame by
3518   // loading the caller's sp and adding the calculated framesize.
3519   enc_class enc_java_handle_call(method meth) %{
3520     // TODO: PPC port $archOpcode(ppc64Opcode_compound);
3521 
3522     MacroAssembler _masm(&cbuf);
3523     address entry_point = (address)$meth$$method;
3524 
3525     // Remember the offset not the address.
3526     const int start_offset = __ offset();
3527     // The trampoline stub.
3528     if (!ra_->C->in_scratch_emit_size()) {


3533       // Put the entry point as a constant into the constant pool.
3534       const address entry_point_toc_addr   = __ address_constant(entry_point, RelocationHolder::none);
3535       const int     entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr);
3536 
3537       // Emit the trampoline stub which will be related to the branch-and-link below.
3538       CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
3539       if (ra_->C->env()->failing()) { return; } // Code cache may be full.
3540       assert(_optimized_virtual, "methodHandle call should be a virtual call");
3541       __ relocate(relocInfo::opt_virtual_call_type);
3542     }
3543 
3544     // The real call.
3545     // Note: At this point we do not have the address of the trampoline
3546     // stub, and the entry point might be too far away for bl, so __ pc()
3547     // serves as dummy and the bl will be patched later.
3548     cbuf.set_insts_mark();
3549     __ bl(__ pc());  // Emits a relocation.
3550 
3551     assert(_method, "execute next statement conditionally");
3552     // The stub for call to interpreter.
3553     address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
3554     if (stub == NULL) {
3555       ciEnv::current()->record_failure("CodeCache is full"); 
3556       return;
3557     }
3558 
3559     // Restore original sp.
3560     __ ld(R11_scratch1, 0, R1_SP); // Load caller sp.
3561     const long framesize = ra_->C->frame_slots() << LogBytesPerInt;
3562     unsigned int bytes = (unsigned int)framesize;
3563     long offset = Assembler::align_addr(bytes, frame::alignment_in_bytes);
3564     if (Assembler::is_simm(-offset, 16)) {
3565       __ addi(R1_SP, R11_scratch1, -offset);
3566     } else {
3567       __ load_const_optimized(R12_scratch2, -offset);
3568       __ add(R1_SP, R11_scratch1, R12_scratch2);
3569     }
3570 #ifdef ASSERT
3571   __ ld(R12_scratch2, 0, R1_SP); // Load from unextended_sp.
3572   __ cmpd(CCR0, R11_scratch1, R12_scratch2);
3573   __ asm_assert_eq("backlink changed", 0x8000);
3574 #endif
3575     // If this assert fails, the backlink should be stored before unextending the frame.
3576 
3577     if (ra_->C->env()->failing()) {


< prev index next >