< prev index next >

src/cpu/x86/vm/sharedRuntime_x86_32.cpp

Print this page




 492         regs[i].set2(xmm0->as_VMReg());
 493       } else if( freg_arg1 == (uint)i ) {
 494         regs[i].set2(xmm1->as_VMReg());
 495       } else {
 496         regs[i].set2(VMRegImpl::stack2reg(dstack));
 497         dstack += 2;
 498       }
 499       break;
 500     case T_VOID: regs[i].set_bad(); break;
 501       break;
 502     default:
 503       ShouldNotReachHere();
 504       break;
 505     }
 506   }
 507 
 508   // return value can be odd number of VMRegImpl stack slots make multiple of 2
 509   return round_to(stack, 2);
 510 }
 511 









 512 // Patch the callers callsite with entry to compiled code if it exists.
 513 static void patch_callers_callsite(MacroAssembler *masm) {
 514   Label L;
 515   __ cmpptr(Address(rbx, in_bytes(Method::code_offset())), (int32_t)NULL_WORD);
 516   __ jcc(Assembler::equal, L);
 517   // Schedule the branch target address early.
 518   // Call into the VM to patch the caller, then jump to compiled callee
 519   // rax, isn't live so capture return address while we easily can
 520   __ movptr(rax, Address(rsp, 0));
 521   __ pusha();
 522   __ pushf();
 523 
 524   if (UseSSE == 1) {
 525     __ subptr(rsp, 2*wordSize);
 526     __ movflt(Address(rsp, 0), xmm0);
 527     __ movflt(Address(rsp, wordSize), xmm1);
 528   }
 529   if (UseSSE >= 2) {
 530     __ subptr(rsp, 4*wordSize);
 531     __ movdbl(Address(rsp, 0), xmm0);




 492         regs[i].set2(xmm0->as_VMReg());
 493       } else if( freg_arg1 == (uint)i ) {
 494         regs[i].set2(xmm1->as_VMReg());
 495       } else {
 496         regs[i].set2(VMRegImpl::stack2reg(dstack));
 497         dstack += 2;
 498       }
 499       break;
 500     case T_VOID: regs[i].set_bad(); break;
 501       break;
 502     default:
 503       ShouldNotReachHere();
 504       break;
 505     }
 506   }
 507 
 508   // return value can be odd number of VMRegImpl stack slots make multiple of 2
 509   return round_to(stack, 2);
 510 }
 511 
// Platform limits on how many integer / float registers may carry a Java
// return value; both declared as 1 on this 32-bit x86 port.
const uint SharedRuntime::java_return_convention_max_int = 1;
const uint SharedRuntime::java_return_convention_max_float = 1;
// Presumably the return-value counterpart of java_calling_convention
// (assign VMRegs in regs[] for the types in sig_bt) — TODO confirm against
// other platform ports.  Not implemented on x86_32: any call lands in
// Unimplemented(), so the trailing "return 0" only satisfies the compiler
// and is never meaningfully reached.
int SharedRuntime::java_return_convention(const BasicType *sig_bt,
                                          VMRegPair *regs,
                                          int total_args_passed) {
  Unimplemented();
  return 0;
}
 520 
 521 // Patch the callers callsite with entry to compiled code if it exists.
 522 static void patch_callers_callsite(MacroAssembler *masm) {
 523   Label L;
 524   __ cmpptr(Address(rbx, in_bytes(Method::code_offset())), (int32_t)NULL_WORD);
 525   __ jcc(Assembler::equal, L);
 526   // Schedule the branch target address early.
 527   // Call into the VM to patch the caller, then jump to compiled callee
 528   // rax, isn't live so capture return address while we easily can
 529   __ movptr(rax, Address(rsp, 0));
 530   __ pusha();
 531   __ pushf();
 532 
 533   if (UseSSE == 1) {
 534     __ subptr(rsp, 2*wordSize);
 535     __ movflt(Address(rsp, 0), xmm0);
 536     __ movflt(Address(rsp, wordSize), xmm1);
 537   }
 538   if (UseSSE >= 2) {
 539     __ subptr(rsp, 4*wordSize);
 540     __ movdbl(Address(rsp, 0), xmm0);


< prev index next >