365 __ lea(rsp, rsp_after_call);
366
367 #ifdef ASSERT
368 // verify that threads correspond
369 {
370 Label L, S;
371 __ cmpptr(r15_thread, thread);
372 __ jcc(Assembler::notEqual, S);
373 __ get_thread(rbx);
374 __ cmpptr(r15_thread, rbx);
375 __ jcc(Assembler::equal, L);
376 __ bind(S);
377 __ jcc(Assembler::equal, L);
378 __ stop("StubRoutines::call_stub: threads must correspond");
379 __ bind(L);
380 }
381 #endif
382
383 // restore regs belonging to calling function
384 #ifdef _WIN64
385 for (int i = 15; i >= 6; i--) {
386 __ movdqu(as_XMMRegister(i), xmm_save(i));
387 }
388 #endif
389 __ movptr(r15, r15_save);
390 __ movptr(r14, r14_save);
391 __ movptr(r13, r13_save);
392 __ movptr(r12, r12_save);
393 __ movptr(rbx, rbx_save);
394
395 #ifdef _WIN64
396 __ movptr(rdi, rdi_save);
397 __ movptr(rsi, rsi_save);
398 #else
399 __ ldmxcsr(mxcsr_save);
400 #endif
401
402 // restore rsp
403 __ addptr(rsp, -rsp_after_call_off * wordSize);
404
405 // return
|
// Epilogue of the call stub: rewind rsp to the post-call layout, sanity-check
// the thread register, restore callee-saved registers, and unwind for return.
365 __ lea(rsp, rsp_after_call);
366
367 #ifdef ASSERT
368 // verify that threads correspond
369 {
370 Label L, S;
// Fast path: the cached thread register still matches the saved thread.
371 __ cmpptr(r15_thread, thread);
372 __ jcc(Assembler::notEqual, S);
// Slow path: re-derive the current thread and compare against r15_thread.
373 __ get_thread(rbx);
374 __ cmpptr(r15_thread, rbx);
375 __ jcc(Assembler::equal, L);
376 __ bind(S);
// NOTE(review): this jcc re-tests the flags left by the last cmpptr executed
// on the path taken; if they disagree we fall through into the stop below.
377 __ jcc(Assembler::equal, L);
378 __ stop("StubRoutines::call_stub: threads must correspond");
379 __ bind(L);
380 }
381 #endif
382
383 // restore regs belonging to calling function
384 #ifdef _WIN64
// xmm6-xmm15 are nonvolatile (callee-saved) in the Windows x64 ABI; with
// AVX-512 enabled (UseAVX > 2) xmm16-xmm31 were saved too — presumably the
// prologue mirrors this bound; confirm against the matching save loop.
385 int xmm_ub = 15;
386 if (UseAVX > 2) {
387 xmm_ub = 31;
388 }
389 // emit the restores for xmm regs
390 for (int i = 6; i <= xmm_ub; i++) {
391 __ movdqu(as_XMMRegister(i), xmm_save(i));
392 }
393 #endif
// Restore general-purpose callee-saved registers from their stack save slots.
394 __ movptr(r15, r15_save);
395 __ movptr(r14, r14_save);
396 __ movptr(r13, r13_save);
397 __ movptr(r12, r12_save);
398 __ movptr(rbx, rbx_save);
399
400 #ifdef _WIN64
// rdi/rsi are callee-saved on Windows x64 only.
401 __ movptr(rdi, rdi_save);
402 __ movptr(rsi, rsi_save);
403 #else
// Restore the caller's MXCSR state on System V targets.
404 __ ldmxcsr(mxcsr_save);
405 #endif
406
407 // restore rsp
408 __ addptr(rsp, -rsp_after_call_off * wordSize);
409
410 // return
|