587 intptr_t* entry_sp;
588 };
589 void trace_method_handle_stub_wrapper(MethodHandleStubArguments* args) {
590 trace_method_handle_stub(args->adaptername,
591 args->mh,
592 args->saved_regs,
593 args->entry_sp);
594 }
595
596 void MethodHandles::trace_method_handle(MacroAssembler* _masm, const char* adaptername) {
597 if (!TraceMethodHandles) return;
598 BLOCK_COMMENT(err_msg("trace_method_handle %s {", adaptername));
599 __ enter();
600 __ andptr(rsp, -16); // align stack if needed for FPU state
601 __ pusha();
602 __ mov(rbx, rsp); // for retreiving saved_regs
603 // Note: saved_regs must be in the entered frame for the
604 // robust stack walking implemented in trace_method_handle_stub.
605
606 // save FP result, valid at some call sites (adapter_opt_return_float, ...)
607 __ increment(rsp, -2 * wordSize);
608 if (UseSSE >= 2) {
609 __ movdbl(Address(rsp, 0), xmm0);
610 } else if (UseSSE == 1) {
611 __ movflt(Address(rsp, 0), xmm0);
612 } else {
613 __ fst_d(Address(rsp, 0));
614 }
615
616 // Incoming state:
617 // rcx: method handle
618 //
619 // To avoid calling convention issues, build a record on the stack
620 // and pass the pointer to that instead.
621 __ push(rbp); // entry_sp (with extra align space)
622 __ push(rbx); // pusha saved_regs
623 __ push(rcx); // mh
624 __ push(rcx); // slot for adaptername
625 __ movptr(Address(rsp, 0), (intptr_t) adaptername);
626 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, trace_method_handle_stub_wrapper), rsp);
627 __ increment(rsp, sizeof(MethodHandleStubArguments));
628
629 if (UseSSE >= 2) {
630 __ movdbl(xmm0, Address(rsp, 0));
631 } else if (UseSSE == 1) {
632 __ movflt(xmm0, Address(rsp, 0));
633 } else {
634 __ fld_d(Address(rsp, 0));
635 }
636 __ increment(rsp, 2 * wordSize);
637
638 __ popa();
639 __ leave();
640 BLOCK_COMMENT("} trace_method_handle");
641 }
642 #endif //PRODUCT
|
587 intptr_t* entry_sp;
588 };
// C ABI entry point with a single-pointer signature: unpacks the
// MethodHandleStubArguments record built on the stack by the generated
// tracing code and forwards its fields to trace_method_handle_stub.
589 void trace_method_handle_stub_wrapper(MethodHandleStubArguments* args) {
590 trace_method_handle_stub(args->adaptername,
591 args->mh,
592 args->saved_regs,
593 args->entry_sp);
594 }
595
// Emit code (active only under -XX:+TraceMethodHandles) that calls the C
// tracing stub via trace_method_handle_stub_wrapper for the adapter named
// 'adaptername'.  All caller state is preserved: GP registers via
// pusha/popa, and the floating-point result via an explicit save/restore
// around the call.
596 void MethodHandles::trace_method_handle(MacroAssembler* _masm, const char* adaptername) {
597 if (!TraceMethodHandles) return;
598 BLOCK_COMMENT(err_msg("trace_method_handle %s {", adaptername));
599 __ enter();
600 __ andptr(rsp, -16); // align stack if needed for FPU state
601 __ pusha();
602 __ mov(rbx, rsp); // for retreiving saved_regs
603 // Note: saved_regs must be in the entered frame for the
604 // robust stack walking implemented in trace_method_handle_stub.
605
606 // save FP result, valid at some call sites (adapter_opt_return_float, ...)
// Grow the stack with decrement(), not increment() of a negative amount.
607 __ decrement(rsp, 2 * wordSize);
// x86-64 always has SSE2 and returns the FP result in xmm0; the
// UseSSE dispatch (including the x87 fst_d fallback) is 32-bit only.
608 #ifdef _LP64
609 __ movdbl(Address(rsp, 0), xmm0);
610 #else
611 if (UseSSE >= 2) {
612 __ movdbl(Address(rsp, 0), xmm0);
613 } else if (UseSSE == 1) {
614 __ movflt(Address(rsp, 0), xmm0);
615 } else {
616 __ fst_d(Address(rsp, 0));
617 }
618 #endif // LP64
619
620 // Incoming state:
621 // rcx: method handle
622 //
623 // To avoid calling convention issues, build a record on the stack
624 // and pass the pointer to that instead.
// The pushes mirror MethodHandleStubArguments in reverse field order
// (the stack grows down).
625 __ push(rbp); // entry_sp (with extra align space)
626 __ push(rbx); // pusha saved_regs
627 __ push(rcx); // mh
628 __ push(rcx); // slot for adaptername
629 __ movptr(Address(rsp, 0), (intptr_t) adaptername);
630 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, trace_method_handle_stub_wrapper), rsp);
// Pop the argument record off the stack.
631 __ increment(rsp, sizeof(MethodHandleStubArguments));
632
// Restore the FP result saved above, mirroring the save sequence.
633 #ifdef _LP64
634 __ movdbl(xmm0, Address(rsp, 0));
635 #else
636 if (UseSSE >= 2) {
637 __ movdbl(xmm0, Address(rsp, 0));
638 } else if (UseSSE == 1) {
639 __ movflt(xmm0, Address(rsp, 0));
640 } else {
641 __ fld_d(Address(rsp, 0));
642 }
643 #endif // LP64
644 __ increment(rsp, 2 * wordSize);
645
646 __ popa();
647 __ leave();
648 BLOCK_COMMENT("} trace_method_handle");
649 }
650 #endif //PRODUCT
|