98 __ verify_method_ptr(method);
99
100 if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
101 Label run_compiled_code;
102 // JVMTI events, such as single-stepping, are implemented partly by avoiding running
103 // compiled code in threads for which the event is enabled. Check here for
104 // interp_only_mode if these events CAN be enabled.
105
106 __ ldrb(rscratch1, Address(rthread, JavaThread::interp_only_mode_offset()));
// interp_only_mode == 0 => no JVMTI interpreter events active for this thread,
// so the compiled entry is safe; a non-zero flag must fall through to the
// interpreter entry below.  The original cbnz inverted this test, running
// compiled code exactly when interp-only mode was requested.
107 __ cbz(rscratch1, run_compiled_code);
108 __ ldr(rscratch1, Address(method, Method::interpreter_entry_offset()));
109 __ br(rscratch1);
110 __ BIND(run_compiled_code);
111 }
112
// Select the entry point appropriate to the caller's execution mode.
113 const ByteSize entry_offset = for_compiler_entry ? Method::from_compiled_offset() :
114 Method::from_interpreted_offset();
115 __ ldr(rscratch1, Address(method, entry_offset));
116 __ br(rscratch1);
117 __ bind(L_no_such_method);
// far_jump, not b(): an AArch64 unconditional branch only reaches +/-128MB,
// and the AbstractMethodError stub may lie outside that range.
118 __ far_jump(RuntimeAddress(StubRoutines::throw_AbstractMethodError_entry()));
119 }
120
121 void MethodHandles::jump_to_lambda_form(MacroAssembler* _masm,
122 Register recv, Register method_temp,
123 Register temp2,
124 bool for_compiler_entry) {
125 BLOCK_COMMENT("jump_to_lambda_form {");
126 // This is the initial entry point of a lazy method handle.
127 // After type checking, it picks up the invoker from the LambdaForm.
128 assert_different_registers(recv, method_temp, temp2);
129 assert(recv != noreg, "required register");
130 assert(method_temp == rmethod, "required register for loading method");
131
132 //NOT_PRODUCT({ FlagSetting fs(TraceMethodHandles, true); trace_method_handle(_masm, "LZMH"); });
133
134 // Load the invoker, as MH -> MH.form -> LF.vmentry
135 __ verify_oop(recv);
136 __ load_heap_oop(method_temp, Address(recv, NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())));
137 __ verify_oop(method_temp);
138 __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())));
401 rindex, rmethod,
402 temp2,
403 L_incompatible_class_change_error);
404 break;
405 }
406
407 default:
408 fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
409 break;
410 }
411
412 // live at this point: rmethod, r13 (if interpreted)
413
414 // After figuring out which concrete method to call, jump into it.
415 // Note that this works in the interpreter with no data motion.
416 // But the compiled version will require that r2_recv be shifted out.
417 __ verify_method_ptr(rmethod);
418 jump_from_method_handle(_masm, rmethod, temp1, for_compiler_entry);
// L_incompatible_class_change_error is only bound for the interface case;
// emit its out-of-line handler here, after the main dispatch path.
419 if (iid == vmIntrinsics::_linkToInterface) {
420 __ bind(L_incompatible_class_change_error);
// far_jump, not b(): the ICCE stub may be beyond the +/-128MB reach of a
// plain AArch64 unconditional branch.
421 __ far_jump(RuntimeAddress(StubRoutines::throw_IncompatibleClassChangeError_entry()));
422 }
423 }
424 }
425
426 #ifndef PRODUCT
// Debug-build tracing hook invoked (via the wrapper below) from generated
// method-handle adapter code.  Body is intentionally empty on this port;
// presumably a placeholder until AArch64 tracing is implemented -- TODO confirm.
427 void trace_method_handle_stub(const char* adaptername,
428 oop mh,
429 intptr_t* saved_regs,
430 intptr_t* entry_sp) { }
431
432 // The stub wraps the arguments in a struct on the stack to avoid
433 // dealing with the different calling conventions for passing 6
434 // arguments.
// Argument record built on the stack by the trace stub and passed by pointer
// to trace_method_handle_stub_wrapper, so only one pointer needs to follow
// the C calling convention.  Field order is part of the stub's layout --
// do not reorder.
435 struct MethodHandleStubArguments {
436 const char* adaptername; // name of the adapter being traced
437 oopDesc* mh; // the MethodHandle oop
438 intptr_t* saved_regs; // register save area captured by the stub -- TODO confirm layout
439 intptr_t* entry_sp; // SP at adapter entry -- TODO confirm
440 };
// C entry point taking the packed argument struct; intended to unpack args
// and call trace_method_handle_stub -- TODO confirm.  Currently empty, like
// the stub itself.
441 void trace_method_handle_stub_wrapper(MethodHandleStubArguments* args) { }
|
98 __ verify_method_ptr(method);
99
100 if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
101 Label run_compiled_code;
102 // JVMTI events, such as single-stepping, are implemented partly by avoiding running
103 // compiled code in threads for which the event is enabled. Check here for
104 // interp_only_mode if these events CAN be enabled.
105
106 __ ldrb(rscratch1, Address(rthread, JavaThread::interp_only_mode_offset()));
// interp_only_mode == 0 => no JVMTI interpreter events active for this thread,
// so the compiled entry is safe; a non-zero flag must fall through to the
// interpreter entry below.  The original cbnz inverted this test, running
// compiled code exactly when interp-only mode was requested.
107 __ cbz(rscratch1, run_compiled_code);
108 __ ldr(rscratch1, Address(method, Method::interpreter_entry_offset()));
109 __ br(rscratch1);
110 __ BIND(run_compiled_code);
111 }
112
// Select the entry point appropriate to the caller's execution mode.
113 const ByteSize entry_offset = for_compiler_entry ? Method::from_compiled_offset() :
114 Method::from_interpreted_offset();
115 __ ldr(rscratch1, Address(method, entry_offset));
116 __ br(rscratch1);
117 __ bind(L_no_such_method);
// far_jump: the AbstractMethodError stub may lie outside the +/-128MB range
// of a plain AArch64 unconditional branch.
118 __ far_jump(RuntimeAddress(StubRoutines::throw_AbstractMethodError_entry()));
119 }
120
121 void MethodHandles::jump_to_lambda_form(MacroAssembler* _masm,
122 Register recv, Register method_temp,
123 Register temp2,
124 bool for_compiler_entry) {
125 BLOCK_COMMENT("jump_to_lambda_form {");
126 // This is the initial entry point of a lazy method handle.
127 // After type checking, it picks up the invoker from the LambdaForm.
128 assert_different_registers(recv, method_temp, temp2);
129 assert(recv != noreg, "required register");
130 assert(method_temp == rmethod, "required register for loading method");
131
132 //NOT_PRODUCT({ FlagSetting fs(TraceMethodHandles, true); trace_method_handle(_masm, "LZMH"); });
133
134 // Load the invoker, as MH -> MH.form -> LF.vmentry
135 __ verify_oop(recv);
136 __ load_heap_oop(method_temp, Address(recv, NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes())));
137 __ verify_oop(method_temp);
138 __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes())));
401 rindex, rmethod,
402 temp2,
403 L_incompatible_class_change_error);
404 break;
405 }
406
407 default:
408 fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
409 break;
410 }
411
412 // live at this point: rmethod, r13 (if interpreted)
413
414 // After figuring out which concrete method to call, jump into it.
415 // Note that this works in the interpreter with no data motion.
416 // But the compiled version will require that r2_recv be shifted out.
417 __ verify_method_ptr(rmethod);
418 jump_from_method_handle(_masm, rmethod, temp1, for_compiler_entry);
// L_incompatible_class_change_error is only bound for the interface case;
// emit its out-of-line handler here, after the main dispatch path.
419 if (iid == vmIntrinsics::_linkToInterface) {
420 __ bind(L_incompatible_class_change_error);
// far_jump: the ICCE stub may be beyond the +/-128MB reach of a plain branch.
421 __ far_jump(RuntimeAddress(StubRoutines::throw_IncompatibleClassChangeError_entry()));
422 }
423 }
424 }
425
426 #ifndef PRODUCT
// Debug-build tracing hook invoked (via the wrapper below) from generated
// method-handle adapter code.  Body is intentionally empty on this port;
// presumably a placeholder until AArch64 tracing is implemented -- TODO confirm.
427 void trace_method_handle_stub(const char* adaptername,
428 oop mh,
429 intptr_t* saved_regs,
430 intptr_t* entry_sp) { }
431
432 // The stub wraps the arguments in a struct on the stack to avoid
433 // dealing with the different calling conventions for passing 6
434 // arguments.
// Argument record built on the stack by the trace stub and passed by pointer
// to trace_method_handle_stub_wrapper, so only one pointer needs to follow
// the C calling convention.  Field order is part of the stub's layout --
// do not reorder.
435 struct MethodHandleStubArguments {
436 const char* adaptername; // name of the adapter being traced
437 oopDesc* mh; // the MethodHandle oop
438 intptr_t* saved_regs; // register save area captured by the stub -- TODO confirm layout
439 intptr_t* entry_sp; // SP at adapter entry -- TODO confirm
440 };
// C entry point taking the packed argument struct; intended to unpack args
// and call trace_method_handle_stub -- TODO confirm.  Currently empty, like
// the stub itself.
441 void trace_method_handle_stub_wrapper(MethodHandleStubArguments* args) { }
|