src/cpu/sparc/vm/methodHandles_sparc.cpp
*** old/src/cpu/sparc/vm/methodHandles_sparc.cpp	Thu Jul 14 13:07:08 2011
--- new/src/cpu/sparc/vm/methodHandles_sparc.cpp	Thu Jul 14 13:07:08 2011

*** 522,531 **** --- 522,555 ----
    __ BIND(L_ok);
    BLOCK_COMMENT("} verify_klass");
  }
  #endif // ASSERT
+ 
+ void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register target, Register temp) {
+   assert(method == G5_method, "interpreter calling convention");
+   __ verify_oop(method);
+   __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_interpreted_offset()), target);
+   if (JvmtiExport::can_post_interpreter_events()) {
+     // JVMTI events, such as single-stepping, are implemented partly by avoiding running
+     // compiled code in threads for which the event is enabled. Check here for
+     // interp_only_mode if these events CAN be enabled.
+     __ verify_thread();
+     Label skip_compiled_code;
+ 
+     const Address interp_only(G2_thread, JavaThread::interp_only_mode_offset());
+     __ ld(interp_only, temp);
+     __ tst(temp);
+     __ br(Assembler::notZero, true, Assembler::pn, skip_compiled_code);
+     __ delayed()->ld_ptr(G5_method, in_bytes(methodOopDesc::interpreter_entry_offset()), target);
+     __ bind(skip_compiled_code);
+   }
+   __ jmp(target, 0);
+   __ delayed()->nop();
+ }
+ 
+ // Code generation
  address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm) {
    // I5_savedSP/O5_savedSP: sender SP (must preserve)
    // G4 (Gargs): incoming argument list (must preserve)
    // G5_method: invoke methodOop
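The new jump_from_method_handle helper emits the common exit sequence: it loads the methodOop's from_interpreted entry, but when JVMTI interpreter events (such as single-stepping) can be posted it also tests the per-thread interp_only_mode flag and, if set, reroutes to the interpreter entry so those events are not lost in compiled code. Below is a minimal C++ sketch of that dispatch decision; FakeMethod, FakeThread and jvmti_can_post_interpreter_events are invented stand-ins for the VM types, not HotSpot APIs.

#include <cstdio>

// Hypothetical stand-ins for methodOop and JavaThread, only to illustrate the branch.
struct FakeMethod {
  void (*from_interpreted_entry)();   // normal path: may dispatch into compiled code
  void (*interpreter_entry)();        // forced interpreter path
};

struct FakeThread {
  int interp_only_mode;               // nonzero while JVMTI wants interpreted-only execution
};

static bool jvmti_can_post_interpreter_events = true;  // stands in for JvmtiExport::can_post_interpreter_events()

// Mirrors the control flow the stub emits: pick the target entry, then jump (here: call).
void jump_from_method_handle(FakeThread* thread, FakeMethod* method) {
  void (*target)() = method->from_interpreted_entry;   // ld_ptr from_interpreted_offset
  if (jvmti_can_post_interpreter_events) {              // compile-time guard in the real stub
    if (thread->interp_only_mode != 0) {                // ld + tst + br on interp_only_mode
      target = method->interpreter_entry;               // delay-slot load of interpreter_entry_offset
    }
  }
  target();                                             // jmp target
}

static void compiled_entry()    { std::puts("compiled/i2c entry"); }
static void interpreter_entry() { std::puts("interpreter entry"); }

int main() {
  FakeMethod m = { compiled_entry, interpreter_entry };
  FakeThread t = { 1 };              // e.g. single-stepping enabled
  jump_from_method_handle(&t, &m);   // prints "interpreter entry"
  t.interp_only_mode = 0;
  jump_from_method_handle(&t, &m);   // prints "compiled/i2c entry"
  return 0;
}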
*** 1103,1115 **** --- 1127,1136 ----
    const Register O2_required = O2;
  
    guarantee(java_lang_invoke_MethodHandle::vmentry_offset_in_bytes() != 0, "must have offsets");
  
    // Some handy addresses:
-   Address G5_method_fie(    G5_method,        in_bytes(methodOopDesc::from_interpreted_offset()));
-   Address G5_method_fce(    G5_method,        in_bytes(methodOopDesc::from_compiled_offset()));
- 
    Address G3_mh_vmtarget(   G3_method_handle, java_lang_invoke_MethodHandle::vmtarget_offset_in_bytes());
  
    Address G3_dmh_vmindex(   G3_method_handle, java_lang_invoke_DirectMethodHandle::vmindex_offset_in_bytes());
  
    Address G3_bmh_vmargslot( G3_method_handle, java_lang_invoke_BoundMethodHandle::vmargslot_offset_in_bytes());
*** 1134,1180 **** --- 1155,1198 ----
    switch ((int) ek) {
    case _raise_exception:
      {
        // Not a real MH entry, but rather shared code for raising an
!       // exception.  Since we use the compiled entry, arguments are
!       // expected in compiler argument registers.
!       // exception.  For sharing purposes the arguments are passed into registers
!       // and then placed in the interpreter calling convention here.
        assert(raise_exception_method(), "must be set");
        assert(raise_exception_method()->from_compiled_entry(), "method must be linked");
  
        __ mov(O5_savedSP, SP);  // Cut the stack back to where the caller started.
  
        Label L_no_method;
        // FIXME: fill in _raise_exception_method with a suitable java.lang.invoke method
        __ set(AddressLiteral((address) &_raise_exception_method), G5_method);
        __ ld_ptr(Address(G5_method, 0), G5_method);
  
        const int jobject_oop_offset = 0;
        __ ld_ptr(Address(G5_method, jobject_oop_offset), G5_method);
  
!       __ verify_oop(G5_method);
!       __ jump_indirect_to(G5_method_fce, O3_scratch);  // jump to compiled entry
!       __ delayed()->nop();
!       adjust_SP_and_Gargs_down_by_slots(_masm, 3, noreg, noreg);
+ 
!       __ st_ptr(O0_code,     __ argument_address(constant(2), noreg, 0));
+       __ st_ptr(O1_actual,   __ argument_address(constant(1), noreg, 0));
+       __ st_ptr(O2_required, __ argument_address(constant(0), noreg, 0));
+ 
+       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);
      }
      break;
  
    case _invokestatic_mh:
    case _invokespecial_mh:
      {
        __ load_heap_oop(G3_mh_vmtarget, G5_method);  // target is a methodOop
        __ verify_oop(G5_method);
        // Same as TemplateTable::invokestatic or invokespecial,
        // minus the CP setup and profiling:
        if (ek == _invokespecial_mh) {
          // Must load & check the first argument before entering the target method.
          __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
          __ ld_ptr(__ argument_address(O0_argslot, O0_argslot, -1), G3_method_handle);
          __ null_check(G3_method_handle);
          __ verify_oop(G3_method_handle);
        }
!       __ jump_indirect_to(G5_method_fie, O1_scratch);
!       __ delayed()->nop();
!       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);
      }
      break;
  
    case _invokevirtual_mh:
      {
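For _raise_exception the old code jumped to the compiled entry (G5_method_fce) with O0_code, O1_actual and O2_required already in compiler argument registers; the new code instead grows the outgoing interpreter argument area by three slots, stores the three values into argument slots 2, 1 and 0, and enters through jump_from_method_handle. Below is a small C++ sketch of that repacking, with a plain array standing in for the Gargs slot area and the slot numbering assumed to follow the store order in the diff (first argument in the highest-numbered slot); it is an illustration, not the real SP-relative addressing.

#include <cstdio>
#include <cstdint>

// Invented model of the interpreter's outgoing-argument area (Gargs).
struct FakeArgArea {
  intptr_t slots[16];
  int top;                                                    // number of live slots
  void grow(int n)      { top += n; }                         // stands in for adjust_SP_and_Gargs_down_by_slots(3, ...)
  intptr_t& slot(int n) { return slots[top - 1 - n]; }        // stands in for argument_address(constant(n))
};

int main() {
  FakeArgArea args = {};
  intptr_t O0_code = 101, O1_actual = 202, O2_required = 303; // values arriving in registers

  args.grow(3);                      // make room for three interpreter argument slots
  args.slot(2) = O0_code;            // first argument
  args.slot(1) = O1_actual;          // second argument
  args.slot(0) = O2_required;        // third (last) argument

  for (int i = 0; i < args.top; i++)
    std::printf("slot[%d] = %ld\n", i, (long) args.slots[i]);
  return 0;
}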
*** 1202,1214 **** --- 1220,1230 ----
        __ sll_ptr(O2_index, LogBytesPerWord, O2_index);
        __ add(O0_klass, O2_index, O0_klass);
        Address vtable_entry_addr(O0_klass, base + vtableEntry::method_offset_in_bytes());
        __ ld_ptr(vtable_entry_addr, G5_method);
  
!       __ verify_oop(G5_method);
!       __ jump_indirect_to(G5_method_fie, O1_scratch);
!       __ delayed()->nop();
!       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);
      }
      break;
  
    case _invokeinterface_mh:
      {
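The _invokevirtual_mh path selects the target the same way the interpreter's invokevirtual does: scale the vtable index by the word size, add it to the klass pointer plus the vtable base, and load the methodOop from that entry; with this change the loaded method is then entered through jump_from_method_handle. Here is a C++ sketch of the address arithmetic, using invented FakeKlass/FakeVtableEntry layouts rather than the real klassOop layout.

#include <cstdio>
#include <cstdint>
#include <cstddef>

struct FakeMethod      { const char* name; };
struct FakeVtableEntry { FakeMethod* method; };    // method_offset_in_bytes() is 0 in this toy layout

struct FakeKlass {
  intptr_t header[2];                              // padding standing in for the klass header
  FakeVtableEntry vtable[4];                       // vtable embedded at a fixed base offset
};

// Mirrors the stub: base offset, index scaled to bytes, then load the method pointer.
FakeMethod* select_from_vtable(FakeKlass* klass, intptr_t index) {
  char* addr = reinterpret_cast<char*>(klass)
             + offsetof(FakeKlass, vtable)         // vtable base ("base" in the diff)
             + index * sizeof(FakeVtableEntry)     // sll_ptr(index, LogBytesPerWord) + add
             + offsetof(FakeVtableEntry, method);  // vtableEntry::method_offset_in_bytes()
  return *reinterpret_cast<FakeMethod**>(addr);    // ld_ptr(vtable_entry_addr, G5_method)
}

int main() {
  FakeMethod m0 = { "m0" }, m1 = { "m1" };
  FakeKlass k = { {0, 0}, { {&m0}, {&m1}, {nullptr}, {nullptr} } };
  std::printf("selected %s\n", select_from_vtable(&k, 1)->name);  // prints "selected m1"
  return 0;
}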
*** 1235,1247 **** --- 1251,1261 ----
                                   G5_index, G5_method,
                                   O2_scratch,
                                   O3_scratch,
                                   no_such_interface);
  
!       __ verify_oop(G5_method);
!       __ jump_indirect_to(G5_method_fie, O1_scratch);
!       __ delayed()->nop();
!       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);
  
        __ bind(no_such_interface);
        // Throw an exception.
        // For historical reasons, it will be IncompatibleClassChangeError.
        __ unimplemented("not tested yet");
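The interface case goes through lookup_interface_method, which scans the receiver klass's itable for the interface and falls through to no_such_interface (IncompatibleClassChangeError) when it is not found; on a hit the methodOop for the given itable index is loaded and, with this change, entered via jump_from_method_handle. A simplified C++ sketch of that lookup follows, with invented types in place of the real embedded itable.

#include <cstdio>
#include <stdexcept>

struct FakeMethod    { const char* name; };
struct FakeInterface { const char* name; };

struct FakeItableEntry {
  FakeInterface* interface_klass;   // which interface this block implements
  FakeMethod**   methods;           // that interface's method block
};

FakeMethod* lookup_interface_method(FakeItableEntry* itable, int itable_len,
                                    FakeInterface* intf, int itable_index) {
  for (int i = 0; i < itable_len; i++) {
    if (itable[i].interface_klass == intf)
      return itable[i].methods[itable_index];     // hit: load the selected methodOop
  }
  // corresponds to the no_such_interface path in the stub
  throw std::runtime_error("IncompatibleClassChangeError");
}

int main() {
  FakeMethod run = { "run" };
  FakeMethod* runnable_block[] = { &run };
  FakeInterface runnable = { "java/lang/Runnable" };
  FakeItableEntry itable[] = { { &runnable, runnable_block } };

  std::printf("found %s\n", lookup_interface_method(itable, 1, &runnable, 0)->name);
  return 0;
}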
*** 1281,1293 **** --- 1295,1305 ----
                         O2_scratch);  // must be an even register for !_LP64 long moves (uses O2/O3)
        }
  
        if (direct_to_method) {
          __ load_heap_oop(G3_mh_vmtarget, G5_method);  // target is a methodOop
!         __ verify_oop(G5_method);
!         __ jump_indirect_to(G5_method_fie, O1_scratch);
!         __ delayed()->nop();
!         jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);
        } else {
          __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);  // target is a methodOop
          __ verify_oop(G3_method_handle);
          __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
        }
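In the bound-argument entries the exit depends on direct_to_method: a direct target is a methodOop and is now entered through jump_from_method_handle, while a non-direct target is another MethodHandle whose own entry point is entered next, continuing the adapter chain. A brief C++ sketch of that branch; the types and the recursive call are illustrative only.

#include <cstdio>

struct FakeMethod       { const char* name; };
struct FakeMethodHandle {
  bool              direct_to_method;
  FakeMethod*       vmtarget_method;   // used when direct_to_method
  FakeMethodHandle* vmtarget_handle;   // used otherwise
};

void enter_target(FakeMethodHandle* mh, int depth = 0) {
  if (mh->direct_to_method) {
    std::printf("depth %d: jump_from_method_handle -> %s\n", depth, mh->vmtarget_method->name);
  } else {
    std::printf("depth %d: jump_to_method_handle_entry -> next handle\n", depth);
    enter_target(mh->vmtarget_handle, depth + 1);   // the next adapter repeats the decision
  }
}

int main() {
  FakeMethod target = { "invokee" };
  FakeMethodHandle direct = { true,  &target, nullptr };
  FakeMethodHandle bound  = { false, nullptr, &direct };
  enter_target(&bound);   // chains once, then reaches the methodOop
  return 0;
}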
