src/cpu/sparc/vm/methodHandles_sparc.cpp




 507   __ load_klass(obj_reg, temp_reg);
 508   __ set(ExternalAddress(klass_addr), temp2_reg);
 509   __ ld_ptr(Address(temp2_reg, 0), temp2_reg);
 510   __ cmp(temp_reg, temp2_reg);
 511   __ brx(Assembler::equal, false, Assembler::pt, L_ok);
 512   __ delayed()->nop();
 513   intptr_t super_check_offset = klass->super_check_offset();
 514   __ ld_ptr(Address(temp_reg, super_check_offset), temp_reg);
 515   __ set(ExternalAddress(klass_addr), temp2_reg);
 516   __ ld_ptr(Address(temp2_reg, 0), temp2_reg);
 517   __ cmp(temp_reg, temp2_reg);
 518   __ brx(Assembler::equal, false, Assembler::pt, L_ok);
 519   __ delayed()->nop();
 520   __ BIND(L_bad);
 521   __ stop(error_message);
 522   __ BIND(L_ok);
 523   BLOCK_COMMENT("} verify_klass");
 524 }
 525 #endif // ASSERT
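
The verify_klass fragment above makes a two-step check: it first compares the object's klass against the expected klass loaded through klass_addr, and only if that miscompares does it reload the word at the expected klass's super_check_offset() inside the object's klass and compare again, falling through to L_bad (and stop()) when both tests miss. A minimal, self-contained C++ sketch of the same decision, with invented type and field names (not HotSpot code):

  #include <cstdint>

  // Invented stand-ins for the VM types; field names are illustrative only.
  struct Klass {
    intptr_t super_check_offset;   // byte offset of the secondary-supers cache word
  };

  struct Oop {
    Klass* klass;                  // what load_klass() reads
  };

  // Mirrors the assembly: pass if the object's klass equals the expected klass,
  // or if the word found at the expected klass's super_check_offset also does.
  static bool klass_matches(const Oop* obj, const Klass* expected) {
    const Klass* k = obj->klass;                      // load_klass(obj_reg, temp_reg)
    if (k == expected) return true;                   // first cmp/brx to L_ok
    const char* base = reinterpret_cast<const char*>(k);
    const Klass* cached =
        *reinterpret_cast<Klass* const*>(base + expected->super_check_offset);
    return cached == expected;                        // second cmp/brx, else L_bad
  }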
 526 
 527 // Code generation
 528 address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm) {
 529   // I5_savedSP/O5_savedSP: sender SP (must preserve)
 530   // G4 (Gargs): incoming argument list (must preserve)
 531   // G5_method:  invoke methodOop
 532   // G3_method_handle: receiver method handle (must load from sp[MethodTypeForm.vmslots])
 533   // O0, O1, O2, O3, O4: garbage temps, blown away
 534   Register O0_mtype   = O0;
 535   Register O1_scratch = O1;
 536   Register O2_scratch = O2;
 537   Register O3_scratch = O3;
 538   Register O4_argslot = O4;
 539   Register O4_argbase = O4;
 540 
 541   // emit WrongMethodType path first, to enable back-branch from main path
 542   Label wrong_method_type;
 543   __ bind(wrong_method_type);
 544   Label invoke_generic_slow_path;
 545   assert(methodOopDesc::intrinsic_id_size_in_bytes() == sizeof(u1), "");;
 546   __ ldub(Address(G5_method, methodOopDesc::intrinsic_id_offset_in_bytes()), O1_scratch);


1088   // - O5_savedSP: sender SP (must preserve)
1089 
1090   const Register O0_scratch = O0;
1091   const Register O1_scratch = O1;
1092   const Register O2_scratch = O2;
1093   const Register O3_scratch = O3;
1094   const Register O4_scratch = O4;
1095   const Register G5_scratch = G5;
1096 
1097   // Often used names:
1098   const Register O0_argslot = O0;
1099 
1100   // Argument registers for _raise_exception:
1101   const Register O0_code     = O0;
1102   const Register O1_actual   = O1;
1103   const Register O2_required = O2;
1104 
1105   guarantee(java_lang_invoke_MethodHandle::vmentry_offset_in_bytes() != 0, "must have offsets");
1106 
1107   // Some handy addresses:
1108   Address G5_method_fie(    G5_method,        in_bytes(methodOopDesc::from_interpreted_offset()));
1109   Address G5_method_fce(    G5_method,        in_bytes(methodOopDesc::from_compiled_offset()));
1110 
1111   Address G3_mh_vmtarget(   G3_method_handle, java_lang_invoke_MethodHandle::vmtarget_offset_in_bytes());
1112 
1113   Address G3_dmh_vmindex(   G3_method_handle, java_lang_invoke_DirectMethodHandle::vmindex_offset_in_bytes());
1114 
1115   Address G3_bmh_vmargslot( G3_method_handle, java_lang_invoke_BoundMethodHandle::vmargslot_offset_in_bytes());
1116   Address G3_bmh_argument(  G3_method_handle, java_lang_invoke_BoundMethodHandle::argument_offset_in_bytes());
1117 
1118   Address G3_amh_vmargslot( G3_method_handle, java_lang_invoke_AdapterMethodHandle::vmargslot_offset_in_bytes());
1119   Address G3_amh_argument ( G3_method_handle, java_lang_invoke_AdapterMethodHandle::argument_offset_in_bytes());
1120   Address G3_amh_conversion(G3_method_handle, java_lang_invoke_AdapterMethodHandle::conversion_offset_in_bytes());
1121 
1122   const int java_mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + Klass::java_mirror_offset_in_bytes();
1123 
1124   if (have_entry(ek)) {
1125     __ nop();  // empty stubs make SG sick
1126     return;
1127   }
1128 
1129   address interp_entry = __ pc();
1130 
1131   trace_method_handle(_masm, entry_name(ek));
1132 
1133   BLOCK_COMMENT(err_msg("Entry %s {", entry_name(ek)));
1134 
1135   switch ((int) ek) {
1136   case _raise_exception:
1137     {
1138       // Not a real MH entry, but rather shared code for raising an
1139       // exception.  Since we use the compiled entry, arguments are
1140       // expected in compiler argument registers.
1141       assert(raise_exception_method(), "must be set");
1142       assert(raise_exception_method()->from_compiled_entry(), "method must be linked");
1143 
1144       __ mov(O5_savedSP, SP);  // Cut the stack back to where the caller started.
1145 
1146       Label L_no_method;
1147       // FIXME: fill in _raise_exception_method with a suitable java.lang.invoke method
1148       __ set(AddressLiteral((address) &_raise_exception_method), G5_method);
1149       __ ld_ptr(Address(G5_method, 0), G5_method);
1150 
1151       const int jobject_oop_offset = 0;
1152       __ ld_ptr(Address(G5_method, jobject_oop_offset), G5_method);
1153 
1154       __ verify_oop(G5_method);
1155       __ jump_indirect_to(G5_method_fce, O3_scratch);  // jump to compiled entry
1156       __ delayed()->nop();



1157     }
1158     break;
1159 
1160   case _invokestatic_mh:
1161   case _invokespecial_mh:
1162     {
1163       __ load_heap_oop(G3_mh_vmtarget, G5_method);  // target is a methodOop
1164       __ verify_oop(G5_method);
1165       // Same as TemplateTable::invokestatic or invokespecial,
1166       // minus the CP setup and profiling:
1167       if (ek == _invokespecial_mh) {
1168         // Must load & check the first argument before entering the target method.
1169         __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
1170         __ ld_ptr(__ argument_address(O0_argslot, O0_argslot, -1), G3_method_handle);
1171         __ null_check(G3_method_handle);
1172         __ verify_oop(G3_method_handle);
1173       }
1174       __ jump_indirect_to(G5_method_fie, O1_scratch);
1175       __ delayed()->nop();
1176     }
1177     break;
1178 
1179   case _invokevirtual_mh:
1180     {
1181       // Same as TemplateTable::invokevirtual,
1182       // minus the CP setup and profiling:
1183 
1184       // Pick out the vtable index and receiver offset from the MH,
1185       // and then we can discard it:
1186       Register O2_index = O2_scratch;
1187       __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
1188       __ ldsw(G3_dmh_vmindex, O2_index);
1189       // Note:  The verifier allows us to ignore G3_mh_vmtarget.
1190       __ ld_ptr(__ argument_address(O0_argslot, O0_argslot, -1), G3_method_handle);
1191       __ null_check(G3_method_handle, oopDesc::klass_offset_in_bytes());
1192 
1193       // Get receiver klass:
1194       Register O0_klass = O0_argslot;
1195       __ load_klass(G3_method_handle, O0_klass);
1196       __ verify_oop(O0_klass);
1197 
1198       // Get target methodOop & entry point:
1199       const int base = instanceKlass::vtable_start_offset() * wordSize;
1200       assert(vtableEntry::size() * wordSize == wordSize, "adjust the scaling in the code below");
1201 
1202       __ sll_ptr(O2_index, LogBytesPerWord, O2_index);
1203       __ add(O0_klass, O2_index, O0_klass);
1204       Address vtable_entry_addr(O0_klass, base + vtableEntry::method_offset_in_bytes());
1205       __ ld_ptr(vtable_entry_addr, G5_method);
1206 
1207       __ verify_oop(G5_method);
1208       __ jump_indirect_to(G5_method_fie, O1_scratch);
1209       __ delayed()->nop();
1210     }
1211     break;
1212 
1213   case _invokeinterface_mh:
1214     {
1215       // Same as TemplateTable::invokeinterface,
1216       // minus the CP setup and profiling:
1217       __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
1218       Register O1_intf  = O1_scratch;
1219       Register G5_index = G5_scratch;
1220       __ load_heap_oop(G3_mh_vmtarget, O1_intf);
1221       __ ldsw(G3_dmh_vmindex, G5_index);
1222       __ ld_ptr(__ argument_address(O0_argslot, O0_argslot, -1), G3_method_handle);
1223       __ null_check(G3_method_handle, oopDesc::klass_offset_in_bytes());
1224 
1225       // Get receiver klass:
1226       Register O0_klass = O0_argslot;
1227       __ load_klass(G3_method_handle, O0_klass);
1228       __ verify_oop(O0_klass);
1229 
1230       // Get interface:
1231       Label no_such_interface;
1232       __ verify_oop(O1_intf);
1233       __ lookup_interface_method(O0_klass, O1_intf,
1234                                  // Note: next two args must be the same:
1235                                  G5_index, G5_method,
1236                                  O2_scratch,
1237                                  O3_scratch,
1238                                  no_such_interface);
1239 
1240       __ verify_oop(G5_method);
1241       __ jump_indirect_to(G5_method_fie, O1_scratch);
1242       __ delayed()->nop();
1243 
1244       __ bind(no_such_interface);
1245       // Throw an exception.
1246       // For historical reasons, it will be IncompatibleClassChangeError.
1247       __ unimplemented("not tested yet");
1248       __ ld_ptr(Address(O1_intf, java_mirror_offset), O2_required);  // required interface
1249       __ mov(   O0_klass,                             O1_actual);    // bad receiver
1250       __ jump_to(AddressLiteral(from_interpreted_entry(_raise_exception)), O3_scratch);
1251       __ delayed()->mov(Bytecodes::_invokeinterface,  O0_code);      // who is complaining?
1252     }
1253     break;
1254 
1255   case _bound_ref_mh:
1256   case _bound_int_mh:
1257   case _bound_long_mh:
1258   case _bound_ref_direct_mh:
1259   case _bound_int_direct_mh:
1260   case _bound_long_direct_mh:
1261     {
1262       const bool direct_to_method = (ek >= _bound_ref_direct_mh);


1266       // Make room for the new argument:
1267       load_vmargslot(_masm, G3_bmh_vmargslot, O0_argslot);
1268       __ add(__ argument_address(O0_argslot, O0_argslot), O0_argslot);
1269 
1270       insert_arg_slots(_masm, arg_slots * stack_move_unit(), O0_argslot, O1_scratch, O2_scratch, O3_scratch);
1271 
1272       // Store bound argument into the new stack slot:
1273       __ load_heap_oop(G3_bmh_argument, O1_scratch);
1274       if (arg_type == T_OBJECT) {
1275         __ st_ptr(O1_scratch, Address(O0_argslot, 0));
1276       } else {
1277         Address prim_value_addr(O1_scratch, java_lang_boxing_object::value_offset_in_bytes(arg_type));
1278         move_typed_arg(_masm, arg_type, false,
1279                        prim_value_addr,
1280                        Address(O0_argslot, 0),
1281                        O2_scratch);  // must be an even register for !_LP64 long moves (uses O2/O3)
1282       }
1283 
1284       if (direct_to_method) {
1285         __ load_heap_oop(G3_mh_vmtarget, G5_method);  // target is a methodOop
1286         __ verify_oop(G5_method);
1287         __ jump_indirect_to(G5_method_fie, O1_scratch);
1288         __ delayed()->nop();
1289       } else {
1290         __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);  // target is a methodOop
1291         __ verify_oop(G3_method_handle);
1292         __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
1293       }
1294     }
1295     break;
1296 
1297   case _adapter_retype_only:
1298   case _adapter_retype_raw:
1299     // Immediately jump to the next MH layer:
1300     __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
1301     __ verify_oop(G3_method_handle);
1302     __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
1303     // This is OK when all parameter types widen.
1304     // It is also OK when a return type narrows.
1305     break;
1306 
1307   case _adapter_check_cast:
1308     {




 507   __ load_klass(obj_reg, temp_reg);
 508   __ set(ExternalAddress(klass_addr), temp2_reg);
 509   __ ld_ptr(Address(temp2_reg, 0), temp2_reg);
 510   __ cmp(temp_reg, temp2_reg);
 511   __ brx(Assembler::equal, false, Assembler::pt, L_ok);
 512   __ delayed()->nop();
 513   intptr_t super_check_offset = klass->super_check_offset();
 514   __ ld_ptr(Address(temp_reg, super_check_offset), temp_reg);
 515   __ set(ExternalAddress(klass_addr), temp2_reg);
 516   __ ld_ptr(Address(temp2_reg, 0), temp2_reg);
 517   __ cmp(temp_reg, temp2_reg);
 518   __ brx(Assembler::equal, false, Assembler::pt, L_ok);
 519   __ delayed()->nop();
 520   __ BIND(L_bad);
 521   __ stop(error_message);
 522   __ BIND(L_ok);
 523   BLOCK_COMMENT("} verify_klass");
 524 }
 525 #endif // ASSERT
 526 
 527 
 528 void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register target, Register temp) {
 529   assert(method == G5_method, "interpreter calling convention");
 530   __ verify_oop(method);
 531   __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_interpreted_offset()), target);
 532   if (JvmtiExport::can_post_interpreter_events()) {
 533     // JVMTI events, such as single-stepping, are implemented partly by avoiding running
 534     // compiled code in threads for which the event is enabled.  Check here for
 535     // interp_only_mode if these events CAN be enabled.
 536     __ verify_thread();
 537     Label skip_compiled_code;
 538 
 539     const Address interp_only(G2_thread, JavaThread::interp_only_mode_offset());
 540     __ ld(interp_only, temp);
 541     __ tst(temp);
 542     __ br(Assembler::notZero, true, Assembler::pn, skip_compiled_code);
 543     __ delayed()->ld_ptr(G5_method, in_bytes(methodOopDesc::interpreter_entry_offset()), target);
 544     __ bind(skip_compiled_code);
 545   }
 546   __ jmp(target, 0);
 547   __ delayed()->nop();
 548 }
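
The new jump_from_method_handle helper centralizes the final dispatch: it normally jumps to the method's from_interpreted entry, but when JVMTI events such as single-stepping can be enabled it first checks the thread's interp_only_mode flag and, if set, substitutes the interpreter entry so the thread stays out of compiled code. A minimal C++ sketch of that control flow, with invented field names (not the real HotSpot types):

  #include <cstdint>

  // Invented stand-ins; the real methodOop/JavaThread layouts differ.
  struct Method {
    void* from_interpreted_entry;   // normal target (may be compiled code)
    void* interpreter_entry;        // always runs in the interpreter
  };

  struct Thread {
    int interp_only_mode;           // nonzero while JVMTI forces interpreted execution
  };

  // Same decision the stub makes before its final jmp: prefer the normal entry
  // unless this thread must remain in the interpreter.
  static void* select_entry(const Method* m, const Thread* t, bool jvmti_events_possible) {
    void* target = m->from_interpreted_entry;
    if (jvmti_events_possible && t->interp_only_mode != 0) {
      target = m->interpreter_entry;                  // the skip_compiled_code path
    }
    return target;                                    // the stub then jumps here
  }

Routing every method-handle entry through this one helper is what lets all of them honor interp_only_mode instead of jumping straight to from_interpreted_offset as the old code did.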
 549 
 550 
 551 // Code generation
 552 address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm) {
 553   // I5_savedSP/O5_savedSP: sender SP (must preserve)
 554   // G4 (Gargs): incoming argument list (must preserve)
 555   // G5_method:  invoke methodOop
 556   // G3_method_handle: receiver method handle (must load from sp[MethodTypeForm.vmslots])
 557   // O0, O1, O2, O3, O4: garbage temps, blown away
 558   Register O0_mtype   = O0;
 559   Register O1_scratch = O1;
 560   Register O2_scratch = O2;
 561   Register O3_scratch = O3;
 562   Register O4_argslot = O4;
 563   Register O4_argbase = O4;
 564 
 565   // emit WrongMethodType path first, to enable back-branch from main path
 566   Label wrong_method_type;
 567   __ bind(wrong_method_type);
 568   Label invoke_generic_slow_path;
 569   assert(methodOopDesc::intrinsic_id_size_in_bytes() == sizeof(u1), "");;
 570   __ ldub(Address(G5_method, methodOopDesc::intrinsic_id_offset_in_bytes()), O1_scratch);


1112   // - O5_savedSP: sender SP (must preserve)
1113 
1114   const Register O0_scratch = O0;
1115   const Register O1_scratch = O1;
1116   const Register O2_scratch = O2;
1117   const Register O3_scratch = O3;
1118   const Register O4_scratch = O4;
1119   const Register G5_scratch = G5;
1120 
1121   // Often used names:
1122   const Register O0_argslot = O0;
1123 
1124   // Argument registers for _raise_exception:
1125   const Register O0_code     = O0;
1126   const Register O1_actual   = O1;
1127   const Register O2_required = O2;
1128 
1129   guarantee(java_lang_invoke_MethodHandle::vmentry_offset_in_bytes() != 0, "must have offsets");
1130 
1131   // Some handy addresses:



1132   Address G3_mh_vmtarget(   G3_method_handle, java_lang_invoke_MethodHandle::vmtarget_offset_in_bytes());
1133 
1134   Address G3_dmh_vmindex(   G3_method_handle, java_lang_invoke_DirectMethodHandle::vmindex_offset_in_bytes());
1135 
1136   Address G3_bmh_vmargslot( G3_method_handle, java_lang_invoke_BoundMethodHandle::vmargslot_offset_in_bytes());
1137   Address G3_bmh_argument(  G3_method_handle, java_lang_invoke_BoundMethodHandle::argument_offset_in_bytes());
1138 
1139   Address G3_amh_vmargslot( G3_method_handle, java_lang_invoke_AdapterMethodHandle::vmargslot_offset_in_bytes());
1140   Address G3_amh_argument ( G3_method_handle, java_lang_invoke_AdapterMethodHandle::argument_offset_in_bytes());
1141   Address G3_amh_conversion(G3_method_handle, java_lang_invoke_AdapterMethodHandle::conversion_offset_in_bytes());
1142 
1143   const int java_mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + Klass::java_mirror_offset_in_bytes();
1144 
1145   if (have_entry(ek)) {
1146     __ nop();  // empty stubs make SG sick
1147     return;
1148   }
1149 
1150   address interp_entry = __ pc();
1151 
1152   trace_method_handle(_masm, entry_name(ek));
1153 
1154   BLOCK_COMMENT(err_msg("Entry %s {", entry_name(ek)));
1155 
1156   switch ((int) ek) {
1157   case _raise_exception:
1158     {
1159       // Not a real MH entry, but rather shared code for raising an
1160       // exception.  For sharing purposes the arguments are passed into registers
1161       // and then placed in the interpreter calling convention here.
1162       assert(raise_exception_method(), "must be set");
1163       assert(raise_exception_method()->from_compiled_entry(), "method must be linked");
1164 




1165       __ set(AddressLiteral((address) &_raise_exception_method), G5_method);
1166       __ ld_ptr(Address(G5_method, 0), G5_method);
1167 
1168       const int jobject_oop_offset = 0;
1169       __ ld_ptr(Address(G5_method, jobject_oop_offset), G5_method);
1170 
1171       adjust_SP_and_Gargs_down_by_slots(_masm, 3, noreg, noreg);
1172 
1173       __ st_ptr(O0_code,     __ argument_address(constant(2), noreg, 0));
1174       __ st_ptr(O1_actual,   __ argument_address(constant(1), noreg, 0));
1175       __ st_ptr(O2_required, __ argument_address(constant(0), noreg, 0));
1176       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);
1177     }
1178     break;
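
In the reworked _raise_exception path the three values arriving in registers (O0_code, O1_actual, O2_required) are no longer handed to the compiled entry; instead three interpreter argument slots are pushed and the values are stored into them before dispatching through jump_from_method_handle. Assuming the target method takes its parameters in the order (code, actual, required), as the register naming suggests, slot 0 holds the last parameter and slot 2 the first. A purely illustrative C++ sketch of that slot layout (hypothetical helper, not the real Gargs/SP arithmetic):

  #include <cstdint>

  // Illustrative model of the interpreter's outgoing-argument area: slot 0 is
  // the last declared parameter, higher slot numbers are earlier parameters.
  struct ArgArea {
    intptr_t slots[3];                       // room made by pushing 3 new slots
    intptr_t& argument(int slot) { return slots[slot]; }
  };

  // Assumed parameter order (code, actual, required): 'code' is the first
  // parameter, so it goes in the highest-numbered slot.
  static void store_raise_exception_args(ArgArea& a, intptr_t code,
                                         intptr_t actual, intptr_t required) {
    a.argument(2) = code;      // st_ptr(O0_code,     argument_address(2))
    a.argument(1) = actual;    // st_ptr(O1_actual,   argument_address(1))
    a.argument(0) = required;  // st_ptr(O2_required, argument_address(0))
  }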
1179 
1180   case _invokestatic_mh:
1181   case _invokespecial_mh:
1182     {
1183       __ load_heap_oop(G3_mh_vmtarget, G5_method);  // target is a methodOop

1184       // Same as TemplateTable::invokestatic or invokespecial,
1185       // minus the CP setup and profiling:
1186       if (ek == _invokespecial_mh) {
1187         // Must load & check the first argument before entering the target method.
1188         __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
1189         __ ld_ptr(__ argument_address(O0_argslot, O0_argslot, -1), G3_method_handle);
1190         __ null_check(G3_method_handle);
1191         __ verify_oop(G3_method_handle);
1192       }
1193       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);

1194     }
1195     break;
1196 
1197   case _invokevirtual_mh:
1198     {
1199       // Same as TemplateTable::invokevirtual,
1200       // minus the CP setup and profiling:
1201 
1202       // Pick out the vtable index and receiver offset from the MH,
1203       // and then we can discard it:
1204       Register O2_index = O2_scratch;
1205       __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
1206       __ ldsw(G3_dmh_vmindex, O2_index);
1207       // Note:  The verifier allows us to ignore G3_mh_vmtarget.
1208       __ ld_ptr(__ argument_address(O0_argslot, O0_argslot, -1), G3_method_handle);
1209       __ null_check(G3_method_handle, oopDesc::klass_offset_in_bytes());
1210 
1211       // Get receiver klass:
1212       Register O0_klass = O0_argslot;
1213       __ load_klass(G3_method_handle, O0_klass);
1214       __ verify_oop(O0_klass);
1215 
1216       // Get target methodOop & entry point:
1217       const int base = instanceKlass::vtable_start_offset() * wordSize;
1218       assert(vtableEntry::size() * wordSize == wordSize, "adjust the scaling in the code below");
1219 
1220       __ sll_ptr(O2_index, LogBytesPerWord, O2_index);
1221       __ add(O0_klass, O2_index, O0_klass);
1222       Address vtable_entry_addr(O0_klass, base + vtableEntry::method_offset_in_bytes());
1223       __ ld_ptr(vtable_entry_addr, G5_method);
1224 
1225       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);


1226     }
1227     break;
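
The virtual-dispatch case above scales the vtable index by the word size, adds it to the receiver's klass, and loads the target methodOop from the embedded vtable. The same arithmetic as a minimal C++ sketch, under the simplifying assumption (which the assert encodes) that each vtable entry is exactly one word; layout names are invented:

  #include <cstddef>

  struct Method;   // opaque stand-in for methodOop

  // Sketch of the dispatch arithmetic only, not HotSpot code: vtable entries
  // are one word each and begin 'vtable_start_bytes' into the klass object.
  static Method* vtable_lookup(const char* klass_base, int vtable_index,
                               size_t vtable_start_bytes) {
    // sll_ptr(index, LogBytesPerWord) + add(klass, index): scale and offset
    const char* slot = klass_base + vtable_start_bytes
                                  + (size_t)vtable_index * sizeof(void*);
    // ld_ptr(vtable_entry_addr, G5_method): load the method pointer from the slot
    return *reinterpret_cast<Method* const*>(slot);
  }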
1228 
1229   case _invokeinterface_mh:
1230     {
1231       // Same as TemplateTable::invokeinterface,
1232       // minus the CP setup and profiling:
1233       __ load_method_handle_vmslots(O0_argslot, G3_method_handle, O1_scratch);
1234       Register O1_intf  = O1_scratch;
1235       Register G5_index = G5_scratch;
1236       __ load_heap_oop(G3_mh_vmtarget, O1_intf);
1237       __ ldsw(G3_dmh_vmindex, G5_index);
1238       __ ld_ptr(__ argument_address(O0_argslot, O0_argslot, -1), G3_method_handle);
1239       __ null_check(G3_method_handle, oopDesc::klass_offset_in_bytes());
1240 
1241       // Get receiver klass:
1242       Register O0_klass = O0_argslot;
1243       __ load_klass(G3_method_handle, O0_klass);
1244       __ verify_oop(O0_klass);
1245 
1246       // Get interface:
1247       Label no_such_interface;
1248       __ verify_oop(O1_intf);
1249       __ lookup_interface_method(O0_klass, O1_intf,
1250                                  // Note: next two args must be the same:
1251                                  G5_index, G5_method,
1252                                  O2_scratch,
1253                                  O3_scratch,
1254                                  no_such_interface);
1255 
1256       jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);


1257 
1258       __ bind(no_such_interface);
1259       // Throw an exception.
1260       // For historical reasons, it will be IncompatibleClassChangeError.
1261       __ unimplemented("not tested yet");
1262       __ ld_ptr(Address(O1_intf, java_mirror_offset), O2_required);  // required interface
1263       __ mov(   O0_klass,                             O1_actual);    // bad receiver
1264       __ jump_to(AddressLiteral(from_interpreted_entry(_raise_exception)), O3_scratch);
1265       __ delayed()->mov(Bytecodes::_invokeinterface,  O0_code);      // who is complaining?
1266     }
1267     break;
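
For the interface case, lookup_interface_method searches the receiver klass's itable for the entry matching O1_intf and, when found, loads the method at index G5_index from that interface's method block; if no entry matches, control reaches no_such_interface and the stub raises IncompatibleClassChangeError via the _raise_exception path. A rough C++ sketch of that search, with invented types and a deliberately simplified itable layout:

  #include <cstddef>

  struct Method;
  struct InterfaceKlass;

  // Invented, simplified itable layout: a table of (interface, methods[]) pairs.
  struct ItableEntry {
    const InterfaceKlass* interface;   // null marks the end of the table
    Method* const*        methods;     // this interface's method block
  };

  static Method* itable_lookup(const ItableEntry* itable,
                               const InterfaceKlass* wanted, int itable_index,
                               bool* no_such_interface) {
    for (const ItableEntry* e = itable; e->interface != nullptr; ++e) {
      if (e->interface == wanted) {
        *no_such_interface = false;
        return e->methods[itable_index];   // what ends up in G5_method
      }
    }
    *no_such_interface = true;             // branch target: throw ICCE
    return nullptr;
  }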
1268 
1269   case _bound_ref_mh:
1270   case _bound_int_mh:
1271   case _bound_long_mh:
1272   case _bound_ref_direct_mh:
1273   case _bound_int_direct_mh:
1274   case _bound_long_direct_mh:
1275     {
1276       const bool direct_to_method = (ek >= _bound_ref_direct_mh);


1280       // Make room for the new argument:
1281       load_vmargslot(_masm, G3_bmh_vmargslot, O0_argslot);
1282       __ add(__ argument_address(O0_argslot, O0_argslot), O0_argslot);
1283 
1284       insert_arg_slots(_masm, arg_slots * stack_move_unit(), O0_argslot, O1_scratch, O2_scratch, O3_scratch);
1285 
1286       // Store bound argument into the new stack slot:
1287       __ load_heap_oop(G3_bmh_argument, O1_scratch);
1288       if (arg_type == T_OBJECT) {
1289         __ st_ptr(O1_scratch, Address(O0_argslot, 0));
1290       } else {
1291         Address prim_value_addr(O1_scratch, java_lang_boxing_object::value_offset_in_bytes(arg_type));
1292         move_typed_arg(_masm, arg_type, false,
1293                        prim_value_addr,
1294                        Address(O0_argslot, 0),
1295                        O2_scratch);  // must be an even register for !_LP64 long moves (uses O2/O3)
1296       }
1297 
1298       if (direct_to_method) {
1299         __ load_heap_oop(G3_mh_vmtarget, G5_method);  // target is a methodOop
1300         jump_from_method_handle(_masm, G5_method, O1_scratch, O2_scratch);


1301       } else {
1302         __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);  // target is a methodOop
1303         __ verify_oop(G3_method_handle);
1304         __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
1305       }
1306     }
1307     break;
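
A bound method handle prepends one pre-bound value to the outgoing argument list: insert_arg_slots opens a gap at the slot named by vmargslot, and the bound value, either the reference itself or the unboxed primitive, is stored there before dispatching to the target. Conceptually, in plain C++ and ignoring two-slot longs and the real SPARC stack shuffling (container and names are illustrative only):

  #include <cstdint>
  #include <vector>

  // Model the outgoing arguments as word-sized slots; the real code moves the
  // existing stack words down instead of growing a container.
  static void bind_argument(std::vector<intptr_t>& args,
                            size_t vmargslot,          // slot index taken from the MH
                            intptr_t bound_value) {    // already unboxed if primitive
    // insert_arg_slots: open a one-slot gap at the bound position and fill it
    args.insert(args.begin() + vmargslot, bound_value);
    // the stub then loads vmtarget and jumps to it with the widened argument list
  }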
1308 
1309   case _adapter_retype_only:
1310   case _adapter_retype_raw:
1311     // Immediately jump to the next MH layer:
1312     __ load_heap_oop(G3_mh_vmtarget, G3_method_handle);
1313     __ verify_oop(G3_method_handle);
1314     __ jump_to_method_handle_entry(G3_method_handle, O1_scratch);
1315     // This is OK when all parameter types widen.
1316     // It is also OK when a return type narrows.
1317     break;
1318 
1319   case _adapter_check_cast:
1320     {

