src/cpu/ppc/vm/sharedRuntime_ppc.cpp

Old version (GC_locker):
1457     if (in_regs[i].first()->is_FloatRegister()) {
1458       if (in_sig_bt[i] == T_FLOAT) {
1459         int offset = slot * VMRegImpl::stack_slot_size;
1460         slot++;
1461         assert(slot <= stack_slots, "overflow (after FLOAT stack slot)");
1462         if (map != NULL) {
1463           __ stfs(in_regs[i].first()->as_FloatRegister(), offset, R1_SP);
1464         } else {
1465           __ lfs(in_regs[i].first()->as_FloatRegister(), offset, R1_SP);
1466         }
1467       }
1468     } else if (in_regs[i].first()->is_stack()) {
1469       if (in_sig_bt[i] == T_ARRAY && map != NULL) {
1470         int offset_in_older_frame = in_regs[i].first()->reg2stack() + SharedRuntime::out_preserve_stack_slots();
1471         map->set_oop(VMRegImpl::stack2reg(offset_in_older_frame + stack_slots));
1472       }
1473     }
1474   }
1475 }
1476 
1477 // Check GC_locker::needs_gc and enter the runtime if it's true. This
1478 // keeps a new JNI critical region from starting until a GC has been
1479 // forced. Save down any oops in registers and describe them in an
1480 // OopMap.
1481 static void check_needs_gc_for_critical_native(MacroAssembler* masm,
1482                                                const int stack_slots,
1483                                                const int total_in_args,
1484                                                const int arg_save_area,
1485                                                OopMapSet* oop_maps,
1486                                                VMRegPair* in_regs,
1487                                                BasicType* in_sig_bt,
1488                                                Register tmp_reg ) {
1489   __ block_comment("check GC_locker::needs_gc");
1490   Label cont;
1491   __ lbz(tmp_reg, (RegisterOrConstant)(intptr_t)GC_locker::needs_gc_address());
1492   __ cmplwi(CCR0, tmp_reg, 0);
1493   __ beq(CCR0, cont);
1494 
1495   // Save down any values that are live in registers and call into the
1496   // runtime to halt for a GC.
1497   OopMap* map = new OopMap(stack_slots * 2, 0 /* arg_slots*/);
1498   save_or_restore_arguments(masm, stack_slots, total_in_args,
1499                             arg_save_area, map, in_regs, in_sig_bt);
1500 
1501   __ mr(R3_ARG1, R16_thread);
1502   __ set_last_Java_frame(R1_SP, noreg);
1503 
1504   __ block_comment("block_for_jni_critical");
1505   address entry_point = CAST_FROM_FN_PTR(address, SharedRuntime::block_for_jni_critical);
1506 #if defined(ABI_ELFv2)
1507   __ call_c(entry_point, relocInfo::runtime_call_type);
1508 #else
1509   __ call_c(CAST_FROM_FN_PTR(FunctionDescriptor*, entry_point), relocInfo::runtime_call_type);
1510 #endif
1511   address start           = __ pc() - __ offset(),


1670   }
1671 
1672   // Figure out which address we are really jumping to:
1673   MethodHandles::generate_method_handle_dispatch(masm, iid,
1674                                                  receiver_reg, member_reg, /*for_compiler_entry:*/ true);
1675 }
1676 
1677 #endif // COMPILER2
1678 
1679 // ---------------------------------------------------------------------------
1680 // Generate a native wrapper for a given method. The method takes arguments
1681 // in the Java compiled code convention, marshals them to the native
1682 // convention (handlizes oops, etc), transitions to native, makes the call,
1683 // returns to java state (possibly blocking), unhandlizes any result and
1684 // returns.
1685 //
1686 // Critical native functions are a shorthand for the use of
1687 // GetPrimitiveArrayCritical and disallow the use of any other JNI
1688 // functions.  The wrapper is expected to unpack the arguments before
1689 // passing them to the callee and perform checks before and after the
1690 // native call to ensure that the GC_locker
1691 // lock_critical/unlock_critical semantics are followed.  Some other
1692 // parts of JNI setup are skipped, like the tear-down of the JNI handle
1693 // block and the check for pending exceptions, as it's impossible for them
1694 // to be thrown.
1695 //
1696 // They are roughly structured like this:
1697 //   if (GC_locker::needs_gc())
1698 //     SharedRuntime::block_for_jni_critical();
1699 //   transition to thread_in_native
1700 //   unpack array arguments and call native entry point
1701 //   check for safepoint in progress
1702 //   check if any thread suspend flags are set
1703 //     call into the JVM and possibly unlock the JNI critical region
1704 //     if a GC was suppressed while in the critical native.
1705 //   transition back to thread_in_Java
1706 //   return to caller
1707 //
1708 nmethod *SharedRuntime::generate_native_wrapper(MacroAssembler *masm,
1709                                                 const methodHandle& method,
1710                                                 int compile_id,
1711                                                 BasicType *in_sig_bt,
1712                                                 VMRegPair *in_regs,
1713                                                 BasicType ret_type) {
1714 #ifdef COMPILER2
1715   if (method->is_method_handle_intrinsic()) {
1716     vmIntrinsics::ID iid = method->intrinsic_id();
1717     intptr_t start = (intptr_t)__ pc();


New version (GCLocker):

1457     if (in_regs[i].first()->is_FloatRegister()) {
1458       if (in_sig_bt[i] == T_FLOAT) {
1459         int offset = slot * VMRegImpl::stack_slot_size;
1460         slot++;
1461         assert(slot <= stack_slots, "overflow (after FLOAT stack slot)");
1462         if (map != NULL) {
1463           __ stfs(in_regs[i].first()->as_FloatRegister(), offset, R1_SP);
1464         } else {
1465           __ lfs(in_regs[i].first()->as_FloatRegister(), offset, R1_SP);
1466         }
1467       }
1468     } else if (in_regs[i].first()->is_stack()) {
1469       if (in_sig_bt[i] == T_ARRAY && map != NULL) {
1470         int offset_in_older_frame = in_regs[i].first()->reg2stack() + SharedRuntime::out_preserve_stack_slots();
1471         map->set_oop(VMRegImpl::stack2reg(offset_in_older_frame + stack_slots));
1472       }
1473     }
1474   }
1475 }
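
The loop above is the tail of save_or_restore_arguments(), which is deliberately dual-purpose: a non-NULL map means we are saving (stfs spills the float argument, and T_ARRAY stack slots are recorded as oops in the OopMap), while a NULL map means we are restoring (lfs reloads the spill slots). A minimal C++ sketch of that idiom, with simplified stand-in types (Arg, Frame, the OopMap fields) invented for illustration, not the real HotSpot API:

  #include <cstddef>

  // Simplified stand-ins for the HotSpot types used above.
  struct Arg    { bool in_fp_reg; bool on_stack; bool is_array; double fp_value; };
  struct Frame  { double fp_spill[32]; };
  struct OopMap { int oop_slots[32]; int n_oops; };

  // A non-null 'map' means "save and describe oops"; null means "restore".
  void save_or_restore(OopMap* map, Frame& frame, Arg* args, int n) {
    int slot = 0;
    for (int i = 0; i < n; i++) {
      if (args[i].in_fp_reg) {
        if (map != NULL) frame.fp_spill[slot++] = args[i].fp_value; // save (stfs)
        else             args[i].fp_value = frame.fp_spill[slot++]; // restore (lfs)
      } else if (args[i].on_stack && args[i].is_array && map != NULL) {
        map->oop_slots[map->n_oops++] = i; // caller-frame oops only need an OopMap entry
      }
    }
  }

Note that stack-resident array arguments are never copied: they already live in the older frame, so describing their slot in the OopMap is enough for the GC to find (and update) them.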
1476 
1477 // Check GCLocker::needs_gc and enter the runtime if it's true. This
1478 // keeps a new JNI critical region from starting until a GC has been
1479 // forced. Save down any oops in registers and describe them in an
1480 // OopMap.
1481 static void check_needs_gc_for_critical_native(MacroAssembler* masm,
1482                                                const int stack_slots,
1483                                                const int total_in_args,
1484                                                const int arg_save_area,
1485                                                OopMapSet* oop_maps,
1486                                                VMRegPair* in_regs,
1487                                                BasicType* in_sig_bt,
1488                                                Register tmp_reg ) {
1489   __ block_comment("check GCLocker::needs_gc");
1490   Label cont;
1491   __ lbz(tmp_reg, (RegisterOrConstant)(intptr_t)GCLocker::needs_gc_address());
1492   __ cmplwi(CCR0, tmp_reg, 0);
1493   __ beq(CCR0, cont);
1494 
1495   // Save down any values that are live in registers and call into the
1496   // runtime to halt for a GC.
1497   OopMap* map = new OopMap(stack_slots * 2, 0 /* arg_slots*/);
1498   save_or_restore_arguments(masm, stack_slots, total_in_args,
1499                             arg_save_area, map, in_regs, in_sig_bt);
1500 
1501   __ mr(R3_ARG1, R16_thread);
1502   __ set_last_Java_frame(R1_SP, noreg);
1503 
1504   __ block_comment("block_for_jni_critical");
1505   address entry_point = CAST_FROM_FN_PTR(address, SharedRuntime::block_for_jni_critical);
1506 #if defined(ABI_ELFv2)
1507   __ call_c(entry_point, relocInfo::runtime_call_type);
1508 #else
1509   __ call_c(CAST_FROM_FN_PTR(FunctionDescriptor*, entry_point), relocInfo::runtime_call_type);
1510 #endif
1511   address start           = __ pc() - __ offset(),

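In C++ terms, the stub that check_needs_gc_for_critical_native() emits behaves roughly like the sketch below. This is an illustration only: every helper is a hypothetical stand-in, not HotSpot API (gc_locker_needs_gc() for the lbz/cmplwi/beq on *needs_gc_address(), the save/restore helpers for the two save_or_restore_arguments() calls, block_for_jni_critical() for the runtime call that receives R16_thread in R3_ARG1):

  // Pseudocode-as-C++ for the emitted control flow; all helpers are stand-ins.
  static bool gc_locker_needs_gc()         { return false; } // lbz of *needs_gc_address()
  static void save_argument_registers()    {}                // save_or_restore_arguments(..., map, ...)
  static void restore_argument_registers() {}                // save_or_restore_arguments(..., NULL, ...)
  static void block_for_jni_critical()     {}                // SharedRuntime::block_for_jni_critical(thread)

  static void check_needs_gc_sketch() {
    if (!gc_locker_needs_gc()) return; // beq to 'cont' when the flag is clear
    save_argument_registers();         // spill live args; describe oops in a fresh OopMap
    block_for_jni_critical();          // blocks until the forced GC has happened
    restore_argument_registers();      // reload the spilled args and fall through
  }

The #if defined(ABI_ELFv2) split around the call exists because on the ELFv1 ABI a C function pointer points at a function descriptor rather than at code, hence the FunctionDescriptor* cast; ELFv2 calls the entry address directly.
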

1670   }
1671 
1672   // Figure out which address we are really jumping to:
1673   MethodHandles::generate_method_handle_dispatch(masm, iid,
1674                                                  receiver_reg, member_reg, /*for_compiler_entry:*/ true);
1675 }
1676 
1677 #endif // COMPILER2
1678 
1679 // ---------------------------------------------------------------------------
1680 // Generate a native wrapper for a given method. The method takes arguments
1681 // in the Java compiled code convention, marshals them to the native
1682 // convention (handlizes oops, etc), transitions to native, makes the call,
1683 // returns to java state (possibly blocking), unhandlizes any result and
1684 // returns.
1685 //
1686 // Critical native functions are a shorthand for the use of
1687 // GetPrimitiveArrayCritical and disallow the use of any other JNI
1688 // functions.  The wrapper is expected to unpack the arguments before
1689 // passing them to the callee and perform checks before and after the
1690 // native call to ensure that the GCLocker
1691 // lock_critical/unlock_critical semantics are followed.  Some other
1692 // parts of JNI setup are skipped, like the tear-down of the JNI handle
1693 // block and the check for pending exceptions, as it's impossible for them
1694 // to be thrown.
1695 //
1696 // They are roughly structured like this:
1697 //   if (GCLocker::needs_gc())
1698 //     SharedRuntime::block_for_jni_critical();
1699 //   transition to thread_in_native
1700 //   unpack array arguments and call native entry point
1701 //   check for safepoint in progress
1702 //   check if any thread suspend flags are set
1703 //     call into the JVM and possibly unlock the JNI critical region
1704 //     if a GC was suppressed while in the critical native.
1705 //   transition back to thread_in_Java
1706 //   return to caller
1707 //
1708 nmethod *SharedRuntime::generate_native_wrapper(MacroAssembler *masm,
1709                                                 const methodHandle& method,
1710                                                 int compile_id,
1711                                                 BasicType *in_sig_bt,
1712                                                 VMRegPair *in_regs,
1713                                                 BasicType ret_type) {
1714 #ifdef COMPILER2
1715   if (method->is_method_handle_intrinsic()) {
1716     vmIntrinsics::ID iid = method->intrinsic_id();
1717     intptr_t start = (intptr_t)__ pc();

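For context on what the wrapper's callee looks like: a critical native pairs the normal JNI entry point with a JavaCritical_-prefixed variant that HotSpot may bind when -XX:+CriticalJNINatives is enabled. The critical variant must be static and non-synchronized, receives no JNIEnv* or jclass, and gets each array argument unpacked into a (length, pointer) pair, which is exactly the unpacking this wrapper performs. A sketch, with the class and method names invented for illustration:

  #include <jni.h>

  // Java side (hypothetical): class demo.Sums { static native int sum(int[] data); }

  // Standard JNI entry point; always required as the fallback.
  extern "C" JNIEXPORT jint JNICALL
  Java_demo_Sums_sum(JNIEnv* env, jclass, jintArray data) {
    jint n = env->GetArrayLength(data);
    jint* p = (jint*) env->GetPrimitiveArrayCritical(data, NULL);
    jint s = 0;
    for (jint i = 0; i < n; i++) s += p[i];
    env->ReleasePrimitiveArrayCritical(data, p, JNI_ABORT); // read-only: discard, no copy-back
    return s;
  }

  // Critical variant HotSpot can call instead: no env/class, arrays unpacked.
  extern "C" JNIEXPORT jint JNICALL
  JavaCritical_demo_Sums_sum(jint n, jint* p) {
    jint s = 0;
    for (jint i = 0; i < n; i++) s += p[i];
    return s;
  }

While a thread executes the critical variant it holds the GCLocker critical lock, which is why the wrapper consults GCLocker::needs_gc() on entry and may call into the runtime on exit if a GC was suppressed in the meantime.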
