318 __ movl(c_rarg1, c_rarg3); // parameter counter is in c_rarg1
319 __ BIND(loop);
320 __ movptr(rax, Address(c_rarg2, 0));// get parameter
321 __ addptr(c_rarg2, wordSize); // advance to next parameter
322 __ decrementl(c_rarg1); // decrement counter
323 __ push(rax); // pass parameter
324 __ jcc(Assembler::notZero, loop);
325
326 // call Java function
327 __ BIND(parameters_done);
328 __ movptr(rbx, method); // get Method*
329 __ movptr(c_rarg1, entry_point); // get entry_point
330 __ mov(r13, rsp); // set sender sp
331 BLOCK_COMMENT("call Java function");
332 __ call(c_rarg1);
333
334 BLOCK_COMMENT("call_stub_return_address:");
335 return_address = __ pc();
336
337 // store result depending on type (everything that is not
338 // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
339 __ movptr(c_rarg0, result);
340 Label is_long, is_float, is_double, exit;
341 __ movl(c_rarg1, result_type);
342 __ cmpl(c_rarg1, T_OBJECT);
343 __ jcc(Assembler::equal, is_long);
344 __ cmpl(c_rarg1, T_LONG);
345 __ jcc(Assembler::equal, is_long);
346 __ cmpl(c_rarg1, T_FLOAT);
347 __ jcc(Assembler::equal, is_float);
348 __ cmpl(c_rarg1, T_DOUBLE);
349 __ jcc(Assembler::equal, is_double);
350
351 // handle T_INT case
352 __ movl(Address(c_rarg0, 0), rax);
353
354 __ BIND(exit);
355
356 // pop parameters
357 __ lea(rsp, rsp_after_call);
358
359 #ifdef ASSERT
360 // verify that threads correspond
361 {
362 Label L1, L2, L3;
363 __ cmpptr(r15_thread, thread);
982 // Non-destructive plausibility checks for oops
983 //
984 // Arguments:
985 // all args on stack!
986 //
987 // Stack after saving c_rarg3:
988 // [tos + 0]: saved c_rarg3
989 // [tos + 1]: saved c_rarg2
990 // [tos + 2]: saved r12 (several TemplateTable methods use it)
991 // [tos + 3]: saved flags
992 // [tos + 4]: return address
993 // * [tos + 5]: error message (char*)
994 // * [tos + 6]: object to verify (oop)
995 // * [tos + 7]: saved rax - saved by caller and bashed
996 // * [tos + 8]: saved r10 (rscratch1) - saved by caller
997 // * = popped on exit
998 address generate_verify_oop() {
999 StubCodeMark mark(this, "StubRoutines", "verify_oop");
1000 address start = __ pc();
1001
1002 Label exit, error;
1003
1004 __ pushf();
1005 __ incrementl(ExternalAddress((address) StubRoutines::verify_oop_count_addr()));
1006
1007 __ push(r12);
1008
1009 // save c_rarg2 and c_rarg3
1010 __ push(c_rarg2);
1011 __ push(c_rarg3);
1012
1013 enum {
1014 // After previous pushes.
1015 oop_to_verify = 6 * wordSize,
1016 saved_rax = 7 * wordSize,
1017 saved_r10 = 8 * wordSize,
1018
1019 // Before the call to MacroAssembler::debug(), see below.
1020 return_addr = 16 * wordSize,
1021 error_msg = 17 * wordSize
1022 };
1025 __ movptr(rax, Address(rsp, oop_to_verify));
1026
1027 // make sure object is 'reasonable'
1028 __ testptr(rax, rax);
1029 __ jcc(Assembler::zero, exit); // if obj is NULL it is OK
1030
1031 #if INCLUDE_ZGC
1032 if (UseZGC) {
1033 // Check if metadata bits indicate a bad oop
1034 __ testptr(rax, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
1035 __ jcc(Assembler::notZero, error);
1036 }
1037 #endif
1038
1039 // Check if the oop is in the right area of memory
1040 __ movptr(c_rarg2, rax);
1041 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_mask());
1042 __ andptr(c_rarg2, c_rarg3);
1043 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_bits());
1044 __ cmpptr(c_rarg2, c_rarg3);
1045 __ jcc(Assembler::notZero, error);
1046
1047 // set r12 to heapbase for load_klass()
1048 __ reinit_heapbase();
1049
1050 // make sure klass is 'reasonable', which is not zero.
1051 __ load_klass(rax, rax); // get klass
1052 __ testptr(rax, rax);
1053 __ jcc(Assembler::zero, error); // if klass is NULL it is broken
1054
1055 // return if everything seems ok
1056 __ bind(exit);
1057 __ movptr(rax, Address(rsp, saved_rax)); // get saved rax back
1058 __ movptr(rscratch1, Address(rsp, saved_r10)); // get saved r10 back
1059 __ pop(c_rarg3); // restore c_rarg3
1060 __ pop(c_rarg2); // restore c_rarg2
1061 __ pop(r12); // restore r12
1062 __ popf(); // restore flags
1063 __ ret(4 * wordSize); // pop caller saved stuff
1064
1065 // handle errors
5717 // Round to nearest, 53-bit mode, exceptions masked
5718 StubRoutines::_fpu_cntrl_wrd_std = 0x027F;
5719 // Round to zero, 53-bit mode, exceptions masked
5720 StubRoutines::_fpu_cntrl_wrd_trunc = 0x0D7F;
5721 // Round to nearest, 24-bit mode, exceptions masked
5722 StubRoutines::_fpu_cntrl_wrd_24 = 0x007F;
5723 // Round to nearest, 64-bit mode, exceptions masked
5724 StubRoutines::_mxcsr_std = 0x1F80;
5725 // Note: the following two constants are 80-bit values
5726 // layout is critical for correct loading by FPU.
5727 // Bias for strict fp multiply/divide
5728 StubRoutines::_fpu_subnormal_bias1[0]= 0x00000000; // 2^(-15360) == 0x03ff 8000 0000 0000 0000
5729 StubRoutines::_fpu_subnormal_bias1[1]= 0x80000000;
5730 StubRoutines::_fpu_subnormal_bias1[2]= 0x03ff;
5731 // Un-Bias for strict fp multiply/divide
5732 StubRoutines::_fpu_subnormal_bias2[0]= 0x00000000; // 2^(+15360) == 0x7bff 8000 0000 0000 0000
5733 StubRoutines::_fpu_subnormal_bias2[1]= 0x80000000;
5734 StubRoutines::_fpu_subnormal_bias2[2]= 0x7bff;
5735 }
5736
5737 // Initialization
5738 void generate_initial() {
5739 // Generates all stubs and initializes the entry points
5740
5741 // These platform-specific settings are needed by generate_call_stub()
5742 create_control_words();
5743
5744 // entry points that exist in all platforms Note: This is code
5745 // that could be shared among different platforms - however the
5746 // benefit seems to be smaller than the disadvantage of having a
5747 // much more complicated generator structure. See also comment in
5748 // stubRoutines.hpp.
5749
5750 StubRoutines::_forward_exception_entry = generate_forward_exception();
5751
5752 StubRoutines::_call_stub_entry =
5753 generate_call_stub(StubRoutines::_call_stub_return_address);
5754
5755 // is referenced by megamorphic call
5756 StubRoutines::_catch_exception_entry = generate_catch_exception();
5818 }
5819 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
5820 StubRoutines::_dlog = generate_libmLog();
5821 }
5822 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog10)) {
5823 StubRoutines::_dlog10 = generate_libmLog10();
5824 }
5825 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dpow)) {
5826 StubRoutines::_dpow = generate_libmPow();
5827 }
5828 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
5829 StubRoutines::_dsin = generate_libmSin();
5830 }
5831 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
5832 StubRoutines::_dcos = generate_libmCos();
5833 }
5834 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
5835 StubRoutines::_dtan = generate_libmTan();
5836 }
5837 }
5838 }
5839
5840 void generate_all() {
5841 // Generates all stubs and initializes the entry points
5842
5843 // These entry points require SharedInfo::stack0 to be set up in
5844 // non-core builds and need to be relocatable, so they each
5845 // fabricate a RuntimeStub internally.
5846 StubRoutines::_throw_AbstractMethodError_entry =
5847 generate_throw_exception("AbstractMethodError throw_exception",
5848 CAST_FROM_FN_PTR(address,
5849 SharedRuntime::
5850 throw_AbstractMethodError));
5851
5852 StubRoutines::_throw_IncompatibleClassChangeError_entry =
5853 generate_throw_exception("IncompatibleClassChangeError throw_exception",
5854 CAST_FROM_FN_PTR(address,
5855 SharedRuntime::
5856 throw_IncompatibleClassChangeError));
5857
|
318 __ movl(c_rarg1, c_rarg3); // parameter counter is in c_rarg1
319 __ BIND(loop);
320 __ movptr(rax, Address(c_rarg2, 0));// get parameter
321 __ addptr(c_rarg2, wordSize); // advance to next parameter
322 __ decrementl(c_rarg1); // decrement counter
323 __ push(rax); // pass parameter
324 __ jcc(Assembler::notZero, loop);
325
326 // call Java function
327 __ BIND(parameters_done);
328 __ movptr(rbx, method); // get Method*
329 __ movptr(c_rarg1, entry_point); // get entry_point
330 __ mov(r13, rsp); // set sender sp
331 BLOCK_COMMENT("call Java function");
332 __ call(c_rarg1);
333
334 BLOCK_COMMENT("call_stub_return_address:");
335 return_address = __ pc();
336
337 // store result depending on type (everything that is not
338 // T_OBJECT, T_VALUETYPE, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
339 __ movptr(c_rarg0, result);
340 Label is_long, is_float, is_double, exit;
341 __ movl(c_rarg1, result_type);
342 __ cmpl(c_rarg1, T_OBJECT);
343 __ jcc(Assembler::equal, is_long);
344 __ cmpl(c_rarg1, T_VALUETYPE);
345 __ jcc(Assembler::equal, is_long);
346 __ cmpl(c_rarg1, T_LONG);
347 __ jcc(Assembler::equal, is_long);
348 __ cmpl(c_rarg1, T_FLOAT);
349 __ jcc(Assembler::equal, is_float);
350 __ cmpl(c_rarg1, T_DOUBLE);
351 __ jcc(Assembler::equal, is_double);
352
353 // handle T_INT case
354 __ movl(Address(c_rarg0, 0), rax);
355
356 __ BIND(exit);
357
358 // pop parameters
359 __ lea(rsp, rsp_after_call);
360
361 #ifdef ASSERT
362 // verify that threads correspond
363 {
364 Label L1, L2, L3;
365 __ cmpptr(r15_thread, thread);
984 // Non-destructive plausibility checks for oops
985 //
986 // Arguments:
987 // all args on stack!
988 //
989 // Stack after saving c_rarg3:
990 // [tos + 0]: saved c_rarg3
991 // [tos + 1]: saved c_rarg2
992 // [tos + 2]: saved r12 (several TemplateTable methods use it)
993 // [tos + 3]: saved flags
994 // [tos + 4]: return address
995 // * [tos + 5]: error message (char*)
996 // * [tos + 6]: object to verify (oop)
997 // * [tos + 7]: saved rax - saved by caller and bashed
998 // * [tos + 8]: saved r10 (rscratch1) - saved by caller
999 // * = popped on exit
1000 address generate_verify_oop() {
1001 StubCodeMark mark(this, "StubRoutines", "verify_oop");
1002 address start = __ pc();
1003
1004 Label exit, error, in_Java_heap;
1005
1006 __ pushf();
1007 __ incrementl(ExternalAddress((address) StubRoutines::verify_oop_count_addr()));
1008
1009 __ push(r12);
1010
1011 // save c_rarg2 and c_rarg3
1012 __ push(c_rarg2);
1013 __ push(c_rarg3);
1014
1015 enum {
1016 // After previous pushes.
1017 oop_to_verify = 6 * wordSize,
1018 saved_rax = 7 * wordSize,
1019 saved_r10 = 8 * wordSize,
1020
1021 // Before the call to MacroAssembler::debug(), see below.
1022 return_addr = 16 * wordSize,
1023 error_msg = 17 * wordSize
1024 };
1027 __ movptr(rax, Address(rsp, oop_to_verify));
1028
1029 // make sure object is 'reasonable'
1030 __ testptr(rax, rax);
1031 __ jcc(Assembler::zero, exit); // if obj is NULL it is OK
1032
1033 #if INCLUDE_ZGC
1034 if (UseZGC) {
1035 // Check if metadata bits indicate a bad oop
1036 __ testptr(rax, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
1037 __ jcc(Assembler::notZero, error);
1038 }
1039 #endif
1040
1041 // Check if the oop is in the right area of memory
1042 __ movptr(c_rarg2, rax);
1043 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_mask());
1044 __ andptr(c_rarg2, c_rarg3);
1045 __ movptr(c_rarg3, (intptr_t) Universe::verify_oop_bits());
1046 __ cmpptr(c_rarg2, c_rarg3);
1047 __ jcc(Assembler::zero, in_Java_heap);
1048 // Not in Java heap, but could be valid if it's a bufferable value type
1049 __ load_klass(c_rarg2, rax);
1050 __ movbool(c_rarg2, Address(c_rarg2, InstanceKlass::extra_flags_offset()));
1051 __ andptr(c_rarg2, InstanceKlass::_extra_is_bufferable);
1052 __ testbool(c_rarg2);
1053 __ jcc(Assembler::zero, error);
1054 __ bind(in_Java_heap);
1055
1056 // set r12 to heapbase for load_klass()
1057 __ reinit_heapbase();
1058
1059 // make sure klass is 'reasonable', which is not zero.
1060 __ load_klass(rax, rax); // get klass
1061 __ testptr(rax, rax);
1062 __ jcc(Assembler::zero, error); // if klass is NULL it is broken
1063
1064 // return if everything seems ok
1065 __ bind(exit);
1066 __ movptr(rax, Address(rsp, saved_rax)); // get saved rax back
1067 __ movptr(rscratch1, Address(rsp, saved_r10)); // get saved r10 back
1068 __ pop(c_rarg3); // restore c_rarg3
1069 __ pop(c_rarg2); // restore c_rarg2
1070 __ pop(r12); // restore r12
1071 __ popf(); // restore flags
1072 __ ret(4 * wordSize); // pop caller saved stuff
1073
1074 // handle errors
5726 // Round to nearest, 53-bit mode, exceptions masked
5727 StubRoutines::_fpu_cntrl_wrd_std = 0x027F;
5728 // Round to zero, 53-bit mode, exceptions masked
5729 StubRoutines::_fpu_cntrl_wrd_trunc = 0x0D7F;
5730 // Round to nearest, 24-bit mode, exceptions masked
5731 StubRoutines::_fpu_cntrl_wrd_24 = 0x007F;
5732 // Round to nearest, 64-bit mode, exceptions masked
5733 StubRoutines::_mxcsr_std = 0x1F80;
5734 // Note: the following two constants are 80-bit values
5735 // layout is critical for correct loading by FPU.
5736 // Bias for strict fp multiply/divide
5737 StubRoutines::_fpu_subnormal_bias1[0]= 0x00000000; // 2^(-15360) == 0x03ff 8000 0000 0000 0000
5738 StubRoutines::_fpu_subnormal_bias1[1]= 0x80000000;
5739 StubRoutines::_fpu_subnormal_bias1[2]= 0x03ff;
5740 // Un-Bias for strict fp multiply/divide
5741 StubRoutines::_fpu_subnormal_bias2[0]= 0x00000000; // 2^(+15360) == 0x7bff 8000 0000 0000 0000
5742 StubRoutines::_fpu_subnormal_bias2[1]= 0x80000000;
5743 StubRoutines::_fpu_subnormal_bias2[2]= 0x7bff;
5744 }
5745
5746 // Call here from the interpreter or compiled code to either load
5747 // multiple returned values from the value type instance being
5748 // returned to registers or to store returned values to a newly
5749 // allocated value type instance.
5750 address generate_return_value_stub(address destination, const char* name, bool has_res) {
// Generates a RuntimeStub that spills every Java return-convention
// register to a new frame, calls 'destination' as a leaf-like runtime
// entry with (current thread, rax), restores all of those registers,
// and forwards any pending exception raised by the call.
//   destination - runtime entry point to invoke.  NOTE(review): assumed
//                 to take (JavaThread*, value-in-rax) — confirm against
//                 SharedRuntime::load/store_value_type_fields* signatures.
//   name        - stub name, used for the CodeBuffer and the RuntimeStub.
//   has_res     - when true, the runtime produced an oop result; reload
//                 it from the thread's vm_result into rax after the call.
5751 // We need to save all registers the calling convention may use so
5752 // the runtime calls read or update those registers. This needs to
5753 // be in sync with SharedRuntime::java_return_convention().
// Frame layout expressed in 32-bit (BytesPerInt) slots; each 64-bit
// register occupies two consecutive slots (hence the *_2 entries).
5754 enum layout {
5755 pad_off = frame::arg_reg_save_area_bytes/BytesPerInt, pad_off_2, // keep the C arg-register save area below the spill slots
5756 rax_off, rax_off_2,
5757 j_rarg5_off, j_rarg5_2,
5758 j_rarg4_off, j_rarg4_2,
5759 j_rarg3_off, j_rarg3_2,
5760 j_rarg2_off, j_rarg2_2,
5761 j_rarg1_off, j_rarg1_2,
5762 j_rarg0_off, j_rarg0_2,
5763 j_farg0_off, j_farg0_2,
5764 j_farg1_off, j_farg1_2,
5765 j_farg2_off, j_farg2_2,
5766 j_farg3_off, j_farg3_2,
5767 j_farg4_off, j_farg4_2,
5768 j_farg5_off, j_farg5_2,
5769 j_farg6_off, j_farg6_2,
5770 j_farg7_off, j_farg7_2,
5771 rbp_off, rbp_off_2,
5772 return_off, return_off_2,
5773
5774 framesize // total frame size in 32-bit slots
5775 };
5776
5777 CodeBuffer buffer(name, 1000, 512); // 1000 bytes of code, 512 of relocation info
5778 MacroAssembler* masm = new MacroAssembler(&buffer);
5779
5780 int frame_size_in_bytes = align_up(framesize*BytesPerInt, 16);
5781 assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned"); // layout above must already be 16-byte aligned
5782 int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
5783 int frame_size_in_words = frame_size_in_bytes / wordSize;
5784
5785 OopMapSet *oop_maps = new OopMapSet();
5786 OopMap* map = new OopMap(frame_size_in_slots, 0);
5787
// Record the spill slot of every Java argument/return register so the
// GC can locate (and update) any oops held in them while we are inside
// the runtime call.
5788 map->set_callee_saved(VMRegImpl::stack2reg(rax_off), rax->as_VMReg());
5789 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
5790 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
5791 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
5792 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
5793 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
5794 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
5795 map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
5796 map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
5797 map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
5798 map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
5799 map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
5800 map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
5801 map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
5802 map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
5803
5804 int start = __ offset();
5805
// Carve out the frame; the caller's call instruction already pushed
// the 8-byte return address, hence the subtraction.
5806 __ subptr(rsp, frame_size_in_bytes - 8 /* return address*/);
5807
// Spill rbp and every FP/GP register of the Java return convention.
5808 __ movptr(Address(rsp, rbp_off * BytesPerInt), rbp);
5809 __ movdbl(Address(rsp, j_farg7_off * BytesPerInt), j_farg7);
5810 __ movdbl(Address(rsp, j_farg6_off * BytesPerInt), j_farg6);
5811 __ movdbl(Address(rsp, j_farg5_off * BytesPerInt), j_farg5);
5812 __ movdbl(Address(rsp, j_farg4_off * BytesPerInt), j_farg4);
5813 __ movdbl(Address(rsp, j_farg3_off * BytesPerInt), j_farg3);
5814 __ movdbl(Address(rsp, j_farg2_off * BytesPerInt), j_farg2);
5815 __ movdbl(Address(rsp, j_farg1_off * BytesPerInt), j_farg1);
5816 __ movdbl(Address(rsp, j_farg0_off * BytesPerInt), j_farg0);
5817
5818 __ movptr(Address(rsp, j_rarg0_off * BytesPerInt), j_rarg0);
5819 __ movptr(Address(rsp, j_rarg1_off * BytesPerInt), j_rarg1);
5820 __ movptr(Address(rsp, j_rarg2_off * BytesPerInt), j_rarg2);
5821 __ movptr(Address(rsp, j_rarg3_off * BytesPerInt), j_rarg3);
5822 __ movptr(Address(rsp, j_rarg4_off * BytesPerInt), j_rarg4);
5823 __ movptr(Address(rsp, j_rarg5_off * BytesPerInt), j_rarg5);
5824 __ movptr(Address(rsp, rax_off * BytesPerInt), rax);
5825
5826 int frame_complete = __ offset();
5827
5828 __ set_last_Java_frame(noreg, noreg, NULL); // make this frame walkable by the runtime/GC
5829
5830 __ mov(c_rarg0, r15_thread); // C arg 0: current JavaThread
5831 __ mov(c_rarg1, rax); // C arg 1: incoming rax -- NOTE(review): presumably the value oop being returned/buffered; confirm with 'destination'
5832
5833 __ call(RuntimeAddress(destination));
5834
5835 // Set an oopmap for the call site.
5836
5837 oop_maps->add_gc_map( __ offset() - start, map);
5838
5839 // clear last_Java_sp
5840 __ reset_last_Java_frame(false);
5841
// Reload rbp and all spilled return-convention registers; the runtime
// (via the oopmap) may have updated the oops among them.
5842 __ movptr(rbp, Address(rsp, rbp_off * BytesPerInt));
5843 __ movdbl(j_farg7, Address(rsp, j_farg7_off * BytesPerInt));
5844 __ movdbl(j_farg6, Address(rsp, j_farg6_off * BytesPerInt));
5845 __ movdbl(j_farg5, Address(rsp, j_farg5_off * BytesPerInt));
5846 __ movdbl(j_farg4, Address(rsp, j_farg4_off * BytesPerInt));
5847 __ movdbl(j_farg3, Address(rsp, j_farg3_off * BytesPerInt));
5848 __ movdbl(j_farg2, Address(rsp, j_farg2_off * BytesPerInt));
5849 __ movdbl(j_farg1, Address(rsp, j_farg1_off * BytesPerInt));
5850 __ movdbl(j_farg0, Address(rsp, j_farg0_off * BytesPerInt));
5851
5852 __ movptr(j_rarg0, Address(rsp, j_rarg0_off * BytesPerInt));
5853 __ movptr(j_rarg1, Address(rsp, j_rarg1_off * BytesPerInt));
5854 __ movptr(j_rarg2, Address(rsp, j_rarg2_off * BytesPerInt));
5855 __ movptr(j_rarg3, Address(rsp, j_rarg3_off * BytesPerInt));
5856 __ movptr(j_rarg4, Address(rsp, j_rarg4_off * BytesPerInt));
5857 __ movptr(j_rarg5, Address(rsp, j_rarg5_off * BytesPerInt));
5858 __ movptr(rax, Address(rsp, rax_off * BytesPerInt));
5859
5860 __ addptr(rsp, frame_size_in_bytes-8); // tear the frame down; the return address stays on the stack
5861
5862 // check for pending exceptions
5863 Label pending;
5864 __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
5865 __ jcc(Assembler::notEqual, pending);
5866
5867 if (has_res) {
5868 __ get_vm_result(rax, r15_thread); // overwrite restored rax with the oop result the runtime left in the thread
5869 }
5870
5871 __ ret(0);
5872
5873 __ bind(pending);
5874
// The runtime call raised an exception: load it into rax and let the
// forward_exception stub unwind to the appropriate handler.
5875 __ movptr(rax, Address(r15_thread, Thread::pending_exception_offset()));
5876 __ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
5877
5878 // -------------
5879 // make sure all code is generated
5880 masm->flush();
5881
5882 RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, false);
5883 return stub->entry_point();
5884 }
5885
5886 // Initialization
5887 void generate_initial() {
5888 // Generates all stubs and initializes the entry points
5889
5890 // These platform-specific settings are needed by generate_call_stub()
5891 create_control_words();
5892
5893 // entry points that exist in all platforms Note: This is code
5894 // that could be shared among different platforms - however the
5895 // benefit seems to be smaller than the disadvantage of having a
5896 // much more complicated generator structure. See also comment in
5897 // stubRoutines.hpp.
5898
5899 StubRoutines::_forward_exception_entry = generate_forward_exception();
5900
5901 StubRoutines::_call_stub_entry =
5902 generate_call_stub(StubRoutines::_call_stub_return_address);
5903
5904 // is referenced by megamorphic call
5905 StubRoutines::_catch_exception_entry = generate_catch_exception();
5967 }
5968 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
5969 StubRoutines::_dlog = generate_libmLog();
5970 }
5971 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog10)) {
5972 StubRoutines::_dlog10 = generate_libmLog10();
5973 }
5974 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dpow)) {
5975 StubRoutines::_dpow = generate_libmPow();
5976 }
5977 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
5978 StubRoutines::_dsin = generate_libmSin();
5979 }
5980 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
5981 StubRoutines::_dcos = generate_libmCos();
5982 }
5983 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
5984 StubRoutines::_dtan = generate_libmTan();
5985 }
5986 }
5987
5988 StubRoutines::_load_value_type_fields_in_regs = generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_value_type_fields_in_regs), "load_value_type_fields_in_regs", false);
5989 StubRoutines::_store_value_type_fields_to_buf = generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_value_type_fields_to_buf), "store_value_type_fields_to_buf", true);
5990 }
5991
5992 void generate_all() {
5993 // Generates all stubs and initializes the entry points
5994
5995 // These entry points require SharedInfo::stack0 to be set up in
5996 // non-core builds and need to be relocatable, so they each
5997 // fabricate a RuntimeStub internally.
5998 StubRoutines::_throw_AbstractMethodError_entry =
5999 generate_throw_exception("AbstractMethodError throw_exception",
6000 CAST_FROM_FN_PTR(address,
6001 SharedRuntime::
6002 throw_AbstractMethodError));
6003
6004 StubRoutines::_throw_IncompatibleClassChangeError_entry =
6005 generate_throw_exception("IncompatibleClassChangeError throw_exception",
6006 CAST_FROM_FN_PTR(address,
6007 SharedRuntime::
6008 throw_IncompatibleClassChangeError));
6009
|