< prev index next >

src/cpu/x86/vm/stubGenerator_x86_64.cpp

Print this page




4958     // Round to zero, 53-bit mode, exceptions masked
4959     StubRoutines::_fpu_cntrl_wrd_trunc = 0x0D7F;
4960     // Round to nearest, 24-bit mode, exceptions masked
4961     StubRoutines::_fpu_cntrl_wrd_24    = 0x007F;
4962     // Round to nearest, 64-bit mode, exceptions masked
4963     StubRoutines::_fpu_cntrl_wrd_64    = 0x037F;
4964     // MXCSR: round to nearest, exceptions masked
4965     StubRoutines::_mxcsr_std           = 0x1F80;
4966     // Note: the following two constants are 80-bit values
4967     //       layout is critical for correct loading by FPU.
4968     // Bias for strict fp multiply/divide
4969     StubRoutines::_fpu_subnormal_bias1[0]= 0x00000000; // 2^(-15360) == 0x03ff 8000 0000 0000 0000
4970     StubRoutines::_fpu_subnormal_bias1[1]= 0x80000000;
4971     StubRoutines::_fpu_subnormal_bias1[2]= 0x03ff;
4972     // Un-Bias for strict fp multiply/divide
4973     StubRoutines::_fpu_subnormal_bias2[0]= 0x00000000; // 2^(+15360) == 0x7bff 8000 0000 0000 0000
4974     StubRoutines::_fpu_subnormal_bias2[1]= 0x80000000;
4975     StubRoutines::_fpu_subnormal_bias2[2]= 0x7bff;
4976   }
4977 












































































































































4978   // Initialization
4979   void generate_initial() {
4980     // Generates all stubs and initializes the entry points
4981 
4982     // These platform-specific settings are needed by generate_call_stub()
4983     create_control_words();
4984 
4985     // entry points that exist in all platforms. Note: This is code
4986     // that could be shared among different platforms - however the
4987     // benefit seems to be smaller than the disadvantage of having a
4988     // much more complicated generator structure. See also comment in
4989     // stubRoutines.hpp.
4990 
4991     StubRoutines::_forward_exception_entry = generate_forward_exception();
4992 
4993     StubRoutines::_call_stub_entry =
4994       generate_call_stub(StubRoutines::_call_stub_return_address);
4995 
4996     // is referenced by megamorphic call
4997     StubRoutines::_catch_exception_entry = generate_catch_exception();


5059       }
5060       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
5061         StubRoutines::_dlog = generate_libmLog();
5062       }
5063       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog10)) {
5064         StubRoutines::_dlog10 = generate_libmLog10();
5065       }
5066       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dpow)) {
5067         StubRoutines::_dpow = generate_libmPow();
5068       }
5069       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
5070         StubRoutines::_dsin = generate_libmSin();
5071       }
5072       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
5073         StubRoutines::_dcos = generate_libmCos();
5074       }
5075       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
5076         StubRoutines::_dtan = generate_libmTan();
5077       }
5078     }



5079   }
5080 
5081   void generate_all() {
5082     // Generates all stubs and initializes the entry points
5083 
5084     // These entry points require SharedInfo::stack0 to be set up in
5085     // non-core builds and need to be relocatable, so they each
5086     // fabricate a RuntimeStub internally.
5087     StubRoutines::_throw_AbstractMethodError_entry =
5088       generate_throw_exception("AbstractMethodError throw_exception",
5089                                CAST_FROM_FN_PTR(address,
5090                                                 SharedRuntime::
5091                                                 throw_AbstractMethodError));
5092 
5093     StubRoutines::_throw_IncompatibleClassChangeError_entry =
5094       generate_throw_exception("IncompatibleClassChangeError throw_exception",
5095                                CAST_FROM_FN_PTR(address,
5096                                                 SharedRuntime::
5097                                                 throw_IncompatibleClassChangeError));
5098 




4958     // Round to zero, 53-bit mode, exceptions masked
4959     StubRoutines::_fpu_cntrl_wrd_trunc = 0x0D7F;
4960     // Round to nearest, 24-bit mode, exceptions masked
4961     StubRoutines::_fpu_cntrl_wrd_24    = 0x007F;
4962     // Round to nearest, 64-bit mode, exceptions masked
4963     StubRoutines::_fpu_cntrl_wrd_64    = 0x037F;
4964     // MXCSR: round to nearest, exceptions masked
4965     StubRoutines::_mxcsr_std           = 0x1F80;
4966     // Note: the following two constants are 80-bit values
4967     //       layout is critical for correct loading by FPU.
4968     // Bias for strict fp multiply/divide
4969     StubRoutines::_fpu_subnormal_bias1[0]= 0x00000000; // 2^(-15360) == 0x03ff 8000 0000 0000 0000
4970     StubRoutines::_fpu_subnormal_bias1[1]= 0x80000000;
4971     StubRoutines::_fpu_subnormal_bias1[2]= 0x03ff;
4972     // Un-Bias for strict fp multiply/divide
4973     StubRoutines::_fpu_subnormal_bias2[0]= 0x00000000; // 2^(+15360) == 0x7bff 8000 0000 0000 0000
4974     StubRoutines::_fpu_subnormal_bias2[1]= 0x80000000;
4975     StubRoutines::_fpu_subnormal_bias2[2]= 0x7bff;
4976   }
4977 
4978   // Call here from the interpreter or compiled code to either load
4979   // multiple returned values from the value type instance being
4980   // returned to registers or to store returned values to a newly
4981   // allocated value type instance.
4982   address generate_return_value_stub(address destination, const char* name, bool has_res) {
         // destination - runtime entry point the stub calls (thread, rax) -> void
         // name        - name for the generated RuntimeStub
         // has_res     - if true, the runtime call produces a result that is
         //               fetched into rax (via get_vm_result) before returning
         // Returns the entry point of the generated stub.
4983     // We need to save all registers the calling convention may use so
4984     // the runtime calls read or update those registers. This needs to
4985     // be in sync with SharedRuntime::java_return_convention().
4986     enum layout {
           // Frame slot offsets, in 32-bit (BytesPerInt) units; each 64-bit
           // register occupies two consecutive slots (the *_2 entries).
4987       pad_off, pad_off_2,
4988       rax_off, rax_off_2,
4989       j_rarg5_off, j_rarg5_2,
4990       j_rarg4_off, j_rarg4_2,
4991       j_rarg3_off, j_rarg3_2,
4992       j_rarg2_off, j_rarg2_2,
4993       j_rarg1_off, j_rarg1_2,
4994       j_rarg0_off, j_rarg0_2,
4995       j_farg0_off, j_farg0_2,
4996       j_farg1_off, j_farg1_2,
4997       j_farg2_off, j_farg2_2,
4998       j_farg3_off, j_farg3_2,
4999       j_farg4_off, j_farg4_2,
5000       j_farg5_off, j_farg5_2,
5001       j_farg6_off, j_farg6_2,
5002       j_farg7_off, j_farg7_2,
5003       rbp_off, rbp_off_2,
5004       return_off, return_off_2,
5005 
5006       framesize
5007     };
5008 
5009     CodeBuffer buffer(name, 1000, 512);
5010     MacroAssembler* masm = new MacroAssembler(&buffer);
5011 
         // Frame must be 16-byte aligned; the assert documents that the layout
         // above already is, so rounding must be a no-op.
5012     int frame_size_in_bytes = round_to(framesize*BytesPerInt, 16);
5013     assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned");
5014     int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
5015     int frame_size_in_words = frame_size_in_bytes / wordSize;
5016 
         // Record where each saved Java return-convention register lives in the
         // frame so GC can locate (and update) any oops across the runtime call.
5017     OopMapSet *oop_maps = new OopMapSet();
5018     OopMap* map = new OopMap(frame_size_in_slots, 0);
5019 
5020     map->set_callee_saved(VMRegImpl::stack2reg(rax_off), rax->as_VMReg());
5021     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
5022     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
5023     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
5024     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
5025     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
5026     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
5027     map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
5028     map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
5029     map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
5030     map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
5031     map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
5032     map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
5033     map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
5034     map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
5035 
5036     int start = __ offset();
5037 
         // The caller's call instruction already pushed the return address
         // (one 8-byte slot, accounted for by return_off), hence the -8.
5038     __ subptr(rsp, frame_size_in_bytes - 8 /* return address*/);
5039 
         // Spill rbp, all FP argument registers, all integer argument
         // registers, and rax into their frame slots.
5040     __ movptr(Address(rsp, rbp_off * BytesPerInt), rbp);
5041     __ movdbl(Address(rsp, j_farg7_off * BytesPerInt), j_farg7);
5042     __ movdbl(Address(rsp, j_farg6_off * BytesPerInt), j_farg6);
5043     __ movdbl(Address(rsp, j_farg5_off * BytesPerInt), j_farg5);
5044     __ movdbl(Address(rsp, j_farg4_off * BytesPerInt), j_farg4);
5045     __ movdbl(Address(rsp, j_farg3_off * BytesPerInt), j_farg3);
5046     __ movdbl(Address(rsp, j_farg2_off * BytesPerInt), j_farg2);
5047     __ movdbl(Address(rsp, j_farg1_off * BytesPerInt), j_farg1);
5048     __ movdbl(Address(rsp, j_farg0_off * BytesPerInt), j_farg0);
5049 
5050     __ movptr(Address(rsp, j_rarg0_off * BytesPerInt), j_rarg0);
5051     __ movptr(Address(rsp, j_rarg1_off * BytesPerInt), j_rarg1);
5052     __ movptr(Address(rsp, j_rarg2_off * BytesPerInt), j_rarg2);
5053     __ movptr(Address(rsp, j_rarg3_off * BytesPerInt), j_rarg3);
5054     __ movptr(Address(rsp, j_rarg4_off * BytesPerInt), j_rarg4);
5055     __ movptr(Address(rsp, j_rarg5_off * BytesPerInt), j_rarg5);
5056     __ movptr(Address(rsp, rax_off * BytesPerInt), rax);
5057 
5058     int frame_complete = __ offset();
5059 
5060     __ set_last_Java_frame(noreg, noreg, NULL);
5061 
         // Call destination(thread, rax). rax presumably carries the value
         // type instance (or buffer) being returned -- see the header comment
         // above this stub; TODO confirm against java_return_convention().
5062     __ mov(c_rarg0, r15_thread);
5063     __ mov(c_rarg1, rax);
5064 
5065     __ call(RuntimeAddress(destination));
5066 
5067     // Set an oopmap for the call site.
5068 
5069     oop_maps->add_gc_map( __ offset() - start, map);
5070 
5071     // clear last_Java_sp
5072     __ reset_last_Java_frame(false);
5073 
         // Reload every register we spilled; the runtime call may have
         // updated the saved values via the oop map.
5074     __ movptr(rbp, Address(rsp, rbp_off * BytesPerInt));
5075     __ movdbl(j_farg7, Address(rsp, j_farg7_off * BytesPerInt));
5076     __ movdbl(j_farg6, Address(rsp, j_farg6_off * BytesPerInt));
5077     __ movdbl(j_farg5, Address(rsp, j_farg5_off * BytesPerInt));
5078     __ movdbl(j_farg4, Address(rsp, j_farg4_off * BytesPerInt));
5079     __ movdbl(j_farg3, Address(rsp, j_farg3_off * BytesPerInt));
5080     __ movdbl(j_farg2, Address(rsp, j_farg2_off * BytesPerInt));
5081     __ movdbl(j_farg1, Address(rsp, j_farg1_off * BytesPerInt));
5082     __ movdbl(j_farg0, Address(rsp, j_farg0_off * BytesPerInt));
5083 
5084     __ movptr(j_rarg0, Address(rsp, j_rarg0_off * BytesPerInt));
5085     __ movptr(j_rarg1, Address(rsp, j_rarg1_off * BytesPerInt));
5086     __ movptr(j_rarg2, Address(rsp, j_rarg2_off * BytesPerInt));
5087     __ movptr(j_rarg3, Address(rsp, j_rarg3_off * BytesPerInt));
5088     __ movptr(j_rarg4, Address(rsp, j_rarg4_off * BytesPerInt));
5089     __ movptr(j_rarg5, Address(rsp, j_rarg5_off * BytesPerInt));
5090     __ movptr(rax, Address(rsp, rax_off * BytesPerInt));
5091 
         // Tear down the frame (again excluding the return-address slot).
5092     __ addptr(rsp, frame_size_in_bytes-8);
5093 
5094     // check for pending exceptions
5095     Label pending;
5096     __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
5097     __ jcc(Assembler::notEqual, pending);
5098 
         // No exception: optionally pick up the call's result before returning.
5099     if (has_res) {
5100       __ get_vm_result(rax, r15_thread);
5101     }
5102 
5103     __ ret(0);
5104 
         // Exception path: load the pending exception into rax and dispatch
         // to the common forward_exception stub.
5105     __ bind(pending);
5106 
5107     __ movptr(rax, Address(r15_thread, Thread::pending_exception_offset()));
5108     __ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
5109 
5110     // -------------
5111     // make sure all code is generated
5112     masm->flush();
5113 
5114     RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, false);
5115     return stub->entry_point();
5116   }
5117 
5118   // Initialization
5119   void generate_initial() {
5120     // Generates all stubs and initializes the entry points
5121 
5122     // These platform-specific settings are needed by generate_call_stub()
5123     create_control_words();
5124 
5125     // entry points that exist in all platforms. Note: This is code
5126     // that could be shared among different platforms - however the
5127     // benefit seems to be smaller than the disadvantage of having a
5128     // much more complicated generator structure. See also comment in
5129     // stubRoutines.hpp.
5130 
5131     StubRoutines::_forward_exception_entry = generate_forward_exception();
5132 
5133     StubRoutines::_call_stub_entry =
5134       generate_call_stub(StubRoutines::_call_stub_return_address);
5135 
5136     // is referenced by megamorphic call
5137     StubRoutines::_catch_exception_entry = generate_catch_exception();


5199       }
5200       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
5201         StubRoutines::_dlog = generate_libmLog();
5202       }
5203       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog10)) {
5204         StubRoutines::_dlog10 = generate_libmLog10();
5205       }
5206       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dpow)) {
5207         StubRoutines::_dpow = generate_libmPow();
5208       }
5209       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
5210         StubRoutines::_dsin = generate_libmSin();
5211       }
5212       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
5213         StubRoutines::_dcos = generate_libmCos();
5214       }
5215       if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
5216         StubRoutines::_dtan = generate_libmTan();
5217       }
5218     }
5219 
5220     StubRoutines::_load_value_type_fields_in_regs = generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_value_type_fields_in_regs), "load_value_type_fields_in_regs", false);
5221     StubRoutines::_store_value_type_fields_to_buf = generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_value_type_fields_to_buf), "store_value_type_fields_to_buf", true);
5222   }
5223 
5224   void generate_all() {
5225     // Generates all stubs and initializes the entry points
5226 
5227     // These entry points require SharedInfo::stack0 to be set up in
5228     // non-core builds and need to be relocatable, so they each
5229     // fabricate a RuntimeStub internally.
5230     StubRoutines::_throw_AbstractMethodError_entry =
5231       generate_throw_exception("AbstractMethodError throw_exception",
5232                                CAST_FROM_FN_PTR(address,
5233                                                 SharedRuntime::
5234                                                 throw_AbstractMethodError));
5235 
5236     StubRoutines::_throw_IncompatibleClassChangeError_entry =
5237       generate_throw_exception("IncompatibleClassChangeError throw_exception",
5238                                CAST_FROM_FN_PTR(address,
5239                                                 SharedRuntime::
5240                                                 throw_IncompatibleClassChangeError));
5241 


< prev index next >