src/cpu/x86/vm/x86_32.ad
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/cpu/x86/vm

src/cpu/x86/vm/x86_32.ad

Print this page




1881       MacroAssembler _masm(&cbuf);
1882       __ vzeroupper();
1883     }
1884     debug_only(int off1 = cbuf.insts_size());
1885     assert(off1 - off0 == pre_call_resets_size(), "correct size prediction");
1886   %}
1887 
// (old version) Encoding emitted after a call returns: if the method being
// compiled runs in 24-bit FP precision mode (Compile::current()->in_24_bit_fp_mode()),
// reload the x87 control word from the canned 24-bit image kept by StubRoutines.
// The callee may have reset the control word; this restores the caller's mode.
1888   enc_class post_call_FPU %{
1889     // If method sets FPU control word do it here also
1890     if (Compile::current()->in_24_bit_fp_mode()) {
1891       MacroAssembler masm(&cbuf);
       // FLDCW loads the x87 FPU control word from memory.
1892       masm.fldcw(ExternalAddress(StubRoutines::addr_fpu_cntrl_wrd_24()));
1893     }
1894   %}
1895 
// (old version) Emits a Java static call: the primary opcode byte (CALL rel32)
// followed by a 32-bit PC-relative displacement. The relocation type depends on
// the call kind:
//   - no _method (runtime stub target)   -> runtime_call_Relocation
//   - _optimized_virtual                 -> opt_virtual_call_Relocation
//   - otherwise (plain static call)      -> static_call_Relocation
// For real Java targets (_method != NULL) a to-interpreter stub is also emitted;
// if the code cache cannot hold the stub, compilation is bailed out.
1896   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
1897     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
1898     // who we intended to call.
1899     cbuf.set_insts_mark();
       // Emit the instruction's primary opcode byte (the CALL opcode).
1900     $$$emit8$primary;

1901     if (!_method) {
       // Displacement is target minus end-of-instruction (insts_end() + 4 for
       // the rel32 field itself), i.e. a PC-relative offset.
1902       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1903                      runtime_call_Relocation::spec(), RELOC_IMM32 );
1904     } else if (_optimized_virtual) {
1905       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1906                      opt_virtual_call_Relocation::spec(), RELOC_IMM32 );
1907     } else {



1908       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1909                      static_call_Relocation::spec(), RELOC_IMM32 );
1910     }
1911     if (_method) {  // Emit stub for static call.
1912       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
       // NULL stub means the code cache is full: record the failure so the
       // compile is abandoned rather than emitting an incomplete call site.
1913       if (stub == NULL) {
1914         ciEnv::current()->record_failure("CodeCache is full");
1915         return;
1916       }
1917     }
1918   %}
1919 
// (old version) Emits a Java dynamic (inline-cache) call via the
// MacroAssembler ic_call() helper. NOTE(review): ic_call presumably emits the
// IC-load + CALL sequence with a virtual_call relocation — confirm against
// macroAssembler_x86.cpp.
1920   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
1921     MacroAssembler _masm(&cbuf);
1922     __ ic_call((address)$meth$$method);
1923   %}
1924 
// (old version) Emits an indirect call through the Method* in EAX:
// CALL *[EAX + Method::from_compiled_offset()]. Uses the one-byte (disp8)
// ModRM addressing form, hence the assert that the offset fits in a signed byte.
1925   enc_class Java_Compiled_Call (method meth) %{    // JAVA COMPILED CALL
1926     int disp = in_bytes(Method::from_compiled_offset());
       // disp8 ModRM form below only encodes [-128, 127].
1927     assert( -128 <= disp && disp <= 127, "compiled_code_offset isn't small");
1928 
1929     // CALL *[EAX+in_bytes(Method::from_compiled_code_entry_point_offset())]
1930     cbuf.set_insts_mark();
       // Primary opcode byte, then ModRM (mod=01: base reg + disp8), then disp8.
1931     $$$emit8$primary;
1932     emit_rm(cbuf, 0x01, $secondary, EAX_enc );  // R/M byte
1933     emit_d8(cbuf, disp);             // Displacement
1934 
1935   %}
1936 
1937 //   Following encoding is no longer used, but may be restored if calling
1938 //   convention changes significantly.
1939 //   Became: Xor_Reg(EBP), Java_To_Runtime( labl )
1940 //
1941 //   enc_class Java_Interpreter_Call (label labl) %{    // JAVA INTERPRETER CALL
1942 //     // int ic_reg     = Matcher::inline_cache_reg();




1881       MacroAssembler _masm(&cbuf);
1882       __ vzeroupper();
1883     }
1884     debug_only(int off1 = cbuf.insts_size());
1885     assert(off1 - off0 == pre_call_resets_size(), "correct size prediction");
1886   %}
1887 
// (new version; unchanged from old) Encoding emitted after a call returns:
// if the method being compiled runs in 24-bit FP precision mode
// (Compile::current()->in_24_bit_fp_mode()), reload the x87 control word from
// the canned 24-bit image kept by StubRoutines, since the callee may have
// reset it.
1888   enc_class post_call_FPU %{
1889     // If method sets FPU control word do it here also
1890     if (Compile::current()->in_24_bit_fp_mode()) {
1891       MacroAssembler masm(&cbuf);
       // FLDCW loads the x87 FPU control word from memory.
1892       masm.fldcw(ExternalAddress(StubRoutines::addr_fpu_cntrl_wrd_24()));
1893     }
1894   %}
1895 
// (new version) Emits a Java static call: primary opcode byte (CALL rel32) plus
// a 32-bit PC-relative displacement. Changes vs. the old version:
//   - Java-target relocations now carry resolved_method_index(cbuf), so the
//     relocation records which method is being called.
//   - The opt-virtual/static choice is folded into one RelocationHolder.
//   - Java-target calls use RELOC_DISP32 (runtime calls keep RELOC_IMM32).
//   - The to-interpreter stub is emitted inside the _method branch instead of
//     via a second "if (_method)" check.
1896   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
1897     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
1898     // who we intended to call.
1899     cbuf.set_insts_mark();
       // Emit the instruction's primary opcode byte (the CALL opcode).
1900     $$$emit8$primary;
1901 
1902     if (!_method) {
       // Displacement = target - end of instruction (insts_end() + 4 for the
       // rel32 field), i.e. a PC-relative offset.
1903       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1904                      runtime_call_Relocation::spec(),
1905                      RELOC_IMM32);


1906     } else {
1907       int method_index = resolved_method_index(cbuf);
       // Same call encoding either way; only the relocation kind differs.
1908       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
1909                                                   : static_call_Relocation::spec(method_index);
1910       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1911                      rspec, RELOC_DISP32);
1912       // Emit stubs for static call.

1913       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
       // NULL stub means the code cache is full: bail out of this compile
       // rather than leaving an incomplete call site.
1914       if (stub == NULL) {
1915         ciEnv::current()->record_failure("CodeCache is full");
1916         return;
1917       }
1918     }
1919   %}
1920 
// (new version) Emits a Java dynamic (inline-cache) call via ic_call(); unlike
// the old version it also passes resolved_method_index(cbuf) so the call-site
// relocation records the callee method. NOTE(review): ic_call presumably emits
// the IC-load + CALL sequence with a virtual_call relocation — confirm against
// macroAssembler_x86.cpp.
1921   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
1922     MacroAssembler _masm(&cbuf);
1923     __ ic_call((address)$meth$$method, resolved_method_index(cbuf));
1924   %}
1925 
// (new version; unchanged from old) Emits an indirect call through the Method*
// in EAX: CALL *[EAX + Method::from_compiled_offset()]. Uses the one-byte
// (disp8) ModRM addressing form, hence the assert that the offset fits in a
// signed byte.
1926   enc_class Java_Compiled_Call (method meth) %{    // JAVA COMPILED CALL
1927     int disp = in_bytes(Method::from_compiled_offset());
       // disp8 ModRM form below only encodes [-128, 127].
1928     assert( -128 <= disp && disp <= 127, "compiled_code_offset isn't small");
1929 
1930     // CALL *[EAX+in_bytes(Method::from_compiled_code_entry_point_offset())]
1931     cbuf.set_insts_mark();
       // Primary opcode byte, then ModRM (mod=01: base reg + disp8), then disp8.
1932     $$$emit8$primary;
1933     emit_rm(cbuf, 0x01, $secondary, EAX_enc );  // R/M byte
1934     emit_d8(cbuf, disp);             // Displacement
1935 
1936   %}
1937 
1938 //   Following encoding is no longer used, but may be restored if calling
1939 //   convention changes significantly.
1940 //   Became: Xor_Reg(EBP), Java_To_Runtime( labl )
1941 //
1942 //   enc_class Java_Interpreter_Call (label labl) %{    // JAVA INTERPRETER CALL
1943 //     // int ic_reg     = Matcher::inline_cache_reg();


src/cpu/x86/vm/x86_32.ad
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File