src/cpu/x86/vm/x86_32.ad
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/cpu/x86/vm

src/cpu/x86/vm/x86_32.ad

Print this page




1883       MacroAssembler _masm(&cbuf);
1884       __ vzeroupper();
1885     }
1886     debug_only(int off1 = cbuf.insts_size());
1887     assert(off1 - off0 == pre_call_resets_size(), "correct size prediction");
1888   %}
1889 
1890   enc_class post_call_FPU %{
1891     // If method sets FPU control word do it here also
1892     if (Compile::current()->in_24_bit_fp_mode()) {
           // This method runs with a modified x87 control word (the stub-routine
           // name indicates a 24-bit precision image); after the call, reload it
           // from the preset image kept by StubRoutines.
1893       MacroAssembler masm(&cbuf);
1894       masm.fldcw(ExternalAddress(StubRoutines::addr_fpu_cntrl_wrd_24()));
1895     }
1896   %}
1897 
       // Emits a direct (PC-relative) static Java call. The 32-bit displacement
       // carries a relocation so the runtime can later patch the call target.
1898   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
1899     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
1900     // who we intended to call.
1901     cbuf.set_insts_mark();
         // Emit the one-byte primary opcode declared for this instruct (ADL macro).
1902     $$$emit8$primary;

         // Choose the relocation type by call kind. The displacement is
         // target - end_of_instruction: insts_end() is the position before the
         // 4-byte displacement, hence the extra "- 4".
1903     if (!_method) {
           // No Java method: this is a call into the runtime.
1904       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1905                      runtime_call_Relocation::spec(), RELOC_IMM32 );
1906     } else if (_optimized_virtual) {
           // Statically bound virtual call.
1907       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1908                      opt_virtual_call_Relocation::spec(), RELOC_IMM32 );
1909     } else {



           // Ordinary static call.
1910       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1911                      static_call_Relocation::spec(), RELOC_IMM32 );
1912     }
1913     if (_method) {  // Emit stub for static call.
           // Java calls also need a to-interpreter stub; if the code cache is
           // full the stub cannot be emitted, so fail this compile and bail out.
1914       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
1915       if (stub == NULL) {
1916         ciEnv::current()->record_failure("CodeCache is full");
1917         return;
1918       } 
1919     }
1920   %}
1921 
       // Emits a virtual Java call through the inline-cache machinery;
       // MacroAssembler::ic_call produces the call plus its IC relocation.
1922   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
1923     MacroAssembler _masm(&cbuf);
1924     __ ic_call((address)$meth$$method);
1925   %}
1926 
       // Emits an indirect call through the Method* held in EAX:
       // CALL *[EAX + Method::from_compiled_offset()].
1927   enc_class Java_Compiled_Call (method meth) %{    // JAVA COMPILED CALL
1928     int disp = in_bytes(Method::from_compiled_offset());
         // The disp8 addressing form below only holds an 8-bit displacement.
1929     assert( -128 <= disp && disp <= 127, "compiled_code_offset isn't small");
1930 
1931     // CALL *[EAX+in_bytes(Method::from_compiled_code_entry_point_offset())]
1932     cbuf.set_insts_mark();
         // Primary opcode byte for this instruct (ADL macro).
1933     $$$emit8$primary;
         // ModRM: mod=01 ([reg]+disp8), reg field = $secondary opcode extension,
         // r/m = EAX.
1934     emit_rm(cbuf, 0x01, $secondary, EAX_enc );  // R/M byte
1935     emit_d8(cbuf, disp);             // Displacement
1936 
1937   %}
1938 
1939 //   Following encoding is no longer used, but may be restored if calling
1940 //   convention changes significantly.
1941 //   Became: Xor_Reg(EBP), Java_To_Runtime( labl )
1942 //
1943 //   enc_class Java_Interpreter_Call (label labl) %{    // JAVA INTERPRETER CALL
1944 //     // int ic_reg     = Matcher::inline_cache_reg();




1883       MacroAssembler _masm(&cbuf);
1884       __ vzeroupper();
1885     }
1886     debug_only(int off1 = cbuf.insts_size());
1887     assert(off1 - off0 == pre_call_resets_size(), "correct size prediction");
1888   %}
1889 
1890   enc_class post_call_FPU %{
1891     // If method sets FPU control word do it here also
1892     if (Compile::current()->in_24_bit_fp_mode()) {
           // This method runs with a modified x87 control word (the stub-routine
           // name indicates a 24-bit precision image); after the call, reload it
           // from the preset image kept by StubRoutines.
1893       MacroAssembler masm(&cbuf);
1894       masm.fldcw(ExternalAddress(StubRoutines::addr_fpu_cntrl_wrd_24()));
1895     }
1896   %}
1897 
       // Emits a direct (PC-relative) static Java call. For Java-method targets
       // the relocation now also records a resolved-method index so the
       // relocation can identify the callee.
1898   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
1899     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
1900     // who we intended to call.
1901     cbuf.set_insts_mark();
         // Emit the one-byte primary opcode declared for this instruct (ADL macro).
1902     $$$emit8$primary;
1903 
1904     if (!_method) {
           // No Java method: call into the runtime. Displacement is
           // target - end_of_instruction; insts_end() precedes the 4-byte
           // displacement, hence the "- 4".
1905       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1906                      runtime_call_Relocation::spec(),
1907                      RELOC_IMM32);


1908     } else {
           // Java target: record the callee's method index in the relocation,
           // choosing the opt-virtual or plain static relocation flavor.
1909       int method_index = resolved_method_index(cbuf);
1910       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
1911                                                   : static_call_Relocation::spec(method_index);
1912       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
1913                      rspec, RELOC_DISP32);
1914       // Emit stubs for static call.
           // The to-interpreter stub; if the code cache is full it cannot be
           // emitted, so fail this compile and bail out.
1915       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
1916       if (stub == NULL) {
1917         ciEnv::current()->record_failure("CodeCache is full");
1918         return;
1919       }
1920     }
1921   %}
1922 
       // Emits a virtual Java call through the inline-cache machinery; the
       // resolved-method index is passed so the IC relocation records the callee.
1923   enc_class Java_Dynamic_Call (method meth) %{    // JAVA DYNAMIC CALL
1924     MacroAssembler _masm(&cbuf);
1925     __ ic_call((address)$meth$$method, resolved_method_index(cbuf));
1926   %}
1927 
       // Emits an indirect call through the Method* held in EAX:
       // CALL *[EAX + Method::from_compiled_offset()].
1928   enc_class Java_Compiled_Call (method meth) %{    // JAVA COMPILED CALL
1929     int disp = in_bytes(Method::from_compiled_offset());
         // The disp8 addressing form below only holds an 8-bit displacement.
1930     assert( -128 <= disp && disp <= 127, "compiled_code_offset isn't small");
1931 
1932     // CALL *[EAX+in_bytes(Method::from_compiled_code_entry_point_offset())]
1933     cbuf.set_insts_mark();
         // Primary opcode byte for this instruct (ADL macro).
1934     $$$emit8$primary;
         // ModRM: mod=01 ([reg]+disp8), reg field = $secondary opcode extension,
         // r/m = EAX.
1935     emit_rm(cbuf, 0x01, $secondary, EAX_enc );  // R/M byte
1936     emit_d8(cbuf, disp);             // Displacement
1937 
1938   %}
1939 
1940 //   Following encoding is no longer used, but may be restored if calling
1941 //   convention changes significantly.
1942 //   Became: Xor_Reg(EBP), Java_To_Runtime( labl )
1943 //
1944 //   enc_class Java_Interpreter_Call (label labl) %{    // JAVA INTERPRETER CALL
1945 //     // int ic_reg     = Matcher::inline_cache_reg();


src/cpu/x86/vm/x86_32.ad
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File