src/share/vm/c1/c1_LIRAssembler.cpp

Old version (without aarch64 support):

  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_Compilation.hpp"
  27 #include "c1/c1_Instruction.hpp"
  28 #include "c1/c1_InstructionPrinter.hpp"
  29 #include "c1/c1_LIRAssembler.hpp"
  30 #include "c1/c1_MacroAssembler.hpp"
  31 #include "c1/c1_ValueStack.hpp"
  32 #include "ci/ciInstance.hpp"
  33 #ifdef TARGET_ARCH_x86
  34 # include "nativeInst_x86.hpp"
  35 # include "vmreg_x86.inline.hpp"
  36 #endif
  37 #ifdef TARGET_ARCH_sparc
  38 # include "nativeInst_sparc.hpp"
  39 # include "vmreg_sparc.inline.hpp"
  40 #endif
  41 #ifdef TARGET_ARCH_zero
  42 # include "nativeInst_zero.hpp"
  43 # include "vmreg_zero.inline.hpp"
  44 #endif
  45 #ifdef TARGET_ARCH_arm
  46 # include "nativeInst_arm.hpp"
  47 # include "vmreg_arm.inline.hpp"
  48 #endif
  49 #ifdef TARGET_ARCH_ppc
  50 # include "nativeInst_ppc.hpp"
  51 # include "vmreg_ppc.inline.hpp"
  52 #endif
  53 
  54 
  55 void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_code, Register obj, CodeEmitInfo* info) {
  56   // we must have enough patching space so that a call can be inserted


 107   Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
 108   if (Bytecodes::has_optional_appendix(bc_raw)) {
 109     return PatchingStub::load_appendix_id;
 110   }
 111   return PatchingStub::load_mirror_id;
 112 }
 113 
 114 //---------------------------------------------------------------
 115 
 116 
 117 LIR_Assembler::LIR_Assembler(Compilation* c):
 118    _compilation(c)
 119  , _masm(c->masm())
 120  , _bs(Universe::heap()->barrier_set())
 121  , _frame_map(c->frame_map())
 122  , _current_block(NULL)
 123  , _pending_non_safepoint(NULL)
 124  , _pending_non_safepoint_offset(0)
 125 {
 126   _slow_case_stubs = new CodeStubList();
 127 }
 128 
 129 
 130 LIR_Assembler::~LIR_Assembler() {
 131   // The unwind handler label may be unbound if this destructor is invoked because of a bail-out.
 132   // Reset it here to avoid an assertion.
 133   _unwind_handler_entry.reset();
 134 }
 135 
 136 
 137 void LIR_Assembler::check_codespace() {
 138   CodeSection* cs = _masm->code_section();
 139   if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {
 140     BAILOUT("CodeBuffer overflow");
 141   }
 142 }
 143 
 144 
 145 void LIR_Assembler::append_code_stub(CodeStub* stub) {
 146   _slow_case_stubs->append(stub);
 147 }
 148 
 149 void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {
 150   for (int m = 0; m < stub_list->length(); m++) {
 151     CodeStub* s = (*stub_list)[m];
 152 
 153     check_codespace();
 154     CHECK_BAILOUT();
 155 
 156 #ifndef PRODUCT
 157     if (CommentedAssembly) {
 158       stringStream st;
 159       s->print_name(&st);
 160       st.print(" slow case");
 161       _masm->block_comment(st.as_string());
 162     }
 163 #endif
 164     s->emit_code(this);
 165 #ifdef ASSERT
 166     s->assert_no_unbound_labels();
 167 #endif
 168   }
 169 }
 170 
 171 
 172 void LIR_Assembler::emit_slow_case_stubs() {
 173   emit_stubs(_slow_case_stubs);
 174 }
 175 
 176 
 177 bool LIR_Assembler::needs_icache(ciMethod* method) const {
 178   return !method->is_static();
 179 }
 180 
 181 
 182 int LIR_Assembler::code_offset() const {
 183   return _masm->offset();
 184 }
 185 
 186 

New version (with aarch64 support added):

  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_Compilation.hpp"
  27 #include "c1/c1_Instruction.hpp"
  28 #include "c1/c1_InstructionPrinter.hpp"
  29 #include "c1/c1_LIRAssembler.hpp"
  30 #include "c1/c1_MacroAssembler.hpp"
  31 #include "c1/c1_ValueStack.hpp"
  32 #include "ci/ciInstance.hpp"
  33 #ifdef TARGET_ARCH_x86
  34 # include "nativeInst_x86.hpp"
  35 # include "vmreg_x86.inline.hpp"
  36 #endif
  37 #ifdef TARGET_ARCH_aarch64
  38 # include "nativeInst_aarch64.hpp"
  39 # include "vmreg_aarch64.inline.hpp"
  40 #endif
  41 #ifdef TARGET_ARCH_sparc
  42 # include "nativeInst_sparc.hpp"
  43 # include "vmreg_sparc.inline.hpp"
  44 #endif
  45 #ifdef TARGET_ARCH_zero
  46 # include "nativeInst_zero.hpp"
  47 # include "vmreg_zero.inline.hpp"
  48 #endif
  49 #ifdef TARGET_ARCH_arm
  50 # include "nativeInst_arm.hpp"
  51 # include "vmreg_arm.inline.hpp"
  52 #endif
  53 #ifdef TARGET_ARCH_ppc
  54 # include "nativeInst_ppc.hpp"
  55 # include "vmreg_ppc.inline.hpp"
  56 #endif
  57 
  58 
  59 void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_code, Register obj, CodeEmitInfo* info) {
  60   // we must have enough patching space so that a call can be inserted


 111   Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
 112   if (Bytecodes::has_optional_appendix(bc_raw)) {
 113     return PatchingStub::load_appendix_id;
 114   }
 115   return PatchingStub::load_mirror_id;
 116 }
 117 
 118 //---------------------------------------------------------------
 119 
 120 
 121 LIR_Assembler::LIR_Assembler(Compilation* c):
 122    _compilation(c)
 123  , _masm(c->masm())
 124  , _bs(Universe::heap()->barrier_set())
 125  , _frame_map(c->frame_map())
 126  , _current_block(NULL)
 127  , _pending_non_safepoint(NULL)
 128  , _pending_non_safepoint_offset(0)
 129 {
 130   _slow_case_stubs = new CodeStubList();
 131 #ifdef TARGET_ARCH_aarch64
 132   init(); // Target-dependent initialization
 133 #endif
 134 }
 135 
 136 
 137 LIR_Assembler::~LIR_Assembler() {
 138   // The unwind handler label may be unbound if this destructor is invoked because of a bail-out.
 139   // Reset it here to avoid an assertion.
 140   _unwind_handler_entry.reset();
 141 }
 142 
 143 
 144 void LIR_Assembler::check_codespace() {
 145   CodeSection* cs = _masm->code_section();
 146   if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {
 147     BAILOUT("CodeBuffer overflow");
 148   }
 149 }
 150 
 151 
 152 void LIR_Assembler::append_code_stub(CodeStub* stub) {
 153   _slow_case_stubs->append(stub);
 154 }
 155 
 156 void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {
 157   for (int m = 0; m < stub_list->length(); m++) {
 158     CodeStub* s = (*stub_list)[m];
 159 
 160     check_codespace();
 161     CHECK_BAILOUT();
 162 
 163 #ifndef PRODUCT
 164     if (CommentedAssembly) {
 165       stringStream st;
 166       s->print_name(&st);
 167       st.print(" slow case");
 168       _masm->block_comment(st.as_string());
 169     }
 170 #endif
 171     s->emit_code(this);
 172 #ifdef ASSERT
 173 #ifndef AARCH64
 174     s->assert_no_unbound_labels();
 175 #endif
 176 #endif
 177   }
 178 }
 179 
 180 
 181 void LIR_Assembler::emit_slow_case_stubs() {
 182   emit_stubs(_slow_case_stubs);
 183 }
 184 
 185 
 186 bool LIR_Assembler::needs_icache(ciMethod* method) const {
 187   return !method->is_static();
 188 }
 189 
 190 
 191 int LIR_Assembler::code_offset() const {
 192   return _masm->offset();
 193 }
 194 
 195 
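A note on the constructor change above: in the new version, the LIR_Assembler constructor calls a target-dependent init() when TARGET_ARCH_aarch64 is defined. That hook lives in the architecture-specific assembler sources, which are not part of this page. The fragment below is only a minimal sketch of what such a hook could look like; the member name and the register choice are invented for illustration and are not the actual aarch64 code.

    // Hypothetical sketch only -- not the real aarch64 implementation.
    // _illustrative_scratch and the choice of r12 are assumptions.
    #ifdef TARGET_ARCH_aarch64
    void LIR_Assembler::init() {
      // One-time, per-assembler setup run from the shared constructor,
      // e.g. reserving a register the back end may use as scratch space.
      _illustrative_scratch = r12;
    }
    #endif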


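One more note on the shared code: check_codespace() bails out when the space remaining in the current code section falls below a word-size-dependent threshold. NOT_LP64(x) expands to x only on 32-bit builds, LP64_ONLY(x) only on 64-bit (_LP64) builds, and K is 1024, so the guard is 1 KB on 32-bit and 2 KB on 64-bit. A rough illustration of the 64-bit expansion:

    // On a 64-bit build (_LP64 defined): NOT_LP64(1*K) expands to nothing
    // and LP64_ONLY(2*K) expands to 2*K, so the guard reduces to:
    if (cs->remaining() < (int)(2*K)) {   // 2*K == 2048 bytes
      BAILOUT("CodeBuffer overflow");
    }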