
src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp

rev 54011 : 8219993: AArch64: Compiled CI stubs are unsafely modified
Reviewed-by: adinn

--- old version (before the 8219993 fix):
  44   if (mark == NULL) {
  45     mark = cbuf.insts_mark();  // Get mark within main instrs section.
  46   }
  47 
  48   // Note that the code buffer's insts_mark is always relative to insts.
  49   // That's why we must use the macroassembler to generate a stub.
  50   MacroAssembler _masm(&cbuf);
  51 
  52   address base = __ start_a_stub(to_interp_stub_size());
  53   int offset = __ offset();
  54   if (base == NULL) {
  55     return NULL;  // CodeBuffer::expand failed
  56   }
  57   // static stub relocation stores the instruction address of the call
  58   __ relocate(static_stub_Relocation::spec(mark));
  59 
  60 #if INCLUDE_AOT
  61   // Don't create a Metadata reloc if we're generating immutable PIC.
  62   if (cbuf.immutable_PIC()) {
  63     __ movptr(rmethod, 0);
  64   } else {
  65     __ mov_metadata(rmethod, (Metadata*)NULL);
  66   }
  67 #else
  68   __ mov_metadata(rmethod, (Metadata*)NULL);
  69 #endif
  70   __ movptr(rscratch1, 0);
  71   __ br(rscratch1);
  72 
  73   assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  74   __ end_a_stub();
  75   return base;
  76 }
  77 #undef __
  78 
  79 int CompiledStaticCall::to_interp_stub_size() {
  80   return 7 * NativeInstruction::instruction_size;
  81 }
  82 
  83 int CompiledStaticCall::to_trampoline_stub_size() {
  84   // Somewhat pessimistically, we count 3 instructions here (although
  85   // there are only two) because we sometimes emit an alignment nop.
  86   // Trampoline stubs are always word aligned.
  87   return 3 * NativeInstruction::instruction_size + wordSize;
  88 }
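
For concreteness, the arithmetic behind the two size helpers above works out as follows. This is a standalone sketch: kInstructionSize and kWordSize are stand-ins for HotSpot's NativeInstruction::instruction_size and wordSize (4 and 8 bytes on AArch64), and the per-instruction breakdown of mov_metadata/movptr is an assumption about how those macros expand.

    #include <cstdio>

    // Stand-ins for NativeInstruction::instruction_size and wordSize.
    const int kInstructionSize = 4;   // AArch64 instructions are fixed at 4 bytes
    const int kWordSize        = 8;   // 64-bit target

    int main() {
      // To-interp stub: mov_metadata (movz+movk+movk) + movptr (movz+movk+movk) + br.
      printf("to_interp_stub_size     = %d bytes\n", 7 * kInstructionSize);              // 28
      // Trampoline: two instructions plus a possible alignment nop, then one data word.
      printf("to_trampoline_stub_size = %d bytes\n", 3 * kInstructionSize + kWordSize);  // 20
      return 0;
    }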
  89 
  90 // Relocation entries for call stub, compiled java to interpreter.
  91 int CompiledStaticCall::reloc_to_interp_stub() {
  92   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
  93 }
  94 
  95 #if INCLUDE_AOT
  96 #define __ _masm.
  97 void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  98   if (!UseAOT) {
  99     return;
 100   }


 142   if (UseAOT) {
 143     return 5 * 4;  // movz; movk; movk; movk; br
 144   } else {
 145     return 0;
 146   }
 147 }
 148 #endif // INCLUDE_AOT
 149 
 150 void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
 151   address stub = find_stub(false /* is_aot */);
 152   guarantee(stub != NULL, "stub not found");
 153 
 154   if (TraceICs) {
 155     ResourceMark rm;
 156     tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
 157                   p2i(instruction_address()),
 158                   callee->name_and_sig_as_C_string());
 159   }
 160 
 161   // Creation also verifies the object.
 162   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
 163 #ifndef PRODUCT
 164   NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());
 165 
 166   // read the value once
 167   volatile intptr_t data = method_holder->data();
 168   assert(data == 0 || data == (intptr_t)callee(),
 169          "a) MT-unsafe modification of inline cache");
 170   assert(data == 0 || jump->jump_destination() == entry,
 171          "b) MT-unsafe modification of inline cache");
 172 #endif
 173   // Update stub.
 174   method_holder->set_data((intptr_t)callee());
 175   NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
 176   ICache::invalidate_range(stub, to_interp_stub_size());
 177   // Update jump to call.
 178   set_destination_mt_safe(stub);
 179 }
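
This is the path that 8219993 flags as unsafe: the mov_metadata constant and the jump are patched with independent stores while another thread may already be executing the stub, so that thread can pair the new Metadata* with the stale branch target, and ICache::invalidate_range cannot undo instructions already fetched. A minimal standalone illustration of the hazard class, with std::atomic variables standing in for the two patched stub fields (the names are illustrative, not HotSpot API):

    #include <atomic>
    #include <cstdio>
    #include <thread>

    std::atomic<long> method_data{0};  // stands in for the patched Metadata* constant
    std::atomic<long> jump_target{0};  // stands in for the patched branch destination

    void patcher() {
      // Mirrors the old update: two independent stores, nothing orders them
      // with respect to a thread already running the stub.
      method_data.store(42, std::memory_order_relaxed);
      jump_target.store(100, std::memory_order_relaxed);
    }

    void executor() {
      long d = method_data.load(std::memory_order_relaxed);
      long t = jump_target.load(std::memory_order_relaxed);
      if (d == 42 && t == 0)
        printf("saw the new Metadata* with the stale jump target\n");
    }

    int main() {
      std::thread a(patcher), b(executor);
      a.join();
      b.join();
      return 0;
    }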
 180 
 181 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
 182   // Reset stub.
 183   address stub = static_stub->addr();
 184   assert(stub != NULL, "stub not found");
 185   assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
 186   // Creation also verifies the object.
 187   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
 188   method_holder->set_data(0);
 189 }
 190 
 191 //-----------------------------------------------------------------------------
 192 // Non-product mode code
 193 #ifndef PRODUCT
 194 
 195 void CompiledDirectStaticCall::verify() {
 196   // Verify call.
 197   _call->verify();
 198   _call->verify_alignment();
 199 
 200   // Verify stub.
 201   address stub = find_stub(false /* is_aot */);
 202   assert(stub != NULL, "no stub found for static call");
 203   // Creation also verifies the object.
 204   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
 205   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
 206 
 207   // Verify state.
 208   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
 209 }
 210 
 211 #endif // !PRODUCT

+++ new version (rev 54011, with the fix):
  44   if (mark == NULL) {
  45     mark = cbuf.insts_mark();  // Get mark within main instrs section.
  46   }
  47 
  48   // Note that the code buffer's insts_mark is always relative to insts.
  49   // That's why we must use the macroassembler to generate a stub.
  50   MacroAssembler _masm(&cbuf);
  51 
  52   address base = __ start_a_stub(to_interp_stub_size());
  53   int offset = __ offset();
  54   if (base == NULL) {
  55     return NULL;  // CodeBuffer::expand failed
  56   }
  57   // static stub relocation stores the instruction address of the call
  58   __ relocate(static_stub_Relocation::spec(mark));
  59 
  60 #if INCLUDE_AOT
  61   // Don't create a Metadata reloc if we're generating immutable PIC.
  62   if (cbuf.immutable_PIC()) {
  63     __ movptr(rmethod, 0);
  64     __ movptr(rscratch1, 0);
  65     __ br(rscratch1);
  66 
  67   } else
  68 #endif
  69   {
  70     __ emit_static_call_stub();
  71   }
  72 
  73   assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
  74   __ end_a_stub();
  75   return base;
  76 }
  77 #undef __
  78 
  79 int CompiledStaticCall::to_interp_stub_size() {
  80   // align; dmb; ldr, ldr, br; two words of data
  81   return 5 * NativeInstruction::instruction_size + 2 * wordSize;
  82 }
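
emit_static_call_stub() is defined in the MacroAssembler rather than in this file, so its body is not shown in this diff. The layout sketched below is a reconstruction from the "align; dmb; ldr, ldr, br; two words of data" comment above, not the committed code; the size arithmetic, at least, is checkable.

    #include <cstdio>

    const int kInstructionSize = 4;  // fixed AArch64 instruction width
    const int kWordSize        = 8;

    int main() {
      // Plausible stub layout per the size comment:
      //     nop                       ; optional alignment
      //     dmb                       ; orders the literal loads against patching
      //     ldr  rmethod,   Lmethod   ; Metadata*, loaded from a data word
      //     ldr  rscratch1, Lentry    ; interpreter entry, loaded from a data word
      //     br   rscratch1
      //   Lmethod: .quad 0            ; patched by set_to_interpreted
      //   Lentry:  .quad 0            ; patched by set_to_interpreted
      printf("to_interp_stub_size = %d bytes\n",
             5 * kInstructionSize + 2 * kWordSize);  // 36
      return 0;
    }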
  83 
  84 int CompiledStaticCall::to_trampoline_stub_size() {
  85   // Somewhat pessimistically, we count 3 instructions here (although
  86   // there are only two) because we sometimes emit an alignment nop.
  87   // Trampoline stubs are always word aligned.
  88   return 3 * NativeInstruction::instruction_size + wordSize;
  89 }
  90 
  91 // Relocation entries for call stub, compiled java to interpreter.
  92 int CompiledStaticCall::reloc_to_interp_stub() {
  93   return 4; // 3 in emit_to_interp_stub + 1 in emit_call
  94 }
  95 
  96 #if INCLUDE_AOT
  97 #define __ _masm.
  98 void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  99   if (!UseAOT) {
 100     return;
 101   }


 143   if (UseAOT) {
 144     return 5 * 4;  // movz; movk; movk; movk; br
 145   } else {
 146     return 0;
 147   }
 148 }
 149 #endif // INCLUDE_AOT
 150 
 151 void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
 152   address stub = find_stub(false /* is_aot */);
 153   guarantee(stub != NULL, "stub not found");
 154 
 155   if (TraceICs) {
 156     ResourceMark rm;
 157     tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
 158                   p2i(instruction_address()),
 159                   callee->name_and_sig_as_C_string());
 160   }
 161 
 162   // Creation also verifies the object.
 163   NativeMovConstReg *method_holder = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
 164   NativeMovConstReg *jump = nativeMovConstReg_at(method_holder->next_instruction_address());
 165 
 166 #ifndef PRODUCT
 167   // read the value once
 168   volatile intptr_t data = method_holder->data();
 169   assert(data == 0 || data == (intptr_t)callee(),
 170          "a) MT-unsafe modification of inline cache");
 171   assert(data == 0 || jump->data() == (intptr_t)entry,
 172          "b) MT-unsafe modification of inline cache");
 173 #endif
 174   // Update stub.
 175   method_holder->set_data((intptr_t)callee());
 176   jump->set_data((intptr_t)entry);
 177   OrderAccess::fence();
 178   set_destination_mt_safe(stub);
 179 }
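
The rewritten update is a publish pattern: both data words are written first, OrderAccess::fence() makes them globally visible, and only then does set_destination_mt_safe() route callers into the stub; on the reader side, the stub's dmb orders the two loads against the patching stores. A minimal sketch of the writer's ordering in portable C++, with atomics standing in for the stub's data words and the call site (none of these names are HotSpot API):

    #include <atomic>

    std::atomic<long> method_data{0};              // stub data word: Metadata*
    std::atomic<long> jump_target{0};              // stub data word: interpreter entry
    std::atomic<bool> call_points_at_stub{false};  // the patched call site

    void set_to_interpreted_sketch(long callee, long entry) {
      // 1. Fill in the stub's data; no caller is routed through it yet.
      method_data.store(callee, std::memory_order_relaxed);
      jump_target.store(entry, std::memory_order_relaxed);
      // 2. OrderAccess::fence() in the real code: data must be visible first.
      std::atomic_thread_fence(std::memory_order_seq_cst);
      // 3. set_destination_mt_safe(): only now redirect the call into the stub.
      call_points_at_stub.store(true, std::memory_order_release);
    }

    int main() {
      set_to_interpreted_sketch(42, 100);
      return 0;
    }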
 180 
 181 void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
 182   // Reset stub.
 183   address stub = static_stub->addr();
 184   assert(stub != NULL, "stub not found");
 185   assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
 186   // Creation also verifies the object.
 187   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
 188   method_holder->set_data(0);
 189 }
 190 
 191 //-----------------------------------------------------------------------------
 192 // Non-product mode code
 193 #ifndef PRODUCT
 194 
 195 void CompiledDirectStaticCall::verify() {
 196   // Verify call.
 197   _call->verify();
 198   _call->verify_alignment();
 199 
 200   // Verify stub.
 201   address stub = find_stub(false /* is_aot */);
 202   assert(stub != NULL, "no stub found for static call");
 203   // Creation also verifies the object.
 204   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
 205   NativeMovConstReg* jump          = nativeMovConstReg_at(method_holder->next_instruction_address());
 206 
 207   // Verify state.
 208   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
 209 }
 210 
 211 #endif // !PRODUCT