src/hotspot/cpu/arm/vtableStubs_arm.cpp

Old version (with AARCH64 support):


  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  const Register tmp = Rtemp; // Rtemp OK, should be free at call sites

  address npe_addr = __ pc();
  __ load_klass(tmp, R0);

#ifndef PRODUCT
  if (DebugVtables) {
    // Implementation required?
  }
#endif

  start_pc = __ pc();
  { // lookup virtual method
    int entry_offset = in_bytes(Klass::vtable_start_offset()) + vtable_index * vtableEntry::size_in_bytes();
    int method_offset = vtableEntry::method_offset_in_bytes() + entry_offset;

    assert ((method_offset & (wordSize - 1)) == 0, "offset should be aligned");
    int offset_mask = AARCH64_ONLY(0xfff << LogBytesPerWord) NOT_AARCH64(0xfff);
    if (method_offset & ~offset_mask) {
      __ add(tmp, tmp, method_offset & ~offset_mask);
    }
    __ ldr(Rmethod, Address(tmp, method_offset & offset_mask));
  }
  slop_delta  = 8 - (int)(__ pc() - start_pc);
  slop_bytes += slop_delta;
  assert(slop_delta >= 0, "negative slop(%d) encountered, adjust code size estimate!", slop_delta);
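
The masking above reflects an instruction-encoding limit: an ARM32 LDR immediate
offset field is 12 bits (0..4095), while on AArch64 the same 12-bit field is
scaled by the access size (hence the AARCH64_ONLY shift by LogBytesPerWord).
Any higher bits of a large method_offset must be folded into a separate ADD.
A minimal standalone sketch of the lossless split, with a hypothetical offset
value (not HotSpot code):

    #include <cassert>
    #include <cstdio>

    int main() {
      const int offset_mask = 0xfff;                 // ARM32: 12-bit LDR offset
      int method_offset = 0x1234;                    // hypothetical offset > 4095
      int add_part = method_offset & ~offset_mask;   // emitted as ADD tmp, tmp, #hi
      int ldr_part = method_offset &  offset_mask;   // encoded in LDR Rmethod, [tmp, #lo]
      assert(add_part + ldr_part == method_offset);  // the split loses nothing
      printf("add #0x%x, ldr #0x%x\n", add_part, ldr_part);
      return 0;
    }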

#ifndef PRODUCT
  if (DebugVtables) {
    // Implementation required?
  }
#endif

  address ame_addr = __ pc();
#ifdef AARCH64
  __ ldr(tmp, Address(Rmethod, Method::from_compiled_offset()));
  __ br(tmp);
#else
  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));
#endif // AARCH64

  masm->flush();
  bookkeeping(masm, tty, s, npe_addr, ame_addr, true, vtable_index, slop_bytes, 0);

  return s;
}
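
For orientation, here is a hedged C++ analogue (mock types, not HotSpot code)
of the dispatch the finished vtable stub performs at run time: load the
receiver's klass, fetch the Method* at the precomputed vtable offset, then
jump through its from_compiled entry. On ARM32 the final ldr into PC both
loads and branches; the AArch64 path above needs a separate br.

    // Hypothetical mocks of the structures the stub walks.
    struct FakeMethod { void (*from_compiled)(); };
    struct FakeKlass  { FakeMethod* vtable[16]; };  // vtable embedded in the klass
    struct FakeOop    { FakeKlass* klass; };

    void stub_dispatch(FakeOop* receiver, int vtable_index) {
      FakeKlass*  k = receiver->klass;              // load_klass(tmp, R0); npe_addr
      FakeMethod* m = k->vtable[vtable_index];      // ldr Rmethod, [tmp, #method_offset]
      m->from_compiled();                           // ldr PC, [Rmethod, #from_compiled_offset]
    }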

VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  // Read "A word on VtableStub sizing" in share/code/vtableStubs.hpp for details on stub sizing.
  const int stub_code_length = code_size_limit(false);
  VtableStub* s = new(stub_code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }
  // Count unused bytes in instruction sequences of variable size.
  // We add them to the computed buffer size in order to avoid
  // overflow in subsequently generated stubs.
  address   start_pc;
  int       slop_bytes = 0;
  int       slop_delta = 0;

  ResourceMark    rm;
  CodeBuffer      cb(s->entry_point(), stub_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#if (!defined(PRODUCT) && defined(COMPILER2))
  if (CountCompiledCalls) {
    // Implementation required?
  }
#endif

  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  // R0-R3 / R0-R7 registers hold the arguments and cannot be spoiled
  const Register Rclass  = AARCH64_ONLY(R9)  NOT_AARCH64(R4);
  const Register Rintf   = AARCH64_ONLY(R10) NOT_AARCH64(R5);
  const Register Rscan   = AARCH64_ONLY(R11) NOT_AARCH64(R6);

  Label L_no_such_interface;

  assert_different_registers(Ricklass, Rclass, Rintf, Rscan, Rtemp);

  start_pc = __ pc();

  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();
  __ load_klass(Rclass, R0);

  // Receiver subtype check against REFC.
  __ ldr(Rintf, Address(Ricklass, CompiledICHolder::holder_klass_offset()));
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             Rclass, Rintf, noreg,
                             // outputs: temp reg1, temp reg2
                             noreg, Rscan, Rtemp,
                             L_no_such_interface);

  const ptrdiff_t  typecheckSize = __ pc() - start_pc;
  start_pc = __ pc();

  // Get Method* and entry point for compiler
  __ ldr(Rintf, Address(Ricklass, CompiledICHolder::holder_metadata_offset()));
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             Rclass, Rintf, itable_index,
                             // outputs: Method*, scan temp. reg
                             Rmethod, Rscan, Rtemp,
                             L_no_such_interface);

  const ptrdiff_t lookupSize = __ pc() - start_pc;

  // Reduce "estimate" such that "padding" does not drop below 8.
  const ptrdiff_t estimate = 140;
  const ptrdiff_t codesize = typecheckSize + lookupSize;
  slop_delta  = (int)(estimate - codesize);
  slop_bytes += slop_delta;
  assert(slop_delta >= 0, "itable #%d: Code size estimate (%d) for lookup_interface_method too small, required: %d", itable_index, (int)estimate, (int)codesize);
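
The two measured passes do different jobs: the first lookup_interface_method
call, driven by the holder_klass (REFC), only verifies that the receiver's
class implements the interface referenced at the call site; the second call,
driven by the holder_metadata (the declaring interface), locates the itable
entry and loads the Method*. A hedged sketch of the scan both passes perform,
with invented field names rather than the real klassItable layout:

    #include <cstddef>

    struct ItableEntry { const void* interface_klass; int method_table_offset; };

    // Walk the receiver klass's itable; a null interface terminates the scan.
    void** find_itable_slot(ItableEntry* scan, const void* wanted_interface,
                            char* receiver_klass, int itable_index) {
      for (; scan->interface_klass != NULL; scan++) {
        if (scan->interface_klass == wanted_interface) {
          void** methods = (void**)(receiver_klass + scan->method_table_offset);
          return &methods[itable_index];  // Method* slot for this interface
        }
      }
      return NULL;                        // maps to the L_no_such_interface exit
    }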

#ifndef PRODUCT
  if (DebugVtables) {
    // Implementation required?
  }
#endif

  address ame_addr = __ pc();

#ifdef AARCH64
  __ ldr(Rtemp, Address(Rmethod, Method::from_compiled_offset()));
  __ br(Rtemp);
#else
  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));
#endif // AARCH64

  __ bind(L_no_such_interface);
  // Handle IncompatibleClassChangeError in itable stubs.
  // More detailed error message.
  // We force resolving of the call site by jumping to the "handle
  // wrong method" stub, and so let the interpreter runtime do all the
  // dirty work.
  assert(SharedRuntime::get_handle_wrong_method_stub() != NULL, "check initialization order");
  __ jump(SharedRuntime::get_handle_wrong_method_stub(), relocInfo::runtime_call_type, Rtemp);

  masm->flush();
  bookkeeping(masm, tty, s, npe_addr, ame_addr, false, itable_index, slop_bytes, 0);

  return s;
}
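
Both generators share the same slop-accounting discipline: compare each
variable-length instruction sequence against a fixed byte estimate (8 for the
vtable load above, 140 here for typecheck plus lookup), assert the estimate
was not exceeded, and pass the accumulated spare bytes to bookkeeping() so
subsequently generated stubs get buffers that are large enough. A compact
illustration of the arithmetic, using a hypothetical helper (not HotSpot code):

    #include <cassert>

    static int slop_bytes = 0;

    void account_slop(int estimate_bytes, int emitted_bytes) {
      int slop_delta = estimate_bytes - emitted_bytes;  // < 0: estimate too small
      assert(slop_delta >= 0 && "adjust code size estimate!");
      slop_bytes += slop_delta;
    }

    int main() {
      account_slop(8, 4);      // e.g. vtable lookup needed only the LDR, no ADD
      account_slop(140, 132);  // e.g. typecheckSize + lookupSize == 132 bytes
      return slop_bytes;       // 12 spare bytes credited to future stub sizing
    }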

int VtableStub::pd_code_alignment() {
  // ARM32 cache line size is not an architected constant. We just align on word size.
  const unsigned int icache_line_size = wordSize;
  return icache_line_size;
}

New version (ARM32 only, AArch64 support removed):


  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  const Register tmp = Rtemp; // Rtemp OK, should be free at call sites

  address npe_addr = __ pc();
  __ load_klass(tmp, R0);

#ifndef PRODUCT
  if (DebugVtables) {
    // Implementation required?
  }
#endif

  start_pc = __ pc();
  { // lookup virtual method
    int entry_offset = in_bytes(Klass::vtable_start_offset()) + vtable_index * vtableEntry::size_in_bytes();
    int method_offset = vtableEntry::method_offset_in_bytes() + entry_offset;

    assert ((method_offset & (wordSize - 1)) == 0, "offset should be aligned");
    int offset_mask = 0xfff;
    if (method_offset & ~offset_mask) {
      __ add(tmp, tmp, method_offset & ~offset_mask);
    }
    __ ldr(Rmethod, Address(tmp, method_offset & offset_mask));
  }
  slop_delta  = 8 - (int)(__ pc() - start_pc);
  slop_bytes += slop_delta;
  assert(slop_delta >= 0, "negative slop(%d) encountered, adjust code size estimate!", slop_delta);

#ifndef PRODUCT
  if (DebugVtables) {
    // Implementation required?
  }
#endif

  address ame_addr = __ pc();
  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));

  masm->flush();
  bookkeeping(masm, tty, s, npe_addr, ame_addr, true, vtable_index, slop_bytes, 0);

  return s;
}

VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  // Read "A word on VtableStub sizing" in share/code/vtableStubs.hpp for details on stub sizing.
  const int stub_code_length = code_size_limit(false);
  VtableStub* s = new(stub_code_length) VtableStub(false, itable_index);
  // Can be NULL if there is no free space in the code cache.
  if (s == NULL) {
    return NULL;
  }
  // Count unused bytes in instruction sequences of variable size.
  // We add them to the computed buffer size in order to avoid
  // overflow in subsequently generated stubs.
  address   start_pc;
  int       slop_bytes = 0;
  int       slop_delta = 0;

  ResourceMark    rm;
  CodeBuffer      cb(s->entry_point(), stub_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#if (!defined(PRODUCT) && defined(COMPILER2))
  if (CountCompiledCalls) {
    // Implementation required?
  }
#endif

  assert(VtableStub::receiver_location() == R0->as_VMReg(), "receiver expected in R0");

  // R0-R3 / R0-R7 registers hold the arguments and cannot be spoiled
  const Register Rclass  = R4;
  const Register Rintf   = R5;
  const Register Rscan   = R6;

  Label L_no_such_interface;

  assert_different_registers(Ricklass, Rclass, Rintf, Rscan, Rtemp);

  start_pc = __ pc();

  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();
  __ load_klass(Rclass, R0);

  // Receiver subtype check against REFC.
  __ ldr(Rintf, Address(Ricklass, CompiledICHolder::holder_klass_offset()));
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             Rclass, Rintf, noreg,
                             // outputs: temp reg1, temp reg2
                             noreg, Rscan, Rtemp,
                             L_no_such_interface);

  const ptrdiff_t  typecheckSize = __ pc() - start_pc;
  start_pc = __ pc();

  // Get Method* and entry point for compiler
  __ ldr(Rintf, Address(Ricklass, CompiledICHolder::holder_metadata_offset()));
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             Rclass, Rintf, itable_index,
                             // outputs: Method*, scan temp. reg
                             Rmethod, Rscan, Rtemp,
                             L_no_such_interface);

  const ptrdiff_t lookupSize = __ pc() - start_pc;

  // Reduce "estimate" such that "padding" does not drop below 8.
  const ptrdiff_t estimate = 140;
  const ptrdiff_t codesize = typecheckSize + lookupSize;
  slop_delta  = (int)(estimate - codesize);
  slop_bytes += slop_delta;
  assert(slop_delta >= 0, "itable #%d: Code size estimate (%d) for lookup_interface_method too small, required: %d", itable_index, (int)estimate, (int)codesize);

#ifndef PRODUCT
  if (DebugVtables) {
    // Implementation required?
  }
#endif

  address ame_addr = __ pc();

  __ ldr(PC, Address(Rmethod, Method::from_compiled_offset()));

  __ bind(L_no_such_interface);
  // Handle IncompatibleClassChangeError in itable stubs.
  // More detailed error message.
  // We force resolving of the call site by jumping to the "handle
  // wrong method" stub, and so let the interpreter runtime do all the
  // dirty work.
  assert(SharedRuntime::get_handle_wrong_method_stub() != NULL, "check initialization order");
  __ jump(SharedRuntime::get_handle_wrong_method_stub(), relocInfo::runtime_call_type, Rtemp);

  masm->flush();
  bookkeeping(masm, tty, s, npe_addr, ame_addr, false, itable_index, slop_bytes, 0);

  return s;
}

int VtableStub::pd_code_alignment() {
  // ARM32 cache line size is not an architected constant. We just align on word size.
  const unsigned int icache_line_size = wordSize;
  return icache_line_size;
}
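
pd_code_alignment() feeds the shared VtableStubs allocator, which rounds each
stub's entry point up to this alignment. A small self-contained illustration
of that rounding (hypothetical helper, not HotSpot's own align_up()):

    #include <cassert>
    #include <cstdint>

    // Round addr up to the next multiple of a power-of-two alignment.
    uintptr_t align_up_addr(uintptr_t addr, uintptr_t alignment) {
      return (addr + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      const uintptr_t word_size = 4;  // ARM32 wordSize
      assert(align_up_addr(0x1001, word_size) == 0x1004);
      assert(align_up_addr(0x1004, word_size) == 0x1004);  // already aligned
      return 0;
    }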