253 class RelocIterator;
254
255 class relocInfo VALUE_OBJ_CLASS_SPEC {
256 friend class RelocIterator;
257 public:
258 enum relocType {
// Each concrete type (1..13) has a matching <name>_Relocation class;
// the list is kept in sync with the APPLY_TO_RELOCATIONS macro below.
259 none = 0, // Used when no relocation should be generated
260 oop_type = 1, // embedded oop
261 virtual_call_type = 2, // a standard inline cache call for a virtual send
262 opt_virtual_call_type = 3, // a virtual call that has been statically bound (i.e., no IC cache)
263 static_call_type = 4, // a static send
264 static_stub_type = 5, // stub-entry for static send (takes care of interpreter case)
265 runtime_call_type = 6, // call to fixed external routine
266 external_word_type = 7, // reference to fixed external address
267 internal_word_type = 8, // reference within the current code blob
268 section_word_type = 9, // internal, but a cross-section reference
269 poll_type = 10, // polling instruction for safepoints
270 poll_return_type = 11, // polling instruction for safepoints at return
271 metadata_type = 12, // metadata that used to be oops
272 trampoline_stub_type = 13, // stub-entry for trampoline
273 yet_unused_type_1 = 14, // Still unused
274 data_prefix_tag = 15, // tag for a prefix (carries data arguments); deliberately shares value 15 with type_mask
275 type_mask = 15 // A mask which selects only the above values
276 };
277
278 protected:
// _value packs the relocType in the top type_width bits and the
// type-specific payload in the low nontype_width bits (see enum below).
279 unsigned short _value;
280
// RAW_BITS is a dispatch tag selecting the unchecked constructors below,
// which store caller-supplied bit patterns without assertions.
281 enum RawBitsToken { RAW_BITS };
// Pack 'type' plus pre-encoded payload 'bits' directly into _value.
282 relocInfo(relocType type, RawBitsToken ignore, int bits)
283 : _value((type << nontype_width) + bits) { }
284
// Pack 'type' plus a scaled offset ('off' divided by offset_unit) and
// format bits 'f' (shifted above the offset field) into _value.
285 relocInfo(relocType type, RawBitsToken ignore, int off, int f)
286 : _value((type << nontype_width) + (off / (unsigned)offset_unit) + (f << offset_width)) { }
287
288 public:
// constructor
// In product builds this simply delegates to the RAW_BITS form; in ASSERT
// builds the body (with its argument checks) is defined out-of-line.
290 relocInfo(relocType type, int offset, int format = 0)
291 #ifndef ASSERT
292 {
293 (*this) = relocInfo(type, RAW_BITS, offset, format);
294 }
295 #else
296 // Put a bunch of assertions out-of-line.
297 ;
298 #endif
299
// X-macro listing every concrete relocation name (matching relocType values
// 1..13 above); expand with a 'visitor(name)' macro to generate per-type code.
300 #define APPLY_TO_RELOCATIONS(visitor) \
301 visitor(oop) \
302 visitor(metadata) \
303 visitor(virtual_call) \
304 visitor(opt_virtual_call) \
305 visitor(static_call) \
306 visitor(static_stub) \
307 visitor(runtime_call) \
308 visitor(external_word) \
309 visitor(internal_word) \
310 visitor(poll) \
311 visitor(poll_return) \
312 visitor(section_word) \
313 visitor(trampoline_stub) \
314
315
316 public:
// Bit-field layout constants for _value.
317 enum {
318 value_width = sizeof(unsigned short) * BitsPerByte,
319 type_width = 4, // == log2(type_mask+1)
320 nontype_width = value_width - type_width,
321 datalen_width = nontype_width-1,
322 datalen_tag = 1 << datalen_width, // or-ed into _value
// datalen_tag and datalen_limit share the same value (1 << datalen_width).
323 datalen_limit = 1 << datalen_width,
324 datalen_mask = (1 << datalen_width)-1
325 };
326
327 // accessors
// Dynamic type tag; each subclass overrides this to return its relocType.
810 virtual relocInfo::relocType type() { return relocInfo::none; }
811
812 // is it a call instruction?
813 virtual bool is_call() { return false; }
814
815 // is it a data movement instruction?
816 virtual bool is_data() { return false; }
817
818 // some relocations can compute their own values
819 virtual address value();
820
821 // all relocations are able to reassert their values
822 virtual void set_value(address x);
823
// Default is a no-op; subclasses with inline caches override.
824 virtual void clear_inline_cache() { }
825
826 // This method assumes that all virtual/static (inline) caches are cleared (since for static_call_type and
827 // ic_call_type is not always position-dependent (depending on the state of the cache)). However, this is
828 // probably a reasonable assumption, since empty caches simplify code relocation.
829 virtual void fix_relocation_after_move(const CodeBuffer* src, CodeBuffer* dest) { }
830
831 void print();
832 };
833
834
835 // certain inlines must be deferred until class Relocation is defined:
836
// Default-construct the holder by placement-constructing a base Relocation
// into its internal buffer, so the vtable pointer is always valid.
837 inline RelocationHolder::RelocationHolder() {
838 // initialize the vtbl, just to keep things type-safe
839 new(*this) Relocation();
840 }
841
842
// Copy the relocation object pointed to by 'r' into this holder's buffer,
// one word at a time. May read past the end of *r; the existing comment
// notes this is tolerated (trailing garbage words are never interpreted).
843 inline RelocationHolder::RelocationHolder(Relocation* r) {
844 // wordwise copy from r (ok if it copies garbage after r)
845 for (int i = 0; i < _relocbuf_size; i++) {
846 _relocbuf[i] = ((void**)r)[i];
847 }
848 }
849
850
851 relocInfo::relocType RelocationHolder::type() const {
1156 // data is packed as a scaled offset in "1_int" format: [c] or [Cc]
1157 void pack_data_to(CodeSection* dest);
1158 void unpack_data();
1159 };
1160
// Relocation for a call to a fixed external runtime routine
// (relocInfo::runtime_call_type).
1161 class runtime_call_Relocation : public CallRelocation {
1162 relocInfo::relocType type() { return relocInfo::runtime_call_type; }
1163
1164 public:
// Returns a holder with a freshly placement-constructed
// runtime_call_Relocation; this relocation carries no extra data.
1165 static RelocationHolder spec() {
1166 RelocationHolder rh = newHolder();
1167 new(rh) runtime_call_Relocation();
1168 return rh;
1169 }
1170
1171 private:
// Only RelocIterator may default-construct instances (friend below).
1172 friend class RelocIterator;
1173 runtime_call_Relocation() { }
1174
1175 public:
1176 };
1177
1178 // Trampoline Relocations.
1179 // A trampoline makes it possible to encode a short branch in the code, even
1180 // though there is a chance that this branch cannot reach all possible code
1181 // locations. If the relocation finds that a branch is too far for the
1182 // instruction in the code, it can patch it to jump to the trampoline, where
1183 // there is sufficient space for a far branch. Needed on PPC.
1184 class trampoline_stub_Relocation : public Relocation {
1185 relocInfo::relocType type() { return relocInfo::trampoline_stub_type; }
1186
1187 public:
1188 static RelocationHolder spec(address static_call) {
1189 RelocationHolder rh = newHolder();
1190 return (new (rh) trampoline_stub_Relocation(static_call));
1191 }
1192
1193 private:
1194 address _owner; // Address of the NativeCall that owns the trampoline.
1195
|
253 class RelocIterator;
254
255 class relocInfo VALUE_OBJ_CLASS_SPEC {
256 friend class RelocIterator;
257 public:
258 enum relocType {
// Each concrete type (1..14) has a matching <name>_Relocation class;
// the list is kept in sync with the APPLY_TO_RELOCATIONS macro below.
259 none = 0, // Used when no relocation should be generated
260 oop_type = 1, // embedded oop
261 virtual_call_type = 2, // a standard inline cache call for a virtual send
262 opt_virtual_call_type = 3, // a virtual call that has been statically bound (i.e., no IC cache)
263 static_call_type = 4, // a static send
264 static_stub_type = 5, // stub-entry for static send (takes care of interpreter case)
265 runtime_call_type = 6, // call to fixed external routine
266 external_word_type = 7, // reference to fixed external address
267 internal_word_type = 8, // reference within the current code blob
268 section_word_type = 9, // internal, but a cross-section reference
269 poll_type = 10, // polling instruction for safepoints
270 poll_return_type = 11, // polling instruction for safepoints at return
271 metadata_type = 12, // metadata that used to be oops
272 trampoline_stub_type = 13, // stub-entry for trampoline
273 runtime_call_w_cp_type = 14, // Runtime call which may load its target from the constant pool
274 data_prefix_tag = 15, // tag for a prefix (carries data arguments); deliberately shares value 15 with type_mask
275 type_mask = 15 // A mask which selects only the above values
276 };
277
278 protected:
// _value packs the relocType in the top type_width bits and the
// type-specific payload in the low nontype_width bits (see enum below).
279 unsigned short _value;
280
// RAW_BITS is a dispatch tag selecting the unchecked constructors below,
// which store caller-supplied bit patterns without assertions.
281 enum RawBitsToken { RAW_BITS };
// Pack 'type' plus pre-encoded payload 'bits' directly into _value.
282 relocInfo(relocType type, RawBitsToken ignore, int bits)
283 : _value((type << nontype_width) + bits) { }
284
// Pack 'type' plus a scaled offset ('off' divided by offset_unit) and
// format bits 'f' (shifted above the offset field) into _value.
285 relocInfo(relocType type, RawBitsToken ignore, int off, int f)
286 : _value((type << nontype_width) + (off / (unsigned)offset_unit) + (f << offset_width)) { }
287
288 public:
289 // constructor
// In product builds this simply delegates to the RAW_BITS form; in ASSERT
// builds the body (with its argument checks) is defined out-of-line.
290 relocInfo(relocType type, int offset, int format = 0)
291 #ifndef ASSERT
292 {
293 (*this) = relocInfo(type, RAW_BITS, offset, format);
294 }
295 #else
296 // Put a bunch of assertions out-of-line.
297 ;
298 #endif
299
// X-macro listing every concrete relocation name (matching relocType values
// 1..14 above); expand with a 'visitor(name)' macro to generate per-type code.
300 #define APPLY_TO_RELOCATIONS(visitor) \
301 visitor(oop) \
302 visitor(metadata) \
303 visitor(virtual_call) \
304 visitor(opt_virtual_call) \
305 visitor(static_call) \
306 visitor(static_stub) \
307 visitor(runtime_call) \
308 visitor(runtime_call_w_cp) \
309 visitor(external_word) \
310 visitor(internal_word) \
311 visitor(poll) \
312 visitor(poll_return) \
313 visitor(section_word) \
314 visitor(trampoline_stub) \
315
315
316
317 public:
// Bit-field layout constants for _value.
318 enum {
319 value_width = sizeof(unsigned short) * BitsPerByte,
320 type_width = 4, // == log2(type_mask+1)
321 nontype_width = value_width - type_width,
322 datalen_width = nontype_width-1,
323 datalen_tag = 1 << datalen_width, // or-ed into _value
// datalen_tag and datalen_limit share the same value (1 << datalen_width).
324 datalen_limit = 1 << datalen_width,
325 datalen_mask = (1 << datalen_width)-1
326 };
327
328 // accessors
// Dynamic type tag; each subclass overrides this to return its relocType.
811 virtual relocInfo::relocType type() { return relocInfo::none; }
812
813 // is it a call instruction?
814 virtual bool is_call() { return false; }
815
816 // is it a data movement instruction?
817 virtual bool is_data() { return false; }
818
819 // some relocations can compute their own values
820 virtual address value();
821
822 // all relocations are able to reassert their values
823 virtual void set_value(address x);
824
// Default is a no-op; subclasses with inline caches override.
825 virtual void clear_inline_cache() { }
826
827 // This method assumes that all virtual/static (inline) caches are cleared (since for static_call_type and
828 // ic_call_type is not always position-dependent (depending on the state of the cache)). However, this is
829 // probably a reasonable assumption, since empty caches simplify code relocation.
830 virtual void fix_relocation_after_move(const CodeBuffer* src, CodeBuffer* dest) { }
831 };
832
833
834 // certain inlines must be deferred until class Relocation is defined:
835
// Default-construct the holder by placement-constructing a base Relocation
// into its internal buffer, so the vtable pointer is always valid.
836 inline RelocationHolder::RelocationHolder() {
837 // initialize the vtbl, just to keep things type-safe
838 new(*this) Relocation();
839 }
840
841
// Copy the relocation object pointed to by 'r' into this holder's buffer,
// one word at a time. May read past the end of *r; the existing comment
// notes this is tolerated (trailing garbage words are never interpreted).
842 inline RelocationHolder::RelocationHolder(Relocation* r) {
843 // wordwise copy from r (ok if it copies garbage after r)
844 for (int i = 0; i < _relocbuf_size; i++) {
845 _relocbuf[i] = ((void**)r)[i];
846 }
847 }
848
849
850 relocInfo::relocType RelocationHolder::type() const {
1155 // data is packed as a scaled offset in "1_int" format: [c] or [Cc]
1156 void pack_data_to(CodeSection* dest);
1157 void unpack_data();
1158 };
1159
// Relocation for a call to a fixed external runtime routine
// (relocInfo::runtime_call_type).
1160 class runtime_call_Relocation : public CallRelocation {
1161 relocInfo::relocType type() { return relocInfo::runtime_call_type; }
1162
1163 public:
// Returns a holder with a freshly placement-constructed
// runtime_call_Relocation; this relocation carries no extra data.
1164 static RelocationHolder spec() {
1165 RelocationHolder rh = newHolder();
1166 new(rh) runtime_call_Relocation();
1167 return rh;
1168 }
1169
1170 private:
// Only RelocIterator may default-construct instances (friend below).
1171 friend class RelocIterator;
1172 runtime_call_Relocation() { }
1173
1174 public:
1175 };
1176
1177
// Relocation for a runtime call which may load its target from the
// constant pool (relocInfo::runtime_call_w_cp_type). In addition to the
// call itself it records the constant-pool offset of the call target.
1178 class runtime_call_w_cp_Relocation : public CallRelocation {
1179 relocInfo::relocType type() { return relocInfo::runtime_call_w_cp_type; }
1180
1181 public:
// Returns a holder with a freshly placement-constructed
// runtime_call_w_cp_Relocation (with _offset still marked invalid).
1182 static RelocationHolder spec() {
1183 RelocationHolder rh = newHolder();
1184 new(rh) runtime_call_w_cp_Relocation();
1185 return rh;
1186 }
1187
1188 private:
// Only RelocIterator may default-construct instances (friend below).
1189 friend class RelocIterator;
// _offset starts out negative (invalid) until set_constant_pool_offset().
1190 runtime_call_w_cp_Relocation() { _offset = -4; /* <0 = invalid */ }
1191 // On z/Architecture, runtime calls are either a sequence
1192 // of two instructions (load destination of call from constant pool + do call)
1193 // or a pc-relative call. The pc-relative call is faster, but it can only
1194 // be used if the destination of the call is not too far away.
1195 // In order to be able to patch a pc-relative call back into one using
1196 // the constant pool, we have to remember the location of the call's destination
1197 // in the constant pool.
1198 int _offset;
1199
1200 public:
1201 void set_constant_pool_offset(int offset) { _offset = offset; }
1202 int get_constant_pool_offset() { return _offset; }
// Out-of-line (de)serialization of _offset with the relocation stream.
1203 void pack_data_to(CodeSection * dest);
1204 void unpack_data();
1205 };
1206
1207 // Trampoline Relocations.
1208 // A trampoline makes it possible to encode a short branch in the code, even
1209 // though there is a chance that this branch cannot reach all possible code
1210 // locations. If the relocation finds that a branch is too far for the
1211 // instruction in the code, it can patch it to jump to the trampoline, where
1212 // there is sufficient space for a far branch. Needed on PPC.
1213 class trampoline_stub_Relocation : public Relocation {
1214 relocInfo::relocType type() { return relocInfo::trampoline_stub_type; }
1215
1216 public:
1217 static RelocationHolder spec(address static_call) {
1218 RelocationHolder rh = newHolder();
1219 return (new (rh) trampoline_stub_Relocation(static_call));
1220 }
1221
1222 private:
1223 address _owner; // Address of the NativeCall that owns the trampoline.
1224
|