< prev index next >

src/hotspot/share/oops/oop.inline.hpp

Print this page




  69 void oopDesc::release_set_mark(markOop m) {
  70   HeapAccess<MO_RELEASE>::store_at(as_oop(), mark_offset_in_bytes(), m);
  71 }
  72 
  73 markOop oopDesc::cas_set_mark(markOop new_mark, markOop old_mark) {
  74   return HeapAccess<>::atomic_cmpxchg_at(new_mark, as_oop(), mark_offset_in_bytes(), old_mark);
  75 }
  76 
// Raw CAS on the _mark field, bypassing the Access API; 'order' selects
// the memory ordering passed through to Atomic::cmpxchg. Returns the
// value of _mark observed by the CAS.
markOop oopDesc::cas_set_mark_raw(markOop new_mark, markOop old_mark, atomic_memory_order order) {
  return Atomic::cmpxchg(new_mark, &_mark, old_mark, order);
}
  80 
// Initialize the mark word to the prototype mark for this object.
void oopDesc::init_mark() {
  set_mark(markOopDesc::prototype_for_object(this));
}
  84 
// Like init_mark(), but installs the prototype via the raw setter
// set_mark_raw() instead of set_mark().
void oopDesc::init_mark_raw() {
  set_mark_raw(markOopDesc::prototype_for_object(this));
}
  88 







  89 Klass* oopDesc::klass() const {
  90   if (UseCompressedClassPointers) {
  91     return Klass::decode_klass_not_null(_metadata._compressed_klass);
  92   } else {
  93     return _metadata._klass;
  94   }
  95 }
  96 
  97 Klass* oopDesc::klass_or_null() const volatile {
  98   if (UseCompressedClassPointers) {
  99     return Klass::decode_klass(_metadata._compressed_klass);
 100   } else {
 101     return _metadata._klass;
 102   }
 103 }
 104 
// Acquire-ordered variant of klass_or_null(): reads the klass field with
// OrderAccess::load_acquire so later loads cannot be reordered before it
// (pairs with the release store in release_set_klass).
Klass* oopDesc::klass_or_null_acquire() const volatile {
  if (UseCompressedClassPointers) {
    // Workaround for non-const load_acquire parameter.
    const volatile narrowKlass* addr = &_metadata._compressed_klass;
    volatile narrowKlass* xaddr = const_cast<volatile narrowKlass*>(addr);
    return Klass::decode_klass(OrderAccess::load_acquire(xaddr));
  } else {
    return OrderAccess::load_acquire(&_metadata._klass);
  }
}
 115 
 116 Klass** oopDesc::klass_addr(HeapWord* mem) {
 117   // Only used internally and with CMS and will not work with
 118   // UseCompressedOops
 119   assert(!UseCompressedClassPointers, "only supported with uncompressed klass pointers");
 120   ByteSize offset = byte_offset_of(oopDesc, _metadata._klass);
 121   return (Klass**) (((char*)mem) + in_bytes(offset));
 122 }
 123 






 124 narrowKlass* oopDesc::compressed_klass_addr(HeapWord* mem) {
 125   assert(UseCompressedClassPointers, "only called by compressed klass pointers");
 126   ByteSize offset = byte_offset_of(oopDesc, _metadata._compressed_klass);
 127   return (narrowKlass*) (((char*)mem) + in_bytes(offset));
 128 }
 129 
 130 Klass** oopDesc::klass_addr() {
 131   return klass_addr((HeapWord*)this);
 132 }
 133 
 134 narrowKlass* oopDesc::compressed_klass_addr() {
 135   return compressed_klass_addr((HeapWord*)this);
 136 }
 137 
// Sanity checks applied before installing a klass pointer: outside of
// bootstrapping, the pointer must be non-NULL and refer to a real Klass.
#define CHECK_SET_KLASS(k)                                                \
  do {                                                                    \
    assert(Universe::is_bootstrapping() || k != NULL, "NULL Klass");      \
    assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass"); \
  } while (0)
 143 
 144 void oopDesc::set_klass(Klass* k) {
 145   CHECK_SET_KLASS(k);
 146   if (UseCompressedClassPointers) {
 147     *compressed_klass_addr() = Klass::encode_klass_not_null(k);
 148   } else {
 149     *klass_addr() = k;
 150   }
 151 }
 152 
// Install the klass pointer into the object starting at 'mem' with a
// release store, so earlier initializing stores become visible to
// readers that load the klass with klass_or_null_acquire().
void oopDesc::release_set_klass(HeapWord* mem, Klass* klass) {
  CHECK_SET_KLASS(klass);
  if (UseCompressedClassPointers) {
    OrderAccess::release_store(compressed_klass_addr(mem),
                               Klass::encode_klass_not_null(klass));
  } else {
    OrderAccess::release_store(klass_addr(mem), klass);
  }
}
 162 










 163 #undef CHECK_SET_KLASS










 164 
 165 int oopDesc::klass_gap() const {
 166   return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes());
 167 }
 168 
 169 void oopDesc::set_klass_gap(HeapWord* mem, int v) {
 170   if (UseCompressedClassPointers) {
 171     *(int*)(((char*)mem) + klass_gap_offset_in_bytes()) = v;
 172   }
 173 }
 174 
// Convenience overload: set the klass gap of this object itself.
void oopDesc::set_klass_gap(int v) {
  set_klass_gap((HeapWord*)this, v);
}
 178 
 179 void oopDesc::set_klass_to_list_ptr(oop k) {
 180   // This is only to be used during GC, for from-space objects, so no
 181   // barrier is needed.
 182   if (UseCompressedClassPointers) {
 183     _metadata._compressed_klass = (narrowKlass)CompressedOops::encode(k);  // may be null (parnew overflow handling)




// Store the mark word through the Access API with release (MO_RELEASE)
// ordering.
void oopDesc::release_set_mark(markOop m) {
  HeapAccess<MO_RELEASE>::store_at(as_oop(), mark_offset_in_bytes(), m);
}
  72 
// Atomically install new_mark iff the mark word currently equals
// old_mark; returns the mark word value observed by the CAS.
markOop oopDesc::cas_set_mark(markOop new_mark, markOop old_mark) {
  return HeapAccess<>::atomic_cmpxchg_at(new_mark, as_oop(), mark_offset_in_bytes(), old_mark);
}
  76 
// Raw CAS on the _mark field, bypassing the Access API; 'order' selects
// the memory ordering passed through to Atomic::cmpxchg.
markOop oopDesc::cas_set_mark_raw(markOop new_mark, markOop old_mark, atomic_memory_order order) {
  return Atomic::cmpxchg(new_mark, &_mark, old_mark, order);
}
  80 
// Initialize the mark word to the prototype mark for this object.
void oopDesc::init_mark() {
  set_mark(markOopDesc::prototype_for_object(this));
}
  84 
// Like init_mark(), but installs the prototype via the raw setter
// set_mark_raw() instead of set_mark().
void oopDesc::init_mark_raw() {
  set_mark_raw(markOopDesc::prototype_for_object(this));
}
  88 
// Masks isolating the klass bits of the metadata word: the array storage
// property bits occupy the bits at and above *_storage_props_shift, the
// (possibly narrow) klass value the bits below.
narrowKlass oopDesc::compressed_klass_mask() { return ((narrowKlass) 1 << narrow_storage_props_shift) - 1; }
uintptr_t   oopDesc::klass_mask()   { return ((uintptr_t) 1 << wide_storage_props_shift) - 1; }

// Strip the storage-property bits, leaving only the klass bits.
narrowKlass oopDesc::compressed_klass_masked(narrowKlass raw) { return raw & compressed_klass_mask(); }
Klass*      oopDesc::klass_masked(uintptr_t raw)     { return reinterpret_cast<Klass*>(raw & klass_mask()); }
  94 
  95 
  96 Klass* oopDesc::klass() const {
  97   if (UseCompressedClassPointers) {
  98     return Klass::decode_klass_not_null(compressed_klass_masked(_metadata._compressed_klass));
  99   } else {
 100     return klass_masked(_metadata._wide_storage_props);
 101   }
 102 }
 103 
// Like klass(), but tolerates an unset klass field: uses decode_klass
// rather than decode_klass_not_null.
Klass* oopDesc::klass_or_null() const volatile {
  if (UseCompressedClassPointers) {
    return Klass::decode_klass(compressed_klass_masked(_metadata._compressed_klass));
  } else {
    return klass_masked(_metadata._wide_storage_props);
  }
}
 111 
// Acquire-ordered variant of klass_or_null(): reads the metadata word
// with OrderAccess::load_acquire (pairing with release_set_klass /
// release_set_metadata) before stripping the storage-property bits.
Klass* oopDesc::klass_or_null_acquire() const volatile {
  if (UseCompressedClassPointers) {
    // Workaround for non-const load_acquire parameter.
    const volatile narrowKlass* addr = &_metadata._compressed_klass;
    volatile narrowKlass* xaddr = const_cast<volatile narrowKlass*>(addr);
    return Klass::decode_klass(compressed_klass_masked(OrderAccess::load_acquire(xaddr)));
  } else {
    return klass_masked(OrderAccess::load_acquire(&_metadata._wide_storage_props));
  }
}
 122 
// Address of the uncompressed Klass* slot within the object starting at
// 'mem'. Only valid when class pointers are not compressed.
Klass** oopDesc::klass_addr(HeapWord* mem) {
  // Only used internally and with CMS and will not work with
  // UseCompressedOops
  assert(!UseCompressedClassPointers, "only supported with uncompressed klass pointers");
  ByteSize offset = byte_offset_of(oopDesc, _metadata._klass);
  return (Klass**) (((char*)mem) + in_bytes(offset));
}
 130 
 131 uintptr_t* oopDesc::wide_metadata_addr(HeapWord* mem) {
 132   assert(!UseCompressedClassPointers, "only supported with uncompressed klass pointers");
 133   ByteSize offset = byte_offset_of(oopDesc, _metadata._wide_storage_props);
 134   return (uintptr_t*) (((char*)mem) + in_bytes(offset));
 135 }
 136 
// Address of the narrowKlass slot within the object starting at 'mem'.
// Only valid when compressed class pointers are in use.
narrowKlass* oopDesc::compressed_klass_addr(HeapWord* mem) {
  assert(UseCompressedClassPointers, "only called by compressed klass pointers");
  ByteSize offset = byte_offset_of(oopDesc, _metadata._compressed_klass);
  return (narrowKlass*) (((char*)mem) + in_bytes(offset));
}
 142 
// Convenience overload: Klass* slot address of this object itself.
Klass** oopDesc::klass_addr() {
  return klass_addr((HeapWord*)this);
}
 146 
// Convenience overload: narrowKlass slot address of this object itself.
narrowKlass* oopDesc::compressed_klass_addr() {
  return compressed_klass_addr((HeapWord*)this);
}
 150 
// Sanity checks applied before installing a klass pointer: outside of
// bootstrapping it must be non-NULL, a real Klass, and its address must
// not overlap the metadata bits reserved for array storage properties.
#define CHECK_SET_KLASS(k)                                                \
  do {                                                                    \
    assert(Universe::is_bootstrapping() || k != NULL, "NULL Klass");      \
    assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass"); \
    assert(((reinterpret_cast<uintptr_t>(k) & (~ oopDesc::klass_mask())) == 0), \
      "No room for storage props "); \
  } while (0)
 158 
 159 void oopDesc::set_klass(Klass* k) {
 160   CHECK_SET_KLASS(k);
 161   if (UseCompressedClassPointers) {
 162     *compressed_klass_addr() = Klass::encode_klass_not_null(k);
 163   } else {
 164     *klass_addr() = k;
 165   }
 166 }
 167 
// Install the klass pointer into the object starting at 'mem' with a
// release store (pairs with klass_or_null_acquire), then verify the
// stored value decodes back to the same Klass.
void oopDesc::release_set_klass(HeapWord* mem, Klass* klass) {
  CHECK_SET_KLASS(klass);
  if (UseCompressedClassPointers) {
    OrderAccess::release_store(compressed_klass_addr(mem),
                               Klass::encode_klass_not_null(klass));
  } else {
    OrderAccess::release_store(klass_addr(mem), klass);
  }
  assert(((oopDesc*)mem)->klass() == klass, "failed oopDesc::klass() encode/decode");
}
 178 
// Plain store of the combined metadata word: the (possibly encoded)
// klass in the low bits, the array storage properties ORed in at
// *_storage_props_shift (CHECK_SET_KLASS guarantees no overlap).
void oopDesc::set_metadata(ArrayStorageProperties storage_props, Klass* klass) {
  CHECK_SET_KLASS(klass);
  if (UseCompressedClassPointers) {
    *compressed_klass_addr() = (Klass::encode_klass_not_null(klass) | storage_props.encode<narrowKlass>(narrow_storage_props_shift));
  } else {
    *wide_metadata_addr((HeapWord*)this) = (reinterpret_cast<uintptr_t>(klass) | storage_props.encode<uintptr_t>(wide_storage_props_shift));
  }
}
 187 
// Like set_metadata(), but publishes the combined metadata word with a
// release store so earlier initializing stores are visible to readers
// using klass_or_null_acquire().
void oopDesc::release_set_metadata(HeapWord* mem, ArrayStorageProperties storage_props, Klass* klass) {
  CHECK_SET_KLASS(klass);
  if (UseCompressedClassPointers) {
    OrderAccess::release_store(oopDesc::compressed_klass_addr(mem),
                               Klass::encode_klass_not_null(klass) | storage_props.encode<narrowKlass>(narrow_storage_props_shift));
  } else {
    OrderAccess::release_store(oopDesc::wide_metadata_addr(mem),
                               (reinterpret_cast<uintptr_t>(klass) | storage_props.encode<uintptr_t>(wide_storage_props_shift)));
  }
}
 198 #undef CHECK_SET_KLASS
 199 
 200 
// Extract the array storage property bits from the metadata word (the
// bits at and above the *_storage_props_shift).
ArrayStorageProperties oopDesc::array_storage_properties() const {
  if (UseCompressedClassPointers) {
    return ArrayStorageProperties(_metadata._narrow_storage_props >> narrow_storage_props_shift);
  } else {
    return ArrayStorageProperties(_metadata._wide_storage_props >> wide_storage_props_shift);
  }
}
 208 
 209 
// Read the 32-bit klass gap field located at klass_gap_offset_in_bytes()
// from the start of this object.
int oopDesc::klass_gap() const {
  return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes());
}
 213 
// Write the klass gap of the object starting at 'mem'. The gap only
// exists with compressed class pointers; otherwise this is a no-op.
void oopDesc::set_klass_gap(HeapWord* mem, int v) {
  if (UseCompressedClassPointers) {
    *(int*)(((char*)mem) + klass_gap_offset_in_bytes()) = v;
  }
}
 219 
// Convenience overload: set the klass gap of this object itself.
void oopDesc::set_klass_gap(int v) {
  set_klass_gap((HeapWord*)this, v);
}
 223 
 224 void oopDesc::set_klass_to_list_ptr(oop k) {
 225   // This is only to be used during GC, for from-space objects, so no
 226   // barrier is needed.
 227   if (UseCompressedClassPointers) {
 228     _metadata._compressed_klass = (narrowKlass)CompressedOops::encode(k);  // may be null (parnew overflow handling)


< prev index next >