112 // can be NULL in CMS
113 if (UseCompressedClassPointers) {
114 return Klass::decode_klass(_metadata._compressed_klass);
115 } else {
116 return _metadata._klass;
117 }
118 }
119
120 Klass** oopDesc::klass_addr() {
121 // Only used internally and with CMS and will not work with
122 // UseCompressedOops
123 assert(!UseCompressedClassPointers, "only supported with uncompressed klass pointers");
124 return (Klass**) &_metadata._klass;
125 }
126
127 narrowKlass* oopDesc::compressed_klass_addr() {
128 assert(UseCompressedClassPointers, "only called by compressed klass pointers");
129 return &_metadata._compressed_klass;
130 }
131
132 void oopDesc::set_klass(Klass* k) {
133 // since klasses are promoted no store check is needed
134 assert(Universe::is_bootstrapping() || k != NULL, "must be a real Klass*");
135 assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass*");
136 if (UseCompressedClassPointers) {
137 *compressed_klass_addr() = Klass::encode_klass_not_null(k);
138 } else {
139 *klass_addr() = k;
140 }
141 }
142
143 int oopDesc::klass_gap() const {
144 return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes());
145 }
146
147 void oopDesc::set_klass_gap(int v) {
148 if (UseCompressedClassPointers) {
149 *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes()) = v;
150 }
151 }
152
153 void oopDesc::set_klass_to_list_ptr(oop k) {
154 // This is only to be used during GC, for from-space objects, so no
155 // barrier is needed.
156 if (UseCompressedClassPointers) {
157 _metadata._compressed_klass = (narrowKlass)encode_heap_oop(k); // may be null (parnew overflow handling)
158 } else {
159 _metadata._klass = (Klass*)(address)k;
160 }
161 }
|
112 // can be NULL in CMS
113 if (UseCompressedClassPointers) {
114 return Klass::decode_klass(_metadata._compressed_klass);
115 } else {
116 return _metadata._klass;
117 }
118 }
119
120 Klass** oopDesc::klass_addr() {
121 // Only used internally and with CMS and will not work with
122 // UseCompressedOops
123 assert(!UseCompressedClassPointers, "only supported with uncompressed klass pointers");
124 return (Klass**) &_metadata._klass;
125 }
126
127 narrowKlass* oopDesc::compressed_klass_addr() {
128 assert(UseCompressedClassPointers, "only called by compressed klass pointers");
129 return &_metadata._compressed_klass;
130 }
131
// Shared argument validation for set_klass()/release_set_klass(): k must
// be a real Klass* except while the universe is still bootstrapping.
// The macro parameter is parenthesized so that any caller-supplied
// expression (not just a plain identifier) parses correctly.
#define CHECK_SET_KLASS(k)                                \
  do {                                                    \
    assert(Universe::is_bootstrapping() || (k) != NULL, "NULL Klass"); \
    assert(Universe::is_bootstrapping() || (k)->is_klass(), "not a Klass"); \
  } while (0)
137
138 void oopDesc::set_klass(Klass* k) {
139 CHECK_SET_KLASS(k);
140 if (UseCompressedClassPointers) {
141 *compressed_klass_addr() = Klass::encode_klass_not_null(k);
142 } else {
143 *klass_addr() = k;
144 }
145 }
146
147 void oopDesc::release_set_klass(Klass* k) {
148 CHECK_SET_KLASS(k);
149 if (UseCompressedClassPointers) {
150 OrderAccess::release_store(compressed_klass_addr(),
151 Klass::encode_klass_not_null(k));
152 } else {
153 OrderAccess::release_store_ptr(klass_addr(), k);
154 }
155 }
156
157 #undef CHECK_SET_KLASS
158
159 int oopDesc::klass_gap() const {
160 return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes());
161 }
162
163 void oopDesc::set_klass_gap(int v) {
164 if (UseCompressedClassPointers) {
165 *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes()) = v;
166 }
167 }
168
169 void oopDesc::set_klass_to_list_ptr(oop k) {
170 // This is only to be used during GC, for from-space objects, so no
171 // barrier is needed.
172 if (UseCompressedClassPointers) {
173 _metadata._compressed_klass = (narrowKlass)encode_heap_oop(k); // may be null (parnew overflow handling)
174 } else {
175 _metadata._klass = (Klass*)(address)k;
176 }
177 }
|