79 }
80
81 // Unrewrite the bytecodes if an error occurs.
82 void Rewriter::restore_bytecodes() {
83 int len = _methods->length();
84 bool invokespecial_error = false;
85
86 for (int i = len-1; i >= 0; i--) {
87 Method* method = _methods->at(i);
88 scan_method(method, true, &invokespecial_error);
89 assert(!invokespecial_error, "reversing should not get an invokespecial error");
90 }
91 }
92
93 // Creates a constant pool cache given a CPC map
void Rewriter::make_constant_pool_cache(TRAPS) {
  // Allocate the ConstantPoolCache in the holder class's loader metaspace,
  // built from the maps accumulated while rewriting the bytecodes.
  // CHECK: an allocation failure propagates before the pool is modified.
  ClassLoaderData* loader_data = _pool->pool_holder()->class_loader_data();
  ConstantPoolCache* cache =
      ConstantPoolCache::allocate(loader_data, _cp_cache_map,
                                  _invokedynamic_cp_cache_map,
                                  _invokedynamic_references_map, CHECK);

  // initialize object cache in constant pool
  _pool->set_cache(cache);
  cache->set_constant_pool(_pool());

  // _resolved_references is stored in pool->cache(), so need to be done after
  // the above lines.
  // Passed THREAD rather than CHECK on purpose: a pending exception is
  // handled locally below so the partially-installed cache can be cleaned up.
  _pool->initialize_resolved_references(loader_data, _resolved_references_map,
                                        _resolved_reference_limit,
                                        THREAD);

  // Clean up constant pool cache if initialize_resolved_references() failed.
  if (HAS_PENDING_EXCEPTION) {
    MetadataFactory::free_metadata(loader_data, cache);
    _pool->set_cache(NULL); // so the verifier isn't confused
  } else {
    // Debug-only sanity check of the fresh cache when dumping a CDS archive.
    DEBUG_ONLY(
    if (DumpSharedSpaces) {
      cache->verify_just_initialized();
    })
  }
}
206
207
208 // Adjust the invocation bytecode for a signature-polymorphic method (MethodHandle.invoke, etc.)
209 void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, int cache_index, bool reverse) {
210 if (!reverse) {
211 if ((*opc) == (u1)Bytecodes::_invokevirtual ||
212 // allow invokespecial as an alias, although it would be very odd:
213 (*opc) == (u1)Bytecodes::_invokespecial) {
214 assert(_pool->tag_at(cp_index).is_method(), "wrong index");
215 // Determine whether this is a signature-polymorphic method.
216 if (cp_index >= _method_handle_invokers.length()) return;
217 int status = _method_handle_invokers.at(cp_index);
218 assert(status >= -1 && status <= 1, "oob tri-state");
219 if (status == 0) {
220 if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_MethodHandle() &&
221 MethodHandles::is_signature_polymorphic_name(SystemDictionary::MethodHandle_klass(),
222 _pool->name_ref_at(cp_index))) {
223 // we may need a resolved_refs entry for the appendix
224 add_invokedynamic_resolved_references_entries(cp_index, cache_index);
225 status = +1;
226 } else if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_VarHandle() &&
227 MethodHandles::is_signature_polymorphic_name(SystemDictionary::VarHandle_klass(),
228 _pool->name_ref_at(cp_index))) {
229 // we may need a resolved_refs entry for the appendix
230 add_invokedynamic_resolved_references_entries(cp_index, cache_index);
231 status = +1;
232 } else {
233 status = -1;
234 }
235 _method_handle_invokers.at(cp_index) = status;
236 }
237 // We use a special internal bytecode for such methods (if non-static).
238 // The basic reason for this is that such methods need an extra "appendix" argument
239 // to transmit the call site's intended call type.
240 if (status > 0) {
241 (*opc) = (u1)Bytecodes::_invokehandle;
242 }
243 }
244 } else {
245 // Do not need to look at cp_index.
246 if ((*opc) == (u1)Bytecodes::_invokehandle) {
247 (*opc) = (u1)Bytecodes::_invokevirtual;
248 // Ignore corner case of original _invokespecial instruction.
249 // This is safe because (a) the signature polymorphic method was final, and
250 // (b) the implementation of MethodHandle will not call invokespecial on it.
251 }
252 }
253 }
254
255
// Rewrite (or, with reverse=true, un-rewrite) the operand of one
// invokedynamic instruction; p addresses the operand bytes after the opcode.
void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
  if (!reverse) {
    int cp_index = Bytes::get_Java_u2(p);
    int cache_index = add_invokedynamic_cp_cache_entry(cp_index);
    int resolved_index = add_invokedynamic_resolved_references_entries(cp_index, cache_index);
    // Replace the trailing four bytes with a CPC index for the dynamic
    // call site. Unlike other CPC entries, there is one per bytecode,
    // not just one per distinct CP entry. In other words, the
    // CPC-to-CP relation is many-to-one for invokedynamic entries.
    // This means we must use a larger index size than u2 to address
    // all these entries. That is the main reason invokedynamic
    // must have a five-byte instruction format. (Of course, other JVM
    // implementations can use the bytes for other purposes.)
    // Note: We use native_u4 format exclusively for 4-byte indexes.
    Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index));
    // add the bcp in case we need to patch this bytecode if we also find a
    // invokespecial/InterfaceMethodref in the bytecode stream
    _patch_invokedynamic_bcps->push(p);
    _patch_invokedynamic_refs->push(resolved_index);
  } else {
    int cache_index = ConstantPool::decode_invokedynamic_index(
                        Bytes::get_native_u4(p));
    // We will reverse the bytecode rewriting _after_ adjusting them.
    // Adjust the cache index by offset to the invokedynamic entries in the
    // cpCache plus the delta if the invokedynamic bytecodes were adjusted.
    // NOTE(review): the statements that map cache_index back to the original
    // CP index are not visible in this chunk — confirm against the full file.
  }
291
292 void Rewriter::patch_invokedynamic_bytecodes() {
293 // If the end of the cp_cache is the same as after initializing with the
294 // cpool, nothing needs to be done. Invokedynamic bytecodes are at the
295 // correct offsets. ie. no invokespecials added
296 int delta = cp_cache_delta();
297 if (delta > 0) {
298 int length = _patch_invokedynamic_bcps->length();
299 assert(length == _patch_invokedynamic_refs->length(),
300 "lengths should match");
301 for (int i = 0; i < length; i++) {
302 address p = _patch_invokedynamic_bcps->at(i);
303 int cache_index = ConstantPool::decode_invokedynamic_index(
304 Bytes::get_native_u4(p));
305 Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index + delta));
306
307 // invokedynamic resolved references map also points to cp cache and must
308 // add delta to each.
309 int resolved_index = _patch_invokedynamic_refs->at(i);
310 for (int entry = 0; entry < ConstantPoolCacheEntry::_indy_resolved_references_entries; entry++) {
311 assert(_invokedynamic_references_map.at(resolved_index + entry) == cache_index,
312 "should be the same index");
313 _invokedynamic_references_map.at_put(resolved_index+entry,
314 cache_index + delta);
315 }
316 }
317 }
318 }
319
320
321 // Rewrite some ldc bytecodes to _fast_aldc
void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
                                 bool reverse) {
  if (!reverse) {
    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
    address p = bcp + offset;
    // Narrow ldc carries a u1 CP index; ldc_w carries a big-endian u2.
    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
    constantTag tag = _pool->tag_at(cp_index).value();

    // Only constants that push an object reference qualify for _fast_aldc:
    if (tag.is_method_handle() ||
        tag.is_method_type() ||
        tag.is_string() ||
        (tag.is_dynamic_constant() &&
         // keep regular ldc interpreter logic for condy primitives
         is_reference_type(FieldType::basic_type(_pool->uncached_signature_ref_at(cp_index))))
567 patch_invokedynamic_bytecodes();
568 }
569
570 void Rewriter::rewrite(InstanceKlass* klass, TRAPS) {
571 if (!DumpSharedSpaces) {
572 assert(!klass->is_shared(), "archive methods must not be rewritten at run time");
573 }
574 ResourceMark rm(THREAD);
575 Rewriter rw(klass, klass->constants(), klass->methods(), CHECK);
576 // (That's all, folks.)
577 }
578
// Rewrites the bytecodes of all methods of klass and builds its cpCache.
Rewriter::Rewriter(InstanceKlass* klass, const constantPoolHandle& cpool, Array<Method*>* methods, TRAPS)
  : _klass(klass),
    _pool(cpool),
    _methods(methods),
    _cp_map(cpool->length()),
    // The maps below use heuristic initial capacities derived from the
    // constant pool length; presumably they grow on demand — TODO confirm.
    _cp_cache_map(cpool->length() / 2),
    _reference_map(cpool->length()),
    _resolved_references_map(cpool->length() / 2),
    _invokedynamic_references_map(cpool->length() / 2),
    _method_handle_invokers(cpool->length()),
    _invokedynamic_cp_cache_map(cpool->length() / 4)
{

  // Rewrite bytecodes - exception here exits.
  rewrite_bytecodes(CHECK);

  // Stress restoring bytecodes
  if (StressRewriter) {
    restore_bytecodes();
    rewrite_bytecodes(CHECK);
  }

  // allocate constant pool cache, now that we've seen all the bytecodes
  // THREAD (not CHECK) on purpose: a failure must fall through so the
  // bytecodes can be restored below before the exception propagates.
  make_constant_pool_cache(THREAD);

  // Restore bytecodes to their unrewritten state if there are exceptions
  // rewriting bytecodes or allocating the cpCache
  if (HAS_PENDING_EXCEPTION) {
    restore_bytecodes();
|
79 }
80
81 // Unrewrite the bytecodes if an error occurs.
82 void Rewriter::restore_bytecodes() {
83 int len = _methods->length();
84 bool invokespecial_error = false;
85
86 for (int i = len-1; i >= 0; i--) {
87 Method* method = _methods->at(i);
88 scan_method(method, true, &invokespecial_error);
89 assert(!invokespecial_error, "reversing should not get an invokespecial error");
90 }
91 }
92
93 // Creates a constant pool cache given a CPC map
void Rewriter::make_constant_pool_cache(TRAPS) {
  // Allocate the ConstantPoolCache in the holder class's loader metaspace,
  // built from the maps accumulated while rewriting the bytecodes.
  // CHECK: an allocation failure propagates before the pool is modified.
  ClassLoaderData* loader_data = _pool->pool_holder()->class_loader_data();
  ConstantPoolCache* cache =
      ConstantPoolCache::allocate(loader_data, _cp_cache_map,
                                  _invokedynamic_cp_cache_map,
                                  _appendix_references_map, CHECK);

  // initialize object cache in constant pool
  _pool->set_cache(cache);
  cache->set_constant_pool(_pool());

  // _resolved_references is stored in pool->cache(), so need to be done after
  // the above lines.
  // Passed THREAD rather than CHECK on purpose: a pending exception is
  // handled locally below so the partially-installed cache can be cleaned up.
  _pool->initialize_resolved_references(loader_data, _resolved_references_map,
                                        _resolved_reference_limit,
                                        THREAD);

  // Clean up constant pool cache if initialize_resolved_references() failed.
  if (HAS_PENDING_EXCEPTION) {
    MetadataFactory::free_metadata(loader_data, cache);
    _pool->set_cache(NULL); // so the verifier isn't confused
  } else {
    // Debug-only sanity check of the fresh cache when dumping a CDS archive.
    DEBUG_ONLY(
    if (DumpSharedSpaces) {
      cache->verify_just_initialized();
    })
  }
}
206
207
208 // Adjust the invocation bytecode for a signature-polymorphic method (MethodHandle.invoke, etc.)
209 void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, int cache_index, bool reverse) {
210 if (!reverse) {
211 if ((*opc) == (u1)Bytecodes::_invokevirtual ||
212 // allow invokespecial as an alias, although it would be very odd:
213 (*opc) == (u1)Bytecodes::_invokespecial) {
214 assert(_pool->tag_at(cp_index).is_method(), "wrong index");
215 // Determine whether this is a signature-polymorphic method.
216 if (cp_index >= _method_handle_invokers.length()) return;
217 int status = _method_handle_invokers.at(cp_index);
218 assert(status >= -1 && status <= 1, "oob tri-state");
219 if (status == 0) {
220 if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_MethodHandle() &&
221 MethodHandles::is_signature_polymorphic_name(SystemDictionary::MethodHandle_klass(),
222 _pool->name_ref_at(cp_index))) {
223 // we may need a resolved_refs entry for the appendix
224 add_appendix_references_entry(cp_index, cache_index);
225 status = +1;
226 } else if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_VarHandle() &&
227 MethodHandles::is_signature_polymorphic_name(SystemDictionary::VarHandle_klass(),
228 _pool->name_ref_at(cp_index))) {
229 // we may need a resolved_refs entry for the appendix
230 add_appendix_references_entry(cp_index, cache_index);
231 status = +1;
232 } else {
233 status = -1;
234 }
235 _method_handle_invokers.at(cp_index) = status;
236 }
237 // We use a special internal bytecode for such methods (if non-static).
238 // The basic reason for this is that such methods need an extra "appendix" argument
239 // to transmit the call site's intended call type.
240 if (status > 0) {
241 (*opc) = (u1)Bytecodes::_invokehandle;
242 }
243 }
244 } else {
245 // Do not need to look at cp_index.
246 if ((*opc) == (u1)Bytecodes::_invokehandle) {
247 (*opc) = (u1)Bytecodes::_invokevirtual;
248 // Ignore corner case of original _invokespecial instruction.
249 // This is safe because (a) the signature polymorphic method was final, and
250 // (b) the implementation of MethodHandle will not call invokespecial on it.
251 }
252 }
253 }
254
255
// Rewrite (or, with reverse=true, un-rewrite) the operand of one
// invokedynamic instruction; p addresses the operand bytes after the opcode.
void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
  if (!reverse) {
    int cp_index = Bytes::get_Java_u2(p);
    int cache_index = add_invokedynamic_cp_cache_entry(cp_index);
    int resolved_index = add_appendix_references_entry(cp_index, cache_index);
    // Replace the trailing four bytes with a CPC index for the dynamic
    // call site. Unlike other CPC entries, there is one per bytecode,
    // not just one per distinct CP entry. In other words, the
    // CPC-to-CP relation is many-to-one for invokedynamic entries.
    // This means we must use a larger index size than u2 to address
    // all these entries. That is the main reason invokedynamic
    // must have a five-byte instruction format. (Of course, other JVM
    // implementations can use the bytes for other purposes.)
    // Note: We use native_u4 format exclusively for 4-byte indexes.
    Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index));
    // add the bcp in case we need to patch this bytecode if we also find a
    // invokespecial/InterfaceMethodref in the bytecode stream
    _patch_invokedynamic_bcps->push(p);
    _patch_invokedynamic_refs->push(resolved_index);
  } else {
    int cache_index = ConstantPool::decode_invokedynamic_index(
                        Bytes::get_native_u4(p));
    // We will reverse the bytecode rewriting _after_ adjusting them.
    // Adjust the cache index by offset to the invokedynamic entries in the
    // cpCache plus the delta if the invokedynamic bytecodes were adjusted.
    // NOTE(review): the statements that map cache_index back to the original
    // CP index are not visible in this chunk — confirm against the full file.
  }
291
292 void Rewriter::patch_invokedynamic_bytecodes() {
293 // If the end of the cp_cache is the same as after initializing with the
294 // cpool, nothing needs to be done. Invokedynamic bytecodes are at the
295 // correct offsets. ie. no invokespecials added
296 int delta = cp_cache_delta();
297 if (delta > 0) {
298 int length = _patch_invokedynamic_bcps->length();
299 assert(length == _patch_invokedynamic_refs->length(),
300 "lengths should match");
301 for (int i = 0; i < length; i++) {
302 address p = _patch_invokedynamic_bcps->at(i);
303 int cache_index = ConstantPool::decode_invokedynamic_index(
304 Bytes::get_native_u4(p));
305 Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index + delta));
306
307 // invokedynamic resolved references map also points to cp cache and must
308 // add delta to each.
309 int resolved_index = _patch_invokedynamic_refs->at(i);
310 assert(_appendix_references_map.at(resolved_index) == cache_index,
311 "should be the same index");
312 _appendix_references_map.at_put(resolved_index, cache_index + delta);
313 }
314 }
315 }
316
317
318 // Rewrite some ldc bytecodes to _fast_aldc
void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
                                 bool reverse) {
  if (!reverse) {
    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
    address p = bcp + offset;
    // Narrow ldc carries a u1 CP index; ldc_w carries a big-endian u2.
    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
    constantTag tag = _pool->tag_at(cp_index).value();

    // Only constants that push an object reference qualify for _fast_aldc:
    if (tag.is_method_handle() ||
        tag.is_method_type() ||
        tag.is_string() ||
        (tag.is_dynamic_constant() &&
         // keep regular ldc interpreter logic for condy primitives
         is_reference_type(FieldType::basic_type(_pool->uncached_signature_ref_at(cp_index))))
564 patch_invokedynamic_bytecodes();
565 }
566
567 void Rewriter::rewrite(InstanceKlass* klass, TRAPS) {
568 if (!DumpSharedSpaces) {
569 assert(!klass->is_shared(), "archive methods must not be rewritten at run time");
570 }
571 ResourceMark rm(THREAD);
572 Rewriter rw(klass, klass->constants(), klass->methods(), CHECK);
573 // (That's all, folks.)
574 }
575
// Rewrites the bytecodes of all methods of klass and builds its cpCache.
Rewriter::Rewriter(InstanceKlass* klass, const constantPoolHandle& cpool, Array<Method*>* methods, TRAPS)
  : _klass(klass),
    _pool(cpool),
    _methods(methods),
    _cp_map(cpool->length()),
    // The maps below use heuristic initial capacities derived from the
    // constant pool length; presumably they grow on demand — TODO confirm.
    _cp_cache_map(cpool->length() / 2),
    _reference_map(cpool->length()),
    _resolved_references_map(cpool->length() / 2),
    _appendix_references_map(cpool->length() / 2),
    _method_handle_invokers(cpool->length()),
    _invokedynamic_cp_cache_map(cpool->length() / 4)
{

  // Rewrite bytecodes - exception here exits.
  rewrite_bytecodes(CHECK);

  // Stress restoring bytecodes
  if (StressRewriter) {
    restore_bytecodes();
    rewrite_bytecodes(CHECK);
  }

  // allocate constant pool cache, now that we've seen all the bytecodes
  // THREAD (not CHECK) on purpose: a failure must fall through so the
  // bytecodes can be restored below before the exception propagates.
  make_constant_pool_cache(THREAD);

  // Restore bytecodes to their unrewritten state if there are exceptions
  // rewriting bytecodes or allocating the cpCache
  if (HAS_PENDING_EXCEPTION) {
    restore_bytecodes();
|