src/cpu/sparc/vm/c1_CodeStubs_sparc.cpp

rev 5100 : 7199175: JSR 292: C1 needs patching when invokedynamic/invokehandle call site is not linked
Summary: Do patching rather than bailing out for an unlinked call with an appendix
Reviewed-by: twisti, kvn
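
For readers new to C1 patching stubs, the record emitted below is worth pinning down. After copying the original instruction bytes aside, PatchingStub::emit_code writes a four-byte patch record (the four emit_int8 calls at source lines 372-375) from which the runtime can later locate everything it needs to redo the patch. A minimal decoding sketch in plain C++, assuming sizeof_patch_record is exactly these four bytes; PatchRecord and being_initialized_entry_of are names invented here, not HotSpot types:

  #include <cstdint>

  // Hypothetical mirror of the four bytes emitted by the stub
  // ("PatchRecord" is invented for this sketch, not a HotSpot type).
  struct PatchRecord {
    uint8_t pad;                             // emit_int8(0): rounds the record up to a full word
    uint8_t being_initialized_entry_offset;  // distance back from patch_info_pc to being_initialized_entry
    uint8_t bytes_to_skip;                   // distance from end_of_patch to this record
    uint8_t bytes_to_copy;                   // number of original instruction bytes saved aside
  };

  // Recover the being-initialized entry from patch_info_pc (the pc taken
  // just after the record), matching the offset arithmetic at source line 369.
  static inline const uint8_t* being_initialized_entry_of(const uint8_t* patch_info_pc) {
    const PatchRecord* rec =
        reinterpret_cast<const PatchRecord*>(patch_info_pc - sizeof(PatchRecord));
    return patch_info_pc - rec->being_initialized_entry_offset;
  }

------- Old version (before the fix) -------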


 290 
 291   Label call_patch;
 292 
 293   int being_initialized_entry = __ offset();
 294 
 295   if (_id == load_klass_id) {
 296     // produce a copy of the load klass instruction for use by the being initialized case
 297 #ifdef ASSERT
 298     address start = __ pc();
 299 #endif
 300     AddressLiteral addrlit(NULL, metadata_Relocation::spec(_index));
 301     __ patchable_set(addrlit, _obj);
 302 
 303 #ifdef ASSERT
 304     for (int i = 0; i < _bytes_to_copy; i++) {
 305       address ptr = (address)(_pc_start + i);
 306       int a_byte = (*ptr) & 0xFF;
 307       assert(a_byte == *start++, "should be the same code");
 308     }
 309 #endif
 310   } else if (_id == load_mirror_id) {
 311     // produce a copy of the load mirror instruction for use by the being initialized case
 312 #ifdef ASSERT
 313     address start = __ pc();
 314 #endif
 315     AddressLiteral addrlit(NULL, oop_Relocation::spec(_index));
 316     __ patchable_set(addrlit, _obj);
 317 
 318 #ifdef ASSERT
 319     for (int i = 0; i < _bytes_to_copy; i++) {
 320       address ptr = (address)(_pc_start + i);
 321       int a_byte = (*ptr) & 0xFF;
 322       assert(a_byte == *start++, "should be the same code");
 323     }
 324 #endif
 325   } else {
 326     // make a copy of the code which is going to be patched.
 327     for (int i = 0; i < _bytes_to_copy; i++) {
 328       address ptr = (address)(_pc_start + i);
 329       int a_byte = (*ptr) & 0xFF;
 330       __ emit_int8 (a_byte);

      ... (source lines 331-366 not shown in this view) ...

 367 
 368   // emit the offsets needed to find the code to patch
 369   int being_initialized_entry_offset = __ offset() - being_initialized_entry + sizeof_patch_record;
 370 
 371   // Emit the patch record.  We need to emit a full word, so emit an extra empty byte
 372   __ emit_int8(0);
 373   __ emit_int8(being_initialized_entry_offset);
 374   __ emit_int8(bytes_to_skip);
 375   __ emit_int8(_bytes_to_copy);
 376   address patch_info_pc = __ pc();
 377   assert(patch_info_pc - end_of_patch == bytes_to_skip, "incorrect patch info");
 378 
 379   address entry = __ pc();
 380   NativeGeneralJump::insert_unconditional((address)_pc_start, entry);
 381   address target = NULL;
 382   relocInfo::relocType reloc_type = relocInfo::none;
 383   switch (_id) {
 384     case access_field_id:  target = Runtime1::entry_for(Runtime1::access_field_patching_id); break;
 385     case load_klass_id:    target = Runtime1::entry_for(Runtime1::load_klass_patching_id); reloc_type = relocInfo::metadata_type; break;
 386     case load_mirror_id:   target = Runtime1::entry_for(Runtime1::load_mirror_patching_id); reloc_type = relocInfo::oop_type; break;
 387     default: ShouldNotReachHere();
 388   }
 389   __ bind(call_patch);
 390 
 391   if (CommentedAssembly) {
 392     __ block_comment("patch entry point");
 393   }
 394   __ call(target, relocInfo::runtime_call_type);
 395   __ delayed()->nop();
 396   assert(_patch_info_offset == (patch_info_pc - __ pc()), "must not change");
 397   ce->add_call_info_here(_info);
 398   __ br(Assembler::always, false, Assembler::pt, _patch_site_entry);
 399   __ delayed()->nop();
 400   if (_id == load_klass_id || _id == load_mirror_id) {
 401     CodeSection* cs = __ code_section();
 402     address pc = (address)_pc_start;
 403     RelocIterator iter(cs, pc, pc + 1);
 404     relocInfo::change_reloc_info_for_address(&iter, (address) pc, reloc_type, relocInfo::none);
 405 
 406     pc = (address)(_pc_start + NativeMovConstReg::add_offset);
 407     RelocIterator iter2(cs, pc, pc+1);
 408     relocInfo::change_reloc_info_for_address(&iter2, (address) pc, reloc_type, relocInfo::none);
 409   }
 410 
 411 }
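
One step above deserves a gloss: the relocation downgrade at the end of PatchingStub::emit_code. NativeGeneralJump::insert_unconditional has just overwritten the instructions at _pc_start with a jump to the stub, so the oop/metadata relocations still attached to that address no longer describe a real constant load; left live, they could point GC or relocation processing at a jump instruction. The stub's own copy of the load (emitted by patchable_set earlier) carries the live relocation until the runtime patches the site back. Here are the two calls again, restated with explanatory comments added (on SPARC the patchable constant is a sethi/add pair, hence the second fix-up at NativeMovConstReg::add_offset):

  CodeSection* cs = __ code_section();
  // Downgrade the relocation on the first half of the constant load
  // (the sethi) at the original patch site from reloc_type to none.
  address pc = (address)_pc_start;
  RelocIterator iter(cs, pc, pc + 1);
  relocInfo::change_reloc_info_for_address(&iter, pc, reloc_type, relocInfo::none);
  // ...and likewise for the paired add that fills in the low bits.
  pc = (address)(_pc_start + NativeMovConstReg::add_offset);
  RelocIterator iter2(cs, pc, pc + 1);
  relocInfo::change_reloc_info_for_address(&iter2, pc, reloc_type, relocInfo::none);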
 412 
 413 
 414 void DeoptimizeStub::emit_code(LIR_Assembler* ce) {
 415   __ bind(_entry);
 416   __ call(Runtime1::entry_for(Runtime1::deoptimize_id), relocInfo::runtime_call_type);
 417   __ delayed()->nop();
 418   ce->add_call_info_here(_info);
 419   DEBUG_ONLY(__ should_not_reach_here());
 420 }


------- New version (rev 5100, with the 7199175 fix) -------

 290 
 291   Label call_patch;
 292 
 293   int being_initialized_entry = __ offset();
 294 
 295   if (_id == load_klass_id) {
 296     // produce a copy of the load klass instruction for use by the being initialized case
 297 #ifdef ASSERT
 298     address start = __ pc();
 299 #endif
 300     AddressLiteral addrlit(NULL, metadata_Relocation::spec(_index));
 301     __ patchable_set(addrlit, _obj);
 302 
 303 #ifdef ASSERT
 304     for (int i = 0; i < _bytes_to_copy; i++) {
 305       address ptr = (address)(_pc_start + i);
 306       int a_byte = (*ptr) & 0xFF;
 307       assert(a_byte == *start++, "should be the same code");
 308     }
 309 #endif
 310   } else if (_id == load_mirror_id || _id == load_appendix_id) {
 311     // produce a copy of the load mirror or load appendix instruction for use by the being initialized case
 312 #ifdef ASSERT
 313     address start = __ pc();
 314 #endif
 315     AddressLiteral addrlit(NULL, oop_Relocation::spec(_index));
 316     __ patchable_set(addrlit, _obj);
 317 
 318 #ifdef ASSERT
 319     for (int i = 0; i < _bytes_to_copy; i++) {
 320       address ptr = (address)(_pc_start + i);
 321       int a_byte = (*ptr) & 0xFF;
 322       assert(a_byte == *start++, "should be the same code");
 323     }
 324 #endif
 325   } else {
 326     // make a copy of the code which is going to be patched.
 327     for (int i = 0; i < _bytes_to_copy; i++) {
 328       address ptr = (address)(_pc_start + i);
 329       int a_byte = (*ptr) & 0xFF;
 330       __ emit_int8 (a_byte);

      ... (source lines 331-366 not shown in this view) ...

 367 
 368   // emit the offsets needed to find the code to patch
 369   int being_initialized_entry_offset = __ offset() - being_initialized_entry + sizeof_patch_record;
 370 
 371   // Emit the patch record.  We need to emit a full word, so emit an extra empty byte
 372   __ emit_int8(0);
 373   __ emit_int8(being_initialized_entry_offset);
 374   __ emit_int8(bytes_to_skip);
 375   __ emit_int8(_bytes_to_copy);
 376   address patch_info_pc = __ pc();
 377   assert(patch_info_pc - end_of_patch == bytes_to_skip, "incorrect patch info");
 378 
 379   address entry = __ pc();
 380   NativeGeneralJump::insert_unconditional((address)_pc_start, entry);
 381   address target = NULL;
 382   relocInfo::relocType reloc_type = relocInfo::none;
 383   switch (_id) {
 384     case access_field_id:  target = Runtime1::entry_for(Runtime1::access_field_patching_id); break;
 385     case load_klass_id:    target = Runtime1::entry_for(Runtime1::load_klass_patching_id); reloc_type = relocInfo::metadata_type; break;
 386     case load_mirror_id:   target = Runtime1::entry_for(Runtime1::load_mirror_patching_id); reloc_type = relocInfo::oop_type; break;
 387     case load_appendix_id: target = Runtime1::entry_for(Runtime1::load_appendix_patching_id); reloc_type = relocInfo::oop_type; break;
 388     default: ShouldNotReachHere();
 389   }
 390   __ bind(call_patch);
 391 
 392   if (CommentedAssembly) {
 393     __ block_comment("patch entry point");
 394   }
 395   __ call(target, relocInfo::runtime_call_type);
 396   __ delayed()->nop();
 397   assert(_patch_info_offset == (patch_info_pc - __ pc()), "must not change");
 398   ce->add_call_info_here(_info);
 399   __ br(Assembler::always, false, Assembler::pt, _patch_site_entry);
 400   __ delayed()->nop();
 401   if (_id == load_klass_id || _id == load_mirror_id || _id == load_appendix_id) {
 402     CodeSection* cs = __ code_section();
 403     address pc = (address)_pc_start;
 404     RelocIterator iter(cs, pc, pc + 1);
 405     relocInfo::change_reloc_info_for_address(&iter, (address) pc, reloc_type, relocInfo::none);
 406 
 407     pc = (address)(_pc_start + NativeMovConstReg::add_offset);
 408     RelocIterator iter2(cs, pc, pc+1);
 409     relocInfo::change_reloc_info_for_address(&iter2, (address) pc, reloc_type, relocInfo::none);
 410   }
 411 
 412 }
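
A recurring idiom above is worth a note for readers who don't write SPARC: every branch and call has a delay slot, i.e. the instruction that follows a control transfer executes before the target does. Each __ call and __ br in this stub is therefore paired with __ delayed()->nop() so that nothing unintended runs in the slot. Restated from the code above, with a comment:

  __ call(target, relocInfo::runtime_call_type);
  __ delayed()->nop();   // delay slot: executes before the first instruction at target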
 413 
 414 
 415 void DeoptimizeStub::emit_code(LIR_Assembler* ce) {
 416   __ bind(_entry);
 417   __ call(Runtime1::entry_for(Runtime1::deoptimize_id), relocInfo::runtime_call_type);
 418   __ delayed()->nop();
 419   ce->add_call_info_here(_info);
 420   DEBUG_ONLY(__ should_not_reach_here());
 421 }
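
Finally, the motivation behind 7199175 in one picture: an invokedynamic/invokehandle call site that is not yet linked has no appendix oop for the compiler to embed, and before this change C1 bailed out of compiling such methods. With load_appendix_id the compiler instead emits the not-yet-available constant as a patchable load, exactly as it already did for unresolved mirrors (note the shared relocInfo::oop_type above). The toy below models the patch-on-first-use idea in portable C++; it contains no HotSpot code, and every name in it is invented for illustration:

  #include <atomic>
  #include <cstdio>

  // Toy model: a call site starts out pointing at a resolver; the first
  // call links it and patches the site so later calls go straight through.
  using entry_t = void (*)();

  static void real_target() { std::puts("linked target"); }

  static std::atomic<entry_t> call_site{nullptr};

  static void resolve_and_patch() {
    // Stands in for Runtime1's load_appendix patching entry: resolve the
    // call site, then rewrite the "compiled code" in place.
    call_site.store(&real_target, std::memory_order_release);
    real_target();
  }

  int main() {
    call_site.store(&resolve_and_patch, std::memory_order_relaxed);
    call_site.load(std::memory_order_acquire)();  // first call: resolves and patches
    call_site.load(std::memory_order_acquire)();  // later calls: direct to the target
  }

The real stub of course patches machine code rather than a function pointer, and resolution happens inside the Runtime1 stub, but the before/after shape of the call site is the same.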

