src/cpu/x86/vm/c1_CodeStubs_x86.cpp
Index | Unified diffs | Context diffs | Sdiffs | Patch | New | Old | Previous File | Next File — repository: hs-comp-code-aging — Sdiff view of src/cpu/x86/vm

src/cpu/x86/vm/c1_CodeStubs_x86.cpp

Print this page




 413   __ call(RuntimeAddress(target));
 414   assert(_patch_info_offset == (patch_info_pc - __ pc()), "must not change");
 415   ce->add_call_info_here(_info);
 416   int jmp_off = __ offset();
 417   __ jmp(_patch_site_entry);
 418   // Add enough nops so deoptimization can overwrite the jmp above with a call
 419   // and not destroy the world.
 420   for (int j = __ offset() ; j < jmp_off + 5 ; j++ ) {
 421     __ nop();
 422   }
 423   if (_id == load_klass_id || _id == load_mirror_id || _id == load_appendix_id) {
 424     CodeSection* cs = __ code_section();
 425     RelocIterator iter(cs, (address)_pc_start, (address)(_pc_start + 1));
 426     relocInfo::change_reloc_info_for_address(&iter, (address) _pc_start, reloc_type, relocInfo::none);
 427   }
 428 }
 429 
 430 
 431 void DeoptimizeStub::emit_code(LIR_Assembler* ce) {
   // OLD version of this stub (left column of the sdiff): emits the
   // out-of-line slow path that calls the C1 deoptimization runtime entry.
   // Note: no trap request is passed here — the NEW side of this diff
   // inserts ce->store_parameter(_trap_request, 0) before the call.
 432   __ bind(_entry);

   // (Blank alignment line above comes from the side-by-side diff layout:
   // the new version inserts a statement at this position.)
 433   __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::deoptimize_id)));
   // Record _info (debug/call-site information) for the call just emitted,
   // so frames can be reconstructed during deoptimization.
 434   ce->add_call_info_here(_info);
   // Debug-build guard only: the deoptimize runtime call is not expected
   // to return to this point.
 435   DEBUG_ONLY(__ should_not_reach_here());
 436 }
 437 
 438 
 439 void ImplicitNullCheckStub::emit_code(LIR_Assembler* ce) {
 440   address a;
 441   if (_info->deoptimize_on_exception()) {
 442     // Deoptimize, do not throw the exception, because it is probably wrong to do it here.
 443     a = Runtime1::entry_for(Runtime1::predicate_failed_trap_id);
 444   } else {
 445     a = Runtime1::entry_for(Runtime1::throw_null_pointer_exception_id);
 446   }
 447 
 448   ce->compilation()->implicit_exception_table()->append(_offset, __ offset());
 449   __ bind(_entry);
 450   __ call(RuntimeAddress(a));
 451   ce->add_call_info_here(_info);
 452   ce->verify_oop_map(_info);




 413   __ call(RuntimeAddress(target));
 414   assert(_patch_info_offset == (patch_info_pc - __ pc()), "must not change");
 415   ce->add_call_info_here(_info);
 416   int jmp_off = __ offset();
 417   __ jmp(_patch_site_entry);
 418   // Add enough nops so deoptimization can overwrite the jmp above with a call
 419   // and not destroy the world.
 420   for (int j = __ offset() ; j < jmp_off + 5 ; j++ ) {
 421     __ nop();
 422   }
 423   if (_id == load_klass_id || _id == load_mirror_id || _id == load_appendix_id) {
 424     CodeSection* cs = __ code_section();
 425     RelocIterator iter(cs, (address)_pc_start, (address)(_pc_start + 1));
 426     relocInfo::change_reloc_info_for_address(&iter, (address) _pc_start, reloc_type, relocInfo::none);
 427   }
 428 }
 429 
 430 
 431 void DeoptimizeStub::emit_code(LIR_Assembler* ce) {
   // NEW version of this stub (right column of the sdiff): emits the
   // out-of-line slow path that calls the C1 deoptimization runtime entry.
 432   __ bind(_entry);
   // Change introduced by this diff: pass _trap_request to the runtime as
   // outgoing stub parameter 0 — presumably an encoded deopt reason/action;
   // TODO(review): confirm against Deoptimization and Runtime1::deoptimize.
 433   ce->store_parameter(_trap_request, 0);
 434   __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::deoptimize_id)));
   // Record _info (debug/call-site information) for the call just emitted,
   // so frames can be reconstructed during deoptimization.
 435   ce->add_call_info_here(_info);
   // Debug-build guard only: the deoptimize runtime call is not expected
   // to return to this point.
 436   DEBUG_ONLY(__ should_not_reach_here());
 437 }
 438 
 439 
 440 void ImplicitNullCheckStub::emit_code(LIR_Assembler* ce) {
 441   address a;
 442   if (_info->deoptimize_on_exception()) {
 443     // Deoptimize, do not throw the exception, because it is probably wrong to do it here.
 444     a = Runtime1::entry_for(Runtime1::predicate_failed_trap_id);
 445   } else {
 446     a = Runtime1::entry_for(Runtime1::throw_null_pointer_exception_id);
 447   }
 448 
 449   ce->compilation()->implicit_exception_table()->append(_offset, __ offset());
 450   __ bind(_entry);
 451   __ call(RuntimeAddress(a));
 452   ce->add_call_info_here(_info);
 453   ce->verify_oop_map(_info);


src/cpu/x86/vm/c1_CodeStubs_x86.cpp
Index | Unified diffs | Context diffs | Sdiffs | Patch | New | Old | Previous File | Next File