< prev index next >

src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp

Print this page
rev 49852 : 8200556: AArch64: assertion failure in slowdebug builds
Reviewed-by: duke


 351 
 352     unsigned int insn = (0b000101 << 26) | ((disp >> 2) & 0x3ffffff);
 353     *(unsigned int*)verified_entry = insn;
 354   } else {
 355     // We use an illegal instruction for marking a method as
 356     // not_entrant or zombie.
 357     NativeIllegalInstruction::insert(verified_entry);
 358   }
 359 
 360   ICache::invalidate_range(verified_entry, instruction_size);
 361 }
 362 
 363 void NativeGeneralJump::verify() {  }  // No invariants are checked for a general jump on AArch64.
 364 
 365 void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
     // Overwrite the code at `code_pos` with an unconditional jump to `entry`:
     // materialize the target address in rscratch1, then branch through it.
     // NOTE(review): this is the PRE-patch side of the diff — it uses a.mov;
     // the new revision (8200556) replaces it with a.movptr.
 366   NativeGeneralJump* n_jump = (NativeGeneralJump*)code_pos;
     // NOTE(review): n_jump is unused in this body.
 367 
     // Assemble directly over the existing code at code_pos.
 368   CodeBuffer cb(code_pos, instruction_size);
 369   MacroAssembler a(&cb);
 370 
 371   a.mov(rscratch1, entry);
 372   a.br(rscratch1);
 373 
     // Flush the icache so the freshly written instructions become visible
     // for execution.
 374   ICache::invalidate_range(code_pos, instruction_size);
 375 }
 376 
 377 // MT-safe patching of a long jump instruction.
 378 void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
     // Not implemented on AArch64; reaching here is a programming error.
 379   ShouldNotCallThis();
 380 }
 381 
 382 address NativeCallTrampolineStub::destination(nmethod *nm) const {
     // Read the branch target out of the trampoline's data slot.
     // NOTE(review): the `nm` parameter is unused in this implementation.
 383   return ptr_at(data_offset);
 384 }
 385 
 386 void NativeCallTrampolineStub::set_destination(address new_destination) {
     // Store the new target into the trampoline's data slot.
 387   set_ptr_at(data_offset, new_destination);
     // NOTE(review): a release barrier conventionally precedes the store it
     // publishes; confirm the intended ordering with readers of this slot.
 388   OrderAccess::release();
 389 }
 390 
 391 // Generate a trampoline for a branch to dest.  If there's no need for a


 351 
 352     unsigned int insn = (0b000101 << 26) | ((disp >> 2) & 0x3ffffff);
 353     *(unsigned int*)verified_entry = insn;
 354   } else {
 355     // We use an illegal instruction for marking a method as
 356     // not_entrant or zombie.
 357     NativeIllegalInstruction::insert(verified_entry);
 358   }
 359 
 360   ICache::invalidate_range(verified_entry, instruction_size);
 361 }
 362 
 363 void NativeGeneralJump::verify() {  }  // No invariants are checked for a general jump on AArch64.
 364 
 365 void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
     // Overwrite the code at `code_pos` with an unconditional jump to `entry`:
     // materialize the target address in rscratch1, then branch through it.
 366   NativeGeneralJump* n_jump = (NativeGeneralJump*)code_pos;
     // NOTE(review): n_jump is unused in this body.
 367 
     // Assemble directly over the existing code at code_pos.
 368   CodeBuffer cb(code_pos, instruction_size);
 369   MacroAssembler a(&cb);
 370 
     // movptr (changed from mov in this revision, 8200556) materializes the
     // full pointer as an immediate sequence; presumably this sidesteps the
     // slowdebug assertion named in the bug title — confirm against JBS.
 371   a.movptr(rscratch1, entry);
 372   a.br(rscratch1);
 373 
     // Flush the icache so the freshly written instructions become visible
     // for execution.
 374   ICache::invalidate_range(code_pos, instruction_size);
 375 }
 376 
 377 // MT-safe patching of a long jump instruction.
 378 void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
     // Not implemented on AArch64; reaching here is a programming error.
 379   ShouldNotCallThis();
 380 }
 381 
 382 address NativeCallTrampolineStub::destination(nmethod *nm) const {
     // Read the branch target out of the trampoline's data slot.
     // NOTE(review): the `nm` parameter is unused in this implementation.
 383   return ptr_at(data_offset);
 384 }
 385 
 386 void NativeCallTrampolineStub::set_destination(address new_destination) {
     // Store the new target into the trampoline's data slot.
 387   set_ptr_at(data_offset, new_destination);
     // NOTE(review): a release barrier conventionally precedes the store it
     // publishes; confirm the intended ordering with readers of this slot.
 388   OrderAccess::release();
 389 }
 390 
 391 // Generate a trampoline for a branch to dest.  If there's no need for a
< prev index next >