< prev index next >

src/cpu/aarch64/vm/macroAssembler_aarch64.cpp

Print this page
rev 12502 : 8172144: AArch64: Implement "JEP 270: Reserved Stack Areas for Critical Sections"
Reviewed-by: duke


 385 }
 386 
// Emit an unconditional jump to 'entry', which must already be a blob in the
// code cache (asserted below). When the code cache may span more than the
// reach of a single B instruction (far_branches()), materialize the target
// address into 'tmp' with ADRP+ADD and branch through the register; otherwise
// emit a plain PC-relative B. If 'cbuf' is supplied, its instruction mark is
// set on the branch instruction itself (not the ADRP/ADD prelude) so that
// relocation/patching finds the actual transfer-of-control instruction.
 387 void MacroAssembler::far_jump(Address entry, CodeBuffer *cbuf, Register tmp) {
 388   assert(ReservedCodeCacheSize < 4*G, "branch out of range");  // NOTE(review): comment at line 394 says 2Gb but this assert permits up to 4G -- confirm the intended bound (ADRP reaches +/-4GiB)
 389   assert(CodeCache::find_blob(entry.target()) != NULL,
 390          "destination of far call not found in code cache");
 391   if (far_branches()) {
 392     unsigned long offset;
 393     // We can use ADRP here because we know that the total size of
 394     // the code cache cannot exceed 2Gb.
 395     adrp(tmp, entry, offset);           // tmp = 4KiB page of target; offset = low bits
 396     add(tmp, tmp, offset);              // tmp = full target address
 397     if (cbuf) cbuf->set_insts_mark();   // mark must land on the branch, after the prelude
 398     br(tmp);
 399   } else {
 400     if (cbuf) cbuf->set_insts_mark();
 401     b(entry);                           // single PC-relative branch suffices
 402   }
 403 }
 404 
























 405 int MacroAssembler::biased_locking_enter(Register lock_reg,
 406                                          Register obj_reg,
 407                                          Register swap_reg,
 408                                          Register tmp_reg,
 409                                          bool swap_reg_contains_mark,
 410                                          Label& done,
 411                                          Label* slow_case,
 412                                          BiasedLockingCounters* counters) {
 413   assert(UseBiasedLocking, "why call this otherwise?");
 414   assert_different_registers(lock_reg, obj_reg, swap_reg);
 415 
 416   if (PrintBiasedLockingStatistics && counters == NULL)
 417     counters = BiasedLocking::counters();
 418 
 419   assert_different_registers(lock_reg, obj_reg, swap_reg, tmp_reg, rscratch1, rscratch2, noreg);
 420   assert(markOopDesc::age_shift == markOopDesc::lock_bits + markOopDesc::biased_lock_bits, "biased locking makes assumptions about bit layout");
 421   Address mark_addr      (obj_reg, oopDesc::mark_offset_in_bytes());
 422   Address klass_addr     (obj_reg, oopDesc::klass_offset_in_bytes());
 423   Address saved_mark_addr(lock_reg, 0);
 424 




 385 }
 386 
// Emit an unconditional jump to 'entry', which must already be a blob in the
// code cache (asserted below). When the code cache may span more than the
// reach of a single B instruction (far_branches()), materialize the target
// address into 'tmp' with ADRP+ADD and branch through the register; otherwise
// emit a plain PC-relative B. If 'cbuf' is supplied, its instruction mark is
// set on the branch instruction itself (not the ADRP/ADD prelude) so that
// relocation/patching finds the actual transfer-of-control instruction.
 387 void MacroAssembler::far_jump(Address entry, CodeBuffer *cbuf, Register tmp) {
 388   assert(ReservedCodeCacheSize < 4*G, "branch out of range");  // NOTE(review): comment at line 394 says 2Gb but this assert permits up to 4G -- confirm the intended bound (ADRP reaches +/-4GiB)
 389   assert(CodeCache::find_blob(entry.target()) != NULL,
 390          "destination of far call not found in code cache");
 391   if (far_branches()) {
 392     unsigned long offset;
 393     // We can use ADRP here because we know that the total size of
 394     // the code cache cannot exceed 2Gb.
 395     adrp(tmp, entry, offset);           // tmp = 4KiB page of target; offset = low bits
 396     add(tmp, tmp, offset);              // tmp = full target address
 397     if (cbuf) cbuf->set_insts_mark();   // mark must land on the branch, after the prelude
 398     br(tmp);
 399   } else {
 400     if (cbuf) cbuf->set_insts_mark();
 401     b(entry);                           // single PC-relative branch suffices
 402   }
 403 }
 404 
// JEP 270 support: emit the check that detects whether the current method
// has pushed SP below the thread's reserved_stack_activation watermark.
// Fast path: SP is still below (LO, unsigned) the watermark, so nothing to
// do -- fall through via 'no_reserved_zone_enabling'. Slow path: call
// SharedRuntime::enable_stack_reserved_zone(thread) to re-arm the reserved
// zone, then tail-jump to the delayed StackOverflowError stub.
// Clobbers rscratch1 and c_rarg0; emitted inline into method prologues.
 405 void MacroAssembler::reserved_stack_check() {
 406     // testing if reserved zone needs to be enabled
 407     Label no_reserved_zone_enabling;
 408 
 409     ldr(rscratch1, Address(rthread, JavaThread::reserved_stack_activation_offset()));
 410     cmp(sp, rscratch1);
 411     br(Assembler::LO, no_reserved_zone_enabling);  // SP below watermark: common case, skip
 412 
 413     enter();   // LR and FP are live.
 414     lea(rscratch1, CAST_FROM_FN_PTR(address, SharedRuntime::enable_stack_reserved_zone));
 415     mov(c_rarg0, rthread);                         // C ABI: thread is the sole argument
 416     blr(rscratch1);
 417     leave();                                       // pop the frame made by enter()
 418 
 419     // We have already removed our own frame.
 420     // throw_delayed_StackOverflowError will think that it's been
 421     // called by our caller.
 422     lea(rscratch1, RuntimeAddress(StubRoutines::throw_delayed_StackOverflowError_entry()));
 423     br(rscratch1);                                 // tail-jump: stub never returns here
 424     should_not_reach_here();
 425 
 426     bind(no_reserved_zone_enabling);
 427 }
 428 
 429 int MacroAssembler::biased_locking_enter(Register lock_reg,
 430                                          Register obj_reg,
 431                                          Register swap_reg,
 432                                          Register tmp_reg,
 433                                          bool swap_reg_contains_mark,
 434                                          Label& done,
 435                                          Label* slow_case,
 436                                          BiasedLockingCounters* counters) {
 437   assert(UseBiasedLocking, "why call this otherwise?");
 438   assert_different_registers(lock_reg, obj_reg, swap_reg);
 439 
 440   if (PrintBiasedLockingStatistics && counters == NULL)
 441     counters = BiasedLocking::counters();
 442 
 443   assert_different_registers(lock_reg, obj_reg, swap_reg, tmp_reg, rscratch1, rscratch2, noreg);
 444   assert(markOopDesc::age_shift == markOopDesc::lock_bits + markOopDesc::biased_lock_bits, "biased locking makes assumptions about bit layout");
 445   Address mark_addr      (obj_reg, oopDesc::mark_offset_in_bytes());
 446   Address klass_addr     (obj_reg, oopDesc::klass_offset_in_bytes());
 447   Address saved_mark_addr(lock_reg, 0);
 448 


< prev index next >