
src/cpu/aarch64/vm/c1_MacroAssembler_aarch64.cpp

rev 10524 : [backport] Some trivial-ish cleanups


 127   const int hdr_offset = oopDesc::mark_offset_in_bytes();
 128   assert(hdr != obj && hdr != disp_hdr && obj != disp_hdr, "registers must be different");
 129   Label done;
 130 
 131   if (UseBiasedLocking) {
 132     // load object
 133     ldr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
 134     biased_locking_exit(obj, hdr, done);
 135   }
 136 
 137   // load displaced header
 138   ldr(hdr, Address(disp_hdr, 0));
 139   // if the loaded hdr is NULL we had recursive locking
 140   // if we had recursive locking, we are done
 141   cbz(hdr, done);
 142   if (!UseBiasedLocking) {
 143     // load object
 144     ldr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
 145   }
 146   verify_oop(obj);
 147 
 148   // test if object header is pointing to the displaced header, and if so, restore
 149   // the displaced header in the object - if the object header is not pointing to
 150   // the displaced header, get the object header instead
 151   // if the object header was not pointing to the displaced header,
 152   // we do unlocking via runtime call
 153   if (hdr_offset) {
 154     lea(rscratch1, Address(obj, hdr_offset));
 155     cmpxchgptr(disp_hdr, hdr, rscratch1, rscratch2, done, &slow_case);
 156   } else {
 157     cmpxchgptr(disp_hdr, hdr, obj, rscratch2, done, &slow_case);
 158   }
 159   // done
 160   bind(done);
 161 }
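
The fast path above restores the displaced header with a single compare-and-swap: if the object's mark word still points at the BasicLock slot in the frame, the saved header is written back; any other value means the lock was inflated (or otherwise changed) and unlocking has to go through the runtime. A minimal sketch of that protocol in plain C++, using simplified stand-in types rather than the real HotSpot markWord/BasicObjectLock classes:

#include <atomic>
#include <cstdint>

// Illustrative stand-ins for the structures involved (not HotSpot types).
struct BasicLock  { uintptr_t displaced_header; };   // the disp_hdr slot in the frame
struct ObjectStub { std::atomic<uintptr_t> mark; };  // the object's mark word

// Returns true when the fast path succeeds; false corresponds to slow_case.
bool fast_unlock(ObjectStub* obj, BasicLock* lock) {
  uintptr_t hdr = lock->displaced_header;            // load displaced header
  if (hdr == 0) {
    return true;                                     // NULL => recursive lock, nothing to restore
  }
  // Write the displaced header back only if the mark word still points at
  // our BasicLock; otherwise fall back to the runtime.
  uintptr_t expected = reinterpret_cast<uintptr_t>(lock);
  return obj->mark.compare_exchange_strong(expected, hdr);
}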
 162 
 163 
 164 // Defines obj, preserves var_size_in_bytes
 165 void C1_MacroAssembler::try_allocate(Register obj, Register var_size_in_bytes, int con_size_in_bytes, Register t1, Register t2, Label& slow_case) {
 166   if (UseTLAB) {
 167     tlab_allocate(obj, var_size_in_bytes, con_size_in_bytes, t1, t2, slow_case);
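
With UseTLAB enabled, try_allocate simply bumps the thread-local allocation buffer and branches to slow_case when the buffer cannot hold the object. A rough sketch of that bump-pointer idea in plain C++ (the Tlab struct here is illustrative, not the real ThreadLocalAllocBuffer interface):

#include <cstddef>

// Illustrative thread-local allocation buffer: a [top, end) region owned by one thread.
struct Tlab {
  char* top;
  char* end;
};

// Bump-pointer allocation; returning nullptr corresponds to the slow_case branch.
inline void* tlab_bump_allocate(Tlab& tlab, std::size_t size_in_bytes) {
  char* obj = tlab.top;
  if (tlab.end - obj < static_cast<std::ptrdiff_t>(size_in_bytes)) {
    return nullptr;                  // not enough room left in the buffer
  }
  tlab.top = obj + size_in_bytes;    // bump the allocation pointer
  return obj;
}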