
src/hotspot/cpu/x86/c1_MacroAssembler_x86.cpp

old/src/hotspot/cpu/x86/c1_MacroAssembler_x86.cpp:

  48 
  49   // save object being locked into the BasicObjectLock
  50   movptr(Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()), obj);
  51 
  52   if (UseBiasedLocking) {
  53     assert(scratch != noreg, "should have scratch register at this point");
  54     null_check_offset = biased_locking_enter(disp_hdr, obj, hdr, scratch, false, done, &slow_case);
  55   } else {
  56     null_check_offset = offset();
  57   }
  58 
  59   // Load object header
  60   movptr(hdr, Address(obj, hdr_offset));
  61   // and mark it as unlocked
  62   orptr(hdr, markOopDesc::unlocked_value);
  63   // save unlocked object header into the displaced header location on the stack
  64   movptr(Address(disp_hdr, 0), hdr);
  65   // test if object header is still the same (i.e. unlocked), and if so, store the
  66   // displaced header address in the object header - if it is not the same, get the
  67   // object header instead
  68   if (os::is_MP()) MacroAssembler::lock(); // must be immediately before cmpxchg!
  69   cmpxchgptr(disp_hdr, Address(obj, hdr_offset));
  70   // if the object header was the same, we're done
  71   if (PrintBiasedLockingStatistics) {
  72     cond_inc32(Assembler::equal,
  73                ExternalAddress((address)BiasedLocking::fast_path_entry_count_addr()));
  74   }
  75   jcc(Assembler::equal, done);
  76   // if the object header was not the same, it is now in the hdr register
  77   // => test if it is a stack pointer into the same stack (recursive locking), i.e.:
  78   //
  79   // 1) (hdr & aligned_mask) == 0
  80   // 2) rsp <= hdr
  81   // 3) hdr <= rsp + page_size
  82   //
  83   // these 3 tests can be done by evaluating the following expression:
  84   //
  85   // (hdr - rsp) & (aligned_mask - page_size)
  86   //
  87   // assuming both the stack pointer and page_size have their least
  88   // significant 2 bits cleared and page_size is a power of 2
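
As an aside (not part of this file or of the change under review): the claim that the single expression (hdr - rsp) & (aligned_mask - page_size) folds the three tests above can be checked in plain C++. The values aligned_mask == 7 and page_size == 4096 below are assumptions for a 64-bit target; the real constants come from the surrounding function, which is not shown in this hunk.

#include <cassert>
#include <cstdint>

// Sketch only, not HotSpot code: compares the folded expression with the
// three explicit conditions from the comment above.
static bool folded(uintptr_t hdr, uintptr_t rsp) {
  const uintptr_t aligned_mask = 7;     // assumed: mark words are 8-byte aligned
  const uintptr_t page_size    = 4096;  // assumed page size
  return ((hdr - rsp) & (aligned_mask - page_size)) == 0;
}

static bool spelled_out(uintptr_t hdr, uintptr_t rsp) {
  const uintptr_t aligned_mask = 7;
  const uintptr_t page_size    = 4096;
  return (hdr & aligned_mask) == 0      // 1) aligned
      && rsp <= hdr                     // 2) not below the stack pointer
      && hdr < rsp + page_size;         // 3) within one page (the folded form is strict here)
}

int main() {
  const uintptr_t rsp = 0x7ffd0000;                 // assumed, 8-byte aligned stack pointer
  assert( folded(rsp + 0x40,   rsp));               // aligned, within a page: recursive case
  assert(!folded(rsp + 0x41,   rsp));               // misaligned
  assert(!folded(rsp - 0x40,   rsp));               // below rsp
  assert(!folded(rsp + 0x2000, rsp));               // more than a page above rsp
  for (uintptr_t off = 0; off < 0x3000; off++) {    // folded and spelled-out tests agree
    assert(folded(rsp + off, rsp) == spelled_out(rsp + off, rsp));
  }
  return 0;
}
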


 109   if (UseBiasedLocking) {
 110     // load object
 111     movptr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
 112     biased_locking_exit(obj, hdr, done);
 113   }
 114 
 115   // load displaced header
 116   movptr(hdr, Address(disp_hdr, 0));
 117   // if the loaded hdr is NULL we had recursive locking
 118   testptr(hdr, hdr);
 119   // if we had recursive locking, we are done
 120   jcc(Assembler::zero, done);
 121   if (!UseBiasedLocking) {
 122     // load object
 123     movptr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
 124   }
 125   verify_oop(obj);
 126   // test if object header is pointing to the displaced header, and if so, restore
 127   // the displaced header in the object - if the object header is not pointing to
 128   // the displaced header, get the object header instead
 129   if (os::is_MP()) MacroAssembler::lock(); // must be immediately before cmpxchg!
 130   cmpxchgptr(hdr, Address(obj, hdr_offset));
 131   // if the object header was not pointing to the displaced header,
 132   // we do unlocking via runtime call
 133   jcc(Assembler::notEqual, slow_case);
 134   // done
 135   bind(done);
 136 }
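
As a second aside (again not part of the file): the lock/unlock fast paths above can be modeled in portable C++ with std::atomic to make the CAS protocol explicit. Everything below - the Obj/Lock layout, the field names, and the unlocked bit value - is a simplified assumption, and the biased-locking and runtime slow paths are omitted.

#include <atomic>
#include <cstdint>

// Illustrative model of the stack-locking fast path, not HotSpot code.
struct Obj  { std::atomic<uintptr_t> mark; };      // stand-in for the object header (mark word)
struct Lock { uintptr_t displaced; Obj* obj; };    // stand-in for BasicObjectLock on the stack

const uintptr_t unlocked_value = 1;                // assumed "unlocked" bit in the mark word

// Returns true if the fast path succeeded; false means "call the runtime slow path".
bool fast_lock(Obj* o, Lock* l) {
  l->obj = o;                                          // save the object into the lock slot
  uintptr_t hdr = o->mark.load() | unlocked_value;     // header with the unlocked bit forced on
  l->displaced = hdr;                                  // displaced header saved on the stack
  // atomically install a pointer to our stack slot as the new mark word
  if (o->mark.compare_exchange_strong(hdr, (uintptr_t)&l->displaced)) {
    return true;                                       // header was still unlocked: we own the lock
  }
  // hdr now holds the current mark word (as after the failed cmpxchg above); if it
  // already points into this thread's stack the lock is recursive, recorded by
  // clearing the displaced header (the range test is sketched earlier)
  uintptr_t sp = (uintptr_t)&hdr;                      // rough stand-in for rsp
  if (((hdr - sp) & (uintptr_t)(7 - 4096)) == 0) { l->displaced = 0; return true; }
  return false;
}

bool fast_unlock(Lock* l) {
  uintptr_t hdr = l->displaced;
  if (hdr == 0) return true;                           // NULL displaced header: recursive unlock
  uintptr_t expected = (uintptr_t)&l->displaced;
  // restore the displaced header only if the object still points at our stack slot
  return l->obj->mark.compare_exchange_strong(expected, hdr);  // false -> runtime slow path
}

The compare_exchange calls correspond to the lock/cmpxchgptr pairs above; the lock prefix is what makes cmpxchg atomic on a multiprocessor, which is why it must be emitted immediately before the cmpxchg instruction.
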
 137 
 138 
 139 // Defines obj, preserves var_size_in_bytes
 140 void C1_MacroAssembler::try_allocate(Register obj, Register var_size_in_bytes, int con_size_in_bytes, Register t1, Register t2, Label& slow_case) {
 141   if (UseTLAB) {
 142     tlab_allocate(noreg, obj, var_size_in_bytes, con_size_in_bytes, t1, t2, slow_case);
 143   } else {
 144     eden_allocate(noreg, obj, var_size_in_bytes, con_size_in_bytes, t1, slow_case);
 145   }
 146 }
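
One more aside on the allocation helper above (illustrative only; the names and fields below are assumptions, not the HotSpot ones): the point of the UseTLAB dispatch is that a thread-local allocation buffer can be bumped without any atomic instruction, while allocating directly in shared eden needs a CAS loop on the common top pointer. Roughly:

#include <atomic>
#include <cstddef>

// Sketch of the two fast paths try_allocate() chooses between; not HotSpot code.
struct TLABSketch { char* top; char* end; };

// TLAB path: plain bump-pointer allocation, safe without atomics because the
// buffer is owned by exactly one thread.
void* tlab_allocate_sketch(TLABSketch* tlab, size_t size_in_bytes) {
  char* obj     = tlab->top;
  char* new_top = obj + size_in_bytes;
  if (new_top > tlab->end) return nullptr;             // take the slow path (refill / runtime call)
  tlab->top = new_top;
  return obj;
}

// Eden path: the top pointer is shared by all threads, so the bump is a CAS loop.
void* eden_allocate_sketch(std::atomic<char*>& eden_top, char* eden_end, size_t size_in_bytes) {
  char* obj = eden_top.load();
  for (;;) {
    char* new_top = obj + size_in_bytes;
    if (new_top > eden_end) return nullptr;            // slow path
    if (eden_top.compare_exchange_weak(obj, new_top)) return obj;  // obj is reloaded on failure
  }
}
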
 147 
 148 
 149 void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register len, Register t1, Register t2) {




new/src/hotspot/cpu/x86/c1_MacroAssembler_x86.cpp:

  48 
  49   // save object being locked into the BasicObjectLock
  50   movptr(Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()), obj);
  51 
  52   if (UseBiasedLocking) {
  53     assert(scratch != noreg, "should have scratch register at this point");
  54     null_check_offset = biased_locking_enter(disp_hdr, obj, hdr, scratch, false, done, &slow_case);
  55   } else {
  56     null_check_offset = offset();
  57   }
  58 
  59   // Load object header
  60   movptr(hdr, Address(obj, hdr_offset));
  61   // and mark it as unlocked
  62   orptr(hdr, markOopDesc::unlocked_value);
  63   // save unlocked object header into the displaced header location on the stack
  64   movptr(Address(disp_hdr, 0), hdr);
  65   // test if object header is still the same (i.e. unlocked), and if so, store the
  66   // displaced header address in the object header - if it is not the same, get the
  67   // object header instead
  68   MacroAssembler::lock(); // must be immediately before cmpxchg!
  69   cmpxchgptr(disp_hdr, Address(obj, hdr_offset));
  70   // if the object header was the same, we're done
  71   if (PrintBiasedLockingStatistics) {
  72     cond_inc32(Assembler::equal,
  73                ExternalAddress((address)BiasedLocking::fast_path_entry_count_addr()));
  74   }
  75   jcc(Assembler::equal, done);
  76   // if the object header was not the same, it is now in the hdr register
  77   // => test if it is a stack pointer into the same stack (recursive locking), i.e.:
  78   //
  79   // 1) (hdr & aligned_mask) == 0
  80   // 2) rsp <= hdr
  81   // 3) hdr <= rsp + page_size
  82   //
  83   // these 3 tests can be done by evaluating the following expression:
  84   //
  85   // (hdr - rsp) & (aligned_mask - page_size)
  86   //
  87   // assuming both the stack pointer and page_size have their least
  88   // significant 2 bits cleared and page_size is a power of 2


 109   if (UseBiasedLocking) {
 110     // load object
 111     movptr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
 112     biased_locking_exit(obj, hdr, done);
 113   }
 114 
 115   // load displaced header
 116   movptr(hdr, Address(disp_hdr, 0));
 117   // if the loaded hdr is NULL we had recursive locking
 118   testptr(hdr, hdr);
 119   // if we had recursive locking, we are done
 120   jcc(Assembler::zero, done);
 121   if (!UseBiasedLocking) {
 122     // load object
 123     movptr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
 124   }
 125   verify_oop(obj);
 126   // test if object header is pointing to the displaced header, and if so, restore
 127   // the displaced header in the object - if the object header is not pointing to
 128   // the displaced header, get the object header instead
 129   MacroAssembler::lock(); // must be immediately before cmpxchg!
 130   cmpxchgptr(hdr, Address(obj, hdr_offset));
 131   // if the object header was not pointing to the displaced header,
 132   // we do unlocking via runtime call
 133   jcc(Assembler::notEqual, slow_case);
 134   // done
 135   bind(done);
 136 }
 137 
 138 
 139 // Defines obj, preserves var_size_in_bytes
 140 void C1_MacroAssembler::try_allocate(Register obj, Register var_size_in_bytes, int con_size_in_bytes, Register t1, Register t2, Label& slow_case) {
 141   if (UseTLAB) {
 142     tlab_allocate(noreg, obj, var_size_in_bytes, con_size_in_bytes, t1, t2, slow_case);
 143   } else {
 144     eden_allocate(noreg, obj, var_size_in_bytes, con_size_in_bytes, t1, slow_case);
 145   }
 146 }
 147 
 148 
 149 void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register len, Register t1, Register t2) {

