src/cpu/sparc/vm/c1_MacroAssembler_sparc.cpp

  91   verify_oop(Roop);
  92 
  93   // save object being locked into the BasicObjectLock
  94   st_ptr(Roop, Rbox, BasicObjectLock::obj_offset_in_bytes());
  95 
  96   if (UseBiasedLocking) {
  97     biased_locking_enter(Roop, Rmark, Rscratch, done, &slow_case);
  98   }
  99 
 100   // Save Rbox in Rscratch to be used for the cas operation
 101   mov(Rbox, Rscratch);
 102 
 103   // and mark it unlocked
 104   or3(Rmark, markOopDesc::unlocked_value, Rmark);
 105 
 106   // save unlocked object header into the displaced header location on the stack
 107   st_ptr(Rmark, Rbox, BasicLock::displaced_header_offset_in_bytes());
 108 
 109   // compare object markOop with Rmark and if equal exchange Rscratch with object markOop
 110   assert(mark_addr.disp() == 0, "cas must take a zero displacement");
 111   casx_under_lock(mark_addr.base(), Rmark, Rscratch, (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
 112   // if compare/exchange succeeded we found an unlocked object and we now have locked it
 113   // hence we are done
 114   cmp(Rmark, Rscratch);
 115   brx(Assembler::equal, false, Assembler::pt, done);
 116   delayed()->sub(Rscratch, SP, Rscratch);  //pull next instruction into delay slot
 117   // we did not find an unlocked object so see if this is a recursive case
 118   // sub(Rscratch, SP, Rscratch);
 119   assert(os::vm_page_size() > 0xfff, "page size too small - change the constant");
 120   andcc(Rscratch, 0xfffff003, Rscratch);
 121   brx(Assembler::notZero, false, Assembler::pn, slow_case);
 122   delayed()->st_ptr(Rscratch, Rbox, BasicLock::displaced_header_offset_in_bytes());
 123   bind(done);
 124 }
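
For readers tracing the emitted instructions, here is a minimal C++ sketch of the stack-locking fast path that lock_object implements above, with the mark word modeled as a plain machine word. The type and function names (BasicLockSketch, try_stack_lock) are illustrative only, not HotSpot declarations.

#include <atomic>
#include <cstdint>

// Illustrative model of the on-stack BasicLock; not the HotSpot type.
struct BasicLockSketch {
  uintptr_t displaced_header;
};

// Returns true if the fast path succeeds; false means "take slow_case".
bool try_stack_lock(std::atomic<uintptr_t>& mark, BasicLockSketch* box, uintptr_t sp) {
  // or3(Rmark, unlocked_value, Rmark): build the "unlocked" form of the header.
  uintptr_t unlocked = mark.load() | 1;
  // st_ptr: save the unlocked header into the displaced header slot on the stack.
  box->displaced_header = unlocked;
  // casx: if the object is still unlocked, install a pointer to the BasicLock.
  uintptr_t expected = unlocked;
  if (mark.compare_exchange_strong(expected, reinterpret_cast<uintptr_t>(box))) {
    return true;  // cmp/brx equal -> done
  }
  // CAS failed: "expected" now holds the current mark. If it points into our own
  // stack frame (same page as SP, stack-locked), this is a recursive lock.
  uintptr_t delta = expected - sp;                       // delayed()->sub(Rscratch, SP, Rscratch)
  if ((delta & ~static_cast<uintptr_t>(0xffc)) == 0) {   // andcc with the 0xfffff003-style mask
    box->displaced_header = 0;                           // recursive case: zero displaced header
    return true;
  }
  return false;  // brx notZero -> slow_case
}
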
 125 
 126 
 127 void C1_MacroAssembler::unlock_object(Register Rmark, Register Roop, Register Rbox, Label& slow_case) {
 128   assert_different_registers(Rmark, Roop, Rbox);
 129 
 130   Label done;
 131 
 132   Address mark_addr(Roop, oopDesc::mark_offset_in_bytes());
 133   assert(mark_addr.disp() == 0, "cas must take a zero displacement");
 134 
 135   if (UseBiasedLocking) {
 136     // load the object out of the BasicObjectLock
 137     ld_ptr(Rbox, BasicObjectLock::obj_offset_in_bytes(), Roop);
 138     verify_oop(Roop);
 139     biased_locking_exit(mark_addr, Rmark, done);
 140   }
 141   // Test first if it is a fast recursive unlock
 142   ld_ptr(Rbox, BasicLock::displaced_header_offset_in_bytes(), Rmark);
 143   br_null_short(Rmark, Assembler::pt, done);
 144   if (!UseBiasedLocking) {
 145     // load object
 146     ld_ptr(Rbox, BasicObjectLock::obj_offset_in_bytes(), Roop);
 147     verify_oop(Roop);
 148   }
 149 
 150   // Check if it is still a lightweight lock; this is true if we see
 151   // the stack address of the basicLock in the markOop of the object
 152   casx_under_lock(mark_addr.base(), Rbox, Rmark, (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
 153   cmp(Rbox, Rmark);
 154 
 155   brx(Assembler::notEqual, false, Assembler::pn, slow_case);
 156   delayed()->nop();
 157   // Done
 158   bind(done);
 159 }
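
A matching hedged sketch of the unlock fast path above (reusing the illustrative BasicLockSketch type; try_stack_unlock is not a HotSpot name): a zero displaced header means a recursive stack lock with nothing to restore, otherwise the CAS swings the mark word back from the BasicLock address to the saved header, and any interference falls through to slow_case.

#include <atomic>
#include <cstdint>

struct BasicLockSketch { uintptr_t displaced_header; };   // same illustrative type as above

// Returns true if the fast path succeeds; false means "take slow_case".
bool try_stack_unlock(std::atomic<uintptr_t>& mark, BasicLockSketch* box) {
  // ld_ptr: fetch the displaced header saved by the matching lock.
  uintptr_t displaced = box->displaced_header;
  if (displaced == 0) {
    return true;  // br_null_short -> done: recursive unlock, nothing to restore
  }
  // casx: restore the displaced header only if the mark still points at our box.
  uintptr_t expected = reinterpret_cast<uintptr_t>(box);
  return mark.compare_exchange_strong(expected, displaced);  // cmp notEqual -> slow_case
}
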
 160 
 161 
 162 void C1_MacroAssembler::try_allocate(
 163   Register obj,                        // result: pointer to object after successful allocation
 164   Register var_size_in_bytes,          // object size in bytes if unknown at compile time; invalid otherwise
 165   int      con_size_in_bytes,          // object size in bytes if   known at compile time
 166   Register t1,                         // temp register, must be global register for incr_allocated_bytes
 167   Register t2,                         // temp register
 168   Label&   slow_case                   // continuation point if fast allocation fails
 169 ) {
 170   RegisterOrConstant size_in_bytes = var_size_in_bytes->is_valid()
 171     ? RegisterOrConstant(var_size_in_bytes) : RegisterOrConstant(con_size_in_bytes);
 172   if (UseTLAB) {


 259       cmp_and_brx_short(t1, var_size_in_bytes, Assembler::equal, Assembler::pt, ok);
 260     } else {
 261       cmp_and_brx_short(t1, con_size_in_bytes, Assembler::equal, Assembler::pt, ok);
 262     }
 263     stop("bad size in initialize_object");
 264     should_not_reach_here();
 265 
 266     bind(ok);
 267   }
 268 
 269 #endif
 270 
 271   // initialize body
 272   const int threshold = 5 * HeapWordSize;              // approximate break even point for code size
 273   if (var_size_in_bytes != noreg) {
 274     // use a loop
 275     add(obj, hdr_size_in_bytes, t1);               // compute address of first element
 276     sub(var_size_in_bytes, hdr_size_in_bytes, t2); // compute size of body
 277     initialize_body(t1, t2);
 278 #ifndef _LP64
 279   } else if (VM_Version::v9_instructions_work() && con_size_in_bytes < threshold * 2) {
 280     // on v9 we can do double word stores to fill twice as much space.
 281     assert(hdr_size_in_bytes % 8 == 0, "double word aligned");
 282     assert(con_size_in_bytes % 8 == 0, "double word aligned");
 283     for (int i = hdr_size_in_bytes; i < con_size_in_bytes; i += 2 * HeapWordSize) stx(G0, obj, i);
 284 #endif
 285   } else if (con_size_in_bytes <= threshold) {
 286     // use explicit NULL stores
 287     for (int i = hdr_size_in_bytes; i < con_size_in_bytes; i += HeapWordSize)     st_ptr(G0, obj, i);
 288   } else if (con_size_in_bytes > hdr_size_in_bytes) {
 289     // use a loop
 290     const Register base  = t1;
 291     const Register index = t2;
 292     add(obj, hdr_size_in_bytes, base);               // compute address of first element
 293     // compute index = number of words to clear
 294     set(con_size_in_bytes - hdr_size_in_bytes, index);
 295     initialize_body(base, index);
 296   }
 297 
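The if/else chain above picks a clearing strategy by size: a loop when the size is only known at run time, explicit NULL stores for small compile-time-constant sizes (with a 32-bit v9 double-word variant for sizes up to twice the threshold), and a counted loop otherwise. Below is a hedged C++ sketch of that policy covering the word-store paths; zero_words and initialize_body_sketch are hypothetical names, not HotSpot code.

#include <cstddef>
#include <cstdint>

// Stand-in for the initialize_body() loop; the name is illustrative.
inline void zero_words(uintptr_t* p, size_t nwords) {
  for (size_t i = 0; i < nwords; i++) p[i] = 0;
}

// Sketch of the size-based policy, in words rather than bytes for brevity.
void initialize_body_sketch(uintptr_t* obj, size_t hdr_words, size_t size_words,
                            bool size_known_at_compile_time) {
  const size_t threshold_words = 5;              // approximate break-even point for code size
  uintptr_t* body = obj + hdr_words;             // address of the first element
  size_t body_words = size_words - hdr_words;

  if (!size_known_at_compile_time) {
    zero_words(body, body_words);                // variable size: always emit a loop
  } else if (size_words <= threshold_words) {
    for (size_t i = 0; i < body_words; i++) {    // small constant size: explicit NULL stores
      body[i] = 0;
    }
  } else if (size_words > hdr_words) {
    zero_words(body, body_words);                // larger constant size: counted loop
  }
}
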
 298   if (CURRENT_ENV->dtrace_alloc_probes()) {
 299     assert(obj == O0, "must be");




  91   verify_oop(Roop);
  92 
  93   // save object being locked into the BasicObjectLock
  94   st_ptr(Roop, Rbox, BasicObjectLock::obj_offset_in_bytes());
  95 
  96   if (UseBiasedLocking) {
  97     biased_locking_enter(Roop, Rmark, Rscratch, done, &slow_case);
  98   }
  99 
 100   // Save Rbox in Rscratch to be used for the cas operation
 101   mov(Rbox, Rscratch);
 102 
 103   // and mark it unlocked
 104   or3(Rmark, markOopDesc::unlocked_value, Rmark);
 105 
 106   // save unlocked object header into the displaced header location on the stack
 107   st_ptr(Rmark, Rbox, BasicLock::displaced_header_offset_in_bytes());
 108 
 109   // compare object markOop with Rmark and if equal exchange Rscratch with object markOop
 110   assert(mark_addr.disp() == 0, "cas must take a zero displacement");
 111   cas_ptr(mark_addr.base(), Rmark, Rscratch);
 112   // if compare/exchange succeeded we found an unlocked object and we now have locked it
 113   // hence we are done
 114   cmp(Rmark, Rscratch);
 115   brx(Assembler::equal, false, Assembler::pt, done);
 116   delayed()->sub(Rscratch, SP, Rscratch);  //pull next instruction into delay slot
 117   // we did not find an unlocked object so see if this is a recursive case
 118   // sub(Rscratch, SP, Rscratch);
 119   assert(os::vm_page_size() > 0xfff, "page size too small - change the constant");
 120   andcc(Rscratch, 0xfffff003, Rscratch);
 121   brx(Assembler::notZero, false, Assembler::pn, slow_case);
 122   delayed()->st_ptr(Rscratch, Rbox, BasicLock::displaced_header_offset_in_bytes());
 123   bind(done);
 124 }
 125 
 126 
 127 void C1_MacroAssembler::unlock_object(Register Rmark, Register Roop, Register Rbox, Label& slow_case) {
 128   assert_different_registers(Rmark, Roop, Rbox);
 129 
 130   Label done;
 131 
 132   Address mark_addr(Roop, oopDesc::mark_offset_in_bytes());
 133   assert(mark_addr.disp() == 0, "cas must take a zero displacement");
 134 
 135   if (UseBiasedLocking) {
 136     // load the object out of the BasicObjectLock
 137     ld_ptr(Rbox, BasicObjectLock::obj_offset_in_bytes(), Roop);
 138     verify_oop(Roop);
 139     biased_locking_exit(mark_addr, Rmark, done);
 140   }
 141   // Test first if it is a fast recursive unlock
 142   ld_ptr(Rbox, BasicLock::displaced_header_offset_in_bytes(), Rmark);
 143   br_null_short(Rmark, Assembler::pt, done);
 144   if (!UseBiasedLocking) {
 145     // load object
 146     ld_ptr(Rbox, BasicObjectLock::obj_offset_in_bytes(), Roop);
 147     verify_oop(Roop);
 148   }
 149 
 150   // Check if it is still a lightweight lock; this is true if we see
 151   // the stack address of the basicLock in the markOop of the object
 152   cas_ptr(mark_addr.base(), Rbox, Rmark);
 153   cmp(Rbox, Rmark);
 154 
 155   brx(Assembler::notEqual, false, Assembler::pn, slow_case);
 156   delayed()->nop();
 157   // Done
 158   bind(done);
 159 }
 160 
 161 
 162 void C1_MacroAssembler::try_allocate(
 163   Register obj,                        // result: pointer to object after successful allocation
 164   Register var_size_in_bytes,          // object size in bytes if unknown at compile time; invalid otherwise
 165   int      con_size_in_bytes,          // object size in bytes if   known at compile time
 166   Register t1,                         // temp register, must be global register for incr_allocated_bytes
 167   Register t2,                         // temp register
 168   Label&   slow_case                   // continuation point if fast allocation fails
 169 ) {
 170   RegisterOrConstant size_in_bytes = var_size_in_bytes->is_valid()
 171     ? RegisterOrConstant(var_size_in_bytes) : RegisterOrConstant(con_size_in_bytes);
 172   if (UseTLAB) {


 259       cmp_and_brx_short(t1, var_size_in_bytes, Assembler::equal, Assembler::pt, ok);
 260     } else {
 261       cmp_and_brx_short(t1, con_size_in_bytes, Assembler::equal, Assembler::pt, ok);
 262     }
 263     stop("bad size in initialize_object");
 264     should_not_reach_here();
 265 
 266     bind(ok);
 267   }
 268 
 269 #endif
 270 
 271   // initialize body
 272   const int threshold = 5 * HeapWordSize;              // approximate break even point for code size
 273   if (var_size_in_bytes != noreg) {
 274     // use a loop
 275     add(obj, hdr_size_in_bytes, t1);               // compute address of first element
 276     sub(var_size_in_bytes, hdr_size_in_bytes, t2); // compute size of body
 277     initialize_body(t1, t2);
 278 #ifndef _LP64
 279   } else if (con_size_in_bytes < threshold * 2) {
 280     // on v9 we can do double word stores to fill twice as much space.
 281     assert(hdr_size_in_bytes % 8 == 0, "double word aligned");
 282     assert(con_size_in_bytes % 8 == 0, "double word aligned");
 283     for (int i = hdr_size_in_bytes; i < con_size_in_bytes; i += 2 * HeapWordSize) stx(G0, obj, i);
 284 #endif
 285   } else if (con_size_in_bytes <= threshold) {
 286     // use explicit NULL stores
 287     for (int i = hdr_size_in_bytes; i < con_size_in_bytes; i += HeapWordSize)     st_ptr(G0, obj, i);
 288   } else if (con_size_in_bytes > hdr_size_in_bytes) {
 289     // use a loop
 290     const Register base  = t1;
 291     const Register index = t2;
 292     add(obj, hdr_size_in_bytes, base);               // compute address of first element
 293     // compute index = number of words to clear
 294     set(con_size_in_bytes - hdr_size_in_bytes, index);
 295     initialize_body(base, index);
 296   }
 297 
 298   if (CURRENT_ENV->dtrace_alloc_probes()) {
 299     assert(obj == O0, "must be");