src/cpu/sparc/vm/interp_masm_sparc.cpp

--- old/src/cpu/sparc/vm/interp_masm_sparc.cpp

1193 
1194     assert_different_registers(lock_reg, obj_reg, mark_reg, temp_reg);
1195 
1196     // load markOop from object into mark_reg
1197     ld_ptr(mark_addr, mark_reg);
1198 
1199     if (UseBiasedLocking) {
1200       biased_locking_enter(obj_reg, mark_reg, temp_reg, done, &slow_case);
1201     }
1202 
1203     // get the address of basicLock on stack that will be stored in the object
1204     // we need a temporary register here as we do not want to clobber lock_reg
1205     // (cas clobbers the destination register)
1206     mov(lock_reg, temp_reg);
1207     // set mark reg to be (markOop of object | UNLOCK_VALUE)
1208     or3(mark_reg, markOopDesc::unlocked_value, mark_reg);
1209     // initialize the box  (Must happen before we update the object mark!)
1210     st_ptr(mark_reg, lock_addr, BasicLock::displaced_header_offset_in_bytes());
1211     // compare and exchange object_addr, markOop | 1, stack address of basicLock
1212     assert(mark_addr.disp() == 0, "cas must take a zero displacement");
1213     casx_under_lock(mark_addr.base(), mark_reg, temp_reg,
1214       (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
1215 
1216     // if the compare and exchange succeeded we are done (we saw an unlocked object)
1217     cmp_and_brx_short(mark_reg, temp_reg, Assembler::equal, Assembler::pt, done);
1218 
1219     // We did not see an unlocked object so try the fast recursive case
1220 
1221     // Check if owner is self by comparing the value in the markOop of object
1222     // with the stack pointer
1223     sub(temp_reg, SP, temp_reg);
1224 #ifdef _LP64
1225     sub(temp_reg, STACK_BIAS, temp_reg);
1226 #endif
1227     assert(os::vm_page_size() > 0xfff, "page size too small - change the constant");
1228 
1229     // Composite "andcc" test:
1230     // (a) %sp -vs- markword proximity check, and,
1231     // (b) verify mark word LSBs == 0 (Stack-locked).
1232     //
1233     // FFFFF003/FFFFFFFFFFFF003 is (markOopDesc::lock_mask_in_place | -os::vm_page_size())
1234     // Note that the page size used for %sp proximity testing is arbitrary and is


1274     }
1275 
1276     // Test first if we are in the fast recursive case
1277     Address lock_addr(lock_reg, BasicObjectLock::lock_offset_in_bytes() + BasicLock::displaced_header_offset_in_bytes());
1278     ld_ptr(lock_addr, displaced_header_reg);
1279     br_null(displaced_header_reg, true, Assembler::pn, done);
1280     delayed()->st_ptr(G0, lockobj_addr);  // free entry
1281 
1282     // See if it is still a light weight lock, if so we just unlock
1283     // the object and we are done
1284 
1285     if (!UseBiasedLocking) {
1286       // load the object out of the BasicObjectLock
1287       ld_ptr(lockobj_addr, obj_reg);
1288     }
1289 
1290     // we have the displaced header in displaced_header_reg
1291     // we expect to see the stack address of the basicLock in case the
1292     // lock is still a light weight lock (lock_reg)
1293     assert(mark_addr.disp() == 0, "cas must take a zero displacement");
1294     casx_under_lock(mark_addr.base(), lock_reg, displaced_header_reg,
1295       (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
1296     cmp(lock_reg, displaced_header_reg);
1297     brx(Assembler::equal, true, Assembler::pn, done);
1298     delayed()->st_ptr(G0, lockobj_addr);  // free entry
1299 
1300     // The lock has been converted into a heavy lock and hence
1301     // we need to get into the slow case
1302 
1303     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit), lock_reg);
1304 
1305     bind(done);
1306   }
1307 }
1308 
1309 #ifndef CC_INTERP
1310 
1311 // Get the method data pointer from the Method* and set the
1312 // specified register to its value.
1313 
1314 void InterpreterMacroAssembler::set_method_data_pointer() {
1315   assert(ProfileInterpreter, "must be profiling interpreter");

+++ new/src/cpu/sparc/vm/interp_masm_sparc.cpp

1193 
1194     assert_different_registers(lock_reg, obj_reg, mark_reg, temp_reg);
1195 
1196     // load markOop from object into mark_reg
1197     ld_ptr(mark_addr, mark_reg);
1198 
1199     if (UseBiasedLocking) {
1200       biased_locking_enter(obj_reg, mark_reg, temp_reg, done, &slow_case);
1201     }
1202 
1203     // get the address of basicLock on stack that will be stored in the object
1204     // we need a temporary register here as we do not want to clobber lock_reg
1205     // (cas clobbers the destination register)
1206     mov(lock_reg, temp_reg);
1207     // set mark reg to be (markOop of object | UNLOCK_VALUE)
1208     or3(mark_reg, markOopDesc::unlocked_value, mark_reg);
1209     // initialize the box  (Must happen before we update the object mark!)
1210     st_ptr(mark_reg, lock_addr, BasicLock::displaced_header_offset_in_bytes());
1211     // compare and exchange object_addr, markOop | 1, stack address of basicLock
1212     assert(mark_addr.disp() == 0, "cas must take a zero displacement");
1213     cas_ptr(mark_addr.base(), mark_reg, temp_reg);

1214 
1215     // if the compare and exchange succeeded we are done (we saw an unlocked object)
1216     cmp_and_brx_short(mark_reg, temp_reg, Assembler::equal, Assembler::pt, done);
1217 
1218     // We did not see an unlocked object so try the fast recursive case
1219 
1220     // Check if owner is self by comparing the value in the markOop of object
1221     // with the stack pointer
1222     sub(temp_reg, SP, temp_reg);
1223 #ifdef _LP64
1224     sub(temp_reg, STACK_BIAS, temp_reg);
1225 #endif
1226     assert(os::vm_page_size() > 0xfff, "page size too small - change the constant");
1227 
1228     // Composite "andcc" test:
1229     // (a) %sp -vs- markword proximity check, and,
1230     // (b) verify mark word LSBs == 0 (Stack-locked).
1231     //
1232     // FFFFF003/FFFFFFFFFFFF003 is (markOopDesc::lock_mask_in_place | -os::vm_page_size())
1233     // Note that the page size used for %sp proximity testing is arbitrary and is


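For readers following the locking fast path above: below is a rough, stand-alone C++ sketch of what the generated code does, with std::atomic standing in for the SPARC cas instruction. The identifiers (fast_enter, BasicLockBox, kUnlockedValue, kLockMask) are illustrative stand-ins rather than HotSpot names, and biased locking as well as the 64-bit STACK_BIAS adjustment are left out.

#include <atomic>
#include <cstdint>
#include <cstdio>

struct BasicLockBox {               // the on-stack "box" (BasicLock)
  uintptr_t displaced_header;       // saved mark word, or 0 for a recursive entry
};

constexpr uintptr_t kUnlockedValue = 0x1;  // stand-in for markOopDesc::unlocked_value
constexpr uintptr_t kLockMask      = 0x3;  // stand-in for markOopDesc::lock_mask_in_place

// Fast-path monitorenter: true on success, false when the caller has to fall
// back to the runtime slow path.
bool fast_enter(std::atomic<uintptr_t>& header, BasicLockBox* box,
                uintptr_t sp, uintptr_t page_size) {
  // Build the value we expect to see in the header (mark | unlocked bit) and
  // record it in the box *before* publishing the box address with the CAS.
  uintptr_t expected = header.load(std::memory_order_relaxed) | kUnlockedValue;
  box->displaced_header = expected;

  // Try to swing the header from "unlocked mark" to "pointer to our box".
  if (header.compare_exchange_strong(expected, reinterpret_cast<uintptr_t>(box)))
    return true;                                   // we saw an unlocked object

  // CAS failed; 'expected' now holds what was actually in the header.  If it
  // is a stack address within a page of our own %sp and the low lock bits are
  // clear, this thread already owns the lock (the recursive case); an empty
  // box marks the recursive entry, matching the br_null test on unlock.
  uintptr_t delta = expected - sp;
  if ((delta & (kLockMask | (0 - page_size))) == 0) {   // cf. FFFFF003 above
    box->displaced_header = 0;
    return true;
  }
  return false;                                    // contended or inflated
}

int main() {
  std::atomic<uintptr_t> header{kUnlockedValue};   // an unlocked object's mark
  BasicLockBox box{};
  bool locked = fast_enter(header, &box,
                           reinterpret_cast<uintptr_t>(&box), 4096);
  std::printf("locked=%d header_is_box=%d\n",
              locked, header.load() == reinterpret_cast<uintptr_t>(&box));
}
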
1273     }
1274 
1275     // Test first if we are in the fast recursive case
1276     Address lock_addr(lock_reg, BasicObjectLock::lock_offset_in_bytes() + BasicLock::displaced_header_offset_in_bytes());
1277     ld_ptr(lock_addr, displaced_header_reg);
1278     br_null(displaced_header_reg, true, Assembler::pn, done);
1279     delayed()->st_ptr(G0, lockobj_addr);  // free entry
1280 
1281     // See if it is still a light weight lock, if so we just unlock
1282     // the object and we are done
1283 
1284     if (!UseBiasedLocking) {
1285       // load the object out of the BasicObjectLock
1286       ld_ptr(lockobj_addr, obj_reg);
1287     }
1288 
1289     // we have the displaced header in displaced_header_reg
1290     // we expect to see the stack address of the basicLock in case the
1291     // lock is still a light weight lock (lock_reg)
1292     assert(mark_addr.disp() == 0, "cas must take a zero displacement");
1293     cas_ptr(mark_addr.base(), lock_reg, displaced_header_reg);

1294     cmp(lock_reg, displaced_header_reg);
1295     brx(Assembler::equal, true, Assembler::pn, done);
1296     delayed()->st_ptr(G0, lockobj_addr);  // free entry
1297 
1298     // The lock has been converted into a heavy lock and hence
1299     // we need to get into the slow case
1300 
1301     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorexit), lock_reg);
1302 
1303     bind(done);
1304   }
1305 }
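
A matching sketch of the fast unlock path in the hunk above, reusing the BasicLockBox and atomic header model from the earlier sketch. A cleared displaced header marks a recursive entry (the br_null case), and a failed CAS means the header no longer points at our box, i.e. the lock was inflated and the runtime has to finish the job.

// Fast-path monitorexit: true when the lock was released here (or was a
// recursive entry), false when the lock has been inflated and the caller
// must call into the runtime, as the slow path above does.
bool fast_exit(std::atomic<uintptr_t>& header, BasicLockBox* box) {
  uintptr_t displaced = box->displaced_header;
  if (displaced == 0)
    return true;                       // recursive entry: nothing to restore

  // The header should still hold the address of our box; swing it back to
  // the displaced (unlocked) mark word that was saved on monitorenter.
  uintptr_t expected = reinterpret_cast<uintptr_t>(box);
  if (header.compare_exchange_strong(expected, displaced))
    return true;                       // still a light-weight lock: done

  return false;                        // header changed: lock was inflated
}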
1306 
1307 #ifndef CC_INTERP
1308 
1309 // Get the method data pointer from the Method* and set the
1310 // specified register to its value.
1311 
1312 void InterpreterMacroAssembler::set_method_data_pointer() {
1313   assert(ProfileInterpreter, "must be profiling interpreter");