< prev index next >

src/hotspot/cpu/arm/macroAssembler_arm.cpp

Print this page
rev 48562 : [mq]: heap23


1292   // Update heap_top if allocation succeeded
1293   cmp(obj_end, heap_end);
1294   b(slow_case, hi);
1295 
1296 #ifdef AARCH64
1297   stxr(heap_end/*scratched*/, obj_end, top_addr);
1298   cbnz_w(heap_end, retry);
1299 #else
1300   atomic_cas_bool(obj, obj_end, top_addr, 0, heap_end/*scratched*/);
1301   b(retry, ne);
1302 #endif // AARCH64
1303 }
1304 
1305 // Puts address of allocated object into register `obj` and end of allocated object into register `obj_end`.
1305 // Branches to `slow_case` when the TLAB cannot satisfy the request; `tmp1` is clobbered (holds the TLAB end).
1306 void MacroAssembler::tlab_allocate(Register obj, Register obj_end, Register tmp1,
1307                                  RegisterOrConstant size_expression, Label& slow_case) {
1308   const Register tlab_end = tmp1;  // tmp1 is repurposed to hold the TLAB end pointer
1309   assert_different_registers(obj, obj_end, tlab_end);
1310 
1311   ldr(obj, Address(Rthread, JavaThread::tlab_top_offset()));       // obj = current TLAB top
1312   ldr(tlab_end, Address(Rthread, JavaThread::tlab_end_offset()));  // tlab_end = current TLAB end
1313   add_rc(obj_end, obj, size_expression);                           // obj_end = obj + size (register or constant)
1314   cmp(obj_end, tlab_end);
1315   b(slow_case, hi);                                                // unsigned obj_end > tlab_end: does not fit, go slow
1316   str(obj_end, Address(Rthread, JavaThread::tlab_top_offset()));   // commit: bump TLAB top past the new object
1317 }
1318 
1319 void MacroAssembler::tlab_refill(Register top, Register tmp1, Register tmp2,
1320                                  Register tmp3, Register tmp4,
1321                                Label& try_eden, Label& slow_case) {
1322   if (!Universe::heap()->supports_inline_contig_alloc()) {
1323     b(slow_case);
1324     return;
1325   }
1326 
1327   InlinedAddress intArrayKlass_addr((address)Universe::intArrayKlassObj_addr());
1328   Label discard_tlab, do_refill;
1329   ldr(top,  Address(Rthread, JavaThread::tlab_top_offset()));
1330   ldr(tmp1, Address(Rthread, JavaThread::tlab_end_offset()));
1331   ldr(tmp2, Address(Rthread, JavaThread::tlab_refill_waste_limit_offset()));
1332 
1333   // Calculate amount of free space
1334   sub(tmp1, tmp1, top);
1335   // Retain tlab and allocate in shared space
1336   // if the amount of free space in tlab is too large to discard
1337   cmp(tmp2, AsmOperand(tmp1, lsr, LogHeapWordSize));
1338   b(discard_tlab, ge);
1339 
1340   // Increment waste limit to prevent getting stuck on this slow path
1341   mov_slow(tmp3, ThreadLocalAllocBuffer::refill_waste_limit_increment());
1342   add(tmp2, tmp2, tmp3);
1343   str(tmp2, Address(Rthread, JavaThread::tlab_refill_waste_limit_offset()));
1344   if (TLABStats) {
1345     ldr_u32(tmp2, Address(Rthread, JavaThread::tlab_slow_allocations_offset()));
1346     add_32(tmp2, tmp2, 1);
1347     str_32(tmp2, Address(Rthread, JavaThread::tlab_slow_allocations_offset()));
1348   }
1349   b(try_eden);
1350   bind_literal(intArrayKlass_addr);


1380   sub(tmp1, top, tmp1); // size of tlab's allocated portion
1381   incr_allocated_bytes(tmp1, tmp2);
1382 
1383   bind(do_refill);
1384   // Refill the tlab with an eden allocation
1385   ldr(tmp1, Address(Rthread, JavaThread::tlab_size_offset()));
1386   logical_shift_left(tmp4, tmp1, LogHeapWordSize);
1387   eden_allocate(top, tmp1, tmp2, tmp3, tmp4, slow_case);
1388   str(top, Address(Rthread, JavaThread::tlab_start_offset()));
1389   str(top, Address(Rthread, JavaThread::tlab_top_offset()));
1390 
1391 #ifdef ASSERT
1392   // Verify that tmp1 contains tlab_end
1393   ldr(tmp2, Address(Rthread, JavaThread::tlab_size_offset()));
1394   add(tmp2, top, AsmOperand(tmp2, lsl, LogHeapWordSize));
1395   cmp(tmp1, tmp2);
1396   breakpoint(ne);
1397 #endif
1398 
1399   sub(tmp1, tmp1, ThreadLocalAllocBuffer::alignment_reserve_in_bytes());
1400   str(tmp1, Address(Rthread, JavaThread::tlab_end_offset()));
1401 
1402   if (ZeroTLAB) {
1403     // clobbers start and tmp
1404     // top must be preserved!
1405     add(tmp1, tmp1, ThreadLocalAllocBuffer::alignment_reserve_in_bytes());
1406     ldr(tmp2, Address(Rthread, JavaThread::tlab_start_offset()));
1407     zero_memory(tmp2, tmp1, tmp3);
1408   }
1409 }
1410 
1411 // Fills memory regions [start..end] with zeroes. Clobbers `start` and `tmp` registers.
1412 void MacroAssembler::zero_memory(Register start, Register end, Register tmp) {
1413   Label loop;
1414   const Register ptr = start;
1415 
1416 #ifdef AARCH64
1417   // TODO-AARCH64 - compare performance of 2x word zeroing with simple 1x
1418   const Register size = tmp;
1419   Label remaining, done;
1420 




1292   // Update heap_top if allocation succeeded
1293   cmp(obj_end, heap_end);
1294   b(slow_case, hi);
1295 
1296 #ifdef AARCH64
1297   stxr(heap_end/*scratched*/, obj_end, top_addr);
1298   cbnz_w(heap_end, retry);
1299 #else
1300   atomic_cas_bool(obj, obj_end, top_addr, 0, heap_end/*scratched*/);
1301   b(retry, ne);
1302 #endif // AARCH64
1303 }
1304 
1305 // Puts address of allocated object into register `obj` and end of allocated object into register `obj_end`.
1305 // Branches to `slow_case` when the TLAB cannot satisfy the request; `tmp1` is clobbered (holds the TLAB end).
1306 void MacroAssembler::tlab_allocate(Register obj, Register obj_end, Register tmp1,
1307                                  RegisterOrConstant size_expression, Label& slow_case) {
1308   const Register tlab_end = tmp1;  // tmp1 is repurposed to hold the TLAB end pointer
1309   assert_different_registers(obj, obj_end, tlab_end);
1310 
1311   ldr(obj, Address(Rthread, JavaThread::tlab_top_offset()));               // obj = current TLAB top
1312   ldr(tlab_end, Address(Rthread, JavaThread::tlab_current_end_offset()));  // tlab_end = current TLAB end (renamed accessor on this side of the diff)
1313   add_rc(obj_end, obj, size_expression);                                   // obj_end = obj + size (register or constant)
1314   cmp(obj_end, tlab_end);
1315   b(slow_case, hi);                                                        // unsigned obj_end > tlab_end: does not fit, go slow
1316   str(obj_end, Address(Rthread, JavaThread::tlab_top_offset()));           // commit: bump TLAB top past the new object
1317 }
1318 
1319 void MacroAssembler::tlab_refill(Register top, Register tmp1, Register tmp2,
1320                                  Register tmp3, Register tmp4,
1321                                Label& try_eden, Label& slow_case) {
1322   if (!Universe::heap()->supports_inline_contig_alloc()) {
1323     b(slow_case);
1324     return;
1325   }
1326 
1327   InlinedAddress intArrayKlass_addr((address)Universe::intArrayKlassObj_addr());
1328   Label discard_tlab, do_refill;
1329   ldr(top,  Address(Rthread, JavaThread::tlab_top_offset()));
1330   ldr(tmp1, Address(Rthread, JavaThread::tlab_current_end_offset()));
1331   ldr(tmp2, Address(Rthread, JavaThread::tlab_refill_waste_limit_offset()));
1332 
1333   // Calculate amount of free space
1334   sub(tmp1, tmp1, top);
1335   // Retain tlab and allocate in shared space
1336   // if the amount of free space in tlab is too large to discard
1337   cmp(tmp2, AsmOperand(tmp1, lsr, LogHeapWordSize));
1338   b(discard_tlab, ge);
1339 
1340   // Increment waste limit to prevent getting stuck on this slow path
1341   mov_slow(tmp3, ThreadLocalAllocBuffer::refill_waste_limit_increment());
1342   add(tmp2, tmp2, tmp3);
1343   str(tmp2, Address(Rthread, JavaThread::tlab_refill_waste_limit_offset()));
1344   if (TLABStats) {
1345     ldr_u32(tmp2, Address(Rthread, JavaThread::tlab_slow_allocations_offset()));
1346     add_32(tmp2, tmp2, 1);
1347     str_32(tmp2, Address(Rthread, JavaThread::tlab_slow_allocations_offset()));
1348   }
1349   b(try_eden);
1350   bind_literal(intArrayKlass_addr);


1380   sub(tmp1, top, tmp1); // size of tlab's allocated portion
1381   incr_allocated_bytes(tmp1, tmp2);
1382 
1383   bind(do_refill);
1384   // Refill the tlab with an eden allocation
1385   ldr(tmp1, Address(Rthread, JavaThread::tlab_size_offset()));
1386   logical_shift_left(tmp4, tmp1, LogHeapWordSize);
1387   eden_allocate(top, tmp1, tmp2, tmp3, tmp4, slow_case);
1388   str(top, Address(Rthread, JavaThread::tlab_start_offset()));
1389   str(top, Address(Rthread, JavaThread::tlab_top_offset()));
1390 
1391 #ifdef ASSERT
1392   // Verify that tmp1 contains tlab_end
1393   ldr(tmp2, Address(Rthread, JavaThread::tlab_size_offset()));
1394   add(tmp2, top, AsmOperand(tmp2, lsl, LogHeapWordSize));
1395   cmp(tmp1, tmp2);
1396   breakpoint(ne);
1397 #endif
1398 
1399   sub(tmp1, tmp1, ThreadLocalAllocBuffer::alignment_reserve_in_bytes());
1400   str(tmp1, Address(Rthread, JavaThread::tlab_current_end_offset()));
1401 
1402   if (ZeroTLAB) {
1403     // clobbers start and tmp
1404     // top must be preserved!
1405     add(tmp1, tmp1, ThreadLocalAllocBuffer::alignment_reserve_in_bytes());
1406     ldr(tmp2, Address(Rthread, JavaThread::tlab_start_offset()));
1407     zero_memory(tmp2, tmp1, tmp3);
1408   }
1409 }
1410 
1411 // Fills memory regions [start..end] with zeroes. Clobbers `start` and `tmp` registers.
1412 void MacroAssembler::zero_memory(Register start, Register end, Register tmp) {
1413   Label loop;
1414   const Register ptr = start;
1415 
1416 #ifdef AARCH64
1417   // TODO-AARCH64 - compare performance of 2x word zeroing with simple 1x
1418   const Register size = tmp;
1419   Label remaining, done;
1420 


< prev index next >