
src/hotspot/cpu/arm/templateTable_arm.cpp

--- old/src/hotspot/cpu/arm/templateTable_arm.cpp

4485   __ tbnz(Rsize, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
4486 
4487   // Allocate the instance:
4488   //  If TLAB is enabled:
4489   //    Try to allocate in the TLAB.
4490   //    If it fails, go to the slow path.
4491   //  Else If inline contiguous allocations are enabled:
4492   //    Try to allocate in eden.
4493   //    If it fails due to reaching the heap end, go to the slow path.
4494   //
4495   //  If TLAB is enabled OR inline contiguous is enabled:
4496   //    Initialize the allocation.
4497   //    Exit.
4498   //
4499   //  Go to slow path.
4500   if (UseTLAB) {
4501     const Register Rtlab_top = R1_tmp;
4502     const Register Rtlab_end = R2_tmp;
4503     assert_different_registers(Robj, Rsize, Rklass, Rtlab_top, Rtlab_end);
4504 
4505     __ ldr(Robj, Address(Rthread, JavaThread::tlab_top_offset()));
4506     __ ldr(Rtlab_end, Address(Rthread, in_bytes(JavaThread::tlab_end_offset())));
4507     __ add(Rtlab_top, Robj, Rsize);
4508     __ cmp(Rtlab_top, Rtlab_end);
4509     __ b(slow_case, hi);
4510     __ str(Rtlab_top, Address(Rthread, JavaThread::tlab_top_offset()));
4511     if (ZeroTLAB) {
4512       // the fields have already been cleared
4513       __ b(initialize_header);
4514     } else {
4515       // initialize both the header and fields
4516       __ b(initialize_object);
4517     }
4518   } else {
4519     // Allocation in the shared Eden, if allowed.
4520     if (allow_shared_alloc) {
4521       const Register Rheap_top_addr = R2_tmp;
4522       const Register Rheap_top = R5_tmp;
4523       const Register Rheap_end = Rtemp;
4524       assert_different_registers(Robj, Rklass, Rsize, Rheap_top_addr, Rheap_top, Rheap_end, LR);
4525 
4526       // Rheap_end is (re)loaded inside the loop, since the CAS below also scratches it
4527       __ ldr_literal(Rheap_top_addr, Lheap_top_addr);
4528 
4529       Label retry;
4530       __ bind(retry);
4531 
4532 #ifdef AARCH64
4533       __ ldxr(Robj, Rheap_top_addr);
4534 #else
4535       __ ldr(Robj, Address(Rheap_top_addr));
4536 #endif // AARCH64
4537 
4538       __ ldr(Rheap_end, Address(Rheap_top_addr, (intptr_t)Universe::heap()->end_addr()-(intptr_t)Universe::heap()->top_addr()));
4539       __ add(Rheap_top, Robj, Rsize);
4540       __ cmp(Rheap_top, Rheap_end);
4541       __ b(slow_case, hi);
4542 
4543       // Update heap top atomically.
4544       // If someone beats us on the allocation, try again, otherwise continue.
4545 #ifdef AARCH64
4546       __ stxr(Rtemp2, Rheap_top, Rheap_top_addr);
4547       __ cbnz_w(Rtemp2, retry);
4548 #else
4549       __ atomic_cas_bool(Robj, Rheap_top, Rheap_top_addr, 0, Rheap_end/*scratched*/);
4550       __ b(retry, ne);
4551 #endif // AARCH64
4552 
4553       __ incr_allocated_bytes(Rsize, Rtemp);
4554     }
4555   }
4556 
4557   if (UseTLAB || allow_shared_alloc) {
4558     const Register Rzero0 = R1_tmp;
4559     const Register Rzero1 = R2_tmp;
4560     const Register Rzero_end = R5_tmp;
4561     const Register Rzero_cur = Rtemp;
4562     assert_different_registers(Robj, Rsize, Rklass, Rzero0, Rzero1, Rzero_cur, Rzero_end);
4563 
4564     // The object body is initialized before the header.  If the body size
4565     // is zero, go directly to the header initialization.
4566     __ bind(initialize_object);
4567     __ subs(Rsize, Rsize, sizeof(oopDesc));
4568     __ add(Rzero_cur, Robj, sizeof(oopDesc));
4569     __ b(initialize_header, eq);
4570 
4571 #ifdef ASSERT
4572     // make sure Rsize is a multiple of 8
4573     Label L;

+++ new/src/hotspot/cpu/arm/templateTable_arm.cpp

4485   __ tbnz(Rsize, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
4486 
4487   // Allocate the instance:
4488   //  If TLAB is enabled:
4489   //    Try to allocate in the TLAB.
4490   //    If it fails, go to the slow path.
4491   //  Else If inline contiguous allocations are enabled:
4492   //    Try to allocate in eden.
4493   //    If it fails due to reaching the heap end, go to the slow path.
4494   //
4495   //  If TLAB is enabled OR inline contiguous is enabled:
4496   //    Initialize the allocation.
4497   //    Exit.
4498   //
4499   //  Go to slow path.
4500   if (UseTLAB) {
4501     const Register Rtlab_top = R1_tmp;
4502     const Register Rtlab_end = R2_tmp;
4503     assert_different_registers(Robj, Rsize, Rklass, Rtlab_top, Rtlab_end);
4504 
4505     __ tlab_allocate(Robj, Rtlab_top, Rtlab_end, Rsize, slow_case);
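[Reviewer note] The deleted lines above (old 4505-4510) are folded into the
tlab_allocate helper. A minimal sketch of what the helper presumably expands
to, assuming it simply wraps the removed sequence; the actual signature and
register roles in macroAssembler_arm.cpp may differ:

    // Hypothetical sketch: bump-pointer allocation in the TLAB.
    // At this call site: obj = Robj, obj_end = Rtlab_top, tmp1 = Rtlab_end,
    // size = Rsize.
    void MacroAssembler::tlab_allocate(Register obj, Register obj_end, Register tmp1,
                                       Register size, Label& slow_case) {
      ldr(obj, Address(Rthread, JavaThread::tlab_top_offset()));          // current TLAB top
      ldr(tmp1, Address(Rthread, in_bytes(JavaThread::tlab_end_offset())));
      add(obj_end, obj, size);                                            // top after this allocation
      cmp(obj_end, tmp1);
      b(slow_case, hi);                                                   // unsigned: would pass the TLAB end
      str(obj_end, Address(Rthread, JavaThread::tlab_top_offset()));      // publish the new top
    }
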
4506     if (ZeroTLAB) {
4507       // the fields have already been cleared
4508       __ b(initialize_header);
4509     } else {
4510       // initialize both the header and fields
4511       __ b(initialize_object);
4512     }
4513   } else {
4514     // Allocation in the shared Eden, if allowed.
4515     if (allow_shared_alloc) {
4516       const Register Rheap_top_addr = R2_tmp;
4517       const Register Rheap_top = R5_tmp;
4518       const Register Rheap_end = Rtemp;
4519       assert_different_registers(Robj, Rklass, Rsize, Rheap_top_addr, Rheap_top, Rheap_end, LR);
4520 
4521       __ eden_allocate(Robj, Rheap_top, Rheap_top_addr, Rheap_end, Rsize, slow_case);
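[Reviewer note] Likewise, the CAS retry loop from the old frame (old 4526-4551)
moves into eden_allocate. A sketch of the ARM32 path, assuming the helper wraps
the removed sequence; the AArch64 variant used ldxr/stxr instead, and whether
the helper still bumps the per-thread counter (incr_allocated_bytes in the old
code) is not visible in this hunk:

    // Hypothetical sketch: shared-Eden bump allocation with a CAS retry loop.
    // heap_top_addr holds the address of the heap top word (the old code
    // loaded it with ldr_literal before entering the loop).
    void MacroAssembler::eden_allocate(Register obj, Register obj_end,
                                       Register heap_top_addr, Register heap_end,
                                       Register size, Label& slow_case) {
      Label retry;
      bind(retry);
      ldr(obj, Address(heap_top_addr));        // current shared heap top
      // Reload the heap end on every iteration: the CAS below scratches it.
      ldr(heap_end, Address(heap_top_addr,
          (intptr_t)Universe::heap()->end_addr() - (intptr_t)Universe::heap()->top_addr()));
      add(obj_end, obj, size);
      cmp(obj_end, heap_end);
      b(slow_case, hi);                        // allocation would pass the heap end
      // Publish the new top atomically; if another thread won the race, retry.
      atomic_cas_bool(obj, obj_end, heap_top_addr, 0, heap_end /*scratched*/);
      b(retry, ne);
    }
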
4522     }
4523   }
4524 
4525   if (UseTLAB || allow_shared_alloc) {
4526     const Register Rzero0 = R1_tmp;
4527     const Register Rzero1 = R2_tmp;
4528     const Register Rzero_end = R5_tmp;
4529     const Register Rzero_cur = Rtemp;
4530     assert_different_registers(Robj, Rsize, Rklass, Rzero0, Rzero1, Rzero_cur, Rzero_end);
4531 
4532     // The object body is initialized before the header.  If the body size
4533     // is zero, go directly to the header initialization.
4534     __ bind(initialize_object);
4535     __ subs(Rsize, Rsize, sizeof(oopDesc));
4536     __ add(Rzero_cur, Robj, sizeof(oopDesc));
4537     __ b(initialize_header, eq);
4538 
4539 #ifdef ASSERT
4540     // make sure Rsize is a multiple of 8
4541     Label L;
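[Reviewer note] Both frames are cut off inside the ASSERT block here. The check
that Rsize is a multiple of 8 exists because the object body is cleared 8 bytes
per iteration. For orientation only, a hypothetical ARM32 zeroing loop in the
style of the registers set up above; the actual code past line 4541 is not
shown in this hunk:

    Label loop;
    __ mov(Rzero0, 0);
    __ mov(Rzero1, 0);
    __ add(Rzero_end, Rzero_cur, Rsize);     // Rsize is the body size, header excluded
    __ bind(loop);
    __ str(Rzero0, Address(Rzero_cur));      // clear low word
    __ str(Rzero1, Address(Rzero_cur, 4));   // clear high word
    __ add(Rzero_cur, Rzero_cur, 8);
    __ cmp(Rzero_cur, Rzero_end);
    __ b(loop, ne);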

