< prev index next >

src/hotspot/cpu/x86/templateTable_x86.cpp

Print this page
rev 47680 : [mq]: x86_tlab
rev 47681 : [mq]: x86_tlab2
rev 47682 : [mq]: x86_3


3827   // profile this call
3828   __ profile_call(rbcp);
3829   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3830 
3831   __ verify_oop(rax_callsite);
3832 
3833   __ jump_from_interpreted(rbx_method, rdx);
3834 }
3835 
3836 //-----------------------------------------------------------------------------
3837 // Allocation
3838 
3839 void TemplateTable::_new() {
3840   transition(vtos, atos);
3841   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3842   Label slow_case;
3843   Label slow_case_no_pop;
3844   Label done;
3845   Label initialize_header;
3846   Label initialize_object;  // including clearing the fields
3847   Label allocate_shared;
3848 
3849   __ get_cpool_and_tags(rcx, rax);
3850 
3851   // Make sure the class we're about to instantiate has been resolved.
3852   // This is done before loading InstanceKlass to be consistent with the order
3853   // how Constant Pool is updated (see ConstantPool::klass_at_put)
3854   const int tags_offset = Array<u1>::base_offset_in_bytes();
3855   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3856   __ jcc(Assembler::notEqual, slow_case_no_pop);
3857 
3858   // get InstanceKlass
3859   __ load_resolved_klass_at_index(rcx, rdx, rcx);
3860   __ push(rcx);  // save the contents of klass for initializing the header
3861 
3862   // make sure klass is initialized & doesn't have finalizer
3863   // make sure klass is fully initialized
3864   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3865   __ jcc(Assembler::notEqual, slow_case);
3866 
3867   // get instance_size in InstanceKlass (scaled to a count of bytes)
3868   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
3869   // test to see if it has a finalizer or is malformed in some way
3870   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3871   __ jcc(Assembler::notZero, slow_case);
3872 
3873   //
3874   // Allocate the instance
3875   // 1) Try to allocate in the TLAB
3876   // 2) if fail and the object is large allocate in the shared Eden
3877   // 3) if the above fails (or is not applicable), go to a slow case
3878   // (creates a new TLAB, etc.)






3879 
3880   const bool allow_shared_alloc =
3881     Universe::heap()->supports_inline_contig_alloc();
3882 
3883   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
3884 #ifndef _LP64
3885   if (UseTLAB || allow_shared_alloc) {
3886     __ get_thread(thread);
3887   }
3888 #endif // _LP64
3889 
3890   if (UseTLAB) {
3891     __ movptr(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset())));
3892     __ lea(rbx, Address(rax, rdx, Address::times_1));
3893     __ cmpptr(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset())));
3894     __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
3895     __ movptr(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
3896     if (ZeroTLAB) {
3897       // the fields have been already cleared
3898       __ jmp(initialize_header);
3899     } else {
3900       // initialize both the header and fields
3901       __ jmp(initialize_object);
3902     }
3903   }
3904 
3905   // Allocation in the shared Eden, if allowed.
3906   //
3907   // rdx: instance size in bytes
3908   if (allow_shared_alloc) {
3909     __ bind(allocate_shared);
3910 
3911     ExternalAddress heap_top((address)Universe::heap()->top_addr());
3912     ExternalAddress heap_end((address)Universe::heap()->end_addr());
3913 
3914     Label retry;
3915     __ bind(retry);
3916     __ movptr(rax, heap_top);
3917     __ lea(rbx, Address(rax, rdx, Address::times_1));
3918     __ cmpptr(rbx, heap_end);
3919     __ jcc(Assembler::above, slow_case);
3920 
3921     // Compare rax, with the top addr, and if still equal, store the new
3922     // top addr in rbx, at the address of the top addr pointer. Sets ZF if was
3923     // equal, and clears it otherwise. Use lock prefix for atomicity on MPs.
3924     //
3925     // rax,: object begin
3926     // rbx,: object end
3927     // rdx: instance size in bytes
3928     __ locked_cmpxchgptr(rbx, heap_top);
3929 
3930     // if someone beat us on the allocation, try again, otherwise continue
3931     __ jcc(Assembler::notEqual, retry);
3932 
3933     __ incr_allocated_bytes(thread, rdx, 0);
3934   }

3935 
3936   if (UseTLAB || Universe::heap()->supports_inline_contig_alloc()) {


3937     // The object is initialized before the header.  If the object size is
3938     // zero, go directly to the header initialization.
3939     __ bind(initialize_object);
3940     __ decrement(rdx, sizeof(oopDesc));
3941     __ jcc(Assembler::zero, initialize_header);
3942 
3943     // Initialize topmost object field, divide rdx by 8, check if odd and
3944     // test if zero.
3945     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
3946     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
3947 
3948     // rdx must have been multiple of 8
3949 #ifdef ASSERT
3950     // make sure rdx was multiple of 8
3951     Label L;
3952     // Ignore partial flag stall after shrl() since it is debug VM
3953     __ jccb(Assembler::carryClear, L);
3954     __ stop("object size is not multiple of 2 - adjust this code");
3955     __ bind(L);
3956     // rdx must be > 0, no extra check needed here




3827   // profile this call
3828   __ profile_call(rbcp);
3829   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3830 
3831   __ verify_oop(rax_callsite);
3832 
3833   __ jump_from_interpreted(rbx_method, rdx);
3834 }
3835 
3836 //-----------------------------------------------------------------------------
3837 // Allocation
3838 
3839 void TemplateTable::_new() {
3840   transition(vtos, atos);
3841   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3842   Label slow_case;
3843   Label slow_case_no_pop;
3844   Label done;
3845   Label initialize_header;
3846   Label initialize_object;  // including clearing the fields

3847 
3848   __ get_cpool_and_tags(rcx, rax);
3849 
3850   // Make sure the class we're about to instantiate has been resolved.
3851   // This is done before loading InstanceKlass to be consistent with the order
3852   // how Constant Pool is updated (see ConstantPool::klass_at_put)
3853   const int tags_offset = Array<u1>::base_offset_in_bytes();
3854   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3855   __ jcc(Assembler::notEqual, slow_case_no_pop);
3856 
3857   // get InstanceKlass
3858   __ load_resolved_klass_at_index(rcx, rdx, rcx);
3859   __ push(rcx);  // save the contents of klass for initializing the header
3860 
3861   // make sure klass is initialized & doesn't have finalizer
3862   // make sure klass is fully initialized
3863   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3864   __ jcc(Assembler::notEqual, slow_case);
3865 
3866   // get instance_size in InstanceKlass (scaled to a count of bytes)
3867   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
3868   // test to see if it has a finalizer or is malformed in some way
3869   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3870   __ jcc(Assembler::notZero, slow_case);
3871 
3872   // Allocate the instance:
3873   // 1) If TLAB is enabled:
3874   //  a) Try to allocate in the TLAB
3875   //  b) If fails, go to the slow path.
3876   // 2) Else TLAB is disabled:
3877   //  a) If inline contiguous allocations are enabled:
3878   //    i) Try to allocate in eden
3879   //    ii) If fails due to heap end, go to slow path.
3880   // 3) If TLAB is enabled OR inline contiguous is enabled:
3881   //  a) Initialize the allocation
3882   //  b) Exit.
3883   // 4) If neither 1 OR 2 are applicable, go to slow path.
3884 
3885   const bool allow_shared_alloc =
3886     Universe::heap()->supports_inline_contig_alloc();
3887 
3888   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
3889 #ifndef _LP64
3890   if (UseTLAB || allow_shared_alloc) {
3891     __ get_thread(thread);
3892   }
3893 #endif // _LP64
3894 
3895   if (UseTLAB) {
3896     __ movptr(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset())));
3897     __ lea(rbx, Address(rax, rdx, Address::times_1));
3898     __ cmpptr(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset())));
3899     __ jcc(Assembler::above, slow_case);
3900     __ movptr(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
3901     if (ZeroTLAB) {
3902       // the fields have been already cleared
3903       __ jmp(initialize_header);
3904     } else {
3905       // initialize both the header and fields
3906       __ jmp(initialize_object);
3907     }
3908   } else {

3909     // Allocation in the shared Eden, if allowed.
3910     //
3911     // rdx: instance size in bytes
3912     if (allow_shared_alloc) {


3913       ExternalAddress heap_top((address)Universe::heap()->top_addr());
3914       ExternalAddress heap_end((address)Universe::heap()->end_addr());
3915 
3916       Label retry;
3917       __ bind(retry);
3918       __ movptr(rax, heap_top);
3919       __ lea(rbx, Address(rax, rdx, Address::times_1));
3920       __ cmpptr(rbx, heap_end);
3921       __ jcc(Assembler::above, slow_case);
3922 
3923       // Compare rax, with the top addr, and if still equal, store the new
3924       // top addr in rbx, at the address of the top addr pointer. Sets ZF if was
3925       // equal, and clears it otherwise. Use lock prefix for atomicity on MPs.
3926       //
3927       // rax,: object begin
3928       // rbx,: object end
3929       // rdx: instance size in bytes
3930       __ locked_cmpxchgptr(rbx, heap_top);
3931 
3932       // if someone beat us on the allocation, try again, otherwise continue
3933       __ jcc(Assembler::notEqual, retry);
3934 
3935       __ incr_allocated_bytes(thread, rdx, 0);
3936     }
3937   }
3938 
3939   // If UseTLAB or allow_shared_alloc are true, the object was created above and
3940   // needs to be initialized. Otherwise, skip and go to the slow path.
3941   if (UseTLAB || allow_shared_alloc) {
3942     // The object is initialized before the header.  If the object size is
3943     // zero, go directly to the header initialization.
3944     __ bind(initialize_object);
3945     __ decrement(rdx, sizeof(oopDesc));
3946     __ jcc(Assembler::zero, initialize_header);
3947 
3948     // Initialize topmost object field, divide rdx by 8, check if odd and
3949     // test if zero.
3950     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
3951     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
3952 
3953     // rdx must have been multiple of 8
3954 #ifdef ASSERT
3955     // make sure rdx was multiple of 8
3956     Label L;
3957     // Ignore partial flag stall after shrl() since it is debug VM
3958     __ jccb(Assembler::carryClear, L);
3959     __ stop("object size is not multiple of 2 - adjust this code");
3960     __ bind(L);
3961     // rdx must be > 0, no extra check needed here


< prev index next >