src/hotspot/cpu/x86/templateTable_x86.cpp

3775   // [jk] not needed currently
3776   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3777   //                      in_bytes(ConstantPoolCache::base_offset() +
3778   //                               ConstantPoolCacheEntry::flags_offset())));
3779   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3780   // __ andl(rdx, 0x1);
3781   //
3782   __ movptr(rdx, Address(rcx, rbx, Address::times_ptr,
3783                          in_bytes(ConstantPoolCache::base_offset() +
3784                                   ConstantPoolCacheEntry::f2_offset())));
3785 
3786   // rax: object
3787   __ verify_oop(rax);
3788   __ null_check(rax);
3789   Address field(rax, rdx, Address::times_1);
3790 
3791   // access field
3792   switch (bytecode()) {
3793   case Bytecodes::_fast_qgetfield:
3794     {
3795       Label isFlattened, nonnull, Done;
3796       __ movptr(rscratch1, Address(rcx, rbx, Address::times_ptr,
3797                                    in_bytes(ConstantPoolCache::base_offset() +
3798                                             ConstantPoolCacheEntry::flags_offset())));
3799       __ test_field_is_flattened(rscratch1, rscratch2, isFlattened);
3800         // Non-flattened field case
3801         __ movptr(rscratch1, rax);
3802         __ load_heap_oop(rax, field);
3803         __ testptr(rax, rax);
3804         __ jcc(Assembler::notZero, nonnull);
3805           __ movptr(rax, rscratch1);
3806           __ movl(rcx, Address(rcx, rbx, Address::times_ptr,
3807                              in_bytes(ConstantPoolCache::base_offset() +
3808                                       ConstantPoolCacheEntry::flags_offset())));
3809           __ andl(rcx, ConstantPoolCacheEntry::field_index_mask);
3810           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3811                      rax, rcx);
3812         __ bind(nonnull);
3813         __ verify_oop(rax);
3814         __ jmp(Done);
3815       __ bind(isFlattened);

3816         __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3817                            in_bytes(ConstantPoolCache::base_offset() +
3818                                     ConstantPoolCacheEntry::flags_offset())));
3819         __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3820         __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3821                                      in_bytes(ConstantPoolCache::base_offset() +
3822                                               ConstantPoolCacheEntry::f1_offset())));
3823         call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field),
3824                 rax, rdx, rcx);
3825         __ verify_oop(rax);
3826       __ bind(Done);

3827     }
3828     break;
3829   case Bytecodes::_fast_agetfield:
3830     do_oop_load(_masm, field, rax);
3831     __ verify_oop(rax);
3832     break;
3833   case Bytecodes::_fast_lgetfield:
3834 #ifdef _LP64
3835     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3836 #else
3837   __ stop("should not be rewritten");
3838 #endif
3839     break;
3840   case Bytecodes::_fast_igetfield:
3841     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3842     break;
3843   case Bytecodes::_fast_bgetfield:
3844     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3845     break;
3846   case Bytecodes::_fast_sgetfield:


4282 
4283   // Note:  rax_callsite is already pushed by prepare_invoke
4284 
4285   // %%% should make a type profile for any invokedynamic that takes a ref argument
4286   // profile this call
4287   __ profile_call(rbcp);
4288   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4289 
4290   __ verify_oop(rax_callsite);
4291 
4292   __ jump_from_interpreted(rbx_method, rdx);
4293 }
4294 
4295 //-----------------------------------------------------------------------------
4296 // Allocation
4297 
4298 void TemplateTable::_new() {
4299   transition(vtos, atos);
4300   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4301   Label slow_case;
4302   Label slow_case_no_pop;
4303   Label done;
4304   Label initialize_header;
4305   Label initialize_object;  // including clearing the fields
4306 
4307   __ get_cpool_and_tags(rcx, rax);
4308 
4309   // Make sure the class we're about to instantiate has been resolved.
4310   // This is done before loading InstanceKlass to be consistent with the order
4311   // in which the constant pool is updated (see ConstantPool::klass_at_put)
4312   const int tags_offset = Array<u1>::base_offset_in_bytes();
4313   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4314   __ jcc(Assembler::notEqual, slow_case_no_pop);
4315 
4316   // get InstanceKlass
4317   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4318   __ push(rcx);  // save the klass for initializing the object header
4319 
4320   // make sure klass is initialized & doesn't have a finalizer
4321   // first, check that the klass is fully initialized
4322   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4323   __ jcc(Assembler::notEqual, slow_case);
4324 
4325   // get instance_size in InstanceKlass (scaled to a count of bytes)
4326   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4327   // test to see if it has a finalizer or is malformed in some way
4328   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4329   __ jcc(Assembler::notZero, slow_case);
4330 
4331   // Allocate the instance:
4332   //  If TLAB is enabled:
4333   //    Try to allocate in the TLAB.
4334   //    If fails, go to the slow path.
4335   //  Else If inline contiguous allocations are enabled:
4336   //    Try to allocate in eden.
4337   //    If fails due to heap end, go to slow path.
4338   //
4339   //  If TLAB is enabled OR inline contiguous is enabled:
4340   //    Initialize the allocation.
4341   //    Exit.
4342   //
4343   //  Go to slow path.
4344 
4345   const bool allow_shared_alloc =
4346     Universe::heap()->supports_inline_contig_alloc();
4347 
4348   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4349 #ifndef _LP64
4350   if (UseTLAB || allow_shared_alloc) {
4351     __ get_thread(thread);
4352   }
4353 #endif // _LP64
4354 
4355   if (UseTLAB) {
4356     __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4357     if (ZeroTLAB) {
4358       // the fields have already been cleared
4359       __ jmp(initialize_header);
4360     } else {
4361       // initialize both the header and fields
4362       __ jmp(initialize_object);
4363     }
4364   } else {
4365     // Allocation in the shared Eden, if allowed.
4366     //
4367     // rdx: instance size in bytes
4368     __ eden_allocate(thread, rax, rdx, 0, rbx, slow_case);
4369   }
4370 
4371   // If UseTLAB or allow_shared_alloc is true, the object was created above and
4372   // still needs to be initialized. Otherwise, skip and go to the slow path.
4373   if (UseTLAB || allow_shared_alloc) {
4374     // The object's fields are initialized before the header.  If the field size
4375     // (object size minus header) is zero, go directly to the header initialization.
4376     __ bind(initialize_object);
4377     __ decrement(rdx, sizeof(oopDesc));
4378     __ jcc(Assembler::zero, initialize_header);
4379 
4380     // Initialize topmost object field, divide rdx by 8, check if odd and
4381     // test if zero.
4382     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
4383     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4384 
4385     // rdx must have been a multiple of 8
4386 #ifdef ASSERT
4387     // make sure rdx was a multiple of 8
4388     Label L;
4389     // Ignore partial flag stall after shrl() since it is debug VM
4390     __ jcc(Assembler::carryClear, L);
4391     __ stop("object size is not multiple of 2 - adjust this code");
4392     __ bind(L);
4393     // rdx must be > 0, no extra check needed here
4394 #endif
4395 
4396     // initialize remaining object fields: rdx was a multiple of 8
4397     { Label loop;
4398     __ bind(loop);
4399     __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4400     NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4401     __ decrement(rdx);
4402     __ jcc(Assembler::notZero, loop);
4403     }
4404 
4405     // initialize object header only.
4406     __ bind(initialize_header);
4407     if (UseBiasedLocking) {
4408       __ pop(rcx);   // get saved klass back in the register.
4409       __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
4410       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx);
4411     } else {
4412       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()),
4413                 (intptr_t)markWord::prototype().value()); // header
4414       __ pop(rcx);   // get saved klass back in the register.
4415     }
4416 #ifdef _LP64
4417     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4418     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4419 #endif
4420     __ store_klass(rax, rcx);  // klass
4421 
4422     {
4423       SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
4424       // Trigger dtrace event for fastpath
4425       __ push(atos);
4426       __ call_VM_leaf(
4427            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
4428       __ pop(atos);
4429     }
4430 
4431     __ jmp(done);
4432   }
4433 
4434   // slow case
4435   __ bind(slow_case);
4436   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4437   __ bind(slow_case_no_pop);
4438 
4439   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4440   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4441 
4442   __ get_constant_pool(rarg1);
4443   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4444   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4445   __ verify_oop(rax);
4446 
4447   // continue
4448   __ bind(done);
4449 }
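
The stores and the clearing loop above open-code the allocation fast path summarized in the comment at lines 4331-4343: try the TLAB, otherwise try inline allocation in the shared eden, clear the instance fields (unless the TLAB is already zeroed), install the mark word and klass pointer, and fall back to InterpreterRuntime::_new on any failure. The following is a minimal, self-contained C++ sketch of that policy, not HotSpot code; the bump-pointer arena, the two-word header layout, and the slow-path callback are illustrative assumptions.

// A minimal model of the fast path (assumed types and layout, not HotSpot code).
#include <cstddef>
#include <cstdint>
#include <cstring>

struct BumpArena {                          // stands in for a TLAB or the shared eden
  char* top;
  char* end;
  void* allocate(size_t size) {             // bump-pointer allocation, nullptr when exhausted
    if (size > static_cast<size_t>(end - top)) return nullptr;
    void* obj = top;
    top += size;
    return obj;
  }
};

struct ObjectHeader {                       // illustrative two-word object header
  uintptr_t mark;
  void*     klass;
};

void* new_instance(BumpArena* tlab, BumpArena* eden, void* klass,
                   size_t instance_size, bool zero_tlab,
                   void* (*slow_path)(void* klass)) {
  // Try the TLAB first, then the shared eden if inline allocation is allowed.
  void* obj = tlab ? tlab->allocate(instance_size)
                   : (eden ? eden->allocate(instance_size) : nullptr);
  if (obj == nullptr) {
    return slow_path(klass);                // both fast paths failed: call the runtime
  }
  if (!(tlab && zero_tlab)) {
    // Clear the instance fields; a pre-zeroed TLAB makes this unnecessary.
    memset(static_cast<char*>(obj) + sizeof(ObjectHeader), 0,
           instance_size - sizeof(ObjectHeader));
  }
  ObjectHeader* h = static_cast<ObjectHeader*>(obj);
  h->mark  = 1;                             // placeholder for the prototype mark word
  h->klass = klass;                         // klass pointer installed last
  return obj;
}

The sketch keeps the ordering that matters in the assembly: the fields are cleared before the header is written, and the klass pointer is stored last.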
4450 
4451 void TemplateTable::defaultvalue() {
4452   transition(vtos, atos);
4453 
4454   Label slow_case;
4455   Label done;
4456 
4457   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);




3775   // [jk] not needed currently
3776   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3777   //                      in_bytes(ConstantPoolCache::base_offset() +
3778   //                               ConstantPoolCacheEntry::flags_offset())));
3779   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3780   // __ andl(rdx, 0x1);
3781   //
3782   __ movptr(rdx, Address(rcx, rbx, Address::times_ptr,
3783                          in_bytes(ConstantPoolCache::base_offset() +
3784                                   ConstantPoolCacheEntry::f2_offset())));
3785 
3786   // rax: object
3787   __ verify_oop(rax);
3788   __ null_check(rax);
3789   Address field(rax, rdx, Address::times_1);
3790 
3791   // access field
3792   switch (bytecode()) {
3793   case Bytecodes::_fast_qgetfield:
3794     {
3795       Label isFlattened, nonnull, Done, slow_path;
3796       __ movptr(rscratch1, Address(rcx, rbx, Address::times_ptr,
3797                                    in_bytes(ConstantPoolCache::base_offset() +
3798                                             ConstantPoolCacheEntry::flags_offset())));
3799       __ test_field_is_flattened(rscratch1, rscratch2, isFlattened);
3800         // Non-flattened field case
3801         __ movptr(rscratch1, rax);
3802         __ load_heap_oop(rax, field);
3803         __ testptr(rax, rax);
3804         __ jcc(Assembler::notZero, nonnull);
3805           __ movptr(rax, rscratch1);
3806           __ movl(rcx, Address(rcx, rbx, Address::times_ptr,
3807                              in_bytes(ConstantPoolCache::base_offset() +
3808                                       ConstantPoolCacheEntry::flags_offset())));
3809           __ andl(rcx, ConstantPoolCacheEntry::field_index_mask);
3810           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3811                      rax, rcx);
3812         __ bind(nonnull);
3813         __ verify_oop(rax);
3814         __ jmp(Done);
3815       __ bind(isFlattened);
3816         __ push(rdx); // save offset
3817         __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3818                            in_bytes(ConstantPoolCache::base_offset() +
3819                                     ConstantPoolCacheEntry::flags_offset())));
3820         __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3821         __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3822                                      in_bytes(ConstantPoolCache::base_offset() +
3823                                               ConstantPoolCacheEntry::f1_offset())));
3824         __ pop(rbx); // restore offset
3825         __ read_flattened_field(rcx, rdx, rbx, rax);

3826       __ bind(Done);
3827       __ verify_oop(rax);
3828     }
3829     break;
3830   case Bytecodes::_fast_agetfield:
3831     do_oop_load(_masm, field, rax);
3832     __ verify_oop(rax);
3833     break;
3834   case Bytecodes::_fast_lgetfield:
3835 #ifdef _LP64
3836     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3837 #else
3838   __ stop("should not be rewritten");
3839 #endif
3840     break;
3841   case Bytecodes::_fast_igetfield:
3842     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3843     break;
3844   case Bytecodes::_fast_bgetfield:
3845     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3846     break;
3847   case Bytecodes::_fast_sgetfield:


4283 
4284   // Note:  rax_callsite is already pushed by prepare_invoke
4285 
4286   // %%% should make a type profile for any invokedynamic that takes a ref argument
4287   // profile this call
4288   __ profile_call(rbcp);
4289   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4290 
4291   __ verify_oop(rax_callsite);
4292 
4293   __ jump_from_interpreted(rbx_method, rdx);
4294 }
4295 
4296 //-----------------------------------------------------------------------------
4297 // Allocation
4298 
4299 void TemplateTable::_new() {
4300   transition(vtos, atos);
4301   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4302   Label slow_case;

4303   Label done;


4304 
4305   __ get_cpool_and_tags(rcx, rax);
4306 
4307   // Make sure the class we're about to instantiate has been resolved.
4308   // This is done before loading InstanceKlass to be consistent with the order
4309   // in which the constant pool is updated (see ConstantPool::klass_at_put)
4310   const int tags_offset = Array<u1>::base_offset_in_bytes();
4311   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4312   __ jcc(Assembler::notEqual, slow_case);
4313 
4314   // get InstanceKlass
4315   __ load_resolved_klass_at_index(rcx, rcx, rdx);

4316 
4317   // make sure klass is initialized & doesn't have a finalizer

4318   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4319   __ jcc(Assembler::notEqual, slow_case);
4320 
4321   __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);

4322   __ jmp(done);

4323 
4324   // slow case
4325   __ bind(slow_case);


4326 
4327   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4328   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4329 
4330   __ get_constant_pool(rarg1);
4331   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4332   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4333   __ verify_oop(rax);
4334 
4335   // continue
4336   __ bind(done);
4337 }
4338 
4339 void TemplateTable::defaultvalue() {
4340   transition(vtos, atos);
4341 
4342   Label slow_case;
4343   Label done;
4344 
4345   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
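
Taken together, the new side of this change does two things in the hunks above: _fast_qgetfield now routes the flattened case through the read_flattened_field macro-assembler helper (saving the field offset in rdx around the flags/f1 loads and restoring it into rbx), and _new() replaces the open-coded TLAB/eden fast path with a single allocate_instance call. Below is a minimal C++ sketch of the _fast_qgetfield decision logic; the FieldEntry layout, the flag bit, the index mask, and the runtime callbacks are assumed stand-ins, not HotSpot declarations.

// A sketch of the decision logic emitted for _fast_qgetfield (assumed layout, not HotSpot code).
#include <cstdint>

struct FieldEntry {                       // stands in for a ConstantPoolCacheEntry
  uint32_t flags;                         // holds the "flattened" bit and the field index
  uint32_t offset;                        // f2: byte offset of the field in the holder
  const void* field_holder;               // f1: klass that declares the field
};

constexpr uint32_t kFlattenedBit   = 1u << 24;   // assumed bit position
constexpr uint32_t kFieldIndexMask = 0xffffu;    // assumed index mask

struct Runtime {                          // callbacks standing in for the VM entry points
  void* (*read_flattened_field)(void* obj, uint32_t field_index, const void* holder);
  void* (*default_value_for_field)(void* obj, uint32_t field_index);
};

void* qgetfield_sketch(void* obj, const FieldEntry& e, const Runtime& rt,
                       void* (*load_oop_at)(void* obj, uint32_t offset)) {
  if (e.flags & kFlattenedBit) {
    // Flattened case: the value is inlined in the holder, so the runtime
    // materializes a buffered instance from the holder klass and field index.
    return rt.read_flattened_field(obj, e.flags & kFieldIndexMask, e.field_holder);
  }
  void* v = load_oop_at(obj, e.offset);   // ordinary oop field load
  if (v == nullptr) {
    // The field holds null (uninitialized): return the default instance instead.
    v = rt.default_value_for_field(obj, e.flags & kFieldIndexMask);
  }
  return v;
}

The property the sketch preserves is that the bytecode never leaves null on the stack: a null (uninitialized) non-flattened field is replaced by its default value instance, and a flattened field is materialized by the runtime from the holder klass and field index.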

