< prev index next >

src/hotspot/cpu/x86/templateTable_x86.cpp

Print this page




3801       __ movptr(rscratch1, Address(rcx, rbx, Address::times_ptr,
3802                                    in_bytes(ConstantPoolCache::base_offset() +
3803                                             ConstantPoolCacheEntry::flags_offset())));
3804       __ test_field_is_flattened(rscratch1, rscratch2, isFlattened);
3805         // Non-flattened field case
3806         __ movptr(rscratch1, rax);
3807         __ load_heap_oop(rax, field);
3808         __ testptr(rax, rax);
3809         __ jcc(Assembler::notZero, nonnull);
3810           __ movptr(rax, rscratch1);
3811           __ movl(rcx, Address(rcx, rbx, Address::times_ptr,
3812                              in_bytes(ConstantPoolCache::base_offset() +
3813                                       ConstantPoolCacheEntry::flags_offset())));
3814           __ andl(rcx, ConstantPoolCacheEntry::field_index_mask);
3815           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3816                      rax, rcx);
3817         __ bind(nonnull);
3818         __ verify_oop(rax);
3819         __ jmp(Done);
3820       __ bind(isFlattened);

3821         __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3822                            in_bytes(ConstantPoolCache::base_offset() +
3823                                     ConstantPoolCacheEntry::flags_offset())));
3824         __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3825         __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3826                                      in_bytes(ConstantPoolCache::base_offset() +
3827                                               ConstantPoolCacheEntry::f1_offset())));
3828         call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field),
3829                 rax, rdx, rcx);
3830         __ verify_oop(rax);
3831       __ bind(Done);

3832     }
3833     break;
3834   case Bytecodes::_fast_agetfield:
3835     do_oop_load(_masm, field, rax);
3836     __ verify_oop(rax);
3837     break;
3838   case Bytecodes::_fast_lgetfield:
3839 #ifdef _LP64
3840     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3841 #else
3842   __ stop("should not be rewritten");
3843 #endif
3844     break;
3845   case Bytecodes::_fast_igetfield:
3846     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3847     break;
3848   case Bytecodes::_fast_bgetfield:
3849     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3850     break;
3851   case Bytecodes::_fast_sgetfield:


4287 
4288   // Note:  rax_callsite is already pushed by prepare_invoke
4289 
4290   // %%% should make a type profile for any invokedynamic that takes a ref argument
4291   // profile this call
4292   __ profile_call(rbcp);
4293   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4294 
4295   __ verify_oop(rax_callsite);
4296 
4297   __ jump_from_interpreted(rbx_method, rdx);
4298 }
4299 
4300 //-----------------------------------------------------------------------------
4301 // Allocation
4302 
// TemplateTable::_new -- emits the interpreter fast path for the `new`
// bytecode: resolve the class from the constant pool, fast-allocate in the
// TLAB (or shared eden, if inline contiguous allocation is supported), zero
// the instance fields, install the object header, and fall back to
// InterpreterRuntime::_new for every case the fast path cannot handle.
// On exit rax holds the new oop (atos).
4303 void TemplateTable::_new() {
4304   transition(vtos, atos);
     // rdx = unsigned 2-byte constant-pool index taken from the bytecode stream
4305   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4306   Label slow_case;
     // slow_case_no_pop: taken before the klass is pushed at 4323, so it must
     // not pop; slow_case is taken after the push and pops first (see 4441).
4307   Label slow_case_no_pop;
4308   Label done;
4309   Label initialize_header;
4310   Label initialize_object;  // including clearing the fields
4311 
     // rcx = ConstantPool*, rax = tags array
4312   __ get_cpool_and_tags(rcx, rax);
4313 
4314   // Make sure the class we're about to instantiate has been resolved.
4315   // This is done before loading InstanceKlass to be consistent with the order
4316   // how Constant Pool is updated (see ConstantPool::klass_at_put)
4317   const int tags_offset = Array<u1>::base_offset_in_bytes();
4318   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4319   __ jcc(Assembler::notEqual, slow_case_no_pop);
4320 
4321   // get InstanceKlass
4322   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4323   __ push(rcx);  // save the contexts of klass for initializing the header
4324 
4325   // make sure klass is fully initialized (the finalizer / slow-path check
4326   // is done separately below via the layout helper)
4327   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4328   __ jcc(Assembler::notEqual, slow_case);
4329 
4330   // get instance_size in InstanceKlass (scaled to a count of bytes)
4331   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4332   // test to see if it has a finalizer or is malformed in some way
4333   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4334   __ jcc(Assembler::notZero, slow_case);
4335 
4336   // Allocate the instance:
4337   //  If TLAB is enabled:
4338   //    Try to allocate in the TLAB.
4339   //    If fails, go to the slow path.
4340   //  Else If inline contiguous allocations are enabled:
4341   //    Try to allocate in eden.
4342   //    If fails due to heap end, go to slow path.
4343   //
4344   //  If TLAB is enabled OR inline contiguous is enabled:
4345   //    Initialize the allocation.
4346   //    Exit.
4347   //
4348   //  Go to slow path.
4349 
4350   const bool allow_shared_alloc =
4351     Universe::heap()->supports_inline_contig_alloc();
4352 
     // On 32-bit the thread must be loaded explicitly; on 64-bit it lives in r15.
4353   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4354 #ifndef _LP64
4355   if (UseTLAB || allow_shared_alloc) {
4356     __ get_thread(thread);
4357   }
4358 #endif // _LP64
4359 
4360   if (UseTLAB) {
4361     __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4362     if (ZeroTLAB) {
4363       // the fields have been already cleared
4364       __ jmp(initialize_header);
4365     } else {
4366       // initialize both the header and fields
4367       __ jmp(initialize_object);
4368     }
4369   } else {
4370     // Allocation in the shared Eden, if allowed.
4371     //
4372     // rdx: instance size in bytes
4373     __ eden_allocate(thread, rax, rdx, 0, rbx, slow_case);
4374   }
4375 
4376   // If UseTLAB or allow_shared_alloc are true, the object is created above and
4377   // there is an initialize need. Otherwise, skip and go to the slow path.
4378   if (UseTLAB || allow_shared_alloc) {
4379     // The object is initialized before the header.  If the object size is
4380     // zero, go directly to the header initialization.
4381     __ bind(initialize_object);
     // rdx = instance size minus the header; zero means there are no fields to clear
4382     __ decrement(rdx, sizeof(oopDesc));
4383     __ jcc(Assembler::zero, initialize_header);
4384 
4385     // Initialize topmost object field, divide rdx by 8, check if odd and
4386     // test if zero.
4387     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
4388     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4389 
4390     // rdx must have been multiple of 8
4391 #ifdef ASSERT
4392     // make sure rdx was multiple of 8
4393     Label L;
4394     // Ignore partial flag stall after shrl() since it is debug VM
4395     __ jcc(Assembler::carryClear, L);
4396     __ stop("object size is not multiple of 2 - adjust this code");
4397     __ bind(L);
4398     // rdx must be > 0, no extra check needed here
4399 #endif
4400 
4401     // initialize remaining object fields: rdx was a multiple of 8
4402     { Label loop;
4403     __ bind(loop);
     // Clear 8 bytes per iteration (one movptr on 64-bit, two on 32-bit).
4404     __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4405     NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4406     __ decrement(rdx);
4407     __ jcc(Assembler::notZero, loop);
4408     }
4409 
4410     // initialize object header only.
4411     __ bind(initialize_header);
4412     if (UseBiasedLocking) {
4413       __ pop(rcx);   // get saved klass back in the register.
       // With biased locking the mark word comes from the klass prototype header.
4414       __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
4415       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx);
4416     } else {
4417       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()),
4418                 (intptr_t)markWord::prototype().value()); // header
4419       __ pop(rcx);   // get saved klass back in the register.
4420     }
4421 #ifdef _LP64
4422     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4423     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4424 #endif
4425     __ store_klass(rax, rcx);  // klass
4426 
4427     {
4428       SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
4429       // Trigger dtrace event for fastpath
4430       __ push(atos);
4431       __ call_VM_leaf(
4432            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
4433       __ pop(atos);
4434     }
4435 
4436     __ jmp(done);
4437   }
4438 
4439   // slow case
4440   __ bind(slow_case);
     // Undo the klass push from 4323 before entering the runtime.
4441   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4442   __ bind(slow_case_no_pop);
4443 
4444   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4445   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4446 
4447   __ get_constant_pool(rarg1);
4448   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4449   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4450    __ verify_oop(rax);
4451 
4452   // continue
4453   __ bind(done);
4454 }
4455 
// TemplateTable::defaultvalue -- emits interpreter code for the Valhalla
// `defaultvalue` bytecode: resolve the value class from the constant pool
// and push its pre-allocated default value instance (loaded from the class
// mirror). Falls back to InterpreterRuntime::defaultvalue when the class is
// unresolved or not fully initialized. On exit rax holds the oop (atos).
4456 void TemplateTable::defaultvalue() {
4457   transition(vtos, atos);
4458 
4459   Label slow_case;
4460   Label done;
4461 
     // rdx = constant-pool index; rcx = ConstantPool*, rax = tags array
4462   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4463   __ get_cpool_and_tags(rcx, rax);
4464 
4465   // Make sure the class we're about to instantiate has been resolved.
4466   // This is done before loading InstanceKlass to be consistent with the order
4467   // how Constant Pool is updated (see ConstantPool::klass_at_put)
4468   const int tags_offset = Array<u1>::base_offset_in_bytes();
4469   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4470   __ jcc(Assembler::notEqual, slow_case);
4471 
4472   // get InstanceKlass
4473   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4474 
4475   // make sure klass is fully initialized
4476   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4477   __ jcc(Assembler::notEqual, slow_case);
4478 
4479   // Getting the offset of the pre-allocated default value
4480   __ movptr(rdx, Address(rcx, in_bytes(InstanceKlass::adr_valueklass_fixed_block_offset())));
     // rdx now holds the byte offset of the default-value field within the mirror
4481   __ movl(rdx, Address(rdx, in_bytes(ValueKlass::default_value_offset_offset())));
4482 
4483   // Getting the mirror
4484   __ movptr(rbx, Address(rcx, in_bytes(Klass::java_mirror_offset())));
4485   __ resolve_oop_handle(rbx, rcx);
4486   __ verify_oop(rbx);
4487 
4488   // Getting the pre-allocated default value from the mirror
4489   Address field(rbx, rdx, Address::times_1);
4490   do_oop_load(_masm, field, rax);
4491 
4492   __ jmp(done);
4493 
4494   __ bind(slow_case);
4495 
4496   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4497   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4498 
     // Re-read the CP index: rdx may have been clobbered on the fast-path probe.
4499   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4500   __ get_constant_pool(rarg1);
4501 
4502   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
4503       rarg1, rarg2);
4504 
4505   __ bind(done);
4506   __ verify_oop(rax);
4507 }
4508 
4509 void TemplateTable::newarray() {
4510   transition(itos, atos);
4511   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);




3801       __ movptr(rscratch1, Address(rcx, rbx, Address::times_ptr,
3802                                    in_bytes(ConstantPoolCache::base_offset() +
3803                                             ConstantPoolCacheEntry::flags_offset())));
3804       __ test_field_is_flattened(rscratch1, rscratch2, isFlattened);
3805         // Non-flattened field case
3806         __ movptr(rscratch1, rax);
3807         __ load_heap_oop(rax, field);
3808         __ testptr(rax, rax);
3809         __ jcc(Assembler::notZero, nonnull);
3810           __ movptr(rax, rscratch1);
3811           __ movl(rcx, Address(rcx, rbx, Address::times_ptr,
3812                              in_bytes(ConstantPoolCache::base_offset() +
3813                                       ConstantPoolCacheEntry::flags_offset())));
3814           __ andl(rcx, ConstantPoolCacheEntry::field_index_mask);
3815           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3816                      rax, rcx);
3817         __ bind(nonnull);
3818         __ verify_oop(rax);
3819         __ jmp(Done);
3820       __ bind(isFlattened);
3821         __ push(rdx); // save offset
3822         __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3823                            in_bytes(ConstantPoolCache::base_offset() +
3824                                     ConstantPoolCacheEntry::flags_offset())));
3825         __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3826         __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3827                                      in_bytes(ConstantPoolCache::base_offset() +
3828                                               ConstantPoolCacheEntry::f1_offset())));
3829         __ pop(rbx); // restore offset
3830         __ read_flattened_field(rcx, rdx, rbx, rax);

3831       __ bind(Done);
3832       __ verify_oop(rax);
3833     }
3834     break;
3835   case Bytecodes::_fast_agetfield:
3836     do_oop_load(_masm, field, rax);
3837     __ verify_oop(rax);
3838     break;
3839   case Bytecodes::_fast_lgetfield:
3840 #ifdef _LP64
3841     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3842 #else
3843   __ stop("should not be rewritten");
3844 #endif
3845     break;
3846   case Bytecodes::_fast_igetfield:
3847     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3848     break;
3849   case Bytecodes::_fast_bgetfield:
3850     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3851     break;
3852   case Bytecodes::_fast_sgetfield:


4288 
4289   // Note:  rax_callsite is already pushed by prepare_invoke
4290 
4291   // %%% should make a type profile for any invokedynamic that takes a ref argument
4292   // profile this call
4293   __ profile_call(rbcp);
4294   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4295 
4296   __ verify_oop(rax_callsite);
4297 
4298   __ jump_from_interpreted(rbx_method, rdx);
4299 }
4300 
4301 //-----------------------------------------------------------------------------
4302 // Allocation
4303 
// TemplateTable::_new (refactored version) -- emits the interpreter fast path
// for the `new` bytecode. The open-coded TLAB/eden allocation, field zeroing
// and header initialization have been folded into the single
// MacroAssembler::allocate_instance() call below; this function now only
// resolves the class, checks it is fully initialized, and falls back to
// InterpreterRuntime::_new on any failure. On exit rax holds the oop (atos).
4304 void TemplateTable::_new() {
4305   transition(vtos, atos);
     // rdx = unsigned 2-byte constant-pool index taken from the bytecode stream
4306   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4307   Label slow_case;
4308   Label done;
4309 
     // rcx = ConstantPool*, rax = tags array
4310   __ get_cpool_and_tags(rcx, rax);
4311 
4312   // Make sure the class we're about to instantiate has been resolved.
4313   // This is done before loading InstanceKlass to be consistent with the order
4314   // how Constant Pool is updated (see ConstantPool::klass_at_put)
4315   const int tags_offset = Array<u1>::base_offset_in_bytes();
4316   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4317   __ jcc(Assembler::notEqual, slow_case);
4318 
4319   // get InstanceKlass
4320   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4321 
4322   // make sure klass is initialized & doesn't have finalizer
4323   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4324   __ jcc(Assembler::notEqual, slow_case);
4325 
     // Fast-path allocation: klass in rcx, result oop in rax; rdx and rbx are
     // temps. Presumably allocate_instance also performs the layout-helper /
     // finalizer check and the field zeroing formerly open-coded here, and the
     // `true` flag selects that slow-path check -- TODO confirm against
     // MacroAssembler::allocate_instance. Jumps to slow_case on failure.
4326   __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);

4327   __ jmp(done);
4328 
4329   // slow case
4330   __ bind(slow_case);
4331 
4332   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4333   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4334 
4335   __ get_constant_pool(rarg1);
4336   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4337   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4338    __ verify_oop(rax);
4339 
4340   // continue
4341   __ bind(done);
4342 }
4343 
// TemplateTable::defaultvalue (refactored version) -- emits interpreter code
// for the Valhalla `defaultvalue` bytecode: resolve the value class and push
// its pre-allocated default value instance. The mirror walk formerly
// open-coded here is folded into MacroAssembler::get_default_value_oop().
// Falls back to InterpreterRuntime::defaultvalue when the class is unresolved
// or not fully initialized. On exit rax holds the oop (atos).
4344 void TemplateTable::defaultvalue() {
4345   transition(vtos, atos);
4346 
4347   Label slow_case;
4348   Label done;
4349 
     // rdx = constant-pool index; rcx = ConstantPool*, rax = tags array
4350   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4351   __ get_cpool_and_tags(rcx, rax);
4352 
4353   // Make sure the class we're about to instantiate has been resolved.
4354   // This is done before loading InstanceKlass to be consistent with the order
4355   // how Constant Pool is updated (see ConstantPool::klass_at_put)
4356   const int tags_offset = Array<u1>::base_offset_in_bytes();
4357   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4358   __ jcc(Assembler::notEqual, slow_case);
4359 
4360   // get InstanceKlass
4361   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4362 
4363   // make sure klass is fully initialized
4364   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4365   __ jcc(Assembler::notEqual, slow_case);
4366 
4367   // have a resolved ValueKlass in rcx, return the default value oop from it
     // (rdx is a temp; result lands in rax -- TODO confirm register roles
     // against MacroAssembler::get_default_value_oop)
4368   __ get_default_value_oop(rcx, rdx, rax);

4369   __ jmp(done);
4370 
4371   __ bind(slow_case);
4372 
4373   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4374   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4375 
     // Re-read the CP index: rdx may have been clobbered on the fast-path probe.
4376   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4377   __ get_constant_pool(rarg1);
4378 
4379   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
4380       rarg1, rarg2);
4381 
4382   __ bind(done);
4383   __ verify_oop(rax);
4384 }
4385 
4386 void TemplateTable::newarray() {
4387   transition(itos, atos);
4388   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);


< prev index next >