src/cpu/x86/vm/c1_LIRAssembler_x86.cpp
Webrev navigation: Index | Unified diffs | Context diffs | Sdiffs | Patch | New | Old | Previous File | Next File — changeset 7118863, Sdiff view of src/cpu/x86/vm

src/cpu/x86/vm/c1_LIRAssembler_x86.cpp

Print this page




1541 
1542     case Bytecodes::_f2l:
1543     case Bytecodes::_d2l:
1544       assert(!src->is_xmm_register(), "input in xmm register not supported (no SSE instruction present)");
1545       assert(src->fpu() == 0, "input must be on TOS");
1546       assert(dest == FrameMap::long0_opr, "runtime stub places result in these registers");
1547 
1548       // instruction sequence too long to inline it here
1549       {
1550         __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::fpu2long_stub_id)));
1551       }
1552       break;
1553 
1554     default: ShouldNotReachHere();
1555   }
1556 }
1557 
     // Emits the fast path for instance allocation (LIR_OpAllocObj).
     // When the klass may not yet be initialized, tests its init_state and
     // branches to the slow-path stub; otherwise falls through into the
     // inline allocation sequence emitted by allocate_object().
1558 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
1559   if (op->init_check()) {
       // Compare instanceKlass::_init_state, located init_state_offset_in_bytes()
       // past the klassOop header (sizeof(oopDesc)), against fully_initialized.
1560     __ cmpl(Address(op->klass()->as_register(),
1561                     instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)),
1562             instanceKlass::fully_initialized);
       // NOTE(review): the debug info for the implicit null check is recorded
       // after the cmpl, so the recorded PC is the following instruction; later
       // JDK versions record it before the compare, since the cmpl is the
       // instruction that can actually fault -- confirm the intended ordering.
1563     add_debug_info_for_null_check_here(op->stub()->info());
       // Not fully initialized: take the slow path (runtime allocation + init).
1564     __ jcc(Assembler::notEqual, *op->stub()->entry());
1565   }
       // Inline allocation fast path; jumps to the stub entry on failure.
1566   __ allocate_object(op->obj()->as_register(),
1567                      op->tmp1()->as_register(),
1568                      op->tmp2()->as_register(),
1569                      op->header_size(),
1570                      op->object_size(),
1571                      op->klass()->as_register(),
1572                      *op->stub()->entry());
       // Slow path rejoins here with the result in op->obj().
1573   __ bind(*op->stub()->continuation());
1574 }
1575 
1576 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) {
1577   Register len =  op->len()->as_register();
1578   LP64_ONLY( __ movslq(len, len); )
1579 
1580   if (UseSlowPath ||
1581       (!UseFastNewObjectArray && (op->type() == T_OBJECT || op->type() == T_ARRAY)) ||


1713 #else
1714     if (k->is_loaded()) {
1715       __ cmpoop(Address(obj, oopDesc::klass_offset_in_bytes()), k->constant_encoding());
1716     } else {
1717       __ cmpptr(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes()));
1718     }
1719 #endif
1720     __ jcc(Assembler::notEqual, *failure_target);
1721     // successful cast, fall through to profile or jump
1722   } else {
1723     // get object class
1724     // not a safepoint as obj null check happens earlier
1725     __ load_klass(klass_RInfo, obj);
1726     if (k->is_loaded()) {
1727       // See if we get an immediate positive hit
1728 #ifdef _LP64
1729       __ cmpptr(k_RInfo, Address(klass_RInfo, k->super_check_offset()));
1730 #else
1731       __ cmpoop(Address(klass_RInfo, k->super_check_offset()), k->constant_encoding());
1732 #endif // _LP64
1733       if (sizeof(oopDesc) + Klass::secondary_super_cache_offset_in_bytes() != k->super_check_offset()) {
1734         __ jcc(Assembler::notEqual, *failure_target);
1735         // successful cast, fall through to profile or jump
1736       } else {
1737         // See if we get an immediate positive hit
1738         __ jcc(Assembler::equal, *success_target);
1739         // check for self
1740 #ifdef _LP64
1741         __ cmpptr(klass_RInfo, k_RInfo);
1742 #else
1743         __ cmpoop(klass_RInfo, k->constant_encoding());
1744 #endif // _LP64
1745         __ jcc(Assembler::equal, *success_target);
1746 
1747         __ push(klass_RInfo);
1748 #ifdef _LP64
1749         __ push(k_RInfo);
1750 #else
1751         __ pushoop(k->constant_encoding());
1752 #endif // _LP64
1753         __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));


1825     if (op->should_profile()) {
1826       Label not_null;
1827       __ jccb(Assembler::notEqual, not_null);
1828       // Object is null; update MDO and exit
1829       Register mdo  = klass_RInfo;
1830       __ movoop(mdo, md->constant_encoding());
1831       Address data_addr(mdo, md->byte_offset_of_slot(data, DataLayout::header_offset()));
1832       int header_bits = DataLayout::flag_mask_to_header_mask(BitData::null_seen_byte_constant());
1833       __ orl(data_addr, header_bits);
1834       __ jmp(done);
1835       __ bind(not_null);
1836     } else {
1837       __ jcc(Assembler::equal, done);
1838     }
1839 
1840     add_debug_info_for_null_check_here(op->info_for_exception());
1841     __ load_klass(k_RInfo, array);
1842     __ load_klass(klass_RInfo, value);
1843 
1844     // get instance klass (it's already uncompressed)
1845     __ movptr(k_RInfo, Address(k_RInfo, objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc)));
1846     // perform the fast part of the checking logic
1847     __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, success_target, failure_target, NULL);
1848     // call out-of-line instance of __ check_klass_subtype_slow_path(...):
1849     __ push(klass_RInfo);
1850     __ push(k_RInfo);
1851     __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
1852     __ pop(klass_RInfo);
1853     __ pop(k_RInfo);
1854     // result is a boolean
1855     __ cmpl(k_RInfo, 0);
1856     __ jcc(Assembler::equal, *failure_target);
1857     // fall through to the success case
1858 
1859     if (op->should_profile()) {
1860       Register mdo  = klass_RInfo, recv = k_RInfo;
1861       __ bind(profile_cast_success);
1862       __ movoop(mdo, md->constant_encoding());
1863       __ load_klass(recv, value);
1864       Label update_done;
1865       type_profile_helper(mdo, md, data, recv, &done);


3272 
3273       __ bind(slow);
3274       __ pop(dst);
3275       __ pop(src);
3276 
3277       address copyfunc_addr = StubRoutines::checkcast_arraycopy();
3278       if (copyfunc_addr != NULL) { // use stub if available
3279         // src is not a sub class of dst so we have to do a
3280         // per-element check.
3281 
3282         int mask = LIR_OpArrayCopy::src_objarray|LIR_OpArrayCopy::dst_objarray;
3283         if ((flags & mask) != mask) {
3284           // Check that at least both of them object arrays.
3285           assert(flags & mask, "one of the two should be known to be an object array");
3286 
3287           if (!(flags & LIR_OpArrayCopy::src_objarray)) {
3288             __ load_klass(tmp, src);
3289           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3290             __ load_klass(tmp, dst);
3291           }
3292           int lh_offset = klassOopDesc::header_size() * HeapWordSize +
3293             Klass::layout_helper_offset_in_bytes();
3294           Address klass_lh_addr(tmp, lh_offset);
3295           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3296           __ cmpl(klass_lh_addr, objArray_lh);
3297           __ jcc(Assembler::notEqual, *stub->entry());
3298         }
3299 
3300        // Spill because stubs can use any register they like and it's
3301        // easier to restore just those that we care about.
3302        store_parameter(dst, 0);
3303        store_parameter(dst_pos, 1);
3304        store_parameter(length, 2);
3305        store_parameter(src_pos, 3);
3306        store_parameter(src, 4);
3307 
3308 #ifndef _LP64
3309         __ movptr(tmp, dst_klass_addr);
3310         __ movptr(tmp, Address(tmp, objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc)));
3311         __ push(tmp);
3312         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset_in_bytes() + sizeof(oopDesc)));
3313         __ push(tmp);
3314         __ push(length);
3315         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3316         __ push(tmp);
3317         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3318         __ push(tmp);
3319 
3320         __ call_VM_leaf(copyfunc_addr, 5);
3321 #else
3322         __ movl2ptr(length, length); //higher 32bits must be null
3323 
3324         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3325         assert_different_registers(c_rarg0, dst, dst_pos, length);
3326         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3327         assert_different_registers(c_rarg1, dst, length);
3328 
3329         __ mov(c_rarg2, length);
3330         assert_different_registers(c_rarg2, dst);
3331 
3332 #ifdef _WIN64
3333         // Allocate abi space for args but be sure to keep stack aligned
3334         __ subptr(rsp, 6*wordSize);
3335         __ load_klass(c_rarg3, dst);
3336         __ movptr(c_rarg3, Address(c_rarg3, objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc)));
3337         store_parameter(c_rarg3, 4);
3338         __ movl(c_rarg3, Address(c_rarg3, Klass::super_check_offset_offset_in_bytes() + sizeof(oopDesc)));
3339         __ call(RuntimeAddress(copyfunc_addr));
3340         __ addptr(rsp, 6*wordSize);
3341 #else
3342         __ load_klass(c_rarg4, dst);
3343         __ movptr(c_rarg4, Address(c_rarg4, objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc)));
3344         __ movl(c_rarg3, Address(c_rarg4, Klass::super_check_offset_offset_in_bytes() + sizeof(oopDesc)));
3345         __ call(RuntimeAddress(copyfunc_addr));
3346 #endif
3347 
3348 #endif
3349 
3350 #ifndef PRODUCT
3351         if (PrintC1Statistics) {
3352           Label failed;
3353           __ testl(rax, rax);
3354           __ jcc(Assembler::notZero, failed);
3355           __ incrementl(ExternalAddress((address)&Runtime1::_arraycopy_checkcast_cnt));
3356           __ bind(failed);
3357         }
3358 #endif
3359 
3360         __ testl(rax, rax);
3361         __ jcc(Assembler::zero, *stub->continuation());
3362 
3363 #ifndef PRODUCT
3364         if (PrintC1Statistics) {




1541 
1542     case Bytecodes::_f2l:
1543     case Bytecodes::_d2l:
1544       assert(!src->is_xmm_register(), "input in xmm register not supported (no SSE instruction present)");
1545       assert(src->fpu() == 0, "input must be on TOS");
1546       assert(dest == FrameMap::long0_opr, "runtime stub places result in these registers");
1547 
1548       // instruction sequence too long to inline it here
1549       {
1550         __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::fpu2long_stub_id)));
1551       }
1552       break;
1553 
1554     default: ShouldNotReachHere();
1555   }
1556 }
1557 
     // Emits the fast path for instance allocation (LIR_OpAllocObj).
     // When the klass may not yet be initialized, tests its init_state and
     // branches to the slow-path stub; otherwise falls through into the
     // inline allocation sequence emitted by allocate_object().
1558 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
1559   if (op->init_check()) {
       // Compare instanceKlass::_init_state against fully_initialized. In this
       // changeset the accessor is init_state_offset(), which (per the old-side
       // diff) now folds in the klassOop header that callers previously added
       // as an explicit "+ sizeof(oopDesc)".
1560     __ cmpl(Address(op->klass()->as_register(),
1561                     instanceKlass::init_state_offset()),
1562             instanceKlass::fully_initialized);
       // NOTE(review): the debug info for the implicit null check is recorded
       // after the cmpl, so the recorded PC is the following instruction; later
       // JDK versions record it before the compare, since the cmpl is the
       // instruction that can actually fault -- confirm the intended ordering.
1563     add_debug_info_for_null_check_here(op->stub()->info());
       // Not fully initialized: take the slow path (runtime allocation + init).
1564     __ jcc(Assembler::notEqual, *op->stub()->entry());
1565   }
       // Inline allocation fast path; jumps to the stub entry on failure.
1566   __ allocate_object(op->obj()->as_register(),
1567                      op->tmp1()->as_register(),
1568                      op->tmp2()->as_register(),
1569                      op->header_size(),
1570                      op->object_size(),
1571                      op->klass()->as_register(),
1572                      *op->stub()->entry());
       // Slow path rejoins here with the result in op->obj().
1573   __ bind(*op->stub()->continuation());
1574 }
1575 
1576 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) {
1577   Register len =  op->len()->as_register();
1578   LP64_ONLY( __ movslq(len, len); )
1579 
1580   if (UseSlowPath ||
1581       (!UseFastNewObjectArray && (op->type() == T_OBJECT || op->type() == T_ARRAY)) ||


1713 #else
1714     if (k->is_loaded()) {
1715       __ cmpoop(Address(obj, oopDesc::klass_offset_in_bytes()), k->constant_encoding());
1716     } else {
1717       __ cmpptr(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes()));
1718     }
1719 #endif
1720     __ jcc(Assembler::notEqual, *failure_target);
1721     // successful cast, fall through to profile or jump
1722   } else {
1723     // get object class
1724     // not a safepoint as obj null check happens earlier
1725     __ load_klass(klass_RInfo, obj);
1726     if (k->is_loaded()) {
1727       // See if we get an immediate positive hit
1728 #ifdef _LP64
1729       __ cmpptr(k_RInfo, Address(klass_RInfo, k->super_check_offset()));
1730 #else
1731       __ cmpoop(Address(klass_RInfo, k->super_check_offset()), k->constant_encoding());
1732 #endif // _LP64
1733       if ((juint)in_bytes(Klass::secondary_super_cache_offset()) != k->super_check_offset()) {
1734         __ jcc(Assembler::notEqual, *failure_target);
1735         // successful cast, fall through to profile or jump
1736       } else {
1737         // See if we get an immediate positive hit
1738         __ jcc(Assembler::equal, *success_target);
1739         // check for self
1740 #ifdef _LP64
1741         __ cmpptr(klass_RInfo, k_RInfo);
1742 #else
1743         __ cmpoop(klass_RInfo, k->constant_encoding());
1744 #endif // _LP64
1745         __ jcc(Assembler::equal, *success_target);
1746 
1747         __ push(klass_RInfo);
1748 #ifdef _LP64
1749         __ push(k_RInfo);
1750 #else
1751         __ pushoop(k->constant_encoding());
1752 #endif // _LP64
1753         __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));


1825     if (op->should_profile()) {
1826       Label not_null;
1827       __ jccb(Assembler::notEqual, not_null);
1828       // Object is null; update MDO and exit
1829       Register mdo  = klass_RInfo;
1830       __ movoop(mdo, md->constant_encoding());
1831       Address data_addr(mdo, md->byte_offset_of_slot(data, DataLayout::header_offset()));
1832       int header_bits = DataLayout::flag_mask_to_header_mask(BitData::null_seen_byte_constant());
1833       __ orl(data_addr, header_bits);
1834       __ jmp(done);
1835       __ bind(not_null);
1836     } else {
1837       __ jcc(Assembler::equal, done);
1838     }
1839 
1840     add_debug_info_for_null_check_here(op->info_for_exception());
1841     __ load_klass(k_RInfo, array);
1842     __ load_klass(klass_RInfo, value);
1843 
1844     // get instance klass (it's already uncompressed)
1845     __ movptr(k_RInfo, Address(k_RInfo, objArrayKlass::element_klass_offset()));
1846     // perform the fast part of the checking logic
1847     __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, success_target, failure_target, NULL);
1848     // call out-of-line instance of __ check_klass_subtype_slow_path(...):
1849     __ push(klass_RInfo);
1850     __ push(k_RInfo);
1851     __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
1852     __ pop(klass_RInfo);
1853     __ pop(k_RInfo);
1854     // result is a boolean
1855     __ cmpl(k_RInfo, 0);
1856     __ jcc(Assembler::equal, *failure_target);
1857     // fall through to the success case
1858 
1859     if (op->should_profile()) {
1860       Register mdo  = klass_RInfo, recv = k_RInfo;
1861       __ bind(profile_cast_success);
1862       __ movoop(mdo, md->constant_encoding());
1863       __ load_klass(recv, value);
1864       Label update_done;
1865       type_profile_helper(mdo, md, data, recv, &done);


3272 
3273       __ bind(slow);
3274       __ pop(dst);
3275       __ pop(src);
3276 
3277       address copyfunc_addr = StubRoutines::checkcast_arraycopy();
3278       if (copyfunc_addr != NULL) { // use stub if available
3279         // src is not a sub class of dst so we have to do a
3280         // per-element check.
3281 
3282         int mask = LIR_OpArrayCopy::src_objarray|LIR_OpArrayCopy::dst_objarray;
3283         if ((flags & mask) != mask) {
3284           // Check that at least both of them object arrays.
3285           assert(flags & mask, "one of the two should be known to be an object array");
3286 
3287           if (!(flags & LIR_OpArrayCopy::src_objarray)) {
3288             __ load_klass(tmp, src);
3289           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3290             __ load_klass(tmp, dst);
3291           }
3292           int lh_offset = in_bytes(Klass::layout_helper_offset());

3293           Address klass_lh_addr(tmp, lh_offset);
3294           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3295           __ cmpl(klass_lh_addr, objArray_lh);
3296           __ jcc(Assembler::notEqual, *stub->entry());
3297         }
3298 
3299        // Spill because stubs can use any register they like and it's
3300        // easier to restore just those that we care about.
3301        store_parameter(dst, 0);
3302        store_parameter(dst_pos, 1);
3303        store_parameter(length, 2);
3304        store_parameter(src_pos, 3);
3305        store_parameter(src, 4);
3306 
3307 #ifndef _LP64
3308         __ movptr(tmp, dst_klass_addr);
3309         __ movptr(tmp, Address(tmp, objArrayKlass::element_klass_offset()));
3310         __ push(tmp);
3311         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset()));
3312         __ push(tmp);
3313         __ push(length);
3314         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3315         __ push(tmp);
3316         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3317         __ push(tmp);
3318 
3319         __ call_VM_leaf(copyfunc_addr, 5);
3320 #else
3321         __ movl2ptr(length, length); //higher 32bits must be null
3322 
3323         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3324         assert_different_registers(c_rarg0, dst, dst_pos, length);
3325         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3326         assert_different_registers(c_rarg1, dst, length);
3327 
3328         __ mov(c_rarg2, length);
3329         assert_different_registers(c_rarg2, dst);
3330 
3331 #ifdef _WIN64
3332         // Allocate abi space for args but be sure to keep stack aligned
3333         __ subptr(rsp, 6*wordSize);
3334         __ load_klass(c_rarg3, dst);
3335         __ movptr(c_rarg3, Address(c_rarg3, objArrayKlass::element_klass_offset()));
3336         store_parameter(c_rarg3, 4);
3337         __ movl(c_rarg3, Address(c_rarg3, Klass::super_check_offset_offset()));
3338         __ call(RuntimeAddress(copyfunc_addr));
3339         __ addptr(rsp, 6*wordSize);
3340 #else
3341         __ load_klass(c_rarg4, dst);
3342         __ movptr(c_rarg4, Address(c_rarg4, objArrayKlass::element_klass_offset()));
3343         __ movl(c_rarg3, Address(c_rarg4, Klass::super_check_offset_offset()));
3344         __ call(RuntimeAddress(copyfunc_addr));
3345 #endif
3346 
3347 #endif
3348 
3349 #ifndef PRODUCT
3350         if (PrintC1Statistics) {
3351           Label failed;
3352           __ testl(rax, rax);
3353           __ jcc(Assembler::notZero, failed);
3354           __ incrementl(ExternalAddress((address)&Runtime1::_arraycopy_checkcast_cnt));
3355           __ bind(failed);
3356         }
3357 #endif
3358 
3359         __ testl(rax, rax);
3360         __ jcc(Assembler::zero, *stub->continuation());
3361 
3362 #ifndef PRODUCT
3363         if (PrintC1Statistics) {


src/cpu/x86/vm/c1_LIRAssembler_x86.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File