src/cpu/sparc/vm/c1_LIRAssembler_sparc.cpp
Sdiff for 7118863


Old version:

2185 
2186       __ cmp(G3, 0);
2187       if (copyfunc_addr != NULL) { // use stub if available
2188         // src is not a sub class of dst so we have to do a
2189         // per-element check.
2190         __ br(Assembler::notEqual, false, Assembler::pt, cont);
2191         __ delayed()->nop();
2192 
2193         __ bind(slow);
2194 
2195         int mask = LIR_OpArrayCopy::src_objarray|LIR_OpArrayCopy::dst_objarray;
2196         if ((flags & mask) != mask) {
2197           // One of them is known to be an object array; check that the other one is too.
2198           assert(flags & mask, "one of the two should be known to be an object array");
2199 
2200           if (!(flags & LIR_OpArrayCopy::src_objarray)) {
2201             __ load_klass(src, tmp);
2202           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
2203             __ load_klass(dst, tmp);
2204           }
2205           int lh_offset = klassOopDesc::header_size() * HeapWordSize +
2206             Klass::layout_helper_offset_in_bytes();
2207 
2208           __ lduw(tmp, lh_offset, tmp2);
2209 
2210           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
2211           __ set(objArray_lh, tmp);
2212           __ cmp(tmp, tmp2);
2213           __ br(Assembler::notEqual, false, Assembler::pt,  *stub->entry());
2214           __ delayed()->nop();
2215         }
2216 
2217         Register src_ptr = O0;
2218         Register dst_ptr = O1;
2219         Register len     = O2;
2220         Register chk_off = O3;
2221         Register super_k = O4;
2222 
2223         __ add(src, arrayOopDesc::base_offset_in_bytes(basic_type), src_ptr);
2224         if (shift == 0) {
2225           __ add(src_ptr, src_pos, src_ptr);
2226         } else {
2227           __ sll(src_pos, shift, tmp);
2228           __ add(src_ptr, tmp, src_ptr);
2229         }
2230 
2231         __ add(dst, arrayOopDesc::base_offset_in_bytes(basic_type), dst_ptr);
2232         if (shift == 0) {
2233           __ add(dst_ptr, dst_pos, dst_ptr);
2234         } else {
2235           __ sll(dst_pos, shift, tmp);
2236           __ add(dst_ptr, tmp, dst_ptr);
2237         }
2238         __ mov(length, len);
2239         __ load_klass(dst, tmp);
2240 
2241         int ek_offset = (klassOopDesc::header_size() * HeapWordSize +
2242                          objArrayKlass::element_klass_offset_in_bytes());
2243         __ ld_ptr(tmp, ek_offset, super_k);
2244 
2245         int sco_offset = (klassOopDesc::header_size() * HeapWordSize +
2246                           Klass::super_check_offset_offset_in_bytes());
2247         __ lduw(super_k, sco_offset, chk_off);
2248 
2249         __ call_VM_leaf(tmp, copyfunc_addr);
2250 
2251 #ifndef PRODUCT
2252         if (PrintC1Statistics) {
2253           Label failed;
2254           __ br_notnull_short(O0, Assembler::pn, failed);
2255           __ inc_counter((address)&Runtime1::_arraycopy_checkcast_cnt, G1, G3);
2256           __ bind(failed);
2257         }
2258 #endif
2259 
2260         __ br_null(O0, false, Assembler::pt,  *stub->continuation());
2261         __ delayed()->xor3(O0, -1, tmp);
2262 
2263 #ifndef PRODUCT
2264         if (PrintC1Statistics) {
2265           __ inc_counter((address)&Runtime1::_arraycopy_checkcast_attempt_cnt, G1, G3);
2266         }
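
This hunk marshals the arguments for the generic checkcast arraycopy stub: O0/O1 receive the addresses of src[src_pos] and dst[dst_pos] (array base offset plus the position scaled by the element shift), O2 the length, O4 the destination's element klass, and O3 that klass's super_check_offset. The stub's convention, as the xor3(O0, -1, tmp) in the branch delay slot suggests, is to return 0 on complete success and otherwise the one's complement of the number of elements copied before the type check failed. A rough C++ model of that contract (types and the element test are stand-ins, not HotSpot code):

    // Hypothetical model of the checkcast arraycopy contract relied on above; the
    // element type test is a stand-in (always true here), not HotSpot's subtype check.
    struct Obj;                                        // opaque stand-in for a heap reference

    static bool is_assignable_stub(Obj* /*elem*/, void* /*super_k*/) { return true; }

    static int checkcast_copy_sketch(Obj** src, Obj** dst, int len, void* super_k) {
      for (int i = 0; i < len; i++) {
        Obj* elem = src[i];
        if (elem != nullptr && !is_assignable_stub(elem, super_k)) {
          return ~i;            // failure: one's complement of the elements already copied
        }
        dst[i] = elem;          // the real stub also applies the GC write barrier
      }
      return 0;                 // 0 means every element was stored successfully
    }
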


2439     switch (code) {
2440       case lir_shl:  __ sllx  (left->as_pointer_register(), count, dest->as_pointer_register()); break;
2441       case lir_shr:  __ srax  (left->as_pointer_register(), count, dest->as_pointer_register()); break;
2442       case lir_ushr: __ srlx  (left->as_pointer_register(), count, dest->as_pointer_register()); break;
2443       default: ShouldNotReachHere();
2444     }
2445   } else {
2446     ShouldNotReachHere();
2447   }
2448 }
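
For 64-bit operands the three LIR shift codes map directly to the SPARC extended shifts: sllx (logical left), srax (arithmetic right, sign-propagating) and srlx (logical right, zero-filling), matching Java's <<, >> and >>> on longs. A small self-contained illustration of the three behaviors:

    // Illustration of the three shift flavors emitted above (values chosen for clarity).
    #include <cstdint>
    #include <cstdio>

    int main() {
      std::int64_t x = -16;                                           // 0xFFFF...F0
      std::printf("%lld\n", (long long)(x << 2));                     // sllx: -64
      std::printf("%lld\n", (long long)(x >> 2));                     // srax: -4, sign bits shifted in
      std::printf("%llu\n", (unsigned long long)((std::uint64_t)x >> 2)); // srlx: zero-filled
      return 0;
    }
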
2449 
2450 
2451 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
2452   assert(op->tmp1()->as_register()  == G1 &&
2453          op->tmp2()->as_register()  == G3 &&
2454          op->tmp3()->as_register()  == G4 &&
2455          op->obj()->as_register()   == O0 &&
2456          op->klass()->as_register() == G5, "must be");
2457   if (op->init_check()) {
2458     __ ld(op->klass()->as_register(),
2459           instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc),
2460           op->tmp1()->as_register());
2461     add_debug_info_for_null_check_here(op->stub()->info());
2462     __ cmp(op->tmp1()->as_register(), instanceKlass::fully_initialized);
2463     __ br(Assembler::notEqual, false, Assembler::pn, *op->stub()->entry());
2464     __ delayed()->nop();
2465   }
2466   __ allocate_object(op->obj()->as_register(),
2467                      op->tmp1()->as_register(),
2468                      op->tmp2()->as_register(),
2469                      op->tmp3()->as_register(),
2470                      op->header_size(),
2471                      op->object_size(),
2472                      op->klass()->as_register(),
2473                      *op->stub()->entry());
2474   __ bind(*op->stub()->continuation());
2475   __ verify_oop(op->obj()->as_register());
2476 }
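
emit_alloc_obj pins its operands to fixed registers, optionally emits the class-initialization check (load init_state, branch to the slow-path stub unless the klass is fully_initialized), then lets allocate_object try the inline allocation with the same stub as its failure path; the stub's continuation label is bound right after. A hedged outline of that control flow, with stand-in names rather than HotSpot helpers:

    // Hypothetical outline of the control flow emit_alloc_obj generates (stand-in types).
    enum InitState { being_initialized, fully_initialized };
    struct InstanceKlassSketch { InitState init_state; };

    void* new_instance_sketch(InstanceKlassSketch* k, bool init_check,
                              void* (*inline_alloc)(InstanceKlassSketch*),
                              void* (*slow_stub)(InstanceKlassSketch*)) {
      if (init_check && k->init_state != fully_initialized) {
        return slow_stub(k);                 // class not yet initialized: stub may run <clinit>
      }
      void* obj = inline_alloc(k);           // fast path emitted by allocate_object
      return obj != nullptr ? obj : slow_stub(k);   // stub continuation rejoins here
    }
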
2477 
2478 
2479 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) {


2610   // patching may screw with our temporaries on sparc,
2611   // so let's do it before loading the class
2612   if (k->is_loaded()) {
2613     jobject2reg(k->constant_encoding(), k_RInfo);
2614   } else {
2615     jobject2reg_with_patching(k_RInfo, op->info_for_patch());
2616   }
2617   assert(obj != k_RInfo, "must be different");
2618 
2619   // get object class
2620   // not a safepoint as obj null check happens earlier
2621   __ load_klass(obj, klass_RInfo);
2622   if (op->fast_check()) {
2623     assert_different_registers(klass_RInfo, k_RInfo);
2624     __ cmp(k_RInfo, klass_RInfo);
2625     __ brx(Assembler::notEqual, false, Assembler::pt, *failure_target);
2626     __ delayed()->nop();
2627   } else {
2628     bool need_slow_path = true;
2629     if (k->is_loaded()) {
2630       if (k->super_check_offset() != sizeof(oopDesc) + Klass::secondary_super_cache_offset_in_bytes())
2631         need_slow_path = false;
2632       // perform the fast part of the checking logic
2633       __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, noreg,
2634                                        (need_slow_path ? success_target : NULL),
2635                                        failure_target, NULL,
2636                                        RegisterOrConstant(k->super_check_offset()));
2637     } else {
2638       // perform the fast part of the checking logic
2639       __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, O7, success_target,
2640                                        failure_target, NULL);
2641     }
2642     if (need_slow_path) {
2643       // call out-of-line instance of __ check_klass_subtype_slow_path(...):
2644       assert(klass_RInfo == G3 && k_RInfo == G1, "incorrect call setup");
2645       __ call(Runtime1::entry_for(Runtime1::slow_subtype_check_id), relocInfo::runtime_call_type);
2646       __ delayed()->nop();
2647       __ cmp(G3, 0);
2648       __ br(Assembler::equal, false, Assembler::pn, *failure_target);
2649       __ delayed()->nop();
2650       // Fall through to success case


2714       Register data_val = Rtmp1;
2715       jobject2reg(md->constant_encoding(), mdo);
2716       if (mdo_offset_bias > 0) {
2717         __ set(mdo_offset_bias, data_val);
2718         __ add(mdo, data_val, mdo);
2719       }
2720       Address flags_addr(mdo, md->byte_offset_of_slot(data, DataLayout::flags_offset()) - mdo_offset_bias);
2721       __ ldub(flags_addr, data_val);
2722       __ or3(data_val, BitData::null_seen_byte_constant(), data_val);
2723       __ stb(data_val, flags_addr);
2724       __ ba_short(done);
2725       __ bind(not_null);
2726     } else {
2727       __ br_null_short(value, Assembler::pn, done);
2728     }
2729     add_debug_info_for_null_check_here(op->info_for_exception());
2730     __ load_klass(array, k_RInfo);
2731     __ load_klass(value, klass_RInfo);
2732 
2733     // get instance klass
2734     __ ld_ptr(Address(k_RInfo, objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc)), k_RInfo);
2735     // perform the fast part of the checking logic
2736     __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, O7, success_target, failure_target, NULL);
2737 
2738     // call out-of-line instance of __ check_klass_subtype_slow_path(...):
2739     assert(klass_RInfo == G3 && k_RInfo == G1, "incorrect call setup");
2740     __ call(Runtime1::entry_for(Runtime1::slow_subtype_check_id), relocInfo::runtime_call_type);
2741     __ delayed()->nop();
2742     __ cmp(G3, 0);
2743     __ br(Assembler::equal, false, Assembler::pn, *failure_target);
2744     __ delayed()->nop();
2745     // fall through to the success case
2746 
2747     if (op->should_profile()) {
2748       Register mdo  = klass_RInfo, recv = k_RInfo, tmp1 = Rtmp1;
2749       assert_different_registers(value, mdo, recv, tmp1);
2750       __ bind(profile_cast_success);
2751       jobject2reg(md->constant_encoding(), mdo);
2752       if (mdo_offset_bias > 0) {
2753         __ set(mdo_offset_bias, tmp1);
2754         __ add(mdo, tmp1, mdo);
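
In the profiled aastore path, a null value is recorded by OR-ing BitData's null_seen bit into the flags byte of this bytecode's profile slot and jumping straight to done, skipping the type check; non-null values fall through to the array element-klass subtype check and, on success, the receiver-type profile update shown here. A minimal sketch of the flag update (slot layout and constant are stand-ins):

    // Hypothetical sketch of the null_seen update done on the null branch above.
    #include <cstdint>

    struct ProfileSlotSketch { std::uint8_t flags; /* ... counters ... */ };
    const std::uint8_t NULL_SEEN_BIT = 0x01;   // stand-in for BitData::null_seen_byte_constant()

    void record_null_store(ProfileSlotSketch* slot) {
      slot->flags = std::uint8_t(slot->flags | NULL_SEEN_BIT);   // the ldub / or3 / stb sequence
    }
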




New version:

2185 
2186       __ cmp(G3, 0);
2187       if (copyfunc_addr != NULL) { // use stub if available
2188         // src is not a sub class of dst so we have to do a
2189         // per-element check.
2190         __ br(Assembler::notEqual, false, Assembler::pt, cont);
2191         __ delayed()->nop();
2192 
2193         __ bind(slow);
2194 
2195         int mask = LIR_OpArrayCopy::src_objarray|LIR_OpArrayCopy::dst_objarray;
2196         if ((flags & mask) != mask) {
2197           // One of them is known to be an object array; check that the other one is too.
2198           assert(flags & mask, "one of the two should be known to be an object array");
2199 
2200           if (!(flags & LIR_OpArrayCopy::src_objarray)) {
2201             __ load_klass(src, tmp);
2202           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
2203             __ load_klass(dst, tmp);
2204           }
2205           int lh_offset = Klass::layout_helper_offset_in_bytes();

2206 
2207           __ lduw(tmp, lh_offset, tmp2);
2208 
2209           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
2210           __ set(objArray_lh, tmp);
2211           __ cmp(tmp, tmp2);
2212           __ br(Assembler::notEqual, false, Assembler::pt,  *stub->entry());
2213           __ delayed()->nop();
2214         }
2215 
2216         Register src_ptr = O0;
2217         Register dst_ptr = O1;
2218         Register len     = O2;
2219         Register chk_off = O3;
2220         Register super_k = O4;
2221 
2222         __ add(src, arrayOopDesc::base_offset_in_bytes(basic_type), src_ptr);
2223         if (shift == 0) {
2224           __ add(src_ptr, src_pos, src_ptr);
2225         } else {
2226           __ sll(src_pos, shift, tmp);
2227           __ add(src_ptr, tmp, src_ptr);
2228         }
2229 
2230         __ add(dst, arrayOopDesc::base_offset_in_bytes(basic_type), dst_ptr);
2231         if (shift == 0) {
2232           __ add(dst_ptr, dst_pos, dst_ptr);
2233         } else {
2234           __ sll(dst_pos, shift, tmp);
2235           __ add(dst_ptr, tmp, dst_ptr);
2236         }
2237         __ mov(length, len);
2238         __ load_klass(dst, tmp);
2239 
2240         int ek_offset = objArrayKlass::element_klass_offset_in_bytes();

2241         __ ld_ptr(tmp, ek_offset, super_k);
2242 
2243         int sco_offset = Klass::super_check_offset_offset_in_bytes();

2244         __ lduw(super_k, sco_offset, chk_off);
2245 
2246         __ call_VM_leaf(tmp, copyfunc_addr);
2247 
2248 #ifndef PRODUCT
2249         if (PrintC1Statistics) {
2250           Label failed;
2251           __ br_notnull_short(O0, Assembler::pn, failed);
2252           __ inc_counter((address)&Runtime1::_arraycopy_checkcast_cnt, G1, G3);
2253           __ bind(failed);
2254         }
2255 #endif
2256 
2257         __ br_null(O0, false, Assembler::pt,  *stub->continuation());
2258         __ delayed()->xor3(O0, -1, tmp);
2259 
2260 #ifndef PRODUCT
2261         if (PrintC1Statistics) {
2262           __ inc_counter((address)&Runtime1::_arraycopy_checkcast_attempt_cnt, G1, G3);
2263         }
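
The only substantive change in this hunk relative to the old version is how Klass field offsets are formed: the old code added klassOopDesc::header_size() * HeapWordSize at each call site, while here the *_offset_in_bytes() accessors are assumed to include that header already, so the call sites use them directly. A hypothetical illustration of the two styles (constants are made up):

    // Hypothetical illustration of the offset change; the constants are stand-ins.
    const int KLASS_HEADER_BYTES  = 16;  // stand-in for klassOopDesc::header_size() * HeapWordSize
    const int LAYOUT_HELPER_FIELD = 8;   // stand-in for the field's position inside Klass

    // Old style: every call site adds the klassOop header explicitly.
    int lh_offset_old() { return KLASS_HEADER_BYTES + LAYOUT_HELPER_FIELD; }

    // New style: the accessor already folds the header in, call sites use it as-is.
    int layout_helper_offset_in_bytes_sketch() { return KLASS_HEADER_BYTES + LAYOUT_HELPER_FIELD; }
    int lh_offset_new() { return layout_helper_offset_in_bytes_sketch(); }
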


2436     switch (code) {
2437       case lir_shl:  __ sllx  (left->as_pointer_register(), count, dest->as_pointer_register()); break;
2438       case lir_shr:  __ srax  (left->as_pointer_register(), count, dest->as_pointer_register()); break;
2439       case lir_ushr: __ srlx  (left->as_pointer_register(), count, dest->as_pointer_register()); break;
2440       default: ShouldNotReachHere();
2441     }
2442   } else {
2443     ShouldNotReachHere();
2444   }
2445 }
2446 
2447 
2448 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) {
2449   assert(op->tmp1()->as_register()  == G1 &&
2450          op->tmp2()->as_register()  == G3 &&
2451          op->tmp3()->as_register()  == G4 &&
2452          op->obj()->as_register()   == O0 &&
2453          op->klass()->as_register() == G5, "must be");
2454   if (op->init_check()) {
2455     __ ld(op->klass()->as_register(),
2456           instanceKlass::init_state_offset_in_bytes(),
2457           op->tmp1()->as_register());
2458     add_debug_info_for_null_check_here(op->stub()->info());
2459     __ cmp(op->tmp1()->as_register(), instanceKlass::fully_initialized);
2460     __ br(Assembler::notEqual, false, Assembler::pn, *op->stub()->entry());
2461     __ delayed()->nop();
2462   }
2463   __ allocate_object(op->obj()->as_register(),
2464                      op->tmp1()->as_register(),
2465                      op->tmp2()->as_register(),
2466                      op->tmp3()->as_register(),
2467                      op->header_size(),
2468                      op->object_size(),
2469                      op->klass()->as_register(),
2470                      *op->stub()->entry());
2471   __ bind(*op->stub()->continuation());
2472   __ verify_oop(op->obj()->as_register());
2473 }
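
allocate_object is the MacroAssembler helper that attempts an inline, bump-pointer allocation of the fixed-size instance and falls into the stub entry when it cannot; the continuation bound above is where both paths rejoin. A minimal sketch of the bump-pointer idea, assuming a thread-local allocation buffer with hypothetical fields:

    // Minimal bump-pointer allocation sketch (hypothetical fields, not HotSpot's TLAB).
    #include <cstddef>

    struct TlabSketch { char* top; char* end; };

    void* tlab_allocate_sketch(TlabSketch* tlab, std::size_t size_in_bytes) {
      char* obj = tlab->top;
      if (obj + size_in_bytes > tlab->end) {
        return nullptr;                    // no room: caller branches to the stub entry
      }
      tlab->top = obj + size_in_bytes;     // bump the allocation pointer
      return obj;                          // caller installs mark word, klass, zeros the body
    }
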
2474 
2475 
2476 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) {


2607   // patching may screw with our temporaries on sparc,
2608   // so let's do it before loading the class
2609   if (k->is_loaded()) {
2610     jobject2reg(k->constant_encoding(), k_RInfo);
2611   } else {
2612     jobject2reg_with_patching(k_RInfo, op->info_for_patch());
2613   }
2614   assert(obj != k_RInfo, "must be different");
2615 
2616   // get object class
2617   // not a safepoint as obj null check happens earlier
2618   __ load_klass(obj, klass_RInfo);
2619   if (op->fast_check()) {
2620     assert_different_registers(klass_RInfo, k_RInfo);
2621     __ cmp(k_RInfo, klass_RInfo);
2622     __ brx(Assembler::notEqual, false, Assembler::pt, *failure_target);
2623     __ delayed()->nop();
2624   } else {
2625     bool need_slow_path = true;
2626     if (k->is_loaded()) {
2627       if ((int) k->super_check_offset() != Klass::secondary_super_cache_offset_in_bytes())
2628         need_slow_path = false;
2629       // perform the fast part of the checking logic
2630       __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, noreg,
2631                                        (need_slow_path ? success_target : NULL),
2632                                        failure_target, NULL,
2633                                        RegisterOrConstant(k->super_check_offset()));
2634     } else {
2635       // perform the fast part of the checking logic
2636       __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, O7, success_target,
2637                                        failure_target, NULL);
2638     }
2639     if (need_slow_path) {
2640       // call out-of-line instance of __ check_klass_subtype_slow_path(...):
2641       assert(klass_RInfo == G3 && k_RInfo == G1, "incorrect call setup");
2642       __ call(Runtime1::entry_for(Runtime1::slow_subtype_check_id), relocInfo::runtime_call_type);
2643       __ delayed()->nop();
2644       __ cmp(G3, 0);
2645       __ br(Assembler::equal, false, Assembler::pn, *failure_target);
2646       __ delayed()->nop();
2647       // Fall through to success case


2711       Register data_val = Rtmp1;
2712       jobject2reg(md->constant_encoding(), mdo);
2713       if (mdo_offset_bias > 0) {
2714         __ set(mdo_offset_bias, data_val);
2715         __ add(mdo, data_val, mdo);
2716       }
2717       Address flags_addr(mdo, md->byte_offset_of_slot(data, DataLayout::flags_offset()) - mdo_offset_bias);
2718       __ ldub(flags_addr, data_val);
2719       __ or3(data_val, BitData::null_seen_byte_constant(), data_val);
2720       __ stb(data_val, flags_addr);
2721       __ ba_short(done);
2722       __ bind(not_null);
2723     } else {
2724       __ br_null_short(value, Assembler::pn, done);
2725     }
2726     add_debug_info_for_null_check_here(op->info_for_exception());
2727     __ load_klass(array, k_RInfo);
2728     __ load_klass(value, klass_RInfo);
2729 
2730     // get instance klass
2731     __ ld_ptr(Address(k_RInfo, objArrayKlass::element_klass_offset_in_bytes()), k_RInfo);
2732     // perform the fast part of the checking logic
2733     __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1, O7, success_target, failure_target, NULL);
2734 
2735     // call out-of-line instance of __ check_klass_subtype_slow_path(...):
2736     assert(klass_RInfo == G3 && k_RInfo == G1, "incorrect call setup");
2737     __ call(Runtime1::entry_for(Runtime1::slow_subtype_check_id), relocInfo::runtime_call_type);
2738     __ delayed()->nop();
2739     __ cmp(G3, 0);
2740     __ br(Assembler::equal, false, Assembler::pn, *failure_target);
2741     __ delayed()->nop();
2742     // fall through to the success case
2743 
2744     if (op->should_profile()) {
2745       Register mdo  = klass_RInfo, recv = k_RInfo, tmp1 = Rtmp1;
2746       assert_different_registers(value, mdo, recv, tmp1);
2747       __ bind(profile_cast_success);
2748       jobject2reg(md->constant_encoding(), mdo);
2749       if (mdo_offset_bias > 0) {
2750         __ set(mdo_offset_bias, tmp1);
2751         __ add(mdo, tmp1, mdo);
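
The mdo_offset_bias handling in both profiling blocks exists because SPARC loads and stores encode only a 13-bit signed immediate displacement; when a profile slot's offset inside the MethodData object would not fit, a bias is pre-added into the base register and the remaining displacement is encoded in the instruction. A small illustration of the constraint (the bias policy shown is an example, not the compiler's exact rule):

    // Why a bias is needed: SPARC ld/st immediates are 13-bit signed (simm13).
    #include <cstdio>

    static bool fits_in_simm13(long disp) { return disp >= -4096 && disp <= 4095; }

    int main() {
      long slot_offset = 6000;             // hypothetical large MDO slot offset
      long bias = fits_in_simm13(slot_offset) ? 0 : (slot_offset & ~0xFFFL);  // example policy only
      // The base register is advanced by 'bias'; the instruction encodes the remainder.
      std::printf("bias=%ld, encoded displacement=%ld\n", bias, slot_offset - bias);
      return 0;
    }
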

