213 bool inline_math_addExactI(bool is_increment);
214 bool inline_math_addExactL(bool is_increment);
215 bool inline_math_multiplyExactI();
216 bool inline_math_multiplyExactL();
217 bool inline_math_negateExactI();
218 bool inline_math_negateExactL();
219 bool inline_math_subtractExactI(bool is_decrement);
220 bool inline_math_subtractExactL(bool is_decrement);
221 bool inline_exp();
222 bool inline_pow();
223 Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
224 bool inline_min_max(vmIntrinsics::ID id);
225 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
226 // This returns Type::AnyPtr, RawPtr, or OopPtr.
227 int classify_unsafe_addr(Node* &base, Node* &offset);
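  // For example, the NULL+long form (base == NULL, raw address in the offset)
  // is rewritten by classify_unsafe_addr into a CastX2P pointer and
  // classified as Type::RawPtr, i.e. an off-heap access.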
228 Node* make_unsafe_address(Node* base, Node* offset);
229 // Helper for inline_unsafe_access.
230 // Generates the guards that check whether the result of
231 // Unsafe.getObject should be recorded in an SATB log buffer.
232 void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
233 bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
234 bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
235 static bool klass_needs_init_guard(Node* kls);
236 bool inline_unsafe_allocate();
237 bool inline_unsafe_copyMemory();
238 bool inline_native_currentThread();
239 #ifdef TRACE_HAVE_INTRINSICS
240 bool inline_native_classID();
241 bool inline_native_threadID();
242 #endif
243 bool inline_native_time_funcs(address method, const char* funcName);
244 bool inline_native_isInterrupted();
245 bool inline_native_Class_query(vmIntrinsics::ID id);
246 bool inline_native_subtype_check();
247
248 bool inline_native_newArray();
249 bool inline_native_getLength();
250 bool inline_array_copyOf(bool is_copyOfRange);
251 bool inline_array_equals();
252 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
253 bool inline_native_clone(bool is_virtual);
778
779 case vmIntrinsics::_addExactI: return inline_math_addExactI(false /* add */);
780 case vmIntrinsics::_addExactL: return inline_math_addExactL(false /* add */);
781 case vmIntrinsics::_decrementExactI: return inline_math_subtractExactI(true /* decrement */);
782 case vmIntrinsics::_decrementExactL: return inline_math_subtractExactL(true /* decrement */);
783 case vmIntrinsics::_incrementExactI: return inline_math_addExactI(true /* increment */);
784 case vmIntrinsics::_incrementExactL: return inline_math_addExactL(true /* increment */);
785 case vmIntrinsics::_multiplyExactI: return inline_math_multiplyExactI();
786 case vmIntrinsics::_multiplyExactL: return inline_math_multiplyExactL();
787 case vmIntrinsics::_negateExactI: return inline_math_negateExactI();
788 case vmIntrinsics::_negateExactL: return inline_math_negateExactL();
789 case vmIntrinsics::_subtractExactI: return inline_math_subtractExactI(false /* subtract */);
790 case vmIntrinsics::_subtractExactL: return inline_math_subtractExactL(false /* subtract */);
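  // For reference, a sketch of the Java-level pattern the *Exact intrinsics
  // replace (per Math.addExact; the intrinsic itself emits an overflow-
  // checking add and deoptimizes through an uncommon trap on overflow):
  //   int r = x + y;
  //   if (((x ^ r) & (y ^ r)) < 0) throw new ArithmeticException("integer overflow");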
791
792 case vmIntrinsics::_arraycopy: return inline_arraycopy();
793
794 case vmIntrinsics::_compareTo: return inline_string_compareTo();
795 case vmIntrinsics::_indexOf: return inline_string_indexOf();
796 case vmIntrinsics::_equals: return inline_string_equals();
797
798 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, !is_volatile);
799 case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile);
800 case vmIntrinsics::_getByte: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, !is_volatile);
801 case vmIntrinsics::_getShort: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, !is_volatile);
802 case vmIntrinsics::_getChar: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, !is_volatile);
803 case vmIntrinsics::_getInt: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, !is_volatile);
804 case vmIntrinsics::_getLong: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, !is_volatile);
805 case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, !is_volatile);
806 case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, !is_volatile);
807
808 case vmIntrinsics::_putObject: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, !is_volatile);
809 case vmIntrinsics::_putBoolean: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, !is_volatile);
810 case vmIntrinsics::_putByte: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, !is_volatile);
811 case vmIntrinsics::_putShort: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, !is_volatile);
812 case vmIntrinsics::_putChar: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, !is_volatile);
813 case vmIntrinsics::_putInt: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, !is_volatile);
814 case vmIntrinsics::_putLong: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, !is_volatile);
815 case vmIntrinsics::_putFloat: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, !is_volatile);
816 case vmIntrinsics::_putDouble: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, !is_volatile);
817
818 case vmIntrinsics::_getByte_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE, !is_volatile);
819 case vmIntrinsics::_getShort_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT, !is_volatile);
820 case vmIntrinsics::_getChar_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR, !is_volatile);
821 case vmIntrinsics::_getInt_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_INT, !is_volatile);
822 case vmIntrinsics::_getLong_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_LONG, !is_volatile);
823 case vmIntrinsics::_getFloat_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT, !is_volatile);
824 case vmIntrinsics::_getDouble_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE, !is_volatile);
825 case vmIntrinsics::_getAddress_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile);
826
827 case vmIntrinsics::_putByte_raw: return inline_unsafe_access( is_native_ptr, is_store, T_BYTE, !is_volatile);
828 case vmIntrinsics::_putShort_raw: return inline_unsafe_access( is_native_ptr, is_store, T_SHORT, !is_volatile);
829 case vmIntrinsics::_putChar_raw: return inline_unsafe_access( is_native_ptr, is_store, T_CHAR, !is_volatile);
830 case vmIntrinsics::_putInt_raw: return inline_unsafe_access( is_native_ptr, is_store, T_INT, !is_volatile);
831 case vmIntrinsics::_putLong_raw: return inline_unsafe_access( is_native_ptr, is_store, T_LONG, !is_volatile);
832 case vmIntrinsics::_putFloat_raw: return inline_unsafe_access( is_native_ptr, is_store, T_FLOAT, !is_volatile);
833 case vmIntrinsics::_putDouble_raw: return inline_unsafe_access( is_native_ptr, is_store, T_DOUBLE, !is_volatile);
834 case vmIntrinsics::_putAddress_raw: return inline_unsafe_access( is_native_ptr, is_store, T_ADDRESS, !is_volatile);
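  // Note: is_native_ptr, is_store, is_static and is_volatile are local
  // 'const bool ... = true' flags, so '!is_store' simply spells 'false'
  // while keeping each case line self-documenting.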
835
836 case vmIntrinsics::_getObjectVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, is_volatile);
837 case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, is_volatile);
838 case vmIntrinsics::_getByteVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, is_volatile);
839 case vmIntrinsics::_getShortVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, is_volatile);
840 case vmIntrinsics::_getCharVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, is_volatile);
841 case vmIntrinsics::_getIntVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, is_volatile);
842 case vmIntrinsics::_getLongVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, is_volatile);
843 case vmIntrinsics::_getFloatVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, is_volatile);
844 case vmIntrinsics::_getDoubleVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, is_volatile);
845
846 case vmIntrinsics::_putObjectVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, is_volatile);
847 case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, is_volatile);
848 case vmIntrinsics::_putByteVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, is_volatile);
849 case vmIntrinsics::_putShortVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, is_volatile);
850 case vmIntrinsics::_putCharVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, is_volatile);
851 case vmIntrinsics::_putIntVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, is_volatile);
852 case vmIntrinsics::_putLongVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, is_volatile);
853 case vmIntrinsics::_putFloatVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, is_volatile);
854 case vmIntrinsics::_putDoubleVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, is_volatile);
855
856 case vmIntrinsics::_prefetchRead: return inline_unsafe_prefetch(!is_native_ptr, !is_store, !is_static);
857 case vmIntrinsics::_prefetchWrite: return inline_unsafe_prefetch(!is_native_ptr, is_store, !is_static);
858 case vmIntrinsics::_prefetchReadStatic: return inline_unsafe_prefetch(!is_native_ptr, !is_store, is_static);
859 case vmIntrinsics::_prefetchWriteStatic: return inline_unsafe_prefetch(!is_native_ptr, is_store, is_static);
860
861 case vmIntrinsics::_compareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
862 case vmIntrinsics::_compareAndSwapInt: return inline_unsafe_load_store(T_INT, LS_cmpxchg);
863 case vmIntrinsics::_compareAndSwapLong: return inline_unsafe_load_store(T_LONG, LS_cmpxchg);
864
865 case vmIntrinsics::_putOrderedObject: return inline_unsafe_ordered_store(T_OBJECT);
866 case vmIntrinsics::_putOrderedInt: return inline_unsafe_ordered_store(T_INT);
867 case vmIntrinsics::_putOrderedLong: return inline_unsafe_ordered_store(T_LONG);
868
869 case vmIntrinsics::_getAndAddInt: return inline_unsafe_load_store(T_INT, LS_xadd);
870 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_xadd);
871 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_xchg);
872 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_xchg);
873 case vmIntrinsics::_getAndSetObject: return inline_unsafe_load_store(T_OBJECT, LS_xchg);
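  // The LS_* kinds map onto the Unsafe entry points: LS_cmpxchg backs
  // compareAndSwap*, LS_xadd backs getAndAdd* (fetch-and-add), and LS_xchg
  // backs getAndSet* (atomic exchange).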
874
2537 }
2538 }
2539
2540   // The sharpened class might be unloaded if there is no class loader
2541   // constraint in place.
2542 if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2543 const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2544
2545 #ifndef PRODUCT
2546 if (C->print_intrinsics() || C->print_inlining()) {
2547 tty->print(" from base type: "); adr_type->dump();
2548 tty->print(" sharpened value: "); tjp->dump();
2549 }
2550 #endif
2551 // Sharpen the value type.
2552 return tjp;
2553 }
2554 return NULL;
2555 }
2556
2557 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
2558 if (callee()->is_static()) return false; // caller must have the capability!
2559
2560 #ifndef PRODUCT
2561 {
2562 ResourceMark rm;
2563 // Check the signatures.
2564 ciSignature* sig = callee()->signature();
2565 #ifdef ASSERT
2566 if (!is_store) {
2567 // Object getObject(Object base, int/long offset), etc.
2568 BasicType rtype = sig->return_type()->basic_type();
2569 if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
2570 rtype = T_ADDRESS; // it is really a C void*
2571 assert(rtype == type, "getter must return the expected value");
2572 if (!is_native_ptr) {
2573 assert(sig->count() == 2, "oop getter has 2 arguments");
2574 assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2575 assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2576 } else {
2577 assert(sig->count() == 1, "native getter has 1 argument");
2578 assert(sig->type_at(0)->basic_type() == T_LONG, "getter base is long");
2579 }
2612 Node* base = argument(1); // type: oop
2613 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2614 offset = argument(2); // type: long
2615 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2616 // to be plain byte offsets, which are also the same as those accepted
2617 // by oopDesc::field_base.
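  // The probe below feeds the arbitrary value 11 through the cookie
  // conversion and requires it back unchanged, i.e. the mapping must be
  // the identity (no scaling or bias).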
2618 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2619 "fieldOffset must be byte-scaled");
2620 // 32-bit machines ignore the high half!
2621 offset = ConvL2X(offset);
2622 adr = make_unsafe_address(base, offset);
2623 heap_base_oop = base;
2624 val = is_store ? argument(4) : NULL;
2625 } else {
2626 Node* ptr = argument(1); // type: long
2627 ptr = ConvL2X(ptr); // adjust Java long to machine word
2628 adr = make_unsafe_address(NULL, ptr);
2629 val = is_store ? argument(3) : NULL;
2630 }
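  // ConvL2X (used above) truncates the Java long to a machine word: an
  // identity on 64-bit VMs and a ConvL2I on 32-bit ones (hence "ignore the
  // high half" above).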
2631
2632 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2633
2634 // First guess at the value type.
2635 const Type *value_type = Type::get_const_basic_type(type);
2636
2637 // Try to categorize the address. If it comes up as TypeJavaPtr::BOTTOM,
2638 // there was not enough information to nail it down.
2639 Compile::AliasType* alias_type = C->alias_type(adr_type);
2640 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2641
2642 // We will need memory barriers unless we can determine a unique
2643 // alias category for this reference. (Note: If for some reason
2644 // the barriers get omitted and the unsafe reference begins to "pollute"
2645 // the alias analysis of the rest of the graph, either Compile::can_alias
2646 // or Compile::must_alias will throw a diagnostic assert.)
2647 bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
2648
2649 // If we are reading the value of the referent field of a Reference
2650 // object (either by using Unsafe directly or through reflection)
2651 // then, if G1 is enabled, we need to record the referent in an
2652 // SATB log buffer using the pre-barrier mechanism.
2653   // Also, we need to add a memory barrier to prevent commoning reads
2654   // from this field across safepoints, since GC can change its value.
2655 bool need_read_barrier = !is_native_ptr && !is_store &&
2656 offset != top() && heap_base_oop != top();
2657
2658 if (!is_store && type == T_OBJECT) {
2659 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type, is_native_ptr);
2660 if (tjp != NULL) {
2661 value_type = tjp;
2684 insert_mem_bar(Op_MemBarRelease);
2685 } else {
2686 if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2687 insert_mem_bar(Op_MemBarVolatile);
2688 }
2689 }
2690 }
2691
2692 // Memory barrier to prevent normal and 'unsafe' accesses from
2693 // bypassing each other. Happens after null checks, so the
2694 // exception paths do not take memory state from the memory barrier,
2695   // so there is no problem making a strong assert about mixing users
2696 // of safe & unsafe memory. Otherwise fails in a CTW of rt.jar
2697 // around 5701, class sun/reflect/UnsafeBooleanFieldAccessorImpl.
2698 if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2699
2700 if (!is_store) {
2701 MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2702   // To be valid, unsafe loads may depend on conditions other than
2703 // the one that guards them: pin the Load node
2704 Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
2705 // load value
2706 switch (type) {
2707 case T_BOOLEAN:
2708 case T_CHAR:
2709 case T_BYTE:
2710 case T_SHORT:
2711 case T_INT:
2712 case T_LONG:
2713 case T_FLOAT:
2714 case T_DOUBLE:
2715 break;
2716 case T_OBJECT:
2717 if (need_read_barrier) {
2718 insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2719 }
2720 break;
2721 case T_ADDRESS:
2722 // Cast to an int type.
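      // (CastP2X reinterprets the pointer as a machine-word integer and
      // ConvX2UL zero-extends it to the unsigned Java long the caller sees.)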
2723 p = _gvn.transform(new (C) CastP2XNode(NULL, p));
2724 p = ConvX2UL(p);
2730 // The load node has the control of the preceding MemBarCPUOrder. All
2731 // following nodes will have the control of the MemBarCPUOrder inserted at
2732 // the end of this method. So, pushing the load onto the stack at a later
2733 // point is fine.
2734 set_result(p);
2735 } else {
2736 // place effect of store into memory
2737 switch (type) {
2738 case T_DOUBLE:
2739 val = dstore_rounding(val);
2740 break;
2741 case T_ADDRESS:
2742 // Repackage the long as a pointer.
2743 val = ConvL2X(val);
2744 val = _gvn.transform(new (C) CastX2PNode(val));
2745 break;
2746 }
2747
2748 MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
2749 if (type != T_OBJECT ) {
2750 (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile);
2751 } else {
2752 // Possibly an oop being stored to Java heap or native memory
2753 if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
2754 // oop to Java heap.
2755 (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2756 } else {
2757 // We can't tell at compile time if we are storing in the Java heap or outside
2758 // of it. So we need to emit code to conditionally do the proper type of
2759 // store.
2760
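        // The runtime test below picks the store flavor: the then-branch
        // emits a GC-barriered oop store, the else-branch a raw store.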
2761 IdealKit ideal(this);
2762 #define __ ideal.
2763 // QQQ who knows what probability is here??
2764 __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2765       // Sync IdealKit and GraphKit.
2766 sync_kit(ideal);
2767 Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
2768 // Update IdealKit memory.
2769 __ sync_kit(this);
2770 } __ else_(); {
2771 __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile);
2772 } __ end_if();
2773 // Final sync IdealKit and GraphKit.
2774 final_sync(ideal);
2775 #undef __
2776 }
2777 }
2778 }
2779
2780 if (is_volatile) {
2781 if (!is_store) {
2782 insert_mem_bar(Op_MemBarAcquire);
2783 } else {
2784 if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
2785 insert_mem_bar(Op_MemBarVolatile);
2786 }
2787 }
2788 }
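  // This is the usual JMM mapping: volatile loads get a trailing acquire,
  // volatile stores a leading release (above) plus a trailing StoreLoad,
  // except on CPUs such as PPC64 where IRIW support moves the StoreLoad
  // barrier to before the volatile load instead.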
2789
2790 if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2791
2922 Node* receiver = NULL;
2923 Node* base = NULL;
2924 Node* offset = NULL;
2925 Node* oldval = NULL;
2926 Node* newval = NULL;
2927 if (kind == LS_cmpxchg) {
2928 const bool two_slot_type = type2size[type] == 2;
2929 receiver = argument(0); // type: oop
2930 base = argument(1); // type: oop
2931 offset = argument(2); // type: long
2932 oldval = argument(4); // type: oop, int, or long
2933 newval = argument(two_slot_type ? 6 : 5); // type: oop, int, or long
2934 } else if (kind == LS_xadd || kind == LS_xchg){
2935 receiver = argument(0); // type: oop
2936 base = argument(1); // type: oop
2937 offset = argument(2); // type: long
2938 oldval = NULL;
2939 newval = argument(4); // type: oop, int, or long
2940 }
2941
2942 // Null check receiver.
2943 receiver = null_check(receiver);
2944 if (stopped()) {
2945 return true;
2946 }
2947
2948 // Build field offset expression.
2949 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2950 // to be plain byte offsets, which are also the same as those accepted
2951 // by oopDesc::field_base.
2952 assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
2953 // 32-bit machines ignore the high half of long offsets
2954 offset = ConvL2X(offset);
2955 Node* adr = make_unsafe_address(base, offset);
2956 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2957
2958 // For CAS, unlike inline_unsafe_access, there seems no point in
2959 // trying to refine types. Just use the coarse types here.
2960 const Type *value_type = Type::get_const_basic_type(type);
2961 Compile::AliasType* alias_type = C->alias_type(adr_type);
2962 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2963
2964 if (kind == LS_xchg && type == T_OBJECT) {
2965 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
2966 if (tjp != NULL) {
2967 value_type = tjp;
2968 }
2969 }
2970
2971 int alias_idx = C->get_alias_index(adr_type);
2972
2973 // Memory-model-wise, a LoadStore acts like a little synchronized
2974   // block, so it needs barriers on each side. These don't translate
2975   // into actual barriers on most machines, but we still need the rest of the
2976 // compiler to respect ordering.
2977
2978 insert_mem_bar(Op_MemBarRelease);
2979 insert_mem_bar(Op_MemBarCPUOrder);
2980
2981 // 4984716: MemBars must be inserted before this
2982 // memory node in order to avoid a false
2983 // dependency which will confuse the scheduler.
2984 Node *mem = memory(alias_idx);
2985
2986 // For now, we handle only those cases that actually exist: ints,
2987 // longs, and Object. Adding others should be straightforward.
2988 Node* load_store = NULL;
|
213 bool inline_math_addExactI(bool is_increment);
214 bool inline_math_addExactL(bool is_increment);
215 bool inline_math_multiplyExactI();
216 bool inline_math_multiplyExactL();
217 bool inline_math_negateExactI();
218 bool inline_math_negateExactL();
219 bool inline_math_subtractExactI(bool is_decrement);
220 bool inline_math_subtractExactL(bool is_decrement);
221 bool inline_exp();
222 bool inline_pow();
223 Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
224 bool inline_min_max(vmIntrinsics::ID id);
225 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
226 // This returns Type::AnyPtr, RawPtr, or OopPtr.
227 int classify_unsafe_addr(Node* &base, Node* &offset);
228 Node* make_unsafe_address(Node* base, Node* offset);
229 // Helper for inline_unsafe_access.
230 // Generates the guards that check whether the result of
231 // Unsafe.getObject should be recorded in an SATB log buffer.
232 void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
233 bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
234 bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
235 static bool klass_needs_init_guard(Node* kls);
236 bool inline_unsafe_allocate();
237 bool inline_unsafe_copyMemory();
238 bool inline_native_currentThread();
239 #ifdef TRACE_HAVE_INTRINSICS
240 bool inline_native_classID();
241 bool inline_native_threadID();
242 #endif
243 bool inline_native_time_funcs(address method, const char* funcName);
244 bool inline_native_isInterrupted();
245 bool inline_native_Class_query(vmIntrinsics::ID id);
246 bool inline_native_subtype_check();
247
248 bool inline_native_newArray();
249 bool inline_native_getLength();
250 bool inline_array_copyOf(bool is_copyOfRange);
251 bool inline_array_equals();
252 void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
253 bool inline_native_clone(bool is_virtual);
778
779 case vmIntrinsics::_addExactI: return inline_math_addExactI(false /* add */);
780 case vmIntrinsics::_addExactL: return inline_math_addExactL(false /* add */);
781 case vmIntrinsics::_decrementExactI: return inline_math_subtractExactI(true /* decrement */);
782 case vmIntrinsics::_decrementExactL: return inline_math_subtractExactL(true /* decrement */);
783 case vmIntrinsics::_incrementExactI: return inline_math_addExactI(true /* increment */);
784 case vmIntrinsics::_incrementExactL: return inline_math_addExactL(true /* increment */);
785 case vmIntrinsics::_multiplyExactI: return inline_math_multiplyExactI();
786 case vmIntrinsics::_multiplyExactL: return inline_math_multiplyExactL();
787 case vmIntrinsics::_negateExactI: return inline_math_negateExactI();
788 case vmIntrinsics::_negateExactL: return inline_math_negateExactL();
789 case vmIntrinsics::_subtractExactI: return inline_math_subtractExactI(false /* subtract */);
790 case vmIntrinsics::_subtractExactL: return inline_math_subtractExactL(false /* subtract */);
791
792 case vmIntrinsics::_arraycopy: return inline_arraycopy();
793
794 case vmIntrinsics::_compareTo: return inline_string_compareTo();
795 case vmIntrinsics::_indexOf: return inline_string_indexOf();
796 case vmIntrinsics::_equals: return inline_string_equals();
797
798 case vmIntrinsics::_getObject: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, !is_volatile, false);
799 case vmIntrinsics::_getBoolean: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile, false);
800 case vmIntrinsics::_getByte: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, !is_volatile, false);
801 case vmIntrinsics::_getShort: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, !is_volatile, false);
802 case vmIntrinsics::_getChar: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, !is_volatile, false);
803 case vmIntrinsics::_getInt: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, !is_volatile, false);
804 case vmIntrinsics::_getLong: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, !is_volatile, false);
805 case vmIntrinsics::_getFloat: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, !is_volatile, false);
806 case vmIntrinsics::_getDouble: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, !is_volatile, false);
807
808 case vmIntrinsics::_putObject: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, !is_volatile, false);
809 case vmIntrinsics::_putBoolean: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, !is_volatile, false);
810 case vmIntrinsics::_putByte: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, !is_volatile, false);
811 case vmIntrinsics::_putShort: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, !is_volatile, false);
812 case vmIntrinsics::_putChar: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, !is_volatile, false);
813 case vmIntrinsics::_putInt: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, !is_volatile, false);
814 case vmIntrinsics::_putLong: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, !is_volatile, false);
815 case vmIntrinsics::_putFloat: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, !is_volatile, false);
816 case vmIntrinsics::_putDouble: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, !is_volatile, false);
817
818 case vmIntrinsics::_getByte_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE, !is_volatile, false);
819 case vmIntrinsics::_getShort_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT, !is_volatile, false);
820 case vmIntrinsics::_getChar_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR, !is_volatile, false);
821 case vmIntrinsics::_getInt_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_INT, !is_volatile, false);
822 case vmIntrinsics::_getLong_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_LONG, !is_volatile, false);
823 case vmIntrinsics::_getFloat_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT, !is_volatile, false);
824 case vmIntrinsics::_getDouble_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE, !is_volatile, false);
825 case vmIntrinsics::_getAddress_raw: return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile, false);
826
827 case vmIntrinsics::_putByte_raw: return inline_unsafe_access( is_native_ptr, is_store, T_BYTE, !is_volatile, false);
828 case vmIntrinsics::_putShort_raw: return inline_unsafe_access( is_native_ptr, is_store, T_SHORT, !is_volatile, false);
829 case vmIntrinsics::_putChar_raw: return inline_unsafe_access( is_native_ptr, is_store, T_CHAR, !is_volatile, false);
830 case vmIntrinsics::_putInt_raw: return inline_unsafe_access( is_native_ptr, is_store, T_INT, !is_volatile, false);
831 case vmIntrinsics::_putLong_raw: return inline_unsafe_access( is_native_ptr, is_store, T_LONG, !is_volatile, false);
832 case vmIntrinsics::_putFloat_raw: return inline_unsafe_access( is_native_ptr, is_store, T_FLOAT, !is_volatile, false);
833 case vmIntrinsics::_putDouble_raw: return inline_unsafe_access( is_native_ptr, is_store, T_DOUBLE, !is_volatile, false);
834 case vmIntrinsics::_putAddress_raw: return inline_unsafe_access( is_native_ptr, is_store, T_ADDRESS, !is_volatile, false);
835
836 case vmIntrinsics::_getObjectVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT, is_volatile, false);
837 case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, is_volatile, false);
838 case vmIntrinsics::_getByteVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE, is_volatile, false);
839 case vmIntrinsics::_getShortVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT, is_volatile, false);
840 case vmIntrinsics::_getCharVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR, is_volatile, false);
841 case vmIntrinsics::_getIntVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_INT, is_volatile, false);
842 case vmIntrinsics::_getLongVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG, is_volatile, false);
843 case vmIntrinsics::_getFloatVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT, is_volatile, false);
844 case vmIntrinsics::_getDoubleVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE, is_volatile, false);
845
846 case vmIntrinsics::_putObjectVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_OBJECT, is_volatile, false);
847 case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BOOLEAN, is_volatile, false);
848 case vmIntrinsics::_putByteVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_BYTE, is_volatile, false);
849 case vmIntrinsics::_putShortVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_SHORT, is_volatile, false);
850 case vmIntrinsics::_putCharVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_CHAR, is_volatile, false);
851 case vmIntrinsics::_putIntVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_INT, is_volatile, false);
852 case vmIntrinsics::_putLongVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_LONG, is_volatile, false);
853 case vmIntrinsics::_putFloatVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_FLOAT, is_volatile, false);
854 case vmIntrinsics::_putDoubleVolatile: return inline_unsafe_access(!is_native_ptr, is_store, T_DOUBLE, is_volatile, false);
855
856 case vmIntrinsics::_prefetchRead: return inline_unsafe_prefetch(!is_native_ptr, !is_store, !is_static);
857 case vmIntrinsics::_prefetchWrite: return inline_unsafe_prefetch(!is_native_ptr, is_store, !is_static);
858 case vmIntrinsics::_prefetchReadStatic: return inline_unsafe_prefetch(!is_native_ptr, !is_store, is_static);
859 case vmIntrinsics::_prefetchWriteStatic: return inline_unsafe_prefetch(!is_native_ptr, is_store, is_static);
860
861 case vmIntrinsics::_compareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
862 case vmIntrinsics::_compareAndSwapInt: return inline_unsafe_load_store(T_INT, LS_cmpxchg);
863 case vmIntrinsics::_compareAndSwapLong: return inline_unsafe_load_store(T_LONG, LS_cmpxchg);
864
865 case vmIntrinsics::_putOrderedObject: return inline_unsafe_ordered_store(T_OBJECT);
866 case vmIntrinsics::_putOrderedInt: return inline_unsafe_ordered_store(T_INT);
867 case vmIntrinsics::_putOrderedLong: return inline_unsafe_ordered_store(T_LONG);
868
869 case vmIntrinsics::_getAndAddInt: return inline_unsafe_load_store(T_INT, LS_xadd);
870 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_xadd);
871 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_xchg);
872 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_xchg);
873 case vmIntrinsics::_getAndSetObject: return inline_unsafe_load_store(T_OBJECT, LS_xchg);
874
2537 }
2538 }
2539
2540   // The sharpened class might be unloaded if there is no class loader
2541   // constraint in place.
2542 if (sharpened_klass != NULL && sharpened_klass->is_loaded()) {
2543 const TypeOopPtr* tjp = TypeOopPtr::make_from_klass(sharpened_klass);
2544
2545 #ifndef PRODUCT
2546 if (C->print_intrinsics() || C->print_inlining()) {
2547 tty->print(" from base type: "); adr_type->dump();
2548 tty->print(" sharpened value: "); tjp->dump();
2549 }
2550 #endif
2551 // Sharpen the value type.
2552 return tjp;
2553 }
2554 return NULL;
2555 }
2556
2557 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool unaligned) {
2558 if (callee()->is_static()) return false; // caller must have the capability!
2559 assert(type != T_OBJECT || !unaligned, "unaligned access not supported with object type");
2560 #ifndef PRODUCT
2561 {
2562 ResourceMark rm;
2563 // Check the signatures.
2564 ciSignature* sig = callee()->signature();
2565 #ifdef ASSERT
2566 if (!is_store) {
2567 // Object getObject(Object base, int/long offset), etc.
2568 BasicType rtype = sig->return_type()->basic_type();
2569 if (rtype == T_ADDRESS_HOLDER && callee()->name() == ciSymbol::getAddress_name())
2570 rtype = T_ADDRESS; // it is really a C void*
2571 assert(rtype == type, "getter must return the expected value");
2572 if (!is_native_ptr) {
2573 assert(sig->count() == 2, "oop getter has 2 arguments");
2574 assert(sig->type_at(0)->basic_type() == T_OBJECT, "getter base is object");
2575 assert(sig->type_at(1)->basic_type() == T_LONG, "getter offset is correct");
2576 } else {
2577 assert(sig->count() == 1, "native getter has 1 argument");
2578 assert(sig->type_at(0)->basic_type() == T_LONG, "getter base is long");
2579 }
2612 Node* base = argument(1); // type: oop
2613 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2614 offset = argument(2); // type: long
2615 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2616 // to be plain byte offsets, which are also the same as those accepted
2617 // by oopDesc::field_base.
2618 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2619 "fieldOffset must be byte-scaled");
2620 // 32-bit machines ignore the high half!
2621 offset = ConvL2X(offset);
2622 adr = make_unsafe_address(base, offset);
2623 heap_base_oop = base;
2624 val = is_store ? argument(4) : NULL;
2625 } else {
2626 Node* ptr = argument(1); // type: long
2627 ptr = ConvL2X(ptr); // adjust Java long to machine word
2628 adr = make_unsafe_address(NULL, ptr);
2629 val = is_store ? argument(3) : NULL;
2630 }
2631
2632 // Can base be NULL? Otherwise, always on-heap access.
2633 bool can_access_non_heap = TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop));
2634
2635 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2636
2637 // Try to categorize the address.
2638 Compile::AliasType* alias_type = C->alias_type(adr_type);
2639 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2640
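  // The klass word and an array's length word are VM metadata rather than
  // Java fields, so Unsafe access to them is left to the runtime call.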
2641 if (alias_type->adr_type() == TypeInstPtr::KLASS ||
2642 alias_type->adr_type() == TypeAryPtr::RANGE) {
2643 return false; // not supported
2644 }
2645
2646 bool mismatched = false;
2647 BasicType bt = alias_type->basic_type();
2648 if (bt != T_ILLEGAL) {
2649 assert(alias_type->adr_type()->is_oopptr(), "should be on-heap access");
2650 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2651       // Alias type doesn't differentiate between byte[] and boolean[].
2652 // Use address type to get the element type.
2653 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2654 }
2655 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2656 // accessing an array field with getObject is not a mismatch
2657 bt = T_OBJECT;
2658 }
2659 if ((bt == T_OBJECT) != (type == T_OBJECT)) {
2660 // Don't intrinsify mismatched object accesses
2661 return false;
2662 }
2663 mismatched = (bt != type);
2664 }
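  // Illustrative mismatch: Unsafe.getInt(longArray, offset) sees bt == T_LONG
  // from the address type but type == T_INT from the intrinsic, so the access
  // is compiled with mismatched handling rather than rejected.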
2665
2666 assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
2667
2668 // First guess at the value type.
2669 const Type *value_type = Type::get_const_basic_type(type);
2670
2671 // We will need memory barriers unless we can determine a unique
2672 // alias category for this reference. (Note: If for some reason
2673 // the barriers get omitted and the unsafe reference begins to "pollute"
2674 // the alias analysis of the rest of the graph, either Compile::can_alias
2675 // or Compile::must_alias will throw a diagnostic assert.)
2676 bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
2677
2678 // If we are reading the value of the referent field of a Reference
2679 // object (either by using Unsafe directly or through reflection)
2680 // then, if G1 is enabled, we need to record the referent in an
2681 // SATB log buffer using the pre-barrier mechanism.
2682   // Also, we need to add a memory barrier to prevent commoning reads
2683   // from this field across safepoints, since GC can change its value.
2684 bool need_read_barrier = !is_native_ptr && !is_store &&
2685 offset != top() && heap_base_oop != top();
2686
2687 if (!is_store && type == T_OBJECT) {
2688 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type, is_native_ptr);
2689 if (tjp != NULL) {
2690 value_type = tjp;
2713 insert_mem_bar(Op_MemBarRelease);
2714 } else {
2715 if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2716 insert_mem_bar(Op_MemBarVolatile);
2717 }
2718 }
2719 }
2720
2721 // Memory barrier to prevent normal and 'unsafe' accesses from
2722 // bypassing each other. Happens after null checks, so the
2723 // exception paths do not take memory state from the memory barrier,
2724   // so there is no problem making a strong assert about mixing users
2725 // of safe & unsafe memory. Otherwise fails in a CTW of rt.jar
2726 // around 5701, class sun/reflect/UnsafeBooleanFieldAccessorImpl.
2727 if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2728
2729 if (!is_store) {
2730 MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2731   // To be valid, unsafe loads may depend on conditions other than
2732 // the one that guards them: pin the Load node
2733 Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile, unaligned, mismatched);
2734 // load value
2735 switch (type) {
2736 case T_BOOLEAN:
2737 case T_CHAR:
2738 case T_BYTE:
2739 case T_SHORT:
2740 case T_INT:
2741 case T_LONG:
2742 case T_FLOAT:
2743 case T_DOUBLE:
2744 break;
2745 case T_OBJECT:
2746 if (need_read_barrier) {
2747 insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2748 }
2749 break;
2750 case T_ADDRESS:
2751 // Cast to an int type.
2752 p = _gvn.transform(new (C) CastP2XNode(NULL, p));
2753 p = ConvX2UL(p);
2759 // The load node has the control of the preceding MemBarCPUOrder. All
2760 // following nodes will have the control of the MemBarCPUOrder inserted at
2761 // the end of this method. So, pushing the load onto the stack at a later
2762 // point is fine.
2763 set_result(p);
2764 } else {
2765 // place effect of store into memory
2766 switch (type) {
2767 case T_DOUBLE:
2768 val = dstore_rounding(val);
2769 break;
2770 case T_ADDRESS:
2771 // Repackage the long as a pointer.
2772 val = ConvL2X(val);
2773 val = _gvn.transform(new (C) CastX2PNode(val));
2774 break;
2775 }
2776
2777 MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
2778 if (type != T_OBJECT ) {
2779 (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile, unaligned, mismatched);
2780 } else {
2781 // Possibly an oop being stored to Java heap or native memory
2782 if (!can_access_non_heap) {
2783 // oop to Java heap.
2784 (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2785 } else {
2786 // We can't tell at compile time if we are storing in the Java heap or outside
2787 // of it. So we need to emit code to conditionally do the proper type of
2788 // store.
2789
2790 IdealKit ideal(this);
2791 #define __ ideal.
2792 // QQQ who knows what probability is here??
2793 __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2794       // Sync IdealKit and GraphKit.
2795 sync_kit(ideal);
2796 Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2797 // Update IdealKit memory.
2798 __ sync_kit(this);
2799 } __ else_(); {
2800 __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile, mismatched);
2801 } __ end_if();
2802 // Final sync IdealKit and GraphKit.
2803 final_sync(ideal);
2804 #undef __
2805 }
2806 }
2807 }
2808
2809 if (is_volatile) {
2810 if (!is_store) {
2811 insert_mem_bar(Op_MemBarAcquire);
2812 } else {
2813 if (!support_IRIW_for_not_multiple_copy_atomic_cpu) {
2814 insert_mem_bar(Op_MemBarVolatile);
2815 }
2816 }
2817 }
2818
2819 if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2820
2951 Node* receiver = NULL;
2952 Node* base = NULL;
2953 Node* offset = NULL;
2954 Node* oldval = NULL;
2955 Node* newval = NULL;
2956 if (kind == LS_cmpxchg) {
2957 const bool two_slot_type = type2size[type] == 2;
2958 receiver = argument(0); // type: oop
2959 base = argument(1); // type: oop
2960 offset = argument(2); // type: long
2961 oldval = argument(4); // type: oop, int, or long
2962 newval = argument(two_slot_type ? 6 : 5); // type: oop, int, or long
2963 } else if (kind == LS_xadd || kind == LS_xchg){
2964 receiver = argument(0); // type: oop
2965 base = argument(1); // type: oop
2966 offset = argument(2); // type: long
2967 oldval = NULL;
2968 newval = argument(4); // type: oop, int, or long
2969 }
2970
2971 // Build field offset expression.
2972 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2973 // to be plain byte offsets, which are also the same as those accepted
2974 // by oopDesc::field_base.
2975 assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
2976 // 32-bit machines ignore the high half of long offsets
2977 offset = ConvL2X(offset);
2978 Node* adr = make_unsafe_address(base, offset);
2979 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2980
2981 Compile::AliasType* alias_type = C->alias_type(adr_type);
2982 BasicType bt = alias_type->basic_type();
2983 if (bt != T_ILLEGAL &&
2984 ((bt == T_OBJECT || bt == T_ARRAY) != (type == T_OBJECT))) {
2985 // Don't intrinsify mismatched object accesses.
2986 return false;
2987 }
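  // Same rule as in inline_unsafe_access: e.g. a compareAndSwapInt applied
  // to a field known to hold an oop is rejected rather than intrinsified.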
2988
2989 // For CAS, unlike inline_unsafe_access, there seems no point in
2990 // trying to refine types. Just use the coarse types here.
2991 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2992 const Type *value_type = Type::get_const_basic_type(type);
2993
2994 if (kind == LS_xchg && type == T_OBJECT) {
2995 const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
2996 if (tjp != NULL) {
2997 value_type = tjp;
2998 }
2999 }
3000
3001 // Null check receiver.
3002 receiver = null_check(receiver);
3003 if (stopped()) {
3004 return true;
3005 }
3006
3007 int alias_idx = C->get_alias_index(adr_type);
3008
3009 // Memory-model-wise, a LoadStore acts like a little synchronized
3010   // block, so it needs barriers on each side. These don't translate
3011   // into actual barriers on most machines, but we still need the rest of the
3012 // compiler to respect ordering.
3013
3014 insert_mem_bar(Op_MemBarRelease);
3015 insert_mem_bar(Op_MemBarCPUOrder);
3016
3017 // 4984716: MemBars must be inserted before this
3018 // memory node in order to avoid a false
3019 // dependency which will confuse the scheduler.
3020 Node *mem = memory(alias_idx);
3021
3022 // For now, we handle only those cases that actually exist: ints,
3023 // longs, and Object. Adding others should be straightforward.
3024 Node* load_store = NULL;
|