// NOTE(review): mid-function excerpt -- this appears to be the interior of
// C2's unsafe-access intrinsic expansion (Unsafe get/put); the function's
// head and tail lie outside this chunk. The leading 26xx numbers are the
// excerpt's own line numbering, not code.
2613 Node* base = argument(1); // type: oop
2614 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2615 offset = argument(2); // type: long
2616 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2617 // to be plain byte offsets, which are also the same as those accepted
2618 // by oopDesc::field_base.
2619 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2620 "fieldOffset must be byte-scaled");
2621 // 32-bit machines ignore the high half!
2622 offset = ConvL2X(offset);
2623 adr = make_unsafe_address(base, offset);
2624 heap_base_oop = base;
// The long offset occupies two argument slots (2 and 3), so a stored
// value, when present, sits in slot 4.
2625 val = is_store ? argument(4) : NULL;
2626 } else {
2627 Node* ptr = argument(1); // type: long
2628 ptr = ConvL2X(ptr); // adjust Java long to machine word
// Raw (native-pointer) form: no base oop; the two-slot long address puts
// a stored value, when present, in slot 3.
2629 adr = make_unsafe_address(NULL, ptr);
2630 val = is_store ? argument(3) : NULL;
2631 }
2632
2633 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2634
2635 // Try to categorize the address. If it comes up as TypeJavaPtr::BOTTOM,
2636 // there was not enough information to nail it down.
2637 Compile::AliasType* alias_type = C->alias_type(adr_type);
2638 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2639
2640 // Only field, array element or unknown locations are supported.
2641 if (alias_type->adr_type() != TypeRawPtr::BOTTOM &&
2642 alias_type->adr_type() != TypeOopPtr::BOTTOM &&
2643 alias_type->basic_type() == T_ILLEGAL) {
2644 return false;
2645 }
2646
2647 bool mismatched = false;
2648 BasicType bt = alias_type->basic_type();
2649 if (bt != T_ILLEGAL) {
2650 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2651 // Alias type doesn't differentiate between byte[] and boolean[]).
2652 // Use address type to get the element type.
2653 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2654 }
2655 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2656 // accessing an array field with getObject is not a mismatch
2657 bt = T_OBJECT;
2658 }
2659 if ((bt == T_OBJECT) != (type == T_OBJECT)) {
2660 // Don't intrinsify mismatched object accesses
2661 return false;
2662 }
// A primitive-width mismatch (field type vs. access type) is tolerated
// but remembered, so the access below is emitted as "mismatched".
2663 mismatched = (bt != type);
2664 }
2665
2666 // First guess at the value type.
2667 const Type *value_type = Type::get_const_basic_type(type);
2668
2669 // We will need memory barriers unless we can determine a unique
2670 // alias category for this reference. (Note: If for some reason
2671 // the barriers get omitted and the unsafe reference begins to "pollute"
2672 // the alias analysis of the rest of the graph, either Compile::can_alias
2673 // or Compile::must_alias will throw a diagnostic assert.)
2674 bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
2675
2676 // If we are reading the value of the referent field of a Reference
2677 // object (either by using Unsafe directly or through reflection)
2678 // then, if G1 is enabled, we need to record the referent in an
2679 // SATB log buffer using the pre-barrier mechanism.
2680 // Also we need to add memory barrier to prevent commoning reads
2681 // from this field across safepoint since GC can change its value.
2682 bool need_read_barrier = !is_native_ptr && !is_store &&
2683 offset != top() && heap_base_oop != top();
2684
// NOTE(review): the excerpt jumps from line 2685 to 2760 here -- the body
// of the load path is not shown in this chunk.
2685 if (!is_store && type == T_OBJECT) {
2760 // point is fine.
2761 set_result(p);
2762 } else {
2763 // place effect of store into memory
2764 switch (type) {
2765 case T_DOUBLE:
2766 val = dstore_rounding(val);
2767 break;
2768 case T_ADDRESS:
2769 // Repackage the long as a pointer.
2770 val = ConvL2X(val);
2771 val = _gvn.transform(new (C) CastX2PNode(val));
2772 break;
2773 }
2774
2775 MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
2776 if (type != T_OBJECT ) {
2777 (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile, unaligned, mismatched);
2778 } else {
2779 // Possibly an oop being stored to Java heap or native memory
// If the base oop's static type excludes NULL, the store is certainly
// into the Java heap, so emit the GC-barriered oop store unconditionally.
2780 if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
2781 // oop to Java heap.
2782 (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2783 } else {
2784 // We can't tell at compile time if we are storing in the Java heap or outside
2785 // of it. So we need to emit code to conditionally do the proper type of
2786 // store.
2787
2788 IdealKit ideal(this);
2789 #define __ ideal.
2790 // QQQ who knows what probability is here??
// Runtime dispatch: non-NULL base => heap store with oop write barriers;
// NULL base => raw store to native memory.
2791 __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2792 // Sync IdealKit and graphKit.
2793 sync_kit(ideal);
2794 Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2795 // Update IdealKit memory.
2796 __ sync_kit(this);
2797 } __ else_(); {
2798 __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile, mismatched);
2799 } __ end_if();
2800 // Final sync IdealKit and GraphKit.
|
// NOTE(review): mid-function excerpt; this appears to be a later revision of
// the same region as the preceding chunk (the heap/non-heap decision is
// hoisted into can_access_non_heap, and klass/range locations are rejected
// explicitly). The leading 26xx numbers are the excerpt's own numbering.
2613 Node* base = argument(1); // type: oop
2614 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
2615 offset = argument(2); // type: long
2616 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
2617 // to be plain byte offsets, which are also the same as those accepted
2618 // by oopDesc::field_base.
2619 assert(Unsafe_field_offset_to_byte_offset(11) == 11,
2620 "fieldOffset must be byte-scaled");
2621 // 32-bit machines ignore the high half!
2622 offset = ConvL2X(offset);
2623 adr = make_unsafe_address(base, offset);
2624 heap_base_oop = base;
// The long offset occupies two argument slots (2 and 3), so a stored
// value, when present, sits in slot 4.
2625 val = is_store ? argument(4) : NULL;
2626 } else {
2627 Node* ptr = argument(1); // type: long
2628 ptr = ConvL2X(ptr); // adjust Java long to machine word
// Raw (native-pointer) form: no base oop; the two-slot long address puts
// a stored value, when present, in slot 3.
2629 adr = make_unsafe_address(NULL, ptr);
2630 val = is_store ? argument(3) : NULL;
2631 }
2632
2633 // Can base be NULL? Otherwise, always on-heap access.
// True when heap_base_oop's type admits NULL, i.e. the access might target
// native (off-heap) memory. Decided once here and reused by the store path.
2634 bool can_access_non_heap = TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop));
2635
2636 const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
2637
2638 // Try to categorize the address.
2639 Compile::AliasType* alias_type = C->alias_type(adr_type);
2640 assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
2641
// Refuse metadata locations: the klass word (TypeInstPtr::KLASS) and the
// array-length slot (TypeAryPtr::RANGE) are not accessible through this
// intrinsic.
2642 if (alias_type->adr_type() == TypeInstPtr::KLASS ||
2643 alias_type->adr_type() == TypeAryPtr::RANGE) {
2644 return false; // not supported
2645 }
2646
2647 bool mismatched = false;
2648 BasicType bt = alias_type->basic_type();
2649 if (bt != T_ILLEGAL) {
2650 assert(alias_type->adr_type()->is_oopptr(), "should be on-heap access");
2651 if (bt == T_BYTE && adr_type->isa_aryptr()) {
2652 // Alias type doesn't differentiate between byte[] and boolean[]).
2653 // Use address type to get the element type.
2654 bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
2655 }
2656 if (bt == T_ARRAY || bt == T_NARROWOOP) {
2657 // accessing an array field with getObject is not a mismatch
2658 bt = T_OBJECT;
2659 }
2660 if ((bt == T_OBJECT) != (type == T_OBJECT)) {
2661 // Don't intrinsify mismatched object accesses
2662 return false;
2663 }
// A primitive-width mismatch (field type vs. access type) is tolerated
// but remembered, so the access below is emitted as "mismatched".
2664 mismatched = (bt != type);
2665 }
2666
2667 assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
2668
2669 // First guess at the value type.
2670 const Type *value_type = Type::get_const_basic_type(type);
2671
2672 // We will need memory barriers unless we can determine a unique
2673 // alias category for this reference. (Note: If for some reason
2674 // the barriers get omitted and the unsafe reference begins to "pollute"
2675 // the alias analysis of the rest of the graph, either Compile::can_alias
2676 // or Compile::must_alias will throw a diagnostic assert.)
2677 bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
2678
2679 // If we are reading the value of the referent field of a Reference
2680 // object (either by using Unsafe directly or through reflection)
2681 // then, if G1 is enabled, we need to record the referent in an
2682 // SATB log buffer using the pre-barrier mechanism.
2683 // Also we need to add memory barrier to prevent commoning reads
2684 // from this field across safepoint since GC can change its value.
2685 bool need_read_barrier = !is_native_ptr && !is_store &&
2686 offset != top() && heap_base_oop != top();
2687
// NOTE(review): the excerpt jumps from line 2688 to 2763 here -- the body
// of the load path is not shown in this chunk.
2688 if (!is_store && type == T_OBJECT) {
2763 // point is fine.
2764 set_result(p);
2765 } else {
2766 // place effect of store into memory
2767 switch (type) {
2768 case T_DOUBLE:
2769 val = dstore_rounding(val);
2770 break;
2771 case T_ADDRESS:
2772 // Repackage the long as a pointer.
2773 val = ConvL2X(val);
2774 val = _gvn.transform(new (C) CastX2PNode(val));
2775 break;
2776 }
2777
2778 MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
2779 if (type != T_OBJECT ) {
2780 (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile, unaligned, mismatched);
2781 } else {
2782 // Possibly an oop being stored to Java heap or native memory
// Hoisted decision (line 2634): base can't be NULL => certainly a heap
// store, so emit the GC-barriered oop store unconditionally.
2783 if (!can_access_non_heap) {
2784 // oop to Java heap.
2785 (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2786 } else {
2787 // We can't tell at compile time if we are storing in the Java heap or outside
2788 // of it. So we need to emit code to conditionally do the proper type of
2789 // store.
2790
2791 IdealKit ideal(this);
2792 #define __ ideal.
2793 // QQQ who knows what probability is here??
// Runtime dispatch: non-NULL base => heap store with oop write barriers;
// NULL base => raw store to native memory.
2794 __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
2795 // Sync IdealKit and graphKit.
2796 sync_kit(ideal);
2797 Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
2798 // Update IdealKit memory.
2799 __ sync_kit(this);
2800 } __ else_(); {
2801 __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile, mismatched);
2802 } __ end_if();
2803 // Final sync IdealKit and GraphKit.
|