
src/share/vm/opto/library_call.cpp

rev 12619 : 8174164: SafePointNode::_replaced_nodes breaks with irreducible loops
Reviewed-by:
rev 12620 : 8174164: SafePointNode::_replaced_nodes breaks with irreducible loops
Reviewed-by:
rev 12681 : nio patch
rev 12683 : speculation for unsafe accesses
rev 12700 : 8176506: C2: loop unswitching and unsafe accesses cause crash
Reviewed-by:

@@ -45,10 +45,11 @@
 #include "opto/mulnode.hpp"
 #include "opto/narrowptrnode.hpp"
 #include "opto/opaquenode.hpp"
 #include "opto/parse.hpp"
 #include "opto/runtime.hpp"
+#include "opto/rootnode.hpp"
 #include "opto/subnode.hpp"
 #include "prims/nativeLookup.hpp"
 #include "prims/unsafe.hpp"
 #include "runtime/sharedRuntime.hpp"
 #ifdef TRACE_HAVE_INTRINSICS

@@ -236,12 +237,12 @@
   bool inline_math_subtractExactL(bool is_decrement);
   bool inline_min_max(vmIntrinsics::ID id);
   bool inline_notify(vmIntrinsics::ID id);
   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
   // This returns Type::AnyPtr, RawPtr, or OopPtr.
-  int classify_unsafe_addr(Node* &base, Node* &offset);
-  Node* make_unsafe_address(Node* base, Node* offset);
+  int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
+  Node* make_unsafe_address(Node*& base, Node* offset, BasicType type);
   // Helper for inline_unsafe_access.
   // Generates the guards that check whether the result of
   // Unsafe.getObject should be recorded in an SATB log buffer.
   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
 

@@ -2045,11 +2046,11 @@
   }
   */
 }
 
 inline int
-LibraryCallKit::classify_unsafe_addr(Node* &base, Node* &offset) {
+LibraryCallKit::classify_unsafe_addr(Node* &base, Node* &offset, BasicType type) {
   const TypePtr* base_type = TypePtr::NULL_PTR;
   if (base != NULL)  base_type = _gvn.type(base)->isa_ptr();
   if (base_type == NULL) {
     // Unknown type.
     return Type::AnyPtr;

@@ -2070,24 +2071,35 @@
     if (offset_type != NULL &&
         base_type->offset() == 0 &&     // (should always be?)
         offset_type->_lo >= 0 &&
         !MacroAssembler::needs_explicit_null_check(offset_type->_hi)) {
       return Type::OopPtr;
+    } else if (type == T_OBJECT) {
+      // An off-heap access to an oop doesn't make any sense. It has to be
+      // on-heap.
+      return Type::OopPtr;
     }
     // Otherwise, it might either be oop+off or NULL+addr.
     return Type::AnyPtr;
   } else {
     // No information:
     return Type::AnyPtr;
   }
 }
 
-inline Node* LibraryCallKit::make_unsafe_address(Node* base, Node* offset) {
-  int kind = classify_unsafe_addr(base, offset);
+inline Node* LibraryCallKit::make_unsafe_address(Node*& base, Node* offset, BasicType type) {
+  int kind = classify_unsafe_addr(base, offset, type);
   if (kind == Type::RawPtr) {
     return basic_plus_adr(top(), base, offset);
+  } else if (kind == Type::AnyPtr) {
+    // We don't know whether this is an on-heap or off-heap access. Fall
+    // back to a raw memory access.
+    Node* raw = _gvn.transform(new CheckCastPPNode(control(), base, TypeRawPtr::BOTTOM));
+    return basic_plus_adr(top(), raw, offset);
   } else {
+    // We know it's an on-heap access, so base can't be null.
+    base = must_be_not_null(base, true);
     return basic_plus_adr(base, offset);
   }
 }
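
The new BasicType argument gives the classification a hint about what is being accessed: a T_OBJECT access can only target the Java heap, so an oop base whose offset cannot be bounded is still classified as on-heap. make_unsafe_address then builds the address node to match: a RawPtr address is formed off top(), a provable on-heap access first null-checks the base (base is now passed by reference so the caller keeps the strengthened node), and the remaining AnyPtr cases cast the base to TypeRawPtr::BOTTOM and go through raw memory. A minimal standalone sketch of the classification, using made-up names (AddrKind, classify) rather than HotSpot code:

  #include <cstdio>

  enum AddrKind { AnyPtr, RawPtr, OopPtr };

  // base_is_null_const stands in for a TypePtr::NULL_PTR base,
  // offset_provably_small for an offset that can never need an explicit
  // null check, and is_oop_access for type == T_OBJECT.
  static AddrKind classify(bool base_type_known, bool base_is_null_const,
                           bool base_is_oop, bool offset_provably_small,
                           bool is_oop_access) {
    if (!base_type_known)   return AnyPtr;      // no information about the base
    if (base_is_null_const) return RawPtr;      // NULL + addr: off-heap access
    if (base_is_oop) {
      if (offset_provably_small) return OopPtr; // oop + small offset: on-heap
      if (is_oop_access)         return OopPtr; // oops can only live on the heap
    }
    return AnyPtr;                              // might be oop+off or NULL+addr
  }

  int main() {
    // Unsafe.getObject(o, off): classified on-heap even with an unbounded
    // offset, because the value being accessed is an oop.
    std::printf("%d\n", classify(true, false, true, false, true));   // OopPtr
    // Unsafe.getLong(null, addr): the base is the NULL constant, so off-heap.
    std::printf("%d\n", classify(true, true, false, false, false));  // RawPtr
    // An oop base with an unbounded offset and a non-oop type stays AnyPtr
    // and falls back to raw memory.
    std::printf("%d\n", classify(true, false, true, false, false));  // AnyPtr
    return 0;
  }
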
 
 //--------------------------inline_number_methods-----------------------------

@@ -2315,11 +2327,11 @@
   // by oopDesc::field_base.
   assert(Unsafe_field_offset_to_byte_offset(11) == 11,
          "fieldOffset must be byte-scaled");
   // 32-bit machines ignore the high half!
   offset = ConvL2X(offset);
-  adr = make_unsafe_address(base, offset);
+  adr = make_unsafe_address(base, offset, type);
   if (_gvn.type(base)->isa_ptr() != TypePtr::NULL_PTR) {
     heap_base_oop = base;
   } else if (type == T_OBJECT) {
     return false; // off-heap oop accesses are not supported
   }

@@ -2723,11 +2735,11 @@
   // to be plain byte offsets, which are also the same as those accepted
   // by oopDesc::field_base.
   assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
   // 32-bit machines ignore the high half of long offsets
   offset = ConvL2X(offset);
-  Node* adr = make_unsafe_address(base, offset);
+  Node* adr = make_unsafe_address(base, offset, type);
   const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
 
   Compile::AliasType* alias_type = C->alias_type(adr_type);
   BasicType bt = alias_type->basic_type();
   if (bt != T_ILLEGAL &&

@@ -4489,12 +4501,12 @@
   Node* size    = ConvL2X(argument(7));  // type: long
 
   assert(Unsafe_field_offset_to_byte_offset(11) == 11,
          "fieldOffset must be byte-scaled");
 
-  Node* src = make_unsafe_address(src_ptr, src_off);
-  Node* dst = make_unsafe_address(dst_ptr, dst_off);
+  Node* src = make_unsafe_address(src_ptr, src_off, T_ILLEGAL);
+  Node* dst = make_unsafe_address(dst_ptr, dst_off, T_ILLEGAL);
 
   // Conservatively insert a memory barrier on all memory slices.
   // Do not let writes of the copy source or destination float below the copy.
   insert_mem_bar(Op_MemBarCPUOrder);
 

@@ -5653,12 +5665,12 @@
   }
 
   Node* call;
   jvms()->set_should_reexecute(true);
 
-  Node* obja_adr = make_unsafe_address(obja, aoffset);
-  Node* objb_adr = make_unsafe_address(objb, boffset);
+  Node* obja_adr = make_unsafe_address(obja, aoffset, T_ILLEGAL);
+  Node* objb_adr = make_unsafe_address(objb, boffset, T_ILLEGAL);
 
   call = make_runtime_call(RC_LEAF,
     OptoRuntime::vectorizedMismatch_Type(),
     stubAddr, stubName, TypePtr::BOTTOM,
     obja_adr, objb_adr, length, scale);
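
The copyMemory and vectorizedMismatch intrinsics have no Java-level element type for the access, so those call sites pass T_ILLEGAL; in terms of the sketch above, that is a call with is_oop_access == false, which leaves a base of unknown heap residency on the conservative raw-memory (AnyPtr) path.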