src/share/vm/opto/memnode.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hotspot Sdiff src/share/vm/opto

src/share/vm/opto/memnode.hpp

Print this page
rev 8049 : castpp gcm
rev 9351 : 8142386: Octane crashes with assert(is_Load())
Summary: Unsafe access to an array is wrongly marked as mismatched
Reviewed-by:


 183   // Should LoadNode::Ideal() attempt to remove control edges?
 184   virtual bool can_remove_control() const;
 185   const Type* const _type;      // What kind of value is loaded?
 186 
      // NOTE(review): presumably searches the memory input chain for an ArrayCopy
      // that produces the value this load reads -- confirm against memnode.cpp.
 187   virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
 188 public:
 189 
      // Every load carries: control, memory, address, address type, the loaded
      // type, a memory-ordering constraint, and a control-dependency strength
      // (_depends_only_on_test is set only for DependsOnlyOnTest).
 190   LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
 191     : MemNode(c,mem,adr,at), _type(rt), _mo(mo), _depends_only_on_test(control_dependency == DependsOnlyOnTest) {
 192     init_class_id(Class_Load);
 193   }
      // Memory-ordering queries: a load is exactly one of unordered/acquire
      // (asserted below), so unordered is defined as not-acquire.
 194   inline bool is_unordered() const { return !is_acquire(); }
 195   inline bool is_acquire() const {
 196     assert(_mo == unordered || _mo == acquire, "unexpected");
 197     return _mo == acquire;
 198   }
 199 
 200   // Polymorphic factory method:
      // (selects the concrete LoadXNode subclass from BasicType bt)
 201    static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
 202                      const TypePtr* at, const Type *rt, BasicType bt,
 203                      MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest);

 204 
 205   virtual uint hash()   const;  // Check the type
 206 
 207   // Handle algebraic identities here.  If we have an identity, return the Node
 208   // we are equivalent to.  We look for Load of a Store.
 209   virtual Node *Identity( PhaseTransform *phase );
 210 
 211   // If the load is from Field memory and the pointer is non-null, it might be possible to
 212   // zero out the control input.
 213   // If the offset is constant and the base is an object allocation,
 214   // try to hook me up to the exact initializing store.
 215   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 216 
 217   // Split instance field load through Phi.
 218   Node* split_through_phi(PhaseGVN *phase);
 219 
 220   // Recover original value from boxed values
 221   Node *eliminate_autobox(PhaseGVN *phase);
 222 
 223   // Compute a new Type for this node.  Basically we just do the pre-check,


 358 // Load a long from memory
 359 class LoadLNode : public LoadNode {
      // Fold the atomicity flag into hash()/cmp() so GVN never commons an
      // atomic long load with a non-atomic one.
 360   virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
 361   virtual uint cmp( const Node &n ) const {
 362     return _require_atomic_access == ((LoadLNode&)n)._require_atomic_access
 363       && LoadNode::cmp(n);
 364   }
 365   virtual uint size_of() const { return sizeof(*this); }
 366   const bool _require_atomic_access;  // is piecewise load forbidden?
 367 
 368 public:
      // require_atomic_access defaults to false: an ordinary long load may be
      // split; pass true when the value must be read in one indivisible access.
 369   LoadLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeLong *tl,
 370             MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
 371     : LoadNode(c, mem, adr, at, tl, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
 372   virtual int Opcode() const;
 373   virtual uint ideal_reg() const { return Op_RegL; }
 374   virtual int store_Opcode() const { return Op_StoreL; }
 375   virtual BasicType memory_type() const { return T_LONG; }
 376   bool require_atomic_access() const { return _require_atomic_access; }
      // Factory for a long load with _require_atomic_access forced on.
 377   static LoadLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
 378                                 const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest);

 379 #ifndef PRODUCT
 380   virtual void dump_spec(outputStream *st) const {
 381     LoadNode::dump_spec(st);
 382     if (_require_atomic_access)  st->print(" Atomic!");
 383   }
 384 #endif
 385 };
 386 
 387 //------------------------------LoadL_unalignedNode----------------------------
 388 // Load a long from unaligned memory
 389 class LoadL_unalignedNode : public LoadLNode {
 390 public:
      // Loaded type is fixed to TypeLong::LONG; atomicity is not requested
      // (require_atomic_access keeps its inherited default of false).
 391   LoadL_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 392     : LoadLNode(c, mem, adr, at, TypeLong::LONG, mo, control_dependency) {}
 393   virtual int Opcode() const;
 394 };
 395 
 396 //------------------------------LoadFNode--------------------------------------
 397 // Load a float (32 bits) from memory
 398 class LoadFNode : public LoadNode {


 409 // Load a double (64 bits) from memory
 410 class LoadDNode : public LoadNode {
      // Fold the atomicity flag into hash()/cmp() so GVN never commons an
      // atomic double load with a non-atomic one.
 411   virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
 412   virtual uint cmp( const Node &n ) const {
 413     return _require_atomic_access == ((LoadDNode&)n)._require_atomic_access
 414       && LoadNode::cmp(n);
 415   }
 416   virtual uint size_of() const { return sizeof(*this); }
 417   const bool _require_atomic_access;  // is piecewise load forbidden?
 418 
 419 public:
      // require_atomic_access defaults to false: an ordinary double load may be
      // split; pass true when the value must be read in one indivisible access.
 420   LoadDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t,
 421             MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
 422     : LoadNode(c, mem, adr, at, t, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
 423   virtual int Opcode() const;
 424   virtual uint ideal_reg() const { return Op_RegD; }
 425   virtual int store_Opcode() const { return Op_StoreD; }
 426   virtual BasicType memory_type() const { return T_DOUBLE; }
 427   bool require_atomic_access() const { return _require_atomic_access; }
      // Factory for a double load with _require_atomic_access forced on.
 428   static LoadDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
 429                                 const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest);

 430 #ifndef PRODUCT
 431   virtual void dump_spec(outputStream *st) const {
 432     LoadNode::dump_spec(st);
 433     if (_require_atomic_access)  st->print(" Atomic!");
 434   }
 435 #endif
 436 };
 437 
 438 //------------------------------LoadD_unalignedNode----------------------------
 439 // Load a double from unaligned memory
 440 class LoadD_unalignedNode : public LoadDNode {
 441 public:
      // Loaded type is fixed to Type::DOUBLE; atomicity is not requested
      // (require_atomic_access keeps its inherited default of false).
 442   LoadD_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 443     : LoadDNode(c, mem, adr, at, Type::DOUBLE, mo, control_dependency) {}
 444   virtual int Opcode() const;
 445 };
 446 
 447 //------------------------------LoadPNode--------------------------------------
 448 // Load a pointer from memory (either object or array)
 449 class LoadPNode : public LoadNode {




 183   // Should LoadNode::Ideal() attempt to remove control edges?
 184   virtual bool can_remove_control() const;
 185   const Type* const _type;      // What kind of value is loaded?
 186 
      // NOTE(review): presumably searches the memory input chain for an ArrayCopy
      // that produces the value this load reads -- confirm against memnode.cpp.
 187   virtual Node* find_previous_arraycopy(PhaseTransform* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
 188 public:
 189 
      // Every load carries: control, memory, address, address type, the loaded
      // type, a memory-ordering constraint, and a control-dependency strength
      // (_depends_only_on_test is set only for DependsOnlyOnTest).
 190   LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
 191     : MemNode(c,mem,adr,at), _type(rt), _mo(mo), _depends_only_on_test(control_dependency == DependsOnlyOnTest) {
 192     init_class_id(Class_Load);
 193   }
      // Memory-ordering queries: a load is exactly one of unordered/acquire
      // (asserted below), so unordered is defined as not-acquire.
 194   inline bool is_unordered() const { return !is_acquire(); }
 195   inline bool is_acquire() const {
 196     assert(_mo == unordered || _mo == acquire, "unexpected");
 197     return _mo == acquire;
 198   }
 199 
 200   // Polymorphic factory method:
      // NOTE(review): 'unaligned'/'mismatched' arrive with the 8142386 fix (see
      // page header); both default to false, so existing callers are unchanged.
      // They presumably mark accesses that do not line up with the declared
      // field/element type -- confirm in memnode.cpp / library_call.cpp.
 201   static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
 202                     const TypePtr* at, const Type *rt, BasicType bt,
 203                     MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
 204                     bool unaligned = false, bool mismatched = false);
 205 
 206   virtual uint hash()   const;  // Check the type
 207 
 208   // Handle algebraic identities here.  If we have an identity, return the Node
 209   // we are equivalent to.  We look for Load of a Store.
 210   virtual Node *Identity( PhaseTransform *phase );
 211 
 212   // If the load is from Field memory and the pointer is non-null, it might be possible to
 213   // zero out the control input.
 214   // If the offset is constant and the base is an object allocation,
 215   // try to hook me up to the exact initializing store.
 216   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 217 
 218   // Split instance field load through Phi.
 219   Node* split_through_phi(PhaseGVN *phase);
 220 
 221   // Recover original value from boxed values
 222   Node *eliminate_autobox(PhaseGVN *phase);
 223 
 224   // Compute a new Type for this node.  Basically we just do the pre-check,


 359 // Load a long from memory
 360 class LoadLNode : public LoadNode {
      // Fold the atomicity flag into hash()/cmp() so GVN never commons an
      // atomic long load with a non-atomic one.
 361   virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
 362   virtual uint cmp( const Node &n ) const {
 363     return _require_atomic_access == ((LoadLNode&)n)._require_atomic_access
 364       && LoadNode::cmp(n);
 365   }
 366   virtual uint size_of() const { return sizeof(*this); }
 367   const bool _require_atomic_access;  // is piecewise load forbidden?
 368 
 369 public:
      // require_atomic_access defaults to false: an ordinary long load may be
      // split; pass true when the value must be read in one indivisible access.
 370   LoadLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeLong *tl,
 371             MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
 372     : LoadNode(c, mem, adr, at, tl, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
 373   virtual int Opcode() const;
 374   virtual uint ideal_reg() const { return Op_RegL; }
 375   virtual int store_Opcode() const { return Op_StoreL; }
 376   virtual BasicType memory_type() const { return T_LONG; }
 377   bool require_atomic_access() const { return _require_atomic_access; }
      // Factory for a long load with _require_atomic_access forced on.
      // NOTE(review): the new 'unaligned'/'mismatched' flags default to false
      // (8142386 fix, see page header), keeping existing callers source-compatible.
 378   static LoadLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
 379                                 const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
 380                                 bool unaligned = false, bool mismatched = false);
 381 #ifndef PRODUCT
 382   virtual void dump_spec(outputStream *st) const {
 383     LoadNode::dump_spec(st);
 384     if (_require_atomic_access)  st->print(" Atomic!");
 385   }
 386 #endif
 387 };
 388 
 389 //------------------------------LoadL_unalignedNode----------------------------
 390 // Load a long from unaligned memory
 391 class LoadL_unalignedNode : public LoadLNode {
 392 public:
      // Loaded type is fixed to TypeLong::LONG; atomicity is not requested
      // (require_atomic_access keeps its inherited default of false).
 393   LoadL_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 394     : LoadLNode(c, mem, adr, at, TypeLong::LONG, mo, control_dependency) {}
 395   virtual int Opcode() const;
 396 };
 397 
 398 //------------------------------LoadFNode--------------------------------------
 399 // Load a float (32 bits) from memory
 400 class LoadFNode : public LoadNode {


 411 // Load a double (64 bits) from memory
 412 class LoadDNode : public LoadNode {
      // Fold the atomicity flag into hash()/cmp() so GVN never commons an
      // atomic double load with a non-atomic one.
 413   virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
 414   virtual uint cmp( const Node &n ) const {
 415     return _require_atomic_access == ((LoadDNode&)n)._require_atomic_access
 416       && LoadNode::cmp(n);
 417   }
 418   virtual uint size_of() const { return sizeof(*this); }
 419   const bool _require_atomic_access;  // is piecewise load forbidden?
 420 
 421 public:
      // require_atomic_access defaults to false: an ordinary double load may be
      // split; pass true when the value must be read in one indivisible access.
 422   LoadDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t,
 423             MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
 424     : LoadNode(c, mem, adr, at, t, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
 425   virtual int Opcode() const;
 426   virtual uint ideal_reg() const { return Op_RegD; }
 427   virtual int store_Opcode() const { return Op_StoreD; }
 428   virtual BasicType memory_type() const { return T_DOUBLE; }
 429   bool require_atomic_access() const { return _require_atomic_access; }
      // Factory for a double load with _require_atomic_access forced on.
      // NOTE(review): the new 'unaligned'/'mismatched' flags default to false
      // (8142386 fix, see page header), keeping existing callers source-compatible.
 430   static LoadDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
 431                                 const Type* rt, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
 432                                 bool unaligned = false, bool mismatched = false);
 433 #ifndef PRODUCT
 434   virtual void dump_spec(outputStream *st) const {
 435     LoadNode::dump_spec(st);
 436     if (_require_atomic_access)  st->print(" Atomic!");
 437   }
 438 #endif
 439 };
 440 
 441 //------------------------------LoadD_unalignedNode----------------------------
 442 // Load a double from unaligned memory
 443 class LoadD_unalignedNode : public LoadDNode {
 444 public:
      // Loaded type is fixed to Type::DOUBLE; atomicity is not requested
      // (require_atomic_access keeps its inherited default of false).
 445   LoadD_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 446     : LoadDNode(c, mem, adr, at, Type::DOUBLE, mo, control_dependency) {}
 447   virtual int Opcode() const;
 448 };
 449 
 450 //------------------------------LoadPNode--------------------------------------
 451 // Load a pointer from memory (either object or array)
 452 class LoadPNode : public LoadNode {


src/share/vm/opto/memnode.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File