< prev index next >

src/share/vm/opto/memnode.hpp

Print this page




       // Construct a load of result type 'rt' at address 'adr' (alias class 'at'),
       // guarded by control 'c' and memory state 'mem'.  'mo' is the required
       // memory ordering; whether the node's placement depends only on a test
       // is cached from 'control_dependency'.
 190   LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
 191     : MemNode(c,mem,adr,at), _type(rt), _mo(mo), _depends_only_on_test(control_dependency == DependsOnlyOnTest) {
 192     init_class_id(Class_Load);
 193   }
       // A load is unordered exactly when it does not carry acquire semantics.
 194   inline bool is_unordered() const { return !is_acquire(); }
 195   inline bool is_acquire() const {
       // Loads are only ever created as 'unordered' or 'acquire'.
 196     assert(_mo == unordered || _mo == acquire, "unexpected");
 197     return _mo == acquire;
 198   }
 199 
 200   // Polymorphic factory method:
 201   static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
 202                     const TypePtr* at, const Type *rt, BasicType bt,
 203                     MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
 204                     bool unaligned = false, bool mismatched = false);
 205 
 206   virtual uint hash()   const;  // Check the type
 207 
 208   // Handle algebraic identities here.  If we have an identity, return the Node
 209   // we are equivalent to.  We look for Load of a Store.
 210   virtual Node *Identity( PhaseTransform *phase );
 211 
 212   // If the load is from Field memory and the pointer is non-null, it might be possible to
 213   // zero out the control input.
 214   // If the offset is constant and the base is an object allocation,
 215   // try to hook me up to the exact initializing store.
 216   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 217 
 218   // Split instance field load through Phi.
 219   Node* split_through_phi(PhaseGVN *phase);
 220 
 221   // Recover original value from boxed values
 222   Node *eliminate_autobox(PhaseGVN *phase);
 223 
 224   // Compute a new Type for this node.  Basically we just do the pre-check,
 225   // then call the virtual add() to set the type.
 226   virtual const Type *Value( PhaseTransform *phase ) const;
 227 
 228   // Common methods for LoadKlass and LoadNKlass nodes.
 229   const Type *klass_value_common( PhaseTransform *phase ) const;
 230   Node *klass_identity_common( PhaseTransform *phase );
 231 
 232   virtual uint ideal_reg() const;
 233   virtual const Type *bottom_type() const;
 234   // Following method is copied from TypeNode:
       // Overwrite this load's result type in place (pattern copied from
       // TypeNode).  In debug builds, verifies that changing the type does not
       // change the node's hash, so any hash-table membership stays valid.
 235   void set_type(const Type* t) {
 236     assert(t != NULL, "sanity");
 237     debug_only(uint check_hash = (VerifyHashTableKeys && _hash_lock) ? hash() : NO_HASH);
 238     *(const Type**)&_type = t;   // cast away const-ness
 239     // If this node is in the hash table, make sure it doesn't need a rehash.
 240     assert(check_hash == NO_HASH || check_hash == hash(), "type change must preserve hash code");
 241   }
       // The loaded value's type; never NULL after construction.
 242   const Type* type() const { assert(_type != NULL, "sanity"); return _type; };
 243 
 244   // Do not match memory edge
 245   virtual uint match_edge(uint idx) const;
 246 
 247   // Map a load opcode to its corresponding store opcode.
 248   virtual int store_Opcode() const = 0;
 249 
 250   // Check if the load's memory input is a Phi node with the same control.


 267   // true to enable key hoisting & commoning optimizations.  However, for the
 268   // special case of RawPtr loads from TLS top & end, and other loads performed by
 269   // GC barriers, the control edge carries the dependence preventing hoisting past
 270   // a Safepoint instead of the memory edge.  (An unfortunate consequence of having
 271   // Safepoints not set Raw Memory; itself an unfortunate consequence of having Nodes
 272   // which produce results (new raw memory state) inside of loops preventing all
 273   // manner of other optimizations).  Basically, it's ugly but so is the alternative.
 274   // See comment in macro.cpp, around line 125 expand_allocate_common().
 275   virtual bool depends_only_on_test() const { return adr_type() != TypeRawPtr::BOTTOM && _depends_only_on_test; }
 276 };
 277 
 278 //------------------------------LoadBNode--------------------------------------
 279 // Load a byte (8bits signed) from memory
 280 class LoadBNode : public LoadNode {
 281 public:
 282   LoadBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 283     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 284   virtual int Opcode() const;
 285   virtual uint ideal_reg() const { return Op_RegI; }
 286   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 287   virtual const Type *Value(PhaseTransform *phase) const;
 288   virtual int store_Opcode() const { return Op_StoreB; }
 289   virtual BasicType memory_type() const { return T_BYTE; }
 290 };
 291 
 292 //------------------------------LoadUBNode-------------------------------------
 293 // Load a unsigned byte (8bits unsigned) from memory
 294 class LoadUBNode : public LoadNode {
 295 public:
 296   LoadUBNode(Node* c, Node* mem, Node* adr, const TypePtr* at, const TypeInt* ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 297     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 298   virtual int Opcode() const;
 299   virtual uint ideal_reg() const { return Op_RegI; }
 300   virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
 301   virtual const Type *Value(PhaseTransform *phase) const;
 302   virtual int store_Opcode() const { return Op_StoreB; }
 303   virtual BasicType memory_type() const { return T_BYTE; }
 304 };
 305 
 306 //------------------------------LoadUSNode-------------------------------------
 307 // Load an unsigned short/char (16bits unsigned) from memory
 308 class LoadUSNode : public LoadNode {
 309 public:
 310   LoadUSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 311     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 312   virtual int Opcode() const;
 313   virtual uint ideal_reg() const { return Op_RegI; }
 314   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 315   virtual const Type *Value(PhaseTransform *phase) const;
 316   virtual int store_Opcode() const { return Op_StoreC; }
 317   virtual BasicType memory_type() const { return T_CHAR; }
 318 };
 319 
 320 //------------------------------LoadSNode--------------------------------------
 321 // Load a short (16bits signed) from memory
 322 class LoadSNode : public LoadNode {
 323 public:
 324   LoadSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 325     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 326   virtual int Opcode() const;
 327   virtual uint ideal_reg() const { return Op_RegI; }
 328   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 329   virtual const Type *Value(PhaseTransform *phase) const;
 330   virtual int store_Opcode() const { return Op_StoreC; }
 331   virtual BasicType memory_type() const { return T_SHORT; }
 332 };
 333 
 334 //------------------------------LoadINode--------------------------------------
 335 // Load an integer from memory
 336 class LoadINode : public LoadNode {
 337 public:
 338   LoadINode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 339     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 340   virtual int Opcode() const;
 341   virtual uint ideal_reg() const { return Op_RegI; }
 342   virtual int store_Opcode() const { return Op_StoreI; }
 343   virtual BasicType memory_type() const { return T_INT; }
 344 };
 345 
 346 //------------------------------LoadRangeNode----------------------------------
 347 // Load an array length from the array
 348 class LoadRangeNode : public LoadINode {
 349 public:
 350   LoadRangeNode(Node *c, Node *mem, Node *adr, const TypeInt *ti = TypeInt::POS)
 351     : LoadINode(c, mem, adr, TypeAryPtr::RANGE, ti, MemNode::unordered) {}
 352   virtual int Opcode() const;
 353   virtual const Type *Value( PhaseTransform *phase ) const;
 354   virtual Node *Identity( PhaseTransform *phase );
 355   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 356 };
 357 
 358 //------------------------------LoadLNode--------------------------------------
 359 // Load a long from memory
 360 class LoadLNode : public LoadNode {
 361   virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
 362   virtual uint cmp( const Node &n ) const {
 363     return _require_atomic_access == ((LoadLNode&)n)._require_atomic_access
 364       && LoadNode::cmp(n);
 365   }
 366   virtual uint size_of() const { return sizeof(*this); }
 367   const bool _require_atomic_access;  // is piecewise load forbidden?
 368 
 369 public:
 370   LoadLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeLong *tl,
 371             MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
 372     : LoadNode(c, mem, adr, at, tl, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
 373   virtual int Opcode() const;
 374   virtual uint ideal_reg() const { return Op_RegL; }


 466 public:
 467   LoadNNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const Type* t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 468     : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
 469   virtual int Opcode() const;
 470   virtual uint ideal_reg() const { return Op_RegN; }
 471   virtual int store_Opcode() const { return Op_StoreN; }
 472   virtual BasicType memory_type() const { return T_NARROWOOP; }
 473 };
 474 
 475 //------------------------------LoadKlassNode----------------------------------
 476 // Load a Klass from an object
 477 class LoadKlassNode : public LoadPNode {
 478 protected:
 479   // In most cases, LoadKlassNode does not have the control input set. If the control
 480   // input is set, it must not be removed (by LoadNode::Ideal()).
 481   virtual bool can_remove_control() const;
 482 public:
 483   LoadKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeKlassPtr *tk, MemOrd mo)
 484     : LoadPNode(c, mem, adr, at, tk, mo) {}
 485   virtual int Opcode() const;
 486   virtual const Type *Value( PhaseTransform *phase ) const;
 487   virtual Node *Identity( PhaseTransform *phase );
 488   virtual bool depends_only_on_test() const { return true; }
 489 
 490   // Polymorphic factory method:
 491   static Node* make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* at,
 492                     const TypeKlassPtr* tk = TypeKlassPtr::OBJECT);
 493 };
 494 
 495 //------------------------------LoadNKlassNode---------------------------------
 496 // Load a narrow Klass from an object.
 497 class LoadNKlassNode : public LoadNNode {
 498 public:
 499   LoadNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeNarrowKlass *tk, MemOrd mo)
 500     : LoadNNode(c, mem, adr, at, tk, mo) {}
 501   virtual int Opcode() const;
 502   virtual uint ideal_reg() const { return Op_RegN; }
 503   virtual int store_Opcode() const { return Op_StoreNKlass; }
 504   virtual BasicType memory_type() const { return T_NARROWKLASS; }
 505 
 506   virtual const Type *Value( PhaseTransform *phase ) const;
 507   virtual Node *Identity( PhaseTransform *phase );
 508   virtual bool depends_only_on_test() const { return true; }
 509 };
 510 
 511 
 512 //------------------------------StoreNode--------------------------------------
 513 // Store value; requires Store, Address and Value
 514 class StoreNode : public MemNode {
 515 private:
 516   // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
 517   // stores that can be reordered, and such requiring release semantics to
 518   // adhere to the Java specification.  The required behaviour is stored in
 519   // this field.
 520   const MemOrd _mo;
 521   // Needed for proper cloning.
 522   virtual uint size_of() const { return sizeof(*this); }
 523 protected:
 524   virtual uint cmp( const Node &n ) const;
 525   virtual bool depends_only_on_test() const { return false; }
 526 
 527   Node *Ideal_masked_input       (PhaseGVN *phase, uint mask);


 564   }
 565 
 566   // Polymorphic factory method
 567   //
 568   // We must ensure that stores of object references will be visible
 569   // only after the object's initialization. So the callers of this
 570   // procedure must indicate that the store requires `release'
 571   // semantics, if the stored value is an object reference that might
 572   // point to a new object and may become externally visible.
 573   static StoreNode* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
 574                          const TypePtr* at, Node *val, BasicType bt, MemOrd mo);
 575 
 576   virtual uint hash() const;    // Check the type
 577 
 578   // If the store is to Field memory and the pointer is non-null, we can
 579   // zero out the control input.
 580   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 581 
 582   // Compute a new Type for this node.  Basically we just do the pre-check,
 583   // then call the virtual add() to set the type.
 584   virtual const Type *Value( PhaseTransform *phase ) const;
 585 
 586   // Check for identity function on memory (Load then Store at same address)
 587   virtual Node *Identity( PhaseTransform *phase );
 588 
 589   // Do not match memory edge
 590   virtual uint match_edge(uint idx) const;
 591 
 592   virtual const Type *bottom_type() const;  // returns Type::MEMORY
 593 
 594   // Map a store opcode to its corresponding own opcode, trivially.
 595   virtual int store_Opcode() const { return Opcode(); }
 596 
 597   // have all possible loads of the value stored been optimized away?
 598   bool value_never_loaded(PhaseTransform *phase) const;
 599 };
 600 
 601 //------------------------------StoreBNode-------------------------------------
 602 // Store byte to memory
 603 class StoreBNode : public StoreNode {
 604 public:
 605   StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
 606     : StoreNode(c, mem, adr, at, val, mo) {}
 607   virtual int Opcode() const;


 729 // Preceeding equivalent StoreCMs may be eliminated.
 730 class StoreCMNode : public StoreNode {
 731  private:
 732   virtual uint hash() const { return StoreNode::hash() + _oop_alias_idx; }
 733   virtual uint cmp( const Node &n ) const {
 734     return _oop_alias_idx == ((StoreCMNode&)n)._oop_alias_idx
 735       && StoreNode::cmp(n);
 736   }
 737   virtual uint size_of() const { return sizeof(*this); }
 738   int _oop_alias_idx;   // The alias_idx of OopStore
 739 
 740 public:
 741   StoreCMNode( Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, int oop_alias_idx ) :
 742     StoreNode(c, mem, adr, at, val, oop_store, MemNode::release),
 743     _oop_alias_idx(oop_alias_idx) {
 744     assert(_oop_alias_idx >= Compile::AliasIdxRaw ||
 745            _oop_alias_idx == Compile::AliasIdxBot && Compile::current()->AliasLevel() == 0,
 746            "bad oop alias idx");
 747   }
 748   virtual int Opcode() const;
 749   virtual Node *Identity( PhaseTransform *phase );
 750   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 751   virtual const Type *Value( PhaseTransform *phase ) const;
 752   virtual BasicType memory_type() const { return T_VOID; } // unspecific
 753   int oop_alias_idx() const { return _oop_alias_idx; }
 754 };
 755 
 756 //------------------------------LoadPLockedNode---------------------------------
 757 // Load-locked a pointer from memory (either object or array).
 758 // On Sparc & Intel this is implemented as a normal pointer load.
 759 // On PowerPC and friends it's a real load-locked.
 760 class LoadPLockedNode : public LoadPNode {
 761 public:
 762   LoadPLockedNode(Node *c, Node *mem, Node *adr, MemOrd mo)
 763     : LoadPNode(c, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, mo) {}
 764   virtual int Opcode() const;
 765   virtual int store_Opcode() const { return Op_StorePConditional; }
 766   virtual bool depends_only_on_test() const { return true; }
 767 };
 768 
 769 //------------------------------SCMemProjNode---------------------------------------
 770 // This class defines a projection of the memory  state of a store conditional node.
 771 // These nodes return a value, but also update memory.
 772 class SCMemProjNode : public ProjNode {
 773 public:
 774   enum {SCMEMPROJCON = (uint)-2};
 775   SCMemProjNode( Node *src) : ProjNode( src, SCMEMPROJCON) { }
 776   virtual int Opcode() const;
 777   virtual bool      is_CFG() const  { return false; }
 778   virtual const Type *bottom_type() const {return Type::MEMORY;}
 779   virtual const TypePtr *adr_type() const {
 780     Node* ctrl = in(0);
 781     if (ctrl == NULL)  return NULL; // node is dead
 782     return ctrl->in(MemNode::Memory)->adr_type();
 783   }
 784   virtual uint ideal_reg() const { return 0;} // memory projections don't have a register
 785   virtual const Type *Value( PhaseTransform *phase ) const;
 786 #ifndef PRODUCT
 787   virtual void dump_spec(outputStream *st) const {};
 788 #endif
 789 };
 790 
 791 //------------------------------LoadStoreNode---------------------------
 792 // Note: is_Mem() method returns 'true' for this class.
 793 class LoadStoreNode : public Node {
 794 private:
 795   const Type* const _type;      // What kind of value is loaded?
 796   const TypePtr* _adr_type;     // What kind of memory is being addressed?
 797   virtual uint size_of() const; // Size is bigger
 798 public:
 799   LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required );
 800   virtual bool depends_only_on_test() const { return false; }
 801   virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }
 802 
 803   virtual const Type *bottom_type() const { return _type; }
 804   virtual uint ideal_reg() const;
 805   virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address


 917 
 918 //------------------------------GetAndSetNNode---------------------------
 919 class GetAndSetNNode : public LoadStoreNode {
 920 public:
 921   GetAndSetNNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
 922   virtual int Opcode() const;
 923 };
 924 
 925 //------------------------------ClearArray-------------------------------------
 926 class ClearArrayNode: public Node {
 927 public:
 928   ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base )
 929     : Node(ctrl,arymem,word_cnt,base) {
 930     init_class_id(Class_ClearArray);
 931   }
 932   virtual int         Opcode() const;
 933   virtual const Type *bottom_type() const { return Type::MEMORY; }
 934   // ClearArray modifies array elements, and so affects only the
 935   // array memory addressed by the bottom_type of its base address.
 936   virtual const class TypePtr *adr_type() const;
 937   virtual Node *Identity( PhaseTransform *phase );
 938   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 939   virtual uint match_edge(uint idx) const;
 940 
 941   // Clear the given area of an object or array.
 942   // The start offset must always be aligned mod BytesPerInt.
 943   // The end offset must always be aligned mod BytesPerLong.
 944   // Return the new memory.
 945   static Node* clear_memory(Node* control, Node* mem, Node* dest,
 946                             intptr_t start_offset,
 947                             intptr_t end_offset,
 948                             PhaseGVN* phase);
 949   static Node* clear_memory(Node* control, Node* mem, Node* dest,
 950                             intptr_t start_offset,
 951                             Node* end_offset,
 952                             PhaseGVN* phase);
 953   static Node* clear_memory(Node* control, Node* mem, Node* dest,
 954                             Node* start_offset,
 955                             Node* end_offset,
 956                             PhaseGVN* phase);
 957   // Return allocation input memory edge if it is different instance


 966 // volatile-load.  Monitor-exit and volatile-store act as Release: no
 967 // preceding ref can be moved to after them.  We insert a MemBar-Release
 968 // before a FastUnlock or volatile-store.  All volatiles need to be
 969 // serialized, so we follow all volatile-stores with a MemBar-Volatile to
 970 // separate it from any following volatile-load.
 971 class MemBarNode: public MultiNode {
 972   virtual uint hash() const ;                  // { return NO_HASH; }
 973   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
 974 
 975   virtual uint size_of() const { return sizeof(*this); }
 976   // Memory type this node is serializing.  Usually either rawptr or bottom.
 977   const TypePtr* _adr_type;
 978 
 979 public:
 980   enum {
 981     Precedent = TypeFunc::Parms  // optional edge to force precedence
 982   };
 983   MemBarNode(Compile* C, int alias_idx, Node* precedent);
 984   virtual int Opcode() const = 0;
 985   virtual const class TypePtr *adr_type() const { return _adr_type; }
 986   virtual const Type *Value( PhaseTransform *phase ) const;
 987   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 988   virtual uint match_edge(uint idx) const { return 0; }
 989   virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
 990   virtual Node *match( const ProjNode *proj, const Matcher *m );
 991   // Factory method.  Builds a wide or narrow membar.
 992   // Optional 'precedent' becomes an extra edge if not null.
 993   static MemBarNode* make(Compile* C, int opcode,
 994                           int alias_idx = Compile::AliasIdxBot,
 995                           Node* precedent = NULL);
 996 };
 997 
 998 // "Acquire" - no following ref can move before (but earlier refs can
 999 // follow, like an early Load stalled in cache).  Requires multi-cpu
1000 // visibility.  Inserted after a volatile load.
1001 class MemBarAcquireNode: public MemBarNode {
1002 public:
1003   MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent)
1004     : MemBarNode(C, alias_idx, precedent) {}
1005   virtual int Opcode() const;
1006 };


1182                                PhaseGVN* phase);
1183 
1184   intptr_t find_next_fullword_store(uint i, PhaseGVN* phase);
1185 };
1186 
1187 //------------------------------MergeMem---------------------------------------
1188 // (See comment in memnode.cpp near MergeMemNode::MergeMemNode for semantics.)
1189 class MergeMemNode: public Node {
1190   virtual uint hash() const ;                  // { return NO_HASH; }
1191   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
1192   friend class MergeMemStream;
1193   MergeMemNode(Node* def);  // clients use MergeMemNode::make
1194 
1195 public:
1196   // If the input is a whole memory state, clone it with all its slices intact.
1197   // Otherwise, make a new memory state with just that base memory input.
1198   // In either case, the result is a newly created MergeMem.
1199   static MergeMemNode* make(Node* base_memory);
1200 
1201   virtual int Opcode() const;
1202   virtual Node *Identity( PhaseTransform *phase );
1203   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1204   virtual uint ideal_reg() const { return NotAMachineReg; }
1205   virtual uint match_edge(uint idx) const { return 0; }
1206   virtual const RegMask &out_RegMask() const;
1207   virtual const Type *bottom_type() const { return Type::MEMORY; }
1208   virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
1209   // sparse accessors
1210   // Fetch the previously stored "set_memory_at", or else the base memory.
1211   // (Caller should clone it if it is a phi-nest.)
1212   Node* memory_at(uint alias_idx) const;
1213   // set the memory, regardless of its previous value
1214   void set_memory_at(uint alias_idx, Node* n);
1215   // the "base" is the memory that provides the non-finite support
1216   Node* base_memory() const       { return in(Compile::AliasIdxBot); }
1217   // warning: setting the base can implicitly set any of the other slices too
1218   void set_base_memory(Node* def);
1219   // sentinel value which denotes a copy of the base memory:
1220   Node*   empty_memory() const    { return in(Compile::AliasIdxTop); }
1221   static Node* make_empty_memory(); // where the sentinel comes from
1222   bool is_empty_memory(Node* n) const { assert((n == empty_memory()) == n->is_top(), "sanity"); return n->is_top(); }




       // Construct a load of result type 'rt' at address 'adr' (alias class 'at'),
       // guarded by control 'c' and memory state 'mem'.  'mo' is the required
       // memory ordering; whether the node's placement depends only on a test
       // is cached from 'control_dependency'.
 190   LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
 191     : MemNode(c,mem,adr,at), _type(rt), _mo(mo), _depends_only_on_test(control_dependency == DependsOnlyOnTest) {
 192     init_class_id(Class_Load);
 193   }
       // A load is unordered exactly when it does not carry acquire semantics.
 194   inline bool is_unordered() const { return !is_acquire(); }
 195   inline bool is_acquire() const {
       // Loads are only ever created as 'unordered' or 'acquire'.
 196     assert(_mo == unordered || _mo == acquire, "unexpected");
 197     return _mo == acquire;
 198   }
 199 
 200   // Polymorphic factory method:
 201   static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
 202                     const TypePtr* at, const Type *rt, BasicType bt,
 203                     MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
 204                     bool unaligned = false, bool mismatched = false);
 205 
 206   virtual uint hash()   const;  // Check the type
 207 
 208   // Handle algebraic identities here.  If we have an identity, return the Node
 209   // we are equivalent to.  We look for Load of a Store.
 210   virtual Node* Identity(PhaseGVN* phase);
 211 
 212   // If the load is from Field memory and the pointer is non-null, it might be possible to
 213   // zero out the control input.
 214   // If the offset is constant and the base is an object allocation,
 215   // try to hook me up to the exact initializing store.
 216   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 217 
 218   // Split instance field load through Phi.
 219   Node* split_through_phi(PhaseGVN *phase);
 220 
 221   // Recover original value from boxed values
 222   Node *eliminate_autobox(PhaseGVN *phase);
 223 
 224   // Compute a new Type for this node.  Basically we just do the pre-check,
 225   // then call the virtual add() to set the type.
 226   virtual const Type* Value(PhaseGVN* phase) const;
 227 
 228   // Common methods for LoadKlass and LoadNKlass nodes.
 229   const Type* klass_value_common(PhaseGVN* phase) const;
 230   Node* klass_identity_common(PhaseGVN* phase);
 231 
 232   virtual uint ideal_reg() const;
 233   virtual const Type *bottom_type() const;
 234   // Following method is copied from TypeNode:
       // Overwrite this load's result type in place (pattern copied from
       // TypeNode).  In debug builds, verifies that changing the type does not
       // change the node's hash, so any hash-table membership stays valid.
 235   void set_type(const Type* t) {
 236     assert(t != NULL, "sanity");
 237     debug_only(uint check_hash = (VerifyHashTableKeys && _hash_lock) ? hash() : NO_HASH);
 238     *(const Type**)&_type = t;   // cast away const-ness
 239     // If this node is in the hash table, make sure it doesn't need a rehash.
 240     assert(check_hash == NO_HASH || check_hash == hash(), "type change must preserve hash code");
 241   }
       // The loaded value's type; never NULL after construction.
 242   const Type* type() const { assert(_type != NULL, "sanity"); return _type; };
 243 
 244   // Do not match memory edge
 245   virtual uint match_edge(uint idx) const;
 246 
 247   // Map a load opcode to its corresponding store opcode.
 248   virtual int store_Opcode() const = 0;
 249 
 250   // Check if the load's memory input is a Phi node with the same control.


 267   // true to enable key hoisting & commoning optimizations.  However, for the
 268   // special case of RawPtr loads from TLS top & end, and other loads performed by
 269   // GC barriers, the control edge carries the dependence preventing hoisting past
 270   // a Safepoint instead of the memory edge.  (An unfortunate consequence of having
 271   // Safepoints not set Raw Memory; itself an unfortunate consequence of having Nodes
 272   // which produce results (new raw memory state) inside of loops preventing all
 273   // manner of other optimizations).  Basically, it's ugly but so is the alternative.
 274   // See comment in macro.cpp, around line 125 expand_allocate_common().
 275   virtual bool depends_only_on_test() const { return adr_type() != TypeRawPtr::BOTTOM && _depends_only_on_test; }
 276 };
 277 
 278 //------------------------------LoadBNode--------------------------------------
 279 // Load a byte (8bits signed) from memory
 280 class LoadBNode : public LoadNode {
 281 public:
 282   LoadBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 283     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 284   virtual int Opcode() const;
 285   virtual uint ideal_reg() const { return Op_RegI; }
 286   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 287   virtual const Type* Value(PhaseGVN* phase) const;
 288   virtual int store_Opcode() const { return Op_StoreB; }
 289   virtual BasicType memory_type() const { return T_BYTE; }
 290 };
 291 
 292 //------------------------------LoadUBNode-------------------------------------
 293 // Load a unsigned byte (8bits unsigned) from memory
 294 class LoadUBNode : public LoadNode {
 295 public:
 296   LoadUBNode(Node* c, Node* mem, Node* adr, const TypePtr* at, const TypeInt* ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 297     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 298   virtual int Opcode() const;
 299   virtual uint ideal_reg() const { return Op_RegI; }
 300   virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
 301   virtual const Type* Value(PhaseGVN* phase) const;
 302   virtual int store_Opcode() const { return Op_StoreB; }
 303   virtual BasicType memory_type() const { return T_BYTE; }
 304 };
 305 
 306 //------------------------------LoadUSNode-------------------------------------
 307 // Load an unsigned short/char (16bits unsigned) from memory
 308 class LoadUSNode : public LoadNode {
 309 public:
 310   LoadUSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 311     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 312   virtual int Opcode() const;
 313   virtual uint ideal_reg() const { return Op_RegI; }
 314   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 315   virtual const Type* Value(PhaseGVN* phase) const;
 316   virtual int store_Opcode() const { return Op_StoreC; }
 317   virtual BasicType memory_type() const { return T_CHAR; }
 318 };
 319 
 320 //------------------------------LoadSNode--------------------------------------
 321 // Load a short (16bits signed) from memory
 322 class LoadSNode : public LoadNode {
 323 public:
 324   LoadSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 325     : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
 326   virtual int Opcode() const;
 327   virtual uint ideal_reg() const { return Op_RegI; }
 328   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 329   virtual const Type* Value(PhaseGVN* phase) const;
 330   virtual int store_Opcode() const { return Op_StoreC; }
 331   virtual BasicType memory_type() const { return T_SHORT; }
 332 };
 333 
//------------------------------LoadINode--------------------------------------
// Load an integer from memory
class LoadINode : public LoadNode {
public:
  LoadINode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
    : LoadNode(c, mem, adr, at, ti, mo, control_dependency) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }      // result lives in an int register
  virtual int store_Opcode() const { return Op_StoreI; }  // matching 32-bit store opcode
  virtual BasicType memory_type() const { return T_INT; }
};
 345 
//------------------------------LoadRangeNode----------------------------------
// Load an array length from the array
class LoadRangeNode : public LoadINode {
public:
  // Default type is TypeInt::POS: an array length is never negative.
  // Memory slice is always the array-length slice (TypeAryPtr::RANGE),
  // and a length load needs no ordering (MemNode::unordered).
  LoadRangeNode(Node *c, Node *mem, Node *adr, const TypeInt *ti = TypeInt::POS)
    : LoadINode(c, mem, adr, TypeAryPtr::RANGE, ti, MemNode::unordered) {}
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
};
 357 
 358 //------------------------------LoadLNode--------------------------------------
 359 // Load a long from memory
 360 class LoadLNode : public LoadNode {
 361   virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
 362   virtual uint cmp( const Node &n ) const {
 363     return _require_atomic_access == ((LoadLNode&)n)._require_atomic_access
 364       && LoadNode::cmp(n);
 365   }
 366   virtual uint size_of() const { return sizeof(*this); }
 367   const bool _require_atomic_access;  // is piecewise load forbidden?
 368 
 369 public:
 370   LoadLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeLong *tl,
 371             MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest, bool require_atomic_access = false)
 372     : LoadNode(c, mem, adr, at, tl, mo, control_dependency), _require_atomic_access(require_atomic_access) {}
 373   virtual int Opcode() const;
 374   virtual uint ideal_reg() const { return Op_RegL; }


 466 public:
 467   LoadNNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const Type* t, MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest)
 468     : LoadNode(c, mem, adr, at, t, mo, control_dependency) {}
 469   virtual int Opcode() const;
 470   virtual uint ideal_reg() const { return Op_RegN; }
 471   virtual int store_Opcode() const { return Op_StoreN; }
 472   virtual BasicType memory_type() const { return T_NARROWOOP; }
 473 };
 474 
//------------------------------LoadKlassNode----------------------------------
// Load a Klass from an object
class LoadKlassNode : public LoadPNode {
protected:
  // In most cases, LoadKlassNode does not have the control input set. If the control
  // input is set, it must not be removed (by LoadNode::Ideal()).
  virtual bool can_remove_control() const;
public:
  LoadKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeKlassPtr *tk, MemOrd mo)
    : LoadPNode(c, mem, adr, at, tk, mo) {}
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool depends_only_on_test() const { return true; }

  // Polymorphic factory method: picks the proper (narrow or wide) klass-load
  // node; the klass type defaults to the most general one (OBJECT).
  static Node* make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* at,
                    const TypeKlassPtr* tk = TypeKlassPtr::OBJECT);
};
 494 
//------------------------------LoadNKlassNode---------------------------------
// Load a narrow Klass from an object.
class LoadNKlassNode : public LoadNNode {
public:
  LoadNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeNarrowKlass *tk, MemOrd mo)
    : LoadNNode(c, mem, adr, at, tk, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegN; }            // compressed-pointer register
  virtual int store_Opcode() const { return Op_StoreNKlass; }   // matching narrow-klass store
  virtual BasicType memory_type() const { return T_NARROWKLASS; }

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool depends_only_on_test() const { return true; }
};
 510 
 511 
 512 //------------------------------StoreNode--------------------------------------
 513 // Store value; requires Store, Address and Value
 514 class StoreNode : public MemNode {
 515 private:
 516   // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
 517   // stores that can be reordered, and such requiring release semantics to
 518   // adhere to the Java specification.  The required behaviour is stored in
 519   // this field.
 520   const MemOrd _mo;
 521   // Needed for proper cloning.
 522   virtual uint size_of() const { return sizeof(*this); }
 523 protected:
 524   virtual uint cmp( const Node &n ) const;
 525   virtual bool depends_only_on_test() const { return false; }
 526 
 527   Node *Ideal_masked_input       (PhaseGVN *phase, uint mask);


 564   }
 565 
 566   // Polymorphic factory method
 567   //
 568   // We must ensure that stores of object references will be visible
 569   // only after the object's initialization. So the callers of this
 570   // procedure must indicate that the store requires `release'
 571   // semantics, if the stored value is an object reference that might
 572   // point to a new object and may become externally visible.
 573   static StoreNode* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
 574                          const TypePtr* at, Node *val, BasicType bt, MemOrd mo);
 575 
 576   virtual uint hash() const;    // Check the type
 577 
 578   // If the store is to Field memory and the pointer is non-null, we can
 579   // zero out the control input.
 580   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 581 
 582   // Compute a new Type for this node.  Basically we just do the pre-check,
 583   // then call the virtual add() to set the type.
 584   virtual const Type* Value(PhaseGVN* phase) const;
 585 
 586   // Check for identity function on memory (Load then Store at same address)
 587   virtual Node* Identity(PhaseGVN* phase);
 588 
 589   // Do not match memory edge
 590   virtual uint match_edge(uint idx) const;
 591 
 592   virtual const Type *bottom_type() const;  // returns Type::MEMORY
 593 
 594   // Map a store opcode to its corresponding own opcode, trivially.
 595   virtual int store_Opcode() const { return Opcode(); }
 596 
 597   // have all possible loads of the value stored been optimized away?
 598   bool value_never_loaded(PhaseTransform *phase) const;
 599 };
 600 
 601 //------------------------------StoreBNode-------------------------------------
 602 // Store byte to memory
 603 class StoreBNode : public StoreNode {
 604 public:
 605   StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
 606     : StoreNode(c, mem, adr, at, val, mo) {}
 607   virtual int Opcode() const;


// Preceding equivalent StoreCMs may be eliminated.
 730 class StoreCMNode : public StoreNode {
 731  private:
 732   virtual uint hash() const { return StoreNode::hash() + _oop_alias_idx; }
 733   virtual uint cmp( const Node &n ) const {
 734     return _oop_alias_idx == ((StoreCMNode&)n)._oop_alias_idx
 735       && StoreNode::cmp(n);
 736   }
 737   virtual uint size_of() const { return sizeof(*this); }
 738   int _oop_alias_idx;   // The alias_idx of OopStore
 739 
 740 public:
 741   StoreCMNode( Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, int oop_alias_idx ) :
 742     StoreNode(c, mem, adr, at, val, oop_store, MemNode::release),
 743     _oop_alias_idx(oop_alias_idx) {
 744     assert(_oop_alias_idx >= Compile::AliasIdxRaw ||
 745            _oop_alias_idx == Compile::AliasIdxBot && Compile::current()->AliasLevel() == 0,
 746            "bad oop alias idx");
 747   }
 748   virtual int Opcode() const;
 749   virtual Node* Identity(PhaseGVN* phase);
 750   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 751   virtual const Type* Value(PhaseGVN* phase) const;
 752   virtual BasicType memory_type() const { return T_VOID; } // unspecific
 753   int oop_alias_idx() const { return _oop_alias_idx; }
 754 };
 755 
//------------------------------LoadPLockedNode---------------------------------
// Load-locked a pointer from memory (either object or array).
// On Sparc & Intel this is implemented as a normal pointer load.
// On PowerPC and friends it's a real load-locked.
class LoadPLockedNode : public LoadPNode {
public:
  LoadPLockedNode(Node *c, Node *mem, Node *adr, MemOrd mo)
    : LoadPNode(c, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, mo) {}
  virtual int Opcode() const;
  // The paired conditional store that consumes this load-locked value.
  virtual int store_Opcode() const { return Op_StorePConditional; }
  virtual bool depends_only_on_test() const { return true; }
};
 768 
 769 //------------------------------SCMemProjNode---------------------------------------
 770 // This class defines a projection of the memory  state of a store conditional node.
 771 // These nodes return a value, but also update memory.
 772 class SCMemProjNode : public ProjNode {
 773 public:
 774   enum {SCMEMPROJCON = (uint)-2};
 775   SCMemProjNode( Node *src) : ProjNode( src, SCMEMPROJCON) { }
 776   virtual int Opcode() const;
 777   virtual bool      is_CFG() const  { return false; }
 778   virtual const Type *bottom_type() const {return Type::MEMORY;}
 779   virtual const TypePtr *adr_type() const {
 780     Node* ctrl = in(0);
 781     if (ctrl == NULL)  return NULL; // node is dead
 782     return ctrl->in(MemNode::Memory)->adr_type();
 783   }
 784   virtual uint ideal_reg() const { return 0;} // memory projections don't have a register
 785   virtual const Type* Value(PhaseGVN* phase) const;
 786 #ifndef PRODUCT
 787   virtual void dump_spec(outputStream *st) const {};
 788 #endif
 789 };
 790 
 791 //------------------------------LoadStoreNode---------------------------
 792 // Note: is_Mem() method returns 'true' for this class.
 793 class LoadStoreNode : public Node {
 794 private:
 795   const Type* const _type;      // What kind of value is loaded?
 796   const TypePtr* _adr_type;     // What kind of memory is being addressed?
 797   virtual uint size_of() const; // Size is bigger
 798 public:
 799   LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required );
 800   virtual bool depends_only_on_test() const { return false; }
 801   virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }
 802 
 803   virtual const Type *bottom_type() const { return _type; }
 804   virtual uint ideal_reg() const;
 805   virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address


 917 
//------------------------------GetAndSetNNode---------------------------
// Atomic exchange of a narrow oop.
class GetAndSetNNode : public LoadStoreNode {
public:
  // The trailing 4 is LoadStoreNode's 'required' constructor argument —
  // presumably the required input-edge count (c, mem, adr, val); confirm
  // against LoadStoreNode's constructor definition.
  GetAndSetNNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
  virtual int Opcode() const;
};
 924 
 925 //------------------------------ClearArray-------------------------------------
 926 class ClearArrayNode: public Node {
 927 public:
 928   ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base )
 929     : Node(ctrl,arymem,word_cnt,base) {
 930     init_class_id(Class_ClearArray);
 931   }
 932   virtual int         Opcode() const;
 933   virtual const Type *bottom_type() const { return Type::MEMORY; }
 934   // ClearArray modifies array elements, and so affects only the
 935   // array memory addressed by the bottom_type of its base address.
 936   virtual const class TypePtr *adr_type() const;
 937   virtual Node* Identity(PhaseGVN* phase);
 938   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 939   virtual uint match_edge(uint idx) const;
 940 
 941   // Clear the given area of an object or array.
 942   // The start offset must always be aligned mod BytesPerInt.
 943   // The end offset must always be aligned mod BytesPerLong.
 944   // Return the new memory.
 945   static Node* clear_memory(Node* control, Node* mem, Node* dest,
 946                             intptr_t start_offset,
 947                             intptr_t end_offset,
 948                             PhaseGVN* phase);
 949   static Node* clear_memory(Node* control, Node* mem, Node* dest,
 950                             intptr_t start_offset,
 951                             Node* end_offset,
 952                             PhaseGVN* phase);
 953   static Node* clear_memory(Node* control, Node* mem, Node* dest,
 954                             Node* start_offset,
 955                             Node* end_offset,
 956                             PhaseGVN* phase);
 957   // Return allocation input memory edge if it is different instance


 966 // volatile-load.  Monitor-exit and volatile-store act as Release: no
 967 // preceding ref can be moved to after them.  We insert a MemBar-Release
 968 // before a FastUnlock or volatile-store.  All volatiles need to be
 969 // serialized, so we follow all volatile-stores with a MemBar-Volatile to
 970 // separate it from any following volatile-load.
class MemBarNode: public MultiNode {
  virtual uint hash() const ;                  // { return NO_HASH; }
  virtual uint cmp( const Node &n ) const ;    // Always fail, except on self

  virtual uint size_of() const { return sizeof(*this); }
  // Memory type this node is serializing.  Usually either rawptr or bottom.
  const TypePtr* _adr_type;

public:
  enum {
    Precedent = TypeFunc::Parms  // optional edge to force precedence
  };
  MemBarNode(Compile* C, int alias_idx, Node* precedent);
  virtual int Opcode() const = 0;              // abstract: concrete barriers subclass this
  virtual const class TypePtr *adr_type() const { return _adr_type; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint match_edge(uint idx) const { return 0; }  // no edges participate in matching
  virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node *match( const ProjNode *proj, const Matcher *m );
  // Factory method.  Builds a wide or narrow membar.
  // Optional 'precedent' becomes an extra edge if not null.
  static MemBarNode* make(Compile* C, int opcode,
                          int alias_idx = Compile::AliasIdxBot,
                          Node* precedent = NULL);
};
 997 
// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted after a volatile load.
class MemBarAcquireNode: public MemBarNode {
public:
  MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};


1182                                PhaseGVN* phase);
1183 
1184   intptr_t find_next_fullword_store(uint i, PhaseGVN* phase);
1185 };
1186 
1187 //------------------------------MergeMem---------------------------------------
1188 // (See comment in memnode.cpp near MergeMemNode::MergeMemNode for semantics.)
1189 class MergeMemNode: public Node {
1190   virtual uint hash() const ;                  // { return NO_HASH; }
1191   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
1192   friend class MergeMemStream;
1193   MergeMemNode(Node* def);  // clients use MergeMemNode::make
1194 
1195 public:
1196   // If the input is a whole memory state, clone it with all its slices intact.
1197   // Otherwise, make a new memory state with just that base memory input.
1198   // In either case, the result is a newly created MergeMem.
1199   static MergeMemNode* make(Node* base_memory);
1200 
1201   virtual int Opcode() const;
1202   virtual Node* Identity(PhaseGVN* phase);
1203   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1204   virtual uint ideal_reg() const { return NotAMachineReg; }
1205   virtual uint match_edge(uint idx) const { return 0; }
1206   virtual const RegMask &out_RegMask() const;
1207   virtual const Type *bottom_type() const { return Type::MEMORY; }
1208   virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
1209   // sparse accessors
1210   // Fetch the previously stored "set_memory_at", or else the base memory.
1211   // (Caller should clone it if it is a phi-nest.)
1212   Node* memory_at(uint alias_idx) const;
1213   // set the memory, regardless of its previous value
1214   void set_memory_at(uint alias_idx, Node* n);
1215   // the "base" is the memory that provides the non-finite support
1216   Node* base_memory() const       { return in(Compile::AliasIdxBot); }
1217   // warning: setting the base can implicitly set any of the other slices too
1218   void set_base_memory(Node* def);
1219   // sentinel value which denotes a copy of the base memory:
1220   Node*   empty_memory() const    { return in(Compile::AliasIdxTop); }
1221   static Node* make_empty_memory(); // where the sentinel comes from
1222   bool is_empty_memory(Node* n) const { assert((n == empty_memory()) == n->is_top(), "sanity"); return n->is_top(); }


< prev index next >