src/share/vm/opto/locknode.hpp
8031320_9 Sdiff

--- old/src/share/vm/opto/locknode.hpp

  76   }
  77   int stack_slot() const { return _slot; }
  78 
  79   bool is_eliminated() const { return _is_eliminated; }
  80   // mark lock as eliminated.
  81   void set_eliminated()      { _is_eliminated = true; }
  82 
  83   // Is BoxLock node used for one simple lock region?
  84   bool is_simple_lock_region(LockNode** unique_lock, Node* obj);
  85 
  86 #ifndef PRODUCT
  87   virtual void format( PhaseRegAlloc *, outputStream *st ) const;
  88   virtual void dump_spec(outputStream *st) const { st->print("  Lock %d",_slot); }
  89 #endif
  90 };
  91 
  92 //------------------------------FastLockNode-----------------------------------
  93 class FastLockNode: public CmpNode {
  94 private:
  95   BiasedLockingCounters* _counters;
  96 
  97 public:
  98   FastLockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
  99     init_req(0,ctrl);
 100     init_class_id(Class_FastLock);
 101     _counters = NULL;
 102   }
 103   Node* obj_node() const { return in(1); }
 104   Node* box_node() const { return in(2); }
 105   void  set_box_node(Node* box) { set_req(2, box); }
 106 
 107   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
 108   // LockNode/UnLockNode to avoid creating Phi's.
 109   virtual uint hash() const ;                  // { return NO_HASH; }
 110   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
 111   virtual int Opcode() const;
 112   virtual const Type *Value( PhaseTransform *phase ) const { return TypeInt::CC; }
 113   const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}
 114 
 115   void create_lock_counter(JVMState* s);
 116   BiasedLockingCounters* counters() const { return _counters; }
 117 };
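
The hash()/cmp() pair above (old lines 109-110) is what keeps every FastLockNode distinct: a node that never hashes equal to another cannot be commoned by GVN, so no Phi has to be created over lock nodes. The bodies are defined out of line; a minimal sketch of the conventional NO_HASH pattern is shown below (an assumption about locknode.cpp, following the hints in the header comments, not text taken from this webrev):

    // Sketch: conventional "never value-number" bodies for FastLockNode.
    uint FastLockNode::hash() const { return NO_HASH; }
    uint FastLockNode::cmp( const Node &n ) const {
      return (&n == this);              // Always fail, except on self
    }
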
 118 
 119 
 120 //------------------------------FastUnlockNode---------------------------------
 121 class FastUnlockNode: public CmpNode {
 122 public:
 123   FastUnlockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
 124     init_req(0,ctrl);
 125     init_class_id(Class_FastUnlock);
 126   }
 127   Node* obj_node() const { return in(1); }
 128   Node* box_node() const { return in(2); }
 129 
 130 
 131   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
 132   // LockNode/UnLockNode to avoid creating Phi's.
 133   virtual uint hash() const ;                  // { return NO_HASH; }
 134   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
 135   virtual int Opcode() const;
 136   virtual const Type *Value( PhaseTransform *phase ) const { return TypeInt::CC; }

+++ new/src/share/vm/opto/locknode.hpp

  76   }
  77   int stack_slot() const { return _slot; }
  78 
  79   bool is_eliminated() const { return _is_eliminated; }
  80   // mark lock as eliminated.
  81   void set_eliminated()      { _is_eliminated = true; }
  82 
  83   // Is BoxLock node used for one simple lock region?
  84   bool is_simple_lock_region(LockNode** unique_lock, Node* obj);
  85 
  86 #ifndef PRODUCT
  87   virtual void format( PhaseRegAlloc *, outputStream *st ) const;
  88   virtual void dump_spec(outputStream *st) const { st->print("  Lock %d",_slot); }
  89 #endif
  90 };
  91 
  92 //------------------------------FastLockNode-----------------------------------
  93 class FastLockNode: public CmpNode {
  94 private:
  95   BiasedLockingCounters* _counters;
  96   RTMLockingCounters* _rtmcounters;      // RTM lock counters for inflated locks
  97   RTMLockingCounters* _stackrtmcounters; // RTM lock counters for stack locks
  98 
  99 public:
 100   FastLockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
 101     init_req(0,ctrl);
 102     init_class_id(Class_FastLock);
 103     _counters = NULL;
 104     _rtmcounters = NULL;
 105     _stackrtmcounters = NULL;
 106   }
 107   Node* obj_node() const { return in(1); }
 108   Node* box_node() const { return in(2); }
 109   void  set_box_node(Node* box) { set_req(2, box); }
 110 
 111   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
 112   // LockNode/UnLockNode to avoid creating Phi's.
 113   virtual uint hash() const ;                  // { return NO_HASH; }
 114   virtual uint size_of() const;
 115   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
 116   virtual int Opcode() const;
 117   virtual const Type *Value( PhaseTransform *phase ) const { return TypeInt::CC; }
 118   const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}
 119 
 120   void create_lock_counter(JVMState* s);
 121   void create_rtm_lock_counter(JVMState* state);
 122   BiasedLockingCounters* counters() const { return _counters; }
 123   RTMLockingCounters* rtmcounters() const { return _rtmcounters; }
 124   RTMLockingCounters* stackrtmcounters() const { return _stackrtmcounters; }
 125 };
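
The size_of() declaration added at new line 114 goes with the new counter fields: a C2 node that carries members beyond its base class must report its concrete size so that node cloning copies those fields as well. A minimal sketch of the usual out-of-line definition (an assumption about locknode.cpp based on the standard HotSpot pattern, not copied from this webrev):

    // Sketch: report the full object size so Node::clone() copies
    // _counters, _rtmcounters and _stackrtmcounters along with the node.
    uint FastLockNode::size_of() const { return sizeof(*this); }
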
 126 
 127 
 128 //------------------------------FastUnlockNode---------------------------------
 129 class FastUnlockNode: public CmpNode {
 130 public:
 131   FastUnlockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
 132     init_req(0,ctrl);
 133     init_class_id(Class_FastUnlock);
 134   }
 135   Node* obj_node() const { return in(1); }
 136   Node* box_node() const { return in(2); }
 137 
 138 
 139   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
 140   // LockNode/UnLockNode to avoid creating Phi's.
 141   virtual uint hash() const ;                  // { return NO_HASH; }
 142   virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
 143   virtual int Opcode() const;
 144   virtual const Type *Value( PhaseTransform *phase ) const { return TypeInt::CC; }