
src/share/vm/opto/callnode.hpp

rev 8739 : 8004073: Implement C2 Ideal node specific dump() method
Summary: add Node::dump_rel() to dump a node and its related nodes (the notion of "related" depends on the node at hand); add Node::dump_comp() to dump a node in compact representation; add Node::dump_rel_comp() to dump a node and its related nodes in compact representation; add the required machinery; extend some C2 IR nodes with compact and related dumping
Reviewed-by:
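
To make the new entry points concrete, here is a minimal usage sketch (editorial; not part of the webrev itself). It assumes the Node-level wrappers named in the summary are declared in node.hpp, which this page does not show:

  // Sketch: invoking the dump variants on some Node* n, e.g. from a
  // debugger session. dump() already exists; the other three calls
  // are the ones this change adds.
  n->dump();           // full dump of the node itself
  n->dump_comp();      // same node, compact representation
  n->dump_rel();       // the node plus its related nodes
  n->dump_rel_comp();  // the node and its related nodes, compact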

--- old version of src/share/vm/opto/callnode.hpp ---

  67   virtual uint size_of() const; // Size is bigger
  68 public:
  69   const TypeTuple *_domain;
  70   StartNode( Node *root, const TypeTuple *domain ) : MultiNode(2), _domain(domain) {
  71     init_class_id(Class_Start);
  72     init_req(0,this);
  73     init_req(1,root);
  74   }
  75   virtual int Opcode() const;
  76   virtual bool pinned() const { return true; };
  77   virtual const Type *bottom_type() const;
  78   virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  79   virtual const Type *Value( PhaseTransform *phase ) const;
  80   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  81   virtual void  calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const;
  82   virtual const RegMask &in_RegMask(uint) const;
  83   virtual Node *match( const ProjNode *proj, const Matcher *m );
  84   virtual uint ideal_reg() const { return 0; }
  85 #ifndef PRODUCT
  86   virtual void  dump_spec(outputStream *st) const;

  87 #endif
  88 };
  89 
  90 //------------------------------StartOSRNode-----------------------------------
  91 // The method start node for on-stack replacement code
  92 class StartOSRNode : public StartNode {
  93 public:
  94   StartOSRNode( Node *root, const TypeTuple *domain ) : StartNode(root, domain) {}
  95   virtual int   Opcode() const;
  96   static  const TypeTuple *osr_domain();
  97 };
  98 
  99 
 100 //------------------------------ParmNode---------------------------------------
 101 // Incoming parameters
 102 class ParmNode : public ProjNode {
 103   static const char * const names[TypeFunc::Parms+1];
 104 public:
 105   ParmNode( StartNode *src, uint con ) : ProjNode(src,con) {
 106     init_class_id(Class_Parm);
 107   }
 108   virtual int Opcode() const;
 109   virtual bool  is_CFG() const { return (_con == TypeFunc::Control); }
 110   virtual uint ideal_reg() const;
 111 #ifndef PRODUCT
 112   virtual void dump_spec(outputStream *st) const;


 113 #endif
 114 };
 115 
 116 
 117 //------------------------------ReturnNode-------------------------------------
 118 // Return from subroutine node
 119 class ReturnNode : public Node {
 120 public:
 121   ReturnNode( uint edges, Node *cntrl, Node *i_o, Node *memory, Node *retadr, Node *frameptr );
 122   virtual int Opcode() const;
 123   virtual bool  is_CFG() const { return true; }
 124   virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
 125   virtual bool depends_only_on_test() const { return false; }
 126   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 127   virtual const Type *Value( PhaseTransform *phase ) const;
 128   virtual uint ideal_reg() const { return NotAMachineReg; }
 129   virtual uint match_edge(uint idx) const;
 130 #ifndef PRODUCT
 131   virtual void dump_req(outputStream *st = tty) const;
 132 #endif


 459     return !_replaced_nodes.is_empty();
 460   }
 461 
 462   // Standard Node stuff
 463   virtual int            Opcode() const;
 464   virtual bool           pinned() const { return true; }
 465   virtual const Type    *Value( PhaseTransform *phase ) const;
 466   virtual const Type    *bottom_type() const { return Type::CONTROL; }
 467   virtual const TypePtr *adr_type() const { return _adr_type; }
 468   virtual Node          *Ideal(PhaseGVN *phase, bool can_reshape);
 469   virtual Node          *Identity( PhaseTransform *phase );
 470   virtual uint           ideal_reg() const { return 0; }
 471   virtual const RegMask &in_RegMask(uint) const;
 472   virtual const RegMask &out_RegMask() const;
 473   virtual uint           match_edge(uint idx) const;
 474 
 475   static  bool           needs_polling_address_input();
 476 
 477 #ifndef PRODUCT
 478   virtual void           dump_spec(outputStream *st) const;

 479 #endif
 480 };
 481 
 482 //------------------------------SafePointScalarObjectNode----------------------
 483 // A SafePointScalarObjectNode represents the state of a scalarized object
 484 // at a safepoint.
 485 
 486 class SafePointScalarObjectNode: public TypeNode {
 487   uint _first_index; // First input edge relative index of a SafePoint node where
 488                      // states of the scalarized object fields are collected.
 489                      // It is relative to the last (youngest) jvms->_scloff.
 490   uint _n_fields;    // Number of non-static fields of the scalarized object.
 491   DEBUG_ONLY(AllocateNode* _alloc;)
 492 
 493   virtual uint hash() const ; // { return NO_HASH; }
 494   virtual uint cmp( const Node &n ) const;
 495 
 496   uint first_index() const { return _first_index; }
 497 
 498 public:


 658   const int       _bci;         // Byte Code Index of call byte code
 659   CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci)
 660     : CallNode(tf, addr, TypePtr::BOTTOM),
 661       _method(method), _bci(bci),
 662       _optimized_virtual(false),
 663       _method_handle_invoke(false)
 664   {
 665     init_class_id(Class_CallJava);
 666   }
 667 
 668   virtual int   Opcode() const;
 669   ciMethod* method() const                { return _method; }
 670   void  set_method(ciMethod *m)           { _method = m; }
 671   void  set_optimized_virtual(bool f)     { _optimized_virtual = f; }
 672   bool  is_optimized_virtual() const      { return _optimized_virtual; }
 673   void  set_method_handle_invoke(bool f)  { _method_handle_invoke = f; }
 674   bool  is_method_handle_invoke() const   { return _method_handle_invoke; }
 675 
 676 #ifndef PRODUCT
 677   virtual void  dump_spec(outputStream *st) const;

 678 #endif
 679 };
 680 
 681 //------------------------------CallStaticJavaNode-----------------------------
 682 // Make a direct subroutine call using Java calling convention (for static
 683 // calls and optimized virtual calls, plus calls to wrappers for run-time
 684 // routines); generates static stub.
 685 class CallStaticJavaNode : public CallJavaNode {
 686   virtual uint cmp( const Node &n ) const;
 687   virtual uint size_of() const; // Size is bigger
 688 public:
 689   CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
 690     : CallJavaNode(tf, addr, method, bci) {
 691     init_class_id(Class_CallStaticJava);
 692     if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
 693       init_flags(Flag_is_macro);
 694       C->add_macro_node(this);
 695     }
 696     _is_scalar_replaceable = false;
 697     _is_non_escaping = false;


 713 
 714   // If this is an uncommon trap, return the request code, else zero.
 715   int uncommon_trap_request() const;
 716   static int extract_uncommon_trap_request(const Node* call);
 717 
 718   bool is_boxing_method() const {
 719     return is_macro() && (method() != NULL) && method()->is_boxing_method();
 720   }
 721   // Later inlining modifies the JVMState, so we need to clone it
 722   // when the call node is cloned (because it is a macro node).
 723   virtual void  clone_jvms(Compile* C) {
 724     if ((jvms() != NULL) && is_boxing_method()) {
 725       set_jvms(jvms()->clone_deep(C));
 726       jvms()->set_map_deep(this);
 727     }
 728   }
 729 
 730   virtual int         Opcode() const;
 731 #ifndef PRODUCT
 732   virtual void        dump_spec(outputStream *st) const;

 733 #endif
 734 };
 735 
 736 //------------------------------CallDynamicJavaNode----------------------------
 737 // Make a dispatched call using Java calling convention.
 738 class CallDynamicJavaNode : public CallJavaNode {
 739   virtual uint cmp( const Node &n ) const;
 740   virtual uint size_of() const; // Size is bigger
 741 public:
 742   CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
 743     init_class_id(Class_CallDynamicJava);
 744   }
 745 
 746   int _vtable_index;
 747   virtual int   Opcode() const;
 748 #ifndef PRODUCT
 749   virtual void  dump_spec(outputStream *st) const;
 750 #endif
 751 };
 752 


 934   // Pattern-match a possible usage of AllocateArrayNode.
 935   // Return null if no allocation is recognized.
 936   static AllocateArrayNode* Ideal_array_allocation(Node* ptr, PhaseTransform* phase) {
 937     AllocateNode* allo = Ideal_allocation(ptr, phase);
 938     return (allo == NULL || !allo->is_AllocateArray())
 939            ? NULL : allo->as_AllocateArray();
 940   }
 941 };
 942 
 943 //------------------------------AbstractLockNode-----------------------------------
 944 class AbstractLockNode: public CallNode {
 945 private:
 946   enum {
 947     Regular = 0,  // Normal lock
 948     NonEscObj,    // Lock is used for a non-escaping object
 949     Coarsened,    // Lock was coarsened
 950     Nested        // Nested lock
 951   } _kind;
 952 #ifndef PRODUCT
 953   NamedCounter* _counter;

 954 #endif
 955 
 956 protected:
 957   // helper functions for lock elimination
 958   //
 959 
 960   bool find_matching_unlock(const Node* ctrl, LockNode* lock,
 961                             GrowableArray<AbstractLockNode*> &lock_ops);
 962   bool find_lock_and_unlock_through_if(Node* node, LockNode* lock,
 963                                        GrowableArray<AbstractLockNode*> &lock_ops);
 964   bool find_unlocks_for_region(const RegionNode* region, LockNode* lock,
 965                                GrowableArray<AbstractLockNode*> &lock_ops);
 966   LockNode *find_matching_lock(UnlockNode* unlock);
 967 
 968   // Update the counter to indicate that this lock was eliminated.
 969   void set_eliminated_lock_counter() PRODUCT_RETURN;
 970 
 971 public:
 972   AbstractLockNode(const TypeFunc *tf)
 973     : CallNode(tf, NULL, TypeRawPtr::BOTTOM),


 988   virtual uint size_of() const { return sizeof(*this); }
 989 
 990   bool is_eliminated()  const { return (_kind != Regular); }
 991   bool is_non_esc_obj() const { return (_kind == NonEscObj); }
 992   bool is_coarsened()   const { return (_kind == Coarsened); }
 993   bool is_nested()      const { return (_kind == Nested); }
 994 
 995   const char * kind_as_string() const;
 996   void log_lock_optimization(Compile* c, const char * tag) const;
 997 
 998   void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
 999   void set_coarsened()   { _kind = Coarsened; set_eliminated_lock_counter(); }
1000   void set_nested()      { _kind = Nested; set_eliminated_lock_counter(); }
1001 
1002   // locking does not modify its arguments
1003   virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase){ return false;}
1004 
1005 #ifndef PRODUCT
1006   void create_lock_counter(JVMState* s);
1007   NamedCounter* counter() const { return _counter; }



1008 #endif
1009 };
1010 
1011 //------------------------------Lock---------------------------------------
1012 // High-level lock operation
1013 //
1014 // This is a subclass of CallNode because it is a macro node which gets expanded
1015 // into a code sequence containing a call.  This node takes 3 "parameters":
1016 //    0  -  object to lock
1017 //    1 -   a BoxLockNode
1018 //    2 -   a FastLockNode
1019 //
1020 class LockNode : public AbstractLockNode {
1021 public:
1022 
1023   static const TypeFunc *lock_type() {
1024     // create input type (domain)
1025     const Type **fields = TypeTuple::fields(3);
1026     fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL;  // Object to be Locked
1027     fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM;    // Address of stack location for lock



--- new version of src/share/vm/opto/callnode.hpp (rev 8739) ---

  67   virtual uint size_of() const; // Size is bigger
  68 public:
  69   const TypeTuple *_domain;
  70   StartNode( Node *root, const TypeTuple *domain ) : MultiNode(2), _domain(domain) {
  71     init_class_id(Class_Start);
  72     init_req(0,this);
  73     init_req(1,root);
  74   }
  75   virtual int Opcode() const;
  76   virtual bool pinned() const { return true; };
  77   virtual const Type *bottom_type() const;
  78   virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  79   virtual const Type *Value( PhaseTransform *phase ) const;
  80   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  81   virtual void  calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const;
  82   virtual const RegMask &in_RegMask(uint) const;
  83   virtual Node *match( const ProjNode *proj, const Matcher *m );
  84   virtual uint ideal_reg() const { return 0; }
  85 #ifndef PRODUCT
  86   virtual void  dump_spec(outputStream *st) const;
  87   virtual void  dump_comp_spec(outputStream *st) const;
  88 #endif
  89 };
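
By analogy with the existing dump()/dump_spec() pair, dump_comp_spec() on line 87 is the per-class hook behind the new Node::dump_comp(). A hedged sketch of that dispatch, using only Node members known to exist (_idx, Name()); the real body lives in node.cpp, which this page does not show:

  // Sketch (assumed): generic compact dump that delegates the
  // node-specific tail to the virtual hook StartNode overrides above.
  void Node::dump_comp(outputStream* st) const {
    st->print("%s%d", Name(), _idx);  // e.g. "Start3"
    dump_comp_spec(st);               // class-specific suffix, if any
    st->cr();
  }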
  90 
  91 //------------------------------StartOSRNode-----------------------------------
  92 // The method start node for on-stack replacement code
  93 class StartOSRNode : public StartNode {
  94 public:
  95   StartOSRNode( Node *root, const TypeTuple *domain ) : StartNode(root, domain) {}
  96   virtual int   Opcode() const;
  97   static  const TypeTuple *osr_domain();
  98 };
  99 
 100 
 101 //------------------------------ParmNode---------------------------------------
 102 // Incoming parameters
 103 class ParmNode : public ProjNode {
 104   static const char * const names[TypeFunc::Parms+1];
 105 public:
 106   ParmNode( StartNode *src, uint con ) : ProjNode(src,con) {
 107     init_class_id(Class_Parm);
 108   }
 109   virtual int Opcode() const;
 110   virtual bool  is_CFG() const { return (_con == TypeFunc::Control); }
 111   virtual uint ideal_reg() const;
 112 #ifndef PRODUCT
 113   virtual void dump_spec(outputStream *st) const;
 114   virtual void dump_comp_spec(outputStream *st) const;
 115   virtual void rel(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
 116 #endif
 117 };
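
ParmNode is the first class on this page to override the new rel() hook (line 115). Its body belongs in callnode.cpp and is not shown here; a plausible sketch, under the assumption that rel() fills in_rel/out_rel with the nodes considered related to this one:

  // Sketch (assumed): a parameter's related input is the StartNode it
  // projects from; unless a compact dump was requested, also report
  // the parameter's direct users.
  void ParmNode::rel(GrowableArray<Node*> *in_rel,
                     GrowableArray<Node*> *out_rel,
                     bool compact) const {
    in_rel->append(in(0));  // the owning StartNode
    if (!compact) {
      for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
        out_rel->append(fast_out(i));  // consumers of the parameter
      }
    }
  }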
 118 
 119 
 120 //------------------------------ReturnNode-------------------------------------
 121 // Return from subroutine node
 122 class ReturnNode : public Node {
 123 public:
 124   ReturnNode( uint edges, Node *cntrl, Node *i_o, Node *memory, Node *retadr, Node *frameptr );
 125   virtual int Opcode() const;
 126   virtual bool  is_CFG() const { return true; }
 127   virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
 128   virtual bool depends_only_on_test() const { return false; }
 129   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 130   virtual const Type *Value( PhaseTransform *phase ) const;
 131   virtual uint ideal_reg() const { return NotAMachineReg; }
 132   virtual uint match_edge(uint idx) const;
 133 #ifndef PRODUCT
 134   virtual void dump_req(outputStream *st = tty) const;
 135 #endif


 462     return !_replaced_nodes.is_empty();
 463   }
 464 
 465   // Standard Node stuff
 466   virtual int            Opcode() const;
 467   virtual bool           pinned() const { return true; }
 468   virtual const Type    *Value( PhaseTransform *phase ) const;
 469   virtual const Type    *bottom_type() const { return Type::CONTROL; }
 470   virtual const TypePtr *adr_type() const { return _adr_type; }
 471   virtual Node          *Ideal(PhaseGVN *phase, bool can_reshape);
 472   virtual Node          *Identity( PhaseTransform *phase );
 473   virtual uint           ideal_reg() const { return 0; }
 474   virtual const RegMask &in_RegMask(uint) const;
 475   virtual const RegMask &out_RegMask() const;
 476   virtual uint           match_edge(uint idx) const;
 477 
 478   static  bool           needs_polling_address_input();
 479 
 480 #ifndef PRODUCT
 481   virtual void           dump_spec(outputStream *st) const;
 482   virtual void           rel(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
 483 #endif
 484 };
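
SafePointNode gains the same rel() hook (line 482). Since a safepoint carries a long tail of debug-info edges, a natural (assumed) compact behavior is to stop before the JVMState section; jvms()->debug_start() and req() are existing accessors:

  // Sketch (assumed): fixed inputs are related; in compact mode, cut
  // the list off where the JVMState's debug-info edges begin.
  void SafePointNode::rel(GrowableArray<Node*> *in_rel,
                          GrowableArray<Node*> *out_rel,
                          bool compact) const {
    uint limit = (compact && jvms() != NULL) ? jvms()->debug_start() : req();
    for (uint i = 0; i < limit; i++) {
      if (in(i) != NULL)  in_rel->append(in(i));
    }
    for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
      out_rel->append(fast_out(i));  // projections and other users
    }
  }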
 485 
 486 //------------------------------SafePointScalarObjectNode----------------------
 487 // A SafePointScalarObjectNode represents the state of a scalarized object
 488 // at a safepoint.
 489 
 490 class SafePointScalarObjectNode: public TypeNode {
 491   uint _first_index; // First input edge relative index of a SafePoint node where
 492                      // states of the scalarized object fields are collected.
 493                      // It is relative to the last (youngest) jvms->_scloff.
 494   uint _n_fields;    // Number of non-static fields of the scalarized object.
 495   DEBUG_ONLY(AllocateNode* _alloc;)
 496 
 497   virtual uint hash() const ; // { return NO_HASH; }
 498   virtual uint cmp( const Node &n ) const;
 499 
 500   uint first_index() const { return _first_index; }
 501 
 502 public:


 662   const int       _bci;         // Byte Code Index of call byte code
 663   CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci)
 664     : CallNode(tf, addr, TypePtr::BOTTOM),
 665       _method(method), _bci(bci),
 666       _optimized_virtual(false),
 667       _method_handle_invoke(false)
 668   {
 669     init_class_id(Class_CallJava);
 670   }
 671 
 672   virtual int   Opcode() const;
 673   ciMethod* method() const                { return _method; }
 674   void  set_method(ciMethod *m)           { _method = m; }
 675   void  set_optimized_virtual(bool f)     { _optimized_virtual = f; }
 676   bool  is_optimized_virtual() const      { return _optimized_virtual; }
 677   void  set_method_handle_invoke(bool f)  { _method_handle_invoke = f; }
 678   bool  is_method_handle_invoke() const   { return _method_handle_invoke; }
 679 
 680 #ifndef PRODUCT
 681   virtual void  dump_spec(outputStream *st) const;
 682   virtual void  dump_comp_spec(outputStream *st) const;
 683 #endif
 684 };
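
For CallJavaNode the compact hook (line 682) has an obvious candidate payload: the callee. A sketch under that assumption, using the existing ciMethod::print_short_name():

  // Sketch (assumed): a compact dump of a Java call shows just the
  // callee's short name rather than the full dump_spec() output.
  void CallJavaNode::dump_comp_spec(outputStream *st) const {
    if (_method != NULL) {
      _method->print_short_name(st);  // e.g. "String::length"
    } else {
      st->print("<no method>");
    }
  }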
 685 
 686 //------------------------------CallStaticJavaNode-----------------------------
 687 // Make a direct subroutine call using Java calling convention (for static
 688 // calls and optimized virtual calls, plus calls to wrappers for run-time
 689 // routines); generates static stub.
 690 class CallStaticJavaNode : public CallJavaNode {
 691   virtual uint cmp( const Node &n ) const;
 692   virtual uint size_of() const; // Size is bigger
 693 public:
 694   CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
 695     : CallJavaNode(tf, addr, method, bci) {
 696     init_class_id(Class_CallStaticJava);
 697     if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
 698       init_flags(Flag_is_macro);
 699       C->add_macro_node(this);
 700     }
 701     _is_scalar_replaceable = false;
 702     _is_non_escaping = false;


 718 
 719   // If this is an uncommon trap, return the request code, else zero.
 720   int uncommon_trap_request() const;
 721   static int extract_uncommon_trap_request(const Node* call);
 722 
 723   bool is_boxing_method() const {
 724     return is_macro() && (method() != NULL) && method()->is_boxing_method();
 725   }
 726   // Later inlining modifies the JVMState, so we need to clone it
 727   // when the call node is cloned (because it is a macro node).
 728   virtual void  clone_jvms(Compile* C) {
 729     if ((jvms() != NULL) && is_boxing_method()) {
 730       set_jvms(jvms()->clone_deep(C));
 731       jvms()->set_map_deep(this);
 732     }
 733   }
 734 
 735   virtual int         Opcode() const;
 736 #ifndef PRODUCT
 737   virtual void        dump_spec(outputStream *st) const;
 738   virtual void        dump_comp_spec(outputStream *st) const;
 739 #endif
 740 };
 741 
 742 //------------------------------CallDynamicJavaNode----------------------------
 743 // Make a dispatched call using Java calling convention.
 744 class CallDynamicJavaNode : public CallJavaNode {
 745   virtual uint cmp( const Node &n ) const;
 746   virtual uint size_of() const; // Size is bigger
 747 public:
 748   CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
 749     init_class_id(Class_CallDynamicJava);
 750   }
 751 
 752   int _vtable_index;
 753   virtual int   Opcode() const;
 754 #ifndef PRODUCT
 755   virtual void  dump_spec(outputStream *st) const;
 756 #endif
 757 };
 758 


 940   // Pattern-match a possible usage of AllocateArrayNode.
 941   // Return null if no allocation is recognized.
 942   static AllocateArrayNode* Ideal_array_allocation(Node* ptr, PhaseTransform* phase) {
 943     AllocateNode* allo = Ideal_allocation(ptr, phase);
 944     return (allo == NULL || !allo->is_AllocateArray())
 945            ? NULL : allo->as_AllocateArray();
 946   }
 947 };
 948 
 949 //------------------------------AbstractLockNode-----------------------------------
 950 class AbstractLockNode: public CallNode {
 951 private:
 952   enum {
 953     Regular = 0,  // Normal lock
 954     NonEscObj,    // Lock is used for a non-escaping object
 955     Coarsened,    // Lock was coarsened
 956     Nested        // Nested lock
 957   } _kind;
 958 #ifndef PRODUCT
 959   NamedCounter* _counter;
 960   static const char* _kind_names[Nested+1];
 961 #endif
 962 
 963 protected:
 964   // helper functions for lock elimination
 965   //
 966 
 967   bool find_matching_unlock(const Node* ctrl, LockNode* lock,
 968                             GrowableArray<AbstractLockNode*> &lock_ops);
 969   bool find_lock_and_unlock_through_if(Node* node, LockNode* lock,
 970                                        GrowableArray<AbstractLockNode*> &lock_ops);
 971   bool find_unlocks_for_region(const RegionNode* region, LockNode* lock,
 972                                GrowableArray<AbstractLockNode*> &lock_ops);
 973   LockNode *find_matching_lock(UnlockNode* unlock);
 974 
 975   // Update the counter to indicate that this lock was eliminated.
 976   void set_eliminated_lock_counter() PRODUCT_RETURN;
 977 
 978 public:
 979   AbstractLockNode(const TypeFunc *tf)
 980     : CallNode(tf, NULL, TypeRawPtr::BOTTOM),


 995   virtual uint size_of() const { return sizeof(*this); }
 996 
 997   bool is_eliminated()  const { return (_kind != Regular); }
 998   bool is_non_esc_obj() const { return (_kind == NonEscObj); }
 999   bool is_coarsened()   const { return (_kind == Coarsened); }
1000   bool is_nested()      const { return (_kind == Nested); }
1001 
1002   const char * kind_as_string() const;
1003   void log_lock_optimization(Compile* c, const char * tag) const;
1004 
1005   void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
1006   void set_coarsened()   { _kind = Coarsened; set_eliminated_lock_counter(); }
1007   void set_nested()      { _kind = Nested; set_eliminated_lock_counter(); }
1008 
1009   // locking does not modify its arguments
1010   virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase){ return false;}
1011 
1012 #ifndef PRODUCT
1013   void create_lock_counter(JVMState* s);
1014   NamedCounter* counter() const { return _counter; }
1015   virtual void dump_spec(outputStream* st) const;
1016   virtual void dump_comp_spec(outputStream* st) const;
1017   virtual void rel(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
1018 #endif
1019 };
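
The new _kind_names table (line 960) has one slot per _kind value (Nested+1 entries), so the obvious (assumed) use is as the string source for kind_as_string() and the new non-product dump_spec(); a sketch of the matching definitions in callnode.cpp:

  // Sketch (assumed): one name per enum value on lines 952..957.
  #ifndef PRODUCT
  const char* AbstractLockNode::_kind_names[] = {"Regular", "NonEscObj", "Coarsened", "Nested"};

  void AbstractLockNode::dump_spec(outputStream* st) const {
    st->print("%s ", _kind_names[_kind]);  // prefix with the lock kind
    CallNode::dump_spec(st);               // then the usual call info
  }
  #endif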
1020 
1021 //------------------------------Lock---------------------------------------
1022 // High-level lock operation
1023 //
1024 // This is a subclass of CallNode because it is a macro node which gets expanded
1025 // into a code sequence containing a call.  This node takes 3 "parameters":
1026 //    0  -  object to lock
1027 //    1 -   a BoxLockNode
1028 //    2 -   a FastLockNode
1029 //
1030 class LockNode : public AbstractLockNode {
1031 public:
1032 
1033   static const TypeFunc *lock_type() {
1034     // create input type (domain)
1035     const Type **fields = TypeTuple::fields(3);
1036     fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL;  // Object to be Locked
1037     fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM;    // Address of stack location for lock

