
src/share/vm/opto/callnode.hpp

rev 10504 : value type calling convention

--- old/src/share/vm/opto/callnode.hpp

 555 
 556 class CallGenerator;
 557 
 558 //------------------------------CallNode---------------------------------------
 559 // Call nodes now subsume the function of debug nodes at callsites, so they
 560 // contain the functionality of a full scope chain of debug nodes.
 561 class CallNode : public SafePointNode {
 562   friend class VMStructs;
 563 
 564 protected:
 565   bool may_modify_arraycopy_helper(const TypeOopPtr* dest_t, const TypeOopPtr *t_oop, PhaseTransform *phase);
 566 
 567 public:
 568   const TypeFunc *_tf;        // Function type
 569   address      _entry_point;  // Address of method being called
 570   float        _cnt;          // Estimate of number of times called
 571   CallGenerator* _generator;  // corresponding CallGenerator for some late inline calls
 572   const char *_name;           // Printable name, if _method is NULL
 573 
 574   CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type)
 575     : SafePointNode(tf->domain()->cnt(), NULL, adr_type),
 576       _tf(tf),
 577       _entry_point(addr),
 578       _cnt(COUNT_UNKNOWN),
 579       _generator(NULL),
 580       _name(NULL)
 581   {
 582     init_class_id(Class_Call);
 583   }
 584 
 585   const TypeFunc* tf()         const { return _tf; }
 586   const address  entry_point() const { return _entry_point; }
 587   const float    cnt()         const { return _cnt; }
 588   CallGenerator* generator()   const { return _generator; }
 589 
 590   void set_tf(const TypeFunc* tf)       { _tf = tf; }
 591   void set_entry_point(address p)       { _entry_point = p; }
 592   void set_cnt(float c)                 { _cnt = c; }
 593   void set_generator(CallGenerator* cg) { _generator = cg; }
 594 
 595   virtual const Type *bottom_type() const;


 836     InitialTest,                      // slow-path test (may be constant)
 837     ALength,                          // array length (or TOP if none)
 838     ParmLimit
 839   };
 840 
 841   static const TypeFunc* alloc_type(const Type* t) {
 842     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
 843     fields[AllocSize]   = TypeInt::POS;
 844     fields[KlassNode]   = TypeInstPtr::NOTNULL;
 845     fields[InitialTest] = TypeInt::BOOL;
 846     fields[ALength]     = t;  // length (can be a bad length)
 847 
 848     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
 849 
 850     // create result type (range)
 851     fields = TypeTuple::fields(1);
 852     fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
 853 
 854     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
 855 
 856     return TypeFunc::make(domain, range);
 857   }
 858 
 859   // Result of Escape Analysis
 860   bool _is_scalar_replaceable;
 861   bool _is_non_escaping;
 862   // True when MemBar for new is redundant with MemBar at initializer exit
 863   bool _is_allocation_MemBar_redundant;
 864 
 865   virtual uint size_of() const; // Size is bigger
 866   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
 867                Node *size, Node *klass_node, Node *initial_test);
 868   // Expansion modifies the JVMState, so we need to clone it
 869   virtual void  clone_jvms(Compile* C) {
 870     if (jvms() != NULL) {
 871       set_jvms(jvms()->clone_deep(C));
 872       jvms()->set_map_deep(this);
 873     }
 874   }
 875   virtual int Opcode() const;
 876   virtual uint ideal_reg() const { return Op_RegP; }


1052 //    0 - object to lock
1053 //    1 - a BoxLockNode
1054 //    2 - a FastLockNode
1055 //
1056 class LockNode : public AbstractLockNode {
1057 public:
1058 
1059   static const TypeFunc *lock_type() {
1060     // create input type (domain)
1061     const Type **fields = TypeTuple::fields(3);
1062     fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL;  // Object to be Locked
1063     fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM;    // Address of stack location for lock
1064     fields[TypeFunc::Parms+2] = TypeInt::BOOL;         // FastLock
1065     const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3, fields);
1066 
1067     // create result type (range)
1068     fields = TypeTuple::fields(0);
1069 
1070     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0, fields);
1071 
1072     return TypeFunc::make(domain, range);
1073   }
1074 
1075   virtual int Opcode() const;
1076   virtual uint size_of() const; // Size is bigger
1077   LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
1078     init_class_id(Class_Lock);
1079     init_flags(Flag_is_macro);
1080     C->add_macro_node(this);
1081   }
1082   virtual bool        guaranteed_safepoint()  { return false; }
1083 
1084   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1085   // Expansion modifies the JVMState, so we need to clone it
1086   virtual void  clone_jvms(Compile* C) {
1087     if (jvms() != NULL) {
1088       set_jvms(jvms()->clone_deep(C));
1089       jvms()->set_map_deep(this);
1090     }
1091   }
1092 

+++ new/src/share/vm/opto/callnode.hpp




 555 
 556 class CallGenerator;
 557 
 558 //------------------------------CallNode---------------------------------------
 559 // Call nodes now subsume the function of debug nodes at callsites, so they
 560 // contain the functionality of a full scope chain of debug nodes.
 561 class CallNode : public SafePointNode {
 562   friend class VMStructs;
 563 
 564 protected:
 565   bool may_modify_arraycopy_helper(const TypeOopPtr* dest_t, const TypeOopPtr *t_oop, PhaseTransform *phase);
 566 
 567 public:
 568   const TypeFunc *_tf;        // Function type
 569   address      _entry_point;  // Address of method being called
 570   float        _cnt;          // Estimate of number of times called
 571   CallGenerator* _generator;  // corresponding CallGenerator for some late inline calls
 572   const char *_name;           // Printable name, if _method is NULL
 573 
 574   CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type)
 575     : SafePointNode(tf->domain_cc()->cnt(), NULL, adr_type),
 576       _tf(tf),
 577       _entry_point(addr),
 578       _cnt(COUNT_UNKNOWN),
 579       _generator(NULL),
 580       _name(NULL)
 581   {
 582     init_class_id(Class_Call);
 583   }
 584 
 585   const TypeFunc* tf()         const { return _tf; }
 586   const address  entry_point() const { return _entry_point; }
 587   const float    cnt()         const { return _cnt; }
 588   CallGenerator* generator()   const { return _generator; }
 589 
 590   void set_tf(const TypeFunc* tf)       { _tf = tf; }
 591   void set_entry_point(address p)       { _entry_point = p; }
 592   void set_cnt(float c)                 { _cnt = c; }
 593   void set_generator(CallGenerator* cg) { _generator = cg; }
 594 
 595   virtual const Type *bottom_type() const;
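
The one change in this hunk is the switch from tf->domain()->cnt() to tf->domain_cc()->cnt() when sizing the node's inputs. Given the rev title, the working assumption is that a TypeFunc now carries two views of its arguments: the domain as declared in the signature, and the domain as actually passed once a value type argument is scalarized into one slot per field. A minimal standalone sketch of that distinction follows; FnType, Point, and everything else in it are hypothetical stand-ins, not HotSpot code.

    #include <cstdio>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for C2's TypeFunc: one tuple per view of the args.
    struct FnType {
      std::vector<std::string> domain_sig; // arguments as declared in the signature
      std::vector<std::string> domain_cc;  // arguments as passed by the calling convention
    };

    int main() {
      // A value type Point { int x; int y; } passed by value is scalarized:
      // one declared Point argument becomes two int slots in the convention.
      FnType f;
      f.domain_sig = { "Point" };
      f.domain_cc  = { "int /*Point.x*/", "int /*Point.y*/" };

      // A call node needs one input edge per passed slot, so it sizes itself
      // from the calling-convention view, not the signature view.
      std::printf("signature args: %zu, call inputs needed: %zu\n",
                  f.domain_sig.size(), f.domain_cc.size());
      return 0;
    }

Sizing the SafePointNode from the calling-convention view reserves one input edge per slot actually passed, which is what matters once a single declared argument can occupy several.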


 836     InitialTest,                      // slow-path test (may be constant)
 837     ALength,                          // array length (or TOP if none)
 838     ParmLimit
 839   };
 840 
 841   static const TypeFunc* alloc_type(const Type* t) {
 842     const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
 843     fields[AllocSize]   = TypeInt::POS;
 844     fields[KlassNode]   = TypeInstPtr::NOTNULL;
 845     fields[InitialTest] = TypeInt::BOOL;
 846     fields[ALength]     = t;  // length (can be a bad length)
 847 
 848     const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
 849 
 850     // create result type (range)
 851     fields = TypeTuple::fields(1);
 852     fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
 853 
 854     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
 855 
 856     return TypeFunc::make(domain, domain, range);
 857   }
 858 
 859   // Result of Escape Analysis
 860   bool _is_scalar_replaceable;
 861   bool _is_non_escaping;
 862   // True when MemBar for new is redundant with MemBar at initializer exit
 863   bool _is_allocation_MemBar_redundant;
 864 
 865   virtual uint size_of() const; // Size is bigger
 866   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
 867                Node *size, Node *klass_node, Node *initial_test);
 868   // Expansion modifies the JVMState, so we need to clone it
 869   virtual void  clone_jvms(Compile* C) {
 870     if (jvms() != NULL) {
 871       set_jvms(jvms()->clone_deep(C));
 872       jvms()->set_map_deep(this);
 873     }
 874   }
 875   virtual int Opcode() const;
 876   virtual uint ideal_reg() const { return Op_RegP; }
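
alloc_type() above is unchanged except for the three-argument TypeFunc::make at the end, and it shows C2's usual recipe for building a call type: name each parameter slot in an enum starting at TypeFunc::Parms, let TypeTuple::fields() hand back an array whose fixed leading slots (control, I/O, memory, frame pointer, return address) are already accounted for, then fill the named slots by absolute index. Below is a standalone sketch of just that slot arithmetic; Parms = 5 mirrors HotSpot's fixed inputs, and the rest is hypothetical.

    #include <cassert>
    #include <cstdio>

    // Fixed leading inputs every C2 call type carries before its parameters:
    // control, I/O, memory, frame pointer, return address.
    enum { Parms = 5 };

    // Parameter slots named from Parms upward, mirroring the AllocateNode enum.
    enum { AllocSize = Parms, KlassNode, InitialTest, ALength, ParmLimit };

    int main() {
      // fields() is asked only for the count of parameters past Parms...
      assert(ParmLimit - Parms == 4);

      // ...but the array it stands for is indexed by absolute slot number,
      // so the enum names index it directly.
      const char* fields[ParmLimit] = {};
      fields[AllocSize]   = "TypeInt::POS";         // allocation size
      fields[KlassNode]   = "TypeInstPtr::NOTNULL"; // klass to allocate
      fields[InitialTest] = "TypeInt::BOOL";        // slow-path test
      fields[ALength]     = "t";                    // array length, or TOP

      for (int i = Parms; i < ParmLimit; i++)
        std::printf("slot %d: %s\n", i, fields[i]);
      return 0;
    }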


1052 //    0 - object to lock
1053 //    1 - a BoxLockNode
1054 //    2 - a FastLockNode
1055 //
1056 class LockNode : public AbstractLockNode {
1057 public:
1058 
1059   static const TypeFunc *lock_type() {
1060     // create input type (domain)
1061     const Type **fields = TypeTuple::fields(3);
1062     fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL;  // Object to be Locked
1063     fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM;    // Address of stack location for lock
1064     fields[TypeFunc::Parms+2] = TypeInt::BOOL;         // FastLock
1065     const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3, fields);
1066 
1067     // create result type (range)
1068     fields = TypeTuple::fields(0);
1069 
1070     const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0, fields);
1071 
1072     return TypeFunc::make(domain, domain, range);
1073   }
1074 
1075   virtual int Opcode() const;
1076   virtual uint size_of() const; // Size is bigger
1077   LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
1078     init_class_id(Class_Lock);
1079     init_flags(Flag_is_macro);
1080     C->add_macro_node(this);
1081   }
1082   virtual bool        guaranteed_safepoint()  { return false; }
1083 
1084   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1085   // Expansion modifies the JVMState, so we need to clone it
1086   virtual void  clone_jvms(Compile* C) {
1087     if (jvms() != NULL) {
1088       set_jvms(jvms()->clone_deep(C));
1089       jvms()->set_map_deep(this);
1090     }
1091   }
1092 
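
lock_type() exercises the degenerate ends of the same recipe: the range is built from zero fields, so its count is TypeFunc::Parms + 0 and the call returns nothing; and, as in alloc_type() above, the new three-argument TypeFunc::make(domain, domain, range) passes one tuple for both the signature view and the calling-convention view, presumably because none of the three lock arguments is a value type and the two views coincide. A sketch of how the void range falls out of the counts; Tuple and make_tuple are hypothetical stand-ins for HotSpot's TypeTuple machinery.

    #include <cstdio>
    #include <string>
    #include <utility>
    #include <vector>

    enum { Parms = 5 }; // fixed slots counted before any parameters or results

    // Hypothetical tuple: the count is absolute, i.e. it includes the
    // fixed leading slots, just like TypeFunc::Parms+N in the real code.
    struct Tuple {
      size_t cnt;
      std::vector<std::string> names;
    };

    Tuple make_tuple(std::vector<std::string> names) {
      return Tuple{ Parms + names.size(), std::move(names) };
    }

    int main() {
      // Three lock arguments: the object, the stack lock slot, a fast-lock flag.
      Tuple domain = make_tuple({ "oop to lock", "lock address", "FastLock flag" });

      // Zero result fields: cnt == Parms + 0, i.e. the call is void.
      Tuple range = make_tuple({});

      std::printf("domain cnt = %zu, range cnt = %zu (void)\n",
                  domain.cnt, range.cnt);
      return 0;
    }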

