555
556 class CallGenerator;
557
558 //------------------------------CallNode---------------------------------------
559 // Call nodes now subsume the function of debug nodes at callsites, so they
560 // contain the functionality of a full scope chain of debug nodes.
561 class CallNode : public SafePointNode {
562 friend class VMStructs;
563
564 protected:
565 bool may_modify_arraycopy_helper(const TypeOopPtr* dest_t, const TypeOopPtr *t_oop, PhaseTransform *phase);
566
567 public:
568 const TypeFunc *_tf; // Function type
569 address _entry_point; // Address of method being called
570 float _cnt; // Estimate of number of times called
571 CallGenerator* _generator; // corresponding CallGenerator for some late inline calls
572 const char *_name; // Printable name, if _method is NULL
573
// Construct a call with the given signature, target address and memory slice.
// The SafePointNode input count is sized from the signature's domain tuple;
// call count, late-inline generator and printable name start at their
// "unknown"/NULL defaults.
574 CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type)
575 : SafePointNode(tf->domain()->cnt(), NULL, adr_type),
576 _tf(tf),
577 _entry_point(addr),
578 _cnt(COUNT_UNKNOWN),
579 _generator(NULL),
580 _name(NULL)
581 {
582 init_class_id(Class_Call);
583 }
584
// Simple accessors for the call's signature, target, profile count and
// late-inline generator.
// NOTE(review): the leading `const` on the by-value returns of entry_point()
// and cnt() is meaningless (top-level const on a return value) and typically
// triggers -Wignored-qualifiers; consider dropping it.
585 const TypeFunc* tf() const { return _tf; }
586 const address entry_point() const { return _entry_point; }
587 const float cnt() const { return _cnt; }
588 CallGenerator* generator() const { return _generator; }
589
// Corresponding mutators, used when a call is retargeted or re-profiled.
590 void set_tf(const TypeFunc* tf) { _tf = tf; }
591 void set_entry_point(address p) { _entry_point = p; }
592 void set_cnt(float c) { _cnt = c; }
593 void set_generator(CallGenerator* cg) { _generator = cg; }
594
595 virtual const Type *bottom_type() const;
1052 // 0 - object to lock
1053 // 1 - a BoxLockNode
1054 // 2 - a FastLockNode
1055 //
1056 class LockNode : public AbstractLockNode {
1057 public:
1058
// Build the TypeFunc for the slow-path lock call: three parameters
// (the object, the raw address of the on-stack lock slot, and the FastLock
// boolean) and an empty range (no return value).
1059 static const TypeFunc *lock_type() {
1060 // create input type (domain)
1061 const Type **fields = TypeTuple::fields(3);
1062 fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL; // Object to be Locked
1063 fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM; // Address of stack location for lock
1064 fields[TypeFunc::Parms+2] = TypeInt::BOOL; // FastLock
1065 const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3,fields);
1066
1067 // create result type (range)
1068 fields = TypeTuple::fields(0);
1069
// Zero result fields: the lock call returns nothing.
1070 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields);
1071
1072 return TypeFunc::make(domain,range);
1073 }
1074
1075 virtual int Opcode() const;
1076 virtual uint size_of() const; // Size is bigger
// Construct a LockNode and register it with the compiler's macro-node list;
// Flag_is_macro marks it for later (macro) expansion.
1077 LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
1078 init_class_id(Class_Lock);
1079 init_flags(Flag_is_macro);
1080 C->add_macro_node(this);
1081 }
1082 virtual bool guaranteed_safepoint() { return false; }
1083
1084 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1085 // Expansion modifies the JVMState, so we need to clone it
// Deep-clone the whole JVMState chain (expansion mutates it), then repoint
// every cloned frame's map at this node via set_map_deep.
1086 virtual void clone_jvms(Compile* C) {
1087 if (jvms() != NULL) {
1088 set_jvms(jvms()->clone_deep(C));
1089 jvms()->set_map_deep(this);
1090 }
1091 }
1092
|
555
556 class CallGenerator;
557
558 //------------------------------CallNode---------------------------------------
559 // Call nodes now subsume the function of debug nodes at callsites, so they
560 // contain the functionality of a full scope chain of debug nodes.
561 class CallNode : public SafePointNode {
562 friend class VMStructs;
563
564 protected:
565 bool may_modify_arraycopy_helper(const TypeOopPtr* dest_t, const TypeOopPtr *t_oop, PhaseTransform *phase);
566
567 public:
568 const TypeFunc *_tf; // Function type
569 address _entry_point; // Address of method being called
570 float _cnt; // Estimate of number of times called
571 CallGenerator* _generator; // corresponding CallGenerator for some late inline calls
572 const char *_name; // Printable name, if _method is NULL
573
// Construct a call with the given signature, target address and memory slice.
// The SafePointNode input count is sized from domain_cc() rather than
// domain() — presumably the calling-convention view of the signature, which
// may have a different field count than the declared domain; confirm against
// TypeFunc's definition.
574 CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type)
575 : SafePointNode(tf->domain_cc()->cnt(), NULL, adr_type),
576 _tf(tf),
577 _entry_point(addr),
578 _cnt(COUNT_UNKNOWN),
579 _generator(NULL),
580 _name(NULL)
581 {
582 init_class_id(Class_Call);
583 }
584
// Simple accessors for the call's signature, target, profile count and
// late-inline generator.
// NOTE(review): the leading `const` on the by-value returns of entry_point()
// and cnt() is meaningless (top-level const on a return value) and typically
// triggers -Wignored-qualifiers; consider dropping it.
585 const TypeFunc* tf() const { return _tf; }
586 const address entry_point() const { return _entry_point; }
587 const float cnt() const { return _cnt; }
588 CallGenerator* generator() const { return _generator; }
589
// Corresponding mutators, used when a call is retargeted or re-profiled.
590 void set_tf(const TypeFunc* tf) { _tf = tf; }
591 void set_entry_point(address p) { _entry_point = p; }
592 void set_cnt(float c) { _cnt = c; }
593 void set_generator(CallGenerator* cg) { _generator = cg; }
594
595 virtual const Type *bottom_type() const;
1052 // 0 - object to lock
1053 // 1 - a BoxLockNode
1054 // 2 - a FastLockNode
1055 //
1056 class LockNode : public AbstractLockNode {
1057 public:
1058
// Build the TypeFunc for the slow-path lock call: three parameters
// (the object, the raw address of the on-stack lock slot, and the FastLock
// boolean) and an empty range (no return value).
1059 static const TypeFunc *lock_type() {
1060 // create input type (domain)
1061 const Type **fields = TypeTuple::fields(3);
1062 fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL; // Object to be Locked
1063 fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM; // Address of stack location for lock
1064 fields[TypeFunc::Parms+2] = TypeInt::BOOL; // FastLock
1065 const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3,fields);
1066
1067 // create result type (range)
1068 fields = TypeTuple::fields(0);
1069
// Zero result fields: the lock call returns nothing.
1070 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields);
1071
1072 return TypeFunc::make(domain, range);
1073 }
1074
1075 virtual int Opcode() const;
1076 virtual uint size_of() const; // Size is bigger
// Construct a LockNode and register it with the compiler's macro-node list;
// Flag_is_macro marks it for later (macro) expansion.
1077 LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
1078 init_class_id(Class_Lock);
1079 init_flags(Flag_is_macro);
1080 C->add_macro_node(this);
1081 }
1082 virtual bool guaranteed_safepoint() { return false; }
1083
1084 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1085 // Expansion modifies the JVMState, so we need to clone it
// Deep-clone the whole JVMState chain (expansion mutates it), then repoint
// every cloned frame's map at this node via set_map_deep.
1086 virtual void clone_jvms(Compile* C) {
1087 if (jvms() != NULL) {
1088 set_jvms(jvms()->clone_deep(C));
1089 jvms()->set_map_deep(this);
1090 }
1091 }
1092
|