884 fields[ValueNode] = Type::BOTTOM;
885 fields[DefaultValue] = TypeInstPtr::NOTNULL;
886 fields[RawDefaultValue] = TypeX_X;
887
888 const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
889
890 // create result type (range)
891 fields = TypeTuple::fields(1);
892 fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
893
894 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
895
896 return TypeFunc::make(domain, range);
897 }
898
899 // Result of Escape Analysis
900 bool _is_scalar_replaceable;  // presumably: allocation can be scalar-replaced -- verify in escape analysis
901 bool _is_non_escaping;        // True when the escape state is noEscape (see does_not_escape_thread)
902 // True when MemBar for new is redundant with MemBar at initializer exit
903 bool _is_allocation_MemBar_redundant;
904
905 virtual uint size_of() const; // Size is bigger
906 AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
907 Node *size, Node *klass_node, Node *initial_test,
908 ValueTypeBaseNode* value_node = NULL);
909 // Expansion rewrites the JVMState, so install a deep clone before mutating it.
910 virtual void clone_jvms(Compile* C) {
911   if (jvms() == NULL)  return;
912   set_jvms(jvms()->clone_deep(C));
913   jvms()->set_map_deep(this);
914 }
916 virtual int Opcode() const;
917 virtual uint ideal_reg() const { return Op_RegP; }    // Result lives in a pointer register
918 virtual bool guaranteed_safepoint() { return false; } // No safepoint is guaranteed at this call
919
920 virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
921
922 // Allocations do not modify their arguments
923 virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
959 bool maybe_set_complete(PhaseGVN* phase);
960
961 // Return true if allocation doesn't escape thread, its escape state
962 // needs to be noEscape or ArgEscape. InitializeNode._does_not_escape
963 // is true when its allocation's escape state is noEscape or
964 // ArgEscape. In case allocation's InitializeNode is NULL, check
965 // AllocateNode._is_non_escaping flag.
966 // AllocateNode._is_non_escaping is true when its escape state is
967 // noEscape.
968 bool does_not_escape_thread() {
969   if (_is_non_escaping)  return true;
970   InitializeNode* init = initialization();
971   return init != NULL && init->does_not_escape();
972 }
972
973 // If the object doesn't escape in its <.init> method and there is a memory
974 // barrier at the exit of that <.init>, the memory barrier for new is redundant.
975 // Invoke this method when the MemBar at the exit of the initializer
976 // post-dominates the allocation node.
977 void compute_MemBar_redundancy(ciMethod* initializer);
978 bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
979 };
980
981 //------------------------------AllocateArray---------------------------------
982 //
983 // High-level array allocation
984 //
985 class AllocateArrayNode : public AllocateNode {
986 public:
987 AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
988 Node* size, Node* klass_node, Node* initial_test,
989 Node* count_val, Node* default_value, Node* raw_default_value
990 )
991 : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
992 initial_test)
993 {
994 init_class_id(Class_AllocateArray);
995 set_req(AllocateNode::ALength, count_val); // NOTE(review): set_req here vs init_req below -- presumably ALength is pre-filled by the base ctor; confirm
996 init_req(AllocateNode::DefaultValue, default_value); // presumably the oop default element -- verify against callers
997 init_req(AllocateNode::RawDefaultValue, raw_default_value);
998 }
|
884 fields[ValueNode] = Type::BOTTOM;
885 fields[DefaultValue] = TypeInstPtr::NOTNULL;
886 fields[RawDefaultValue] = TypeX_X;
887
888 const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
889
890 // create result type (range)
891 fields = TypeTuple::fields(1);
892 fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
893
894 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
895
896 return TypeFunc::make(domain, range);
897 }
898
899 // Result of Escape Analysis
900 bool _is_scalar_replaceable;  // presumably: allocation can be scalar-replaced -- verify in escape analysis
901 bool _is_non_escaping;        // True when the escape state is noEscape (see does_not_escape_thread)
902 // True when MemBar for new is redundant with MemBar at initializer exit
903 bool _is_allocation_MemBar_redundant;
904 bool _larval;                 // presumably: object is still in its larval (not fully initialized) state -- TODO confirm
905
906 virtual uint size_of() const; // Size is bigger
907 AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
908 Node *size, Node *klass_node, Node *initial_test,
909 ValueTypeBaseNode* value_node = NULL);
910 // Expansion rewrites the JVMState, so install a deep clone before mutating it.
911 virtual void clone_jvms(Compile* C) {
912   if (jvms() == NULL)  return;
913   set_jvms(jvms()->clone_deep(C));
914   jvms()->set_map_deep(this);
915 }
917 virtual int Opcode() const;
918 virtual uint ideal_reg() const { return Op_RegP; }    // Result lives in a pointer register
919 virtual bool guaranteed_safepoint() { return false; } // No safepoint is guaranteed at this call
920
921 virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
922
923 // Allocations do not modify their arguments
924 virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
960 bool maybe_set_complete(PhaseGVN* phase);
961
962 // Return true if allocation doesn't escape thread, its escape state
963 // needs to be noEscape or ArgEscape. InitializeNode._does_not_escape
964 // is true when its allocation's escape state is noEscape or
965 // ArgEscape. In case allocation's InitializeNode is NULL, check
966 // AllocateNode._is_non_escaping flag.
967 // AllocateNode._is_non_escaping is true when its escape state is
968 // noEscape.
969 bool does_not_escape_thread() {
970   if (_is_non_escaping)  return true;
971   InitializeNode* init = initialization();
972   return init != NULL && init->does_not_escape();
973 }
973
974 // If the object doesn't escape in its <.init> method and there is a memory
975 // barrier at the exit of that <.init>, the memory barrier for new is redundant.
976 // Invoke this method when the MemBar at the exit of the initializer
977 // post-dominates the allocation node.
978 void compute_MemBar_redundancy(ciMethod* initializer);
979 bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
980
981 // Builds the initial mark word value for the new object -- presumably; definition not visible in this chunk
981 Node* make_ideal_mark(PhaseGVN *phase, Node* obj, Node* control, Node* mem, Node* klass_node);
982 };
983
984 //------------------------------AllocateArray---------------------------------
985 //
986 // High-level array allocation
987 //
988 class AllocateArrayNode : public AllocateNode {
989 public:
990 AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
991 Node* size, Node* klass_node, Node* initial_test,
992 Node* count_val, Node* default_value, Node* raw_default_value
993 )
994 : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
995 initial_test)
996 {
997 init_class_id(Class_AllocateArray);
998 set_req(AllocateNode::ALength, count_val); // NOTE(review): set_req here vs init_req below -- presumably ALength is pre-filled by the base ctor; confirm
999 init_req(AllocateNode::DefaultValue, default_value); // presumably the oop default element -- verify against callers
1000 init_req(AllocateNode::RawDefaultValue, raw_default_value);
1001 }
|