819 //------------------------------Allocate---------------------------------------
820 // High-level memory allocation
821 //
822 // AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
823 // get expanded into a code sequence containing a call. Unlike other CallNodes,
824 // they have 2 memory projections and 2 i_o projections (which are distinguished by
825 // the _is_io_use flag in the projection.) This is needed when expanding the node in
826 // order to differentiate the uses of the projection on the normal control path from
827 // those on the exception return path.
828 //
829 class AllocateNode : public CallNode {
830 public:
831 enum {
// Note: RawAddress and AllocSize intentionally share the index
// TypeFunc::Parms — outputs (range tuple) and inputs (domain tuple)
// are numbered independently; see alloc_type() below, which builds
// the two tuples separately.
832 // Output:
833 RawAddress = TypeFunc::Parms, // the newly-allocated raw address
834 // Inputs:
835 AllocSize = TypeFunc::Parms, // size (in bytes) of the new object
836 KlassNode, // type (maybe dynamic) of the obj.
837 InitialTest, // slow-path test (may be constant)
838 ALength, // array length (or TOP if none)
839 ParmLimit
840 };
841
// Builds the call signature (TypeFunc) for an allocation call: the
// domain holds the AllocSize/KlassNode/InitialTest/ALength inputs and
// the range is the single returned raw address. 't' is the type the
// caller supplies for the ALength slot (per the enum above, TOP when
// there is no array length).
842 static const TypeFunc* alloc_type(const Type* t) {
843 const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
844 fields[AllocSize] = TypeInt::POS;
845 fields[KlassNode] = TypeInstPtr::NOTNULL;
846 fields[InitialTest] = TypeInt::BOOL;
847 fields[ALength] = t; // length (can be a bad length)
848
849 const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
850
851 // create result type (range)
852 fields = TypeTuple::fields(1); // 'fields' is reused for the range tuple
853 fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
854
855 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
856
857 return TypeFunc::make(domain, range);
858 }
859
860 // Result of Escape Analysis
861 bool _is_scalar_replaceable;
862 bool _is_non_escaping;
863 // True when MemBar for new is redundant with MemBar at initializer exit
864 bool _is_allocation_MemBar_redundant;
865
866 virtual uint size_of() const; // Size is bigger
867 AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
868 Node *size, Node *klass_node, Node *initial_test);
869 // Expansion modifies the JVMState, so we need to clone it
870 virtual void clone_jvms(Compile* C) {
871 if (jvms() != NULL) { // nothing to clone if no JVMState is attached
872 set_jvms(jvms()->clone_deep(C)); // take a private deep copy of the JVMState
873 jvms()->set_map_deep(this); // re-point the clone at this node (presumably
874 } // through the whole caller chain — see JVMState::set_map_deep)
875 }
876 virtual int Opcode() const;
877 virtual uint ideal_reg() const { return Op_RegP; } // result lives in a pointer register
878 virtual bool guaranteed_safepoint() { return false; } // an allocation call is not a guaranteed safepoint
879
880 // allocations do not modify their arguments
881 virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
882
883 // Pattern-match a possible usage of AllocateNode.
884 // Return null if no allocation is recognized.
885 // The operand is the pointer produced by the (possible) allocation.
886 // It must be a projection of the Allocate or its subsequent CastPP.
887 // (Note: This function is defined in file graphKit.cpp, near
888 // GraphKit::new_instance/new_array, whose output it recognizes.)
|
819 //------------------------------Allocate---------------------------------------
820 // High-level memory allocation
821 //
822 // AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
823 // get expanded into a code sequence containing a call. Unlike other CallNodes,
824 // they have 2 memory projections and 2 i_o projections (which are distinguished by
825 // the _is_io_use flag in the projection.) This is needed when expanding the node in
826 // order to differentiate the uses of the projection on the normal control path from
827 // those on the exception return path.
828 //
829 class AllocateNode : public CallNode {
830 public:
831 enum {
// Note: RawAddress and AllocSize intentionally share the index
// TypeFunc::Parms — outputs (range tuple) and inputs (domain tuple)
// are numbered independently; see alloc_type() below, which builds
// the two tuples separately.
832 // Output:
833 RawAddress = TypeFunc::Parms, // the newly-allocated raw address
834 // Inputs:
835 AllocSize = TypeFunc::Parms, // size (in bytes) of the new object
836 KlassNode, // type (maybe dynamic) of the obj.
837 InitialTest, // slow-path test (may be constant)
838 ALength, // array length (or TOP if none)
839 ValueNode, // typed Type::BOTTOM in alloc_type(); presumably the value-type
// payload for the allocation — TODO confirm against callers
840 ParmLimit
841 };
842
// Builds the call signature (TypeFunc) for an allocation call: the
// domain holds the AllocSize/KlassNode/InitialTest/ALength/ValueNode
// inputs and the range is the single returned raw address. 't' is the
// type the caller supplies for the ALength slot (per the enum above,
// TOP when there is no array length).
843 static const TypeFunc* alloc_type(const Type* t) {
844 const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
845 fields[AllocSize] = TypeInt::POS;
846 fields[KlassNode] = TypeInstPtr::NOTNULL;
847 fields[InitialTest] = TypeInt::BOOL;
848 fields[ALength] = t; // length (can be a bad length)
849 fields[ValueNode] = Type::BOTTOM; // value input carries no type constraint
850
851 const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
852
853 // create result type (range)
854 fields = TypeTuple::fields(1); // 'fields' is reused for the range tuple
855 fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop
856
857 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
858
859 return TypeFunc::make(domain, range);
860 }
861
862 // Result of Escape Analysis
863 bool _is_scalar_replaceable;
864 bool _is_non_escaping;
865 // True when MemBar for new is redundant with MemBar at initializer exit
866 bool _is_allocation_MemBar_redundant;
867
868 virtual uint size_of() const; // Size is bigger
869 AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
870 Node *size, Node *klass_node, Node *initial_test, ValueTypeNode* value_node = NULL);
871 // Expansion modifies the JVMState, so we need to clone it
872 virtual void clone_jvms(Compile* C) {
873 if (jvms() != NULL) { // nothing to clone if no JVMState is attached
874 set_jvms(jvms()->clone_deep(C)); // take a private deep copy of the JVMState
875 jvms()->set_map_deep(this); // re-point the clone at this node (presumably
876 } // through the whole caller chain — see JVMState::set_map_deep)
877 }
878 virtual int Opcode() const;
879 virtual uint ideal_reg() const { return Op_RegP; } // result lives in a pointer register
880 virtual bool guaranteed_safepoint() { return false; } // an allocation call is not a guaranteed safepoint
881
882 // allocations do not modify their arguments
883 virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
884
885 // Pattern-match a possible usage of AllocateNode.
886 // Return null if no allocation is recognized.
887 // The operand is the pointer produced by the (possible) allocation.
888 // It must be a projection of the Allocate or its subsequent CastPP.
889 // (Note: This function is defined in file graphKit.cpp, near
890 // GraphKit::new_instance/new_array, whose output it recognizes.)
|