src/share/vm/opto/node.hpp
7069452 Cdiff


***************
*** 65,74 ****
--- 65,76 ----
  class DecodeNNode;
  class EncodePNode;
  class FastLockNode;
  class FastUnlockNode;
  class IfNode;
+ class IfFalseNode;
+ class IfTrueNode;
  class InitializeNode;
  class JVMState;
  class JumpNode;
  class JumpProjNode;
  class LoadNode;
***************
*** 81,102 ****
  class MachCallNode;
  class MachCallRuntimeNode;
  class MachCallStaticJavaNode;
  class MachConstantBaseNode;
  class MachConstantNode;
  class MachIfNode;
  class MachNode;
  class MachNullCheckNode;
  class MachReturnNode;
  class MachSafePointNode;
  class MachSpillCopyNode;
  class MachTempNode;
  class Matcher;
  class MemBarNode;
  class MemNode;
  class MergeMemNode;
- class MulNode;
  class MultiNode;
  class MultiBranchNode;
  class NeverBranchNode;
  class Node;
  class Node_Array;
--- 83,105 ----
  class MachCallNode;
  class MachCallRuntimeNode;
  class MachCallStaticJavaNode;
  class MachConstantBaseNode;
  class MachConstantNode;
+ class MachGotoNode;
  class MachIfNode;
  class MachNode;
  class MachNullCheckNode;
+ class MachProjNode;
  class MachReturnNode;
  class MachSafePointNode;
  class MachSpillCopyNode;
  class MachTempNode;
  class Matcher;
  class MemBarNode;
  class MemNode;
  class MergeMemNode;
  class MultiNode;
  class MultiBranchNode;
  class NeverBranchNode;
  class Node;
  class Node_Array;
***************
*** 125,137 ****
  class StoreNode;
  class SubNode;
  class Type;
  class TypeNode;
  class UnlockNode;
  class VectorSet;
- class IfTrueNode;
- class IfFalseNode;
  
  typedef void (*NFunc)(Node&,void*);
  extern "C" {
    typedef int (*C_sort_func_t)(const void *, const void *);
  }
--- 128,141 ----
  class StoreNode;
  class SubNode;
  class Type;
  class TypeNode;
  class UnlockNode;
+ class VectorNode;
+ class VectorLoadNode;
+ class VectorStoreNode;
  class VectorSet;
  
  typedef void (*NFunc)(Node&,void*);
  extern "C" {
    typedef int (*C_sort_func_t)(const void *, const void *);
  }
***************
*** 572,643 ****
        DEFINE_CLASS_ID(MachNullCheck, Mach, 2)
        DEFINE_CLASS_ID(MachIf, Mach, 3)
        DEFINE_CLASS_ID(MachTemp, Mach, 4)
        DEFINE_CLASS_ID(MachConstantBase, Mach, 5)
        DEFINE_CLASS_ID(MachConstant, Mach, 6)
  
!     DEFINE_CLASS_ID(Proj, Node, 2)
        DEFINE_CLASS_ID(CatchProj, Proj, 0)
        DEFINE_CLASS_ID(JumpProj, Proj, 1)
        DEFINE_CLASS_ID(IfTrue, Proj, 2)
        DEFINE_CLASS_ID(IfFalse, Proj, 3)
        DEFINE_CLASS_ID(Parm, Proj, 4)
  
!     DEFINE_CLASS_ID(Region, Node, 3)
        DEFINE_CLASS_ID(Loop, Region, 0)
          DEFINE_CLASS_ID(Root, Loop, 0)
          DEFINE_CLASS_ID(CountedLoop, Loop, 1)
  
!     DEFINE_CLASS_ID(Sub, Node, 4)
        DEFINE_CLASS_ID(Cmp, Sub, 0)
          DEFINE_CLASS_ID(FastLock, Cmp, 0)
          DEFINE_CLASS_ID(FastUnlock, Cmp, 1)
  
-     DEFINE_CLASS_ID(Type, Node, 5)
-       DEFINE_CLASS_ID(Phi, Type, 0)
-       DEFINE_CLASS_ID(ConstraintCast, Type, 1)
-       DEFINE_CLASS_ID(CheckCastPP, Type, 2)
-       DEFINE_CLASS_ID(CMove, Type, 3)
-       DEFINE_CLASS_ID(SafePointScalarObject, Type, 4)
-       DEFINE_CLASS_ID(DecodeN, Type, 5)
-       DEFINE_CLASS_ID(EncodeP, Type, 6)
- 
-     DEFINE_CLASS_ID(Mem, Node, 6)
-       DEFINE_CLASS_ID(Load, Mem, 0)
-       DEFINE_CLASS_ID(Store, Mem, 1)
-       DEFINE_CLASS_ID(LoadStore, Mem, 2)
- 
      DEFINE_CLASS_ID(MergeMem, Node, 7)
      DEFINE_CLASS_ID(Bool, Node, 8)
      DEFINE_CLASS_ID(AddP, Node, 9)
      DEFINE_CLASS_ID(BoxLock, Node, 10)
      DEFINE_CLASS_ID(Add, Node, 11)
!     DEFINE_CLASS_ID(Mul, Node, 12)
      DEFINE_CLASS_ID(ClearArray, Node, 13)
  
      _max_classes = ClassMask_ClearArray
    };
    #undef DEFINE_CLASS_ID
  
    // Flags are sorted by usage frequency.
    enum NodeFlags {
      Flag_is_Copy = 0x01, // should be first bit to avoid shift
!     Flag_is_Call = Flag_is_Copy << 1,
!     Flag_rematerialize = Flag_is_Call << 1,
      Flag_needs_anti_dependence_check = Flag_rematerialize << 1,
      Flag_is_macro = Flag_needs_anti_dependence_check << 1,
      Flag_is_Con = Flag_is_macro << 1,
      Flag_is_cisc_alternate = Flag_is_Con << 1,
      Flag_is_Branch = Flag_is_cisc_alternate << 1,
!     Flag_is_block_start = Flag_is_Branch << 1,
!     Flag_is_Goto = Flag_is_block_start << 1,
!     Flag_is_dead_loop_safe = Flag_is_Goto << 1,
      Flag_may_be_short_branch = Flag_is_dead_loop_safe << 1,
!     Flag_is_safepoint_node = Flag_may_be_short_branch << 1,
!     Flag_is_pc_relative = Flag_is_safepoint_node << 1,
!     Flag_is_Vector = Flag_is_pc_relative << 1,
!     _max_flags = (Flag_is_Vector << 1) - 1 // allow flags combination
    };
  
  private:
    jushort _class_id;
    jushort _flags;
--- 576,645 ----
        DEFINE_CLASS_ID(MachNullCheck, Mach, 2)
        DEFINE_CLASS_ID(MachIf, Mach, 3)
        DEFINE_CLASS_ID(MachTemp, Mach, 4)
        DEFINE_CLASS_ID(MachConstantBase, Mach, 5)
        DEFINE_CLASS_ID(MachConstant, Mach, 6)
+       DEFINE_CLASS_ID(MachGoto, Mach, 7)
  
!     DEFINE_CLASS_ID(Type, Node, 2)
!       DEFINE_CLASS_ID(Phi, Type, 0)
!       DEFINE_CLASS_ID(ConstraintCast, Type, 1)
!       DEFINE_CLASS_ID(CheckCastPP, Type, 2)
!       DEFINE_CLASS_ID(CMove, Type, 3)
!       DEFINE_CLASS_ID(SafePointScalarObject, Type, 4)
!       DEFINE_CLASS_ID(DecodeN, Type, 5)
!       DEFINE_CLASS_ID(EncodeP, Type, 6)
! 
!     DEFINE_CLASS_ID(Proj, Node, 3)
        DEFINE_CLASS_ID(CatchProj, Proj, 0)
        DEFINE_CLASS_ID(JumpProj, Proj, 1)
        DEFINE_CLASS_ID(IfTrue, Proj, 2)
        DEFINE_CLASS_ID(IfFalse, Proj, 3)
        DEFINE_CLASS_ID(Parm, Proj, 4)
+       DEFINE_CLASS_ID(MachProj, Proj, 5)
  
!     DEFINE_CLASS_ID(Mem, Node, 4)
!       DEFINE_CLASS_ID(Load, Mem, 0)
!         DEFINE_CLASS_ID(VectorLoad, Load, 0)
!       DEFINE_CLASS_ID(Store, Mem, 1)
!         DEFINE_CLASS_ID(VectorStore, Store, 0)
!       DEFINE_CLASS_ID(LoadStore, Mem, 2)
! 
!     DEFINE_CLASS_ID(Region, Node, 5)
        DEFINE_CLASS_ID(Loop, Region, 0)
          DEFINE_CLASS_ID(Root, Loop, 0)
          DEFINE_CLASS_ID(CountedLoop, Loop, 1)
  
!     DEFINE_CLASS_ID(Sub, Node, 6)
        DEFINE_CLASS_ID(Cmp, Sub, 0)
          DEFINE_CLASS_ID(FastLock, Cmp, 0)
          DEFINE_CLASS_ID(FastUnlock, Cmp, 1)
  
      DEFINE_CLASS_ID(MergeMem, Node, 7)
      DEFINE_CLASS_ID(Bool, Node, 8)
      DEFINE_CLASS_ID(AddP, Node, 9)
      DEFINE_CLASS_ID(BoxLock, Node, 10)
      DEFINE_CLASS_ID(Add, Node, 11)
!     DEFINE_CLASS_ID(Vector, Node, 12)
      DEFINE_CLASS_ID(ClearArray, Node, 13)
  
      _max_classes = ClassMask_ClearArray
    };
    #undef DEFINE_CLASS_ID
  
    // Flags are sorted by usage frequency.
    enum NodeFlags {
      Flag_is_Copy = 0x01, // should be first bit to avoid shift
!     Flag_rematerialize = Flag_is_Copy << 1,
      Flag_needs_anti_dependence_check = Flag_rematerialize << 1,
      Flag_is_macro = Flag_needs_anti_dependence_check << 1,
      Flag_is_Con = Flag_is_macro << 1,
      Flag_is_cisc_alternate = Flag_is_Con << 1,
      Flag_is_Branch = Flag_is_cisc_alternate << 1,
!     Flag_is_dead_loop_safe = Flag_is_Branch << 1,
      Flag_may_be_short_branch = Flag_is_dead_loop_safe << 1,
!     _max_flags = (Flag_may_be_short_branch << 1) - 1 // allow flags combination
    };
  
  private:
    jushort _class_id;
    jushort _flags;
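
The class-ID renumbering above feeds the single mask-and-compare type tests used throughout the compiler: each node's _class_id embeds its superclass's id, so is_X() needs only the one comparison shown in the DEFINE_CLASS_QUERY macro two hunks below. A minimal sketch of that query pattern, using made-up Class_*/ClassMask_* values rather than the real constants generated by DEFINE_CLASS_ID:

    // Sketch of the is_X() pattern behind DEFINE_CLASS_QUERY; the constants
    // below are illustrative only, not the real DEFINE_CLASS_ID encoding.
    #include <cassert>
    typedef unsigned short jushort;                // _class_id is a jushort in Node

    enum {
      Class_Mem   = 0x10, ClassMask_Mem   = 0x1F,  // hypothetical Mem encoding
      Class_Load  = 0x30, ClassMask_Load  = 0x3F,  // Load adds bits above Mem's mask
      Class_Store = 0x50, ClassMask_Store = 0x7F   // Store likewise
    };

    static bool is_Mem (jushort id) { return (id & ClassMask_Mem)  == Class_Mem;  }
    static bool is_Load(jushort id) { return (id & ClassMask_Load) == Class_Load; }

    int main() {
      jushort load_id  = Class_Load;                  // stands in for a LoadNode
      jushort store_id = Class_Store;                 // stands in for a StoreNode
      assert(is_Mem(load_id)  &&  is_Load(load_id));  // a subclass answers both queries
      assert(is_Mem(store_id) && !is_Load(store_id)); // sibling classes stay distinct
      return 0;
    }

This property is also what lets the hierarchy above be reordered freely: only the generated constants change, not the query code.
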
***************
*** 667,691 ****
    // Virtual inherited Node size
    virtual uint size_of() const;
  
    // Other interesting Node properties
- 
-   // Special case: is_Call() returns true for both CallNode and MachCallNode.
-   bool is_Call() const {
-     return (_flags & Flag_is_Call) != 0;
-   }
- 
-   CallNode* isa_Call() const {
-     return is_Call() ? as_Call() : NULL;
-   }
- 
-   CallNode *as_Call() const { // Only for CallNode (not for MachCallNode)
-     assert((_class_id & ClassMask_Call) == Class_Call, "invalid node class");
-     return (CallNode*)this;
-   }
- 
  #define DEFINE_CLASS_QUERY(type) \
    bool is_##type() const { \
      return ((_class_id & ClassMask_##type) == Class_##type); \
    } \
    type##Node *as_##type() const { \
--- 669,678 ----
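
With the hand-written is_Call()/isa_Call()/as_Call() trio deleted above, CallNode is served by the generic macro instead (DEFINE_CLASS_QUERY(Call) is added in the next hunk). Roughly what that expansion provides, sketched from the macro lines visible here plus the removed accessors; the isa_ and assert details are assumed to mirror them:

    // Approximate expansion of DEFINE_CLASS_QUERY(Call) -- a sketch, not the
    // literal macro output from node.hpp.
    bool is_Call() const {
      return ((_class_id & ClassMask_Call) == Class_Call);  // class-id test, no flag bit
    }
    CallNode *as_Call() const {
      assert(is_Call(), "invalid node class");               // same check the removed as_Call() made
      return (CallNode*)this;
    }
    CallNode* isa_Call() const {
      return is_Call() ? as_Call() : NULL;                   // mirrors the removed isa_Call()
    }

Note the removed comment: the old flag-based is_Call() also answered true for MachCallNode, whereas a class-id test matches only CallNode and its subclasses, so callers that cared about the Mach side presumably pair this query with is_MachCall().
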
***************
*** 701,710 ****
--- 688,698 ----
    DEFINE_CLASS_QUERY(AddP)
    DEFINE_CLASS_QUERY(Allocate)
    DEFINE_CLASS_QUERY(AllocateArray)
    DEFINE_CLASS_QUERY(Bool)
    DEFINE_CLASS_QUERY(BoxLock)
+   DEFINE_CLASS_QUERY(Call)
    DEFINE_CLASS_QUERY(CallDynamicJava)
    DEFINE_CLASS_QUERY(CallJava)
    DEFINE_CLASS_QUERY(CallLeaf)
    DEFINE_CLASS_QUERY(CallRuntime)
    DEFINE_CLASS_QUERY(CallStaticJava)
***************
*** 738,757 ****
    DEFINE_CLASS_QUERY(MachCallLeaf)
    DEFINE_CLASS_QUERY(MachCallRuntime)
    DEFINE_CLASS_QUERY(MachCallStaticJava)
    DEFINE_CLASS_QUERY(MachConstantBase)
    DEFINE_CLASS_QUERY(MachConstant)
    DEFINE_CLASS_QUERY(MachIf)
    DEFINE_CLASS_QUERY(MachNullCheck)
    DEFINE_CLASS_QUERY(MachReturn)
    DEFINE_CLASS_QUERY(MachSafePoint)
    DEFINE_CLASS_QUERY(MachSpillCopy)
    DEFINE_CLASS_QUERY(MachTemp)
    DEFINE_CLASS_QUERY(Mem)
    DEFINE_CLASS_QUERY(MemBar)
    DEFINE_CLASS_QUERY(MergeMem)
-   DEFINE_CLASS_QUERY(Mul)
    DEFINE_CLASS_QUERY(Multi)
    DEFINE_CLASS_QUERY(MultiBranch)
    DEFINE_CLASS_QUERY(Parm)
    DEFINE_CLASS_QUERY(PCTable)
    DEFINE_CLASS_QUERY(Phi)
--- 726,746 ----
    DEFINE_CLASS_QUERY(MachCallLeaf)
    DEFINE_CLASS_QUERY(MachCallRuntime)
    DEFINE_CLASS_QUERY(MachCallStaticJava)
    DEFINE_CLASS_QUERY(MachConstantBase)
    DEFINE_CLASS_QUERY(MachConstant)
+   DEFINE_CLASS_QUERY(MachGoto)
    DEFINE_CLASS_QUERY(MachIf)
    DEFINE_CLASS_QUERY(MachNullCheck)
+   DEFINE_CLASS_QUERY(MachProj)
    DEFINE_CLASS_QUERY(MachReturn)
    DEFINE_CLASS_QUERY(MachSafePoint)
    DEFINE_CLASS_QUERY(MachSpillCopy)
    DEFINE_CLASS_QUERY(MachTemp)
    DEFINE_CLASS_QUERY(Mem)
    DEFINE_CLASS_QUERY(MemBar)
    DEFINE_CLASS_QUERY(MergeMem)
    DEFINE_CLASS_QUERY(Multi)
    DEFINE_CLASS_QUERY(MultiBranch)
    DEFINE_CLASS_QUERY(Parm)
    DEFINE_CLASS_QUERY(PCTable)
    DEFINE_CLASS_QUERY(Phi)
***************
*** 762,771 ****
--- 751,763 ----
    DEFINE_CLASS_QUERY(SafePointScalarObject)
    DEFINE_CLASS_QUERY(Start)
    DEFINE_CLASS_QUERY(Store)
    DEFINE_CLASS_QUERY(Sub)
    DEFINE_CLASS_QUERY(Type)
+   DEFINE_CLASS_QUERY(Vector)
+   DEFINE_CLASS_QUERY(VectorLoad)
+   DEFINE_CLASS_QUERY(VectorStore)
    DEFINE_CLASS_QUERY(Unlock)
  
  #undef DEFINE_CLASS_QUERY
  
    // duplicate of is_MachSpillCopy()
***************
*** 772,782 ****
    bool is_SpillCopy () const {
      return ((_class_id & ClassMask_MachSpillCopy) == Class_MachSpillCopy);
    }
  
    bool is_Con () const { return (_flags & Flag_is_Con) != 0; }
-   bool is_Goto() const { return (_flags & Flag_is_Goto) != 0; }
    // The data node which is safe to leave in dead loop during IGVN optimization.
    bool is_dead_loop_safe() const {
      return is_Phi() || (is_Proj() && in(0) == NULL) ||
             ((_flags & (Flag_is_dead_loop_safe | Flag_is_Con)) != 0 &&
              (!is_Proj() || !in(0)->is_Allocate()));
--- 764,773 ----
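
The flags enum shrank in the earlier hunk, but the idiom is unchanged: every Flag_* is its own bit, produced by shifting the previous one, so several properties can be tested with a single mask the way is_dead_loop_safe() combines Flag_is_dead_loop_safe and Flag_is_Con above. A small self-contained sketch of that idiom, reproducing only those two flags with illustrative bit positions:

    // Sketch of the NodeFlags idiom: one bit per flag, combinable in one test.
    // Bit positions here are illustrative; the real enum chains more flags.
    #include <cassert>
    typedef unsigned short jushort;                // _flags is a jushort in Node

    enum {
      Flag_is_Con            = 0x01,
      Flag_is_dead_loop_safe = Flag_is_Con << 1    // next free bit, as in the enum
    };

    static bool con_or_dead_loop_safe(jushort flags) {
      // the combined test used inside is_dead_loop_safe(): either flag set
      return (flags & (Flag_is_dead_loop_safe | Flag_is_Con)) != 0;
    }

    int main() {
      assert( con_or_dead_loop_safe(Flag_is_Con));
      assert( con_or_dead_loop_safe(Flag_is_dead_loop_safe));
      assert(!con_or_dead_loop_safe(0));
      return 0;
    }
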
***************
*** 793,803 ****
    // depend for their correct sequencing on more than one test.
    // (In that case, hoisting to a dominating test may silently
    // skip some other important test.)
    virtual bool depends_only_on_test() const { assert(!is_CFG(), ""); return true; };
  
!   // defined for MachNodes that match 'If' | 'Goto' | 'CountedLoopEnd'
    bool is_Branch() const { return (_flags & Flag_is_Branch) != 0; }
  
    // When building basic blocks, I need to have a notion of block beginning
    // Nodes, next block selector Nodes (block enders), and next block
    // projections. These calls need to work on their machine equivalents. The
--- 784,794 ----
    // depend for their correct sequencing on more than one test.
    // (In that case, hoisting to a dominating test may silently
    // skip some other important test.)
    virtual bool depends_only_on_test() const { assert(!is_CFG(), ""); return true; };
  
!   // defined for MachNodes that match 'If' | 'Goto' | 'CountedLoopEnd' | 'Jump'
    bool is_Branch() const { return (_flags & Flag_is_Branch) != 0; }
  
    // When building basic blocks, I need to have a notion of block beginning
    // Nodes, next block selector Nodes (block enders), and next block
    // projections. These calls need to work on their machine equivalents. The
***************
*** 804,826 ****
    // Ideal beginning Nodes are RootNode, RegionNode and StartNode.
    bool is_block_start() const {
      if ( is_Region() )
        return this == (const Node*)in(0);
      else
!       return (_flags & Flag_is_block_start) != 0;
    }
  
    // The Ideal control projection Nodes are IfTrue/IfFalse, JumpProjNode, Root,
    // Goto and Return.  This call also returns the block ending Node.
    virtual const Node *is_block_proj() const;
  
    // The node is a "macro" node which needs to be expanded before matching
    bool is_macro() const { return (_flags & Flag_is_macro) != 0; }
  
-   // Value is a vector of primitive values
-   bool is_Vector() const { return (_flags & Flag_is_Vector) != 0; }
- 
    //----------------- Optimization
  
    // Get the worst-case Type output for this Node.
    virtual const class Type *bottom_type() const;
--- 795,814 ----
    // Ideal beginning Nodes are RootNode, RegionNode and StartNode.
    bool is_block_start() const {
      if ( is_Region() )
        return this == (const Node*)in(0);
      else
!       return is_Start();
    }
  
    // The Ideal control projection Nodes are IfTrue/IfFalse, JumpProjNode, Root,
    // Goto and Return.  This call also returns the block ending Node.
    virtual const Node *is_block_proj() const;
  
    // The node is a "macro" node which needs to be expanded before matching
    bool is_macro() const { return (_flags & Flag_is_macro) != 0; }
  
    //----------------- Optimization
  
    // Get the worst-case Type output for this Node.
    virtual const class Type *bottom_type() const;
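
Dropping Flag_is_block_start works because the block-beginning kinds named in the comment are reachable through class queries alone: RootNode and the loop heads are Region subclasses in the class-ID table earlier in this file, so the is_Region() branch already covers them, leaving StartNode as the only case for the else branch. The resulting test, restated as a free function over the Node calls used above (a sketch, not the member function itself):

    // Sketch: the effective block-start test after this change.
    static bool starts_block(const Node* n) {
      if (n->is_Region())
        return n == n->in(0);  // a Region begins a block when its control input is itself
      return n->is_Start();    // otherwise only the Start node begins a block
    }
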