992 // important (common) to do address expressions.
993 Node *remix_address_expressions( Node *n );
994
995 // Attempt to use a conditional move instead of a phi/branch
996 Node *conditional_move( Node *n );
997
998 // Reorganize offset computations to lower register pressure.
999 // Mostly prevent loop-fallout uses of the pre-incremented trip counter
1000 // (which are then alive with the post-incremented trip counter
1001 // forcing an extra register move)
1002 void reorg_offsets( IdealLoopTree *loop );
1003
1004 // Check for aggressive application of 'split-if' optimization,
1005 // using basic block level info.
1006 void split_if_with_blocks ( VectorSet &visited, Node_Stack &nstack );
1007 Node *split_if_with_blocks_pre ( Node *n );
1008 void split_if_with_blocks_post( Node *n );
// NOTE(review): undocumented helper in the split-if family; presumably returns
// the Phi feeding 'n' from its own block (or NULL) -- confirm against the .cpp.
1009 Node *has_local_phi_input( Node *n );
1010 // Mark an IfNode as being dominated by a prior test,
1011 // without actually altering the CFG (and hence IDOM info).
// 'flip' defaults to false; presumably selects which projection of the
// dominated If is kept -- verify against the implementation.
1012 void dominated_by( Node *prevdom, Node *iff, bool flip = false );
1013
1014 // Split Node 'n' through merge point
1015 Node *split_thru_region( Node *n, Node *region );
1016 // Split Node 'n' through merge point if there is enough win.
1017 Node *split_thru_phi( Node *n, Node *region, int policy );
1018 // Found an If getting its condition-code input from a Phi in the
1019 // same block. Split thru the Region.
1020 void do_split_if( Node *iff );
1021
1022 // Conversion of fill/copy patterns into intrinsic versions
1023 bool do_intrinsify_fill();
1024 bool intrinsify_fill(IdealLoopTree* lpt);
// 'store', 'store_value', 'shift' and 'offset' are Node*& reference
// parameters -- presumably out-params populated when a fill loop is
// recognized (return value signals a match) -- verify against the .cpp.
1025 bool match_fill_loop(IdealLoopTree* lpt, Node*& store, Node*& store_value,
1026 Node*& shift, Node*& offset);
1027
1028 private:
1029 // Return a type based on condition control flow
1030 const TypeInt* filtered_type( Node *n, Node* n_ctrl);
// Convenience overload: filter with no control restriction (n_ctrl == NULL).
1031 const TypeInt* filtered_type( Node *n ) { return filtered_type(n, NULL); }
1032 // Helpers for filtered type
|
992 // important (common) to do address expressions.
993 Node *remix_address_expressions( Node *n );
994
995 // Attempt to use a conditional move instead of a phi/branch
996 Node *conditional_move( Node *n );
997
998 // Reorganize offset computations to lower register pressure.
999 // Mostly prevent loop-fallout uses of the pre-incremented trip counter
1000 // (which are then alive with the post-incremented trip counter
1001 // forcing an extra register move)
1002 void reorg_offsets( IdealLoopTree *loop );
1003
1004 // Check for aggressive application of 'split-if' optimization,
1005 // using basic block level info.
1006 void split_if_with_blocks ( VectorSet &visited, Node_Stack &nstack );
1007 Node *split_if_with_blocks_pre ( Node *n );
1008 void split_if_with_blocks_post( Node *n );
// NOTE(review): undocumented helper in the split-if family; presumably returns
// the Phi feeding 'n' from its own block (or NULL) -- confirm against the .cpp.
1009 Node *has_local_phi_input( Node *n );
1010 // Mark an IfNode as being dominated by a prior test,
1011 // without actually altering the CFG (and hence IDOM info).
// 'exclude_loop_predicate' (default false, so existing callers are
// unaffected) presumably prevents this transform when 'prevdom' is a loop
// predicate -- confirm the exact semantics against the .cpp implementation.
1012 void dominated_by( Node *prevdom, Node *iff, bool flip = false, bool exclude_loop_predicate = false );
1013
1014 // Split Node 'n' through merge point
1015 Node *split_thru_region( Node *n, Node *region );
1016 // Split Node 'n' through merge point if there is enough win.
1017 Node *split_thru_phi( Node *n, Node *region, int policy );
1018 // Found an If getting its condition-code input from a Phi in the
1019 // same block. Split thru the Region.
1020 void do_split_if( Node *iff );
1021
1022 // Conversion of fill/copy patterns into intrinsic versions
1023 bool do_intrinsify_fill();
1024 bool intrinsify_fill(IdealLoopTree* lpt);
// 'store', 'store_value', 'shift' and 'offset' are Node*& reference
// parameters -- presumably out-params populated when a fill loop is
// recognized (return value signals a match) -- verify against the .cpp.
1025 bool match_fill_loop(IdealLoopTree* lpt, Node*& store, Node*& store_value,
1026 Node*& shift, Node*& offset);
1027
1028 private:
1029 // Return a type based on condition control flow
1030 const TypeInt* filtered_type( Node *n, Node* n_ctrl);
// Convenience overload: filter with no control restriction (n_ctrl == NULL).
1031 const TypeInt* filtered_type( Node *n ) { return filtered_type(n, NULL); }
1032 // Helpers for filtered type
|