
src/share/vm/opto/compile.cpp: old version of the changed hunks

 387   for (int i = C->macro_count()-1; i >= 0; i--) {
 388     Node* n = C->macro_node(i);
 389     if (!useful.member(n)) {
 390       remove_macro_node(n);
 391     }
 392   }
 393   // Remove useless CastII nodes with range check dependency
 394   for (int i = range_check_cast_count() - 1; i >= 0; i--) {
 395     Node* cast = range_check_cast_node(i);
 396     if (!useful.member(cast)) {
 397       remove_range_check_cast(cast);
 398     }
 399   }
 400   // Remove useless expensive nodes
 401   for (int i = C->expensive_count()-1; i >= 0; i--) {
 402     Node* n = C->expensive_node(i);
 403     if (!useful.member(n)) {
 404       remove_expensive_node(n);
 405     }
 406   }
 407   // Clean up the late inline lists
 408   remove_useless_late_inlines(&_string_late_inlines, useful);
 409   remove_useless_late_inlines(&_boxing_late_inlines, useful);
 410   remove_useless_late_inlines(&_late_inlines, useful);
 411   debug_only(verify_graph_edges(true/*check for no_dead_code*/);)
 412 }
 413 
 414 //------------------------------frame_size_in_words-----------------------------
 415 // frame_slots in units of words
 416 int Compile::frame_size_in_words() const {
 417   // shift is 0 in LP32 and 1 in LP64
 418   const int shift = (LogBytesPerWord - LogBytesPerInt);
 419   int words = _frame_slots >> shift;
 420   assert( words << shift == _frame_slots, "frame size must be properly aligned in LP64" );
 421   return words;
 422 }
 423 
 424 // To bang the stack of this compiled method we use the stack size
 425 // that the interpreter would need in case of a deoptimization. This
 426 // removes the need to bang the stack in the deoptimization blob which


1160   AliasType* ats = NEW_ARENA_ARRAY(comp_arena(), AliasType,  grow_ats);
1161   Copy::zero_to_bytes(ats, sizeof(AliasType)*grow_ats);
1162   {
1163     for (int i = 0; i < grow_ats; i++)  _alias_types[i] = &ats[i];
1164   }
1165   // Initialize the first few types.
1166   _alias_types[AliasIdxTop]->Init(AliasIdxTop, NULL);
1167   _alias_types[AliasIdxBot]->Init(AliasIdxBot, TypePtr::BOTTOM);
1168   _alias_types[AliasIdxRaw]->Init(AliasIdxRaw, TypeRawPtr::BOTTOM);
1169   _num_alias_types = AliasIdxRaw+1;
1170   // Zero out the alias type cache.
1171   Copy::zero_to_bytes(_alias_cache, sizeof(_alias_cache));
1172   // A NULL adr_type hits in the cache right away.  Preload the right answer.
1173   probe_alias_cache(NULL)->_index = AliasIdxTop;
1174 
1175   _intrinsics = NULL;
1176   _macro_nodes = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);
1177   _predicate_opaqs = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);
1178   _expensive_nodes = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);
1179   _range_check_casts = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);

1180   register_library_intrinsics();
1181 }
1182 
1183 //---------------------------init_start----------------------------------------
1184 // Install the StartNode on this compile object.
1185 void Compile::init_start(StartNode* s) {
1186   if (failing())
1187     return; // already failing
1188   assert(s == start(), "");
1189 }
1190 
1191 /**
1192  * Return the 'StartNode'. We must not have a pending failure, since the ideal
1193  * graph could then be in an inconsistent state, and traversing it could cause
1194  * segmentation faults.
1195  */
1196 StartNode* Compile::start() const {
1197   assert (!failing(), "Must not have pending failure. Reason is: %s", failure_reason());
1198   for (DUIterator_Fast imax, i = root()->fast_outs(imax); i < imax; i++) {
1199     Node* start = root()->fast_out(i);


1945   }
1946   assert(predicate_count()==0, "should be clean!");
1947 }
1948 
1949 void Compile::add_range_check_cast(Node* n) {
1950   assert(n->isa_CastII()->has_range_check(), "CastII should have range check dependency");
1951   assert(!_range_check_casts->contains(n), "duplicate entry in range check casts");
1952   _range_check_casts->append(n);
1953 }
1954 
1955 // Remove all range check dependent CastIINodes.
1956 void Compile::remove_range_check_casts(PhaseIterGVN &igvn) {
1957   for (int i = range_check_cast_count(); i > 0; i--) {
1958     Node* cast = range_check_cast_node(i-1);
1959     assert(cast->isa_CastII()->has_range_check(), "CastII should have range check dependency");
1960     igvn.replace_node(cast, cast->in(1));
1961   }
1962   assert(range_check_cast_count() == 0, "should be empty");
1963 }
1964 
1965 // StringOpts and late inlining of string methods
1966 void Compile::inline_string_calls(bool parse_time) {
1967   {
1968     // remove useless nodes to make the usage analysis simpler
1969     ResourceMark rm;
1970     PhaseRemoveUseless pru(initial_gvn(), for_igvn());
1971   }
1972 
1973   {
1974     ResourceMark rm;
1975     print_method(PHASE_BEFORE_STRINGOPTS, 3);
1976     PhaseStringOpts pso(initial_gvn(), for_igvn());
1977     print_method(PHASE_AFTER_STRINGOPTS, 3);
1978   }
1979 
1980   // now inline anything that we skipped the first time around
1981   if (!parse_time) {
1982     _late_inlines_pos = _late_inlines.length();
1983   }
1984 


2192   remove_speculative_types(igvn);
2193 
2194   // No more new expensive nodes will be added to the list from here,
2195   // so keep only the actual candidates for optimizations.
2196   cleanup_expensive_nodes(igvn);
2197 
2198   if (!failing() && RenumberLiveNodes && live_nodes() + NodeLimitFudgeFactor < unique()) {
2199     Compile::TracePhase tp("", &timers[_t_renumberLive]);
2200     initial_gvn()->replace_with(&igvn);
2201     for_igvn()->clear();
2202     Unique_Node_List new_worklist(C->comp_arena());
2203     {
2204       ResourceMark rm;
2205       PhaseRenumberLive prl = PhaseRenumberLive(initial_gvn(), for_igvn(), &new_worklist);
2206     }
2207     set_for_igvn(&new_worklist);
2208     igvn = PhaseIterGVN(initial_gvn());
2209     igvn.optimize();
2210   }
2211 
2212   // Perform escape analysis
2213   if (_do_escape_analysis && ConnectionGraph::has_candidates(this)) {
2214     if (has_loops()) {
2215       // Cleanup graph (remove dead nodes).
2216       TracePhase tp("idealLoop", &timers[_t_idealLoop]);
2217       PhaseIdealLoop ideal_loop( igvn, false, true );
2218       if (major_progress()) print_method(PHASE_PHASEIDEAL_BEFORE_EA, 2);
2219       if (failing())  return;
2220     }
2221     ConnectionGraph::do_analysis(this, &igvn);
2222 
2223     if (failing())  return;
2224 
2225     // Optimize out field loads from scalar replaceable allocations.
2226     igvn.optimize();
2227     print_method(PHASE_ITER_GVN_AFTER_EA, 2);
2228 
2229     if (failing())  return;
2230 
2231     if (congraph() != NULL && macro_count() > 0) {


3421           // redundant
3422           for (DUIterator_Fast imax, i = k->fast_outs(imax); i < imax; i++) {
3423             Node* u = k->fast_out(i);
3424             assert(!wq.contains(u), "shouldn't process one node several times");
3425             if (u->Opcode() == Op_LShiftL ||
3426                 u->Opcode() == Op_AddL ||
3427                 u->Opcode() == Op_SubL ||
3428                 u->Opcode() == Op_AddP) {
3429               wq.push(u);
3430             }
3431           }
3432           // Replace all nodes that have the same edges as m with m itself
3433           k->subsume_by(m, this);
3434         }
3435       }
3436     }
3437     break;
3438   }
3439   case Op_ValueType: {
3440     ValueTypeNode* vt = n->as_ValueType();
3441     vt->make_scalar_in_safepoints(this);
3442     if (vt->outcnt() == 0) {
3443       vt->disconnect_inputs(NULL, this);
3444     }
3445     break;
3446   }
3447   default:
3448     assert( !n->is_Call(), "" );
3449     assert( !n->is_Mem(), "" );
3450     assert( nop != Op_ProfileBoolean, "should be eliminated during IGVN");
3451     break;
3452   }
3453 
3454   // Collect CFG split points
3455   if (n->is_MultiBranch() && !n->is_RangeCheck()) {
3456     frc._tests.push(n);
3457   }
3458 }
3459 
3460 //------------------------------final_graph_reshaping_walk---------------------
3461 // Replacing Opaque nodes with their input in final_graph_reshaping_impl()
3462 // requires that the walk visits a node's inputs before visiting the node.
3463 void Compile::final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc ) {
3464   ResourceArea *area = Thread::current()->resource_area();

src/share/vm/opto/compile.cpp: new version of the changed hunks

 387   for (int i = C->macro_count()-1; i >= 0; i--) {
 388     Node* n = C->macro_node(i);
 389     if (!useful.member(n)) {
 390       remove_macro_node(n);
 391     }
 392   }
 393   // Remove useless CastII nodes with range check dependency
 394   for (int i = range_check_cast_count() - 1; i >= 0; i--) {
 395     Node* cast = range_check_cast_node(i);
 396     if (!useful.member(cast)) {
 397       remove_range_check_cast(cast);
 398     }
 399   }
 400   // Remove useless expensive nodes
 401   for (int i = C->expensive_count()-1; i >= 0; i--) {
 402     Node* n = C->expensive_node(i);
 403     if (!useful.member(n)) {
 404       remove_expensive_node(n);
 405     }
 406   }
 407   for (int i = value_type_ptr_count() - 1; i >= 0; i--) {
 408     ValueTypePtrNode* vtptr = value_type_ptr(i);
 409     if (!useful.member(vtptr)) {
 410       remove_value_type_ptr(vtptr);
 411     }
 412   }
 413   // Clean up the late inline lists
 414   remove_useless_late_inlines(&_string_late_inlines, useful);
 415   remove_useless_late_inlines(&_boxing_late_inlines, useful);
 416   remove_useless_late_inlines(&_late_inlines, useful);
 417   debug_only(verify_graph_edges(true/*check for no_dead_code*/);)
 418 }
 419 
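
These removal loops all walk their lists from the last index down to 0. A minimal sketch of why backward iteration is the safe order when removing during traversal; std::vector stands in for the compiler's GrowableArray and is purely illustrative:

    #include <cassert>
    #include <vector>

    int main() {
      // Removing at index i shifts every later element left; a forward
      // walk would skip the element that slides into slot i. Walking
      // backward leaves the not-yet-visited indices (0..i-1) untouched.
      std::vector<int> nodes = {1, 2, 3, 4, 5, 6};
      for (int i = (int)nodes.size() - 1; i >= 0; i--) {
        if (nodes[i] % 2 == 0) {               // "not useful": drop evens
          nodes.erase(nodes.begin() + i);      // like remove_macro_node(n)
        }
      }
      assert((nodes == std::vector<int>{1, 3, 5}));
      return 0;
    }
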
 420 //------------------------------frame_size_in_words-----------------------------
 421 // frame_slots in units of words
 422 int Compile::frame_size_in_words() const {
 423   // shift is 0 in LP32 and 1 in LP64
 424   const int shift = (LogBytesPerWord - LogBytesPerInt);
 425   int words = _frame_slots >> shift;
 426   assert( words << shift == _frame_slots, "frame size must be properly aligned in LP64" );
 427   return words;
 428 }
 429 
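
The conversion above is plain shift arithmetic. A standalone sketch under the LP64 assumption (8-byte words, 4-byte VM int slots, so the shift is 1); the two Log constants are illustrative stand-ins for the HotSpot globals:

    #include <cassert>

    const int LogBytesPerWord = 3;  // 8-byte word: LP64 assumption
    const int LogBytesPerInt  = 2;  // 4-byte VM int slot

    int frame_size_in_words(int frame_slots) {
      const int shift = LogBytesPerWord - LogBytesPerInt;  // 1 on LP64, 0 on LP32
      int words = frame_slots >> shift;
      // Mirrors the assert above: no odd slot may be left over on LP64.
      assert((words << shift) == frame_slots);
      return words;
    }

    int main() {
      assert(frame_size_in_words(8) == 4);  // 8 int-sized slots == 4 words
      return 0;
    }
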
 430 // To bang the stack of this compiled method we use the stack size
 431 // that the interpreter would need in case of a deoptimization. This
 432 // removes the need to bang the stack in the deoptimization blob which


1166   AliasType* ats = NEW_ARENA_ARRAY(comp_arena(), AliasType,  grow_ats);
1167   Copy::zero_to_bytes(ats, sizeof(AliasType)*grow_ats);
1168   {
1169     for (int i = 0; i < grow_ats; i++)  _alias_types[i] = &ats[i];
1170   }
1171   // Initialize the first few types.
1172   _alias_types[AliasIdxTop]->Init(AliasIdxTop, NULL);
1173   _alias_types[AliasIdxBot]->Init(AliasIdxBot, TypePtr::BOTTOM);
1174   _alias_types[AliasIdxRaw]->Init(AliasIdxRaw, TypeRawPtr::BOTTOM);
1175   _num_alias_types = AliasIdxRaw+1;
1176   // Zero out the alias type cache.
1177   Copy::zero_to_bytes(_alias_cache, sizeof(_alias_cache));
1178   // A NULL adr_type hits in the cache right away.  Preload the right answer.
1179   probe_alias_cache(NULL)->_index = AliasIdxTop;
1180 
1181   _intrinsics = NULL;
1182   _macro_nodes = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);
1183   _predicate_opaqs = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);
1184   _expensive_nodes = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);
1185   _range_check_casts = new(comp_arena()) GrowableArray<Node*>(comp_arena(), 8,  0, NULL);
1186   _value_type_ptr_nodes = new(comp_arena()) GrowableArray<ValueTypePtrNode*>(comp_arena(), 8,  0, NULL);
1187   register_library_intrinsics();
1188 }
1189 
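The zeroed cache plus the NULL preload above rely on a small trick: a zero-filled, direct-mapped cache already "contains" the NULL key. A hedged miniature of the idea; the struct and hash here are hypothetical, not the HotSpot declarations:

    #include <cstdint>

    struct AliasCacheEntry {
      const void* _adr_type;
      int         _index;
    };

    static const int cache_size = 256;              // power of two
    static AliasCacheEntry cache[cache_size] = {};  // zero-filled, as Copy::zero_to_bytes does

    AliasCacheEntry* probe_alias_cache(const void* adr_type) {
      // Hash the pointer bits down to a slot; the low bits are often zero
      // from alignment, so shift them off first.
      uintptr_t slot = (reinterpret_cast<uintptr_t>(adr_type) >> 3) & (cache_size - 1);
      return &cache[slot];
    }

    int main() {
      // NULL hashes to a slot whose _adr_type is already NULL (zero), so a
      // NULL query hits immediately; preloading _index makes that hit return
      // the right answer, as compile.cpp does with AliasIdxTop.
      probe_alias_cache(0)->_index = 0;  // 0 standing in for AliasIdxTop
      return 0;
    }
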
1190 //---------------------------init_start----------------------------------------
1191 // Install the StartNode on this compile object.
1192 void Compile::init_start(StartNode* s) {
1193   if (failing())
1194     return; // already failing
1195   assert(s == start(), "");
1196 }
1197 
1198 /**
1199  * Return the 'StartNode'. We must not have a pending failure, since the ideal
1200  * graph could then be in an inconsistent state, and traversing it could cause
1201  * segmentation faults.
1202  */
1203 StartNode* Compile::start() const {
1204   assert (!failing(), "Must not have pending failure. Reason is: %s", failure_reason());
1205   for (DUIterator_Fast imax, i = root()->fast_outs(imax); i < imax; i++) {
1206     Node* start = root()->fast_out(i);


1952   }
1953   assert(predicate_count()==0, "should be clean!");
1954 }
1955 
1956 void Compile::add_range_check_cast(Node* n) {
1957   assert(n->isa_CastII()->has_range_check(), "CastII should have range check dependency");
1958   assert(!_range_check_casts->contains(n), "duplicate entry in range check casts");
1959   _range_check_casts->append(n);
1960 }
1961 
1962 // Remove all range check dependent CastIINodes.
1963 void Compile::remove_range_check_casts(PhaseIterGVN &igvn) {
1964   for (int i = range_check_cast_count(); i > 0; i--) {
1965     Node* cast = range_check_cast_node(i-1);
1966     assert(cast->isa_CastII()->has_range_check(), "CastII should have range check dependency");
1967     igvn.replace_node(cast, cast->in(1));
1968   }
1969   assert(range_check_cast_count() == 0, "should be empty");
1970 }
1971 
1972 void Compile::add_value_type_ptr(ValueTypePtrNode* n) {
1973   assert(!_value_type_ptr_nodes->contains(n), "duplicate entry");
1974   _value_type_ptr_nodes->append(n);
1975 }
1976 
1977 void Compile::process_value_type_ptr_nodes(PhaseIterGVN &igvn) {
1978   for (int i = value_type_ptr_count(); i > 0; i--) {
1979     ValueTypePtrNode* vtptr = value_type_ptr(i-1);
1980     // Scalarize in safepoints only once all inlining is over; otherwise
1981     // debug info can get inconsistent.
1982     vtptr->make_scalar_in_safepoints(igvn.C->root(), &igvn);
1983     igvn.replace_node(vtptr, vtptr->get_oop());
1984   }
1985   assert(value_type_ptr_count() == 0, "should be empty");
1986   igvn.optimize();
1987 }
1988 
1989 // StringOpts and late inlining of string methods
1990 void Compile::inline_string_calls(bool parse_time) {
1991   {
1992     // remove useless nodes to make the usage analysis simpler
1993     ResourceMark rm;
1994     PhaseRemoveUseless pru(initial_gvn(), for_igvn());
1995   }
1996 
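
The braced blocks in inline_string_calls exist for the ResourceMark: temporary allocations made by the enclosed phase are released when the mark goes out of scope. A tiny RAII stand-in (ArenaMark is hypothetical, not the HotSpot class) showing the pattern:

    #include <cstdio>

    // RAII guard: remember an arena's high-water mark on entry, free back
    // to it on exit, as HotSpot's ResourceMark does for the resource area.
    struct ArenaMark {
      ArenaMark()  { std::puts("mark: record high-water point"); }
      ~ArenaMark() { std::puts("release: free back to the mark"); }
    };

    int main() {
      {
        ArenaMark rm;  // like 'ResourceMark rm;'
        // ... run a throwaway analysis; its temporaries die with rm ...
      }                // rm destructs here, memory reclaimed
      return 0;
    }
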
1997   {
1998     ResourceMark rm;
1999     print_method(PHASE_BEFORE_STRINGOPTS, 3);
2000     PhaseStringOpts pso(initial_gvn(), for_igvn());
2001     print_method(PHASE_AFTER_STRINGOPTS, 3);
2002   }
2003 
2004   // now inline anything that we skipped the first time around
2005   if (!parse_time) {
2006     _late_inlines_pos = _late_inlines.length();
2007   }
2008 
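
Both remove_range_check_casts and process_value_type_ptr_nodes retire a bookkeeping node by rewiring its users onto another node (the cast's input, or the wrapped oop). A toy version of that replace step on a minimal def-use graph; this Node struct is hypothetical, not HotSpot's:

    #include <cassert>
    #include <vector>

    struct Node {
      std::vector<Node*> inputs;   // def edges
      std::vector<Node*> outputs;  // use edges, kept symmetric by hand
    };

    // Rewire every user of old_node to consume replacement instead: the
    // essence of igvn.replace_node(cast, cast->in(1)).
    void replace_node(Node* old_node, Node* replacement) {
      for (Node* use : old_node->outputs) {
        for (Node*& in : use->inputs) {
          if (in == old_node) in = replacement;
        }
        replacement->outputs.push_back(use);
      }
      old_node->outputs.clear();   // old_node is now dead
    }

    int main() {
      Node input, cast, user;
      cast.inputs   = { &input };   // cast->in(1) == input
      input.outputs = { &cast };
      user.inputs   = { &cast };
      cast.outputs  = { &user };

      replace_node(&cast, &input);  // drop the cast, keep the value
      assert(user.inputs[0] == &input);
      assert(cast.outputs.empty());
      return 0;
    }
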


2216   remove_speculative_types(igvn);
2217 
2218   // No more new expensive nodes will be added to the list from here,
2219   // so keep only the actual candidates for optimizations.
2220   cleanup_expensive_nodes(igvn);
2221 
2222   if (!failing() && RenumberLiveNodes && live_nodes() + NodeLimitFudgeFactor < unique()) {
2223     Compile::TracePhase tp("", &timers[_t_renumberLive]);
2224     initial_gvn()->replace_with(&igvn);
2225     for_igvn()->clear();
2226     Unique_Node_List new_worklist(C->comp_arena());
2227     {
2228       ResourceMark rm;
2229       PhaseRenumberLive prl = PhaseRenumberLive(initial_gvn(), for_igvn(), &new_worklist);
2230     }
2231     set_for_igvn(&new_worklist);
2232     igvn = PhaseIterGVN(initial_gvn());
2233     igvn.optimize();
2234   }
2235 
2236   if (value_type_ptr_count() > 0) {
2237     process_value_type_ptr_nodes(igvn);
2238   }
2239 
2240   // Perform escape analysis
2241   if (_do_escape_analysis && ConnectionGraph::has_candidates(this)) {
2242     if (has_loops()) {
2243       // Cleanup graph (remove dead nodes).
2244       TracePhase tp("idealLoop", &timers[_t_idealLoop]);
2245       PhaseIdealLoop ideal_loop( igvn, false, true );
2246       if (major_progress()) print_method(PHASE_PHASEIDEAL_BEFORE_EA, 2);
2247       if (failing())  return;
2248     }
2249     ConnectionGraph::do_analysis(this, &igvn);
2250 
2251     if (failing())  return;
2252 
2253     // Optimize out field loads from scalar replaceable allocations.
2254     igvn.optimize();
2255     print_method(PHASE_ITER_GVN_AFTER_EA, 2);
2256 
2257     if (failing())  return;
2258 
2259     if (congraph() != NULL && macro_count() > 0) {
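
The renumbering step above fires only when the live count plus a fudge factor is still below unique(), i.e. when many node ids have died and the id space is sparse. A toy compaction under the assumption that liveness is already computed (PhaseRenumberLive does this on the real graph):

    #include <cassert>
    #include <vector>

    // Assign dense new ids 0..k-1 to live entries, preserving order;
    // dead entries get -1.
    std::vector<int> renumber_live(const std::vector<bool>& is_live) {
      std::vector<int> new_id(is_live.size(), -1);
      int next = 0;
      for (size_t i = 0; i < is_live.size(); i++) {
        if (is_live[i]) new_id[i] = next++;
      }
      return new_id;
    }

    int main() {
      // Six ids were handed out, but only 0, 3 and 5 are still live.
      std::vector<bool> live = {true, false, false, true, false, true};
      std::vector<int> ids = renumber_live(live);
      assert(ids[0] == 0 && ids[3] == 1 && ids[5] == 2);  // dense again
      return 0;
    }
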


3449           // redundant
3450           for (DUIterator_Fast imax, i = k->fast_outs(imax); i < imax; i++) {
3451             Node* u = k->fast_out(i);
3452             assert(!wq.contains(u), "shouldn't process one node several times");
3453             if (u->Opcode() == Op_LShiftL ||
3454                 u->Opcode() == Op_AddL ||
3455                 u->Opcode() == Op_SubL ||
3456                 u->Opcode() == Op_AddP) {
3457               wq.push(u);
3458             }
3459           }
3460           // Replace all nodes that have the same edges as m with m itself
3461           k->subsume_by(m, this);
3462         }
3463       }
3464     }
3465     break;
3466   }
3467   case Op_ValueType: {
3468     ValueTypeNode* vt = n->as_ValueType();
3469     vt->make_scalar_in_safepoints(root(), NULL);
3470     if (vt->outcnt() == 0) {
3471       vt->disconnect_inputs(NULL, this);
3472     }
3473     break;
3474   }
3475   case Op_ValueTypePtr: {
3476     ShouldNotReachHere();
3477     break;
3478   }
3479   default:
3480     assert( !n->is_Call(), "" );
3481     assert( !n->is_Mem(), "" );
3482     assert( nop != Op_ProfileBoolean, "should be eliminated during IGVN");
3483     break;
3484   }
3485 
3486   // Collect CFG split points
3487   if (n->is_MultiBranch() && !n->is_RangeCheck()) {
3488     frc._tests.push(n);
3489   }
3490 }
3491 
3492 //------------------------------final_graph_reshaping_walk---------------------
3493 // Replacing Opaque nodes with their input in final_graph_reshaping_impl()
3494 // requires that the walk visits a node's inputs before visiting the node.
3495 void Compile::final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc ) {
3496   ResourceArea *area = Thread::current()->resource_area();
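
The property the comment above demands (each node visited only after all of its inputs) is exactly a post-order traversal, done iteratively with an explicit stack just as the real walk uses a Node_Stack. A self-contained sketch with a hypothetical Node type:

    #include <cassert>
    #include <utility>
    #include <vector>

    struct Node {
      int id;
      std::vector<Node*> inputs;
    };

    // Iterative post-order over a DAG: emit a node only after all inputs.
    void postorder(Node* root, std::vector<Node*>& out, std::vector<bool>& visited) {
      std::vector<std::pair<Node*, size_t> > stack;  // (node, next input index)
      if (visited[root->id]) return;
      visited[root->id] = true;
      stack.push_back(std::make_pair(root, (size_t)0));
      while (!stack.empty()) {
        Node* n  = stack.back().first;
        size_t i = stack.back().second;
        if (i < n->inputs.size()) {
          stack.back().second++;             // advance before possibly pushing
          Node* in = n->inputs[i];
          if (in && !visited[in->id]) {
            visited[in->id] = true;
            stack.push_back(std::make_pair(in, (size_t)0));
          }
        } else {
          out.push_back(n);                  // all inputs done: safe to reshape n
          stack.pop_back();
        }
      }
    }

    int main() {
      Node a = {0}, b = {1}, c = {2};
      b.inputs.push_back(&a);
      c.inputs.push_back(&a);
      c.inputs.push_back(&b);
      std::vector<Node*> order;
      std::vector<bool> visited(3, false);
      postorder(&c, order, visited);
      assert(order.size() == 3 && order.back() == &c);  // c last, after its inputs
      return 0;
    }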

