
src/hotspot/share/opto/compile.cpp

(old version)

  62 #include "opto/opcodes.hpp"
  63 #include "opto/output.hpp"
  64 #include "opto/parse.hpp"
  65 #include "opto/phaseX.hpp"
  66 #include "opto/rootnode.hpp"
  67 #include "opto/runtime.hpp"
  68 #include "opto/stringopts.hpp"
  69 #include "opto/type.hpp"
  70 #include "opto/vectornode.hpp"
  71 #include "runtime/arguments.hpp"
  72 #include "runtime/sharedRuntime.hpp"
  73 #include "runtime/signature.hpp"
  74 #include "runtime/stubRoutines.hpp"
  75 #include "runtime/timer.hpp"
  76 #include "utilities/align.hpp"
  77 #include "utilities/copy.hpp"
  78 #include "utilities/macros.hpp"
  79 #if INCLUDE_ZGC
  80 #include "gc/z/c2/zBarrierSetC2.hpp"
  81 #endif
  82 
  83 
  84 // -------------------- Compile::mach_constant_base_node -----------------------
  85 // Constant table base node singleton.
  86 MachConstantBaseNode* Compile::mach_constant_base_node() {
  87   if (_mach_constant_base_node == NULL) {
  88     _mach_constant_base_node = new MachConstantBaseNode();
  89     _mach_constant_base_node->add_req(C->root());
  90   }
  91   return _mach_constant_base_node;
  92 }
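
The accessor above creates the constant table base node lazily, on first request, and wires it to the graph root so it stays reachable. A minimal standalone model of the idiom, assuming illustrative stand-in types (Graph and Node below are not the HotSpot classes):

    // Lazy-singleton idiom modeled outside HotSpot; Node/Graph are stand-ins.
    struct Node { Node* req = nullptr; };

    struct Graph {
      Node  _root;
      Node* _constant_base = nullptr;

      // Created exactly once, on first use, and kept reachable from root.
      Node* constant_base() {
        if (_constant_base == nullptr) {
          _constant_base = new Node();
          _constant_base->req = &_root;   // mirrors add_req(C->root())
        }
        return _constant_base;
      }
    };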
  93 
  94 
  95 /// Support for intrinsics.
  96 
  97 // Return the index at which m must be inserted (or already exists).
  98 // The sort order is by the address of the ciMethod, with is_virtual as minor key.
  99 class IntrinsicDescPair {
 100  private:
 101   ciMethod* _m;
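
The comment above describes a compound sort key: entries are ordered primarily by the ciMethod pointer value, with is_virtual breaking ties, and the returned index is where m either already sits or must be inserted. A simplified standalone model using std::lower_bound over a (pointer, bool) pair; Method and the flat key vector are illustrative, not the real intrinsics table:

    #include <algorithm>
    #include <utility>
    #include <vector>

    struct Method {};
    // (method address, is_virtual): pair's operator< compares the address
    // first and uses the bool as the minor key, matching the comment above.
    using Key = std::pair<const Method*, bool>;

    // Index at which key must be inserted (or already exists).
    size_t intrinsic_insert_index(const std::vector<Key>& sorted, Key key) {
      return std::lower_bound(sorted.begin(), sorted.end(), key) - sorted.begin();
    }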


2387     // No more loop optimizations. Remove all range check dependent CastIINodes.
2388     C->remove_range_check_casts(igvn);
2389     igvn.optimize();
2390   }
2391 
2392 #ifdef ASSERT
2393   BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
2394   bs->verify_gc_barriers(this, BarrierSetC2::BeforeExpand);
2395 #endif
2396 
2397   {
2398     TracePhase tp("macroExpand", &timers[_t_macroExpand]);
2399     PhaseMacroExpand  mex(igvn);
2400     print_method(PHASE_BEFORE_MACRO_EXPANSION, 2);
2401     if (mex.expand_macro_nodes()) {
2402       assert(failing(), "must bail out w/ explicit message");
2403       return;
2404     }
2405   }
2406 
2407   if (opaque4_count() > 0) {
2408     C->remove_opaque4_nodes(igvn);
2409     igvn.optimize();
2410   }
2411 
2412   DEBUG_ONLY( _modified_nodes = NULL; )
2413  } // (End scope of igvn; run destructor if necessary for asserts.)
2414 
2415  process_print_inlining();
2416  // A method with only infinite loops has no edges entering loops from root
2417  {
2418    TracePhase tp("graphReshape", &timers[_t_graphReshaping]);
2419    if (final_graph_reshaping()) {
2420      assert(failing(), "must bail out w/ explicit message");
2421      return;
2422    }
2423  }
2424 
2425  print_method(PHASE_OPTIMIZE_FINISHED, 2);
2426 }
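
Two idioms recur in this tail of Optimize(): a block-scoped TracePhase whose constructor and destructor bracket the phase for timing, and a bail-out protocol in which any step that gives up must already have recorded an explicit failure reason, which the assert checks before the early return. A rough standalone model under those assumptions (PhaseTimer and Compilation are illustrative, not HotSpot types):

    #include <cassert>
    #include <chrono>
    #include <cstdio>

    // RAII timer standing in for TracePhase: starts on construction,
    // reports on scope exit.
    struct PhaseTimer {
      const char* name;
      std::chrono::steady_clock::time_point start = std::chrono::steady_clock::now();
      explicit PhaseTimer(const char* n) : name(n) {}
      ~PhaseTimer() {
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                      std::chrono::steady_clock::now() - start).count();
        std::printf("%s: %lld us\n", name, (long long)us);
      }
    };

    struct Compilation {
      const char* failure_reason = nullptr;
      bool failing() const { return failure_reason != nullptr; }

      bool expand_macro_nodes() {            // true means bailout
        failure_reason = "illustrative bailout";
        return true;
      }

      void optimize_tail() {
        PhaseTimer tp("macroExpand");        // stops when the scope ends
        if (expand_macro_nodes()) {
          assert(failing() && "must bail out with explicit message");
          return;
        }
      }
    };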


3036 #endif
3037     // platform dependent reshaping of the address expression
3038     reshape_address(n->as_AddP());
3039     break;
3040   }
3041 
3042   case Op_CastPP: {
3043     // Remove CastPP nodes to gain more freedom during scheduling but
3044     // keep the dependency they encode as control or precedence edges
3045     // (if control is set already) on memory operations. Some CastPP
3046     // nodes don't have a control (don't carry a dependency): skip
3047     // those.
3048     if (n->in(0) != NULL) {
3049       ResourceMark rm;
3050       Unique_Node_List wq;
3051       wq.push(n);
3052       for (uint next = 0; next < wq.size(); ++next) {
3053         Node *m = wq.at(next);
3054         for (DUIterator_Fast imax, i = m->fast_outs(imax); i < imax; i++) {
3055           Node* use = m->fast_out(i);
3056           if (use->is_Mem() || use->is_EncodeNarrowPtr()) {
3057             use->ensure_control_or_add_prec(n->in(0));
3058           } else {
3059             switch(use->Opcode()) {
3060             case Op_AddP:
3061             case Op_DecodeN:
3062             case Op_DecodeNKlass:
3063             case Op_CheckCastPP:
3064             case Op_CastPP:
3065               wq.push(use);
3066               break;
3067             }
3068           }
3069         }
3070       }
3071     }
3072     const bool is_LP64 = LP64_ONLY(true) NOT_LP64(false);
3073     if (is_LP64 && n->in(1)->is_DecodeN() && Matcher::gen_narrow_oop_implicit_null_checks()) {
3074       Node* in1 = n->in(1);
3075       const Type* t = n->bottom_type();
3076       Node* new_in1 = in1->clone();



src/hotspot/share/opto/compile.cpp (new version, with the Shenandoah changes)

  62 #include "opto/opcodes.hpp"
  63 #include "opto/output.hpp"
  64 #include "opto/parse.hpp"
  65 #include "opto/phaseX.hpp"
  66 #include "opto/rootnode.hpp"
  67 #include "opto/runtime.hpp"
  68 #include "opto/stringopts.hpp"
  69 #include "opto/type.hpp"
  70 #include "opto/vectornode.hpp"
  71 #include "runtime/arguments.hpp"
  72 #include "runtime/sharedRuntime.hpp"
  73 #include "runtime/signature.hpp"
  74 #include "runtime/stubRoutines.hpp"
  75 #include "runtime/timer.hpp"
  76 #include "utilities/align.hpp"
  77 #include "utilities/copy.hpp"
  78 #include "utilities/macros.hpp"
  79 #if INCLUDE_ZGC
  80 #include "gc/z/c2/zBarrierSetC2.hpp"
  81 #endif
  82 #if INCLUDE_SHENANDOAHGC
  83 #include "gc/shenandoah/c2/shenandoahBarrierSetC2.hpp"
  84 #endif
  85 
  86 
  87 // -------------------- Compile::mach_constant_base_node -----------------------
  88 // Constant table base node singleton.
  89 MachConstantBaseNode* Compile::mach_constant_base_node() {
  90   if (_mach_constant_base_node == NULL) {
  91     _mach_constant_base_node = new MachConstantBaseNode();
  92     _mach_constant_base_node->add_req(C->root());
  93   }
  94   return _mach_constant_base_node;
  95 }
  96 
  97 
  98 /// Support for intrinsics.
  99 
 100 // Return the index at which m must be inserted (or already exists).
 101 // The sort order is by the address of the ciMethod, with is_virtual as minor key.
 102 class IntrinsicDescPair {
 103  private:
 104   ciMethod* _m;


2390     // No more loop optimizations. Remove all range check dependent CastIINodes.
2391     C->remove_range_check_casts(igvn);
2392     igvn.optimize();
2393   }
2394 
2395 #ifdef ASSERT
2396   BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
2397   bs->verify_gc_barriers(this, BarrierSetC2::BeforeExpand);
2398 #endif
2399 
2400   {
2401     TracePhase tp("macroExpand", &timers[_t_macroExpand]);
2402     PhaseMacroExpand  mex(igvn);
2403     print_method(PHASE_BEFORE_MACRO_EXPANSION, 2);
2404     if (mex.expand_macro_nodes()) {
2405       assert(failing(), "must bail out w/ explicit message");
2406       return;
2407     }
2408   }
2409 
2410   print_method(PHASE_BEFORE_BARRIER_EXPAND, 2);
2411 
2412 #if INCLUDE_SHENANDOAHGC
2413   if (!ShenandoahWriteBarrierNode::expand(this, igvn, loop_opts_cnt)) {
2414     assert(failing(), "must bail out w/ explicit message");
2415     return;
2416   }
2417 #endif
2418 
2419   if (opaque4_count() > 0) {
2420     C->remove_opaque4_nodes(igvn);
2421     igvn.optimize();
2422   }
2423 
2424   DEBUG_ONLY( _modified_nodes = NULL; )
2425  } // (End scope of igvn; run destructor if necessary for asserts.)
2426 
2427  process_print_inlining();
2428  // A method with only infinite loops has no edges entering loops from root
2429  {
2430    TracePhase tp("graphReshape", &timers[_t_graphReshaping]);
2431    if (final_graph_reshaping()) {
2432      assert(failing(), "must bail out w/ explicit message");
2433      return;
2434    }
2435  }
2436 
2437  print_method(PHASE_OPTIMIZE_FINISHED, 2);
2438 }
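
The new lines expand Shenandoah's write barriers immediately after macro expansion, behind an INCLUDE_SHENANDOAHGC guard so the collector-specific code is only compiled into builds that include Shenandoah, and a false return from the expander follows the same explicit-bailout protocol as expand_macro_nodes(). A minimal model of that guard-plus-bailout shape, with EXAMPLEGC as a purely illustrative stand-in:

    #define INCLUDE_EXAMPLEGC 1   // normally set by the build system

    struct Graph { const char* failure_reason = nullptr; };

    #if INCLUDE_EXAMPLEGC
    // Collector-specific pass; returns false on bailout.
    static bool expand_example_barriers(Graph& g) {
      return true;  // a real pass would rewrite barrier nodes here
    }
    #endif

    static bool expand_barriers(Graph& g) {
    #if INCLUDE_EXAMPLEGC
      if (!expand_example_barriers(g)) {
        g.failure_reason = "barrier expansion failed";  // explicit message
        return false;                                   // mirrors the early return
      }
    #endif
      return true;
    }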


3048 #endif
3049     // platform dependent reshaping of the address expression
3050     reshape_address(n->as_AddP());
3051     break;
3052   }
3053 
3054   case Op_CastPP: {
3055     // Remove CastPP nodes to gain more freedom during scheduling but
3056     // keep the dependency they encode as control or precedence edges
3057     // (if control is set already) on memory operations. Some CastPP
3058     // nodes don't have a control (don't carry a dependency): skip
3059     // those.
3060     if (n->in(0) != NULL) {
3061       ResourceMark rm;
3062       Unique_Node_List wq;
3063       wq.push(n);
3064       for (uint next = 0; next < wq.size(); ++next) {
3065         Node *m = wq.at(next);
3066         for (DUIterator_Fast imax, i = m->fast_outs(imax); i < imax; i++) {
3067           Node* use = m->fast_out(i);
3068           if (use->is_Mem() || use->is_EncodeNarrowPtr() || use->is_ShenandoahBarrier()) {
3069             use->ensure_control_or_add_prec(n->in(0));
3070           } else {
3071             switch(use->Opcode()) {
3072             case Op_AddP:
3073             case Op_DecodeN:
3074             case Op_DecodeNKlass:
3075             case Op_CheckCastPP:
3076             case Op_CastPP:
3077               wq.push(use);
3078               break;
3079             }
3080           }
3081         }
3082       }
3083     }
3084     const bool is_LP64 = LP64_ONLY(true) NOT_LP64(false);
3085     if (is_LP64 && n->in(1)->is_DecodeN() && Matcher::gen_narrow_oop_implicit_null_checks()) {
3086       Node* in1 = n->in(1);
3087       const Type* t = n->bottom_type();
3088       Node* new_in1 = in1->clone();
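
The hunk is cut off just as the LP64 narrow-oop rewrite begins: when the pointer being cast is produced by a DecodeN and the platform can fold null checks into the decode (Matcher::gen_narrow_oop_implicit_null_checks()), the DecodeN is cloned and given the cast's sharper type so the clone can stand in for the CastPP, letting the null check become implicit on the decode. A rough sketch of that shape under those assumptions; every type below is an illustrative stand-in, not the truncated HotSpot code:

    struct Type;
    struct Node {
      Node* in1 = nullptr;             // single data input, like n->in(1)
      const Type* type = nullptr;      // like bottom_type()
      bool is_decode_narrow = false;   // like is_DecodeN()
    };

    // Returns the node that should replace the cast.
    Node* fold_cast_into_decode(Node* cast) {
      Node* src = cast->in1;
      if (src != nullptr && src->is_decode_narrow) {
        Node* clone = new Node(*src);  // in1->clone()
        clone->type = cast->type;      // narrow the decode's type to the cast's
        return clone;                  // caller substitutes this for the cast
      }
      return cast;                     // no rewrite applies
    }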

