
src/hotspot/share/opto/compile.cpp



--- old version

2775     int alias_idx = get_alias_index(n->as_Mem()->adr_type());
2776     assert( n->in(0) != NULL || alias_idx != Compile::AliasIdxRaw ||
2777             // oop will be recorded in oop map if load crosses safepoint
2778             n->is_Load() && (n->as_Load()->bottom_type()->isa_oopptr() ||
2779                              LoadNode::is_immutable_value(n->in(MemNode::Address))),
2780             "raw memory operations should have control edge");
2781   }
2782   if (n->is_MemBar()) {
2783     MemBarNode* mb = n->as_MemBar();
2784     if (mb->trailing_store() || mb->trailing_load_store()) {
2785       assert(mb->leading_membar()->trailing_membar() == mb, "bad membar pair");
2786       Node* mem = mb->in(MemBarNode::Precedent);
2787       assert((mb->trailing_store() && mem->is_Store() && mem->as_Store()->is_release()) ||
2788              (mb->trailing_load_store() && mem->is_LoadStore()), "missing mem op");
2789     } else if (mb->leading()) {
2790       assert(mb->trailing_membar()->leading_membar() == mb, "bad membar pair");
2791     }
2792   }
2793 #endif
2794   // Count FPU ops and common calls, implements item (3)
2795   bool gc_handled = BarrierSet::barrier_set()->barrier_set_c2()->final_graph_reshaping(this, n);
2796   switch( nop ) {
2797   // Count all float operations that may use FPU
2798   case Op_AddF:
2799   case Op_SubF:
2800   case Op_MulF:
2801   case Op_DivF:
2802   case Op_NegF:
2803   case Op_ModF:
2804   case Op_ConvI2F:
2805   case Op_ConF:
2806   case Op_CmpF:
2807   case Op_CmpF3:
2808   // case Op_ConvL2F: // longs are split into 32-bit halves
2809     frc.inc_float_count();
2810     break;
2811 
2812   case Op_ConvF2D:
2813   case Op_ConvD2F:
2814     frc.inc_float_count();
2815     frc.inc_double_count();
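
Aside: the float/double tallies collected above feed the 24-bit FPU mode
selection near the end of Compile::final_graph_reshaping. A paraphrased
sketch of that downstream check (from memory, not verbatim; the real x86
guard also tests Use24BitFPMode and UseSSE):

    // Paraphrased sketch: a method dominated by float math, with no
    // doubles and few calls, can run the x87 FPU in 24-bit precision.
    if (frc.get_float_count() > 32 &&
        frc.get_double_count() == 0 &&
        (10 * frc.get_call_count() < frc.get_float_count())) {
      set_24_bit_selection_and_mode(false, true);
    }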


2922   case Op_GetAndAddL:
2923   case Op_GetAndSetS:
2924   case Op_GetAndSetB:
2925   case Op_GetAndSetI:
2926   case Op_GetAndSetL:
2927   case Op_GetAndSetP:
2928   case Op_GetAndSetN:
2929   case Op_StoreP:
2930   case Op_StoreN:
2931   case Op_StoreNKlass:
2932   case Op_LoadB:
2933   case Op_LoadUB:
2934   case Op_LoadUS:
2935   case Op_LoadI:
2936   case Op_LoadKlass:
2937   case Op_LoadNKlass:
2938   case Op_LoadL:
2939   case Op_LoadL_unaligned:
2940   case Op_LoadPLocked:
2941   case Op_LoadP:
2942 #if INCLUDE_ZGC
2943   case Op_LoadBarrierSlowReg:
2944   case Op_LoadBarrierWeakSlowReg:
2945 #endif
2946   case Op_LoadN:
2947   case Op_LoadRange:
2948   case Op_LoadS: {
2949   handle_mem:
2950 #ifdef ASSERT
2951     if( VerifyOptoOopOffsets ) {
2952       assert( n->is_Mem(), "" );
2953       MemNode *mem  = (MemNode*)n;
2954       // Check to see if address types have grounded out somehow.
2955       const TypeInstPtr *tp = mem->in(MemNode::Address)->bottom_type()->isa_instptr();
2956       assert( !tp || oop_offset_is_sane(tp), "" );
2957     }
2958 #endif
2959     break;
2960   }
2961 
2962   case Op_AddP: {               // Assert sane base pointers
2963     Node *addp = n->in(AddPNode::Address);
2964     assert( !addp->is_AddP() ||
2965             addp->in(AddPNode::Base)->is_top() || // Top OK for allocation
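
The same region after the change follows. Two things differ: the
BarrierSetC2::final_graph_reshaping hook now also receives the node's
opcode (nop), and the ZGC-only Op_LoadBarrierSlowReg /
Op_LoadBarrierWeakSlowReg cases are gone from the shared opcode switch.
A sketch of how a barrier set can claim those opcodes appears after the
listing.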


+++ new version

2775     int alias_idx = get_alias_index(n->as_Mem()->adr_type());
2776     assert( n->in(0) != NULL || alias_idx != Compile::AliasIdxRaw ||
2777             // oop will be recorded in oop map if load crosses safepoint
2778             n->is_Load() && (n->as_Load()->bottom_type()->isa_oopptr() ||
2779                              LoadNode::is_immutable_value(n->in(MemNode::Address))),
2780             "raw memory operations should have control edge");
2781   }
2782   if (n->is_MemBar()) {
2783     MemBarNode* mb = n->as_MemBar();
2784     if (mb->trailing_store() || mb->trailing_load_store()) {
2785       assert(mb->leading_membar()->trailing_membar() == mb, "bad membar pair");
2786       Node* mem = mb->in(MemBarNode::Precedent);
2787       assert((mb->trailing_store() && mem->is_Store() && mem->as_Store()->is_release()) ||
2788              (mb->trailing_load_store() && mem->is_LoadStore()), "missing mem op");
2789     } else if (mb->leading()) {
2790       assert(mb->trailing_membar()->leading_membar() == mb, "bad membar pair");
2791     }
2792   }
2793 #endif
2794   // Count FPU ops and common calls, implements item (3)
2795   bool gc_handled = BarrierSet::barrier_set()->barrier_set_c2()->final_graph_reshaping(this, n, nop);
2796   switch( nop ) {
2797   // Count all float operations that may use FPU
2798   case Op_AddF:
2799   case Op_SubF:
2800   case Op_MulF:
2801   case Op_DivF:
2802   case Op_NegF:
2803   case Op_ModF:
2804   case Op_ConvI2F:
2805   case Op_ConF:
2806   case Op_CmpF:
2807   case Op_CmpF3:
2808   // case Op_ConvL2F: // longs are split into 32-bit halves
2809     frc.inc_float_count();
2810     break;
2811 
2812   case Op_ConvF2D:
2813   case Op_ConvD2F:
2814     frc.inc_float_count();
2815     frc.inc_double_count();


2922   case Op_GetAndAddL:
2923   case Op_GetAndSetS:
2924   case Op_GetAndSetB:
2925   case Op_GetAndSetI:
2926   case Op_GetAndSetL:
2927   case Op_GetAndSetP:
2928   case Op_GetAndSetN:
2929   case Op_StoreP:
2930   case Op_StoreN:
2931   case Op_StoreNKlass:
2932   case Op_LoadB:
2933   case Op_LoadUB:
2934   case Op_LoadUS:
2935   case Op_LoadI:
2936   case Op_LoadKlass:
2937   case Op_LoadNKlass:
2938   case Op_LoadL:
2939   case Op_LoadL_unaligned:
2940   case Op_LoadPLocked:
2941   case Op_LoadP:




2942   case Op_LoadN:
2943   case Op_LoadRange:
2944   case Op_LoadS: {
2945   handle_mem:
2946 #ifdef ASSERT
2947     if( VerifyOptoOopOffsets ) {
2948       assert( n->is_Mem(), "" );
2949       MemNode *mem  = (MemNode*)n;
2950       // Check to see if address types have grounded out somehow.
2951       const TypeInstPtr *tp = mem->in(MemNode::Address)->bottom_type()->isa_instptr();
2952       assert( !tp || oop_offset_is_sane(tp), "" );
2953     }
2954 #endif
2955     break;
2956   }
2957 
2958   case Op_AddP: {               // Assert sane base pointers
2959     Node *addp = n->in(AddPNode::Address);
2960     assert( !addp->is_AddP() ||
2961             addp->in(AddPNode::Base)->is_top() || // Top OK for allocation
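
With this change, GC-private node types no longer need #if INCLUDE_ZGC
islands in the shared switch: Compile hands each node's opcode to the
barrier set first, and the returned gc_handled flag can tell it to skip
further processing of nodes the GC already dealt with. Below is a
minimal sketch of an override using the new parameter; it assumes the
interface virtual is declared as
final_graph_reshaping(Compile*, Node*, uint opcode) const, and
MyBarrierSetC2 is a hypothetical subclass, not the actual ZGC code:

    // Hypothetical BarrierSetC2 subclass claiming its GC-private opcodes.
    bool MyBarrierSetC2::final_graph_reshaping(Compile* compile, Node* n, uint opcode) const {
      switch (opcode) {
      case Op_LoadBarrierSlowReg:      // recognized here rather than in
      case Op_LoadBarrierWeakSlowReg:  // Compile's generic opcode switch
        // ... GC-specific verification/reshaping of n would go here ...
        return true;   // handled: Compile can skip its own switch for n
      default:
        return false;  // not ours: let compile.cpp process the node
      }
    }

Keeping this dispatch behind the BarrierSetC2 interface keeps
compile.cpp GC-agnostic, which is presumably the point of threading nop
through the call.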

