// [fragment] Tail of the debug-only (#ifdef ASSERT) verification section of
// Compile::final_graph_reshaping_impl. The enclosing function and the opening
// of this memory-node check begin before this excerpt.
//
// Raw-memory check: a memory op on the raw alias slice must have a control
// edge, unless it is a load of an oop (recorded in the oop map across
// safepoints) or a load of an immutable value.
// NOTE(review): the condition relies on && binding tighter than || — the
// n->is_Load() clause guards both alternatives on the following two lines.
2775 int alias_idx = get_alias_index(n->as_Mem()->adr_type());
2776 assert( n->in(0) != NULL || alias_idx != Compile::AliasIdxRaw ||
2777 // oop will be recorded in oop map if load crosses safepoint
2778 n->is_Load() && (n->as_Load()->bottom_type()->isa_oopptr() ||
2779 LoadNode::is_immutable_value(n->in(MemNode::Address))),
2780 "raw memory operations should have control edge");
2781 }
// Membar pairing check: a trailing membar must point back at its leading
// membar (and vice versa), and a trailing store/load-store membar must have
// the matching releasing store / load-store node on its Precedent edge.
2782 if (n->is_MemBar()) {
2783 MemBarNode* mb = n->as_MemBar();
2784 if (mb->trailing_store() || mb->trailing_load_store()) {
2785 assert(mb->leading_membar()->trailing_membar() == mb, "bad membar pair");
2786 Node* mem = mb->in(MemBarNode::Precedent);
2787 assert((mb->trailing_store() && mem->is_Store() && mem->as_Store()->is_release()) ||
2788 (mb->trailing_load_store() && mem->is_LoadStore()), "missing mem op");
2789 } else if (mb->leading()) {
2790 assert(mb->trailing_membar()->leading_membar() == mb, "bad membar pair");
2791 }
2792 }
// End of the ASSERT-only verification section.
2793 #endif
// [fragment] Per-opcode switch of final_graph_reshaping_impl: counts float/
// double operations (so the matcher knows whether FPU state matters) and
// sanity-checks memory/address nodes. Interior runs of the original file are
// elided from this excerpt (see NOTE comments below).
2794 // Count FPU ops and common calls, implements item (3)
2795 switch( nop ) {
2796 // Count all float operations that may use FPU
2797 case Op_AddF:
2798 case Op_SubF:
2799 case Op_MulF:
2800 case Op_DivF:
2801 case Op_NegF:
2802 case Op_ModF:
2803 case Op_ConvI2F:
2804 case Op_ConF:
2805 case Op_CmpF:
2806 case Op_CmpF3:
2807 // case Op_ConvL2F: // longs are split into 32-bit halves
2808 frc.inc_float_count();
2809 break;
2810
// Float<->double conversions touch both register classes, so they bump
// both counters.
2811 case Op_ConvF2D:
2812 case Op_ConvD2F:
2813 frc.inc_float_count();
2814 frc.inc_double_count();
// NOTE(review): original lines 2815-2920 are elided from this excerpt
// (the double-count cases and the atomic/GetAndAdd cases up to AddL).
2921 case Op_GetAndAddL:
2922 case Op_GetAndSetS:
2923 case Op_GetAndSetB:
2924 case Op_GetAndSetI:
2925 case Op_GetAndSetL:
2926 case Op_GetAndSetP:
2927 case Op_GetAndSetN:
2928 case Op_StoreP:
2929 case Op_StoreN:
2930 case Op_StoreNKlass:
2931 case Op_LoadB:
2932 case Op_LoadUB:
2933 case Op_LoadUS:
2934 case Op_LoadI:
2935 case Op_LoadKlass:
2936 case Op_LoadNKlass:
2937 case Op_LoadL:
2938 case Op_LoadL_unaligned:
2939 case Op_LoadPLocked:
2940 case Op_LoadP:
2941 #if INCLUDE_ZGC
2942 case Op_LoadBarrierSlowReg:
2943 case Op_LoadBarrierWeakSlowReg:
2944 #endif
2945 case Op_LoadN:
2946 case Op_LoadRange:
2947 case Op_LoadS: {
// Shared landing label: other cases jump here to run the common
// memory-node verification below.
2948 handle_mem:
2949 #ifdef ASSERT
2950 if( VerifyOptoOopOffsets ) {
2951 assert( n->is_Mem(), "" );
2952 MemNode *mem = (MemNode*)n;
2953 // Check to see if address types have grounded out somehow.
2954 const TypeInstPtr *tp = mem->in(MemNode::Address)->bottom_type()->isa_instptr();
2955 assert( !tp || oop_offset_is_sane(tp), "" );
2956 }
2957 #endif
2958 break;
2959 }
2960
2961 case Op_AddP: { // Assert sane base pointers
2962 Node *addp = n->in(AddPNode::Address);
2963 assert( !addp->is_AddP() ||
// NOTE(review): the remainder of the Op_AddP case (original lines
// 2965 onward) is elided from this excerpt.
2964 addp->in(AddPNode::Base)->is_top() || // Top OK for allocation
// [fragment] Tail of the per-opcode switch in final_graph_reshaping_impl:
// the Op_CmpUL lowering, the catch-all default, and the collection of CFG
// split points. The case these closing braces belong to begins before this
// excerpt.
3450 }
3451 }
3452 }
3453 break;
3454 }
// If the platform has no match rule for unsigned-long compare, rewrite
// CmpUL(a, b) as a signed CmpL on (a | (a >> 63)) & max_jlong: for a >= 0
// this is just a; for a < 0 it becomes max_jlong, forcing the signed
// comparison to behave like the unsigned one against the loop limit.
3455 case Op_CmpUL: {
3456 if (!Matcher::has_match_rule(Op_CmpUL)) {
3457 // We don't support unsigned long comparisons. Set 'max_idx_expr'
3458 // to max_julong if < 0 to make the signed comparison fail.
3459 ConINode* sign_pos = new ConINode(TypeInt::make(BitsPerLong - 1));
3460 Node* sign_bit_mask = new RShiftLNode(n->in(1), sign_pos);
3461 Node* orl = new OrLNode(n->in(1), sign_bit_mask);
3462 ConLNode* remove_sign_mask = new ConLNode(TypeLong::make(max_jlong));
3463 Node* andl = new AndLNode(orl, remove_sign_mask);
3464 Node* cmp = new CmpLNode(andl, n->in(2));
3465 n->subsume_by(cmp, this);
3466 }
3467 break;
3468 }
// Any opcode not handled above must not be a call or memory node (those all
// have explicit cases), and ProfileBoolean should already be gone after IGVN.
3469 default:
3470 assert( !n->is_Call(), "" );
3471 assert( !n->is_Mem(), "" );
3472 assert( nop != Op_ProfileBoolean, "should be eliminated during IGVN");
3473 break;
3474 }
3475
// Remember multi-way branches (except range checks) so later phases can
// process the CFG split points recorded in frc._tests.
3476 // Collect CFG split points
3477 if (n->is_MultiBranch() && !n->is_RangeCheck()) {
3478 frc._tests.push(n);
3479 }
3480 }
3481
3482 //------------------------------final_graph_reshaping_walk---------------------
3483 // Replacing Opaque nodes with their input in final_graph_reshaping_impl(),
3484 // requires that the walk visits a node's inputs before visiting the node.
// [fragment] Iterative post-order DFS driver over the ideal graph starting at
// 'root'; marks nodes visited in frc._visited. The body continues past this
// excerpt (nstack/sfpt are used further down — not visible here).
3485 void Compile::final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc ) {
3486 ResourceArea *area = Thread::current()->resource_area();
3487 Unique_Node_List sfpt(area);
3488
3489 frc._visited.set(root->_idx); // first, mark node as visited
3490 uint cnt = root->req();
3491 Node *n = root;
3492 uint i = 0;
|
// [fragment] Tail of the debug-only (#ifdef ASSERT) verification section of
// Compile::final_graph_reshaping_impl. The enclosing function and the opening
// of this memory-node check begin before this excerpt.
//
// Raw-memory check: a memory op on the raw alias slice must have a control
// edge, unless it is a load of an oop (recorded in the oop map across
// safepoints) or a load of an immutable value.
// NOTE(review): the condition relies on && binding tighter than || — the
// n->is_Load() clause guards both alternatives on the following two lines.
2775 int alias_idx = get_alias_index(n->as_Mem()->adr_type());
2776 assert( n->in(0) != NULL || alias_idx != Compile::AliasIdxRaw ||
2777 // oop will be recorded in oop map if load crosses safepoint
2778 n->is_Load() && (n->as_Load()->bottom_type()->isa_oopptr() ||
2779 LoadNode::is_immutable_value(n->in(MemNode::Address))),
2780 "raw memory operations should have control edge");
2781 }
// Membar pairing check: a trailing membar must point back at its leading
// membar (and vice versa), and a trailing store/load-store membar must have
// the matching releasing store / load-store node on its Precedent edge.
2782 if (n->is_MemBar()) {
2783 MemBarNode* mb = n->as_MemBar();
2784 if (mb->trailing_store() || mb->trailing_load_store()) {
2785 assert(mb->leading_membar()->trailing_membar() == mb, "bad membar pair");
2786 Node* mem = mb->in(MemBarNode::Precedent);
2787 assert((mb->trailing_store() && mem->is_Store() && mem->as_Store()->is_release()) ||
2788 (mb->trailing_load_store() && mem->is_LoadStore()), "missing mem op");
2789 } else if (mb->leading()) {
2790 assert(mb->trailing_membar()->leading_membar() == mb, "bad membar pair");
2791 }
2792 }
// End of the ASSERT-only verification section.
2793 #endif
// [fragment] Per-opcode switch of final_graph_reshaping_impl. Before
// dispatching, the active GC's BarrierSetC2 gets first chance to reshape the
// node; 'gc_handled' records whether it claimed the node (consulted in the
// switch's default case, outside this excerpt's visible range here).
// Interior runs of the original file are elided (see NOTE comments below).
2794 // Count FPU ops and common calls, implements item (3)
2795 bool gc_handled = BarrierSet::barrier_set()->barrier_set_c2()->final_graph_reshaping(this, n, nop);
2796 switch( nop ) {
2797 // Count all float operations that may use FPU
2798 case Op_AddF:
2799 case Op_SubF:
2800 case Op_MulF:
2801 case Op_DivF:
2802 case Op_NegF:
2803 case Op_ModF:
2804 case Op_ConvI2F:
2805 case Op_ConF:
2806 case Op_CmpF:
2807 case Op_CmpF3:
2808 // case Op_ConvL2F: // longs are split into 32-bit halves
2809 frc.inc_float_count();
2810 break;
2811
// Float<->double conversions touch both register classes, so they bump
// both counters.
2812 case Op_ConvF2D:
2813 case Op_ConvD2F:
2814 frc.inc_float_count();
2815 frc.inc_double_count();
// NOTE(review): original lines 2816-2921 are elided from this excerpt
// (the double-count cases and the atomic/GetAndAdd cases up to AddL).
2922 case Op_GetAndAddL:
2923 case Op_GetAndSetS:
2924 case Op_GetAndSetB:
2925 case Op_GetAndSetI:
2926 case Op_GetAndSetL:
2927 case Op_GetAndSetP:
2928 case Op_GetAndSetN:
2929 case Op_StoreP:
2930 case Op_StoreN:
2931 case Op_StoreNKlass:
2932 case Op_LoadB:
2933 case Op_LoadUB:
2934 case Op_LoadUS:
2935 case Op_LoadI:
2936 case Op_LoadKlass:
2937 case Op_LoadNKlass:
2938 case Op_LoadL:
2939 case Op_LoadL_unaligned:
2940 case Op_LoadPLocked:
2941 case Op_LoadP:
2942 case Op_LoadN:
2943 case Op_LoadRange:
2944 case Op_LoadS: {
// Shared landing label: other cases jump here to run the common
// memory-node verification below.
2945 handle_mem:
2946 #ifdef ASSERT
2947 if( VerifyOptoOopOffsets ) {
2948 assert( n->is_Mem(), "" );
2949 MemNode *mem = (MemNode*)n;
2950 // Check to see if address types have grounded out somehow.
2951 const TypeInstPtr *tp = mem->in(MemNode::Address)->bottom_type()->isa_instptr();
2952 assert( !tp || oop_offset_is_sane(tp), "" );
2953 }
2954 #endif
2955 break;
2956 }
2957
2958 case Op_AddP: { // Assert sane base pointers
2959 Node *addp = n->in(AddPNode::Address);
2960 assert( !addp->is_AddP() ||
// NOTE(review): the remainder of the Op_AddP case (original lines
// 2962 onward) is elided from this excerpt.
2961 addp->in(AddPNode::Base)->is_top() || // Top OK for allocation
// [fragment] Tail of the per-opcode switch in final_graph_reshaping_impl:
// the Op_CmpUL lowering, the catch-all default (skipped when the GC barrier
// set already handled the node), and the collection of CFG split points. The
// case these closing braces belong to begins before this excerpt.
3447 }
3448 }
3449 }
3450 break;
3451 }
// If the platform has no match rule for unsigned-long compare, rewrite
// CmpUL(a, b) as a signed CmpL on (a | (a >> 63)) & max_jlong: for a >= 0
// this is just a; for a < 0 it becomes max_jlong, forcing the signed
// comparison to behave like the unsigned one against the loop limit.
3452 case Op_CmpUL: {
3453 if (!Matcher::has_match_rule(Op_CmpUL)) {
3454 // We don't support unsigned long comparisons. Set 'max_idx_expr'
3455 // to max_julong if < 0 to make the signed comparison fail.
3456 ConINode* sign_pos = new ConINode(TypeInt::make(BitsPerLong - 1));
3457 Node* sign_bit_mask = new RShiftLNode(n->in(1), sign_pos);
3458 Node* orl = new OrLNode(n->in(1), sign_bit_mask);
3459 ConLNode* remove_sign_mask = new ConLNode(TypeLong::make(max_jlong));
3460 Node* andl = new AndLNode(orl, remove_sign_mask);
3461 Node* cmp = new CmpLNode(andl, n->in(2));
3462 n->subsume_by(cmp, this);
3463 }
3464 break;
3465 }
// Unhandled opcodes: unless the GC barrier set claimed the node above
// ('gc_handled'), it must not be a call or memory node, and ProfileBoolean
// should already be gone after IGVN.
3466 default:
3467 if (!gc_handled) {
3468 assert(!n->is_Call(), "");
3469 assert(!n->is_Mem(), "");
3470 assert(nop != Op_ProfileBoolean, "should be eliminated during IGVN");
3471 }
3472 break;
3473 }
3474
// Remember multi-way branches (except range checks) so later phases can
// process the CFG split points recorded in frc._tests.
3475 // Collect CFG split points
3476 if (n->is_MultiBranch() && !n->is_RangeCheck()) {
3477 frc._tests.push(n);
3478 }
3479 }
3480
3481 //------------------------------final_graph_reshaping_walk---------------------
3482 // Replacing Opaque nodes with their input in final_graph_reshaping_impl(),
3483 // requires that the walk visits a node's inputs before visiting the node.
// [fragment] Iterative post-order DFS driver over the ideal graph starting at
// 'root'; marks nodes visited in frc._visited. The body continues past this
// excerpt (nstack/sfpt are used further down — not visible here).
3484 void Compile::final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc ) {
3485 ResourceArea *area = Thread::current()->resource_area();
3486 Unique_Node_List sfpt(area);
3487
3488 frc._visited.set(root->_idx); // first, mark node as visited
3489 uint cnt = root->req();
3490 Node *n = root;
3491 uint i = 0;
|