< prev index next >

src/share/vm/opto/macro.cpp

Print this page
rev 12906 : [mq]: gc_interface


 207         else
 208           _ioproj_fallthrough = pn;
 209         break;
 210       case TypeFunc::Memory:
 211         if (pn->_is_io_use)
 212           _memproj_catchall = pn;
 213         else
 214           _memproj_fallthrough = pn;
 215         break;
 216       case TypeFunc::Parms:
 217         _resproj = pn;
 218         break;
 219       default:
 220         assert(false, "unexpected projection from allocation node.");
 221     }
 222   }
 223 
 224 }
 225 
 226 // Eliminate a card mark sequence.  p2x is a ConvP2XNode
     // Called while scalar-replacing a non-escaping allocation: the stores into
     // the object are being removed, so the GC write-barrier subgraph hanging
     // off this CastP2X must be disconnected as well.  Handles the vanilla/CMS
     // card-table barrier and the G1 pre/post barrier shapes; the disconnected
     // nodes are cleaned up by subsequent IGVN.
 227 void PhaseMacroExpand::eliminate_card_mark(Node* p2x) {
 228   assert(p2x->Opcode() == Op_CastP2X, "ConvP2XNode required");
 229   if (!UseG1GC) {
 230     // vanilla/CMS post barrier
     // Shape: CastP2X -> URShift (card index) -> AddP (card address) -> StoreB.
 231     Node *shift = p2x->unique_out();
 232     Node *addp = shift->unique_out();
 233     for (DUIterator_Last jmin, j = addp->last_outs(jmin); j >= jmin; --j) {
 234       Node *mem = addp->last_out(j);
 235       if (UseCondCardMark && mem->is_Load()) {
 236         assert(mem->Opcode() == Op_LoadB, "unexpected code shape");
 237         // The load is checking if the card has been written so
 238         // replace it with zero to fold the test.
 239         _igvn.replace_node(mem, intcon(0));
 240         continue;
 241       }
 242       assert(mem->is_Store(), "store required");
 243       _igvn.replace_node(mem, mem->in(MemNode::Memory));  // elide the card-mark store; keep prior memory state
 244     }
 245   } else {
 246     // G1 pre/post barriers
 247     assert(p2x->outcnt() <= 2, "expects 1 or 2 users: Xor and URShift nodes");
 248     // It could be only one user, URShift node, in Object.clone() intrinsic
 249     // but the new allocation is passed to arraycopy stub and it could not
 250     // be scalar replaced. So we don't check the case.
 251
 252     // Another case of only one user (Xor) is when the value check for NULL
 253     // in G1 post barrier is folded after CCP so the code which used URShift
 254     // is removed.
 255
 256     // Take Region node before eliminating post barrier since it also
 257     // eliminates CastP2X node when it has only one user.
 258     Node* this_region = p2x->in(0);
 259     assert(this_region != NULL, "");
 260
 261     // Remove G1 post barrier.
 262
 263     // Search for CastP2X->Xor->URShift->Cmp path which
 264     // checks if the store done to a different from the value's region.
 265     // And replace Cmp with #0 (false) to collapse G1 post barrier.
 266     Node* xorx = p2x->find_out_with(Op_XorX);
 267     if (xorx != NULL) {
 268       Node* shift = xorx->unique_out();
 269       Node* cmpx = shift->unique_out();
 270       assert(cmpx->is_Cmp() && cmpx->unique_out()->is_Bool() &&
 271       cmpx->unique_out()->as_Bool()->_test._test == BoolTest::ne,
 272       "missing region check in G1 post barrier");
 273       _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));  // makes the 'ne' test constant-false
 274
 275       // Remove G1 pre barrier.
 276
 277       // Search "if (marking != 0)" check and set it to "false".
 278       // There is no G1 pre barrier if previous stored value is NULL
 279       // (for example, after initialization).
 280       if (this_region->is_Region() && this_region->req() == 3) {
 281         int ind = 1;
 282         if (!this_region->in(ind)->is_IfFalse()) {
 283           ind = 2;
 284         }
 285         if (this_region->in(ind)->is_IfFalse()) {
 286           Node* bol = this_region->in(ind)->in(0)->in(1);
 287           assert(bol->is_Bool(), "");
 288           cmpx = bol->in(1);
 289           if (bol->as_Bool()->_test._test == BoolTest::ne &&
 290               cmpx->is_Cmp() && cmpx->in(2) == intcon(0) &&
 291               cmpx->in(1)->is_Load()) {
 292             Node* adr = cmpx->in(1)->as_Load()->in(MemNode::Address);
             // Verify the load really reads the thread-local SATB
             // "marking active" flag before folding the check.
 293             const int marking_offset = in_bytes(JavaThread::satb_mark_queue_offset() +
 294                                                 SATBMarkQueue::byte_offset_of_active());
 295             if (adr->is_AddP() && adr->in(AddPNode::Base) == top() &&
 296                 adr->in(AddPNode::Address)->Opcode() == Op_ThreadLocal &&
 297                 adr->in(AddPNode::Offset) == MakeConX(marking_offset)) {
 298               _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));  // fold "marking != 0" to false
 299             }
 300           }
 301         }
 302       }
 303     } else {
 304       assert(!GraphKit::use_ReduceInitialCardMarks(), "can only happen with card marking");
 305       // This is a G1 post barrier emitted by the Object.clone() intrinsic.
 306       // Search for the CastP2X->URShiftX->AddP->LoadB->Cmp path which checks if the card
 307       // is marked as young_gen and replace the Cmp with 0 (false) to collapse the barrier.
 308       Node* shift = p2x->find_out_with(Op_URShiftX);
 309       assert(shift != NULL, "missing G1 post barrier");
 310       Node* addp = shift->unique_out();
 311       Node* load = addp->find_out_with(Op_LoadB);
 312       assert(load != NULL, "missing G1 post barrier");
 313       Node* cmpx = load->unique_out();
 314       assert(cmpx->is_Cmp() && cmpx->unique_out()->is_Bool() &&
 315              cmpx->unique_out()->as_Bool()->_test._test == BoolTest::ne,
 316              "missing card value check in G1 post barrier");
 317       _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));
 318       // There is no G1 pre barrier in this case
 319     }
 320     // Now CastP2X can be removed since it is used only on a dead path
 321     // which is currently still alive until igvn optimizes it.
 322     assert(p2x->outcnt() == 0 || p2x->unique_out()->Opcode() == Op_URShiftX, "");
 323     _igvn.replace_node(p2x, top());
 324   }
 325 }
 326 
 327 // Search for a memory operation for the specified memory slice.
 328 static Node *scan_mem_chain(Node *mem, int alias_idx, int offset, Node *start_mem, Node *alloc, PhaseGVN *phase) {
 329   Node *orig_mem = mem;
 330   Node *alloc_mem = alloc->in(TypeFunc::Memory);
 331   const TypeOopPtr *tinst = phase->C->get_adr_type(alias_idx)->isa_oopptr();
 332   while (true) {
 333     if (mem == alloc_mem || mem == start_mem ) {
 334       return mem;  // hit one of our sentinels
 335     } else if (mem->is_MergeMem()) {
 336       mem = mem->as_MergeMem()->memory_at(alias_idx);
 337     } else if (mem->is_Proj() && mem->as_Proj()->_con == TypeFunc::Memory) {
 338       Node *in = mem->in(0);
 339       // we can safely skip over safepoints, calls, locks and membars because we
 340       // already know that the object is safe to eliminate.
 341       if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) {
 342         return in;
 343       } else if (in->is_Call()) {
 344         CallNode *call = in->as_Call();


1005               assert(mb->is_Initialize() || !mb->is_MemBar() ||
1006                      mb->req() <= MemBarNode::Precedent ||
1007                      mb->in(MemBarNode::Precedent) != n,
1008                      "MemBarVolatile should be eliminated for non-escaping object");
1009             }
1010 #endif
1011             _igvn.replace_node(n, n->in(MemNode::Memory));
1012           } else if (n->is_ArrayCopy()) {
1013             // Disconnect ArrayCopy node
1014             ArrayCopyNode* ac = n->as_ArrayCopy();
1015             assert(ac->is_clonebasic(), "unexpected array copy kind");
1016             Node* membar_after = ac->proj_out(TypeFunc::Control)->unique_ctrl_out();
1017             disconnect_projections(ac, _igvn);
1018             assert(alloc->in(0)->is_Proj() && alloc->in(0)->in(0)->Opcode() == Op_MemBarCPUOrder, "mem barrier expected before allocation");
1019             Node* membar_before = alloc->in(0)->in(0);
1020             disconnect_projections(membar_before->as_MemBar(), _igvn);
1021             if (membar_after->is_MemBar()) {
1022               disconnect_projections(membar_after->as_MemBar(), _igvn);
1023             }
1024           } else {
1025             eliminate_card_mark(n);
1026           }
1027           k -= (oc2 - use->outcnt());
1028         }
1029       } else if (use->is_ArrayCopy()) {
1030         // Disconnect ArrayCopy node
1031         ArrayCopyNode* ac = use->as_ArrayCopy();
1032         assert(ac->is_arraycopy_validated() ||
1033                ac->is_copyof_validated() ||
1034                ac->is_copyofrange_validated(), "unsupported");
1035         CallProjections callprojs;
1036         ac->extract_projections(&callprojs, true);
1037 
1038         _igvn.replace_node(callprojs.fallthrough_ioproj, ac->in(TypeFunc::I_O));
1039         _igvn.replace_node(callprojs.fallthrough_memproj, ac->in(TypeFunc::Memory));
1040         _igvn.replace_node(callprojs.fallthrough_catchproj, ac->in(TypeFunc::Control));
1041 
1042         // Set control to top. IGVN will remove the remaining projections
1043         ac->set_req(0, top());
1044         ac->replace_edge(res, top());
1045 
1046         // Disconnect src right away: it can help find new
1047         // opportunities for allocation elimination
1048         Node* src = ac->in(ArrayCopyNode::Src);
1049         ac->replace_edge(src, top());
1050         if (src->outcnt() == 0) {
1051           _igvn.remove_dead_node(src);
1052         }
1053 
1054         _igvn._worklist.push(ac);
1055       } else {
1056         eliminate_card_mark(use);
1057       }
1058       j -= (oc1 - res->outcnt());
1059     }
1060     assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
1061     _igvn.remove_dead_node(res);
1062   }
1063 
1064   //
1065   // Process other users of allocation's projections
1066   //
1067   if (_resproj != NULL && _resproj->outcnt() != 0) {
1068     // First disconnect stores captured by Initialize node.
1069     // If Initialize node is eliminated first in the following code,
1070     // it will kill such stores and DUIterator_Last will assert.
1071     for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax);  j < jmax; j++) {
1072       Node *use = _resproj->fast_out(j);
1073       if (use->is_AddP()) {
1074         // raw memory addresses used only by the initialization
1075         _igvn.replace_node(use, C->top());
1076         --j; --jmax;




 207         else
 208           _ioproj_fallthrough = pn;
 209         break;
 210       case TypeFunc::Memory:
 211         if (pn->_is_io_use)
 212           _memproj_catchall = pn;
 213         else
 214           _memproj_fallthrough = pn;
 215         break;
 216       case TypeFunc::Parms:
 217         _resproj = pn;
 218         break;
 219       default:
 220         assert(false, "unexpected projection from allocation node.");
 221     }
 222   }
 223 
 224 }
 225 
 226 // Eliminate a GC barrier sequence.  p2x is a CastP2X node rooting the
     // barrier subgraph of a scalar-replaced allocation; the actual
     // elimination is delegated to the active GC's C2 barrier-set code
     // generator (GC interface), so this file stays collector-agnostic.
 227 void PhaseMacroExpand::eliminate_gc_barrier(Node* p2x) {
 228   C2BarrierSetCodeGen *code_gen = Universe::heap()->barrier_set()->c2_code_gen();  // GC-specific C2 barrier expander
 229   code_gen->eliminate_gc_barrier(this, p2x);




































































































 230 }
 231 
 232 // Search for a memory operation for the specified memory slice.
 233 static Node *scan_mem_chain(Node *mem, int alias_idx, int offset, Node *start_mem, Node *alloc, PhaseGVN *phase) {
 234   Node *orig_mem = mem;
 235   Node *alloc_mem = alloc->in(TypeFunc::Memory);
 236   const TypeOopPtr *tinst = phase->C->get_adr_type(alias_idx)->isa_oopptr();
 237   while (true) {
 238     if (mem == alloc_mem || mem == start_mem ) {
 239       return mem;  // hit one of our sentinels
 240     } else if (mem->is_MergeMem()) {
 241       mem = mem->as_MergeMem()->memory_at(alias_idx);
 242     } else if (mem->is_Proj() && mem->as_Proj()->_con == TypeFunc::Memory) {
 243       Node *in = mem->in(0);
 244       // we can safely skip over safepoints, calls, locks and membars because we
 245       // already know that the object is safe to eliminate.
 246       if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) {
 247         return in;
 248       } else if (in->is_Call()) {
 249         CallNode *call = in->as_Call();


 910               assert(mb->is_Initialize() || !mb->is_MemBar() ||
 911                      mb->req() <= MemBarNode::Precedent ||
 912                      mb->in(MemBarNode::Precedent) != n,
 913                      "MemBarVolatile should be eliminated for non-escaping object");
 914             }
 915 #endif
 916             _igvn.replace_node(n, n->in(MemNode::Memory));
 917           } else if (n->is_ArrayCopy()) {
 918             // Disconnect ArrayCopy node
 919             ArrayCopyNode* ac = n->as_ArrayCopy();
 920             assert(ac->is_clonebasic(), "unexpected array copy kind");
 921             Node* membar_after = ac->proj_out(TypeFunc::Control)->unique_ctrl_out();
 922             disconnect_projections(ac, _igvn);
 923             assert(alloc->in(0)->is_Proj() && alloc->in(0)->in(0)->Opcode() == Op_MemBarCPUOrder, "mem barrier expected before allocation");
 924             Node* membar_before = alloc->in(0)->in(0);
 925             disconnect_projections(membar_before->as_MemBar(), _igvn);
 926             if (membar_after->is_MemBar()) {
 927               disconnect_projections(membar_after->as_MemBar(), _igvn);
 928             }
 929           } else {
 930             eliminate_gc_barrier(n);
 931           }
 932           k -= (oc2 - use->outcnt());
 933         }
 934       } else if (use->is_ArrayCopy()) {
 935         // Disconnect ArrayCopy node
 936         ArrayCopyNode* ac = use->as_ArrayCopy();
 937         assert(ac->is_arraycopy_validated() ||
 938                ac->is_copyof_validated() ||
 939                ac->is_copyofrange_validated(), "unsupported");
 940         CallProjections callprojs;
 941         ac->extract_projections(&callprojs, true);
 942 
 943         _igvn.replace_node(callprojs.fallthrough_ioproj, ac->in(TypeFunc::I_O));
 944         _igvn.replace_node(callprojs.fallthrough_memproj, ac->in(TypeFunc::Memory));
 945         _igvn.replace_node(callprojs.fallthrough_catchproj, ac->in(TypeFunc::Control));
 946 
 947         // Set control to top. IGVN will remove the remaining projections
 948         ac->set_req(0, top());
 949         ac->replace_edge(res, top());
 950 
 951         // Disconnect src right away: it can help find new
 952         // opportunities for allocation elimination
 953         Node* src = ac->in(ArrayCopyNode::Src);
 954         ac->replace_edge(src, top());
 955         if (src->outcnt() == 0) {
 956           _igvn.remove_dead_node(src);
 957         }
 958 
 959         _igvn._worklist.push(ac);
 960       } else {
 961         eliminate_gc_barrier(use);
 962       }
 963       j -= (oc1 - res->outcnt());
 964     }
 965     assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
 966     _igvn.remove_dead_node(res);
 967   }
 968 
 969   //
 970   // Process other users of allocation's projections
 971   //
 972   if (_resproj != NULL && _resproj->outcnt() != 0) {
 973     // First disconnect stores captured by Initialize node.
 974     // If Initialize node is eliminated first in the following code,
 975     // it will kill such stores and DUIterator_Last will assert.
 976     for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax);  j < jmax; j++) {
 977       Node *use = _resproj->fast_out(j);
 978       if (use->is_AddP()) {
 979         // raw memory addresses used only by the initialization
 980         _igvn.replace_node(use, C->top());
 981         --j; --jmax;


< prev index next >