209 _ioproj_catchall = pn;
210 else
211 _ioproj_fallthrough = pn;
212 break;
213 case TypeFunc::Memory:
214 if (pn->_is_io_use)
215 _memproj_catchall = pn;
216 else
217 _memproj_fallthrough = pn;
218 break;
219 case TypeFunc::Parms:
220 _resproj = pn;
221 break;
222 default:
223 assert(false, "unexpected projection from allocation node.");
224 }
225 }
226
227 }
228
229 // Eliminate a card mark sequence. p2x is a ConvP2XNode
// Called while eliminating a non-escaping allocation: the stores to the
// object are going away, so the GC write-barrier code hanging off this
// CastP2X is dead. It is folded by replacing the barrier's test/compare
// nodes with constants and letting IGVN collapse the rest.
230 void PhaseMacroExpand::eliminate_card_mark(Node* p2x) {
231 assert(p2x->Opcode() == Op_CastP2X, "ConvP2XNode required");
232 if (!UseG1GC) {
233 // vanilla/CMS post barrier
// Expected shape: CastP2X ->(unique) shift ->(unique) AddP (card address),
// whose users are the card store(s) — and, with UseCondCardMark, a LoadB
// that tests the card first.
234 Node *shift = p2x->unique_out();
235 Node *addp = shift->unique_out();
// Iterate users of the card address backwards (DUIterator_Last) since we
// delete them as we go.
236 for (DUIterator_Last jmin, j = addp->last_outs(jmin); j >= jmin; --j) {
237 Node *mem = addp->last_out(j);
238 if (UseCondCardMark && mem->is_Load()) {
239 assert(mem->Opcode() == Op_LoadB, "unexpected code shape");
240 // The load is checking if the card has been written so
241 // replace it with zero to fold the test.
242 _igvn.replace_node(mem, intcon(0));
243 continue;
244 }
245 assert(mem->is_Store(), "store required");
// Splice the card store out of the memory chain: its users now see the
// store's incoming memory state directly.
246 _igvn.replace_node(mem, mem->in(MemNode::Memory));
247 }
248 }
249 #if INCLUDE_G1GC
250 else {
251 // G1 pre/post barriers
252 assert(p2x->outcnt() <= 2, "expects 1 or 2 users: Xor and URShift nodes");
253 // It could be only one user, URShift node, in Object.clone() intrinsic
254 // but the new allocation is passed to arraycopy stub and it could not
255 // be scalar replaced. So we don't check the case.
256
257 // An other case of only one user (Xor) is when the value check for NULL
258 // in G1 post barrier is folded after CCP so the code which used URShift
259 // is removed.
260
261 // Take Region node before eliminating post barrier since it also
262 // eliminates CastP2X node when it has only one user.
263 Node* this_region = p2x->in(0);
264 assert(this_region != NULL, "");
265
266 // Remove G1 post barrier.
267
268 // Search for CastP2X->Xor->URShift->Cmp path which
269 // checks if the store done to a different from the value's region.
270 // And replace Cmp with #0 (false) to collapse G1 post barrier.
271 Node* xorx = p2x->find_out_with(Op_XorX);
272 if (xorx != NULL) {
273 Node* shift = xorx->unique_out();
274 Node* cmpx = shift->unique_out();
275 assert(cmpx->is_Cmp() && cmpx->unique_out()->is_Bool() &&
276 cmpx->unique_out()->as_Bool()->_test._test == BoolTest::ne,
277 "missing region check in G1 post barrier");
// CC_EQ makes the BoolTest::ne test constant-false, so the barrier's
// guarded path dies.
278 _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));
279
280 // Remove G1 pre barrier.
281
282 // Search "if (marking != 0)" check and set it to "false".
283 // There is no G1 pre barrier if previous stored value is NULL
284 // (for example, after initialization).
285 if (this_region->is_Region() && this_region->req() == 3) {
// The pre-barrier merge has two control inputs; find the IfFalse arm.
286 int ind = 1;
287 if (!this_region->in(ind)->is_IfFalse()) {
288 ind = 2;
289 }
290 if (this_region->in(ind)->is_IfFalse() &&
291 this_region->in(ind)->in(0)->Opcode() == Op_If) {
292 Node* bol = this_region->in(ind)->in(0)->in(1);
293 assert(bol->is_Bool(), "");
294 cmpx = bol->in(1);
// Match: Bool(ne) over Cmp(LoadB(thread-local SATB active flag), 0).
295 if (bol->as_Bool()->_test._test == BoolTest::ne &&
296 cmpx->is_Cmp() && cmpx->in(2) == intcon(0) &&
297 cmpx->in(1)->is_Load()) {
298 Node* adr = cmpx->in(1)->as_Load()->in(MemNode::Address);
299 const int marking_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
// Confirm the load address is ThreadLocal + marking offset (raw AddP,
// base is top) before folding the marking check to constant-false.
300 if (adr->is_AddP() && adr->in(AddPNode::Base) == top() &&
301 adr->in(AddPNode::Address)->Opcode() == Op_ThreadLocal &&
302 adr->in(AddPNode::Offset) == MakeConX(marking_offset)) {
303 _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));
304 }
305 }
306 }
307 }
308 } else {
309 assert(!GraphKit::use_ReduceInitialCardMarks(), "can only happen with card marking");
310 // This is a G1 post barrier emitted by the Object.clone() intrinsic.
311 // Search for the CastP2X->URShiftX->AddP->LoadB->Cmp path which checks if the card
312 // is marked as young_gen and replace the Cmp with 0 (false) to collapse the barrier.
313 Node* shift = p2x->find_out_with(Op_URShiftX);
314 assert(shift != NULL, "missing G1 post barrier");
315 Node* addp = shift->unique_out();
316 Node* load = addp->find_out_with(Op_LoadB);
317 assert(load != NULL, "missing G1 post barrier");
318 Node* cmpx = load->unique_out();
319 assert(cmpx->is_Cmp() && cmpx->unique_out()->is_Bool() &&
320 cmpx->unique_out()->as_Bool()->_test._test == BoolTest::ne,
321 "missing card value check in G1 post barrier");
322 _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));
323 // There is no G1 pre barrier in this case
324 }
325 // Now CastP2X can be removed since it is used only on dead path
326 // which currently still alive until igvn optimize it.
327 assert(p2x->outcnt() == 0 || p2x->unique_out()->Opcode() == Op_URShiftX, "");
328 _igvn.replace_node(p2x, top());
329 }
330 #endif // INCLUDE_G1GC
331 }
332
333 // Search for a memory operation for the specified memory slice.
334 static Node *scan_mem_chain(Node *mem, int alias_idx, int offset, Node *start_mem, Node *alloc, PhaseGVN *phase) {
335 Node *orig_mem = mem;
336 Node *alloc_mem = alloc->in(TypeFunc::Memory);
337 const TypeOopPtr *tinst = phase->C->get_adr_type(alias_idx)->isa_oopptr();
338 while (true) {
339 if (mem == alloc_mem || mem == start_mem ) {
340 return mem; // hit one of our sentinels
341 } else if (mem->is_MergeMem()) {
342 mem = mem->as_MergeMem()->memory_at(alias_idx);
343 } else if (mem->is_Proj() && mem->as_Proj()->_con == TypeFunc::Memory) {
344 Node *in = mem->in(0);
345 // we can safely skip over safepoints, calls, locks and membars because we
346 // already know that the object is safe to eliminate.
347 if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) {
348 return in;
349 } else if (in->is_Call()) {
|
209 _ioproj_catchall = pn;
210 else
211 _ioproj_fallthrough = pn;
212 break;
213 case TypeFunc::Memory:
214 if (pn->_is_io_use)
215 _memproj_catchall = pn;
216 else
217 _memproj_fallthrough = pn;
218 break;
219 case TypeFunc::Parms:
220 _resproj = pn;
221 break;
222 default:
223 assert(false, "unexpected projection from allocation node.");
224 }
225 }
226
227 }
228
229 void PhaseMacroExpand::eliminate_gc_barrier(Node* p2x) {
230 BarrierSetC2 *bs = BarrierSet::barrier_set()->barrier_set_c2();
231 bs->eliminate_gc_barrier(this, p2x);
232 }
233
234 // Search for a memory operation for the specified memory slice.
235 static Node *scan_mem_chain(Node *mem, int alias_idx, int offset, Node *start_mem, Node *alloc, PhaseGVN *phase) {
236 Node *orig_mem = mem;
237 Node *alloc_mem = alloc->in(TypeFunc::Memory);
238 const TypeOopPtr *tinst = phase->C->get_adr_type(alias_idx)->isa_oopptr();
239 while (true) {
240 if (mem == alloc_mem || mem == start_mem ) {
241 return mem; // hit one of our sentinels
242 } else if (mem->is_MergeMem()) {
243 mem = mem->as_MergeMem()->memory_at(alias_idx);
244 } else if (mem->is_Proj() && mem->as_Proj()->_con == TypeFunc::Memory) {
245 Node *in = mem->in(0);
246 // we can safely skip over safepoints, calls, locks and membars because we
247 // already know that the object is safe to eliminate.
248 if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) {
249 return in;
250 } else if (in->is_Call()) {
|
1011 assert(mb->is_Initialize() || !mb->is_MemBar() ||
1012 mb->req() <= MemBarNode::Precedent ||
1013 mb->in(MemBarNode::Precedent) != n,
1014 "MemBarVolatile should be eliminated for non-escaping object");
1015 }
1016 #endif
1017 _igvn.replace_node(n, n->in(MemNode::Memory));
1018 } else if (n->is_ArrayCopy()) {
1019 // Disconnect ArrayCopy node
1020 ArrayCopyNode* ac = n->as_ArrayCopy();
1021 assert(ac->is_clonebasic(), "unexpected array copy kind");
1022 Node* membar_after = ac->proj_out(TypeFunc::Control)->unique_ctrl_out();
1023 disconnect_projections(ac, _igvn);
1024 assert(alloc->in(0)->is_Proj() && alloc->in(0)->in(0)->Opcode() == Op_MemBarCPUOrder, "mem barrier expected before alloca
1025 Node* membar_before = alloc->in(0)->in(0);
1026 disconnect_projections(membar_before->as_MemBar(), _igvn);
1027 if (membar_after->is_MemBar()) {
1028 disconnect_projections(membar_after->as_MemBar(), _igvn);
1029 }
1030 } else {
1031 eliminate_card_mark(n);
1032 }
1033 k -= (oc2 - use->outcnt());
1034 }
1035 } else if (use->is_ArrayCopy()) {
1036 // Disconnect ArrayCopy node
1037 ArrayCopyNode* ac = use->as_ArrayCopy();
1038 assert(ac->is_arraycopy_validated() ||
1039 ac->is_copyof_validated() ||
1040 ac->is_copyofrange_validated(), "unsupported");
1041 CallProjections callprojs;
1042 ac->extract_projections(&callprojs, true);
1043
1044 _igvn.replace_node(callprojs.fallthrough_ioproj, ac->in(TypeFunc::I_O));
1045 _igvn.replace_node(callprojs.fallthrough_memproj, ac->in(TypeFunc::Memory));
1046 _igvn.replace_node(callprojs.fallthrough_catchproj, ac->in(TypeFunc::Control));
1047
1048 // Set control to top. IGVN will remove the remaining projections
1049 ac->set_req(0, top());
1050 ac->replace_edge(res, top());
1051
1052 // Disconnect src right away: it can help find new
1053 // opportunities for allocation elimination
1054 Node* src = ac->in(ArrayCopyNode::Src);
1055 ac->replace_edge(src, top());
1056 // src can be top at this point if src and dest of the
1057 // arraycopy were the same
1058 if (src->outcnt() == 0 && !src->is_top()) {
1059 _igvn.remove_dead_node(src);
1060 }
1061
1062 _igvn._worklist.push(ac);
1063 } else {
1064 eliminate_card_mark(use);
1065 }
1066 j -= (oc1 - res->outcnt());
1067 }
1068 assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
1069 _igvn.remove_dead_node(res);
1070 }
1071
1072 //
1073 // Process other users of allocation's projections
1074 //
1075 if (_resproj != NULL && _resproj->outcnt() != 0) {
1076 // First disconnect stores captured by Initialize node.
1077 // If Initialize node is eliminated first in the following code,
1078 // it will kill such stores and DUIterator_Last will assert.
1079 for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax); j < jmax; j++) {
1080 Node *use = _resproj->fast_out(j);
1081 if (use->is_AddP()) {
1082 // raw memory addresses used only by the initialization
1083 _igvn.replace_node(use, C->top());
|
912 assert(mb->is_Initialize() || !mb->is_MemBar() ||
913 mb->req() <= MemBarNode::Precedent ||
914 mb->in(MemBarNode::Precedent) != n,
915 "MemBarVolatile should be eliminated for non-escaping object");
916 }
917 #endif
918 _igvn.replace_node(n, n->in(MemNode::Memory));
919 } else if (n->is_ArrayCopy()) {
920 // Disconnect ArrayCopy node
921 ArrayCopyNode* ac = n->as_ArrayCopy();
922 assert(ac->is_clonebasic(), "unexpected array copy kind");
923 Node* membar_after = ac->proj_out(TypeFunc::Control)->unique_ctrl_out();
924 disconnect_projections(ac, _igvn);
925 assert(alloc->in(0)->is_Proj() && alloc->in(0)->in(0)->Opcode() == Op_MemBarCPUOrder, "mem barrier expected before alloca
926 Node* membar_before = alloc->in(0)->in(0);
927 disconnect_projections(membar_before->as_MemBar(), _igvn);
928 if (membar_after->is_MemBar()) {
929 disconnect_projections(membar_after->as_MemBar(), _igvn);
930 }
931 } else {
932 eliminate_gc_barrier(n);
933 }
934 k -= (oc2 - use->outcnt());
935 }
936 } else if (use->is_ArrayCopy()) {
937 // Disconnect ArrayCopy node
938 ArrayCopyNode* ac = use->as_ArrayCopy();
939 assert(ac->is_arraycopy_validated() ||
940 ac->is_copyof_validated() ||
941 ac->is_copyofrange_validated(), "unsupported");
942 CallProjections callprojs;
943 ac->extract_projections(&callprojs, true);
944
945 _igvn.replace_node(callprojs.fallthrough_ioproj, ac->in(TypeFunc::I_O));
946 _igvn.replace_node(callprojs.fallthrough_memproj, ac->in(TypeFunc::Memory));
947 _igvn.replace_node(callprojs.fallthrough_catchproj, ac->in(TypeFunc::Control));
948
949 // Set control to top. IGVN will remove the remaining projections
950 ac->set_req(0, top());
951 ac->replace_edge(res, top());
952
953 // Disconnect src right away: it can help find new
954 // opportunities for allocation elimination
955 Node* src = ac->in(ArrayCopyNode::Src);
956 ac->replace_edge(src, top());
957 // src can be top at this point if src and dest of the
958 // arraycopy were the same
959 if (src->outcnt() == 0 && !src->is_top()) {
960 _igvn.remove_dead_node(src);
961 }
962
963 _igvn._worklist.push(ac);
964 } else {
965 eliminate_gc_barrier(use);
966 }
967 j -= (oc1 - res->outcnt());
968 }
969 assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
970 _igvn.remove_dead_node(res);
971 }
972
973 //
974 // Process other users of allocation's projections
975 //
976 if (_resproj != NULL && _resproj->outcnt() != 0) {
977 // First disconnect stores captured by Initialize node.
978 // If Initialize node is eliminated first in the following code,
979 // it will kill such stores and DUIterator_Last will assert.
980 for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax); j < jmax; j++) {
981 Node *use = _resproj->fast_out(j);
982 if (use->is_AddP()) {
983 // raw memory addresses used only by the initialization
984 _igvn.replace_node(use, C->top());
|
2783 break;
2784 case Node::Class_AllocateArray:
2785 expand_allocate_array(n->as_AllocateArray());
2786 break;
2787 case Node::Class_Lock:
2788 expand_lock_node(n->as_Lock());
2789 break;
2790 case Node::Class_Unlock:
2791 expand_unlock_node(n->as_Unlock());
2792 break;
2793 default:
2794 assert(false, "unknown node type in macro list");
2795 }
2796 assert(C->macro_count() < macro_count, "must have deleted a node from macro list");
2797 if (C->failing()) return true;
2798 }
2799
2800 _igvn.set_delay_transform(false);
2801 _igvn.optimize();
2802 if (C->failing()) return true;
2803 return false;
2804 }
|
2684 break;
2685 case Node::Class_AllocateArray:
2686 expand_allocate_array(n->as_AllocateArray());
2687 break;
2688 case Node::Class_Lock:
2689 expand_lock_node(n->as_Lock());
2690 break;
2691 case Node::Class_Unlock:
2692 expand_unlock_node(n->as_Unlock());
2693 break;
2694 default:
2695 assert(false, "unknown node type in macro list");
2696 }
2697 assert(C->macro_count() < macro_count, "must have deleted a node from macro list");
2698 if (C->failing()) return true;
2699 }
2700
2701 _igvn.set_delay_transform(false);
2702 _igvn.optimize();
2703 if (C->failing()) return true;
2704 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
2705 return bs->expand_macro_nodes(this);
2706 }
|