209 _ioproj_catchall = pn;
210 else
211 _ioproj_fallthrough = pn;
212 break;
213 case TypeFunc::Memory:
214 if (pn->_is_io_use)
215 _memproj_catchall = pn;
216 else
217 _memproj_fallthrough = pn;
218 break;
219 case TypeFunc::Parms:
220 _resproj = pn;
221 break;
222 default:
223 assert(false, "unexpected projection from allocation node.");
224 }
225 }
226
227 }
228
// Folds away the GC write-barrier subgraph attached to a CastP2X node that
// belongs to a scalar-replaced (non-escaping) allocation.  The barrier checks
// are replaced by constants so the whole dead subgraph collapses under IGVN.
// p2x: the CastP2X node converting the dead object's oop for barrier address
// arithmetic (asserted below).
229 // Eliminate a card mark sequence. p2x is a ConvP2XNode
230 void PhaseMacroExpand::eliminate_card_mark(Node* p2x) {
231 assert(p2x->Opcode() == Op_CastP2X, "ConvP2XNode required");
232 if (!UseG1GC) {
233 // vanilla/CMS post barrier
// Expected shape: CastP2X -> (URShift) -> AddP -> card store(s)/load(s).
234 Node *shift = p2x->unique_out();
235 Node *addp = shift->unique_out();
// Reverse iteration (DUIterator_Last) so nodes can be removed while walking
// the use list.
236 for (DUIterator_Last jmin, j = addp->last_outs(jmin); j >= jmin; --j) {
237 Node *mem = addp->last_out(j);
238 if (UseCondCardMark && mem->is_Load()) {
239 assert(mem->Opcode() == Op_LoadB, "unexpected code shape");
240 // The load is checking if the card has been written so
241 // replace it with zero to fold the test.
242 _igvn.replace_node(mem, intcon(0));
243 continue;
244 }
245 assert(mem->is_Store(), "store required");
// Bypass the card store: splice its input memory state in its place.
246 _igvn.replace_node(mem, mem->in(MemNode::Memory));
247 }
248 } else {
249 // G1 pre/post barriers
250 assert(p2x->outcnt() <= 2, "expects 1 or 2 users: Xor and URShift nodes");
251 // It could be only one user, URShift node, in Object.clone() intrinsic
252 // but the new allocation is passed to arraycopy stub and it could not
253 // be scalar replaced. So we don't check the case.
254
255 // An other case of only one user (Xor) is when the value check for NULL
256 // in G1 post barrier is folded after CCP so the code which used URShift
257 // is removed.
258
259 // Take Region node before eliminating post barrier since it also
260 // eliminates CastP2X node when it has only one user.
261 Node* this_region = p2x->in(0);
262 assert(this_region != NULL, "");
263
264 // Remove G1 post barrier.
265
266 // Search for CastP2X->Xor->URShift->Cmp path which
267 // checks if the store done to a different from the value's region.
268 // And replace Cmp with #0 (false) to collapse G1 post barrier.
269 Node* xorx = p2x->find_out_with(Op_XorX);
270 if (xorx != NULL) {
271 Node* shift = xorx->unique_out();
272 Node* cmpx = shift->unique_out();
273 assert(cmpx->is_Cmp() && cmpx->unique_out()->is_Bool() &&
274 cmpx->unique_out()->as_Bool()->_test._test == BoolTest::ne,
275 "missing region check in G1 post barrier");
// The guarding Bool tests "ne"; forcing the Cmp to CC_EQ makes the test
// constant-false, so the barrier branch folds away.
276 _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));
277
278 // Remove G1 pre barrier.
279
280 // Search "if (marking != 0)" check and set it to "false".
281 // There is no G1 pre barrier if previous stored value is NULL
282 // (for example, after initialization).
// req() == 3 means a two-input Region: the merge of the marking-active
// if/else diamond emitted by the pre barrier.
283 if (this_region->is_Region() && this_region->req() == 3) {
284 int ind = 1;
285 if (!this_region->in(ind)->is_IfFalse()) {
286 ind = 2;
287 }
288 if (this_region->in(ind)->is_IfFalse() &&
289 this_region->in(ind)->in(0)->Opcode() == Op_If) {
290 Node* bol = this_region->in(ind)->in(0)->in(1);
291 assert(bol->is_Bool(), "");
292 cmpx = bol->in(1);
// Match Cmp(LoadB(ThreadLocal + satb_mark_queue_active_offset), 0) ne 0,
// i.e. the "is SATB marking active?" check of the G1 pre barrier.
293 if (bol->as_Bool()->_test._test == BoolTest::ne &&
294 cmpx->is_Cmp() && cmpx->in(2) == intcon(0) &&
295 cmpx->in(1)->is_Load()) {
296 Node* adr = cmpx->in(1)->as_Load()->in(MemNode::Address);
297 const int marking_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
298 if (adr->is_AddP() && adr->in(AddPNode::Base) == top() &&
299 adr->in(AddPNode::Address)->Opcode() == Op_ThreadLocal &&
300 adr->in(AddPNode::Offset) == MakeConX(marking_offset)) {
// Force the marking check false: the pre barrier body becomes dead.
301 _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));
302 }
303 }
304 }
305 }
306 } else {
307 assert(!GraphKit::use_ReduceInitialCardMarks(), "can only happen with card marking");
308 // This is a G1 post barrier emitted by the Object.clone() intrinsic.
309 // Search for the CastP2X->URShiftX->AddP->LoadB->Cmp path which checks if the card
310 // is marked as young_gen and replace the Cmp with 0 (false) to collapse the barrier.
311 Node* shift = p2x->find_out_with(Op_URShiftX);
312 assert(shift != NULL, "missing G1 post barrier");
313 Node* addp = shift->unique_out();
314 Node* load = addp->find_out_with(Op_LoadB);
315 assert(load != NULL, "missing G1 post barrier");
316 Node* cmpx = load->unique_out();
317 assert(cmpx->is_Cmp() && cmpx->unique_out()->is_Bool() &&
318 cmpx->unique_out()->as_Bool()->_test._test == BoolTest::ne,
319 "missing card value check in G1 post barrier");
// As above: "ne" test against CC_EQ folds to false, killing the barrier.
320 _igvn.replace_node(cmpx, makecon(TypeInt::CC_EQ));
321 // There is no G1 pre barrier in this case
322 }
323 // Now CastP2X can be removed since it is used only on dead path
324 // which currently still alive until igvn optimize it.
325 assert(p2x->outcnt() == 0 || p2x->unique_out()->Opcode() == Op_URShiftX, "");
326 _igvn.replace_node(p2x, top());
327 }
328 }
329
330 // Search for a memory operation for the specified memory slice.
331 static Node *scan_mem_chain(Node *mem, int alias_idx, int offset, Node *start_mem, Node *alloc, PhaseGVN *phase) {
332 Node *orig_mem = mem;
333 Node *alloc_mem = alloc->in(TypeFunc::Memory);
334 const TypeOopPtr *tinst = phase->C->get_adr_type(alias_idx)->isa_oopptr();
335 while (true) {
336 if (mem == alloc_mem || mem == start_mem ) {
337 return mem; // hit one of our sentinels
338 } else if (mem->is_MergeMem()) {
339 mem = mem->as_MergeMem()->memory_at(alias_idx);
340 } else if (mem->is_Proj() && mem->as_Proj()->_con == TypeFunc::Memory) {
341 Node *in = mem->in(0);
342 // we can safely skip over safepoints, calls, locks and membars because we
343 // already know that the object is safe to eliminate.
344 if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) {
345 return in;
346 } else if (in->is_Call()) {
|
209 _ioproj_catchall = pn;
210 else
211 _ioproj_fallthrough = pn;
212 break;
213 case TypeFunc::Memory:
214 if (pn->_is_io_use)
215 _memproj_catchall = pn;
216 else
217 _memproj_fallthrough = pn;
218 break;
219 case TypeFunc::Parms:
220 _resproj = pn;
221 break;
222 default:
223 assert(false, "unexpected projection from allocation node.");
224 }
225 }
226
227 }
228
229 void PhaseMacroExpand::eliminate_gc_barrier(Node* p2x) {
230 BarrierSetC2 *bs = BarrierSet::barrier_set()->barrier_set_c2();
231 bs->eliminate_gc_barrier(this, p2x);
232 }
233
234 // Search for a memory operation for the specified memory slice.
235 static Node *scan_mem_chain(Node *mem, int alias_idx, int offset, Node *start_mem, Node *alloc, PhaseGVN *phase) {
236 Node *orig_mem = mem;
237 Node *alloc_mem = alloc->in(TypeFunc::Memory);
238 const TypeOopPtr *tinst = phase->C->get_adr_type(alias_idx)->isa_oopptr();
239 while (true) {
240 if (mem == alloc_mem || mem == start_mem ) {
241 return mem; // hit one of our sentinels
242 } else if (mem->is_MergeMem()) {
243 mem = mem->as_MergeMem()->memory_at(alias_idx);
244 } else if (mem->is_Proj() && mem->as_Proj()->_con == TypeFunc::Memory) {
245 Node *in = mem->in(0);
246 // we can safely skip over safepoints, calls, locks and membars because we
247 // already know that the object is safe to eliminate.
248 if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) {
249 return in;
250 } else if (in->is_Call()) {
|
1008 assert(mb->is_Initialize() || !mb->is_MemBar() ||
1009 mb->req() <= MemBarNode::Precedent ||
1010 mb->in(MemBarNode::Precedent) != n,
1011 "MemBarVolatile should be eliminated for non-escaping object");
1012 }
1013 #endif
1014 _igvn.replace_node(n, n->in(MemNode::Memory));
1015 } else if (n->is_ArrayCopy()) {
1016 // Disconnect ArrayCopy node
1017 ArrayCopyNode* ac = n->as_ArrayCopy();
1018 assert(ac->is_clonebasic(), "unexpected array copy kind");
1019 Node* membar_after = ac->proj_out(TypeFunc::Control)->unique_ctrl_out();
1020 disconnect_projections(ac, _igvn);
1021 assert(alloc->in(0)->is_Proj() && alloc->in(0)->in(0)->Opcode() == Op_MemBarCPUOrder, "mem barrier expected before alloca
1022 Node* membar_before = alloc->in(0)->in(0);
1023 disconnect_projections(membar_before->as_MemBar(), _igvn);
1024 if (membar_after->is_MemBar()) {
1025 disconnect_projections(membar_after->as_MemBar(), _igvn);
1026 }
1027 } else {
1028 eliminate_card_mark(n);
1029 }
1030 k -= (oc2 - use->outcnt());
1031 }
1032 } else if (use->is_ArrayCopy()) {
1033 // Disconnect ArrayCopy node
1034 ArrayCopyNode* ac = use->as_ArrayCopy();
1035 assert(ac->is_arraycopy_validated() ||
1036 ac->is_copyof_validated() ||
1037 ac->is_copyofrange_validated(), "unsupported");
1038 CallProjections callprojs;
1039 ac->extract_projections(&callprojs, true);
1040
1041 _igvn.replace_node(callprojs.fallthrough_ioproj, ac->in(TypeFunc::I_O));
1042 _igvn.replace_node(callprojs.fallthrough_memproj, ac->in(TypeFunc::Memory));
1043 _igvn.replace_node(callprojs.fallthrough_catchproj, ac->in(TypeFunc::Control));
1044
1045 // Set control to top. IGVN will remove the remaining projections
1046 ac->set_req(0, top());
1047 ac->replace_edge(res, top());
1048
1049 // Disconnect src right away: it can help find new
1050 // opportunities for allocation elimination
1051 Node* src = ac->in(ArrayCopyNode::Src);
1052 ac->replace_edge(src, top());
1053 // src can be top at this point if src and dest of the
1054 // arraycopy were the same
1055 if (src->outcnt() == 0 && !src->is_top()) {
1056 _igvn.remove_dead_node(src);
1057 }
1058
1059 _igvn._worklist.push(ac);
1060 } else {
1061 eliminate_card_mark(use);
1062 }
1063 j -= (oc1 - res->outcnt());
1064 }
1065 assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
1066 _igvn.remove_dead_node(res);
1067 }
1068
1069 //
1070 // Process other users of allocation's projections
1071 //
1072 if (_resproj != NULL && _resproj->outcnt() != 0) {
1073 // First disconnect stores captured by Initialize node.
1074 // If Initialize node is eliminated first in the following code,
1075 // it will kill such stores and DUIterator_Last will assert.
1076 for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax); j < jmax; j++) {
1077 Node *use = _resproj->fast_out(j);
1078 if (use->is_AddP()) {
1079 // raw memory addresses used only by the initialization
1080 _igvn.replace_node(use, C->top());
|
912 assert(mb->is_Initialize() || !mb->is_MemBar() ||
913 mb->req() <= MemBarNode::Precedent ||
914 mb->in(MemBarNode::Precedent) != n,
915 "MemBarVolatile should be eliminated for non-escaping object");
916 }
917 #endif
918 _igvn.replace_node(n, n->in(MemNode::Memory));
919 } else if (n->is_ArrayCopy()) {
920 // Disconnect ArrayCopy node
921 ArrayCopyNode* ac = n->as_ArrayCopy();
922 assert(ac->is_clonebasic(), "unexpected array copy kind");
923 Node* membar_after = ac->proj_out(TypeFunc::Control)->unique_ctrl_out();
924 disconnect_projections(ac, _igvn);
925 assert(alloc->in(0)->is_Proj() && alloc->in(0)->in(0)->Opcode() == Op_MemBarCPUOrder, "mem barrier expected before alloca
926 Node* membar_before = alloc->in(0)->in(0);
927 disconnect_projections(membar_before->as_MemBar(), _igvn);
928 if (membar_after->is_MemBar()) {
929 disconnect_projections(membar_after->as_MemBar(), _igvn);
930 }
931 } else {
932 eliminate_gc_barrier(n);
933 }
934 k -= (oc2 - use->outcnt());
935 }
936 } else if (use->is_ArrayCopy()) {
937 // Disconnect ArrayCopy node
938 ArrayCopyNode* ac = use->as_ArrayCopy();
939 assert(ac->is_arraycopy_validated() ||
940 ac->is_copyof_validated() ||
941 ac->is_copyofrange_validated(), "unsupported");
942 CallProjections callprojs;
943 ac->extract_projections(&callprojs, true);
944
945 _igvn.replace_node(callprojs.fallthrough_ioproj, ac->in(TypeFunc::I_O));
946 _igvn.replace_node(callprojs.fallthrough_memproj, ac->in(TypeFunc::Memory));
947 _igvn.replace_node(callprojs.fallthrough_catchproj, ac->in(TypeFunc::Control));
948
949 // Set control to top. IGVN will remove the remaining projections
950 ac->set_req(0, top());
951 ac->replace_edge(res, top());
952
953 // Disconnect src right away: it can help find new
954 // opportunities for allocation elimination
955 Node* src = ac->in(ArrayCopyNode::Src);
956 ac->replace_edge(src, top());
957 // src can be top at this point if src and dest of the
958 // arraycopy were the same
959 if (src->outcnt() == 0 && !src->is_top()) {
960 _igvn.remove_dead_node(src);
961 }
962
963 _igvn._worklist.push(ac);
964 } else {
965 eliminate_gc_barrier(use);
966 }
967 j -= (oc1 - res->outcnt());
968 }
969 assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
970 _igvn.remove_dead_node(res);
971 }
972
973 //
974 // Process other users of allocation's projections
975 //
976 if (_resproj != NULL && _resproj->outcnt() != 0) {
977 // First disconnect stores captured by Initialize node.
978 // If Initialize node is eliminated first in the following code,
979 // it will kill such stores and DUIterator_Last will assert.
980 for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax); j < jmax; j++) {
981 Node *use = _resproj->fast_out(j);
982 if (use->is_AddP()) {
983 // raw memory addresses used only by the initialization
984 _igvn.replace_node(use, C->top());
|
2780 break;
2781 case Node::Class_AllocateArray:
2782 expand_allocate_array(n->as_AllocateArray());
2783 break;
2784 case Node::Class_Lock:
2785 expand_lock_node(n->as_Lock());
2786 break;
2787 case Node::Class_Unlock:
2788 expand_unlock_node(n->as_Unlock());
2789 break;
2790 default:
2791 assert(false, "unknown node type in macro list");
2792 }
2793 assert(C->macro_count() < macro_count, "must have deleted a node from macro list");
2794 if (C->failing()) return true;
2795 }
2796
2797 _igvn.set_delay_transform(false);
2798 _igvn.optimize();
2799 if (C->failing()) return true;
2800 return false;
2801 }
|
2684 break;
2685 case Node::Class_AllocateArray:
2686 expand_allocate_array(n->as_AllocateArray());
2687 break;
2688 case Node::Class_Lock:
2689 expand_lock_node(n->as_Lock());
2690 break;
2691 case Node::Class_Unlock:
2692 expand_unlock_node(n->as_Unlock());
2693 break;
2694 default:
2695 assert(false, "unknown node type in macro list");
2696 }
2697 assert(C->macro_count() < macro_count, "must have deleted a node from macro list");
2698 if (C->failing()) return true;
2699 }
2700
2701 _igvn.set_delay_transform(false);
2702 _igvn.optimize();
2703 if (C->failing()) return true;
2704 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
2705 return bs->expand_macro_nodes(this);
2706 }
|