349
350 if (n->is_Call()) {
351 // Arguments to allocation and locking don't escape.
352 if (n->is_AbstractLock()) {
353 // Put Lock and Unlock nodes on IGVN worklist to process them during
354 // first IGVN optimization when escape information is still available.
355 record_for_optimizer(n);
356 } else if (n->is_Allocate()) {
357 add_call_node(n->as_Call());
358 record_for_optimizer(n);
359 } else {
360 if (n->is_CallStaticJava()) {
361 const char* name = n->as_CallStaticJava()->_name;
362 if (name != NULL && strcmp(name, "uncommon_trap") == 0)
363 return; // Skip uncommon traps
364 }
365 // Don't mark as processed since call's arguments have to be processed.
366 delayed_worklist->push(n);
367 // Check if a call returns an object.
368 if ((n->as_Call()->returns_pointer() &&
369 n->as_Call()->proj_out(TypeFunc::Parms) != NULL) ||
370 (n->is_CallStaticJava() &&
371 n->as_CallStaticJava()->is_boxing_method())) {
372 add_call_node(n->as_Call());
373 }
374 }
375 return;
376 }
377 // Put this check here to process call arguments since some call nodes
378 // point to phantom_obj.
379 if (n_ptn == phantom_obj || n_ptn == null_obj)
380 return; // Skip predefined nodes.
381
382 int opcode = n->Opcode();
383 switch (opcode) {
384 case Op_AddP: {
385 Node* base = get_addp_base(n);
386 PointsToNode* ptn_base = ptnode_adr(base->_idx);
387 // Field nodes are created for all field types. They are used in
388 // adjust_scalar_replaceable_state() and split_unique_types().
389 // Note, non-oop fields will have only base edges in Connection
2657 assert(idx == alias_idx, "Following Phi nodes should be on the same memory slice");
2658 } else {
2659 use->dump();
2660 assert(false, "should not be here");
2661 #endif
2662 }
2663 }
2664 }
2665
2666 //
2667 // Search the memory chain of "orig_mem" to find a MemNode whose address
2668 // type corresponds to the specified alias index.
2669 //
2670 Node* ConnectionGraph::find_inst_mem(Node *orig_mem, int alias_idx, GrowableArray<PhiNode *> &orig_phis) {
2671 if (orig_mem == NULL)
2672 return orig_mem;
2673 Compile* C = _compile;
2674 PhaseGVN* igvn = _igvn;
2675 const TypeOopPtr *toop = C->get_adr_type(alias_idx)->isa_oopptr();
2676 bool is_instance = (toop != NULL) && toop->is_known_instance();
2677 Node *start_mem = C->start()->proj_out(TypeFunc::Memory);
2678 Node *prev = NULL;
2679 Node *result = orig_mem;
2680 while (prev != result) {
2681 prev = result;
2682 if (result == start_mem)
2683 break; // hit one of our sentinels
2684 if (result->is_Mem()) {
2685 const Type *at = igvn->type(result->in(MemNode::Address));
2686 if (at == Type::TOP)
2687 break; // Dead
2688 assert (at->isa_ptr() != NULL, "pointer type required.");
2689 int idx = C->get_alias_index(at->is_ptr());
2690 if (idx == alias_idx)
2691 break; // Found
2692 if (!is_instance && (at->isa_oopptr() == NULL ||
2693 !at->is_oopptr()->is_known_instance())) {
2694 break; // Do not skip store to general memory slice.
2695 }
2696 result = result->in(MemNode::Memory);
2697 }
3011 PointsToNode* tgt = e.get();
3012 if (tgt->is_Arraycopy()) {
3013 continue;
3014 }
3015 Node* use = tgt->ideal_node();
3016 assert(tgt->is_Field() && use->is_AddP(),
3017 "only AddP nodes are Field edges in CG");
3018 if (use->outcnt() > 0) { // Don't process dead nodes
3019 Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
3020 if (addp2 != NULL) {
3021 assert(alloc->is_AllocateArray(),"array allocation was expected");
3022 alloc_worklist.append_if_missing(addp2);
3023 }
3024 alloc_worklist.append_if_missing(use);
3025 }
3026 }
3027
3028 // An allocation may have an Initialize which has raw stores. Scan
3029 // the users of the raw allocation result and push AddP users
3030 // on alloc_worklist.
3031 Node *raw_result = alloc->proj_out(TypeFunc::Parms);
3032 assert (raw_result != NULL, "must have an allocation result");
3033 for (DUIterator_Fast imax, i = raw_result->fast_outs(imax); i < imax; i++) {
3034 Node *use = raw_result->fast_out(i);
3035 if (use->is_AddP() && use->outcnt() > 0) { // Don't process dead nodes
3036 Node* addp2 = find_second_addp(use, raw_result);
3037 if (addp2 != NULL) {
3038 assert(alloc->is_AllocateArray(),"array allocation was expected");
3039 alloc_worklist.append_if_missing(addp2);
3040 }
3041 alloc_worklist.append_if_missing(use);
3042 } else if (use->is_MemBar()) {
3043 memnode_worklist.append_if_missing(use);
3044 }
3045 }
3046 }
3047 } else if (n->is_AddP()) {
3048 JavaObjectNode* jobj = unique_java_object(get_addp_base(n));
3049 if (jobj == NULL || jobj == phantom_obj) {
3050 #ifdef ASSERT
3051 ptnode_adr(get_addp_base(n)->_idx)->dump();
3202 }
3203 }
3204
3205 // New alias types were created in split_AddP().
3206 uint new_index_end = (uint) _compile->num_alias_types();
3207 assert(unique_old == _compile->unique(), "there should be no new ideal nodes after Phase 1");
3208
3209 // Phase 2: Process MemNode's from memnode_worklist. compute new address type and
3210 // compute new values for Memory inputs (the Memory inputs are not
3211 // actually updated until phase 4.)
3212 if (memnode_worklist.length() == 0)
3213 return; // nothing to do
3214 while (memnode_worklist.length() != 0) {
3215 Node *n = memnode_worklist.pop();
3216 if (visited.test_set(n->_idx))
3217 continue;
3218 if (n->is_Phi() || n->is_ClearArray()) {
3219 // we don't need to do anything, but the users must be pushed
3220 } else if (n->is_MemBar()) { // Initialize, MemBar nodes
3221 // we don't need to do anything, but the users must be pushed
3222 n = n->as_MemBar()->proj_out(TypeFunc::Memory);
3223 if (n == NULL)
3224 continue;
3225 } else if (n->Opcode() == Op_StrCompressedCopy ||
3226 n->Opcode() == Op_EncodeISOArray) {
3227 // get the memory projection
3228 n = n->find_out_with(Op_SCMemProj);
3229 assert(n->Opcode() == Op_SCMemProj, "memory projection required");
3230 } else {
3231 assert(n->is_Mem(), "memory node required.");
3232 Node *addr = n->in(MemNode::Address);
3233 const Type *addr_t = igvn->type(addr);
3234 if (addr_t == Type::TOP)
3235 continue;
3236 assert (addr_t->isa_ptr() != NULL, "pointer type required.");
3237 int alias_idx = _compile->get_alias_index(addr_t->is_ptr());
3238 assert ((uint)alias_idx < new_index_end, "wrong alias index");
3239 Node *mem = find_inst_mem(n->in(MemNode::Memory), alias_idx, orig_phis);
3240 if (_compile->failing()) {
3241 return;
3242 }
|
349
350 if (n->is_Call()) {
351 // Arguments to allocation and locking don't escape.
352 if (n->is_AbstractLock()) {
353 // Put Lock and Unlock nodes on IGVN worklist to process them during
354 // first IGVN optimization when escape information is still available.
355 record_for_optimizer(n);
356 } else if (n->is_Allocate()) {
357 add_call_node(n->as_Call());
358 record_for_optimizer(n);
359 } else {
360 if (n->is_CallStaticJava()) {
361 const char* name = n->as_CallStaticJava()->_name;
362 if (name != NULL && strcmp(name, "uncommon_trap") == 0)
363 return; // Skip uncommon traps
364 }
365 // Don't mark as processed since call's arguments have to be processed.
366 delayed_worklist->push(n);
367 // Check if a call returns an object.
368 if ((n->as_Call()->returns_pointer() &&
369 n->as_Call()->proj_out_or_null(TypeFunc::Parms) != NULL) ||
370 (n->is_CallStaticJava() &&
371 n->as_CallStaticJava()->is_boxing_method())) {
372 add_call_node(n->as_Call());
373 }
374 }
375 return;
376 }
377 // Put this check here to process call arguments since some call nodes
378 // point to phantom_obj.
379 if (n_ptn == phantom_obj || n_ptn == null_obj)
380 return; // Skip predefined nodes.
381
382 int opcode = n->Opcode();
383 switch (opcode) {
384 case Op_AddP: {
385 Node* base = get_addp_base(n);
386 PointsToNode* ptn_base = ptnode_adr(base->_idx);
387 // Field nodes are created for all field types. They are used in
388 // adjust_scalar_replaceable_state() and split_unique_types().
389 // Note, non-oop fields will have only base edges in Connection
2657 assert(idx == alias_idx, "Following Phi nodes should be on the same memory slice");
2658 } else {
2659 use->dump();
2660 assert(false, "should not be here");
2661 #endif
2662 }
2663 }
2664 }
2665
2666 //
2667 // Search the memory chain of "orig_mem" to find a MemNode whose address
2668 // type corresponds to the specified alias index.
2669 //
2670 Node* ConnectionGraph::find_inst_mem(Node *orig_mem, int alias_idx, GrowableArray<PhiNode *> &orig_phis) {
2671 if (orig_mem == NULL)
2672 return orig_mem;
2673 Compile* C = _compile;
2674 PhaseGVN* igvn = _igvn;
2675 const TypeOopPtr *toop = C->get_adr_type(alias_idx)->isa_oopptr();
2676 bool is_instance = (toop != NULL) && toop->is_known_instance();
2677 Node *start_mem = C->start()->proj_out_or_null(TypeFunc::Memory);
2678 Node *prev = NULL;
2679 Node *result = orig_mem;
2680 while (prev != result) {
2681 prev = result;
2682 if (result == start_mem)
2683 break; // hit one of our sentinels
2684 if (result->is_Mem()) {
2685 const Type *at = igvn->type(result->in(MemNode::Address));
2686 if (at == Type::TOP)
2687 break; // Dead
2688 assert (at->isa_ptr() != NULL, "pointer type required.");
2689 int idx = C->get_alias_index(at->is_ptr());
2690 if (idx == alias_idx)
2691 break; // Found
2692 if (!is_instance && (at->isa_oopptr() == NULL ||
2693 !at->is_oopptr()->is_known_instance())) {
2694 break; // Do not skip store to general memory slice.
2695 }
2696 result = result->in(MemNode::Memory);
2697 }
3011 PointsToNode* tgt = e.get();
3012 if (tgt->is_Arraycopy()) {
3013 continue;
3014 }
3015 Node* use = tgt->ideal_node();
3016 assert(tgt->is_Field() && use->is_AddP(),
3017 "only AddP nodes are Field edges in CG");
3018 if (use->outcnt() > 0) { // Don't process dead nodes
3019 Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
3020 if (addp2 != NULL) {
3021 assert(alloc->is_AllocateArray(),"array allocation was expected");
3022 alloc_worklist.append_if_missing(addp2);
3023 }
3024 alloc_worklist.append_if_missing(use);
3025 }
3026 }
3027
3028 // An allocation may have an Initialize which has raw stores. Scan
3029 // the users of the raw allocation result and push AddP users
3030 // on alloc_worklist.
3031 Node *raw_result = alloc->proj_out_or_null(TypeFunc::Parms);
3032 assert (raw_result != NULL, "must have an allocation result");
3033 for (DUIterator_Fast imax, i = raw_result->fast_outs(imax); i < imax; i++) {
3034 Node *use = raw_result->fast_out(i);
3035 if (use->is_AddP() && use->outcnt() > 0) { // Don't process dead nodes
3036 Node* addp2 = find_second_addp(use, raw_result);
3037 if (addp2 != NULL) {
3038 assert(alloc->is_AllocateArray(),"array allocation was expected");
3039 alloc_worklist.append_if_missing(addp2);
3040 }
3041 alloc_worklist.append_if_missing(use);
3042 } else if (use->is_MemBar()) {
3043 memnode_worklist.append_if_missing(use);
3044 }
3045 }
3046 }
3047 } else if (n->is_AddP()) {
3048 JavaObjectNode* jobj = unique_java_object(get_addp_base(n));
3049 if (jobj == NULL || jobj == phantom_obj) {
3050 #ifdef ASSERT
3051 ptnode_adr(get_addp_base(n)->_idx)->dump();
3202 }
3203 }
3204
3205 // New alias types were created in split_AddP().
3206 uint new_index_end = (uint) _compile->num_alias_types();
3207 assert(unique_old == _compile->unique(), "there should be no new ideal nodes after Phase 1");
3208
3209 // Phase 2: Process MemNode's from memnode_worklist. compute new address type and
3210 // compute new values for Memory inputs (the Memory inputs are not
3211 // actually updated until phase 4.)
3212 if (memnode_worklist.length() == 0)
3213 return; // nothing to do
3214 while (memnode_worklist.length() != 0) {
3215 Node *n = memnode_worklist.pop();
3216 if (visited.test_set(n->_idx))
3217 continue;
3218 if (n->is_Phi() || n->is_ClearArray()) {
3219 // we don't need to do anything, but the users must be pushed
3220 } else if (n->is_MemBar()) { // Initialize, MemBar nodes
3221 // we don't need to do anything, but the users must be pushed
3222 n = n->as_MemBar()->proj_out_or_null(TypeFunc::Memory);
3223 if (n == NULL)
3224 continue;
3225 } else if (n->Opcode() == Op_StrCompressedCopy ||
3226 n->Opcode() == Op_EncodeISOArray) {
3227 // get the memory projection
3228 n = n->find_out_with(Op_SCMemProj);
3229 assert(n->Opcode() == Op_SCMemProj, "memory projection required");
3230 } else {
3231 assert(n->is_Mem(), "memory node required.");
3232 Node *addr = n->in(MemNode::Address);
3233 const Type *addr_t = igvn->type(addr);
3234 if (addr_t == Type::TOP)
3235 continue;
3236 assert (addr_t->isa_ptr() != NULL, "pointer type required.");
3237 int alias_idx = _compile->get_alias_index(addr_t->is_ptr());
3238 assert ((uint)alias_idx < new_index_end, "wrong alias index");
3239 Node *mem = find_inst_mem(n->in(MemNode::Memory), alias_idx, orig_phis);
3240 if (_compile->failing()) {
3241 return;
3242 }
|