src/share/vm/opto/macro.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File
*** old/src/share/vm/opto/macro.cpp	Tue Jul 28 15:53:13 2015
--- new/src/share/vm/opto/macro.cpp	Tue Jul 28 15:53:12 2015

*** 322,343 **** --- 322,353 ---- // already know that the object is safe to eliminate. if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) { return in; } else if (in->is_Call()) { CallNode *call = in->as_Call(); - if (!call->may_modify(tinst, phase)) { ! mem = call->in(TypeFunc::Memory); ! assert(call->is_ArrayCopy(), "ArrayCopy is the only call node that doesn't make allocation escape"); + + if (call->as_ArrayCopy()->modifies(offset, offset, phase, false)) { + return in; + } } mem = in->in(TypeFunc::Memory); } else if (in->is_MemBar()) { + if (ArrayCopyNode::membar_for_arraycopy(tinst, in->as_MemBar(), phase)) { + assert(in->in(0)->is_Proj() && in->in(0)->in(0)->is_ArrayCopy(), "should be arraycopy"); + ArrayCopyNode* ac = in->in(0)->in(0)->as_ArrayCopy(); + assert(ac->is_clonebasic(), "Only basic clone is a non escaping clone"); + return ac; + } mem = in->in(TypeFunc::Memory); } else { assert(false, "unexpected projection"); } } else if (mem->is_Store()) { const TypePtr* atype = mem->as_Store()->adr_type(); ! int adr_idx = Compile::current()->get_alias_index(atype); ! int adr_idx = phase->C->get_alias_index(atype); if (adr_idx == alias_idx) { assert(atype->isa_oopptr(), "address type must be oopptr"); int adr_offset = atype->offset(); uint adr_iid = atype->is_oopptr()->instance_id(); // Array elements references have the same alias_idx
*** 371,381 **** --- 381,391 ---- } else { assert(mem->Opcode() == Op_EncodeISOArray, "sanity"); adr = mem->in(3); // Destination array } const TypePtr* atype = adr->bottom_type()->is_ptr(); ! int adr_idx = Compile::current()->get_alias_index(atype); ! int adr_idx = phase->C->get_alias_index(atype); if (adr_idx == alias_idx) { assert(false, "Object is not scalar replaceable if a LoadStore node access its field"); return NULL; } mem = mem->in(MemNode::Memory);
*** 384,399 **** --- 394,460 ---- } assert(mem != orig_mem, "dead memory loop"); } } + // Generate loads from source of the arraycopy for fields of + // destination needed at a deoptimization point + Node* PhaseMacroExpand::handle_arraycopy(ArrayCopyNode* ac, intptr_t offset, Node* ctl, BasicType ft, const Type *ftype, AllocateNode *alloc) { + BasicType bt = ft; + const Type *type = ftype; + if (ft == T_NARROWOOP) { + bt = T_OBJECT; + type = ftype->make_oopptr(); + } + Node* res = NULL; + if (ac->is_clonebasic()) { + Node* base = ac->in(ArrayCopyNode::Src)->in(AddPNode::Base); + Node* adr = _igvn.transform(new AddPNode(base, base, MakeConX(offset))); + const TypePtr* adr_type = _igvn.type(base)->is_ptr()->add_offset(offset); + Node* m = ac->in(TypeFunc::Memory); + while (m->is_MergeMem()) { + m = m->as_MergeMem()->memory_at(C->get_alias_index(adr_type)); + if (m->is_Proj() && m->in(0)->is_MemBar()) { + m = m->in(0)->in(TypeFunc::Memory); + } + } + res = LoadNode::make(_igvn, ctl, m, adr, adr_type, type, bt, MemNode::unordered, LoadNode::Pinned); + } else { + if (ac->modifies(offset, offset, &_igvn, true)) { + assert(ac->in(ArrayCopyNode::Dest) == alloc->result_cast(), "arraycopy destination should be allocation's result"); + uint shift = exact_log2(type2aelembytes(bt)); + Node* diff = _igvn.transform(new SubINode(ac->in(ArrayCopyNode::SrcPos), ac->in(ArrayCopyNode::DestPos))); + #ifdef _LP64 + diff = _igvn.transform(new ConvI2LNode(diff)); + #endif + diff = _igvn.transform(new LShiftXNode(diff, intcon(shift))); + + Node* off = _igvn.transform(new AddXNode(MakeConX(offset), diff)); + Node* base = ac->in(ArrayCopyNode::Src); + Node* adr = _igvn.transform(new AddPNode(base, base, off)); + const TypePtr* adr_type = _igvn.type(base)->is_ptr()->add_offset(offset); + Node* m = ac->in(TypeFunc::Memory); + res = LoadNode::make(_igvn, ctl, m, adr, adr_type, type, bt, MemNode::unordered, LoadNode::Pinned); + } + } + if (res != NULL) { + res = _igvn.transform(res); + if (ftype->isa_narrowoop()) { + // PhaseMacroExpand::scalar_replacement adds DecodeN nodes + res = _igvn.transform(new EncodePNode(res, ftype)); + } + return res; + } + return NULL; + } + // // Given a Memory Phi, compute a value Phi containing the values from stores // on the input paths. ! // Note: this function is recursive, its depth is limied by the "level" argument ! // Note: this function is recursive, its depth is limited by the "level" argument // Returns the computed Phi, or NULL if it cannot compute it. ! Node *PhaseMacroExpand::value_from_mem_phi(Node *mem, BasicType ft, const Type *phi_type, const TypeOopPtr *adr_t, AllocateNode *alloc, Node_Stack *value_phis, int level) { assert(mem->is_Phi(), "sanity"); int alias_idx = C->get_alias_index(adr_t); int offset = adr_t->offset(); int instance_id = adr_t->instance_id();
*** 456,465 **** --- 517,529 ---- values.at_put(j, val); } else if (val->Opcode() == Op_SCMemProj) { assert(val->in(0)->is_LoadStore() || val->in(0)->Opcode() == Op_EncodeISOArray, "sanity"); assert(false, "Object is not scalar replaceable if a LoadStore node access its field"); return NULL; + } else if (val->is_ArrayCopy()) { + Node* res = handle_arraycopy(val->as_ArrayCopy(), offset, val->in(0), ft, phi_type, alloc); + values.at_put(j, res); } else { #ifdef ASSERT val->dump(); assert(false, "unknown node on this path"); #endif
*** 477,487 **** --- 541,551 ---- } return phi; } // Search the last value stored into the object's field. ! Node *PhaseMacroExpand::value_from_mem(Node *sfpt_mem, Node *sfpt_ctl, BasicType ft, const Type *ftype, const TypeOopPtr *adr_t, AllocateNode *alloc) { assert(adr_t->is_known_instance_field(), "instance required"); int instance_id = adr_t->instance_id(); assert((uint)instance_id == alloc->_idx, "wrong allocation"); int alias_idx = C->get_alias_index(adr_t);
*** 536,545 **** --- 600,611 ---- if (unique_input != NULL && unique_input != top) { mem = unique_input; } else { done = true; } + } else if (mem->is_ArrayCopy()) { + done = true; } else { assert(false, "unexpected node"); } } if (mem != NULL) {
*** 560,569 **** --- 626,642 ---- Node* n = value_phis.node(); _igvn.replace_node(n, C->top()); value_phis.pop(); } } + } else if (mem->is_ArrayCopy()) { + Node* ctl = mem->in(0); + if (sfpt_ctl->is_Proj() && sfpt_ctl->as_Proj()->is_uncommon_trap_proj(Deoptimization::Reason_none)) { + // pin the loads in the uncommon trap path + ctl = sfpt_ctl; + } + return handle_arraycopy(mem->as_ArrayCopy(), offset, ctl, ft, ftype, alloc); } } // Something go wrong. return NULL; }
*** 736,745 **** --- 809,819 ---- // Process the safepoint uses // while (safepoints.length() > 0) { SafePointNode* sfpt = safepoints.pop(); Node* mem = sfpt->memory(); + Node* ctl = sfpt->control(); assert(sfpt->jvms() != NULL, "missed JVMS"); // Fields of scalar objs are referenced only at the end // of regular debuginfo at the last (youngest) JVMS. // Record relative start index. uint first_ind = (sfpt->req() - sfpt->jvms()->scloff());
*** 787,797 **** --- 861,871 ---- field_type = Type::get_const_basic_type(basic_elem_type); } const TypeOopPtr *field_addr_type = res_type->add_offset(offset)->isa_oopptr(); ! Node *field_val = value_from_mem(mem, ctl, basic_elem_type, field_type, field_addr_type, alloc); if (field_val == NULL) { // We weren't able to find a value for this field, // give up on eliminating this allocation. // Remove any extra entries we added to the safepoint.

src/share/vm/opto/macro.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File