src/share/vm/opto/escape.cpp

*** 61,77 ****
  bool ConnectionGraph::has_candidates(Compile *C) {
    // EA brings benefits only when the code has allocations and/or locks which
    // are represented by ideal Macro nodes.
    int cnt = C->macro_count();
!   for( int i=0; i < cnt; i++ ) {
      Node *n = C->macro_node(i);
!     if ( n->is_Allocate() )
        return true;
!     if( n->is_Lock() ) {
        Node* obj = n->as_Lock()->obj_node()->uncast();
!       if( !(obj->is_Parm() || obj->is_Con()) )
          return true;
      }
    }
    return false;
  }

--- 61,81 ----
  bool ConnectionGraph::has_candidates(Compile *C) {
    // EA brings benefits only when the code has allocations and/or locks which
    // are represented by ideal Macro nodes.
    int cnt = C->macro_count();
!   for (int i=0; i < cnt; i++) {
      Node *n = C->macro_node(i);
!     if (n->is_Allocate())
        return true;
!     if (n->is_Lock()) {
        Node* obj = n->as_Lock()->obj_node()->uncast();
!       if (!(obj->is_Parm() || obj->is_Con()))
!         return true;
!     }
!     if (n->is_CallStaticJava() &&
!         n->as_CallStaticJava()->is_autoboxing()) {
        return true;
      }
    }
    return false;
  }

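The candidate test above leans on a boxing-call predicate on CallStaticJavaNode whose definition lives outside this file. A minimal sketch of what it presumably looks like, assuming the node simply delegates to the ciMethod-level is_boxing_method() check used later in this patch (the delegation and the is_macro() guard are assumptions):

    // Hypothetical sketch (callnode.hpp): a boxing call is a macro node
    // whose known target the ciMethod layer classifies as a box-class
    // valueOf() method.
    bool CallStaticJavaNode::is_autoboxing() const {
      return is_macro() && (method() != NULL) &&
             method()->is_boxing_method();
    }
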
*** 113,123 ****
    DEBUG_ONLY( GrowableArray<Node*> addp_worklist; )
    {
    Compile::TracePhase t3("connectionGraph", &Phase::_t_connectionGraph, true);

    // 1. Populate Connection Graph (CG) with PointsTo nodes.
!   ideal_nodes.map(C->unique(), NULL);  // preallocate space
    // Initialize worklist
    if (C->root() != NULL) {
      ideal_nodes.push(C->root());
    }
    for( uint next = 0; next < ideal_nodes.size(); ++next ) {
--- 117,127 ----
    DEBUG_ONLY( GrowableArray<Node*> addp_worklist; )
    {
    Compile::TracePhase t3("connectionGraph", &Phase::_t_connectionGraph, true);

    // 1. Populate Connection Graph (CG) with PointsTo nodes.
!   ideal_nodes.map(C->live_nodes(), NULL);  // preallocate space
    // Initialize worklist
    if (C->root() != NULL) {
      ideal_nodes.push(C->root());
    }
    for( uint next = 0; next < ideal_nodes.size(); ++next ) {

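The resizing above replaces C->unique(), which counts every node index ever handed out (dead nodes included), with C->live_nodes(), the number of nodes still in the graph, so the preallocation tracks what the worklist can actually hold. Roughly, and assuming the accessors in compile.hpp behave as their names suggest:

    // Sketch of the relationship (not the real declarations):
    // live_nodes() subtracts nodes already known dead, so it is
    // never larger than unique().
    uint Compile::unique()     const { return _unique; }
    uint Compile::live_nodes() const { return _unique - _dead_node_count; }
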
*** 150,161 ****
      } else if (n->is_MemBarStoreStore()) {
        // Collect all MemBarStoreStore nodes so that depending on the
        // escape status of the associated Allocate node some of them
        // may be eliminated.
        storestore_worklist.append(n);
  #ifdef ASSERT
!     } else if(n->is_AddP()) {
        // Collect address nodes for graph verification.
        addp_worklist.append(n);
  #endif
      }
      for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
--- 154,168 ----
      } else if (n->is_MemBarStoreStore()) {
        // Collect all MemBarStoreStore nodes so that depending on the
        // escape status of the associated Allocate node some of them
        // may be eliminated.
        storestore_worklist.append(n);
+     } else if (n->is_MemBar() && (n->Opcode() == Op_MemBarRelease) &&
+                (n->req() > MemBarNode::Precedent)) {
+       record_for_optimizer(n);
  #ifdef ASSERT
!     } else if (n->is_AddP()) {
        // Collect address nodes for graph verification.
        addp_worklist.append(n);
  #endif
      }
      for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {

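The new arm records MemBarRelease nodes that carry an optional precedence edge so the optimizer can revisit them. In memnode.hpp, MemBarNode::Precedent is the input slot after the fixed barrier inputs, and the node is constructed with that slot only when a precedent edge is supplied, so req() > MemBarNode::Precedent is exactly the "has a precedent edge" test. A hedged sketch of the shape being matched (the helper name is hypothetical):

    // The precedent edge is not a memory use; it merely pins the barrier
    // to another node (e.g. the oop of a final-field store).
    static bool has_precedent_edge(const MemBarNode* mb) {  // hypothetical
      return mb->req() > MemBarNode::Precedent;
    }
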
*** 204,215 ****
    // scalar replaceable allocations on alloc_worklist for processing
    // in split_unique_types().
    int non_escaped_length = non_escaped_worklist.length();
    for (int next = 0; next < non_escaped_length; next++) {
      JavaObjectNode* ptn = non_escaped_worklist.at(next);
!     if (ptn->escape_state() == PointsToNode::NoEscape &&
!         ptn->scalar_replaceable()) {
        adjust_scalar_replaceable_state(ptn);
        if (ptn->scalar_replaceable()) {
          alloc_worklist.append(ptn->ideal_node());
        }
      }
--- 211,229 ----
    // scalar replaceable allocations on alloc_worklist for processing
    // in split_unique_types().
    int non_escaped_length = non_escaped_worklist.length();
    for (int next = 0; next < non_escaped_length; next++) {
      JavaObjectNode* ptn = non_escaped_worklist.at(next);
!     bool noescape = (ptn->escape_state() == PointsToNode::NoEscape);
!     Node* n = ptn->ideal_node();
!     if (n->is_Allocate()) {
!       n->as_Allocate()->_is_non_escaping = noescape;
!     }
!     if (n->is_CallStaticJava()) {
!       n->as_CallStaticJava()->_is_non_escaping = noescape;
!     }
!     if (noescape && ptn->scalar_replaceable()) {
        adjust_scalar_replaceable_state(ptn);
        if (ptn->scalar_replaceable()) {
          alloc_worklist.append(ptn->ideal_node());
        }
      }

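This hunk publishes the escape-analysis verdict onto the ideal nodes themselves, so later phases (lock elimination, macro expansion) can query it without re-running EA. A sketch of the flag declarations this assumes, following the pattern of the existing _is_scalar_replaceable flag; the exact placement in callnode.hpp is an assumption:

    // Assumed fields written by ConnectionGraph above:
    class AllocateNode : public CallNode {
    public:
      bool _is_scalar_replaceable;  // result of Escape Analysis
      bool _is_non_escaping;        // true when EA proves NoEscape
      // ...
    };
    class CallStaticJavaNode : public CallJavaNode {
    public:
      bool _is_non_escaping;        // same verdict for boxing calls
      // ...
    };
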
*** 328,339 ****
        return; // Skip uncommon traps
      }
      // Don't mark as processed since call's arguments have to be processed.
      delayed_worklist->push(n);
      // Check if a call returns an object.
!     if (n->as_Call()->returns_pointer() &&
!         n->as_Call()->proj_out(TypeFunc::Parms) != NULL) {
        add_call_node(n->as_Call());
      }
    }
    return;
  }
--- 342,355 ----
        return; // Skip uncommon traps
      }
      // Don't mark as processed since call's arguments have to be processed.
      delayed_worklist->push(n);
      // Check if a call returns an object.
!     if ((n->as_Call()->returns_pointer() &&
!          n->as_Call()->proj_out(TypeFunc::Parms) != NULL) ||
!         (n->is_CallStaticJava() &&
!          n->as_CallStaticJava()->is_autoboxing())) {
        add_call_node(n->as_Call());
      }
    }
    return;
  }

*** 385,396 ****
      case Op_ConP:
      case Op_ConN:
      case Op_ConNKlass: {
        // assume all oop constants globally escape except for null
        PointsToNode::EscapeState es;
!       if (igvn->type(n) == TypePtr::NULL_PTR ||
!           igvn->type(n) == TypeNarrowOop::NULL_PTR) {
          es = PointsToNode::NoEscape;
        } else {
          es = PointsToNode::GlobalEscape;
        }
        add_java_object(n, es);
--- 401,412 ----
      case Op_ConP:
      case Op_ConN:
      case Op_ConNKlass: {
        // assume all oop constants globally escape except for null
        PointsToNode::EscapeState es;
!       const Type* t = igvn->type(n);
!       if (t == TypePtr::NULL_PTR || t == TypeNarrowOop::NULL_PTR) {
          es = PointsToNode::NoEscape;
        } else {
          es = PointsToNode::GlobalEscape;
        }
        add_java_object(n, es);

*** 795,804 ****
--- 811,823 ----
        const char* name = call->as_CallStaticJava()->_name;
        assert(strncmp(name, "_multianewarray", 15) == 0, "TODO: add failed case check");
        // Returns a newly allocated unescaped object.
        add_java_object(call, PointsToNode::NoEscape);
        ptnode_adr(call_idx)->set_scalar_replaceable(false);
+     } else if (meth->is_boxing_method()) {
+       // Returns boxing object
+       add_java_object(call, PointsToNode::NoEscape);
      } else {
        BCEscapeAnalyzer* call_analyzer = meth->get_bcea();
        call_analyzer->copy_dependencies(_compile->dependencies());
        if (call_analyzer->is_return_allocated()) {
          // Returns a newly allocated unescaped object, simply

*** 941,950 ****
--- 960,972 ----
  #ifdef ASSERT
        const char* name = call->as_CallStaticJava()->_name;
        assert((name == NULL || strcmp(name, "uncommon_trap") != 0), "normal calls only");
  #endif
        ciMethod* meth = call->as_CallJava()->method();
+       if ((meth != NULL) && meth->is_boxing_method()) {
+         break; // Boxing methods do not modify any oops.
+       }
        BCEscapeAnalyzer* call_analyzer = (meth !=NULL) ? meth->get_bcea() : NULL;
        // fall-through if not a Java method or no analyzer information
        if (call_analyzer != NULL) {
          PointsToNode* call_ptn = ptnode_adr(call->_idx);
          const TypeTuple* d = call->tf()->domain();

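The early break is justified because a boxing method only reads its primitive argument and either returns a cached instance or allocates a fresh box; neither path stores into a pre-existing object, so the call's arguments cannot escape through it. For reference, the Java-level shape this relies on, sketched from java.lang.Integer:

    //   public static Integer valueOf(int i) {
    //     if (i >= IntegerCache.low && i <= IntegerCache.high)
    //       return IntegerCache.cache[i + (-IntegerCache.low)]; // cache hit
    //     return new Integer(i);  // fresh, unescaped allocation
    //   }
    // No pre-existing oop is written on either path.
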
*** 2742,2751 ****
--- 2764,2778 ----
          if (alloc->is_Allocate()) {
            // Set the scalar_replaceable flag for allocation
            // so it could be eliminated if it has no uses.
            alloc->as_Allocate()->_is_scalar_replaceable = true;
          }
+         if (alloc->is_CallStaticJava()) {
+           // Set the scalar_replaceable flag for autobox
+           // so it could be eliminated if it has no uses.
+           alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
+         }
          continue;
        }
        if (!n->is_CheckCastPP()) { // not unique CheckCastPP.
          assert(!alloc->is_Allocate(), "allocation should have unique type");
          continue;

*** 2780,2789 ****
--- 2807,2821 ----
          if (alloc->is_Allocate()) {
            // Set the scalar_replaceable flag for allocation
            // so it could be eliminated.
            alloc->as_Allocate()->_is_scalar_replaceable = true;
          }
+         if (alloc->is_CallStaticJava()) {
+           // Set the scalar_replaceable flag for autobox
+           // so it could be eliminated.
+           alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
+         }
          set_escape_state(ptnode_adr(n->_idx), es); // CheckCastPP escape state
          // in order for an object to be scalar-replaceable, it must be:
          //   - a direct allocation (not a call returning an object)
          //   - non-escaping
          //   - eligible to be a unique type

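Both of the preceding hunks mirror the existing Allocate handling: once the object is known to be non-escaping and has been given a unique instance type, the boxing call is flagged _is_scalar_replaceable so a later macro-elimination pass may remove it outright. A hedged sketch of how such a consumer might gate on the EA flags (the function name is hypothetical):

    // Eliminate only boxes that EA proved non-escaping and that
    // split_unique_types() marked scalar replaceable.
    static bool can_eliminate_boxing(CallStaticJavaNode* box) {  // hypothetical
      return box->_is_non_escaping && box->_is_scalar_replaceable;
    }
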
*** 2909,2919 ****
--- 2941,2953 ----
        Node *use = n->fast_out(i);
        if(use->is_Mem() && use->in(MemNode::Address) == n) {
          // Load/store to instance's field
          memnode_worklist.append_if_missing(use);
        } else if (use->is_MemBar()) {
+         if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
            memnode_worklist.append_if_missing(use);
+         }
        } else if (use->is_AddP() && use->outcnt() > 0) { // No dead nodes
          Node* addp2 = find_second_addp(use, n);
          if (addp2 != NULL) {
            alloc_worklist.append_if_missing(addp2);
          }

*** 3026,3036 ****
--- 3060,3072 ----
        } else if (use->is_Mem() && use->in(MemNode::Memory) == n) {
          if (use->Opcode() == Op_StoreCM) // Ignore cardmark stores
            continue;
          memnode_worklist.append_if_missing(use);
        } else if (use->is_MemBar()) {
+         if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
            memnode_worklist.append_if_missing(use);
+         }
  #ifdef ASSERT
        } else if(use->is_Mem()) {
          assert(use->in(MemNode::Memory) != n, "EA: missing memory path");
        } else if (use->is_MergeMem()) {
          assert(_mergemem_worklist.contains(use->as_MergeMem()), "EA: missing MergeMem node in the worklist");

*** 3262,3272 ****
    for (int i = 0; i < ptnodes_length; i++) {
      PointsToNode *ptn = ptnodes_worklist.at(i);
      if (ptn == NULL || !ptn->is_JavaObject())
        continue;
      PointsToNode::EscapeState es = ptn->escape_state();
!     if (ptn->ideal_node()->is_Allocate() && (es == PointsToNode::NoEscape || Verbose)) {
        if (first) {
          tty->cr();
          tty->print("======== Connection graph for ");
          _compile->method()->print_short_name();
          tty->cr();
--- 3298,3313 ----
    for (int i = 0; i < ptnodes_length; i++) {
      PointsToNode *ptn = ptnodes_worklist.at(i);
      if (ptn == NULL || !ptn->is_JavaObject())
        continue;
      PointsToNode::EscapeState es = ptn->escape_state();
!     if ((es != PointsToNode::NoEscape) && !Verbose) {
!       continue;
!     }
!     Node* n = ptn->ideal_node();
!     if (n->is_Allocate() || (n->is_CallStaticJava() &&
!         n->as_CallStaticJava()->is_autoboxing())) {
        if (first) {
          tty->cr();
          tty->print("======== Connection graph for ");
          _compile->method()->print_short_name();
          tty->cr();