
src/share/vm/opto/memnode.cpp

Old:

2399   if (p)  return (p == NodeSentinel) ? NULL : p;
2400 
2401   Node* mem     = in(MemNode::Memory);
2402   Node* address = in(MemNode::Address);
2403   // Back-to-back stores to same address?  Fold em up.  Generally
2404   // unsafe if I have intervening uses...  Also disallowed for StoreCM
2405   // since they must follow each StoreP operation.  Redundant StoreCMs
2406   // are eliminated just before matching in final_graph_reshape.
2407   {
2408     Node* st = mem;
2409     // If Store 'st' has more than one use, we cannot fold 'st' away.
2410     // For example, 'st' might be the final state at a conditional
2411     // return.  Or, 'st' might be used by some node which is live at
2412     // the same time 'st' is live, which might be unschedulable.  So,
2413     // require exactly ONE user until such time as we clone 'mem' for
2414     // each of 'mem's uses (thus making the exactly-1-user-rule hold
2415     // true).
2416     while (st->is_Store() && st->outcnt() == 1 && st->Opcode() != Op_StoreCM) {
2417       // Looking at a dead closed cycle of memory?
2418       assert(st != st->in(MemNode::Memory), "dead loop in StoreNode::Ideal");
2419       assert(Opcode() == st->Opcode() ||
2420              st->Opcode() == Op_StoreVector ||
2421              Opcode() == Op_StoreVector ||
2422              phase->C->get_alias_index(adr_type()) == Compile::AliasIdxRaw ||
2423              (Opcode() == Op_StoreL && st->Opcode() == Op_StoreI) || // expanded ClearArrayNode
2424              (is_mismatched_access() || st->as_Store()->is_mismatched_access()),
2425              "no mismatched stores, except on raw memory: %s %s", NodeClassNames[Opcode()], NodeClassNames[st->Opcode()]);
2426 
2427       if (st->in(MemNode::Address)->eqv_uncast(address) &&
2428           st->as_Store()->memory_size() <= this->memory_size()) {
2429         Node* use = st->raw_out(0);
2430         phase->igvn_rehash_node_delayed(use);
2431         if (can_reshape) {
2432           use->set_req_X(MemNode::Memory, st->in(MemNode::Memory), phase->is_IterGVN());
2433         } else {
2434           // It's OK to do this in the parser, since DU info is always accurate,
2435           // and the parser always refers to nodes via SafePointNode maps.
2436           use->set_req(MemNode::Memory, st->in(MemNode::Memory));
2437         }
2438         return this;
2439       }
2440       st = st->in(MemNode::Memory);
2441     }
2442   }
2443 
2444 
2445   // Capture an unaliased, unconditional, simple store into an initializer.
2446   // Or, if it is independent of the allocation, hoist it above the allocation.
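For context, a minimal self-contained sketch (not taken from the webrev) of the rewiring the loop above performs. MiniStore and fold_adjacent are illustrative names, not C2 APIs; the model flattens C2's memory graph into a singly linked chain of stores.

#include <cstdio>

// Toy model of StoreNode::Ideal's back-to-back store folding: each node
// records the previous memory state, the address written, the access
// width, and how many users its memory state has.
struct MiniStore {
  const char* addr;    // address written (compared by identity here)
  int         width;   // bytes written; stands in for memory_size()
  MiniStore*  mem;     // previous memory state, like in(MemNode::Memory)
  int         outcnt;  // users of this store's memory state
};

// Walk up the chain from 'self'; a single-use earlier store to the same
// address that 'self' fully overwrites is dead, so splice it out by
// rewiring its lone user past it (the use->set_req(...) step above).
static void fold_adjacent(MiniStore* self) {
  MiniStore* use = self;        // st's single user as we walk
  MiniStore* st  = self->mem;
  while (st != nullptr && st->outcnt == 1) {
    if (st->addr == self->addr && st->width <= self->width) {
      use->mem = st->mem;       // the earlier store is now unreachable
      std::printf("folded earlier store to %s\n", st->addr);
      return;
    }
    use = st;
    st  = st->mem;
  }
}

int main() {
  const char* f = "obj.f";
  MiniStore first  = {f, 4, nullptr, 1};  // obj.f = 1;  (becomes dead)
  MiniStore second = {f, 4, &first,  1};  // obj.f = 2;  (wins)
  fold_adjacent(&second);
  return 0;
}

The width test mirrors st->as_Store()->memory_size() <= this->memory_size(): the later store must cover at least the bytes the earlier one wrote, otherwise part of the earlier store would survive and it could not be dropped.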

New:

2399   if (p)  return (p == NodeSentinel) ? NULL : p;
2400 
2401   Node* mem     = in(MemNode::Memory);
2402   Node* address = in(MemNode::Address);
2403   // Back-to-back stores to same address?  Fold em up.  Generally
2404   // unsafe if I have intervening uses...  Also disallowed for StoreCM
2405   // since they must follow each StoreP operation.  Redundant StoreCMs
2406   // are eliminated just before matching in final_graph_reshape.
2407   {
2408     Node* st = mem;
2409     // If Store 'st' has more than one use, we cannot fold 'st' away.
2410     // For example, 'st' might be the final state at a conditional
2411     // return.  Or, 'st' might be used by some node which is live at
2412     // the same time 'st' is live, which might be unschedulable.  So,
2413     // require exactly ONE user until such time as we clone 'mem' for
2414     // each of 'mem's uses (thus making the exactly-1-user-rule hold
2415     // true).
2416     while (st->is_Store() && st->outcnt() == 1 && st->Opcode() != Op_StoreCM) {
2417       // Looking at a dead closed cycle of memory?
2418       assert(st != st->in(MemNode::Memory), "dead loop in StoreNode::Ideal");
2419       // TODO re-enable assert
2420       /*
2421       assert(Opcode() == st->Opcode() ||
2422              st->Opcode() == Op_StoreVector ||
2423              Opcode() == Op_StoreVector ||
2424              phase->C->get_alias_index(adr_type()) == Compile::AliasIdxRaw ||
2425              (Opcode() == Op_StoreL && st->Opcode() == Op_StoreI) || // expanded ClearArrayNode
2426              (is_mismatched_access() || st->as_Store()->is_mismatched_access()),
2427              "no mismatched stores, except on raw memory: %s %s", NodeClassNames[Opcode()], NodeClassNames[st->Opcode()]);
2428 */
2429       if (st->in(MemNode::Address)->eqv_uncast(address) &&
2430           st->as_Store()->memory_size() <= this->memory_size()) {
2431         Node* use = st->raw_out(0);
2432         phase->igvn_rehash_node_delayed(use);
2433         if (can_reshape) {
2434           use->set_req_X(MemNode::Memory, st->in(MemNode::Memory), phase->is_IterGVN());
2435         } else {
2436           // It's OK to do this in the parser, since DU info is always accurate,
2437           // and the parser always refers to nodes via SafePointNode maps.
2438           use->set_req(MemNode::Memory, st->in(MemNode::Memory));
2439         }
2440         return this;
2441       }
2442       st = st->in(MemNode::Memory);
2443     }
2444   }
2445 
2446 
2447   // Capture an unaliased, unconditional, simple store into an initializer.
2448   // Or, if it is independent of the allocation, hoist it above the allocation.
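The change above comments out the mismatched-store assert pending the TODO. For reference, a hedged restatement of the predicate it enforces; Opc and fold_is_expected are hypothetical stand-ins, not HotSpot's opcode table or APIs:

#include <cassert>

// Illustrative restatement of the commented-out assert's condition: folding
// across differing store opcodes is only expected for vector stores, raw
// memory, the StoreL-over-StoreI pattern left by an expanded ClearArrayNode,
// or accesses already flagged as mismatched.
enum Opc { StoreI, StoreL, StoreP, StoreVector };  // placeholder values

static bool fold_is_expected(Opc this_opc, Opc st_opc, bool raw_memory,
                             bool this_mismatched, bool st_mismatched) {
  return this_opc == st_opc ||
         st_opc == StoreVector ||
         this_opc == StoreVector ||
         raw_memory ||
         (this_opc == StoreL && st_opc == StoreI) ||  // expanded ClearArrayNode
         (this_mismatched || st_mismatched);
}

int main() {
  // StoreL folding a StoreI is expected: it is left behind when a
  // ClearArrayNode is expanded.
  assert(fold_is_expected(StoreL, StoreI, false, false, false));
  return 0;
}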

