src/share/vm/opto/memnode.cpp
rev 8052 : castpp gcm
rev 8571 : 8080289: Intermediate writes in a loop not eliminated by optimizer
Summary: Move Stores out of loop (after or before) when possible
Reviewed-by:
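For context, a hypothetical sketch of the pattern 8080289 targets (written as C++ here for consistency with this file; the real inputs are Java programs compiled by C2): each iteration overwrites the same field, so every store but the last is dead, and the surviving store can be moved out of the loop.

struct Cell { int f; };

void count(Cell* c, int n) {
  for (int i = 0; i < n; i++) {
    c->f = i;   // overwritten by the next iteration; only the final
  }             // store (i == n-1, when n > 0) is observable after the loop
}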


Old:

//------------------------------hash-------------------------------------------
uint StoreNode::hash() const {
  // unroll addition of interesting fields
  //return (uintptr_t)in(Control) + (uintptr_t)in(Memory) + (uintptr_t)in(Address) + (uintptr_t)in(ValueIn);

  // Since they are not commoned, do not hash them:
  return NO_HASH;
}

//------------------------------Ideal------------------------------------------
// Change back-to-back Store(Store(m, p, y), p, x) to Store(m, p, x).
// When a store immediately follows a relevant allocation/initialization,
// try to capture it into the initialization, or hoist it above.
Node *StoreNode::Ideal(PhaseGVN *phase, bool can_reshape) {
  Node* p = MemNode::Ideal_common(phase, can_reshape);
  if (p)  return (p == NodeSentinel) ? NULL : p;

  Node* mem     = in(MemNode::Memory);
  Node* address = in(MemNode::Address);

  // Back-to-back stores to the same address?  Fold them up.  Generally
  // unsafe if I have intervening uses...  Also disallowed for StoreCM
  // since they must follow each StoreP operation.  Redundant StoreCMs
  // are eliminated just before matching in final_graph_reshape.
  if (mem->is_Store() && mem->in(MemNode::Address)->eqv_uncast(address) &&
      mem->Opcode() != Op_StoreCM) {
    // Looking at a dead closed cycle of memory?
    assert(mem != mem->in(MemNode::Memory), "dead loop in StoreNode::Ideal");

    assert(Opcode() == mem->Opcode() ||
           phase->C->get_alias_index(adr_type()) == Compile::AliasIdxRaw,
           "no mismatched stores, except on raw memory");

    if (mem->outcnt() == 1 &&           // check for intervening uses
        mem->as_Store()->memory_size() <= this->memory_size()) {
      // If anybody other than 'this' uses 'mem', we cannot fold 'mem' away.
      // For example, 'mem' might be the final state at a conditional return.
      // Or, 'mem' might be used by some node which is live at the same time
      // 'this' is live, which might be unschedulable.  So, require exactly
      // ONE user, the 'this' store, until such time as we clone 'mem' for
      // each of 'mem's uses (thus making the exactly-1-user-rule hold true).
      if (can_reshape) {  // (%%% is this an anachronism?)
        set_req_X(MemNode::Memory, mem->in(MemNode::Memory),
                  phase->is_IterGVN());
      } else {
        // It's OK to do this in the parser, since DU info is always accurate,
        // and the parser always refers to nodes via SafePointNode maps.
        set_req(MemNode::Memory, mem->in(MemNode::Memory));
      }
      return this;
    }
  }

  // Capture an unaliased, unconditional, simple store into an initializer.
  // Or, if it is independent of the allocation, hoist it above the allocation.
  if (ReduceFieldZeroing && /*can_reshape &&*/
      mem->is_Proj() && mem->in(0)->is_Initialize()) {
    InitializeNode* init = mem->in(0)->as_Initialize();
    intptr_t offset = init->can_capture_store(this, phase, can_reshape);
    if (offset > 0) {
      Node* moved = init->capture_store(this, offset, phase, can_reshape);
      // If the InitializeNode captured me, it made a raw copy of me,
      // and I need to disappear.
      if (moved != NULL) {
        // %%% hack to ensure that Ideal returns a new node:
        mem = MergeMemNode::make(mem);
        return mem;             // fold me away
      }
    }
  }

  return NULL;                  // No further progress
}
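The block comment above compresses the safety argument into one rule: 'mem' may be bypassed only when its single user is 'this'. A standalone toy model of that old one-step rule (hypothetical types, not HotSpot's Node API; the StoreCM and opcode checks are omitted):

#include <cstddef>

// Toy stand-ins for C2 store nodes (hypothetical, for illustration only).
struct ToyStore {
  ToyStore*   mem;     // memory input: the preceding store on the chain
  const void* adr;     // address being written
  int         size;    // bytes written
  int         outcnt;  // number of users of this store's memory state
};

// Old rule: look exactly one step up the memory chain.  'prev' may be
// spliced out only if it writes the same address, writes no more bytes
// than 'self', and has no user other than 'self' (outcnt == 1).
bool fold_once(ToyStore* self) {
  ToyStore* prev = self->mem;
  if (prev != NULL && prev->adr == self->adr &&
      prev->outcnt == 1 && prev->size <= self->size) {
    self->mem = prev->mem;   // bypass the dead intermediate store
    return true;             // progress, like Ideal returning 'this'
  }
  return false;              // no progress, like Ideal returning NULL
}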
New:

//------------------------------hash-------------------------------------------
uint StoreNode::hash() const {
  // unroll addition of interesting fields
  //return (uintptr_t)in(Control) + (uintptr_t)in(Memory) + (uintptr_t)in(Address) + (uintptr_t)in(ValueIn);

  // Since they are not commoned, do not hash them:
  return NO_HASH;
}

//------------------------------Ideal------------------------------------------
// Change back-to-back Store(Store(m, p, y), p, x) to Store(m, p, x).
// When a store immediately follows a relevant allocation/initialization,
// try to capture it into the initialization, or hoist it above.
Node *StoreNode::Ideal(PhaseGVN *phase, bool can_reshape) {
  Node* p = MemNode::Ideal_common(phase, can_reshape);
  if (p)  return (p == NodeSentinel) ? NULL : p;

  Node* mem     = in(MemNode::Memory);
  Node* address = in(MemNode::Address);

  // Back-to-back stores to the same address?  Fold them up.  Generally
  // unsafe if I have intervening uses...  Also disallowed for StoreCM
  // since they must follow each StoreP operation.  Redundant StoreCMs
  // are eliminated just before matching in final_graph_reshape.
  {
    bool improved = false;
    Node* st = mem;
    Node* prev = this;
    // If anybody other than the previous Store on the memory chain
    // uses 'st', we cannot fold 'st' away.  For example, 'st'
    // might be the final state at a conditional return.  Or, 'st'
    // might be used by some node which is live at the same time
    // 'st' is live, which might be unschedulable.  So, require
    // exactly ONE user, the 'prev' store, until such time as we clone
    // 'mem' for each of 'mem's uses (thus making the
    // exactly-1-user-rule hold true).
    while (st->is_Store() && st->outcnt() == 1 && st->Opcode() != Op_StoreCM) {
      // Looking at a dead closed cycle of memory?
      assert(st != st->in(MemNode::Memory), "dead loop in StoreNode::Ideal");
      assert(Opcode() == st->Opcode() ||
             st->Opcode() == Op_StoreVector ||
             Opcode() == Op_StoreVector ||
             phase->C->get_alias_index(adr_type()) == Compile::AliasIdxRaw ||
             (Opcode() == Op_StoreL && st->Opcode() == Op_StoreI), // expanded ClearArrayNode
             err_msg_res("no mismatched stores, except on raw memory: %s %s", NodeClassNames[Opcode()], NodeClassNames[st->Opcode()]));

      if (st->in(MemNode::Address)->eqv_uncast(address) &&
          st->as_Store()->memory_size() <= this->memory_size()) {
        phase->igvn_rehash_node_delayed(prev);
        if (can_reshape) {
          prev->set_req_X(MemNode::Memory, st->in(MemNode::Memory), phase->is_IterGVN());
        } else {
          // It's OK to do this in the parser, since DU info is always accurate,
          // and the parser always refers to nodes via SafePointNode maps.
          prev->set_req(MemNode::Memory, st->in(MemNode::Memory));
        }
        improved = (prev == this);
      }
      prev = st;
      st = st->in(MemNode::Memory);
    }
    if (improved) {
      return this;
    }
  }

  // Capture an unaliased, unconditional, simple store into an initializer.
  // Or, if it is independent of the allocation, hoist it above the allocation.
  if (ReduceFieldZeroing && /*can_reshape &&*/
      mem->is_Proj() && mem->in(0)->is_Initialize()) {
    InitializeNode* init = mem->in(0)->as_Initialize();
    intptr_t offset = init->can_capture_store(this, phase, can_reshape);
    if (offset > 0) {
      Node* moved = init->capture_store(this, offset, phase, can_reshape);
      // If the InitializeNode captured me, it made a raw copy of me,
      // and I need to disappear.
      if (moved != NULL) {
        // %%% hack to ensure that Ideal returns a new node:
        mem = MergeMemNode::make(mem);
        return mem;             // fold me away
      }
    }
  }

  return NULL;                  // No further progress
}
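The new loop generalizes the old one-step fold: it walks the whole chain of single-user stores, so a dead store to 'address' can now be eliminated even when stores to unrelated addresses sit between it and 'this' (the prev/st pair splices such a store out from under its neighbor). Continuing the toy model given after the old version (same hypothetical types, same omissions):

// New rule: walk the chain of single-user stores, splicing out every
// store to the same address whose bytes 'self' fully overwrites.
bool fold_chain(ToyStore* self) {
  bool improved = false;
  ToyStore* prev = self;
  ToyStore* st   = self->mem;
  while (st != NULL && st->outcnt == 1) {
    if (st->adr == self->adr && st->size <= self->size) {
      prev->mem = st->mem;         // splice 'st' out from under 'prev'
      improved = (prev == self);   // as above: progress is reported only
    }                              // when 'self' itself was just rewired
    prev = st;
    st = st->mem;
  }
  return improved;
}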


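For the capture path, which both versions share, a hypothetical sketch of the shape it handles (again as C++ for consistency; in practice this is a Java allocation site or constructor): with ReduceFieldZeroing, a simple unaliased store that immediately follows the allocation can be captured into the InitializeNode, so the field is written once instead of being zeroed and then overwritten.

struct Obj { int f; };

Obj* make() {
  Obj* o = new Obj();   // allocation; the zeroing is modeled by an InitializeNode
  o->f = 42;            // unconditional store right after allocation: a candidate
                        // for init->capture_store(), folding the 42 into the
                        // initialization in place of the zero
  return o;
}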