src/hotspot/share/opto/memnode.cpp

Old:

2522   Node* address = in(MemNode::Address);
2523   // Back-to-back stores to same address?  Fold em up.  Generally
2524   // unsafe if I have intervening uses...  Also disallowed for StoreCM
2525   // since they must follow each StoreP operation.  Redundant StoreCMs
2526   // are eliminated just before matching in final_graph_reshape.
2527   {
2528     Node* st = mem;
2529     // If Store 'st' has more than one use, we cannot fold 'st' away.
2530     // For example, 'st' might be the final state at a conditional
2531     // return.  Or, 'st' might be used by some node which is live at
2532     // the same time 'st' is live, which might be unschedulable.  So,
2533     // require exactly ONE user until such time as we clone 'mem' for
2534     // each of 'mem's uses (thus making the exactly-1-user-rule hold
2535     // true).
2536     while (st->is_Store() && st->outcnt() == 1 && st->Opcode() != Op_StoreCM) {
2537       // Looking at a dead closed cycle of memory?
2538       assert(st != st->in(MemNode::Memory), "dead loop in StoreNode::Ideal");
2539       assert(Opcode() == st->Opcode() ||
2540              st->Opcode() == Op_StoreVector ||
2541              Opcode() == Op_StoreVector ||
2542              phase->C->get_alias_index(adr_type()) == Compile::AliasIdxRaw ||
2543              (Opcode() == Op_StoreL && st->Opcode() == Op_StoreI) || // expanded ClearArrayNode
2544              (Opcode() == Op_StoreI && st->Opcode() == Op_StoreL) || // initialization by arraycopy
2545              (is_mismatched_access() || st->as_Store()->is_mismatched_access()),
2546              "no mismatched stores, except on raw memory: %s %s", NodeClassNames[Opcode()], NodeClassNames[st->Opcode()]);
2547 
2548       if (st->in(MemNode::Address)->eqv_uncast(address) &&
2549           st->as_Store()->memory_size() <= this->memory_size()) {
2550         Node* use = st->raw_out(0);
2551         phase->igvn_rehash_node_delayed(use);
2552         if (can_reshape) {
2553           use->set_req_X(MemNode::Memory, st->in(MemNode::Memory), phase->is_IterGVN());
2554         } else {
2555           // It's OK to do this in the parser, since DU info is always accurate,
2556           // and the parser always refers to nodes via SafePointNode maps.
2557           use->set_req(MemNode::Memory, st->in(MemNode::Memory));
2558         }
2559         return this;
2560       }
2561       st = st->in(MemNode::Memory);

New:

2522   Node* address = in(MemNode::Address);
2523   // Back-to-back stores to same address?  Fold em up.  Generally
2524   // unsafe if I have intervening uses...  Also disallowed for StoreCM
2525   // since they must follow each StoreP operation.  Redundant StoreCMs
2526   // are eliminated just before matching in final_graph_reshape.
2527   {
2528     Node* st = mem;
2529     // If Store 'st' has more than one use, we cannot fold 'st' away.
2530     // For example, 'st' might be the final state at a conditional
2531     // return.  Or, 'st' might be used by some node which is live at
2532     // the same time 'st' is live, which might be unschedulable.  So,
2533     // require exactly ONE user until such time as we clone 'mem' for
2534     // each of 'mem's uses (thus making the exactly-1-user-rule hold
2535     // true).
2536     while (st->is_Store() && st->outcnt() == 1 && st->Opcode() != Op_StoreCM) {
2537       // Looking at a dead closed cycle of memory?
2538       assert(st != st->in(MemNode::Memory), "dead loop in StoreNode::Ideal");
2539       assert(Opcode() == st->Opcode() ||
2540              st->Opcode() == Op_StoreVector ||
2541              Opcode() == Op_StoreVector ||
2542              st->Opcode() == Op_StoreVectorScatter ||
2543              Opcode() == Op_StoreVectorScatter ||
2544              phase->C->get_alias_index(adr_type()) == Compile::AliasIdxRaw ||
2545              (Opcode() == Op_StoreL && st->Opcode() == Op_StoreI) || // expanded ClearArrayNode
2546              (Opcode() == Op_StoreI && st->Opcode() == Op_StoreL) || // initialization by arraycopy
2547              (is_mismatched_access() || st->as_Store()->is_mismatched_access()),
2548              "no mismatched stores, except on raw memory: %s %s", NodeClassNames[Opcode()], NodeClassNames[st->Opcode()]);
2549 
2550       if (st->in(MemNode::Address)->eqv_uncast(address) &&
2551           st->as_Store()->memory_size() <= this->memory_size()) {
2552         Node* use = st->raw_out(0);
2553         phase->igvn_rehash_node_delayed(use);
2554         if (can_reshape) {
2555           use->set_req_X(MemNode::Memory, st->in(MemNode::Memory), phase->is_IterGVN());
2556         } else {
2557           // It's OK to do this in the parser, since DU info is always accurate,
2558           // and the parser always refers to nodes via SafePointNode maps.
2559           use->set_req(MemNode::Memory, st->in(MemNode::Memory));
2560         }
2561         return this;
2562       }
2563       st = st->in(MemNode::Memory);
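
For context, the loop above walks the memory chain below the current store and splices out an older store to the same address that the current store fully overwrites, provided the older store has exactly one user. Below is a minimal standalone C++ sketch of that chain walk under a toy node model; ToyStore and fold_redundant_store are hypothetical names for illustration only, not HotSpot types, and the sketch ignores StoreCM, StoreVector/StoreVectorScatter, alias indices, and the IGVN bookkeeping (igvn_rehash_node_delayed, set_req_X) that the real code performs.

// toy_store_fold.cpp -- hypothetical, simplified model; not HotSpot code.
#include <cstdio>

// Stand-in for a store on one memory slice: the address and width it writes,
// the older store it is chained behind, and how many nodes use its memory state.
struct ToyStore {
  int       address;
  int       size;
  ToyStore* mem_in;     // older store on the chain, nullptr at the start
  int       use_count;  // number of consumers of this store's memory state
};

// Walk the chain below 'st' and bypass one older store that writes the same
// address with a size no larger than 'st' (so 'st' fully overwrites it).
// As in StoreNode::Ideal, every store looked through, including the one that
// gets folded, must have exactly one user; rewiring that single user is what
// removes the dead store from the chain.
bool fold_redundant_store(ToyStore* st) {
  ToyStore* prev  = st;          // the single user of 'older' on this chain
  ToyStore* older = st->mem_in;
  while (older != nullptr && older->use_count == 1) {
    if (older->address == st->address && older->size <= st->size) {
      prev->mem_in = older->mem_in;   // splice the dead store out
      return true;
    }
    prev  = older;
    older = older->mem_in;
  }
  return false;
}

int main() {
  ToyStore oldSt {42, 4, nullptr, 1};  // older 4-byte store to slot 42
  ToyStore other {7,  4, &oldSt,  1};  // intervening single-use store, other slot
  ToyStore newSt {42, 4, &other,  1};  // overwrites slot 42 -> oldSt is dead
  bool folded = fold_redundant_store(&newSt);
  std::printf("folded=%d, intervening store now bypasses the dead one: %d\n",
              folded, other.mem_in == nullptr);
  return 0;
}

The use_count == 1 guard in the sketch corresponds to the st->outcnt() == 1 check above: with more than one user, the older store's memory state may still be observed elsewhere (for example at a conditional return), so it cannot be bypassed.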

