src/share/vm/opto/memnode.cpp

Print this page
rev 2571 : [mq]: membar3


 908 // of aliasing.
 909 Node* MemNode::can_see_stored_value(Node* st, PhaseTransform* phase) const {
 910   Node* ld_adr = in(MemNode::Address);
 911 
// NOTE(review): pre-membar3 revision of this fragment; the function body
// continues beyond the last line captured in this excerpt.
 912   const TypeInstPtr* tp = phase->type(ld_adr)->isa_instptr();
 913   Compile::AliasType* atp = tp != NULL ? phase->C->alias_type(tp) : NULL;
// Only attempt to look through memory barriers for non-volatile instance
// fields, and only when autobox elimination is enabled; volatile fields
// must observe every barrier.
 914   if (EliminateAutoBox && atp != NULL && atp->index() >= Compile::AliasIdxRaw &&
 915       atp->field() != NULL && !atp->field()->is_volatile()) {
 916     uint alias_idx = atp->index();
 917     bool final = atp->field()->is_final();
 918     Node* result = NULL;
 919     Node* current = st;
 920     // Skip through chains of MemBarNodes checking the MergeMems for
 921     // new states for the slice of this load.  Stop once any other
 922     // kind of node is encountered.  Loads from final memory can skip
 923     // through any kind of MemBar but normal loads shouldn't skip
 924     // through MemBarAcquire since that could allow them to move out of
 925     // a synchronized region.
 926     while (current->is_Proj()) {
 927       int opc = current->in(0)->Opcode();
// In this revision only plain Acquire (final loads), Release and CPUOrder
// barriers are skippable.
 928       if ((final && opc == Op_MemBarAcquire) ||
 929           opc == Op_MemBarRelease || opc == Op_MemBarCPUOrder) {

 930         Node* mem = current->in(0)->in(TypeFunc::Memory);
 931         if (mem->is_MergeMem()) {
 932           MergeMemNode* merge = mem->as_MergeMem();
 933           Node* new_st = merge->memory_at(alias_idx);
// If our alias slice is unchanged by this barrier (it still sees the merge's
// base memory), the barrier is transparent for this load: keep walking up.
 934           if (new_st == merge->base_memory()) {
 935             // Keep searching
 936             current = merge->base_memory();
 937             continue;
 938           }
 939           // Save the new memory state for the slice and fall through
 940           // to exit.
 941           result = new_st;
 942         }
 943       }
 944       break;
 945     }
// If we found a more precise memory state for this slice, use it as the
// store to test against.
 946     if (result != NULL) {
 947       st = result;
 948     }
 949   }


// NOTE(review): tail of a MemBarNode constructor; its beginning is not
// captured in this excerpt.
2649   Node* top = C->top();
// Inputs this barrier does not use are pinned to top.
2650   init_req(TypeFunc::I_O,top);
2651   init_req(TypeFunc::FramePtr,top);
2652   init_req(TypeFunc::ReturnAdr,top);
// Optional precedent edge: keeps the barrier ordered after a specific node.
2653   if (precedent != NULL)
2654     init_req(TypeFunc::Parms, precedent);
2655 }
2656 
2657 //------------------------------cmp--------------------------------------------
// Memory barriers are never value-numbered together: hash() returns NO_HASH
// and cmp() matches only the node itself, so GVN cannot common two MemBars.
2658 uint MemBarNode::hash() const { return NO_HASH; }
2659 uint MemBarNode::cmp( const Node &n ) const {
2660   return (&n == this);          // Always fail except on self
2661 }
2662 
2663 //------------------------------make-------------------------------------------
// Factory: builds the MemBarNode subclass matching 'opcode' over alias slice
// 'atp', with optional precedent input 'pn' (which adds one edge to the node).
// NOTE(review): pre-membar3 revision — no AcquireLock/ReleaseLock cases yet.
2664 MemBarNode* MemBarNode::make(Compile* C, int opcode, int atp, Node* pn) {
2665   int len = Precedent + (pn == NULL? 0: 1);
2666   switch (opcode) {
2667   case Op_MemBarAcquire:   return new(C, len) MemBarAcquireNode(C,  atp, pn);
2668   case Op_MemBarRelease:   return new(C, len) MemBarReleaseNode(C,  atp, pn);


2669   case Op_MemBarVolatile:  return new(C, len) MemBarVolatileNode(C, atp, pn);
2670   case Op_MemBarCPUOrder:  return new(C, len) MemBarCPUOrderNode(C, atp, pn);
2671   case Op_Initialize:      return new(C, len) InitializeNode(C,     atp, pn);
2672   default:                 ShouldNotReachHere(); return NULL;
2673   }
2674 }
2675 
2676 //------------------------------Ideal------------------------------------------
2677 // Return a node which is more "ideal" than the current node.  Strip out
2678 // control copies
2679 Node *MemBarNode::Ideal(PhaseGVN *phase, bool can_reshape) {
2680   if (remove_dead_region(phase, can_reshape)) return this;
2681 
2682   // Eliminate volatile MemBars for scalar replaced objects.
// Only barriers with no precedent edge (req() == Precedent+1) that guard
// volatile field accesses are candidates.
2683   if (can_reshape && req() == (Precedent+1) &&
2684       (Opcode() == Op_MemBarAcquire || Opcode() == Op_MemBarVolatile)) {
2685     // Volatile field loads and stores.
2686     Node* my_mem = in(MemBarNode::Precedent);
2687     if (my_mem != NULL && my_mem->is_Mem()) {
2688       const TypeOopPtr* t_oop = my_mem->in(MemNode::Address)->bottom_type()->isa_oopptr();
// NOTE(review): function truncated here in this excerpt.




 908 // of aliasing.
 909 Node* MemNode::can_see_stored_value(Node* st, PhaseTransform* phase) const {
 910   Node* ld_adr = in(MemNode::Address);
 911 
// NOTE(review): post-membar3 revision of this fragment; the function body
// continues beyond the last line captured in this excerpt.
 912   const TypeInstPtr* tp = phase->type(ld_adr)->isa_instptr();
 913   Compile::AliasType* atp = tp != NULL ? phase->C->alias_type(tp) : NULL;
// Only attempt to look through memory barriers for non-volatile instance
// fields, and only when autobox elimination is enabled; volatile fields
// must observe every barrier.
 914   if (EliminateAutoBox && atp != NULL && atp->index() >= Compile::AliasIdxRaw &&
 915       atp->field() != NULL && !atp->field()->is_volatile()) {
 916     uint alias_idx = atp->index();
 917     bool final = atp->field()->is_final();
 918     Node* result = NULL;
 919     Node* current = st;
 920     // Skip through chains of MemBarNodes checking the MergeMems for
 921     // new states for the slice of this load.  Stop once any other
 922     // kind of node is encountered.  Loads from final memory can skip
 923     // through any kind of MemBar but normal loads shouldn't skip
 924     // through MemBarAcquire since that could allow them to move out of
 925     // a synchronized region.
 926     while (current->is_Proj()) {
 927       int opc = current->in(0)->Opcode();
// membar3 change: the lock-specific barrier opcodes are now recognized here —
// AcquireLock joins Acquire as final-load-only, ReleaseLock is always
// skippable alongside Release and CPUOrder.
 928       if ((final && (opc == Op_MemBarAcquire || opc == Op_MemBarAcquireLock)) ||
 929           opc == Op_MemBarRelease || opc == Op_MemBarCPUOrder ||
 930           opc == Op_MemBarReleaseLock) {
 931         Node* mem = current->in(0)->in(TypeFunc::Memory);
 932         if (mem->is_MergeMem()) {
 933           MergeMemNode* merge = mem->as_MergeMem();
 934           Node* new_st = merge->memory_at(alias_idx);
// If our alias slice is unchanged by this barrier (it still sees the merge's
// base memory), the barrier is transparent for this load: keep walking up.
 935           if (new_st == merge->base_memory()) {
 936             // Keep searching
 937             current = merge->base_memory();
 938             continue;
 939           }
 940           // Save the new memory state for the slice and fall through
 941           // to exit.
 942           result = new_st;
 943         }
 944       }
 945       break;
 946     }
// If we found a more precise memory state for this slice, use it as the
// store to test against.
 947     if (result != NULL) {
 948       st = result;
 949     }
 950   }


// NOTE(review): tail of a MemBarNode constructor; its beginning is not
// captured in this excerpt.
2650   Node* top = C->top();
// Inputs this barrier does not use are pinned to top.
2651   init_req(TypeFunc::I_O,top);
2652   init_req(TypeFunc::FramePtr,top);
2653   init_req(TypeFunc::ReturnAdr,top);
// Optional precedent edge: keeps the barrier ordered after a specific node.
2654   if (precedent != NULL)
2655     init_req(TypeFunc::Parms, precedent);
2656 }
2657 
2658 //------------------------------cmp--------------------------------------------
// Memory barriers are never value-numbered together: hash() returns NO_HASH
// and cmp() matches only the node itself, so GVN cannot common two MemBars.
2659 uint MemBarNode::hash() const { return NO_HASH; }
2660 uint MemBarNode::cmp( const Node &n ) const {
2661   return (&n == this);          // Always fail except on self
2662 }
2663 
2664 //------------------------------make-------------------------------------------
// Factory: builds the MemBarNode subclass matching 'opcode' over alias slice
// 'atp', with optional precedent input 'pn' (which adds one edge to the node).
// membar3 change: two new cases construct the lock-entry/exit barrier nodes
// MemBarAcquireLockNode and MemBarReleaseLockNode.
2665 MemBarNode* MemBarNode::make(Compile* C, int opcode, int atp, Node* pn) {
2666   int len = Precedent + (pn == NULL? 0: 1);
2667   switch (opcode) {
2668   case Op_MemBarAcquire:   return new(C, len) MemBarAcquireNode(C,  atp, pn);
2669   case Op_MemBarRelease:   return new(C, len) MemBarReleaseNode(C,  atp, pn);
2670   case Op_MemBarAcquireLock: return new(C, len) MemBarAcquireLockNode(C,  atp, pn);
2671   case Op_MemBarReleaseLock: return new(C, len) MemBarReleaseLockNode(C,  atp, pn);
2672   case Op_MemBarVolatile:  return new(C, len) MemBarVolatileNode(C, atp, pn);
2673   case Op_MemBarCPUOrder:  return new(C, len) MemBarCPUOrderNode(C, atp, pn);
2674   case Op_Initialize:      return new(C, len) InitializeNode(C,     atp, pn);
2675   default:                 ShouldNotReachHere(); return NULL;
2676   }
2677 }
2678 
2679 //------------------------------Ideal------------------------------------------
2680 // Return a node which is more "ideal" than the current node.  Strip out
2681 // control copies
2682 Node *MemBarNode::Ideal(PhaseGVN *phase, bool can_reshape) {
2683   if (remove_dead_region(phase, can_reshape)) return this;
2684 
2685   // Eliminate volatile MemBars for scalar replaced objects.
// Only barriers with no precedent edge (req() == Precedent+1) that guard
// volatile field accesses are candidates.
2686   if (can_reshape && req() == (Precedent+1) &&
2687       (Opcode() == Op_MemBarAcquire || Opcode() == Op_MemBarVolatile)) {
2688     // Volatile field loads and stores.
2689     Node* my_mem = in(MemBarNode::Precedent);
2690     if (my_mem != NULL && my_mem->is_Mem()) {
2691       const TypeOopPtr* t_oop = my_mem->in(MemNode::Address)->bottom_type()->isa_oopptr();
// NOTE(review): function truncated here in this excerpt.