src/share/vm/opto/memnode.cpp

rev 2892 : 7121140: Allocation paths require explicit memory synchronization operations for RMO systems
Summary: Adds a store-store barrier after initialization of the header and body of objects.
Reviewed-by:
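For context, the hazard this change addresses: on a processor with a relaxed memory order (RMO), the stores that initialize a new object's header and fields can become visible after the store that publishes the object's reference, so another thread may observe a partially initialized object. Below is a minimal standalone sketch of that publication pattern, written with C++11 atomics rather than C2's barrier nodes, purely for illustration; the MemBarStoreStore introduced by this change plays the storestore half of this role inside the ideal graph.

  #include <atomic>

  struct Obj { int field; };

  std::atomic<Obj*> g_published(nullptr);

  void writer() {
    Obj* p = new Obj;
    p->field = 42;  // initialize the body
    // On an RMO machine, plain stores may be reordered: the publishing
    // store below could become visible before the initializing store
    // above. A store-store barrier (here folded into the release store)
    // forbids that reordering.
    g_published.store(p, std::memory_order_release);
  }

  void reader() {
    Obj* p = g_published.load(std::memory_order_acquire);
    if (p != nullptr) {
      int v = p->field;  // with the release/acquire pairing, v is 42
      (void)v;
    }
  }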


--- old/src/share/vm/opto/memnode.cpp
2704     init_req(TypeFunc::Parms, precedent);
2705 }
2706 
2707 //------------------------------cmp--------------------------------------------
2708 uint MemBarNode::hash() const { return NO_HASH; }
2709 uint MemBarNode::cmp( const Node &n ) const {
2710   return (&n == this);          // Always fail except on self
2711 }
2712 
2713 //------------------------------make-------------------------------------------
2714 MemBarNode* MemBarNode::make(Compile* C, int opcode, int atp, Node* pn) {
2715   int len = Precedent + (pn == NULL? 0: 1);
2716   switch (opcode) {
2717   case Op_MemBarAcquire:   return new(C, len) MemBarAcquireNode(C,  atp, pn);
2718   case Op_MemBarRelease:   return new(C, len) MemBarReleaseNode(C,  atp, pn);
2719   case Op_MemBarAcquireLock: return new(C, len) MemBarAcquireLockNode(C,  atp, pn);
2720   case Op_MemBarReleaseLock: return new(C, len) MemBarReleaseLockNode(C,  atp, pn);
2721   case Op_MemBarVolatile:  return new(C, len) MemBarVolatileNode(C, atp, pn);
2722   case Op_MemBarCPUOrder:  return new(C, len) MemBarCPUOrderNode(C, atp, pn);
2723   case Op_Initialize:      return new(C, len) InitializeNode(C,     atp, pn);

2724   default:                 ShouldNotReachHere(); return NULL;
2725   }
2726 }
2727 
2728 //------------------------------Ideal------------------------------------------
2729 // Return a node which is more "ideal" than the current node.  Strip out
2730 // control copies
2731 Node *MemBarNode::Ideal(PhaseGVN *phase, bool can_reshape) {
2732   if (remove_dead_region(phase, can_reshape)) return this;
2733   // Don't bother trying to transform a dead node
2734   if (in(0) && in(0)->is_top())  return NULL;
2735 
2736   // Eliminate volatile MemBars for scalar replaced objects.
2737   if (can_reshape && req() == (Precedent+1) &&
2738       (Opcode() == Op_MemBarAcquire || Opcode() == Op_MemBarVolatile)) {
2739     // Volatile field loads and stores.
2740     Node* my_mem = in(MemBarNode::Precedent);
2741     if (my_mem != NULL && my_mem->is_Mem()) {
2742       const TypeOopPtr* t_oop = my_mem->in(MemNode::Address)->bottom_type()->isa_oopptr();
2743       // Check for scalar replaced object reference.


2853 // produced by it are optimizable if they match the control edge and
2854 // new oop address associated with the allocation/initialization.
2855 // They return a stored value (if the offset matches) or else zero.
2856 // A write to the memory state, if it matches control and address,
2857 // and if it is to a constant offset, may be 'captured' by the
2858 // InitializeNode.  It is cloned as a raw memory operation and rewired
2859 // inside the initialization, to the raw oop produced by the allocation.
2860 // Operations on addresses which are provably distinct (e.g., to
2861 // other AllocateNodes) are allowed to bypass the initialization.
2862 //
2863 // The effect of all this is to consolidate object initialization
2864 // (both arrays and non-arrays, both piecewise and bulk) into a
2865 // single location, where it can be optimized as a unit.
2866 //
2867 // Only stores with an offset less than TrackedInitializationLimit words
2868 // will be considered for capture by an InitializeNode.  This puts a
2869 // reasonable limit on the complexity of optimized initializations.
2870 
2871 //---------------------------InitializeNode------------------------------------
2872 InitializeNode::InitializeNode(Compile* C, int adr_type, Node* rawoop)
2873   : _is_complete(Incomplete),
2874     MemBarNode(C, adr_type, rawoop)
2875 {
2876   init_class_id(Class_Initialize);
2877 
2878   assert(adr_type == Compile::AliasIdxRaw, "only valid atp");
2879   assert(in(RawAddress) == rawoop, "proper init");
2880   // Note:  allocation() can be NULL, for secondary initialization barriers
2881 }
2882 
2883 // Since this node is not matched, it will be processed by the
2884 // register allocator.  Declare that there are no constraints
2885 // on the allocation of the RawAddress edge.
2886 const RegMask &InitializeNode::in_RegMask(uint idx) const {
2887   // This edge should be set to top, by the set_complete.  But be conservative.
2888   if (idx == InitializeNode::RawAddress)
2889     return *(Compile::current()->matcher()->idealreg2spillmask[in(idx)->ideal_reg()]);
2890   return RegMask::Empty;
2891 }
2892 
2893 Node* InitializeNode::memory(uint alias_idx) {




+++ new/src/share/vm/opto/memnode.cpp
2704     init_req(TypeFunc::Parms, precedent);
2705 }
2706 
2707 //------------------------------cmp--------------------------------------------
2708 uint MemBarNode::hash() const { return NO_HASH; }
2709 uint MemBarNode::cmp( const Node &n ) const {
2710   return (&n == this);          // Always fail except on self
2711 }
2712 
2713 //------------------------------make-------------------------------------------
2714 MemBarNode* MemBarNode::make(Compile* C, int opcode, int atp, Node* pn) {
2715   int len = Precedent + (pn == NULL? 0: 1);
2716   switch (opcode) {
2717   case Op_MemBarAcquire:   return new(C, len) MemBarAcquireNode(C,  atp, pn);
2718   case Op_MemBarRelease:   return new(C, len) MemBarReleaseNode(C,  atp, pn);
2719   case Op_MemBarAcquireLock: return new(C, len) MemBarAcquireLockNode(C,  atp, pn);
2720   case Op_MemBarReleaseLock: return new(C, len) MemBarReleaseLockNode(C,  atp, pn);
2721   case Op_MemBarVolatile:  return new(C, len) MemBarVolatileNode(C, atp, pn);
2722   case Op_MemBarCPUOrder:  return new(C, len) MemBarCPUOrderNode(C, atp, pn);
2723   case Op_Initialize:      return new(C, len) InitializeNode(C,     atp, pn);
2724   case Op_MemBarStoreStore: return new(C, len) MemBarStoreStoreNode(C,  atp, pn);
2725   default:                 ShouldNotReachHere(); return NULL;
2726   }
2727 }
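The new Op_MemBarStoreStore case exists so the allocation paths can manufacture the barrier through this factory. A hedged sketch of the expected call site during macro expansion of an allocation follows; transform_later, fast_oop_ctrl, and fast_oop_rawmem stand in for PhaseMacroExpand state and are assumptions here, not part of this file.

  // Illustrative only: interpose a MemBarStoreStore between the
  // initializing stores and the point where the new oop can escape.
  MemBarNode* mb = MemBarNode::make(C, Op_MemBarStoreStore, Compile::AliasIdxBot, NULL);
  transform_later(mb);                               // assumed IGVN registration helper
  mb->init_req(TypeFunc::Memory,  fast_oop_rawmem);  // raw memory after initialization
  mb->init_req(TypeFunc::Control, fast_oop_ctrl);    // control after initialization
  fast_oop_ctrl   = new (C, 1) ProjNode(mb, TypeFunc::Control);
  fast_oop_rawmem = new (C, 1) ProjNode(mb, TypeFunc::Memory);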
2728 
2729 //------------------------------Ideal------------------------------------------
2730 // Return a node which is more "ideal" than the current node.  Strip out
2731 // control copies
2732 Node *MemBarNode::Ideal(PhaseGVN *phase, bool can_reshape) {
2733   if (remove_dead_region(phase, can_reshape)) return this;
2734   // Don't bother trying to transform a dead node
2735   if (in(0) && in(0)->is_top())  return NULL;
2736 
2737   // Eliminate volatile MemBars for scalar replaced objects.
2738   if (can_reshape && req() == (Precedent+1) &&
2739       (Opcode() == Op_MemBarAcquire || Opcode() == Op_MemBarVolatile)) {
2740     // Volatile field loads and stores.
2741     Node* my_mem = in(MemBarNode::Precedent);
2742     if (my_mem != NULL && my_mem->is_Mem()) {
2743       const TypeOopPtr* t_oop = my_mem->in(MemNode::Address)->bottom_type()->isa_oopptr();
2744       // Check for scalar replaced object reference.
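The hunk ends mid-check at the excerpt boundary. For orientation only, here is a hedged sketch of the shape such a check takes; is_known_instance_field() is a real TypeOopPtr query, but the body is illustrative, not quoted from the file.

  // Illustrative only: a barrier guarding a scalar replaced (non-escaping,
  // single known instance) object synchronizes nothing and can go away.
  if (t_oop != NULL && t_oop->is_known_instance_field()) {
    // ... short-circuit the MemBar's Memory and Control projections to
    // its inputs, letting the barrier node die ...
  }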


2854 // produced by it are optimizable if they match the control edge and
2855 // new oop address associated with the allocation/initialization.
2856 // They return a stored value (if the offset matches) or else zero.
2857 // A write to the memory state, if it matches control and address,
2858 // and if it is to a constant offset, may be 'captured' by the
2859 // InitializeNode.  It is cloned as a raw memory operation and rewired
2860 // inside the initialization, to the raw oop produced by the allocation.
2861 // Operations on addresses which are provably distinct (e.g., to
2862 // other AllocateNodes) are allowed to bypass the initialization.
2863 //
2864 // The effect of all this is to consolidate object initialization
2865 // (both arrays and non-arrays, both piecewise and bulk) into a
2866 // single location, where it can be optimized as a unit.
2867 //
2868 // Only stores with an offset less than TrackedInitializationLimit words
2869 // will be considered for capture by an InitializeNode.  This puts a
2870 // reasonable limit on the complexity of optimized initializations.
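To make the limit in the last paragraph concrete: TrackedInitializationLimit is a word count while heap offsets are byte counts, so a capture candidate's constant offset must fall below the limit scaled by the word size. A hypothetical helper (the name and placement are illustrative; TrackedInitializationLimit and HeapWordSize are existing HotSpot names):

  // Illustrative only: a store is a capture candidate only if its
  // constant byte offset lies inside the tracked initialization window.
  static bool store_offset_is_trackable(intptr_t st_off) {
    return st_off >= 0 &&
           st_off < (intptr_t) TrackedInitializationLimit * HeapWordSize;
  }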
2871 
2872 //---------------------------InitializeNode------------------------------------
2873 InitializeNode::InitializeNode(Compile* C, int adr_type, Node* rawoop)
2874   : _is_complete(Incomplete), _does_not_escape(false),
2875     MemBarNode(C, adr_type, rawoop)
2876 {
2877   init_class_id(Class_Initialize);
2878 
2879   assert(adr_type == Compile::AliasIdxRaw, "only valid atp");
2880   assert(in(RawAddress) == rawoop, "proper init");
2881   // Note:  allocation() can be NULL, for secondary initialization barriers
2882 }
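The newly initialized _does_not_escape flag gives escape analysis a place to record that the allocated object never becomes visible to another thread, so a later pass can skip the store-store barrier entirely. A hedged sketch of that use, assuming a does_not_escape() accessor on InitializeNode and an 'alloc' AllocateNode in scope:

  // Illustrative only: elide the publication barrier for a provably
  // non-escaping allocation.
  InitializeNode* init = alloc->initialization();
  if (init == NULL || !init->does_not_escape()) {
    // object may be published to another thread: keep the
    // MemBarStoreStore emitted after its initializing stores
  }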
2883 
2884 // Since this node is not matched, it will be processed by the
2885 // register allocator.  Declare that there are no constraints
2886 // on the allocation of the RawAddress edge.
2887 const RegMask &InitializeNode::in_RegMask(uint idx) const {
2888   // This edge should be set to top, by the set_complete.  But be conservative.
2889   if (idx == InitializeNode::RawAddress)
2890     return *(Compile::current()->matcher()->idealreg2spillmask[in(idx)->ideal_reg()]);
2891   return RegMask::Empty;
2892 }
2893 
2894 Node* InitializeNode::memory(uint alias_idx) {