/*
 * Copyright (c) 2018, Red Hat, Inc. and/or its affiliates.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shenandoah/shenandoahHeap.hpp"
#include "gc/shenandoah/shenandoahHeuristics.hpp"
#include "gc/shenandoah/shenandoahRuntime.hpp"
#include "gc/shenandoah/c2/shenandoahBarrierSetC2.hpp"
#include "gc/shenandoah/c2/shenandoahSupport.hpp"
#include "opto/graphKit.hpp"
#include "opto/idealKit.hpp"
#include "opto/macro.hpp"
#include "opto/narrowptrnode.hpp"

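// For heap accesses that address a field or array element (not the object
// header), make sure the base oop has been passed through the appropriate
// Shenandoah barrier: a write barrier for writes, a read barrier for reads
// (except final/stable field reads when the corresponding ShenandoahOptimize*
// flags allow skipping it), and rebuild the AddP chain on the barriered base.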
void ShenandoahBarrierSetC2::resolve_address(C2Access& access) const {
  const TypePtr* adr_type = access.addr().type();

  if ((access.decorators() & IN_NATIVE) == 0 && (adr_type->isa_instptr() || adr_type->isa_aryptr())) {
    int off = adr_type->is_ptr()->offset();
    int base_off = adr_type->isa_instptr() ? instanceOopDesc::base_offset_in_bytes() :
      arrayOopDesc::base_offset_in_bytes(adr_type->is_aryptr()->elem()->array_element_basic_type());
    assert(off != Type::OffsetTop, "unexpected offset");
    if (off == Type::OffsetBot || off >= base_off) {
      DecoratorSet decorators = access.decorators();
      bool is_write = (decorators & C2_WRITE_ACCESS) != 0;
      GraphKit* kit = access.kit();
      Node* adr = access.addr().node();
      assert(adr->is_AddP(), "unexpected address shape");
      Node* base = adr->in(AddPNode::Base);

      if (is_write) {
        base = shenandoah_write_barrier(kit, base);
      } else {
        if (adr_type->isa_instptr()) {
          Compile* C = kit->C;
          ciField* field = C->alias_type(adr_type)->field();
          // Decide whether the Shenandoah read barrier can be elided for this field.
          if (field != NULL &&
              ((ShenandoahOptimizeStaticFinals   && field->is_static()  && field->is_final()) ||
               (ShenandoahOptimizeInstanceFinals && !field->is_static() && field->is_final()) ||
               (ShenandoahOptimizeStableFinals   && field->is_stable()))) {
            // Skip the barrier for special fields
          } else {
            base = shenandoah_read_barrier(kit, base);
          }
        } else {
          base = shenandoah_read_barrier(kit, base);
        }
      }
      if (base != adr->in(AddPNode::Base)) {
        Node* address = adr->in(AddPNode::Address);

        if (address->is_AddP()) {
          assert(address->in(AddPNode::Base) == adr->in(AddPNode::Base), "unexpected address shape");
          assert(!address->in(AddPNode::Address)->is_AddP(), "unexpected address shape");
          assert(address->in(AddPNode::Address) == adr->in(AddPNode::Base), "unexpected address shape");
          address = address->clone();
          address->set_req(AddPNode::Base, base);
          address->set_req(AddPNode::Address, base);
          address = kit->gvn().transform(address);
        } else {
          assert(address == adr->in(AddPNode::Base), "unexpected address shape");
          address = base;
        }
        adr = adr->clone();
        adr->set_req(AddPNode::Base, base);
        adr->set_req(AddPNode::Address, address);
        adr = kit->gvn().transform(adr);
        access.addr().set_node(adr);
      }
    }
  }
}

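// For oop stores into the heap (and unknown-oop stores), apply the storeval
// barrier to the new value and emit the SATB pre-barrier on the previous
// value before delegating to the generic BarrierSetC2 store.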
Node* ShenandoahBarrierSetC2::store_at_resolved(C2Access& access, C2AccessValue& val) const {
  DecoratorSet decorators = access.decorators();
  GraphKit* kit = access.kit();

  const TypePtr* adr_type = access.addr().type();
  Node* adr = access.addr().node();

  bool anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
  bool on_heap = (decorators & IN_HEAP) != 0;

  if (!access.is_oop() || (!on_heap && !anonymous)) {
    return BarrierSetC2::store_at_resolved(access, val);
  }

  uint adr_idx = kit->C->get_alias_index(adr_type);
  assert(adr_idx != Compile::AliasIdxTop, "use other store_to_memory factory");
  Node* value = val.node();
  value = shenandoah_storeval_barrier(kit, value);
  val.set_node(value);
  shenandoah_write_barrier_pre(kit, true /* do_load */, /*kit->control(),*/ access.base(), adr, adr_idx, val.node(),
                               static_cast<const TypeOopPtr*>(val.type()), NULL /* pre_val */, access.type());
  return BarrierSetC2::store_at_resolved(access, val);
}

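// Emit the generic load; oop loads of Reference referents may additionally
// need a keep-alive (SATB) barrier, handled below.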
Node* ShenandoahBarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) const {
  DecoratorSet decorators = access.decorators();
  GraphKit* kit = access.kit();

  Node* adr = access.addr().node();
  Node* obj = access.base();

  bool mismatched = (decorators & C2_MISMATCHED) != 0;
  bool unknown = (decorators & ON_UNKNOWN_OOP_REF) != 0;
  bool on_heap = (decorators & IN_HEAP) != 0;
  bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
  bool is_unordered = (decorators & MO_UNORDERED) != 0;
  bool need_cpu_mem_bar = !is_unordered || mismatched || !on_heap;

  Node* offset = adr->is_AddP() ? adr->in(AddPNode::Offset) : kit->top();
  Node* load = BarrierSetC2::load_at_resolved(access, val_type);

  // If we are reading the value of the referent field of a Reference
  // object (either by using Unsafe directly or through reflection)
  // then, if SATB is enabled, we need to record the referent in an
  // SATB log buffer using the pre-barrier mechanism.
  // We also need to add a memory barrier to prevent commoning reads
  // from this field across safepoints, since GC can change its value.
  bool need_read_barrier = ShenandoahKeepAliveBarrier &&
    (on_heap && (on_weak || (unknown && offset != kit->top() && obj != kit->top())));

  if (!access.is_oop() || !need_read_barrier) {
    return load;
  }

  if (on_weak) {
    // Use the pre-barrier to record the value in the referent field
    satb_write_barrier_pre(kit, false /* do_load */,
                           NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
                           load /* pre_val */, T_OBJECT);
    // Add a memory barrier to prevent commoning reads from this field
    // across safepoints, since GC can change its value.
    kit->insert_mem_bar(Op_MemBarCPUOrder);
  } else if (unknown) {
    // We do not require a mem bar inside pre_barrier if need_cpu_mem_bar
    // is set: the barriers would be emitted by us.
    insert_pre_barrier(kit, obj, offset, load, !need_cpu_mem_bar);
  }

  return load;
}

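// CAS of an oop field returning the old value: storeval-barrier the new value,
// SATB pre-barrier the expected old value, and emit the Shenandoah-specific
// CompareAndExchange node (narrow-oop encoded with compressed oops on 64-bit).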
Node* ShenandoahBarrierSetC2::atomic_cmpxchg_val_at_resolved(C2AtomicAccess& access, Node* expected_val,
                                                             Node* new_val, const Type* value_type) const {
  GraphKit* kit = access.kit();
  if (access.is_oop()) {
    new_val = shenandoah_storeval_barrier(kit, new_val);
    shenandoah_write_barrier_pre(kit, false /* do_load */,
                                 NULL, NULL, max_juint, NULL, NULL,
                                 expected_val /* pre_val */, T_OBJECT);

    MemNode::MemOrd mo = access.mem_node_mo();
    Node* mem = access.memory();
    Node* adr = access.addr().node();
    const TypePtr* adr_type = access.addr().type();
    Node* load_store = NULL;

#ifdef _LP64
    if (adr->bottom_type()->is_ptr_to_narrowoop()) {
      Node* newval_enc = kit->gvn().transform(new EncodePNode(new_val, new_val->bottom_type()->make_narrowoop()));
      Node* oldval_enc = kit->gvn().transform(new EncodePNode(expected_val, expected_val->bottom_type()->make_narrowoop()));
      load_store = kit->gvn().transform(new ShenandoahCompareAndExchangeNNode(kit->control(), mem, adr, newval_enc, oldval_enc, adr_type, value_type->make_narrowoop(), mo));
    } else
#endif
    {
      load_store = kit->gvn().transform(new ShenandoahCompareAndExchangePNode(kit->control(), mem, adr, new_val, expected_val, adr_type, value_type->is_oopptr(), mo));
    }

    access.set_raw_access(load_store);
    pin_atomic_op(access);

#ifdef _LP64
    if (adr->bottom_type()->is_ptr_to_narrowoop()) {
      return kit->gvn().transform(new DecodeNNode(load_store, load_store->get_ptr_type()));
    }
#endif
    return load_store;
  }
  return BarrierSetC2::atomic_cmpxchg_val_at_resolved(access, expected_val, new_val, value_type);
}

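// Same as above, but the CAS only reports success/failure, so emit the
// (weak) CompareAndSwap flavor of the Shenandoah nodes instead.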
Node* ShenandoahBarrierSetC2::atomic_cmpxchg_bool_at_resolved(C2AtomicAccess& access, Node* expected_val,
                                                              Node* new_val, const Type* value_type) const {
  GraphKit* kit = access.kit();
  if (access.is_oop()) {
    new_val = shenandoah_storeval_barrier(kit, new_val);
    shenandoah_write_barrier_pre(kit, false /* do_load */,
                                 NULL, NULL, max_juint, NULL, NULL,
                                 expected_val /* pre_val */, T_OBJECT);
    DecoratorSet decorators = access.decorators();
    MemNode::MemOrd mo = access.mem_node_mo();
    Node* mem = access.memory();
    bool is_weak_cas = (decorators & C2_WEAK_CMPXCHG) != 0;
    Node* load_store = NULL;
    Node* adr = access.addr().node();
#ifdef _LP64
    if (adr->bottom_type()->is_ptr_to_narrowoop()) {
      Node* newval_enc = kit->gvn().transform(new EncodePNode(new_val, new_val->bottom_type()->make_narrowoop()));
      Node* oldval_enc = kit->gvn().transform(new EncodePNode(expected_val, expected_val->bottom_type()->make_narrowoop()));
      if (is_weak_cas) {
        load_store = kit->gvn().transform(new ShenandoahWeakCompareAndSwapNNode(kit->control(), mem, adr, newval_enc, oldval_enc, mo));
      } else {
        load_store = kit->gvn().transform(new ShenandoahCompareAndSwapNNode(kit->control(), mem, adr, newval_enc, oldval_enc, mo));
      }
    } else
#endif
    {
      if (is_weak_cas) {
        load_store = kit->gvn().transform(new ShenandoahWeakCompareAndSwapPNode(kit->control(), mem, adr, new_val, expected_val, mo));
      } else {
        load_store = kit->gvn().transform(new ShenandoahCompareAndSwapPNode(kit->control(), mem, adr, new_val, expected_val, mo));
      }
    }
    access.set_raw_access(load_store);
    pin_atomic_op(access);
    return load_store;
  }
  return BarrierSetC2::atomic_cmpxchg_bool_at_resolved(access, expected_val, new_val, value_type);
}

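// Atomic swap of an oop field: storeval-barrier the new value before the
// exchange and SATB pre-barrier the returned old value afterwards.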
Node* ShenandoahBarrierSetC2::atomic_xchg_at_resolved(C2AtomicAccess& access, Node* val, const Type* value_type) const {
  GraphKit* kit = access.kit();
  if (access.is_oop()) {
    val = shenandoah_storeval_barrier(kit, val);
  }
  Node* result = BarrierSetC2::atomic_xchg_at_resolved(access, val, value_type);
  if (access.is_oop()) {
    shenandoah_write_barrier_pre(kit, false /* do_load */,
                                 NULL, NULL, max_juint, NULL, NULL,
                                 result /* pre_val */, T_OBJECT);
  }
  return result;
}

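// Resolve a raw oop through a Shenandoah barrier: a write barrier for write
// accesses, a read barrier otherwise.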
Node* ShenandoahBarrierSetC2::resolve(GraphKit* kit, Node* n, DecoratorSet decorators) const {
  bool is_write = (decorators & ACCESS_WRITE) != 0;
  if (is_write) {
    return shenandoah_write_barrier(kit, n);
  } else {
    return shenandoah_read_barrier(kit, n);
  }
}

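// For reference comparisons (acmp) with ShenandoahAcmpBarrier enabled:
// unless either operand is statically known to be NULL, barrier both sides
// so the comparison sees consistent copies of the objects.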
void ShenandoahBarrierSetC2::resolve_for_obj_equals(GraphKit* kit, Node*& a, Node*& b) const {
  if (ShenandoahAcmpBarrier) {
    const Type* a_type = a->bottom_type();
    const Type* b_type = b->bottom_type();
    if (!a_type->make_ptr()->higher_equal(TypePtr::NULL_PTR) &&
        !b_type->make_ptr()->higher_equal(TypePtr::NULL_PTR)) {
      a = shenandoah_write_barrier(kit, a);
      b = shenandoah_write_barrier(kit, b);
    }
  }
}