
src/hotspot/share/gc/shenandoah/c2/shenandoahBarrierSetC2.cpp

rev 58543 : 8241605: Shenandoah: More aggressive reference discovery


 572     // If we are reading the value of the referent field of a Reference
 573     // object (either by using Unsafe directly or through reflection)
 574     // then, if SATB is enabled, we need to record the referent in an
 575     // SATB log buffer using the pre-barrier mechanism.
 576     // Also we need to add memory barrier to prevent commoning reads
 577     // from this field across safepoint since GC can change its value.
 578     if (!on_weak_ref || (unknown && (offset == top || obj == top)) || !keep_alive) {
 579       return load;
 580     }
 581 
 582     assert(access.is_parse_access(), "entry not supported at optimization time");
 583     C2ParseAccess& parse_access = static_cast<C2ParseAccess&>(access);
 584     GraphKit* kit = parse_access.kit();
 585     bool mismatched = (decorators & C2_MISMATCHED) != 0;
 586     bool is_unordered = (decorators & MO_UNORDERED) != 0;
 587     bool in_native = (decorators & IN_NATIVE) != 0;
 588     bool need_cpu_mem_bar = !is_unordered || mismatched || in_native;
 589 
 590     if (on_weak_ref) {
 591       // Use the pre-barrier to record the value in the referent field
 592       satb_write_barrier_pre(kit, false /* do_load */,
 593                              NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
 594                              load /* pre_val */, T_OBJECT);
 595       // Add memory barrier to prevent commoning reads from this field
 596       // across safepoint since GC can change its value.
 597       kit->insert_mem_bar(Op_MemBarCPUOrder);
 598     } else if (unknown) {
 599       // We do not require a mem bar inside pre_barrier if need_mem_bar
 600       // is set: the barriers would be emitted by us.
 601       insert_pre_barrier(kit, obj, offset, load, !need_cpu_mem_bar);
 602     }
 603   }
 604 
 605   return load;
 606 }
 607 
 608 Node* ShenandoahBarrierSetC2::atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
 609                                                    Node* new_val, const Type* value_type) const {
 610   GraphKit* kit = access.kit();
 611   if (access.is_oop()) {
 612     new_val = shenandoah_storeval_barrier(kit, new_val);
 613     shenandoah_write_barrier_pre(kit, false /* do_load */,
 614                                  NULL, NULL, max_juint, NULL, NULL,




 572     // If we are reading the value of the referent field of a Reference
 573     // object (either by using Unsafe directly or through reflection)
 574     // then, if SATB is enabled, we need to record the referent in an
 575     // SATB log buffer using the pre-barrier mechanism.
 576     // Also we need to add memory barrier to prevent commoning reads
 577     // from this field across safepoint since GC can change its value.
 578     if (!on_weak_ref || (unknown && (offset == top || obj == top)) || !keep_alive) {
 579       return load;
 580     }
 581 
 582     assert(access.is_parse_access(), "entry not supported at optimization time");
 583     C2ParseAccess& parse_access = static_cast<C2ParseAccess&>(access);
 584     GraphKit* kit = parse_access.kit();
 585     bool mismatched = (decorators & C2_MISMATCHED) != 0;
 586     bool is_unordered = (decorators & MO_UNORDERED) != 0;
 587     bool in_native = (decorators & IN_NATIVE) != 0;
 588     bool need_cpu_mem_bar = !is_unordered || mismatched || in_native;
 589 
 590     if (on_weak_ref) {
 591       // Use the pre-barrier to record the value in the referent field
 592       if (ShenandoahAggressiveReferenceDiscovery) {
 593         load = shenandoah_enqueue_barrier(kit, load);
 594       } else {
 595         satb_write_barrier_pre(kit, false /* do_load */,
 596                                NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */,
 597                                NULL /* val_type */,
 598                                load /* pre_val */, T_OBJECT);
 599       }
 600       // Add memory barrier to prevent commoning reads from this field
 601       // across safepoint since GC can change its value.
 602       kit->insert_mem_bar(Op_MemBarCPUOrder);
 603     } else if (unknown) {
 604       // We do not require a mem bar inside pre_barrier if need_mem_bar
 605       // is set: the barriers would be emitted by us.
 606       insert_pre_barrier(kit, obj, offset, load, !need_cpu_mem_bar);
 607     }
 608   }
 609 
 610   return load;
 611 }
 612 
 613 Node* ShenandoahBarrierSetC2::atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
 614                                                    Node* new_val, const Type* value_type) const {
 615   GraphKit* kit = access.kit();
 616   if (access.is_oop()) {
 617     new_val = shenandoah_storeval_barrier(kit, new_val);
 618     shenandoah_write_barrier_pre(kit, false /* do_load */,
 619                                  NULL, NULL, max_juint, NULL, NULL,
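
Note on the change above: in the old version the Reference.referent load on the weak-reference path always went through satb_write_barrier_pre, so the loaded referent was logged in the SATB buffer and thereby kept alive; with ShenandoahAggressiveReferenceDiscovery enabled, the new version instead passes the loaded value through shenandoah_enqueue_barrier. The standalone C++ sketch below only models that branch selection. BarrierChoice, ReferentLoad and choose_referent_barrier are hypothetical names invented here for illustration, the unknown-offset (Top) checks from line 578 are omitted, and the real code operates on C2 ideal-graph nodes through GraphKit rather than returning an enum.

// Standalone model of the barrier selection on the referent-load path shown
// in the diff above. All identifiers are illustrative stand-ins, not HotSpot
// names; only the branching mirrors lines 578 and 590-602 of the new version.
#include <cstdio>

enum class BarrierChoice {
  None,            // not a weak-reference load, or keep-alive not requested
  SatbPreBarrier,  // default path: log the loaded referent via the SATB pre-barrier
  EnqueueBarrier   // ShenandoahAggressiveReferenceDiscovery: enqueue the loaded value
};

struct ReferentLoad {
  bool on_weak_ref;  // load of Reference.referent
  bool keep_alive;   // caller requires keep-alive semantics
  bool aggressive;   // ShenandoahAggressiveReferenceDiscovery flag
};

BarrierChoice choose_referent_barrier(const ReferentLoad& l) {
  // Simplified guard from line 578; the unknown-offset (Top) checks are omitted.
  if (!l.on_weak_ref || !l.keep_alive) {
    return BarrierChoice::None;
  }
  // New-version branch at lines 592-598; in both barrier cases the real code
  // also inserts a MemBarCPUOrder so the read cannot be commoned across a
  // safepoint, where the GC may change the field's value.
  return l.aggressive ? BarrierChoice::EnqueueBarrier
                      : BarrierChoice::SatbPreBarrier;
}

int main() {
  ReferentLoad with_flag{true, true, true};
  ReferentLoad without_flag{true, true, false};
  std::printf("aggressive=%d default=%d\n",
              static_cast<int>(choose_referent_barrier(with_flag)),
              static_cast<int>(choose_referent_barrier(without_flag)));
  return 0;
}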

