< prev index next >

src/hotspot/share/gc/shared/c1/barrierSetC1.cpp

Print this page
rev 56016 : 8229422: Taskqueue: Outdated selection of weak memory model platforms
Reviewed-by:


 142   bool is_volatile = (((decorators & MO_SEQ_CST) != 0) || AlwaysAtomicAccesses);
 143   bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
 144   bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
 145   LIRGenerator* gen = access.gen();
 146 
 147   if (mask_boolean) {
 148     value = gen->mask_boolean(access.base().opr(), value, access.access_emit_info());
 149   }
 150 
 151   if (is_volatile) {
 152     __ membar_release();
 153   }
 154 
 155   LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
 156   if (is_volatile && !needs_patching) {
 157     gen->volatile_field_store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info());
 158   } else {
 159     __ store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info(), patch_code);
 160   }
 161 
 162   if (is_volatile && !support_IRIW_for_not_multiple_copy_atomic_cpu) {
 163     __ membar();
 164   }
 165 }
 166 
 167 void BarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
 168   LIRGenerator *gen = access.gen();
 169   DecoratorSet decorators = access.decorators();
 170   bool is_volatile = (((decorators & MO_SEQ_CST) != 0) || AlwaysAtomicAccesses);
 171   bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
 172   bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
 173   bool in_native = (decorators & IN_NATIVE) != 0;
 174 
 175   if (support_IRIW_for_not_multiple_copy_atomic_cpu && is_volatile) {
 176     __ membar();
 177   }
 178 
 179   LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
 180   if (in_native) {
 181     __ move_wide(access.resolved_addr()->as_address_ptr(), result);
 182   } else if (is_volatile && !needs_patching) {
 183     gen->volatile_field_load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info());
 184   } else {
 185     __ load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info(), patch_code);
 186   }
 187 
 188   if (is_volatile) {
 189     __ membar_acquire();
 190   }
 191 
 192   /* Normalize the boolean value returned by the unsafe operation, i.e., value != 0 ? value = true : value = false. */
 193   if (mask_boolean) {
 194     LabelObj* equalZeroLabel = new LabelObj();
 195     __ cmp(lir_cond_equal, result, 0);




 142   bool is_volatile = (((decorators & MO_SEQ_CST) != 0) || AlwaysAtomicAccesses);
 143   bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
 144   bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
 145   LIRGenerator* gen = access.gen();
 146 
 147   if (mask_boolean) {
 148     value = gen->mask_boolean(access.base().opr(), value, access.access_emit_info());
 149   }
 150 
 151   if (is_volatile) {
 152     __ membar_release();
 153   }
 154 
 155   LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
 156   if (is_volatile && !needs_patching) {
 157     gen->volatile_field_store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info());
 158   } else {
 159     __ store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info(), patch_code);
 160   }
 161 
 162   if (is_volatile && !SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU) {
 163     __ membar();
 164   }
 165 }
 166 
 167 void BarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
 168   LIRGenerator *gen = access.gen();
 169   DecoratorSet decorators = access.decorators();
 170   bool is_volatile = (((decorators & MO_SEQ_CST) != 0) || AlwaysAtomicAccesses);
 171   bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
 172   bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
 173   bool in_native = (decorators & IN_NATIVE) != 0;
 174 
 175   if (SUPPORT_IRIW_FOR_NOT_MULTI_COPY_ATOMIC_CPU && is_volatile) {
 176     __ membar();
 177   }
 178 
 179   LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
 180   if (in_native) {
 181     __ move_wide(access.resolved_addr()->as_address_ptr(), result);
 182   } else if (is_volatile && !needs_patching) {
 183     gen->volatile_field_load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info());
 184   } else {
 185     __ load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info(), patch_code);
 186   }
 187 
 188   if (is_volatile) {
 189     __ membar_acquire();
 190   }
 191 
 192   /* Normalize the boolean value returned by the unsafe operation, i.e., value != 0 ? value = true : value = false. */
 193   if (mask_boolean) {
 194     LabelObj* equalZeroLabel = new LabelObj();
 195     __ cmp(lir_cond_equal, result, 0);


< prev index next >