--- old/src/share/vm/c1/c1_LIRGenerator.cpp	2014-02-11 12:51:01.488558055 +0400
+++ new/src/share/vm/c1/c1_LIRGenerator.cpp	2014-02-11 12:51:01.380558053 +0400
@@ -1734,7 +1734,8 @@
                 (info ? new CodeEmitInfo(info) : NULL));
   }
 
-  if (is_volatile && !needs_patching) {
+  bool needs_atomic_access = is_volatile || AlwaysAtomicAccesses;
+  if (needs_atomic_access && !needs_patching) {
     volatile_field_store(value.result(), address, info);
   } else {
     LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
@@ -1807,7 +1808,8 @@
     address = generate_address(object.result(), x->offset(), field_type);
   }
 
-  if (is_volatile && !needs_patching) {
+  bool needs_atomic_access = is_volatile || AlwaysAtomicAccesses;
+  if (needs_atomic_access && !needs_patching) {
     volatile_field_load(address, reg, info);
   } else {
     LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
--- old/src/share/vm/opto/parse3.cpp	2014-02-11 12:51:01.996558067 +0400
+++ new/src/share/vm/opto/parse3.cpp	2014-02-11 12:51:01.884558065 +0400
@@ -233,7 +233,8 @@
   // Build the load.
   //
   MemNode::MemOrd mo = is_vol ? MemNode::acquire : MemNode::unordered;
-  Node* ld = make_load(NULL, adr, type, bt, adr_type, mo, is_vol);
+  bool needs_atomic_access = is_vol || AlwaysAtomicAccesses;
+  Node* ld = make_load(NULL, adr, type, bt, adr_type, mo, needs_atomic_access);
 
   // Adjust Java stack
   if (type2size[bt] == 1)
@@ -314,7 +315,8 @@
     }
     store = store_oop_to_object(control(), obj, adr, adr_type, val, field_type, bt, mo);
   } else {
-    store = store_to_memory(control(), adr, val, bt, adr_type, mo, is_vol);
+    bool needs_atomic_access = is_vol || AlwaysAtomicAccesses;
+    store = store_to_memory(control(), adr, val, bt, adr_type, mo, needs_atomic_access);
   }
 
   // If reference is volatile, prevent following volatiles ops from
--- old/src/share/vm/runtime/globals.hpp	2014-02-11 12:51:02.464558078 +0400
+++ new/src/share/vm/runtime/globals.hpp	2014-02-11 12:51:02.348558076 +0400
@@ -3859,6 +3859,9 @@
           "Allocation less than this value will be allocated "             \
           "using malloc. Larger allocations will use mmap.")               \
                                                                            \
+  experimental(bool, AlwaysAtomicAccesses, false,                          \
+          "Accesses to all variables should always be atomic")             \
+                                                                           \
   product(bool, EnableTracing, false,                                      \
           "Enable event-based tracing")                                    \
                                                                            \
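
For context, all four compiler-side hunks above introduce the same test: a field
access is compiled through the atomic code path when the field is volatile or when
the new experimental flag is set (the C1 hunks additionally require that the access
needs no patching, and the C2 hunks pass the value as the final atomic-access
argument of make_load / store_to_memory). A minimal C++ sketch of that shared
condition; the helper name is purely illustrative and not part of the patch, which
inlines the expression at each access site:

#include "runtime/globals.hpp"  // declares the AlwaysAtomicAccesses flag added above

// Illustrative helper only -- the patch writes this expression inline in
// c1_LIRGenerator.cpp and parse3.cpp rather than factoring it out.
static bool needs_atomic_field_access(bool is_volatile) {
  // Volatile fields were already emitted atomically; with
  // -XX:+AlwaysAtomicAccesses, non-volatile fields are treated the same way.
  return is_volatile || AlwaysAtomicAccesses;
}

Because the flag is declared with the experimental() macro, enabling it on a
product build presumably also requires -XX:+UnlockExperimentalVMOptions before
-XX:+AlwaysAtomicAccesses takes effect.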