src/share/vm/opto/library_call.cpp

rev 7391 : 8077504: Unsafe load can loose control dependency and cause crash
Summary: Node::depends_only_on_test() should return false for Unsafe loads
Reviewed-by: kvn, adinn

*** 2667,2677 ****
    // around 5701, class sun/reflect/UnsafeBooleanFieldAccessorImpl.
    if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);

    if (!is_store) {
      MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
!     Node* p = make_load(control(), adr, value_type, type, adr_type, mo, is_volatile);
      // load value
      switch (type) {
      case T_BOOLEAN:
      case T_CHAR:
      case T_BYTE:
--- 2667,2679 ----
    // around 5701, class sun/reflect/UnsafeBooleanFieldAccessorImpl.
    if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);

    if (!is_store) {
      MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
!     // To be valid, unsafe loads may depend on other conditions than
!     // the one that guards them: pin the Load node
!     Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
      // load value
      switch (type) {
      case T_BOOLEAN:
      case T_CHAR:
      case T_BYTE:
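The LoadNode::Pinned value passed to make_load above comes from the memnode side of this change, which is not part of this file: loads carry a control-dependency kind, and depends_only_on_test() no longer returns true unconditionally. A minimal standalone sketch of that idea, using a simplified stand-in class rather than the actual HotSpot LoadNode/MemNode hierarchy:

// Minimal standalone sketch (assumed shape, not the verbatim memnode.hpp
// change): a load records how strongly it is tied to its control input,
// and depends_only_on_test() reports it.
#include <cassert>

struct LoadNodeSketch {
  enum ControlDependency {
    Pinned,            // e.g. unsafe loads: never move above the current control
    DependsOnlyOnTest  // ordinary loads: may float to a dominating, equivalent test
  };

  explicit LoadNodeSketch(ControlDependency dep) : _control_dependency(dep) {}

  // The optimizer may replace the control input by a dominating,
  // equivalent test only when this returns true.
  bool depends_only_on_test() const {
    return _control_dependency == DependsOnlyOnTest;
  }

 private:
  ControlDependency _control_dependency;
};

int main() {
  LoadNodeSketch unsafe_load(LoadNodeSketch::Pinned);
  LoadNodeSketch field_load(LoadNodeSketch::DependsOnlyOnTest);
  assert(!unsafe_load.depends_only_on_test());  // unsafe load stays pinned
  assert(field_load.depends_only_on_test());    // normal load may still float
  return 0;
}

With the dependency kind consulted this way, a load created as Pinned keeps the control edge of the guard it was parsed under, which is exactly what the comment added above asks for.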
*** 6036,6046 ****
    if (support_IRIW_for_not_multiple_copy_atomic_cpu && is_vol) {
      insert_mem_bar(Op_MemBarVolatile);   // StoreLoad barrier
    }
    // Build the load.
    MemNode::MemOrd mo = is_vol ? MemNode::acquire : MemNode::unordered;
!   Node* loadedField = make_load(NULL, adr, type, bt, adr_type, mo, is_vol);
    // If reference is volatile, prevent following memory ops from
    // floating up past the volatile read.  Also prevents commoning
    // another volatile read.
    if (is_vol) {
      // Memory barrier includes bogus read of value to force load BEFORE membar
--- 6038,6048 ----
    if (support_IRIW_for_not_multiple_copy_atomic_cpu && is_vol) {
      insert_mem_bar(Op_MemBarVolatile);   // StoreLoad barrier
    }
    // Build the load.
    MemNode::MemOrd mo = is_vol ? MemNode::acquire : MemNode::unordered;
!   Node* loadedField = make_load(NULL, adr, type, bt, adr_type, mo, LoadNode::DependsOnlyOnTest, is_vol);
    // If reference is volatile, prevent following memory ops from
    // floating up past the volatile read.  Also prevents commoning
    // another volatile read.
    if (is_vol) {
      // Memory barrier includes bogus read of value to force load BEFORE membar
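The ordinary field load above passes LoadNode::DependsOnlyOnTest explicitly, keeping the historical floating behaviour; only the unsafe-access path asks to be pinned. A plausible reading (assumed here, since graphKit.hpp is not part of this file) is that make_load gained an extra control-dependency parameter defaulting to DependsOnlyOnTest, so untouched call sites behave as before. A toy, hypothetical illustration of that defaulted-parameter pattern:

// Toy illustration only: hypothetical stand-ins for MemNode::MemOrd and
// LoadNode::ControlDependency, showing why a defaulted parameter leaves
// existing make_load callers unchanged.
#include <iostream>

enum MemOrd { unordered, acquire };
enum ControlDependency { Pinned, DependsOnlyOnTest };

// Assumed shape of the updated helper: the new argument defaults to
// DependsOnlyOnTest, so legacy call sites compile and behave as before.
void make_load_sketch(MemOrd mo, ControlDependency dep = DependsOnlyOnTest) {
  std::cout << (dep == Pinned ? "pinned load" : "floating load")
            << (mo == acquire ? " (acquire)" : " (unordered)") << "\n";
}

int main() {
  make_load_sketch(unordered);        // untouched call site: old behaviour
  make_load_sketch(acquire, Pinned);  // unsafe-access path: pinned load
  return 0;
}

Either way, the semantics of this particular field-load call site are unchanged by the patch.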