--- old/src/share/vm/opto/library_call.cpp	2016-10-24 05:16:05.091086309 -0700
+++ new/src/share/vm/opto/library_call.cpp	2016-10-24 05:16:04.930085338 -0700
@@ -230,7 +230,7 @@
   // Generates the guards that check whether the result of
   // Unsafe.getObject should be recorded in an SATB log buffer.
   void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
-  bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
+  bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool is_unaligned);
   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
   static bool klass_needs_init_guard(Node* kls);
   bool inline_unsafe_allocate();
@@ -795,63 +795,63 @@
   case vmIntrinsics::_indexOf:            return inline_string_indexOf();
   case vmIntrinsics::_equals:             return inline_string_equals();
 
-  case vmIntrinsics::_getObject:          return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,  !is_volatile);
-  case vmIntrinsics::_getBoolean:         return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile);
-  case vmIntrinsics::_getByte:            return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,    !is_volatile);
-  case vmIntrinsics::_getShort:           return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile);
-  case vmIntrinsics::_getChar:            return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile);
-  case vmIntrinsics::_getInt:             return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile);
-  case vmIntrinsics::_getLong:            return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile);
-  case vmIntrinsics::_getFloat:           return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,   !is_volatile);
-  case vmIntrinsics::_getDouble:          return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,  !is_volatile);
-
-  case vmIntrinsics::_putObject:          return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,  !is_volatile);
-  case vmIntrinsics::_putBoolean:         return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN, !is_volatile);
-  case vmIntrinsics::_putByte:            return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,    !is_volatile);
-  case vmIntrinsics::_putShort:           return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile);
-  case vmIntrinsics::_putChar:            return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile);
-  case vmIntrinsics::_putInt:             return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile);
-  case vmIntrinsics::_putLong:            return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile);
-  case vmIntrinsics::_putFloat:           return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,   !is_volatile);
-  case vmIntrinsics::_putDouble:          return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,  !is_volatile);
-
-  case vmIntrinsics::_getByte_raw:        return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,    !is_volatile);
-  case vmIntrinsics::_getShort_raw:       return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,   !is_volatile);
-  case vmIntrinsics::_getChar_raw:        return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,    !is_volatile);
-  case vmIntrinsics::_getInt_raw:         return inline_unsafe_access( is_native_ptr, !is_store, T_INT,     !is_volatile);
-  case vmIntrinsics::_getLong_raw:        return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,    !is_volatile);
-  case vmIntrinsics::_getFloat_raw:       return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,   !is_volatile);
-  case vmIntrinsics::_getDouble_raw:      return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,  !is_volatile);
-  case vmIntrinsics::_getAddress_raw:     return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile);
-
-  case vmIntrinsics::_putByte_raw:        return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,    !is_volatile);
-  case vmIntrinsics::_putShort_raw:       return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,   !is_volatile);
-  case vmIntrinsics::_putChar_raw:        return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,    !is_volatile);
-  case vmIntrinsics::_putInt_raw:         return inline_unsafe_access( is_native_ptr,  is_store, T_INT,     !is_volatile);
-  case vmIntrinsics::_putLong_raw:        return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,    !is_volatile);
-  case vmIntrinsics::_putFloat_raw:       return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,   !is_volatile);
-  case vmIntrinsics::_putDouble_raw:      return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,  !is_volatile);
-  case vmIntrinsics::_putAddress_raw:     return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS, !is_volatile);
-
-  case vmIntrinsics::_getObjectVolatile:  return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   is_volatile);
-  case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  is_volatile);
-  case vmIntrinsics::_getByteVolatile:    return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     is_volatile);
-  case vmIntrinsics::_getShortVolatile:   return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    is_volatile);
-  case vmIntrinsics::_getCharVolatile:    return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     is_volatile);
-  case vmIntrinsics::_getIntVolatile:     return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      is_volatile);
-  case vmIntrinsics::_getLongVolatile:    return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     is_volatile);
-  case vmIntrinsics::_getFloatVolatile:   return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    is_volatile);
-  case vmIntrinsics::_getDoubleVolatile:  return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   is_volatile);
-
-  case vmIntrinsics::_putObjectVolatile:  return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   is_volatile);
-  case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  is_volatile);
-  case vmIntrinsics::_putByteVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     is_volatile);
-  case vmIntrinsics::_putShortVolatile:   return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    is_volatile);
-  case vmIntrinsics::_putCharVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     is_volatile);
-  case vmIntrinsics::_putIntVolatile:     return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      is_volatile);
-  case vmIntrinsics::_putLongVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     is_volatile);
-  case vmIntrinsics::_putFloatVolatile:   return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    is_volatile);
-  case vmIntrinsics::_putDoubleVolatile:  return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   is_volatile);
+  case vmIntrinsics::_getObject:          return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,  !is_volatile, false);
+  case vmIntrinsics::_getBoolean:         return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN, !is_volatile, false);
+  case vmIntrinsics::_getByte:            return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
+  case vmIntrinsics::_getShort:           return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
+  case vmIntrinsics::_getChar:            return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
+  case vmIntrinsics::_getInt:             return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,     !is_volatile, false);
+  case vmIntrinsics::_getLong:            return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
+  case vmIntrinsics::_getFloat:           return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
+  case vmIntrinsics::_getDouble:          return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
+
+  case vmIntrinsics::_putObject:          return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,  !is_volatile, false);
+  case vmIntrinsics::_putBoolean:         return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN, !is_volatile, false);
+  case vmIntrinsics::_putByte:            return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
+  case vmIntrinsics::_putShort:           return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
+  case vmIntrinsics::_putChar:            return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
+  case vmIntrinsics::_putInt:             return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,     !is_volatile, false);
+  case vmIntrinsics::_putLong:            return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
+  case vmIntrinsics::_putFloat:           return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
+  case vmIntrinsics::_putDouble:          return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
+
+  case vmIntrinsics::_getByte_raw:        return inline_unsafe_access( is_native_ptr, !is_store, T_BYTE,    !is_volatile, false);
+  case vmIntrinsics::_getShort_raw:       return inline_unsafe_access( is_native_ptr, !is_store, T_SHORT,   !is_volatile, false);
+  case vmIntrinsics::_getChar_raw:        return inline_unsafe_access( is_native_ptr, !is_store, T_CHAR,    !is_volatile, false);
+  case vmIntrinsics::_getInt_raw:         return inline_unsafe_access( is_native_ptr, !is_store, T_INT,     !is_volatile, false);
+  case vmIntrinsics::_getLong_raw:        return inline_unsafe_access( is_native_ptr, !is_store, T_LONG,    !is_volatile, false);
+  case vmIntrinsics::_getFloat_raw:       return inline_unsafe_access( is_native_ptr, !is_store, T_FLOAT,   !is_volatile, false);
+  case vmIntrinsics::_getDouble_raw:      return inline_unsafe_access( is_native_ptr, !is_store, T_DOUBLE,  !is_volatile, false);
+  case vmIntrinsics::_getAddress_raw:     return inline_unsafe_access( is_native_ptr, !is_store, T_ADDRESS, !is_volatile, false);
+
+  case vmIntrinsics::_putByte_raw:        return inline_unsafe_access( is_native_ptr,  is_store, T_BYTE,    !is_volatile, false);
+  case vmIntrinsics::_putShort_raw:       return inline_unsafe_access( is_native_ptr,  is_store, T_SHORT,   !is_volatile, false);
+  case vmIntrinsics::_putChar_raw:        return inline_unsafe_access( is_native_ptr,  is_store, T_CHAR,    !is_volatile, false);
+  case vmIntrinsics::_putInt_raw:         return inline_unsafe_access( is_native_ptr,  is_store, T_INT,     !is_volatile, false);
+  case vmIntrinsics::_putLong_raw:        return inline_unsafe_access( is_native_ptr,  is_store, T_LONG,    !is_volatile, false);
+  case vmIntrinsics::_putFloat_raw:       return inline_unsafe_access( is_native_ptr,  is_store, T_FLOAT,   !is_volatile, false);
+  case vmIntrinsics::_putDouble_raw:      return inline_unsafe_access( is_native_ptr,  is_store, T_DOUBLE,  !is_volatile, false);
+  case vmIntrinsics::_putAddress_raw:     return inline_unsafe_access( is_native_ptr,  is_store, T_ADDRESS, !is_volatile, false);
+
+  case vmIntrinsics::_getObjectVolatile:  return inline_unsafe_access(!is_native_ptr, !is_store, T_OBJECT,   is_volatile, false);
+  case vmIntrinsics::_getBooleanVolatile: return inline_unsafe_access(!is_native_ptr, !is_store, T_BOOLEAN,  is_volatile, false);
+  case vmIntrinsics::_getByteVolatile:    return inline_unsafe_access(!is_native_ptr, !is_store, T_BYTE,     is_volatile, false);
+  case vmIntrinsics::_getShortVolatile:   return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,    is_volatile, false);
+  case vmIntrinsics::_getCharVolatile:    return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,     is_volatile, false);
+  case vmIntrinsics::_getIntVolatile:     return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,      is_volatile, false);
+  case vmIntrinsics::_getLongVolatile:    return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,     is_volatile, false);
+  case vmIntrinsics::_getFloatVolatile:   return inline_unsafe_access(!is_native_ptr, !is_store, T_FLOAT,    is_volatile, false);
+  case vmIntrinsics::_getDoubleVolatile:  return inline_unsafe_access(!is_native_ptr, !is_store, T_DOUBLE,   is_volatile, false);
+
+  case vmIntrinsics::_putObjectVolatile:  return inline_unsafe_access(!is_native_ptr,  is_store, T_OBJECT,   is_volatile, false);
+  case vmIntrinsics::_putBooleanVolatile: return inline_unsafe_access(!is_native_ptr,  is_store, T_BOOLEAN,  is_volatile, false);
+  case vmIntrinsics::_putByteVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_BYTE,     is_volatile, false);
+  case vmIntrinsics::_putShortVolatile:   return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,    is_volatile, false);
+  case vmIntrinsics::_putCharVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,     is_volatile, false);
+  case vmIntrinsics::_putIntVolatile:     return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,      is_volatile, false);
+  case vmIntrinsics::_putLongVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,     is_volatile, false);
+  case vmIntrinsics::_putFloatVolatile:   return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,    is_volatile, false);
+  case vmIntrinsics::_putDoubleVolatile:  return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE,   is_volatile, false);
 
   case vmIntrinsics::_prefetchRead:       return inline_unsafe_prefetch(!is_native_ptr, !is_store, !is_static);
   case vmIntrinsics::_prefetchWrite:      return inline_unsafe_prefetch(!is_native_ptr,  is_store, !is_static);
@@ -2554,9 +2554,9 @@
   return NULL;
 }
 
-bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
+bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool unaligned) {
   if (callee()->is_static()) return false;  // caller must have the capability!
-
+  assert(type != T_OBJECT || !unaligned, "unaligned access not supported with object type");
 #ifndef PRODUCT
   {
     ResourceMark rm;
@@ -2629,16 +2629,45 @@
     val = is_store ? argument(3) : NULL;
   }
 
-  const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
+  // Can base be NULL? Otherwise, always on-heap access.
+  bool can_access_non_heap = TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop));
 
-  // First guess at the value type.
-  const Type *value_type = Type::get_const_basic_type(type);
+  const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
 
-  // Try to categorize the address.  If it comes up as TypeJavaPtr::BOTTOM,
-  // there was not enough information to nail it down.
+  // Try to categorize the address.
   Compile::AliasType* alias_type = C->alias_type(adr_type);
   assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
 
+  if (alias_type->adr_type() == TypeInstPtr::KLASS ||
+      alias_type->adr_type() == TypeAryPtr::RANGE) {
+    return false; // not supported
+  }
+
+  bool mismatched = false;
+  BasicType bt = alias_type->basic_type();
+  if (bt != T_ILLEGAL) {
+    assert(alias_type->adr_type()->is_oopptr(), "should be on-heap access");
+    if (bt == T_BYTE && adr_type->isa_aryptr()) {
+      // Alias type doesn't differentiate between byte[] and boolean[].
+      // Use address type to get the element type.
+      bt = adr_type->is_aryptr()->elem()->array_element_basic_type();
+    }
+    if (bt == T_ARRAY || bt == T_NARROWOOP) {
+      // accessing an array field with getObject is not a mismatch
+      bt = T_OBJECT;
+    }
+    if ((bt == T_OBJECT) != (type == T_OBJECT)) {
+      // Don't intrinsify mismatched object accesses
+      return false;
+    }
+    mismatched = (bt != type);
+  }
+
+  assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
+
+  // First guess at the value type.
+  const Type *value_type = Type::get_const_basic_type(type);
+
   // We will need memory barriers unless we can determine a unique
   // alias category for this reference.  (Note: If for some reason
   // the barriers get omitted and the unsafe reference begins to "pollute"
@@ -2701,7 +2730,7 @@
     MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
     // To be valid, unsafe loads may depend on other conditions than
     // the one that guards them: pin the Load node
-    Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
+    Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile, unaligned, mismatched);
    // load value
     switch (type) {
     case T_BOOLEAN:
@@ -2747,12 +2776,12 @@
     MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
 
     if (type != T_OBJECT ) {
-      (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile);
+      (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile, unaligned, mismatched);
     } else {
       // Possibly an oop being stored to Java heap or native memory
-      if (!TypePtr::NULL_PTR->higher_equal(_gvn.type(heap_base_oop))) {
+      if (!can_access_non_heap) {
         // oop to Java heap.
-        (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
+        (void) store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
       } else {
         // We can't tell at compile time if we are storing in the Java heap or outside
         // of it. So we need to emit code to conditionally do the proper type of
@@ -2764,11 +2793,11 @@
         __ if_then(heap_base_oop, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
           // Sync IdealKit and graphKit.
           sync_kit(ideal);
-          Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo);
+          Node* st = store_oop_to_unknown(control(), heap_base_oop, adr, adr_type, val, type, mo, mismatched);
          // Update IdealKit memory.
          __ sync_kit(this);
        } __ else_(); {
-          __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile);
+          __ store(__ ctrl(), adr, val, type, alias_type->index(), mo, is_volatile, mismatched);
        } __ end_if();
        // Final sync IdealKit and GraphKit.
        final_sync(ideal);
@@ -2939,12 +2968,6 @@
     newval = argument(4); // type: oop, int, or long
   }
 
-  // Null check receiver.
-  receiver = null_check(receiver);
-  if (stopped()) {
-    return true;
-  }
-
   // Build field offset expression.
   // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
   // to be plain byte offsets, which are also the same as those accepted
@@ -2955,11 +2978,18 @@
   Node* adr = make_unsafe_address(base, offset);
   const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
 
+  Compile::AliasType* alias_type = C->alias_type(adr_type);
+  BasicType bt = alias_type->basic_type();
+  if (bt != T_ILLEGAL &&
+      ((bt == T_OBJECT || bt == T_ARRAY) != (type == T_OBJECT))) {
+    // Don't intrinsify mismatched object accesses.
+    return false;
+  }
+
   // For CAS, unlike inline_unsafe_access, there seems no point in
   // trying to refine types. Just use the coarse types here.
-  const Type *value_type = Type::get_const_basic_type(type);
-  Compile::AliasType* alias_type = C->alias_type(adr_type);
   assert(alias_type->index() != Compile::AliasIdxBot, "no bare pointers here");
+  const Type *value_type = Type::get_const_basic_type(type);
 
   if (kind == LS_xchg && type == T_OBJECT) {
     const TypeOopPtr* tjp = sharpen_unsafe_type(alias_type, adr_type);
@@ -2968,6 +2998,12 @@
     }
   }
 
+  // Null check receiver.
+  receiver = null_check(receiver);
+  if (stopped()) {
+    return true;
+  }
+
   int alias_idx = C->get_alias_index(adr_type);
 
   // Memory-model-wise, a LoadStore acts like a little synchronized
--- old/src/share/vm/opto/type.hpp	2016-10-24 05:16:05.878091050 -0700
+++ new/src/share/vm/opto/type.hpp	2016-10-24 05:16:05.757090321 -0700
@@ -209,11 +209,11 @@
   static int cmp( const Type *const t1, const Type *const t2 );
   // Test for higher or equal in lattice
   // Variant that drops the speculative part of the types
-  int higher_equal(const Type *t) const {
+  bool higher_equal(const Type *t) const {
     return !cmp(meet(t),t->remove_speculative());
   }
   // Variant that keeps the speculative part of the types
-  int higher_equal_speculative(const Type *t) const {
+  bool higher_equal_speculative(const Type *t) const {
     return !cmp(meet_speculative(t),t);
   }
 
--- /dev/null	2016-04-23 10:20:15.433000000 -0700
+++ new/test/compiler/unsafe/OpaqueAccesses.java	2016-10-24 05:16:06.157092731 -0700
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation. Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8155781
+ * @modules java.base/jdk.internal.misc
+ *
+ * @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
+ *                                 -XX:-TieredCompilation -Xbatch
+ *                                 -XX:+UseCompressedOops -XX:+UseCompressedClassPointers
+ *                                 -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
+ *                                 compiler.unsafe.OpaqueAccesses
+ * @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
+ *                                 -XX:-TieredCompilation -Xbatch
+ *                                 -XX:+UseCompressedOops -XX:-UseCompressedClassPointers
+ *                                 -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
+ *                                 compiler.unsafe.OpaqueAccesses
+ * @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
+ *                                 -XX:-TieredCompilation -Xbatch
+ *                                 -XX:-UseCompressedOops -XX:+UseCompressedClassPointers
+ *                                 -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
+ *                                 compiler.unsafe.OpaqueAccesses
+ * @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
+ *                                 -XX:-TieredCompilation -Xbatch
+ *                                 -XX:-UseCompressedOops -XX:-UseCompressedClassPointers
+ *                                 -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
+ *                                 compiler.unsafe.OpaqueAccesses
+ */
+package compiler.unsafe;
+
+import jdk.internal.misc.Unsafe;
+
+import java.lang.reflect.Field;
+
+public class OpaqueAccesses {
+    private static final Unsafe UNSAFE = Unsafe.getUnsafe();
+
+    private static final Object INSTANCE = new OpaqueAccesses();
+
+    private static final Object[] ARRAY = new Object[10];
+
+    private static final long F_OFFSET;
+    private static final long E_OFFSET;
+
+    static {
+        try {
+            Field field = OpaqueAccesses.class.getDeclaredField("f");
+            F_OFFSET = UNSAFE.objectFieldOffset(field);
+
+            E_OFFSET = UNSAFE.arrayBaseOffset(ARRAY.getClass());
+        } catch (NoSuchFieldException e) {
+            throw new Error(e);
+        }
+    }
+
+    private Object f = new Object();
+    private long l1, l2;
+
+    static Object testFixedOffsetField(Object o) {
+        return UNSAFE.getObject(o, F_OFFSET);
+    }
+
+    static int testFixedOffsetHeader0(Object o) {
+        return UNSAFE.getInt(o, 0);
+    }
+
+    static int testFixedOffsetHeader4(Object o) {
+        return UNSAFE.getInt(o, 4);
+    }
+
+    static int testFixedOffsetHeader8(Object o) {
+        return UNSAFE.getInt(o, 8);
+    }
+
+    static int testFixedOffsetHeader12(Object o) {
+        return UNSAFE.getInt(o, 12);
+    }
+
+    static int testFixedOffsetHeader16(Object o) {
+        return UNSAFE.getInt(o, 16);
+    }
+
+    static int testFixedOffsetHeader17(Object o) {
+        return UNSAFE.getIntUnaligned(o, 17);
+    }
+
+    static Object testFixedBase(long off) {
+        return UNSAFE.getObject(INSTANCE, off);
+    }
+
+    static Object testOpaque(Object o, long off) {
+        return UNSAFE.getObject(o, off);
+    }
+
+    static int testFixedOffsetHeaderArray0(Object[] arr) {
+        return UNSAFE.getInt(arr, 0);
+    }
+
+    static int testFixedOffsetHeaderArray4(Object[] arr) {
+        return UNSAFE.getInt(arr, 4);
+    }
+
+    static int testFixedOffsetHeaderArray8(Object[] arr) {
+        return UNSAFE.getInt(arr, 8);
+    }
+
+    static int testFixedOffsetHeaderArray12(Object[] arr) {
+        return UNSAFE.getInt(arr, 12);
12); + } + + static int testFixedOffsetHeaderArray16(Object[] arr) { + return UNSAFE.getInt(arr, 16); + } + + static int testFixedOffsetHeaderArray17(Object[] arr) { + return UNSAFE.getIntUnaligned(arr, 17); + } + + static Object testFixedOffsetArray(Object[] arr) { + return UNSAFE.getObject(arr, E_OFFSET); + } + + static Object testFixedBaseArray(long off) { + return UNSAFE.getObject(ARRAY, off); + } + + static Object testOpaqueArray(Object[] o, long off) { + return UNSAFE.getObject(o, off); + } + + static final long ADDR = UNSAFE.allocateMemory(10); + static boolean flag; + + static int testMixedAccess() { + flag = !flag; + Object o = (flag ? INSTANCE : null); + long off = (flag ? F_OFFSET : ADDR); + return UNSAFE.getInt(o, off); + } + + public static void main(String[] args) { + for (int i = 0; i < 20_000; i++) { + // Instance + testFixedOffsetField(INSTANCE); + testFixedOffsetHeader0(INSTANCE); + testFixedOffsetHeader4(INSTANCE); + testFixedOffsetHeader8(INSTANCE); + testFixedOffsetHeader12(INSTANCE); + testFixedOffsetHeader16(INSTANCE); + testFixedOffsetHeader17(INSTANCE); + testFixedBase(F_OFFSET); + testOpaque(INSTANCE, F_OFFSET); + testMixedAccess(); + + // Array + testFixedOffsetHeaderArray0(ARRAY); + testFixedOffsetHeaderArray4(ARRAY); + testFixedOffsetHeaderArray8(ARRAY); + testFixedOffsetHeaderArray12(ARRAY); + testFixedOffsetHeaderArray16(ARRAY); + testFixedOffsetHeaderArray17(ARRAY); + testFixedOffsetArray(ARRAY); + testFixedBaseArray(E_OFFSET); + testOpaqueArray(ARRAY, E_OFFSET); + } + System.out.println("TEST PASSED"); + } +}