--- old/test/compiler/intrinsics/mathexact/sanity/MathIntrinsic.java	2015-07-14 18:39:24.135577152 +0200
+++ new/test/compiler/intrinsics/mathexact/sanity/MathIntrinsic.java	2015-07-14 18:39:23.799577143 +0200
@@ -29,11 +29,21 @@
     enum IntIntrinsic implements CompilerWhiteBoxTest.TestCase {
         Add {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("addExact", int.class, int.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return intR = Math.addExact(int1, int2);
             }
         },
-        Subtract {
+        Subtract {
+            @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("subtractExact", int.class, int.class);
+            }
+
             @Override
             Object execMathMethod() {
                 return intR = Math.subtractExact(int1, int2);
@@ -41,34 +51,66 @@
         },
         Multiply {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("multiplyExact", int.class, int.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return intR = Math.multiplyExact(int1, int2);
             }
         },
         Increment {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("incrementExact", int.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return intR = Math.incrementExact(int1);
             }
         },
         Decrement {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("decrementExact", int.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return intR = Math.decrementExact(int1);
             }
         },
         Negate {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("negateExact", int.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return intR = Math.negateExact(int1);
             }
         };
+
         protected int int1;
         protected int int2;
         protected int intR;
 
+        abstract Executable testMethod() throws NoSuchMethodException, ClassNotFoundException;
         abstract Object execMathMethod();
 
+        public Executable getTestMethod() {
+            try {
+                return testMethod();
+            } catch (NoSuchMethodException e) {
+                throw new RuntimeException("Test bug, no such method: " + e);
+            } catch (ClassNotFoundException e) {
+                throw new RuntimeException("Test bug, no such class: " + e);
+            }
+        }
+
         @Override
         public Executable getExecutable() {
             try {
@@ -93,36 +135,66 @@
     enum LongIntrinsic implements CompilerWhiteBoxTest.TestCase {
         Add {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("addExact", long.class, long.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return longR = Math.addExact(long1, long2);
             }
         },
         Subtract {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("subtractExact", long.class, long.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return longR = Math.subtractExact(long1, long2);
             }
         },
         Multiply {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("multiplyExact", long.class, long.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return longR = Math.multiplyExact(long1, long2);
             }
         },
         Increment {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("incrementExact", long.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return longR = Math.incrementExact(long1);
             }
         },
         Decrement {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("decrementExact", long.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return longR = Math.decrementExact(long1);
             }
         },
         Negate {
             @Override
+            Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+                return Class.forName("java.lang.Math").getDeclaredMethod("negateExact", long.class);
+            }
+
+            @Override
             Object execMathMethod() {
                 return longR = Math.negateExact(long1);
             }
@@ -131,8 +203,19 @@
         protected long long2;
         protected long longR;
 
+        abstract Executable testMethod() throws NoSuchMethodException, ClassNotFoundException;
         abstract Object execMathMethod();
 
+        public Executable getTestMethod() {
+            try {
+                return testMethod();
+            } catch (NoSuchMethodException e) {
+                throw new RuntimeException("Test bug, no such method: " + e);
+            } catch (ClassNotFoundException e) {
+                throw new RuntimeException("Test bug, no such class: " + e);
+            }
+        }
+
         @Override
         public Executable getExecutable() {
             try {
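Editor's note: the testMethod()/getTestMethod() pair above exists so the test can hand a java.lang.reflect.Executable for each exact-math method to the WhiteBox API. A minimal sketch of that interplay, assuming the sun.hotspot.WhiteBox wrapper used by this patch is on the boot class path (-Xbootclasspath/a:... -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI) and that level 4 denotes C2, as CompilerWhiteBoxTest.COMP_LEVEL_FULL_OPTIMIZATION does:

    import java.lang.reflect.Executable;
    import sun.hotspot.WhiteBox;

    public class IntrinsicAvailabilityDemo {
        private static final WhiteBox WB = WhiteBox.getWhiteBox();

        public static void main(String[] args) throws Exception {
            // Same reflective lookup as IntIntrinsic.Add.testMethod() above.
            Executable addExact = Class.forName("java.lang.Math")
                    .getDeclaredMethod("addExact", int.class, int.class);
            // Ask the VM whether C2 (level 4) provides an intrinsic for
            // Math.addExact(int, int) on this platform and configuration.
            boolean available = WB.isIntrinsicAvailableForMethod(addExact, 4);
            System.out.println("addExact intrinsic available: " + available);
        }
    }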
--- old/src/share/vm/c1/c1_GraphBuilder.cpp	2015-07-14 18:39:24.155577153 +0200
+++ new/src/share/vm/c1/c1_GraphBuilder.cpp	2015-07-14 18:39:23.807577143 +0200
@@ -3372,231 +3372,321 @@
   return NULL;
 }
 
+bool GraphBuilder::is_intrinsic_available_for(Method* method) {
+  vmIntrinsics::ID id = method->intrinsic_id();
+  if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::ID_LIMIT) {
+    return false;
+  }
 
-bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
-  if (callee->is_synchronized()) {
-    // We don't currently support any synchronized intrinsics
+  if (method->is_synchronized()) {
+    // C1 does not support intrinsification of synchronized methods.
     return false;
   }
 
-  // callee seems like a good candidate
-  // determine id
-  vmIntrinsics::ID id = callee->intrinsic_id();
+  // -XX:-InlineNatives disables all intrinsics except
+  // java.lang.ref.Reference::get.
   if (!InlineNatives && id != vmIntrinsics::_Reference_get) {
-    // InlineNatives does not control Reference.get
-    INLINE_BAILOUT("intrinsic method inlining disabled");
+    return false;
   }
 
-  bool preserves_state = false;
-  bool cantrap = true;
-  switch (id) {
-    case vmIntrinsics::_arraycopy:
-      if (!InlineArrayCopy) return false;
-      break;
+  switch (id) {
+  case vmIntrinsics::_arraycopy:
+    if (!InlineArrayCopy) return false;
+    break;
 #ifdef TRACE_HAVE_INTRINSICS
-    case vmIntrinsics::_classID:
-    case vmIntrinsics::_threadID:
-      preserves_state = true;
-      cantrap = true;
-      break;
-
-    case vmIntrinsics::_counterTime:
-      preserves_state = true;
-      cantrap = false;
-      break;
+  case vmIntrinsics::_classID:
+  case vmIntrinsics::_threadID:
+  case vmIntrinsics::_counterTime:
 #endif
-
-    case vmIntrinsics::_currentTimeMillis:
-    case vmIntrinsics::_nanoTime:
-      preserves_state = true;
-      cantrap = false;
-      break;
-
-    case vmIntrinsics::_floatToRawIntBits   :
-    case vmIntrinsics::_intBitsToFloat      :
-    case vmIntrinsics::_doubleToRawLongBits :
-    case vmIntrinsics::_longBitsToDouble    :
-      if (!InlineMathNatives) return false;
-      preserves_state = true;
-      cantrap = false;
-      break;
-
-    case vmIntrinsics::_getClass      :
-    case vmIntrinsics::_isInstance    :
-      if (!InlineClassNatives) return false;
-      preserves_state = true;
-      break;
-
-    case vmIntrinsics::_currentThread :
-      if (!InlineThreadNatives) return false;
-      preserves_state = true;
-      cantrap = false;
-      break;
-
-    case vmIntrinsics::_dabs          : // fall through
-    case vmIntrinsics::_dsqrt         : // fall through
-    case vmIntrinsics::_dsin          : // fall through
-    case vmIntrinsics::_dcos          : // fall through
-    case vmIntrinsics::_dtan          : // fall through
-    case vmIntrinsics::_dlog          : // fall through
-    case vmIntrinsics::_dlog10        : // fall through
-    case vmIntrinsics::_dexp          : // fall through
-    case vmIntrinsics::_dpow          : // fall through
-      if (!InlineMathNatives) return false;
-      cantrap = false;
-      preserves_state = true;
-      break;
-
-    // Use special nodes for Unsafe instructions so we can more easily
-    // perform an address-mode optimization on the raw variants
-    case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT,  false);
-    case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
-    case vmIntrinsics::_getByte   : return append_unsafe_get_obj(callee, T_BYTE,    false);
-    case vmIntrinsics::_getShort  : return append_unsafe_get_obj(callee, T_SHORT,   false);
-    case vmIntrinsics::_getChar   : return append_unsafe_get_obj(callee, T_CHAR,    false);
-    case vmIntrinsics::_getInt    : return append_unsafe_get_obj(callee, T_INT,     false);
-    case vmIntrinsics::_getLong   : return append_unsafe_get_obj(callee, T_LONG,    false);
-    case vmIntrinsics::_getFloat  : return append_unsafe_get_obj(callee, T_FLOAT,   false);
-    case vmIntrinsics::_getDouble : return append_unsafe_get_obj(callee, T_DOUBLE,  false);
-
-    case vmIntrinsics::_putObject : return append_unsafe_put_obj(callee, T_OBJECT,  false);
-    case vmIntrinsics::_putBoolean: return append_unsafe_put_obj(callee, T_BOOLEAN, false);
-    case vmIntrinsics::_putByte   : return append_unsafe_put_obj(callee, T_BYTE,    false);
-    case vmIntrinsics::_putShort  : return append_unsafe_put_obj(callee, T_SHORT,   false);
-    case vmIntrinsics::_putChar   : return append_unsafe_put_obj(callee, T_CHAR,    false);
-    case vmIntrinsics::_putInt    : return append_unsafe_put_obj(callee, T_INT,     false);
-    case vmIntrinsics::_putLong   : return append_unsafe_put_obj(callee, T_LONG,    false);
-    case vmIntrinsics::_putFloat  : return append_unsafe_put_obj(callee, T_FLOAT,   false);
-    case vmIntrinsics::_putDouble : return append_unsafe_put_obj(callee, T_DOUBLE,  false);
-
-    case vmIntrinsics::_getShortUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_SHORT, false) : false;
-    case vmIntrinsics::_getCharUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_CHAR,  false) : false;
-    case vmIntrinsics::_getIntUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_INT,   false) : false;
-    case vmIntrinsics::_getLongUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_get_obj(callee, T_LONG,  false) : false;
-
-    case vmIntrinsics::_putShortUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_SHORT, false) : false;
-    case vmIntrinsics::_putCharUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_CHAR,  false) : false;
-    case vmIntrinsics::_putIntUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_INT,   false) : false;
-    case vmIntrinsics::_putLongUnaligned :
-      return UseUnalignedAccesses ? append_unsafe_put_obj(callee, T_LONG,  false) : false;
-
-    case vmIntrinsics::_getObjectVolatile : return append_unsafe_get_obj(callee, T_OBJECT,  true);
-    case vmIntrinsics::_getBooleanVolatile: return append_unsafe_get_obj(callee, T_BOOLEAN, true);
-    case vmIntrinsics::_getByteVolatile   : return append_unsafe_get_obj(callee, T_BYTE,    true);
-    case vmIntrinsics::_getShortVolatile  : return append_unsafe_get_obj(callee, T_SHORT,   true);
-    case vmIntrinsics::_getCharVolatile   : return append_unsafe_get_obj(callee, T_CHAR,    true);
-    case vmIntrinsics::_getIntVolatile    : return append_unsafe_get_obj(callee, T_INT,     true);
-    case vmIntrinsics::_getLongVolatile   : return append_unsafe_get_obj(callee, T_LONG,    true);
-    case vmIntrinsics::_getFloatVolatile  : return append_unsafe_get_obj(callee, T_FLOAT,   true);
-    case vmIntrinsics::_getDoubleVolatile : return append_unsafe_get_obj(callee, T_DOUBLE,  true);
-
-    case vmIntrinsics::_putObjectVolatile : return append_unsafe_put_obj(callee, T_OBJECT,  true);
-    case vmIntrinsics::_putBooleanVolatile: return append_unsafe_put_obj(callee, T_BOOLEAN, true);
-    case vmIntrinsics::_putByteVolatile   : return append_unsafe_put_obj(callee, T_BYTE,    true);
-    case vmIntrinsics::_putShortVolatile  : return append_unsafe_put_obj(callee, T_SHORT,   true);
-    case vmIntrinsics::_putCharVolatile   : return append_unsafe_put_obj(callee, T_CHAR,    true);
-    case vmIntrinsics::_putIntVolatile    : return append_unsafe_put_obj(callee, T_INT,     true);
-    case vmIntrinsics::_putLongVolatile   : return append_unsafe_put_obj(callee, T_LONG,    true);
-    case vmIntrinsics::_putFloatVolatile  : return append_unsafe_put_obj(callee, T_FLOAT,   true);
-    case vmIntrinsics::_putDoubleVolatile : return append_unsafe_put_obj(callee, T_DOUBLE,  true);
-
-    case vmIntrinsics::_getByte_raw   : return append_unsafe_get_raw(callee, T_BYTE);
-    case vmIntrinsics::_getShort_raw  : return append_unsafe_get_raw(callee, T_SHORT);
-    case vmIntrinsics::_getChar_raw   : return append_unsafe_get_raw(callee, T_CHAR);
-    case vmIntrinsics::_getInt_raw    : return append_unsafe_get_raw(callee, T_INT);
-    case vmIntrinsics::_getLong_raw   : return append_unsafe_get_raw(callee, T_LONG);
-    case vmIntrinsics::_getFloat_raw  : return append_unsafe_get_raw(callee, T_FLOAT);
-    case vmIntrinsics::_getDouble_raw : return append_unsafe_get_raw(callee, T_DOUBLE);
-
-    case vmIntrinsics::_putByte_raw   : return append_unsafe_put_raw(callee, T_BYTE);
-    case vmIntrinsics::_putShort_raw  : return append_unsafe_put_raw(callee, T_SHORT);
-    case vmIntrinsics::_putChar_raw   : return append_unsafe_put_raw(callee, T_CHAR);
-    case vmIntrinsics::_putInt_raw    : return append_unsafe_put_raw(callee, T_INT);
-    case vmIntrinsics::_putLong_raw   : return append_unsafe_put_raw(callee, T_LONG);
-    case vmIntrinsics::_putFloat_raw  : return append_unsafe_put_raw(callee, T_FLOAT);
-    case vmIntrinsics::_putDouble_raw : return append_unsafe_put_raw(callee, T_DOUBLE);
-
-    case vmIntrinsics::_checkIndex    :
-      if (!InlineNIOCheckIndex) return false;
-      preserves_state = true;
-      break;
-    case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT, true);
-    case vmIntrinsics::_putOrderedInt    : return append_unsafe_put_obj(callee, T_INT,    true);
-    case vmIntrinsics::_putOrderedLong   : return append_unsafe_put_obj(callee, T_LONG,   true);
-
-    case vmIntrinsics::_compareAndSwapLong:
-      if (!VM_Version::supports_cx8()) return false;
-      // fall through
-    case vmIntrinsics::_compareAndSwapInt:
-    case vmIntrinsics::_compareAndSwapObject:
-      append_unsafe_CAS(callee);
-      return true;
-
-    case vmIntrinsics::_getAndAddInt:
-      if (!VM_Version::supports_atomic_getadd4()) {
-        return false;
-      }
-      return append_unsafe_get_and_set_obj(callee, true);
-    case vmIntrinsics::_getAndAddLong:
-      if (!VM_Version::supports_atomic_getadd8()) {
-        return false;
-      }
-      return append_unsafe_get_and_set_obj(callee, true);
-    case vmIntrinsics::_getAndSetInt:
-      if (!VM_Version::supports_atomic_getset4()) {
-        return false;
-      }
-      return append_unsafe_get_and_set_obj(callee, false);
-    case vmIntrinsics::_getAndSetLong:
-      if (!VM_Version::supports_atomic_getset8()) {
-        return false;
-      }
-      return append_unsafe_get_and_set_obj(callee, false);
-    case vmIntrinsics::_getAndSetObject:
+  case vmIntrinsics::_currentTimeMillis:
+  case vmIntrinsics::_nanoTime:
+  case vmIntrinsics::_Reference_get:
+    // Use the intrinsic version of Reference.get() so that the value in
+    // the referent field can be registered by the G1 pre-barrier code.
+    // Also to prevent commoning reads from this field across safepoints
+    // since GC can change its value.
+  case vmIntrinsics::_loadFence:
+  case vmIntrinsics::_storeFence:
+  case vmIntrinsics::_fullFence:
+    break;
+  case vmIntrinsics::_floatToRawIntBits:
+  case vmIntrinsics::_intBitsToFloat:
+  case vmIntrinsics::_doubleToRawLongBits:
+  case vmIntrinsics::_longBitsToDouble:
+    if (!InlineMathNatives) return false;
+    break;
+  case vmIntrinsics::_getClass:
+  case vmIntrinsics::_isInstance:
+    if (!InlineClassNatives) return false;
+    break;
+  case vmIntrinsics::_currentThread:
+    if (!InlineThreadNatives) return false;
+    break;
+  case vmIntrinsics::_dabs:
+  case vmIntrinsics::_dsqrt:
+  case vmIntrinsics::_dsin:
+  case vmIntrinsics::_dcos:
+  case vmIntrinsics::_dtan:
+  case vmIntrinsics::_dlog:
+  case vmIntrinsics::_dlog10:
+  case vmIntrinsics::_dexp:
+  case vmIntrinsics::_dpow:
+    if (!InlineMathNatives) return false;
+    break;
+  case vmIntrinsics::_getObject:
+  case vmIntrinsics::_getBoolean:
+  case vmIntrinsics::_getByte:
+  case vmIntrinsics::_getShort:
+  case vmIntrinsics::_getChar:
+  case vmIntrinsics::_getInt:
+  case vmIntrinsics::_getLong:
+  case vmIntrinsics::_getFloat:
+  case vmIntrinsics::_getDouble:
+  case vmIntrinsics::_putObject:
+  case vmIntrinsics::_putBoolean:
+  case vmIntrinsics::_putByte:
+  case vmIntrinsics::_putShort:
+  case vmIntrinsics::_putChar:
+  case vmIntrinsics::_putInt:
+  case vmIntrinsics::_putLong:
+  case vmIntrinsics::_putFloat:
+  case vmIntrinsics::_putDouble:
+  case vmIntrinsics::_getObjectVolatile:
+  case vmIntrinsics::_getBooleanVolatile:
+  case vmIntrinsics::_getByteVolatile:
+  case vmIntrinsics::_getShortVolatile:
+  case vmIntrinsics::_getCharVolatile:
+  case vmIntrinsics::_getIntVolatile:
+  case vmIntrinsics::_getLongVolatile:
+  case vmIntrinsics::_getFloatVolatile:
+  case vmIntrinsics::_getDoubleVolatile:
+  case vmIntrinsics::_putObjectVolatile:
+  case vmIntrinsics::_putBooleanVolatile:
+  case vmIntrinsics::_putByteVolatile:
+  case vmIntrinsics::_putShortVolatile:
+  case vmIntrinsics::_putCharVolatile:
+  case vmIntrinsics::_putIntVolatile:
+  case vmIntrinsics::_putLongVolatile:
+  case vmIntrinsics::_putFloatVolatile:
+  case vmIntrinsics::_putDoubleVolatile:
+  case vmIntrinsics::_getByte_raw:
+  case vmIntrinsics::_getShort_raw:
+  case vmIntrinsics::_getChar_raw:
+  case vmIntrinsics::_getInt_raw:
+  case vmIntrinsics::_getLong_raw:
+  case vmIntrinsics::_getFloat_raw:
+  case vmIntrinsics::_getDouble_raw:
+  case vmIntrinsics::_putByte_raw:
+  case vmIntrinsics::_putShort_raw:
+  case vmIntrinsics::_putChar_raw:
+  case vmIntrinsics::_putInt_raw:
+  case vmIntrinsics::_putLong_raw:
+  case vmIntrinsics::_putFloat_raw:
+  case vmIntrinsics::_putDouble_raw:
+  case vmIntrinsics::_putOrderedObject:
+  case vmIntrinsics::_putOrderedInt:
+  case vmIntrinsics::_putOrderedLong:
+    if (!InlineUnsafeOps) return false;
+    break;
+  case vmIntrinsics::_getAndAddInt:
+    if (!InlineUnsafeOps || !VM_Version::supports_atomic_getadd4()) return false;
+    break;
+  case vmIntrinsics::_getAndAddLong:
+    if (!InlineUnsafeOps || !VM_Version::supports_atomic_getadd8()) return false;
+    break;
+  case vmIntrinsics::_getAndSetInt:
+    if (!InlineUnsafeOps || !VM_Version::supports_atomic_getset4()) return false;
+    break;
+  case vmIntrinsics::_getAndSetLong:
+    if (!InlineUnsafeOps || !VM_Version::supports_atomic_getset8()) return false;
+    break;
+  case vmIntrinsics::_getAndSetObject:
 #ifdef _LP64
-      if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) {
-        return false;
-      }
-      if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) {
-        return false;
-      }
+    if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) return false;
+    if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) return false;
#else
-      if (!VM_Version::supports_atomic_getset4()) {
-        return false;
-      }
+    if (!VM_Version::supports_atomic_getset4()) return false;
+#endif
+    if (!InlineUnsafeOps) return false;
+    break;
+  case vmIntrinsics::_getShortUnaligned:
+  case vmIntrinsics::_getCharUnaligned:
+  case vmIntrinsics::_getIntUnaligned:
+  case vmIntrinsics::_getLongUnaligned:
+  case vmIntrinsics::_putShortUnaligned:
+  case vmIntrinsics::_putCharUnaligned:
+  case vmIntrinsics::_putIntUnaligned:
+  case vmIntrinsics::_putLongUnaligned:
+    if (!UseUnalignedAccesses || !InlineUnsafeOps) return false;
+    break;
+  case vmIntrinsics::_checkIndex:
+    if (!InlineNIOCheckIndex) return false;
+    break;
+  case vmIntrinsics::_updateCRC32:
+  case vmIntrinsics::_updateBytesCRC32:
+  case vmIntrinsics::_updateByteBufferCRC32:
+    if (!UseCRC32Intrinsics) return false;
+    break;
+  case vmIntrinsics::_compareAndSwapLong:
+    if (!VM_Version::supports_cx8()) return false;
+    // fall through
+  case vmIntrinsics::_compareAndSwapInt:
+  case vmIntrinsics::_compareAndSwapObject:
+    break;
+  default:
+    return false; // Intrinsics not on the previous list are not available.
+  }
+
+  return true;
+}
+
+bool GraphBuilder::intrinsic_preserves_state(vmIntrinsics::ID id) {
+  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+  switch(id) {
+#ifdef TRACE_HAVE_INTRINSICS
+  case vmIntrinsics::_classID:
+  case vmIntrinsics::_threadID:
+  case vmIntrinsics::_counterTime:
+#endif
+  case vmIntrinsics::_currentTimeMillis:
+  case vmIntrinsics::_nanoTime:
+  case vmIntrinsics::_floatToRawIntBits:
+  case vmIntrinsics::_intBitsToFloat:
+  case vmIntrinsics::_doubleToRawLongBits:
+  case vmIntrinsics::_longBitsToDouble:
+  case vmIntrinsics::_getClass:
+  case vmIntrinsics::_isInstance:
+  case vmIntrinsics::_currentThread:
+  case vmIntrinsics::_dabs:
+  case vmIntrinsics::_dsqrt:
+  case vmIntrinsics::_dsin:
+  case vmIntrinsics::_dcos:
+  case vmIntrinsics::_dtan:
+  case vmIntrinsics::_dlog:
+  case vmIntrinsics::_dlog10:
+  case vmIntrinsics::_dexp:
+  case vmIntrinsics::_dpow:
+  case vmIntrinsics::_checkIndex:
+  case vmIntrinsics::_Reference_get:
+  case vmIntrinsics::_updateCRC32:
+  case vmIntrinsics::_updateBytesCRC32:
+  case vmIntrinsics::_updateByteBufferCRC32:
+    return true;
+  default:
+    return false;
+  }
+}
+
+bool GraphBuilder::intrinsic_can_trap(vmIntrinsics::ID id) {
+  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+  switch(id) {
+#ifdef TRACE_HAVE_INTRINSICS
+  case vmIntrinsics::_counterTime:
 #endif
-      return append_unsafe_get_and_set_obj(callee, false);
+  case vmIntrinsics::_currentTimeMillis:
+  case vmIntrinsics::_nanoTime:
+  case vmIntrinsics::_floatToRawIntBits:
+  case vmIntrinsics::_intBitsToFloat:
+  case vmIntrinsics::_doubleToRawLongBits:
+  case vmIntrinsics::_longBitsToDouble:
+  case vmIntrinsics::_currentThread:
+  case vmIntrinsics::_dabs:
+  case vmIntrinsics::_dsqrt:
+  case vmIntrinsics::_dsin:
+  case vmIntrinsics::_dcos:
+  case vmIntrinsics::_dtan:
+  case vmIntrinsics::_dlog:
+  case vmIntrinsics::_dlog10:
+  case vmIntrinsics::_dexp:
+  case vmIntrinsics::_dpow:
+  case vmIntrinsics::_updateCRC32:
+  case vmIntrinsics::_updateBytesCRC32:
+  case vmIntrinsics::_updateByteBufferCRC32:
+    return false;
+  default:
+    return true;
+  }
+}
 
-    case vmIntrinsics::_Reference_get:
-      // Use the intrinsic version of Reference.get() so that the value in
-      // the referent field can be registered by the G1 pre-barrier code.
-      // Also to prevent commoning reads from this field across safepoint
-      // since GC can change its value.
-      preserves_state = true;
-      break;
-
-    case vmIntrinsics::_updateCRC32:
-    case vmIntrinsics::_updateBytesCRC32:
-    case vmIntrinsics::_updateByteBufferCRC32:
-      if (!UseCRC32Intrinsics) return false;
-      cantrap = false;
-      preserves_state = true;
-      break;
-
-    case vmIntrinsics::_loadFence :
-    case vmIntrinsics::_storeFence:
-    case vmIntrinsics::_fullFence :
-      break;
+void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee) {
+  vmIntrinsics::ID id = callee->intrinsic_id();
+  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
 
-    default : return false; // do not inline
+  // Some intrinsics need special IR nodes.
+  switch(id) {
+  case vmIntrinsics::_getObject          : append_unsafe_get_obj(callee, T_OBJECT,  false); return;
+  case vmIntrinsics::_getBoolean         : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
+  case vmIntrinsics::_getByte            : append_unsafe_get_obj(callee, T_BYTE,    false); return;
+  case vmIntrinsics::_getShort           : append_unsafe_get_obj(callee, T_SHORT,   false); return;
+  case vmIntrinsics::_getChar            : append_unsafe_get_obj(callee, T_CHAR,    false); return;
+  case vmIntrinsics::_getInt             : append_unsafe_get_obj(callee, T_INT,     false); return;
+  case vmIntrinsics::_getLong            : append_unsafe_get_obj(callee, T_LONG,    false); return;
+  case vmIntrinsics::_getFloat           : append_unsafe_get_obj(callee, T_FLOAT,   false); return;
+  case vmIntrinsics::_getDouble          : append_unsafe_get_obj(callee, T_DOUBLE,  false); return;
+  case vmIntrinsics::_putObject          : append_unsafe_put_obj(callee, T_OBJECT,  false); return;
+  case vmIntrinsics::_putBoolean         : append_unsafe_put_obj(callee, T_BOOLEAN, false); return;
+  case vmIntrinsics::_putByte            : append_unsafe_put_obj(callee, T_BYTE,    false); return;
+  case vmIntrinsics::_putShort           : append_unsafe_put_obj(callee, T_SHORT,   false); return;
+  case vmIntrinsics::_putChar            : append_unsafe_put_obj(callee, T_CHAR,    false); return;
+  case vmIntrinsics::_putInt             : append_unsafe_put_obj(callee, T_INT,     false); return;
+  case vmIntrinsics::_putLong            : append_unsafe_put_obj(callee, T_LONG,    false); return;
+  case vmIntrinsics::_putFloat           : append_unsafe_put_obj(callee, T_FLOAT,   false); return;
+  case vmIntrinsics::_putDouble          : append_unsafe_put_obj(callee, T_DOUBLE,  false); return;
+  case vmIntrinsics::_getShortUnaligned  : append_unsafe_get_obj(callee, T_SHORT,   false); return;
+  case vmIntrinsics::_getCharUnaligned   : append_unsafe_get_obj(callee, T_CHAR,    false); return;
+  case vmIntrinsics::_getIntUnaligned    : append_unsafe_get_obj(callee, T_INT,     false); return;
+  case vmIntrinsics::_getLongUnaligned   : append_unsafe_get_obj(callee, T_LONG,    false); return;
+  case vmIntrinsics::_putShortUnaligned  : append_unsafe_put_obj(callee, T_SHORT,   false); return;
+  case vmIntrinsics::_putCharUnaligned   : append_unsafe_put_obj(callee, T_CHAR,    false); return;
+  case vmIntrinsics::_putIntUnaligned    : append_unsafe_put_obj(callee, T_INT,     false); return;
+  case vmIntrinsics::_putLongUnaligned   : append_unsafe_put_obj(callee, T_LONG,    false); return;
+  case vmIntrinsics::_getObjectVolatile  : append_unsafe_get_obj(callee, T_OBJECT,  true); return;
+  case vmIntrinsics::_getBooleanVolatile : append_unsafe_get_obj(callee, T_BOOLEAN, true); return;
+  case vmIntrinsics::_getByteVolatile    : append_unsafe_get_obj(callee, T_BYTE,    true); return;
+  case vmIntrinsics::_getShortVolatile   : append_unsafe_get_obj(callee, T_SHORT,   true); return;
+  case vmIntrinsics::_getCharVolatile    : append_unsafe_get_obj(callee, T_CHAR,    true); return;
+  case vmIntrinsics::_getIntVolatile     : append_unsafe_get_obj(callee, T_INT,     true); return;
+  case vmIntrinsics::_getLongVolatile    : append_unsafe_get_obj(callee, T_LONG,    true); return;
+  case vmIntrinsics::_getFloatVolatile   : append_unsafe_get_obj(callee, T_FLOAT,   true); return;
+  case vmIntrinsics::_getDoubleVolatile  : append_unsafe_get_obj(callee, T_DOUBLE,  true); return;
+  case vmIntrinsics::_putObjectVolatile  : append_unsafe_put_obj(callee, T_OBJECT,  true); return;
+  case vmIntrinsics::_putBooleanVolatile : append_unsafe_put_obj(callee, T_BOOLEAN, true); return;
+  case vmIntrinsics::_putByteVolatile    : append_unsafe_put_obj(callee, T_BYTE,    true); return;
+  case vmIntrinsics::_putShortVolatile   : append_unsafe_put_obj(callee, T_SHORT,   true); return;
+  case vmIntrinsics::_putCharVolatile    : append_unsafe_put_obj(callee, T_CHAR,    true); return;
+  case vmIntrinsics::_putIntVolatile     : append_unsafe_put_obj(callee, T_INT,     true); return;
+  case vmIntrinsics::_putLongVolatile    : append_unsafe_put_obj(callee, T_LONG,    true); return;
+  case vmIntrinsics::_putFloatVolatile   : append_unsafe_put_obj(callee, T_FLOAT,   true); return;
+  case vmIntrinsics::_putDoubleVolatile  : append_unsafe_put_obj(callee, T_DOUBLE,  true); return;
+  case vmIntrinsics::_getByte_raw        : append_unsafe_get_raw(callee, T_BYTE  ); return;
+  case vmIntrinsics::_getShort_raw       : append_unsafe_get_raw(callee, T_SHORT ); return;
+  case vmIntrinsics::_getChar_raw        : append_unsafe_get_raw(callee, T_CHAR  ); return;
+  case vmIntrinsics::_getInt_raw         : append_unsafe_get_raw(callee, T_INT   ); return;
+  case vmIntrinsics::_getLong_raw        : append_unsafe_get_raw(callee, T_LONG  ); return;
+  case vmIntrinsics::_getFloat_raw       : append_unsafe_get_raw(callee, T_FLOAT ); return;
+  case vmIntrinsics::_getDouble_raw      : append_unsafe_get_raw(callee, T_DOUBLE); return;
+  case vmIntrinsics::_putByte_raw        : append_unsafe_put_raw(callee, T_BYTE  ); return;
+  case vmIntrinsics::_putShort_raw       : append_unsafe_put_raw(callee, T_SHORT ); return;
+  case vmIntrinsics::_putChar_raw        : append_unsafe_put_raw(callee, T_CHAR  ); return;
+  case vmIntrinsics::_putInt_raw         : append_unsafe_put_raw(callee, T_INT   ); return;
+  case vmIntrinsics::_putLong_raw        : append_unsafe_put_raw(callee, T_LONG  ); return;
+  case vmIntrinsics::_putFloat_raw       : append_unsafe_put_raw(callee, T_FLOAT ); return;
+  case vmIntrinsics::_putDouble_raw      : append_unsafe_put_raw(callee, T_DOUBLE); return;
+  case vmIntrinsics::_putOrderedObject   : append_unsafe_put_obj(callee, T_OBJECT, true); return;
+  case vmIntrinsics::_putOrderedInt      : append_unsafe_put_obj(callee, T_INT,    true); return;
+  case vmIntrinsics::_putOrderedLong     : append_unsafe_put_obj(callee, T_LONG,   true); return;
+  case vmIntrinsics::_compareAndSwapLong:
+  case vmIntrinsics::_compareAndSwapInt:
+  case vmIntrinsics::_compareAndSwapObject: append_unsafe_CAS(callee); return;
+  case vmIntrinsics::_getAndAddInt:
+  case vmIntrinsics::_getAndAddLong      : append_unsafe_get_and_set_obj(callee, true); return;
+  case vmIntrinsics::_getAndSetInt       :
+  case vmIntrinsics::_getAndSetLong      :
+  case vmIntrinsics::_getAndSetObject    : append_unsafe_get_and_set_obj(callee, false); return;
+  default:
+    break;
   }
 
   // create intrinsic node
   const bool has_receiver = !callee->is_static();
   ValueType* result_type = as_ValueType(callee->return_type());
@@ -3621,8 +3711,10 @@
     }
   }
 
-  Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
-                                    preserves_state, cantrap);
+  Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(),
+                                    args, has_receiver, state_before,
+                                    intrinsic_preserves_state(id),
+                                    intrinsic_can_trap(id));
   // append instruction & push result
   Value value = append_split(result);
   if (result_type != voidType) push(result_type, value);
@@ -3630,8 +3722,20 @@
   if (callee != method() && profile_return() && result_type->is_object_kind()) {
     profile_return_type(result, callee);
   }
+}
 
-  // done
+bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
+  Method* m = callee->get_Method();
+  if (!is_intrinsic_available_for(m)) {
+    if (!InlineNatives) {
+      // Return false and also set message that the inlining of
+      // intrinsics has been disabled in general.
+      INLINE_BAILOUT("intrinsic method inlining disabled");
+    } else {
+      return false;
+    }
+  }
+  build_graph_for_intrinsic(callee);
   return true;
 }
@@ -4224,58 +4328,46 @@
   _scope_data = scope_data()->parent();
 }
 
-bool GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
-  if (InlineUnsafeOps) {
-    Values* args = state()->pop_arguments(callee->arg_size());
-    null_check(args->at(0));
-    Instruction* offset = args->at(2);
+void GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
+  Values* args = state()->pop_arguments(callee->arg_size());
+  null_check(args->at(0));
+  Instruction* offset = args->at(2);
 #ifndef _LP64
-    offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
+  offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
 #endif
-    Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
-    push(op->type(), op);
-    compilation()->set_has_unsafe_access(true);
-  }
-  return InlineUnsafeOps;
+  Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
+  push(op->type(), op);
+  compilation()->set_has_unsafe_access(true);
 }
 
-bool GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
-  if (InlineUnsafeOps) {
-    Values* args = state()->pop_arguments(callee->arg_size());
-    null_check(args->at(0));
-    Instruction* offset = args->at(2);
+void GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
+  Values* args = state()->pop_arguments(callee->arg_size());
+  null_check(args->at(0));
+  Instruction* offset = args->at(2);
 #ifndef _LP64
-    offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
+  offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
 #endif
-    Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
-    compilation()->set_has_unsafe_access(true);
-    kill_all();
-  }
-  return InlineUnsafeOps;
+  Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
+  compilation()->set_has_unsafe_access(true);
+  kill_all();
 }
 
-bool GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
-  if (InlineUnsafeOps) {
-    Values* args = state()->pop_arguments(callee->arg_size());
-    null_check(args->at(0));
-    Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
-    push(op->type(), op);
-    compilation()->set_has_unsafe_access(true);
-  }
-  return InlineUnsafeOps;
+void GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
+  Values* args = state()->pop_arguments(callee->arg_size());
+  null_check(args->at(0));
+  Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
+  push(op->type(), op);
+  compilation()->set_has_unsafe_access(true);
 }
 
-bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
-  if (InlineUnsafeOps) {
-    Values* args = state()->pop_arguments(callee->arg_size());
-    null_check(args->at(0));
-    Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
-    compilation()->set_has_unsafe_access(true);
-  }
-  return InlineUnsafeOps;
+void GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
+  Values* args = state()->pop_arguments(callee->arg_size());
+  null_check(args->at(0));
+  Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
+  compilation()->set_has_unsafe_access(true);
 }
 
@@ -4352,21 +4444,18 @@
   }
 }
 
-bool GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
-  if (InlineUnsafeOps) {
-    Values* args = state()->pop_arguments(callee->arg_size());
-    BasicType t = callee->return_type()->basic_type();
-    null_check(args->at(0));
-    Instruction* offset = args->at(2);
+void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
+  Values* args = state()->pop_arguments(callee->arg_size());
+  BasicType t = callee->return_type()->basic_type();
+  null_check(args->at(0));
+  Instruction* offset = args->at(2);
 #ifndef _LP64
-    offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
+  offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
 #endif
-    Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
-    compilation()->set_has_unsafe_access(true);
-    kill_all();
-    push(op->type(), op);
-  }
-  return InlineUnsafeOps;
+  Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
+  compilation()->set_has_unsafe_access(true);
+  kill_all();
+  push(op->type(), op);
 }
 
 #ifndef PRODUCT
--- old/src/share/vm/opto/compile.hpp	2015-07-14 18:39:24.107577151 +0200
+++ new/src/share/vm/opto/compile.hpp	2015-07-14 18:39:23.879577145 +0200
@@ -1234,6 +1234,13 @@
   CallGenerator* find_intrinsic(ciMethod* m, bool is_virtual);     // query fn
   void           register_intrinsic(CallGenerator* cg);            // update fn
 
+  // Returns true if the intrinsic with the given ID performs a virtual dispatch.
+  static bool intrinsic_does_virtual_dispatch_for(vmIntrinsics::ID id);
+
+  // A return value larger than 0 indicates that the intrinsic with the given
+  // ID requires predicated logic.
+  static int intrinsic_predicates_needed_for(vmIntrinsics::ID id);
+
 #ifndef PRODUCT
   static juint  _intrinsic_hist_count[vmIntrinsics::ID_LIMIT];
   static jubyte _intrinsic_hist_flags[vmIntrinsics::ID_LIMIT];
@@ -1259,6 +1266,23 @@
                                        bool is_virtual, int flags) PRODUCT_RETURN0;
   static void print_intrinsic_statistics() PRODUCT_RETURN;
 
+  // An intrinsic is available if:
+  //  - the intrinsic is enabled (by using the appropriate command-line flag) and
+  //  - the intrinsic is supported by the VM (i.e., the platform on which the VM is
+  //    running provides the instructions necessary for the C2 compiler to generate
+  //    the intrinsic).
+  //
+  // The method 'is_intrinsic_available_for' returns true if the current
+  // platform provides an intrinsic for the method, for the compilation context,
+  // and for the dispatch mode (virtual or non-virtual) given as parameters.
+  //
+  // The parameter 'compilation_context' indicates the context in which 'method'
+  // is attempted to be intrinsified; 'compilation_context' is either a method
+  // that is calling 'method' or it is 'method' itself. The parameter
+  // 'compilation_context' is necessary to implement support for the
+  // DisableIntrinsic flag.
+  static bool is_intrinsic_available_for(Method* method, Method* compilation_context, bool is_virtual);
+
   // Graph verification code
   // Walk the node list, verifying that there is a one-to-one
   // correspondence between Use-Def edges and Def-Use edges
--- old/src/share/vm/opto/c2compiler.cpp	2015-07-14 18:39:24.183577153 +0200
+++ new/src/share/vm/opto/c2compiler.cpp	2015-07-14 18:39:23.827577144 +0200
@@ -79,7 +79,6 @@
   return OptoRuntime::generate(thread->env());
 }
 
-
 void C2Compiler::initialize() {
   // The first compiler thread that gets here will initialize the
   // small amount of global state (and runtime stubs) that C2 needs.
@@ -154,11 +153,21 @@
   }
 }
 
-
 void C2Compiler::print_timers() {
   Compile::print_timers();
 }
 
+bool C2Compiler::is_intrinsic_available_for(methodHandle method, methodHandle compilation_context) {
+  // Assume a non-virtual dispatch. A virtual dispatch is
+  // possible for only a limited set of available intrinsics whereas
+  // a non-virtual dispatch is possible for all available intrinsics.
+  return Compile::is_intrinsic_available_for(method(), compilation_context(), false);
+}
+
+bool C2Compiler::is_intrinsic_available_for(methodHandle method) {
+  return Compile::is_intrinsic_available_for(method(), NULL, false);
+}
+
 int C2Compiler::initial_code_buffer_size() {
   assert(SegmentedCodeCache, "Should be only used with a segmented code cache");
   return Compile::MAX_inst_size + Compile::MAX_locs_size + initial_const_capacity;
--- old/src/share/vm/opto/library_call.cpp	2015-07-14 18:39:24.223577155 +0200
+++ new/src/share/vm/opto/library_call.cpp	2015-07-14 18:39:23.915577146 +0200
@@ -304,42 +304,76 @@
   bool inline_isCompileConstant();
 };
 
-
-//---------------------------make_vm_intrinsic----------------------------
-CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
-  vmIntrinsics::ID id = m->intrinsic_id();
+bool Compile::intrinsic_does_virtual_dispatch_for(vmIntrinsics::ID id) {
   assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+  switch(id) {
+  case vmIntrinsics::_hashCode:
+  case vmIntrinsics::_clone:
+    return true;
+  default:
+    return false;
+  }
+}
 
+int Compile::intrinsic_predicates_needed_for(vmIntrinsics::ID id) {
+  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+  switch (id) {
+  case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
+  case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
+    return 1;
+  case vmIntrinsics::_digestBase_implCompressMB:
+    return 3;
+  default:
+    return 0;
+  }
+}
 
-  ccstr disable_intr = NULL;
-
-  if ((DisableIntrinsic[0] != '\0'
-       && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
-      (method_has_option_value("DisableIntrinsic", disable_intr)
-       && strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)) {
-    // disabled by a user request on the command line:
-    // example: -XX:DisableIntrinsic=_hashCode,_getClass
-    return NULL;
+bool Compile::is_intrinsic_available_for(Method* method, Method* compilation_context, bool is_virtual) {
+  vmIntrinsics::ID id = method->intrinsic_id();
+  if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::LAST_COMPILER_INLINE) {
+    return false;
   }
 
-  if (!m->is_loaded()) {
-    // do not attempt to inline unloaded methods
-    return NULL;
+  // Check if the intrinsic corresponding to 'method' has been disabled on
+  // the command line by using the DisableIntrinsic flag (either globally
+  // or on a per-method level, see src/share/vm/compiler/abstractCompiler.hpp
+  // for details).
+  // Usually, the compilation context is the caller of the method 'method'.
+  // The only case in which, for a non-recursive method 'method', the
+  // compilation context is not the caller of 'method' (but 'method' itself)
+  // is java.lang.ref.Reference::get.
+  // For java.lang.ref.Reference::get, the intrinsic version is used
+  // instead of the C2-compiled version so that the value in the referent
+  // field can be registered by the G1 pre-barrier code. The intrinsified
+  // version of Reference::get also adds a memory barrier to prevent
+  // commoning reads from the referent field across safepoints since GC
+  // can change the referent field's value. See Compile::Compile()
+  // in src/share/vm/opto/compile.cpp for more details.
+  ccstr disable_intr = NULL;
+  if ((DisableIntrinsic[0] != '\0' && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
+      (compilation_context != NULL &&
+       CompilerOracle::has_option_value(compilation_context, "DisableIntrinsic", disable_intr) &&
+       strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)
+  ) {
+    return false;
   }
 
-  // Only a few intrinsics implement a virtual dispatch.
-  // They are expensive calls which are also frequently overridden.
+  // Only Object.hashCode and Object.clone intrinsics implement also a virtual
+  // dispatch because calling both methods is expensive but both methods are
+  // frequently overridden. All other intrinsics implement only a non-virtual
+  // dispatch.
   if (is_virtual) {
     switch (id) {
     case vmIntrinsics::_hashCode:
     case vmIntrinsics::_clone:
-      // OK, Object.hashCode and Object.clone intrinsics come in both flavors
       break;
     default:
-      return NULL;
+      return false;
     }
   }
 
-  // -XX:-InlineNatives disables nearly all intrinsics:
+  // -XX:-InlineNatives disables nearly all intrinsics except the ones listed in
+  // the following switch statement.
   if (!InlineNatives) {
     switch (id) {
     case vmIntrinsics::_indexOf:
@@ -354,237 +388,189 @@
     case vmIntrinsics::_loadFence:
     case vmIntrinsics::_storeFence:
     case vmIntrinsics::_fullFence:
-      break;  // InlineNatives does not control String.compareTo
     case vmIntrinsics::_Reference_get:
-      break;  // InlineNatives does not control Reference.get
+      break;
     default:
-      return NULL;
+      return false;
     }
   }
 
-  int predicates = 0;
-  bool does_virtual_dispatch = false;
-
   switch (id) {
   case vmIntrinsics::_compareTo:
-    if (!SpecialStringCompareTo) return NULL;
-    if (!Matcher::match_rule_supported(Op_StrComp)) return NULL;
+    if (!SpecialStringCompareTo) return false;
+    if (!Matcher::match_rule_supported(Op_StrComp)) return false;
    break;
   case vmIntrinsics::_indexOf:
-    if (!SpecialStringIndexOf) return NULL;
+    if (!SpecialStringIndexOf) return false;
     break;
   case vmIntrinsics::_equals:
-    if (!SpecialStringEquals) return NULL;
-    if (!Matcher::match_rule_supported(Op_StrEquals)) return NULL;
+    if (!SpecialStringEquals) return false;
+    if (!Matcher::match_rule_supported(Op_StrEquals)) return false;
     break;
   case vmIntrinsics::_equalsC:
-    if (!SpecialArraysEquals) return NULL;
-    if (!Matcher::match_rule_supported(Op_AryEq)) return NULL;
+    if (!SpecialArraysEquals) return false;
+    if (!Matcher::match_rule_supported(Op_AryEq)) return false;
     break;
   case vmIntrinsics::_arraycopy:
-    if (!InlineArrayCopy) return NULL;
+    if (!InlineArrayCopy) return false;
     break;
   case vmIntrinsics::_copyMemory:
-    if (StubRoutines::unsafe_arraycopy() == NULL) return NULL;
-    if (!InlineArrayCopy) return NULL;
+    if (!InlineArrayCopy) return false;
+    if (StubRoutines::unsafe_arraycopy() == NULL) return false;
     break;
   case vmIntrinsics::_hashCode:
-    if (!InlineObjectHash) return NULL;
-    does_virtual_dispatch = true;
+    if (!InlineObjectHash) return false;
     break;
   case vmIntrinsics::_clone:
-    does_virtual_dispatch = true;
   case vmIntrinsics::_copyOf:
   case vmIntrinsics::_copyOfRange:
-    if (!InlineObjectCopy) return NULL;
-    // These also use the arraycopy intrinsic mechanism:
-    if (!InlineArrayCopy) return NULL;
+    // These intrinsics use both the objectcopy and the arraycopy
+    // intrinsic mechanism.
+    if (!InlineObjectCopy || !InlineArrayCopy) return false;
     break;
   case vmIntrinsics::_encodeISOArray:
-    if (!SpecialEncodeISOArray) return NULL;
-    if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return NULL;
+    if (!SpecialEncodeISOArray) return false;
+    if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return false;
     break;
   case vmIntrinsics::_checkIndex:
     // We do not intrinsify this.  The optimizer does fine with it.
-    return NULL;
-
+    return false;
   case vmIntrinsics::_getCallerClass:
-    if (!InlineReflectionGetCallerClass) return NULL;
-    if (SystemDictionary::reflect_CallerSensitive_klass() == NULL) return NULL;
+    if (!InlineReflectionGetCallerClass ||
+        SystemDictionary::reflect_CallerSensitive_klass() == NULL) {
+      return false;
+    }
     break;
-
   case vmIntrinsics::_bitCount_i:
-    if (!Matcher::match_rule_supported(Op_PopCountI)) return NULL;
+    if (!Matcher::match_rule_supported(Op_PopCountI)) return false;
     break;
-
   case vmIntrinsics::_bitCount_l:
-    if (!Matcher::match_rule_supported(Op_PopCountL)) return NULL;
+    if (!Matcher::match_rule_supported(Op_PopCountL)) return false;
     break;
-
   case vmIntrinsics::_numberOfLeadingZeros_i:
-    if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return NULL;
+    if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return false;
     break;
-
   case vmIntrinsics::_numberOfLeadingZeros_l:
-    if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return NULL;
+    if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return false;
     break;
-
   case vmIntrinsics::_numberOfTrailingZeros_i:
-    if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return NULL;
+    if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return false;
     break;
-
   case vmIntrinsics::_numberOfTrailingZeros_l:
-    if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
+    if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return false;
    break;
-
   case vmIntrinsics::_reverseBytes_c:
-    if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return NULL;
+    if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false;
     break;
   case vmIntrinsics::_reverseBytes_s:
-    if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return NULL;
+    if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return false;
     break;
   case vmIntrinsics::_reverseBytes_i:
-    if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return NULL;
+    if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return false;
     break;
   case vmIntrinsics::_reverseBytes_l:
-    if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return NULL;
+    if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return false;
     break;
-
-  case vmIntrinsics::_Reference_get:
-    // Use the intrinsic version of Reference.get() so that the value in
-    // the referent field can be registered by the G1 pre-barrier code.
-    // Also add memory barrier to prevent commoning reads from this field
-    // across safepoint since GC can change it value.
-    break;
-
   case vmIntrinsics::_compareAndSwapObject:
 #ifdef _LP64
-    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return NULL;
+    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return false;
 #endif
-    break;
-
+    break;
   case vmIntrinsics::_compareAndSwapLong:
-    if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return NULL;
+    if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return false;
     break;
-
   case vmIntrinsics::_getAndAddInt:
-    if (!Matcher::match_rule_supported(Op_GetAndAddI)) return NULL;
+    if (!Matcher::match_rule_supported(Op_GetAndAddI)) return false;
     break;
-
   case vmIntrinsics::_getAndAddLong:
-    if (!Matcher::match_rule_supported(Op_GetAndAddL)) return NULL;
+    if (!Matcher::match_rule_supported(Op_GetAndAddL)) return false;
    break;
-
   case vmIntrinsics::_getAndSetInt:
-    if (!Matcher::match_rule_supported(Op_GetAndSetI)) return NULL;
+    if (!Matcher::match_rule_supported(Op_GetAndSetI)) return false;
     break;
-
   case vmIntrinsics::_getAndSetLong:
-    if (!Matcher::match_rule_supported(Op_GetAndSetL)) return NULL;
+    if (!Matcher::match_rule_supported(Op_GetAndSetL)) return false;
     break;
-
   case vmIntrinsics::_getAndSetObject:
 #ifdef _LP64
-    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
-    if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return NULL;
+    if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return false;
+    if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return false;
     break;
 #else
-    if (!Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
+    if (!Matcher::match_rule_supported(Op_GetAndSetP)) return false;
     break;
 #endif
-
   case vmIntrinsics::_aescrypt_encryptBlock:
   case vmIntrinsics::_aescrypt_decryptBlock:
-    if (!UseAESIntrinsics) return NULL;
+    if (!UseAESIntrinsics) return false;
     break;
-
   case vmIntrinsics::_multiplyToLen:
-    if (!UseMultiplyToLenIntrinsic) return NULL;
+    if (!UseMultiplyToLenIntrinsic) return false;
     break;
-
   case vmIntrinsics::_squareToLen:
-    if (!UseSquareToLenIntrinsic) return NULL;
+    if (!UseSquareToLenIntrinsic) return false;
     break;
-
   case vmIntrinsics::_mulAdd:
-    if (!UseMulAddIntrinsic) return NULL;
+    if (!UseMulAddIntrinsic) return false;
     break;
-
   case vmIntrinsics::_montgomeryMultiply:
-    if (!UseMontgomeryMultiplyIntrinsic) return NULL;
+    if (!UseMontgomeryMultiplyIntrinsic) return false;
     break;
   case vmIntrinsics::_montgomerySquare:
-    if (!UseMontgomerySquareIntrinsic) return NULL;
+    if (!UseMontgomerySquareIntrinsic) return false;
     break;
-
   case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
   case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
-    if (!UseAESIntrinsics) return NULL;
-    // these two require the predicated logic
-    predicates = 1;
+    if (!UseAESIntrinsics) return false;
     break;
-
   case vmIntrinsics::_sha_implCompress:
-    if (!UseSHA1Intrinsics) return NULL;
+    if (!UseSHA1Intrinsics) return false;
     break;
-
   case vmIntrinsics::_sha2_implCompress:
-    if (!UseSHA256Intrinsics) return NULL;
+    if (!UseSHA256Intrinsics) return false;
     break;
-
   case vmIntrinsics::_sha5_implCompress:
-    if (!UseSHA512Intrinsics) return NULL;
+    if (!UseSHA512Intrinsics) return false;
     break;
-
   case vmIntrinsics::_digestBase_implCompressMB:
-    if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return NULL;
-    predicates = 3;
+    if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return false;
     break;
-
   case vmIntrinsics::_ghash_processBlocks:
-    if (!UseGHASHIntrinsics) return NULL;
+    if (!UseGHASHIntrinsics) return false;
    break;
-
   case vmIntrinsics::_updateCRC32:
   case vmIntrinsics::_updateBytesCRC32:
   case vmIntrinsics::_updateByteBufferCRC32:
-    if (!UseCRC32Intrinsics) return NULL;
+    if (!UseCRC32Intrinsics) return false;
     break;
-
   case vmIntrinsics::_updateBytesCRC32C:
   case vmIntrinsics::_updateDirectByteBufferCRC32C:
-    if (!UseCRC32CIntrinsics) return NULL;
+    if (!UseCRC32CIntrinsics) return false;
     break;
-
   case vmIntrinsics::_incrementExactI:
   case vmIntrinsics::_addExactI:
-    if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
+    if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowAddI)) return false;
     break;
   case vmIntrinsics::_incrementExactL:
   case vmIntrinsics::_addExactL:
-    if (!Matcher::match_rule_supported(Op_OverflowAddL) || !UseMathExactIntrinsics) return NULL;
+    if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowAddL)) return false;
     break;
   case vmIntrinsics::_decrementExactI:
   case vmIntrinsics::_subtractExactI:
-    if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
+  case vmIntrinsics::_negateExactI:
+    if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowSubI)) return false;
     break;
   case vmIntrinsics::_decrementExactL:
   case vmIntrinsics::_subtractExactL:
-    if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
-    break;
-  case vmIntrinsics::_negateExactI:
-    if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
-    break;
   case vmIntrinsics::_negateExactL:
-    if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
+    if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowSubL)) return false;
     break;
   case vmIntrinsics::_multiplyExactI:
-    if (!Matcher::match_rule_supported(Op_OverflowMulI) || !UseMathExactIntrinsics) return NULL;
+    if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowMulI)) return false;
     break;
   case vmIntrinsics::_multiplyExactL:
-    if (!Matcher::match_rule_supported(Op_OverflowMulL) || !UseMathExactIntrinsics) return NULL;
+    if (!UseMathExactIntrinsics || !Matcher::match_rule_supported(Op_OverflowMulL)) return false;
     break;
-
   case vmIntrinsics::_getShortUnaligned:
   case vmIntrinsics::_getCharUnaligned:
   case vmIntrinsics::_getIntUnaligned:
@@ -593,41 +579,72 @@
   case vmIntrinsics::_putCharUnaligned:
   case vmIntrinsics::_putIntUnaligned:
   case vmIntrinsics::_putLongUnaligned:
-    if (!UseUnalignedAccesses) return NULL;
+    if (!UseUnalignedAccesses) return false;
     break;
-
-  default:
-    assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
-    assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
+  default:
     break;
   }
 
+  Symbol* method_holder = method->method_holder()->name();
   // -XX:-InlineClassNatives disables natives from the Class class.
   // The flag applies to all reflective calls, notably Array.newArray
   // (visible to Java programmers as Array.newInstance).
-  if (m->holder()->name() == ciSymbol::java_lang_Class() ||
-      m->holder()->name() == ciSymbol::java_lang_reflect_Array()) {
-    if (!InlineClassNatives) return NULL;
+  if (method_holder == vmSymbols::java_lang_Class() ||
+      method_holder == vmSymbols::java_lang_reflect_Array()) {
+    if (!InlineClassNatives) return false;
   }
 
   // -XX:-InlineThreadNatives disables natives from the Thread class.
-  if (m->holder()->name() == ciSymbol::java_lang_Thread()) {
-    if (!InlineThreadNatives) return NULL;
+  if (method_holder == vmSymbols::java_lang_Thread()) {
+    if (!InlineThreadNatives) return false;
   }
 
   // -XX:-InlineMathNatives disables natives from the Math,Float and Double classes.
-  if (m->holder()->name() == ciSymbol::java_lang_Math() ||
-      m->holder()->name() == ciSymbol::java_lang_Float() ||
-      m->holder()->name() == ciSymbol::java_lang_Double()) {
-    if (!InlineMathNatives) return NULL;
+  if (method_holder == vmSymbols::java_lang_Math() ||
+      method_holder == vmSymbols::java_lang_Float() ||
+      method_holder == vmSymbols::java_lang_Double()) {
+    if (!InlineMathNatives) return false;
   }
 
   // -XX:-InlineUnsafeOps disables natives from the Unsafe class.
-  if (m->holder()->name() == ciSymbol::sun_misc_Unsafe()) {
-    if (!InlineUnsafeOps) return NULL;
+  if (method_holder == vmSymbols::sun_misc_Unsafe()) {
+    if (!InlineUnsafeOps) return false;
+  }
+
+  return true;
+}
+
+//---------------------------make_vm_intrinsic----------------------------
+CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
+  vmIntrinsics::ID id = m->intrinsic_id();
+  assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+
+  if (!m->is_loaded()) {
+    // Do not attempt to inline unloaded methods.
+    return NULL;
+  }
+
+  bool is_available = false;
+  {
+    // To call is_intrinsic_available_for() we need to transition to
+    // the '_thread_in_vm' state because it accesses VM-internal data.
+    VM_ENTRY_MARK;
+    methodHandle mh(THREAD, m->get_Method());
+    methodHandle ct(THREAD, method()->get_Method());
+    is_available = is_intrinsic_available_for(mh(), ct(), is_virtual);
   }
 
-  return new LibraryIntrinsic(m, is_virtual, predicates, does_virtual_dispatch, (vmIntrinsics::ID) id);
+  if (is_available) {
+    assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
+    assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
+    return new LibraryIntrinsic(m, is_virtual,
+                                intrinsic_predicates_needed_for(id),
+                                intrinsic_does_virtual_dispatch_for(id),
+                                (vmIntrinsics::ID) id);
+  } else {
+    return NULL;
+  }
 }
 
 //----------------------register_library_intrinsics-----------------------
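Editor's note: the availability checks above gate the exact-math intrinsics on UseMathExactIntrinsics plus the matcher's Overflow* rules, but whether or not an intrinsic is generated, the Java-level contract stays the same: the exact methods throw ArithmeticException on overflow. A plain-JDK illustration, with no assumptions beyond the java.lang.Math API:

    public class AddExactOverflow {
        public static void main(String[] args) {
            System.out.println(Math.addExact(1, 2)); // prints 3
            try {
                Math.addExact(Integer.MAX_VALUE, 1); // overflows the int range
            } catch (ArithmeticException e) {
                // Thrown by both the bytecode and the intrinsified version.
                System.out.println("overflow detected: " + e.getMessage());
            }
        }
    }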
--- old/test/compiler/intrinsics/mathexact/sanity/IntrinsicBase.java 2015-07-14 18:39:24.259577156 +0200
+++ new/test/compiler/intrinsics/mathexact/sanity/IntrinsicBase.java 2015-07-14 18:39:23.831577144 +0200
@@ -67,7 +67,7 @@
                 compileAtLevel(CompilerWhiteBoxTest.COMP_LEVEL_SIMPLE);
             }
 
-            if (!isIntrinsicSupported()) {
+            if (!isIntrinsicAvailable()) {
                 expectedIntrinsicCount = 0;
             }
             break;
@@ -114,7 +114,11 @@
         }
     }
 
-    protected abstract boolean isIntrinsicSupported();
+    // An intrinsic is available if:
+    // - the intrinsic is enabled (by using the appropriate command-line flag) and
+    // - the intrinsic is supported by the VM (i.e., the platform on which the VM is
+    //   running provides the instructions necessary for the VM to generate the intrinsic).
+    protected abstract boolean isIntrinsicAvailable();
 
     protected abstract String getIntrinsicId();
 
@@ -123,14 +127,20 @@
     }
 
     static class IntTest extends IntrinsicBase {
+
+        protected boolean isIntrinsicAvailable; // Whether the tested intrinsic is available on the current platform.
+
         protected IntTest(MathIntrinsic.IntIntrinsic testCase) {
             super(testCase);
+            // Only the C2 compiler intrinsifies exact math methods,
+            // so check if the intrinsics are available with C2.
+            isIntrinsicAvailable = WHITE_BOX.isIntrinsicAvailableForMethod(testCase.getTestMethod(),
+                                                                           COMP_LEVEL_FULL_OPTIMIZATION);
         }
 
         @Override
-        protected boolean isIntrinsicSupported() {
-            return isServerVM() && Boolean.valueOf(useMathExactIntrinsics)
-                    && (Platform.isX86() || Platform.isX64() || Platform.isAArch64());
+        protected boolean isIntrinsicAvailable() {
+            return isIntrinsicAvailable;
         }
 
         @Override
@@ -140,14 +150,20 @@
     }
 
     static class LongTest extends IntrinsicBase {
+
+        protected boolean isIntrinsicAvailable; // Whether the tested intrinsic is available on the current platform.
+
         protected LongTest(MathIntrinsic.LongIntrinsic testCase) {
             super(testCase);
+            // Only the C2 compiler intrinsifies exact math methods,
+            // so check if the intrinsics are available with C2.
+            isIntrinsicAvailable = WHITE_BOX.isIntrinsicAvailableForMethod(testCase.getTestMethod(),
+                                                                           COMP_LEVEL_FULL_OPTIMIZATION);
         }
 
         @Override
-        protected boolean isIntrinsicSupported() {
-            return isServerVM() && Boolean.valueOf(useMathExactIntrinsics) &&
-                    (Platform.isX64() || Platform.isPPC() || Platform.isAArch64());
+        protected boolean isIntrinsicAvailable() {
+            return isIntrinsicAvailable;
         }
 
         @Override
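Reviewer note, not part of the patch: outside the jtreg harness, the query the updated tests perform boils down to a few lines. A minimal sketch, assuming the usual WhiteBox setup (-Xbootclasspath/a:<wb classes> -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI); the two-argument isIntrinsicAvailableForMethod wrapper is the one IntrinsicBase calls above (its WhiteBox.java definition is not part of this excerpt), and COMP_LEVEL_FULL_OPTIMIZATION is assumed to match HotSpot's CompLevel_full_optimization (4):

import java.lang.reflect.Executable;
import sun.hotspot.WhiteBox;

public class AvailabilityQuery {
    private static final WhiteBox WHITE_BOX = WhiteBox.getWhiteBox();
    private static final int COMP_LEVEL_FULL_OPTIMIZATION = 4; // CompLevel_full_optimization (C2)

    public static void main(String[] args) throws NoSuchMethodException {
        // Resolve the candidate method the same way MathIntrinsic.testMethod() does.
        Executable addExact =
            Math.class.getDeclaredMethod("addExact", int.class, int.class);
        // True only if C2 both enables and supports _addExactI on this platform.
        boolean available = WHITE_BOX.isIntrinsicAvailableForMethod(
            addExact, COMP_LEVEL_FULL_OPTIMIZATION);
        System.out.println("_addExactI available: " + available);
    }
}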
--- old/src/share/vm/c1/c1_Compiler.hpp 2015-07-14 18:39:24.283577156 +0200
+++ new/src/share/vm/c1/c1_Compiler.hpp 2015-07-14 18:39:23.855577144 +0200
@@ -55,6 +55,16 @@
   // Print compilation timers and statistics
   virtual void print_timers();
 
+  // Check the availability of an intrinsic for 'method' given a compilation context.
+  // The compilation context is needed to support per-method usage of the
+  // DisableIntrinsic flag. However, as C1 ignores the DisableIntrinsic flag, the
+  // compilation context can be ignored.
+  virtual bool is_intrinsic_available_for(methodHandle method, methodHandle compilation_context) {
+    return is_intrinsic_available_for(method);
+  }
+  // Check the availability of an intrinsic for 'method'.
+  virtual bool is_intrinsic_available_for(methodHandle method);
+
   // Size of the code buffer
   static int code_buffer_size();
 };
--- old/src/share/vm/compiler/abstractCompiler.hpp 2015-07-14 18:39:24.275577156 +0200
+++ new/src/share/vm/compiler/abstractCompiler.hpp 2015-07-14 18:39:23.927577146 +0200
@@ -66,6 +66,44 @@
   virtual bool supports_osr   ()                 { return true; }
   virtual bool can_compile_method(methodHandle method)  { return true; }
 
+  // Determine if the current compiler provides an intrinsic
+  // for method 'method'. An intrinsic is available if:
+  //  - the intrinsic is enabled (by using the appropriate command-line flag) and
+  //  - the platform on which the VM is running provides the instructions necessary
+  //    for the compiler to generate the intrinsic code.
+  //
+  // An intrinsic can also be disabled using the DisableIntrinsic command-line
+  // flag. There are three ways to disable an intrinsic using the DisableIntrinsic
+  // flag:
+  //
+  // (1) -XX:DisableIntrinsic=_hashCode,_getClass
+  //     Disables intrinsification of _hashCode and _getClass globally
+  //     (i.e., the intrinsified version of the methods will not be used at all).
+  // (2) -XX:CompileCommand=option,aClass::aMethod,DisableIntrinsic,_hashCode
+  //     Disables intrinsification of _hashCode when it is called from
+  //     aClass::aMethod (but not for any other call site of _hashCode).
+  // (3) -XX:CompileCommand=option,java.lang.ref.Reference::get,DisableIntrinsic,_Reference_get
+  //     Some methods are not compiled by C2. Instead, the C2 compiler
+  //     directly returns the intrinsified version of these methods.
+  //     The command above forces C2 to compile _Reference_get, but
+  //     allows using the intrinsified version of _Reference_get at all
+  //     other call sites.
+  //
+  // From the above modes, (1) disables intrinsics globally, while (2) and (3)
+  // disable intrinsics on a per-method basis. In cases (2) and (3) the
+  // compilation context is aClass::aMethod and java.lang.ref.Reference::get,
+  // respectively.
+  virtual bool is_intrinsic_available_for(methodHandle method, methodHandle compilation_context) {
+    return false;
+  }
+
+  // Use the version of 'is_intrinsic_available_for' below if per-method usage
+  // of the DisableIntrinsic flag is not expected (or not relevant). This
+  // version ignores all per-method uses of the DisableIntrinsic flag.
+  virtual bool is_intrinsic_available_for(methodHandle method) {
+    return false;
+  }
+
   // Compiler type queries.
   bool is_c1()                                   { return _type == c1; }
   bool is_c2()                                   { return _type == c2; }
--- old/src/share/vm/prims/whitebox.cpp 2015-07-14 18:39:24.331577158 +0200
+++ new/src/share/vm/prims/whitebox.cpp 2015-07-14 18:39:24.035577149 +0200
@@ -528,6 +528,24 @@
   return mh->queued_for_compilation();
 WB_END
 
+WB_ENTRY(jboolean, WB_IsIntrinsicAvailableForMethod(JNIEnv* env, jobject o, jobject method, jobject compilation_context, jint compLevel))
+  if (compLevel < CompLevel_none || compLevel > CompLevel_highest_tier) {
+    return false; // Intrinsic is not available on a non-existent compilation level.
+  }
+  jmethodID method_id, compilation_context_id;
+  method_id = reflected_method_to_jmid(thread, env, method);
+  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
+  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(method_id));
+  if (compilation_context != NULL) {
+    compilation_context_id = reflected_method_to_jmid(thread, env, compilation_context);
+    CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
+    methodHandle cch(THREAD, Method::checked_resolve_jmethod_id(compilation_context_id));
+    return CompileBroker::compiler(compLevel)->is_intrinsic_available_for(mh, cch);
+  } else {
+    return CompileBroker::compiler(compLevel)->is_intrinsic_available_for(mh);
+  }
+WB_END
+
 WB_ENTRY(jint, WB_GetMethodCompilationLevel(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
   jmethodID jmid = reflected_method_to_jmid(thread, env, method);
   CHECK_JNI_EXCEPTION_(env, CompLevel_none);
@@ -1477,14 +1495,17 @@
 #endif // INCLUDE_NMT
   {CC"deoptimizeFrames",   CC"(Z)I",                  (void*)&WB_DeoptimizeFrames  },
   {CC"deoptimizeAll",      CC"()V",                   (void*)&WB_DeoptimizeAll     },
-  {CC"deoptimizeMethod0",   CC"(Ljava/lang/reflect/Executable;Z)I",
-                                                      (void*)&WB_DeoptimizeMethod  },
+  {CC"deoptimizeMethod0",  CC"(Ljava/lang/reflect/Executable;Z)I",
+                                                      (void*)&WB_DeoptimizeMethod  },
   {CC"isMethodCompiled0",  CC"(Ljava/lang/reflect/Executable;Z)Z",
                                                       (void*)&WB_IsMethodCompiled  },
   {CC"isMethodCompilable0", CC"(Ljava/lang/reflect/Executable;IZ)Z",
                                                       (void*)&WB_IsMethodCompilable},
   {CC"isMethodQueuedForCompilation0",
       CC"(Ljava/lang/reflect/Executable;)Z",          (void*)&WB_IsMethodQueuedForCompilation},
+  {CC"isIntrinsicAvailableForMethod0",
+      CC"(Ljava/lang/reflect/Executable;Ljava/lang/reflect/Executable;I)Z",
+                                                      (void*)&WB_IsIntrinsicAvailableForMethod},
   {CC"makeMethodNotCompilable0",
       CC"(Ljava/lang/reflect/Executable;IZ)V",        (void*)&WB_MakeMethodNotCompilable},
   {CC"testSetDontInlineMethod0",
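Reviewer note, not part of the patch excerpt: WhiteBox.java is not shown in this webrev section, so the following is a hypothetical sketch of the Java side of the new entry point. Only the registered name isIntrinsicAvailableForMethod0 and its descriptor (Ljava/lang/reflect/Executable;Ljava/lang/reflect/Executable;I)Z are taken from the registration above; the wrapper shapes are inferred from the two-argument call in IntrinsicBase:

import java.lang.reflect.Executable;

public class WhiteBoxSketch {
    // Maps to WB_IsIntrinsicAvailableForMethod; a null compilation context
    // selects the context-free is_intrinsic_available_for() overload in the VM.
    private native boolean isIntrinsicAvailableForMethod0(
        Executable method, Executable compilationContext, int compLevel);

    // Convenience wrapper (the form the tests call): no compilation context,
    // so per-method DisableIntrinsic settings are ignored.
    public boolean isIntrinsicAvailableForMethod(Executable method, int compLevel) {
        return isIntrinsicAvailableForMethod0(method, null, compLevel);
    }

    // Context-sensitive variant for exercising
    // -XX:CompileCommand=option,<class::method>,DisableIntrinsic,<id>.
    public boolean isIntrinsicAvailableForMethod(Executable method,
                                                 Executable compilationContext,
                                                 int compLevel) {
        return isIntrinsicAvailableForMethod0(method, compilationContext, compLevel);
    }
}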
--- old/src/share/vm/c1/c1_Compiler.cpp 2015-07-14 18:39:24.387577159 +0200
+++ new/src/share/vm/c1/c1_Compiler.cpp 2015-07-14 18:39:24.215577154 +0200
@@ -117,3 +117,7 @@
 void Compiler::print_timers() {
   Compilation::print_timers();
 }
+
+bool Compiler::is_intrinsic_available_for(methodHandle method) {
+  return GraphBuilder::is_intrinsic_available_for(method());
+}
--- old/src/share/vm/opto/c2compiler.hpp 2015-07-14 18:39:24.463577161 +0200
+++ new/src/share/vm/opto/c2compiler.hpp 2015-07-14 18:39:24.267577156 +0200
@@ -36,7 +36,6 @@
 
   // Name
   const char *name() { return "C2"; }
-
   void initialize();
 
   // Compilation entry point for methods
@@ -52,6 +51,12 @@
   // Print compilation timers and statistics
   void print_timers();
 
+  // Check the availability of an intrinsic for 'method' given a compilation context.
+  bool is_intrinsic_available_for(methodHandle method, methodHandle compilation_context);
+  // Check the availability of an intrinsic for 'method' but ignore
+  // the compilation context.
+  bool is_intrinsic_available_for(methodHandle method);
+
   // Initial size of the code buffer (may be increased at runtime)
   static int initial_code_buffer_size();
 };
--- old/src/share/vm/c1/c1_GraphBuilder.hpp 2015-07-14 18:39:24.431577160 +0200
+++ new/src/share/vm/c1/c1_GraphBuilder.hpp 2015-07-14 18:39:24.075577151 +0200
@@ -339,6 +339,10 @@
   void inline_sync_entry(Value lock, BlockBegin* sync_handler);
   void fill_sync_handler(Value lock, BlockBegin* sync_handler, bool default_handler = false);
 
+  static bool intrinsic_preserves_state(vmIntrinsics::ID id);
+  static bool intrinsic_can_trap(vmIntrinsics::ID id);
+  void build_graph_for_intrinsic(ciMethod* callee);
+
   // inliners
   bool try_inline(           ciMethod* callee, bool holder_known, Bytecodes::Code bc = Bytecodes::_illegal, Value receiver = NULL);
   bool try_inline_intrinsics(ciMethod* callee);
@@ -364,12 +368,12 @@
   void pop_scope();
   void pop_scope_for_jsr();
 
-  bool append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile);
-  bool append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile);
-  bool append_unsafe_get_raw(ciMethod* callee, BasicType t);
-  bool append_unsafe_put_raw(ciMethod* callee, BasicType t);
+  void append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile);
+  void append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile);
+  void append_unsafe_get_raw(ciMethod* callee, BasicType t);
+  void append_unsafe_put_raw(ciMethod* callee, BasicType t);
   void append_unsafe_CAS(ciMethod* callee);
-  bool append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add);
+  void append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add);
 
   void print_inlining(ciMethod* callee, const char* msg = NULL, bool success = true);
 
@@ -412,6 +416,8 @@
   static void sort_top_into_worklist(BlockList* worklist, BlockBegin* top);
 
   BlockBegin* start() const { return _start; }
+
+  static bool is_intrinsic_available_for(Method* method);
 };
 
 #endif // SHARE_VM_C1_C1_GRAPHBUILDER_HPP
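Reviewer note, not part of the patch: to make the three DisableIntrinsic modes documented in abstractCompiler.hpp concrete, here is a small, hypothetical driver to experiment with. Class and method names are illustrative only, and DisableIntrinsic is a diagnostic flag, so -XX:+UnlockDiagnosticVMOptions is assumed to be required:

public class DisableIntrinsicDemo {
    static int mix(Object o) {
        // _hashCode and _getClass are both intrinsic candidates here.
        return o.hashCode() ^ o.getClass().hashCode();
    }

    public static void main(String[] args) {
        long sum = 0;
        for (int i = 0; i < 1_000_000; i++) {
            sum += mix(new Object()); // let mix() get hot enough to be compiled
        }
        System.out.println(sum);
    }
}

// Mode (1), global:
//   java -XX:+UnlockDiagnosticVMOptions -XX:DisableIntrinsic=_hashCode,_getClass DisableIntrinsicDemo
// Mode (2), only when compiling DisableIntrinsicDemo::mix:
//   java -XX:+UnlockDiagnosticVMOptions \
//        -XX:CompileCommand=option,DisableIntrinsicDemo::mix,DisableIntrinsic,_hashCode DisableIntrinsicDemo
// Observe the difference with -XX:+PrintCompilation and the diagnostic -XX:+PrintInlining.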