--- old/src/hotspot/cpu/x86/c1_CodeStubs_x86.cpp 2019-01-15 22:00:06.476475314 -0800
+++ new/src/hotspot/cpu/x86/c1_CodeStubs_x86.cpp 2019-01-15 22:00:06.292468550 -0800
@@ -235,16 +235,28 @@
 
 // Implementation of MonitorAccessStubs
 
-MonitorEnterStub::MonitorEnterStub(LIR_Opr obj_reg, LIR_Opr lock_reg, CodeEmitInfo* info)
+MonitorEnterStub::MonitorEnterStub(LIR_Opr obj_reg, LIR_Opr lock_reg, CodeEmitInfo* info, CodeStub* throw_imse_stub, LIR_Opr scratch_reg)
   : MonitorAccessStub(obj_reg, lock_reg) {
   _info = new CodeEmitInfo(info);
+  _throw_imse_stub = throw_imse_stub;
+  _scratch_reg = scratch_reg;
+  if (_throw_imse_stub != NULL) {
+    assert(_scratch_reg != LIR_OprFact::illegalOpr, "must be");
+  }
 }
 
 
 void MonitorEnterStub::emit_code(LIR_Assembler* ce) {
   assert(__ rsp_offset() == 0, "frame size should be fixed");
   __ bind(_entry);
+  if (_throw_imse_stub != NULL) {
+    // When we come here, _obj_reg has already been checked to be non-null.
+    Register mark = _scratch_reg->as_register();
+    __ movptr(mark, Address(_obj_reg->as_register(), oopDesc::mark_offset_in_bytes()));
+    __ testl(mark, markOopDesc::always_locked_pattern);
+    __ jcc(Assembler::notZero, *_throw_imse_stub->entry());
+  }
   ce->store_parameter(_obj_reg->as_register(), 1);
   ce->store_parameter(_lock_reg->as_register(), 0);
   Runtime1::StubID enter_id;
--- old/src/hotspot/cpu/x86/c1_LIRAssembler_x86.cpp 2019-01-15 22:00:07.052496489 -0800
+++ new/src/hotspot/cpu/x86/c1_LIRAssembler_x86.cpp 2019-01-15 22:00:06.864489578 -0800
@@ -3479,7 +3479,8 @@
     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
     __ resolve(ACCESS_READ | ACCESS_WRITE, obj);
     // add debug info for NullPointerException only if one is possible
-    int null_check_offset = __ lock_object(hdr, obj, lock, scratch, *op->stub()->entry());
+    bool check_always_locked = (op->throw_imse_stub() != NULL);
+    int null_check_offset = __ lock_object(hdr, obj, lock, scratch, *op->stub()->entry(), check_always_locked);
     if (op->info() != NULL) {
       add_debug_info_for_null_check(null_check_offset, op->info());
     }
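
Both hunks above gate on the same mark-word test: if markOopDesc::always_locked_pattern is set, the object can never be locked. A minimal standalone sketch of that predicate, with a placeholder bit value rather than the real constant from the Valhalla markOop definition:

    // Sketch only, not patch code. ALWAYS_LOCKED_PATTERN is an illustrative
    // placeholder for markOopDesc::always_locked_pattern.
    #include <cstdint>

    static const uintptr_t ALWAYS_LOCKED_PATTERN = 0x2;  // assumed value, for illustration

    static bool must_throw_imse(uintptr_t mark_word) {
      // A non-zero intersection means the object is permanently "locked",
      // so compiled code must branch to the IllegalMonitorStateException stub.
      return (mark_word & ALWAYS_LOCKED_PATTERN) != 0;
    }
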
--- old/src/hotspot/cpu/x86/c1_LIRGenerator_x86.cpp 2019-01-15 22:00:07.652518547 -0800
+++ new/src/hotspot/cpu/x86/c1_LIRGenerator_x86.cpp 2019-01-15 22:00:07.468511783 -0800
@@ -286,7 +286,7 @@
   LIR_Opr lock = new_register(T_INT);
   // Need a scratch register for biased locking on x86
   LIR_Opr scratch = LIR_OprFact::illegalOpr;
-  if (UseBiasedLocking) {
+  if (UseBiasedLocking || x->maybe_valuetype()) {
     scratch = new_register(T_INT);
   }
 
@@ -294,11 +294,17 @@
   if (x->needs_null_check()) {
     info_for_exception = state_for(x);
   }
+
+  CodeStub* throw_imse_stub = x->maybe_valuetype() ?
+      new SimpleExceptionStub(Runtime1::throw_illegal_monitor_state_exception_id,
+                              LIR_OprFact::illegalOpr, state_for(x))
+    : NULL;
+
   // this CodeEmitInfo must not have the xhandlers because here the
   // object is already locked (xhandlers expect object to be unlocked)
   CodeEmitInfo* info = state_for(x, x->state(), true);
   monitor_enter(obj.result(), lock, syncTempOpr(), scratch,
-                x->monitor_no(), info_for_exception, info);
+                x->monitor_no(), info_for_exception, info, throw_imse_stub);
 }
 
 
--- old/src/hotspot/cpu/x86/c1_MacroAssembler_x86.cpp 2019-01-15 22:00:08.824561633 -0800
+++ new/src/hotspot/cpu/x86/c1_MacroAssembler_x86.cpp 2019-01-15 22:00:08.640554869 -0800
@@ -38,7 +38,7 @@
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/stubRoutines.hpp"
 
-int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr, Register scratch, Label& slow_case) {
+int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr, Register scratch, Label& slow_case, bool check_always_locked) {
   const int aligned_mask = BytesPerWord -1;
   const int hdr_offset = oopDesc::mark_offset_in_bytes();
   assert(hdr == rax, "hdr must be rax, for the cmpxchg instruction");
@@ -60,6 +60,10 @@
 
   // Load object header
   movptr(hdr, Address(obj, hdr_offset));
+  if (check_always_locked) {
+    testl(hdr, markOopDesc::always_locked_pattern);
+    jcc(Assembler::notZero, slow_case);
+  }
   // and mark it as unlocked
   orptr(hdr, markOopDesc::unlocked_value);
   // save unlocked object header into the displaced header location on the stack
--- old/src/hotspot/cpu/x86/c1_MacroAssembler_x86.hpp 2019-01-15 22:00:15.452805299 -0800
+++ new/src/hotspot/cpu/x86/c1_MacroAssembler_x86.hpp 2019-01-15 22:00:15.264798388 -0800
@@ -51,7 +51,7 @@
   // disp_hdr: must point to the displaced header location, contents preserved
   // scratch : scratch register, contents destroyed
   // returns code offset at which to add null check debug information
-  int lock_object (Register swap, Register obj, Register disp_hdr, Register scratch, Label& slow_case);
+  int lock_object (Register swap, Register obj, Register disp_hdr, Register scratch, Label& slow_case, bool check_always_locked);
 
   // unlocking
   // hdr : contents destroyed
--- old/src/hotspot/cpu/x86/c1_Runtime1_x86.cpp 2019-01-15 22:00:16.024826329 -0800
+++ new/src/hotspot/cpu/x86/c1_Runtime1_x86.cpp 2019-01-15 22:00:15.840819564 -0800
@@ -1318,11 +1318,17 @@
       break;
 
     case throw_incompatible_class_change_error_id:
-      { StubFrame f(sasm, "throw_incompatible_class_cast_exception", dont_gc_arguments);
+      { StubFrame f(sasm, "throw_incompatible_class_change_error", dont_gc_arguments);
        oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
       }
       break;
 
+    case throw_illegal_monitor_state_exception_id:
+      { StubFrame f(sasm, "throw_illegal_monitor_state_exception", dont_gc_arguments);
+        oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_illegal_monitor_state_exception), false);
+      }
+      break;
+
     case slow_subtype_check_id:
       {
         // Typical calling sequence:
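
Taken together, the x86 hunks above give monitorenter three possible outcomes: the inline fast path, the Runtime1::monitorenter slow call, or the new IMSE throw stub. A self-contained model of that dispatch, using stand-in names rather than HotSpot types:

    // Simplified model of the control flow, not HotSpot code. The pattern value
    // is a placeholder; fast_path_succeeds stands in for the displaced-header
    // CAS performed by lock_object().
    #include <cstdint>

    enum class MonitorEnterOutcome { FastLocked, RuntimeMonitorenter, ThrowIMSE };

    static const uintptr_t ALWAYS_LOCKED_PATTERN = 0x2;  // assumed value

    static MonitorEnterOutcome monitorenter_model(uintptr_t mark_word,
                                                  bool check_always_locked,
                                                  bool fast_path_succeeds) {
      if (check_always_locked && (mark_word & ALWAYS_LOCKED_PATTERN) != 0) {
        return MonitorEnterOutcome::ThrowIMSE;          // MonitorEnterStub jumps to the throw stub
      }
      if (fast_path_succeeds) {
        return MonitorEnterOutcome::FastLocked;         // lock_object() fast path
      }
      return MonitorEnterOutcome::RuntimeMonitorenter;  // contended case enters the runtime
    }
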
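
The GraphBuilder hunk above decides at bytecode-parse time whether the check is needed at all: synchronized method entries never need it (value types cannot declare synchronized methods), while an explicit monitorenter needs it whenever the declared type does not rule out a value type. A sketch of that decision with a simplified stand-in for the ciType queries:

    // Sketch only; DeclaredType is a stand-in for the ciType information used above.
    enum class DeclaredType { Unknown /* e.g. a Phi */, ValueType, JavaLangObject, OtherIdentityClass };

    static bool maybe_valuetype_for_monitorenter(bool enable_valhalla, DeclaredType t) {
      if (!enable_valhalla) return false;
      // Conservative: Unknown and java.lang.Object may still be a value type at runtime.
      return t == DeclaredType::Unknown ||
             t == DeclaredType::ValueType ||
             t == DeclaredType::JavaLangObject;
    }
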
--- old/src/hotspot/share/c1/c1_Instruction.hpp 2019-01-15 22:00:22.241054849 -0800
+++ new/src/hotspot/share/c1/c1_Instruction.hpp 2019-01-15 22:00:22.057048084 -0800
@@ -1574,14 +1574,19 @@
 
 LEAF(MonitorEnter, AccessMonitor)
+  bool _maybe_valuetype;
  public:
   // creation
-  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
+  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool maybe_valuetype)
   : AccessMonitor(obj, monitor_no, state_before)
+  , _maybe_valuetype(maybe_valuetype)
   {
     ASSERT_VALUES
   }
 
+  // accessors
+  bool maybe_valuetype() const { return _maybe_valuetype; }
+
   // generic
   virtual bool can_trap() const { return true; }
 };
 
--- old/src/hotspot/share/c1/c1_LIR.cpp 2019-01-15 22:00:22.833076612 -0800
+++ new/src/hotspot/share/c1/c1_LIR.cpp 2019-01-15 22:00:22.645069701 -0800
@@ -821,6 +821,7 @@
 
       assert(opLock->_result->is_illegal(), "unused");
       do_stub(opLock->_stub);
+      do_stub(opLock->_throw_imse_stub);
 
       break;
     }
@@ -1057,6 +1058,9 @@
   if (stub()) {
     masm->append_code_stub(stub());
   }
+  if (throw_imse_stub()) {
+    masm->append_code_stub(throw_imse_stub());
+  }
 }
 
 #ifdef ASSERT
@@ -1358,7 +1362,7 @@
                     dst));
 }
 
-void LIR_List::lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info) {
+void LIR_List::lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info, CodeStub* throw_imse_stub) {
   append(new LIR_OpLock(
                     lir_lock,
                     hdr,
@@ -1366,7 +1370,8 @@
                     lock,
                     scratch,
                     stub,
-                    info));
+                    info,
+                    throw_imse_stub));
 }
 
 void LIR_List::unlock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub) {
--- old/src/hotspot/share/c1/c1_LIR.hpp 2019-01-15 22:00:23.413097935 -0800
+++ new/src/hotspot/share/c1/c1_LIR.hpp 2019-01-15 22:00:23.225091023 -0800
@@ -1788,20 +1788,23 @@
   LIR_Opr _lock;
   LIR_Opr _scratch;
   CodeStub* _stub;
+  CodeStub* _throw_imse_stub;
  public:
-  LIR_OpLock(LIR_Code code, LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info)
+  LIR_OpLock(LIR_Code code, LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info, CodeStub* throw_imse_stub=NULL)
     : LIR_Op(code, LIR_OprFact::illegalOpr, info)
     , _hdr(hdr)
     , _obj(obj)
     , _lock(lock)
     , _scratch(scratch)
-    , _stub(stub) {}
+    , _stub(stub)
+    , _throw_imse_stub(throw_imse_stub) {}
 
   LIR_Opr hdr_opr() const { return _hdr; }
   LIR_Opr obj_opr() const { return _obj; }
   LIR_Opr lock_opr() const { return _lock; }
   LIR_Opr scratch_opr() const { return _scratch; }
   CodeStub* stub() const { return _stub; }
+  CodeStub* throw_imse_stub() const { return _throw_imse_stub; }
 
   virtual void emit_code(LIR_Assembler* masm);
   virtual LIR_OpLock* as_OpLock() { return this; }
@@ -2240,7 +2243,7 @@
 
   void load_stack_address_monitor(int monitor_ix, LIR_Opr dst) { append(new LIR_Op1(lir_monaddr, LIR_OprFact::intConst(monitor_ix), dst)); }
   void unlock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub);
-  void lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info);
+  void lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info, CodeStub* throw_imse_stub=NULL);
 
   void set_24bit_fpu() { append(new LIR_Op0(lir_24bit_FPU )); }
   void restore_fpu()   { append(new LIR_Op0(lir_reset_FPU )); }
--- old/src/hotspot/share/c1/c1_LIRGenerator.cpp 2019-01-15 22:00:24.001119552 -0800
+++ new/src/hotspot/share/c1/c1_LIRGenerator.cpp 2019-01-15 22:00:23.817112787 -0800
@@ -643,13 +643,14 @@
 }
 
 
-void LIRGenerator::monitor_enter(LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info) {
+void LIRGenerator::monitor_enter(LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no,
+                                 CodeEmitInfo* info_for_exception, CodeEmitInfo* info, CodeStub* throw_imse_stub) {
   if (!GenerateSynchronizationCode) return;
   // for slow path, use debug info for state after successful locking
-  CodeStub* slow_path = new MonitorEnterStub(object, lock, info);
+  CodeStub* slow_path = new MonitorEnterStub(object, lock, info, throw_imse_stub, scratch);
   __ load_stack_address_monitor(monitor_no, lock);
   // for handling NullPointerException, use debug info representing just the lock stack before this monitorenter
-  __ lock_object(hdr, object, lock, scratch, slow_path, info_for_exception);
+  __ lock_object(hdr, object, lock, scratch, slow_path, info_for_exception, throw_imse_stub);
 }
 
 
--- old/src/hotspot/share/c1/c1_LIRGenerator.hpp 2019-01-15 22:00:24.605141757 -0800
+++ new/src/hotspot/share/c1/c1_LIRGenerator.hpp 2019-01-15 22:00:24.417134845 -0800
@@ -363,7 +363,7 @@
 
   void logic_op (Bytecodes::Code code, LIR_Opr dst_reg, LIR_Opr left, LIR_Opr right);
 
-  void monitor_enter (LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info);
+  void monitor_enter (LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info, CodeStub* throw_imse_stub);
   void monitor_exit (LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no);
 
   void new_instance (LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info);
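
The shared C1 changes above all follow one plumbing pattern: LIR_OpLock, LIR_List::lock_object and LIRGenerator::monitor_enter carry an optional extra code stub that defaults to NULL, so existing call sites and non-value-type locks are untouched, and emission appends the stub only when it is present. A small model of that pattern with stand-in types:

    // Stand-in types, illustrative only; mirrors the "optional second slow-path
    // stub" threading used by LIR_OpLock above.
    #include <vector>

    struct StubModel { const char* name; };

    struct OpLockModel {
      StubModel* _stub;             // regular slow path, always present
      StubModel* _throw_imse_stub;  // null unless the object may be a value type
      OpLockModel(StubModel* stub, StubModel* throw_imse_stub = nullptr)
        : _stub(stub), _throw_imse_stub(throw_imse_stub) {}

      void append_stubs(std::vector<StubModel*>& out_of_line) const {
        if (_stub != nullptr)            out_of_line.push_back(_stub);
        if (_throw_imse_stub != nullptr) out_of_line.push_back(_throw_imse_stub);
      }
    };
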
--- old/src/hotspot/share/c1/c1_Runtime1.cpp 2019-01-15 22:00:25.177162785 -0800
+++ new/src/hotspot/share/c1/c1_Runtime1.cpp 2019-01-15 22:00:24.989155875 -0800
@@ -127,6 +127,7 @@
 int Runtime1::_throw_null_pointer_exception_count = 0;
 int Runtime1::_throw_class_cast_exception_count = 0;
 int Runtime1::_throw_incompatible_class_change_error_count = 0;
+int Runtime1::_throw_illegal_monitor_state_exception_count = 0;
 int Runtime1::_throw_array_store_exception_count = 0;
 int Runtime1::_throw_count = 0;
 
@@ -696,6 +697,13 @@
 JRT_END
 
 
+JRT_ENTRY(void, Runtime1::throw_illegal_monitor_state_exception(JavaThread* thread))
+  NOT_PRODUCT(_throw_illegal_monitor_state_exception_count++;)
+  ResourceMark rm(thread);
+  SharedRuntime::throw_and_post_jvmti_exception(thread, vmSymbols::java_lang_IllegalMonitorStateException());
+JRT_END
+
+
 JRT_ENTRY_NO_ASYNC(void, Runtime1::monitorenter(JavaThread* thread, oopDesc* obj, BasicObjectLock* lock))
   NOT_PRODUCT(_monitorenter_slowcase_cnt++;)
   if (PrintBiasedLockingStatistics) {
@@ -1502,6 +1510,7 @@
   tty->print_cr(" _throw_null_pointer_exception_count: %d:", _throw_null_pointer_exception_count);
   tty->print_cr(" _throw_class_cast_exception_count: %d:", _throw_class_cast_exception_count);
   tty->print_cr(" _throw_incompatible_class_change_error_count: %d:", _throw_incompatible_class_change_error_count);
+  tty->print_cr(" _throw_illegal_monitor_state_exception_count: %d:", _throw_illegal_monitor_state_exception_count);
   tty->print_cr(" _throw_array_store_exception_count: %d:", _throw_array_store_exception_count);
   tty->print_cr(" _throw_count: %d:", _throw_count);
 
--- old/src/hotspot/share/c1/c1_Runtime1.hpp 2019-01-15 22:00:25.757184108 -0800
+++ new/src/hotspot/share/c1/c1_Runtime1.hpp 2019-01-15 22:00:25.573177344 -0800
@@ -59,6 +59,7 @@
   stub(throw_array_store_exception) \
   stub(throw_class_cast_exception) \
   stub(throw_incompatible_class_change_error) \
+  stub(throw_illegal_monitor_state_exception) \
   stub(slow_subtype_check) \
   stub(monitorenter) \
   stub(monitorenter_nofpu) /* optimized version that does not preserve fpu registers */ \
@@ -116,6 +117,7 @@
   static int _throw_null_pointer_exception_count;
   static int _throw_class_cast_exception_count;
   static int _throw_incompatible_class_change_error_count;
+  static int _throw_illegal_monitor_state_exception_count;
   static int _throw_array_store_exception_count;
   static int _throw_count;
 #endif
@@ -156,6 +158,7 @@
   static void throw_null_pointer_exception(JavaThread* thread);
   static void throw_class_cast_exception(JavaThread* thread, oopDesc* object);
   static void throw_incompatible_class_change_error(JavaThread* thread);
+  static void throw_illegal_monitor_state_exception(JavaThread* thread);
   static void throw_array_store_exception(JavaThread* thread, oopDesc* object);
 
   static void monitorenter(JavaThread* thread, oopDesc* obj, BasicObjectLock* lock);
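
The Runtime1 changes hang off the stub(...) X-macro list, so one new list entry produces the stub id, the generator case, and the bookkeeping counter hookups above. A compilable sketch of that expansion pattern (names are simplified; this is not the real Runtime1 stub macro):

    // Illustrative X-macro expansion, not the HotSpot macro.
    #include <cstdio>

    #define STUB_LIST(stub)                        \
      stub(throw_class_cast_exception)             \
      stub(throw_incompatible_class_change_error)  \
      stub(throw_illegal_monitor_state_exception)  /* the entry added by this patch */

    // One expansion produces the ids...
    #define DECLARE_ID(name) name##_id,
    enum StubID { STUB_LIST(DECLARE_ID) number_of_ids };
    #undef DECLARE_ID

    // ...another produces printable names.
    #define DECLARE_NAME(name) #name,
    static const char* stub_names[] = { STUB_LIST(DECLARE_NAME) };
    #undef DECLARE_NAME

    int main() {
      for (int i = 0; i < number_of_ids; i++) {
        printf("%d: %s\n", i, stub_names[i]);
      }
      return 0;
    }
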
--- old/test/hotspot/jtreg/compiler/valhalla/valuetypes/TestLWorld.java 2019-01-15 22:00:26.889225724 -0800
+++ new/test/hotspot/jtreg/compiler/valhalla/valuetypes/TestLWorld.java 2019-01-15 22:00:26.701218813 -0800
@@ -47,9 +47,24 @@
  * compiler.valhalla.valuetypes.TestLWorld
  */
 public class TestLWorld extends ValueTypeTest {
+    public int getNumScenarios() {
+        if (TEST_C1) {
+            return 2;
+        } else {
+            return super.getNumScenarios();
+        }
+    }
+
     // Extra VM parameters for some test scenarios. See ValueTypeTest.getVMParameters()
     @Override
     public String[] getExtraVMParameters(int scenario) {
+        if (TEST_C1) {
+            switch (scenario) {
+            case 1: return new String[] {"-XX:-UseBiasedLocking"};
+            }
+            return null;
+        }
+
         switch (scenario) {
         case 1: return new String[] {"-XX:-UseOptoBiasInlining"};
         case 2: return new String[] {"-XX:-UseBiasedLocking"};
@@ -1159,6 +1174,9 @@
 
     @DontCompile
     public void test39_verifier(boolean warmup) {
+        if (!ENABLE_VALUE_ARRAY_COVARIANCE) {
+            return;
+        }
         int index = Math.abs(rI) % 3;
         MyValue1[] va = new MyValue1[42];
         Object result = test39(null, testValue1, index, index, 0);
--- old/test/hotspot/jtreg/compiler/valhalla/valuetypes/TestNewAcmp.java 2019-01-15 22:00:27.485247635 -0800
+++ new/test/hotspot/jtreg/compiler/valhalla/valuetypes/TestNewAcmp.java 2019-01-15 22:00:27.301240870 -0800
@@ -1481,6 +1481,11 @@
     }
 
     public static void main(String[] args) throws Exception {
+        if (Boolean.getBoolean("test.c1")) {
+            System.out.println("new acmp is not implemented for C1");
+            return;
+        }
+
         int nullMode = Integer.valueOf(args[0]);
         TestNewAcmp t = new TestNewAcmp();
         t.run(nullMode);
--- old/test/hotspot/jtreg/compiler/valhalla/valuetypes/ValueTypeTest.java 2019-01-15 22:00:28.069269105 -0800
+++ new/test/hotspot/jtreg/compiler/valhalla/valuetypes/ValueTypeTest.java 2019-01-15 22:00:27.881262194 -0800
@@ -87,6 +87,9 @@
     // Run "jtreg -Dtest.c1=true" to enable experimental C1 testing.
     static final boolean TEST_C1 = Boolean.getBoolean("test.c1");
 
+    // Should we execute tests that assume (ValueType[] <: Object[])?
+    static final boolean ENABLE_VALUE_ARRAY_COVARIANCE = Boolean.getBoolean("ValueArrayCovariance");
+
     // Random test values
     public static final int rI = Utils.getRandomInstance().nextInt() % 1000;
     public static final long rL = Utils.getRandomInstance().nextLong() % 1000;