
src/cpu/x86/vm/templateTable_x86.cpp

rev 8961 : [mq]: diff-shenandoah.patch

*** 155,164 ****
--- 155,165 ----
                         bool precise) {
  assert(val == noreg || val == rax, "parameter is just for looks");
  switch (barrier) {
#if INCLUDE_ALL_GCS
    case BarrierSet::G1SATBCTLogging:
+   case BarrierSet::ShenandoahBarrierSet:
      {
        // flatten object address if needed
        // We do it regardless of precise because we need the registers
        if (obj.index() == noreg && obj.disp() == 0) {
          if (obj.base() != rdx) {
*** 187,196 ****
--- 188,200 ----
        Register new_val = val;
        if (UseCompressedOops) {
          new_val = rbx;
          __ movptr(new_val, val);
        }
+       // For Shenandoah, make sure we only store refs into to-space.
+       oopDesc::bs()->interpreter_read_barrier(_masm, val);
+
        __ store_heap_oop(Address(rdx, 0), val);
        __ g1_write_barrier_post(rdx /* store_adr */,
                                 new_val /* new_val */,
                                 rthread /* thread */,
                                 rtmp /* tmp */,
*** 682,691 ****
--- 686,700 ----
}

void TemplateTable::index_check_without_pop(Register array, Register index) {
  // destroys rbx
  // check array
+
+ if (ShenandoahVerifyReadsToFromSpace) {
+   oopDesc::bs()->interpreter_read_barrier(_masm, array);
+ }
+
  __ null_check(array, arrayOopDesc::length_offset_in_bytes());
  // sign extend index for use by indexed load
  __ movl2ptr(index, index);
  // check index
  __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
*** 702,711 ****
--- 711,721 ----
void TemplateTable::iaload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ movl(rax, Address(rdx, rax,
                       Address::times_4,
                       arrayOopDesc::base_offset_in_bytes(T_INT)));
}
*** 714,723 ****
--- 724,734 ----
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  NOT_LP64(__ mov(rbx, rax)); // rbx,: index
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ movptr(rax, Address(rdx, rbx, Address::times_8,
                         arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
  NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8,
                                arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
}
*** 725,772 ****
--- 736,788 ----
void TemplateTable::faload() {
  transition(itos, ftos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_float(Address(rdx, rax,
                        Address::times_4,
                        arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}

void TemplateTable::daload() {
  transition(itos, dtos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_double(Address(rdx, rax,
                         Address::times_8,
                         arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}

void TemplateTable::aaload() {
  transition(itos, atos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_heap_oop(rax, Address(rdx, rax,
                                UseCompressedOops ? Address::times_4 : Address::times_ptr,
                                arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
}

void TemplateTable::baload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
}

void TemplateTable::caload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}

// iload followed by caload frequent pair
void TemplateTable::fast_icaload() {
*** 776,785 ****
--- 792,802 ----
  __ movl(rax, iaddress(rbx));
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_unsigned_short(rax,
                         Address(rdx, rax,
                                 Address::times_2,
                                 arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}
*** 788,797 ****
--- 805,815 ----
void TemplateTable::saload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rdx);
  __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
}

void TemplateTable::iload(int n) {
  transition(vtos, itos);
*** 982,991 ****
--- 1000,1010 ----
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
+ oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ movl(Address(rdx, rbx,
                  Address::times_4,
                  arrayOopDesc::base_offset_in_bytes(T_INT)),
          rax);
}
*** 996,1005 ****
--- 1015,1025 ----
  // rax,: low(value)
  // rcx: array
  // rdx: high(value)
  index_check(rcx, rbx); // prefer index in rbx,
  // rbx,: index
+ oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
  __ movptr(Address(rcx, rbx, Address::times_8,
                    arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
  NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8,
                           arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
}
*** 1008,1027 ****
--- 1028,1049 ----
  __ pop_i(rbx);
  // value is in UseSSE >= 1 ? xmm0 : ST(0)
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
+ oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}

void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(rbx);
  // value is in UseSSE >= 2 ? xmm0 : ST(0)
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
+ oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}

void TemplateTable::aastore() {
  Label is_null, ok_is_subtype, done;
*** 1034,1043 ****
--- 1056,1066 ----
  Address element_address(rdx, rcx,
                          UseCompressedOops? Address::times_4 : Address::times_ptr,
                          arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  index_check_without_pop(rdx, rcx); // kills rbx
+ oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);

  // Move subklass into rbx
  __ load_klass(rbx, rax);
*** 1082,1091 ****
--- 1105,1115 ----
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
+ oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ movb(Address(rdx, rbx,
                  Address::times_1,
                  arrayOopDesc::base_offset_in_bytes(T_BYTE)),
          rax);
}
*** 1095,1104 ****
--- 1119,1129 ----
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
+ oopDesc::bs()->interpreter_write_barrier(_masm, rdx);
  __ movw(Address(rdx, rbx,
                  Address::times_2,
                  arrayOopDesc::base_offset_in_bytes(T_CHAR)),
          rax);
}
*** 2309,2318 ****
--- 2334,2354 ----
void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ pop_ptr(rdx);
+ if (UseShenandoahGC) {
+   // For Shenandoah, if the objects are not equal, we try again after
+   // resolving both objects through a read barrier, to make sure we're
+   // not comparing from-space and to-space copies of the same object.
+   Label eq;
+   __ cmpptr(rdx, rax);
+   __ jcc(Assembler::equal, eq);
+   oopDesc::bs()->interpreter_read_barrier(_masm, rax);
+   oopDesc::bs()->interpreter_read_barrier(_masm, rdx);
+   __ bind(eq);
+ }
  __ cmpptr(rdx, rax);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
*** 2749,2758 ****
--- 2785,2795 ----
  resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
  jvmti_post_field_access(cache, index, is_static, false);
  load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);

  if (!is_static) pop_and_check_object(obj);
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, obj);
  const Address field(obj, off, Address::times_1, 0*wordSize);
  NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));

  Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
*** 3018,3027 ****
--- 3055,3065 ----
  // btos
  {
    __ pop(btos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    __ movb(field, rax);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
*** 3033,3042 ****
--- 3071,3081 ----
  // atos
  {
    __ pop(atos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    // Store into the field
    do_oop_store(_masm, field, rax, _bs->kind(), false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
    }
*** 3049,3058 ****
--- 3088,3098 ----
  // itos
  {
    __ pop(itos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    __ movl(field, rax);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
*** 3064,3073 ****
--- 3104,3114 ----
  // ctos
  {
    __ pop(ctos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    __ movw(field, rax);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
*** 3079,3088 ****
--- 3120,3130 ----
  // stos
  {
    __ pop(stos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    __ movw(field, rax);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
*** 3095,3104 ****
--- 3137,3147 ----
  // ltos
#ifdef _LP64
  {
    __ pop(ltos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    __ movq(field, rax);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
*** 3141,3150 ****
--- 3184,3194 ----
  // ftos
  {
    __ pop(ftos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    __ store_float(field);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
*** 3158,3167 ****
--- 3202,3212 ----
  // dtos
  {
    __ pop(dtos);
    if (!is_static) pop_and_check_object(obj);
+   oopDesc::bs()->interpreter_write_barrier(_masm, obj);
    __ store_double(field);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
    }
  }
*** 3284,3317 ****
--- 3329,3369 ----
  const Address field(rcx, rbx, Address::times_1);

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_aputfield:
+   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
    do_oop_store(_masm, field, rax, _bs->kind(), false);
    break;
  case Bytecodes::_fast_lputfield:
+   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
#ifdef _LP64
    __ movq(field, rax);
#else
    __ stop("should not be rewritten");
#endif
    break;
  case Bytecodes::_fast_iputfield:
+   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
    __ movl(field, rax);
    break;
  case Bytecodes::_fast_bputfield:
+   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
    __ movb(field, rax);
    break;
  case Bytecodes::_fast_sputfield:
    // fall through
  case Bytecodes::_fast_cputfield:
+   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
    __ movw(field, rax);
    break;
  case Bytecodes::_fast_fputfield:
+   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
    __ store_float(field);
    break;
  case Bytecodes::_fast_dputfield:
+   oopDesc::bs()->interpreter_write_barrier(_masm, rcx);
    __ store_double(field);
    break;
  default:
    ShouldNotReachHere();
  }
*** 3365,3374 ****
--- 3417,3427 ----
                         ConstantPoolCacheEntry::f2_offset())));
  // rax: object
  __ verify_oop(rax);
  __ null_check(rax);
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rax);
  Address field(rax, rbx, Address::times_1);

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_agetfield:
*** 3426,3435 ****
--- 3479,3489 ----
                         ConstantPoolCacheEntry::f2_offset())));
  // make sure exception is reported in correct bcp range (getfield is
  // next instruction)
  __ increment(rbcp);
  __ null_check(rax);
+ oopDesc::bs()->interpreter_read_barrier_not_null(_masm, rax);
  const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
  switch (state) {
  case itos:
    __ movl(rax, field);
    break;
*** 3828,3842 ****
--- 3882,3902 ----
    __ get_thread(thread);
  }
#endif // _LP64

  if (UseTLAB) {
+   uint oop_extra_words = Universe::heap()->oop_extra_words();
+   if (oop_extra_words > 0) {
+     __ addq(rdx, oop_extra_words * HeapWordSize);
+   }
+
    __ movptr(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset())));
    __ lea(rbx, Address(rax, rdx, Address::times_1));
    __ cmpptr(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset())));
    __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
    __ movptr(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
+   Universe::heap()->compile_prepare_oop(_masm);
    if (ZeroTLAB) {
      // the fields have been already cleared
      __ jmp(initialize_header);
    } else {
      // initialize both the header and fields
*** 3973,3982 ****
--- 4033,4045 ----
                    rarg1, rarg2, rax);
}

void TemplateTable::arraylength() {
  transition(atos, itos);
+ if (ShenandoahVerifyReadsToFromSpace) {
+   oopDesc::bs()->interpreter_read_barrier(_masm, rax);
+ }
  __ null_check(rax, arrayOopDesc::length_offset_in_bytes());
  __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
}

void TemplateTable::checkcast() {
*** 4073,4083 ****
--- 4136,4150 ----
#endif

  __ pop_ptr(rdx); // restore receiver
  __ verify_oop(rdx);
  __ load_klass(rdx, rdx);
+ if (ShenandoahVerifyReadsToFromSpace) {
+   __ jmp(resolved);
+ } else {
  __ jmpb(resolved);
+ }

  // Get superklass in rax and subklass in rdx
  __ bind(quicked);
  __ load_klass(rdx, rax);
  __ movptr(rax, Address(rcx, rbx,
*** 4169,4178 ****
--- 4236,4250 ----
  transition(atos, vtos);

  // check for NULL object
  __ null_check(rax);

+ // We need to preemptively evacuate the object, because we later compare
+ // it to objects in the BasicObjectLock list, and we might get false negatives
+ // if another thread evacuates the object in the meantime. See acmp.
+ oopDesc::bs()->interpreter_write_barrier(_masm, rax);
+
  const Address monitor_block_top(
      rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
      rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
*** 4191,4209 ****
  Label entry, loop, exit;
  __ movptr(rtop, monitor_block_top); // points to current entry,
                                      // starting with top-most entry
  __ lea(rbot, monitor_block_bot);    // points to word before bottom
                                      // of monitor block
  __ jmpb(entry);

  __ bind(loop);
  // check if current entry is used
  __ cmpptr(Address(rtop, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL_WORD);
  // if not used then remember entry in rmon
  __ cmovptr(Assembler::equal, rmon, rtop);   // cmov => cmovptr
  // check if current entry is for same object
! __ cmpptr(rax, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
  // if same object then stop searching
  __ jccb(Assembler::equal, exit);
  // otherwise advance to next entry
  __ addptr(rtop, entry_size);
  __ bind(entry);
--- 4263,4287 ----
  Label entry, loop, exit;
  __ movptr(rtop, monitor_block_top); // points to current entry,
                                      // starting with top-most entry
  __ lea(rbot, monitor_block_bot);    // points to word before bottom
                                      // of monitor block
+ if (UseShenandoahGC && ShenandoahVerifyReadsToFromSpace) {
+   __ jmp(entry);
+ } else {
  __ jmpb(entry);
+ }

  __ bind(loop);
  // check if current entry is used
  __ cmpptr(Address(rtop, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL_WORD);
  // if not used then remember entry in rmon
  __ cmovptr(Assembler::equal, rmon, rtop);   // cmov => cmovptr
  // check if current entry is for same object
! __ movptr(rscratch1, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
! oopDesc::bs()->interpreter_read_barrier(_masm, rscratch1);
! __ cmpptr(rax, rscratch1);
  // if same object then stop searching
  __ jccb(Assembler::equal, exit);
  // otherwise advance to next entry
  __ addptr(rtop, entry_size);
  __ bind(entry);
*** 4266,4275 ****
--- 4344,4358 ----
  transition(atos, vtos);

  // check for NULL object
  __ null_check(rax);

+ // We need to preemptively evacuate the object, because we later compare
+ // it to objects in the BasicObjectLock list, and we might get false negatives
+ // if another thread evacuates the object in the meantime. See acmp.
+ oopDesc::bs()->interpreter_write_barrier(_masm, rax);
+
  const Address monitor_block_top(
      rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
      rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
*** 4284,4298 ****
  Label entry, loop;
  __ movptr(rtop, monitor_block_top); // points to current entry,
                                      // starting with top-most entry
  __ lea(rbot, monitor_block_bot);    // points to word before bottom
                                      // of monitor block
  __ jmpb(entry);

  __ bind(loop);
  // check if current entry is for same object
! __ cmpptr(rax, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
  // if same object then stop searching
  __ jcc(Assembler::equal, found);
  // otherwise advance to next entry
  __ addptr(rtop, entry_size);
  __ bind(entry);
--- 4367,4387 ----
  Label entry, loop;
  __ movptr(rtop, monitor_block_top); // points to current entry,
                                      // starting with top-most entry
  __ lea(rbot, monitor_block_bot);    // points to word before bottom
                                      // of monitor block
+ if (UseShenandoahGC && ShenandoahVerifyReadsToFromSpace) {
+   __ jmp(entry);
+ } else {
  __ jmpb(entry);
+ }

  __ bind(loop);
  // check if current entry is for same object
! __ movptr(rscratch1, Address(rtop, BasicObjectLock::obj_offset_in_bytes()));
! oopDesc::bs()->interpreter_read_barrier(_masm, rscratch1);
! __ cmpptr(rax, rscratch1);
  // if same object then stop searching
  __ jcc(Assembler::equal, found);
  // otherwise advance to next entry
  __ addptr(rtop, entry_size);
  __ bind(entry);