src/cpu/x86/vm/templateTable_x86_64.cpp

*** 2477,2486 ****
--- 2477,2489 ----
    const Register flags = rax;
    const Register bc    = c_rarg3;

    resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
    jvmti_post_field_mod(cache, index, is_static);
+ #ifdef ASSERT
+   verify_stable(cache, index, is_static, false);
+ #endif //ASSERT
    load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);

    // [jk] not needed currently
    // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
    //                                              Assembler::StoreStore));
*** 2703,2712 ****
--- 2706,2719 ----
    ByteSize base = ConstantPoolCache::base_offset();

    jvmti_post_fast_field_mod();
+ #ifdef ASSERT
+   verify_stable_fast();
+ #endif // ASSERT
+
    // access constant pool cache
    __ get_cache_and_index_at_bcp(rcx, rbx, 1);

    // test for volatile with rdx
    __ movl(rdx, Address(rcx, rbx, Address::times_8,
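In this fast-path store the value is still live in the tos registers, so the debug helper added at the end of the file (verify_stable_fast, below) spills it into a jvalue laid out on the stack, makes the VM call, and then reloads it, because call_VM clobbers those registers. The following is a minimal standalone sketch of that save/call/restore shape; the JValueSketch union and check_update() hook are assumptions standing in for HotSpot's jvalue and InterpreterRuntime::verify_stable, not actual HotSpot declarations.

// Standalone sketch (not HotSpot code): spill the live top-of-stack value
// into a jvalue-shaped union, pass its address to a verification hook, and
// reload it afterwards -- the shape verify_stable_fast() generates around
// its call_VM. The union layout and check_update() are illustrative only.
#include <cstdint>
#include <cstdio>

union JValueSketch {       // stand-in for HotSpot's jvalue
  int32_t i;
  int64_t j;
  float   f;
  double  d;
  void*   a;
};

enum class PutKind { Int, Long, Float, Double, Object };

// Hypothetical hook; the real code calls InterpreterRuntime::verify_stable
// with (object, cp cache entry, jvalue on the stack).
static void check_update(void* obj, const JValueSketch* value, PutKind kind) {
  (void)value;  // a real check would inspect the value and the field state
  std::printf("checking field update on %p (kind %d)\n",
              obj, static_cast<int>(kind));
}

static int64_t store_with_check(void* obj, int64_t live_tos, PutKind kind) {
  JValueSketch saved{};                       // "push" tos into the jvalue
  if (kind == PutKind::Long) saved.j = live_tos;
  else                       saved.i = static_cast<int32_t>(live_tos);
  check_update(obj, &saved, kind);            // the call may clobber tos
  return (kind == PutKind::Long) ? saved.j    // "pop" the value back
                                 : saved.i;
}

int main() {
  int receiver = 0;
  std::printf("restored tos: %lld\n",
              static_cast<long long>(store_with_check(&receiver, 42, PutKind::Int)));
  return 0;
}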
*** 3751,3755 ****
--- 3758,3860 ----
            c_rarg1);
    __ load_unsigned_byte(rbx, at_bcp(3));
    __ lea(rsp, Address(rsp, rbx, Address::times_8));
  }
  #endif // !CC_INTERP
+
+ #ifdef ASSERT
+
+ // Correct values of the cache and index registers are preserved.
+ void TemplateTable::verify_stable(Register cache, Register index,
+                                   bool is_static, bool has_tos) {
+   if (FoldStableValues && (VerifyStable || TraceStableFieldUpdates)) {
+     ByteSize cp_base_offset = ConstantPoolCache::base_offset();
+     assert_different_registers(cache, index, rax);
+     __ get_cache_and_index_at_bcp(c_rarg2, rscratch1, 1);
+
+     if (is_static) {
+       // Life is simple.  Null out the object pointer.
+       __ xorl(c_rarg1, c_rarg1);
+     } else {
+       // Life is harder.  The stack holds the value on top, followed by
+       // the object.  We don't know the size of the value, though; it
+       // could be one or two words depending on its type.  As a result,
+       // we must find the type to determine where the object is.
+       __ movl(c_rarg3, Address(c_rarg2, rscratch1,
+                                Address::times_8,
+                                in_bytes(cp_base_offset +
+                                         ConstantPoolCacheEntry::flags_offset())));
+       __ shrl(c_rarg3, ConstantPoolCacheEntry::tos_state_shift);
+       // Make sure we don't need to mask rcx after the above shift
+       ConstantPoolCacheEntry::verify_tos_state_shift();
+       __ movptr(c_rarg1, at_tos_p1());  // initially assume a one word jvalue
+       __ cmpl(c_rarg3, ltos);
+       __ cmovptr(Assembler::equal,
+                  c_rarg1, at_tos_p2()); // ltos (two word jvalue)
+       __ cmpl(c_rarg3, dtos);
+       __ cmovptr(Assembler::equal,
+                  c_rarg1, at_tos_p2()); // dtos (two word jvalue)
+     }
+     // cache entry pointer
+     __ addptr(c_rarg2, in_bytes(cp_base_offset));
+     __ shll(rscratch1, LogBytesPerWord);
+     __ addptr(c_rarg2, rscratch1);
+     // object (tos)
+     __ mov(c_rarg3, rsp);
+     // c_rarg1: object pointer set up above (NULL if static)
+     // c_rarg2: cache entry pointer
+     // c_rarg3: jvalue object on the stack
+     __ call_VM(noreg,
+                CAST_FROM_FN_PTR(address,
+                                 InterpreterRuntime::verify_stable),
+                c_rarg1, c_rarg2, c_rarg3);
+     __ get_cache_and_index_at_bcp(cache, index, 1);
+   }
+ }
+
+ void TemplateTable::verify_stable_fast() {
+   if (FoldStableValues && (VerifyStable || TraceStableFieldUpdates)) {
+     __ pop_ptr(rbx);                  // copy the object pointer from tos
+     __ verify_oop(rbx);
+     __ push_ptr(rbx);                 // put the object pointer back on tos
+     // Save tos values before call_VM() clobbers them.  Since we have
+     // to do it for every data type, we use the saved values as the
+     // jvalue object.
+     switch (bytecode()) {             // load values into the jvalue object
+     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
+     case Bytecodes::_fast_bputfield: // fall through
+     case Bytecodes::_fast_sputfield: // fall through
+     case Bytecodes::_fast_cputfield: // fall through
+     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
+     case Bytecodes::_fast_dputfield: __ push_d(); break;
+     case Bytecodes::_fast_fputfield: __ push_f(); break;
+     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
+
+     default:
+       ShouldNotReachHere();
+     }
+     __ mov(c_rarg3, rsp);             // points to jvalue on the stack
+     // access constant pool cache entry
+     __ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1);
+     __ verify_oop(rbx);
+     // rbx: object pointer copied above
+     // c_rarg2: cache entry pointer
+     // c_rarg3: jvalue object on the stack
+     __ call_VM(noreg,
+                CAST_FROM_FN_PTR(address,
+                                 InterpreterRuntime::verify_stable),
+                rbx, c_rarg2, c_rarg3);
+
+     switch (bytecode()) {             // restore tos values
+     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
+     case Bytecodes::_fast_bputfield: // fall through
+     case Bytecodes::_fast_sputfield: // fall through
+     case Bytecodes::_fast_cputfield: // fall through
+     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
+     case Bytecodes::_fast_dputfield: __ pop_d(); break;
+     case Bytecodes::_fast_fputfield: __ pop_f(); break;
+     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
+     }
+   }
+ }
+
+ #endif // ASSERT
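The "Life is harder" branch in verify_stable() above turns on a detail of the interpreter's operand-stack layout: the value to be stored sits on top and the receiver directly beneath it, but long and double values occupy two slots, so the receiver's address is one or two slots down depending on the tos state read from the cache entry's flags. Below is a minimal standalone sketch of that calculation, using an 8-byte-slot array to stand in for the expression stack; the tag set and slot layout are illustrative assumptions, not HotSpot's actual types.

// Standalone sketch (not HotSpot code) of the "Life is harder" reasoning in
// verify_stable(): one-slot values put the receiver one slot below the top,
// two-slot values (long/double) put it two slots below.
#include <cstdint>
#include <cstdio>

enum TosState { itos, ltos, ftos, dtos, atos };  // simplified tag set

// stack_top[0] is the top-of-stack slot (the value); each slot is 8 bytes.
static void* receiver_under_value(const uint64_t* stack_top, TosState tos) {
  const int receiver_slot = (tos == ltos || tos == dtos) ? 2 : 1;
  return reinterpret_cast<void*>(stack_top[receiver_slot]);
}

int main() {
  int receiver = 0;

  // int store layout:  [ value ][ receiver ]
  uint64_t int_stack[2]  = { 42u, reinterpret_cast<uint64_t>(&receiver) };
  // long store layout: [ value ][ second slot ][ receiver ]
  uint64_t long_stack[3] = { 7u, 0u, reinterpret_cast<uint64_t>(&receiver) };

  std::printf("int store  -> receiver at %p\n",
              receiver_under_value(int_stack, itos));
  std::printf("long store -> receiver at %p\n",
              receiver_under_value(long_stack, ltos));
  return 0;
}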