
src/hotspot/cpu/x86/templateTable_x86.cpp

*** 2717,2753 ****
  // Helper function to insert a is-volatile test and memory barrier
    __ membar(order_constraint);
  }
  
  void TemplateTable::resolve_cache_and_index(int byte_no,
!                                             Register Rcache,
                                              Register index,
                                              size_t index_size) {
    const Register temp = rbx;
!   assert_different_registers(Rcache, index, temp);
  
    Label resolved;
  
    Bytecodes::Code code = bytecode();
    switch (code) {
    case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
    case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
    default: break;
    }
  
    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
!   __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
    __ cmpl(temp, code);  // have we resolved this bytecode?
    __ jcc(Assembler::equal, resolved);
  
    // resolve first time through
    address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
    __ movl(temp, code);
    __ call_VM(noreg, entry, temp);
    // Update registers with resolved info
!   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
    __ bind(resolved);
  }
  
  // The cache and index registers must be set before call
  void TemplateTable::load_field_cp_cache_entry(Register obj,
                                                Register cache,
--- 2717,2769 ----
  // Helper function to insert a is-volatile test and memory barrier
    __ membar(order_constraint);
  }
  
  void TemplateTable::resolve_cache_and_index(int byte_no,
!                                             Register cache,
                                              Register index,
                                              size_t index_size) {
    const Register temp = rbx;
!   assert_different_registers(cache, index, temp);
  
+   Label L_clinit_barrier_slow;
    Label resolved;
  
    Bytecodes::Code code = bytecode();
    switch (code) {
    case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
    case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
    default: break;
    }
  
    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
!   __ get_cache_and_index_and_bytecode_at_bcp(cache, index, temp, byte_no, 1, index_size);
    __ cmpl(temp, code);  // have we resolved this bytecode?
    __ jcc(Assembler::equal, resolved);
  
    // resolve first time through
+   // Class initialization barrier slow path lands here as well.
+   __ bind(L_clinit_barrier_slow);
    address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
    __ movl(temp, code);
    __ call_VM(noreg, entry, temp);
    // Update registers with resolved info
!   __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
! 
    __ bind(resolved);
+ 
+   // Class initialization barrier for static methods
+   if (UseFastClassInitChecks && bytecode() == Bytecodes::_invokestatic) {
+     const Register method = temp;
+     const Register klass = temp;
+     const Register thread = LP64_ONLY(r15_thread) NOT_LP64(noreg);
+     assert(thread != noreg, "x86_32 not supported");
+ 
+     __ load_resolved_method_at_index(byte_no, cache, index, method);
+     __ load_method_holder(klass, method);
+     __ clinit_barrier(klass, thread, NULL /*L_fast_path*/, &L_clinit_barrier_slow);
+   }
  }
  
  // The cache and index registers must be set before call
  void TemplateTable::load_field_cp_cache_entry(Register obj,
                                                Register cache,
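The added code guards resolved invokestatic call sites with a class-initialization barrier: after resolution, the holder class of the resolved method is loaded and clinit_barrier either falls through or branches back to L_clinit_barrier_slow, which re-enters InterpreterRuntime::resolve_from_cache so the runtime can initialize the class, block, or throw. Logically, the check the barrier emits is equivalent to the sketch below (plain C++ within HotSpot, not the generated assembly; it assumes the mainline InstanceKlass accessors is_initialized() and is_reentrant_initialization() and is only illustrative):

    // Sketch only: the condition the barrier tests before letting an
    // invokestatic proceed on the fast path.
    static bool clinit_barrier_fast_path(InstanceKlass* holder, Thread* current) {
      if (holder->is_initialized()) {
        return true;  // class fully initialized: common case, no extra work
      }
      // A class may be used by the thread that is currently running its
      // <clinit>; such re-entrant accesses must not block.
      return holder->is_reentrant_initialization(current);
    }

Any other state falls into the slow path (L_clinit_barrier_slow above), which calls InterpreterRuntime::resolve_from_cache again and lets the runtime wait for initialization to complete or raise the appropriate error.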