src/cpu/x86/vm/templateTable_x86_64.cpp

*** 481,493 ****
  void TemplateTable::locals_index(Register reg, int offset) {
    __ load_unsigned_byte(reg, at_bcp(offset));
    __ negptr(reg);
  }
  
! void TemplateTable::iload() {
    transition(vtos, itos);
!   if (RewriteFrequentPairs) {
      Label rewrite, done;
      const Register bc = c_rarg3;
      assert(rbx != bc, "register damaged");
  
      // get next byte
--- 481,493 ----
  void TemplateTable::locals_index(Register reg, int offset) {
    __ load_unsigned_byte(reg, at_bcp(offset));
    __ negptr(reg);
  }
  
! void TemplateTable::iload_internal(RewriteControl rc) {
    transition(vtos, itos);
!   if (RewriteFrequentPairs && rc == MAY_REWRITE) {
      Label rewrite, done;
      const Register bc = c_rarg3;
      assert(rbx != bc, "register damaged");
  
      // get next byte
*** 514,531 ****
--- 514,540 ----
      // rewrite
      // bc: fast bytecode
      __ bind(rewrite);
      patch_bytecode(Bytecodes::_iload, bc, rbx, false);
+     __ bind(done);
    }
  
    // Get the local value into tos
    locals_index(rbx);
    __ movl(rax, iaddress(rbx));
  }
  
+ void TemplateTable::iload() {
+   iload_internal();
+ }
+ 
+ void TemplateTable::nofast_iload() {
+   iload_internal(MAY_NOT_REWRITE);
+ }
+ 
  void TemplateTable::fast_iload2() {
    transition(vtos, itos);
    locals_index(rbx);
    __ movl(rax, iaddress(rbx));
    __ push(itos);
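Note: the iload()/nofast_iload() pair above relies on a RewriteControl enum (values MAY_REWRITE and MAY_NOT_REWRITE) and a defaulted rc parameter that are declared elsewhere (presumably in the shared templateTable.hpp), not in this file. The following is a minimal standalone sketch of the dispatch pattern only; the default argument and the printf bodies are illustrative assumptions, not code from this change.

    // Standalone sketch of the rewrite-control dispatch pattern used above.
    // RewriteControl, MAY_REWRITE and MAY_NOT_REWRITE mirror the names in the
    // hunks; the default argument on the _internal helper is an assumption
    // (the new iload() calls iload_internal() with no argument).
    #include <cstdio>

    enum RewriteControl { MAY_REWRITE, MAY_NOT_REWRITE };

    // Stands in for the template generator: only the MAY_REWRITE path is
    // allowed to patch the bytecode into its _fast_ form.
    static void iload_internal(RewriteControl rc = MAY_REWRITE) {
      if (/* RewriteFrequentPairs && */ rc == MAY_REWRITE) {
        std::printf("may patch _iload to a fast variant\n");
      } else {
        std::printf("no rewriting: bytecode stays _iload\n");
      }
    }

    static void iload()        { iload_internal(); }                // normal template
    static void nofast_iload() { iload_internal(MAY_NOT_REWRITE); } // no-rewrite template

    int main() {
      iload();         // takes the rewriting branch
      nofast_iload();  // takes the non-rewriting branch
      return 0;
    }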
*** 749,758 ****
--- 758,775 ----
    transition(vtos, atos);
    __ movptr(rax, aaddress(n));
  }
  
  void TemplateTable::aload_0() {
+   aload_0_internal();
+ }
+ 
+ void TemplateTable::nofast_aload_0() {
+   aload_0_internal(MAY_NOT_REWRITE);
+ }
+ 
+ void TemplateTable::aload_0_internal(RewriteControl rc) {
    transition(vtos, atos);
    // According to bytecode histograms, the pairs:
    //
    // _aload_0, _fast_igetfield
    // _aload_0, _fast_agetfield
*** 771,781 ****
    // Also rewrite frequent pairs
    // aload_0, aload_1
    // aload_0, iload_1
    // These bytecodes with a small amount of code are most profitable
    // to rewrite
!   if (RewriteFrequentPairs) {
      Label rewrite, done;
      const Register bc = c_rarg3;
      assert(rbx != bc, "register damaged");
      // get next byte
      __ load_unsigned_byte(rbx,
--- 788,798 ----
    // Also rewrite frequent pairs
    // aload_0, aload_1
    // aload_0, iload_1
    // These bytecodes with a small amount of code are most profitable
    // to rewrite
!   if (RewriteFrequentPairs && rc == MAY_REWRITE) {
      Label rewrite, done;
      const Register bc = c_rarg3;
      assert(rbx != bc, "register damaged");
      // get next byte
      __ load_unsigned_byte(rbx,
*** 2107,2127 ****
  void TemplateTable::resolve_cache_and_index(int byte_no,
                                              Register Rcache,
                                              Register index,
                                              size_t index_size) {
    const Register temp = rbx;
    assert_different_registers(Rcache, index, temp);
  
    Label resolved;
    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
    __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
!   __ cmpl(temp, (int) bytecode());  // have we resolved this bytecode?
    __ jcc(Assembler::equal, resolved);
  
    // resolve first time through
    address entry;
!   switch (bytecode()) {
    case Bytecodes::_getstatic:
    case Bytecodes::_putstatic:
    case Bytecodes::_getfield:
    case Bytecodes::_putfield:
      entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);
--- 2124,2151 ----
  void TemplateTable::resolve_cache_and_index(int byte_no,
                                              Register Rcache,
                                              Register index,
                                              size_t index_size) {
    const Register temp = rbx;
+   Bytecodes::Code code = bytecode();
+   switch (code) {
+   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
+   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
+   case Bytecodes::_nofast_invokevirtual: code = Bytecodes::_invokevirtual;
+   }
+ 
    assert_different_registers(Rcache, index, temp);
  
    Label resolved;
    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
    __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
!   __ cmpl(temp, code);  // have we resolved this bytecode?
    __ jcc(Assembler::equal, resolved);
  
    // resolve first time through
    address entry;
!   switch (code) {
    case Bytecodes::_getstatic:
    case Bytecodes::_putstatic:
    case Bytecodes::_getfield:
    case Bytecodes::_putfield:
      entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);
*** 2137,2150 ****
      break;
    case Bytecodes::_invokedynamic:
      entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
      break;
    default:
!     fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
      break;
    }
!   __ movl(temp, (int) bytecode());
    __ call_VM(noreg, entry, temp);
  
    // Update registers with resolved info
    __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
    __ bind(resolved);
--- 2161,2174 ----
      break;
    case Bytecodes::_invokedynamic:
      entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
      break;
    default:
!     fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(code)));
      break;
    }
!   __ movl(temp, code);
    __ call_VM(noreg, entry, temp);
  
    // Update registers with resolved info
    __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
    __ bind(resolved);
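Note: resolve_cache_and_index() now folds the _nofast_* codes back to their base bytecodes before comparing against the constant-pool cache (and prepare_invoke does the same for _nofast_invokevirtual further down). The sketch below shows only that mapping; the Bytecodes struct here is a hypothetical stand-in, not the real HotSpot class.

    // Sketch of the nofast -> base bytecode normalization performed in
    // resolve_cache_and_index() above (and, for invokevirtual, in
    // prepare_invoke()).  Only the mapping logic mirrors the hunk.
    #include <cassert>

    struct Bytecodes {
      enum Code {
        _getfield, _putfield, _invokevirtual,
        _nofast_getfield, _nofast_putfield, _nofast_invokevirtual
      };
    };

    // The constant-pool cache entry is resolved under the base bytecode, so a
    // nofast variant must compare (and resolve) as its rewritable counterpart.
    static Bytecodes::Code base_code(Bytecodes::Code code) {
      switch (code) {
      case Bytecodes::_nofast_getfield:      return Bytecodes::_getfield;
      case Bytecodes::_nofast_putfield:      return Bytecodes::_putfield;
      case Bytecodes::_nofast_invokevirtual: return Bytecodes::_invokevirtual;
      default:                               return code;
      }
    }

    int main() {
      assert(base_code(Bytecodes::_nofast_getfield) == Bytecodes::_getfield);
      assert(base_code(Bytecodes::_nofast_invokevirtual) == Bytecodes::_invokevirtual);
      assert(base_code(Bytecodes::_putfield) == Bytecodes::_putfield);  // base codes pass through
      return 0;
    }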
*** 2259,2269 ****
    __ pop_ptr(r);
    __ null_check(r);  // for field access must check obj.
    __ verify_oop(r);
  }
  
! void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
    transition(vtos, vtos);
  
    const Register cache = rcx;
    const Register index = rdx;
    const Register obj = c_rarg3;
--- 2283,2293 ----
    __ pop_ptr(r);
    __ null_check(r);  // for field access must check obj.
    __ verify_oop(r);
  }
  
! void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
    transition(vtos, vtos);
  
    const Register cache = rcx;
    const Register index = rdx;
    const Register obj = c_rarg3;
*** 2293,2314 ****
    __ jcc(Assembler::notZero, notByte);
    // btos
    __ load_signed_byte(rax, field);
    __ push(btos);
    // Rewrite bytecode to be faster
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notByte);
    __ cmpl(flags, atos);
    __ jcc(Assembler::notEqual, notObj);
    // atos
    __ load_heap_oop(rax, field);
    __ push(atos);
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notObj);
--- 2317,2338 ----
    __ jcc(Assembler::notZero, notByte);
    // btos
    __ load_signed_byte(rax, field);
    __ push(btos);
    // Rewrite bytecode to be faster
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notByte);
    __ cmpl(flags, atos);
    __ jcc(Assembler::notEqual, notObj);
    // atos
    __ load_heap_oop(rax, field);
    __ push(atos);
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notObj);
*** 2316,2326 ****
    __ jcc(Assembler::notEqual, notInt);
    // itos
    __ movl(rax, field);
    __ push(itos);
    // Rewrite bytecode to be faster
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notInt);
--- 2340,2350 ----
    __ jcc(Assembler::notEqual, notInt);
    // itos
    __ movl(rax, field);
    __ push(itos);
    // Rewrite bytecode to be faster
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notInt);
*** 2328,2338 ****
    __ jcc(Assembler::notEqual, notChar);
    // ctos
    __ load_unsigned_short(rax, field);
    __ push(ctos);
    // Rewrite bytecode to be faster
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notChar);
--- 2352,2362 ----
    __ jcc(Assembler::notEqual, notChar);
    // ctos
    __ load_unsigned_short(rax, field);
    __ push(ctos);
    // Rewrite bytecode to be faster
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notChar);
*** 2340,2350 ****
    __ jcc(Assembler::notEqual, notShort);
    // stos
    __ load_signed_short(rax, field);
    __ push(stos);
    // Rewrite bytecode to be faster
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notShort);
--- 2364,2374 ----
    __ jcc(Assembler::notEqual, notShort);
    // stos
    __ load_signed_short(rax, field);
    __ push(stos);
    // Rewrite bytecode to be faster
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notShort);
*** 2352,2362 ****
    __ jcc(Assembler::notEqual, notLong);
    // ltos
    __ movq(rax, field);
    __ push(ltos);
    // Rewrite bytecode to be faster
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notLong);
--- 2376,2386 ----
    __ jcc(Assembler::notEqual, notLong);
    // ltos
    __ movq(rax, field);
    __ push(ltos);
    // Rewrite bytecode to be faster
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notLong);
*** 2364,2374 ****
    __ jcc(Assembler::notEqual, notFloat);
    // ftos
    __ movflt(xmm0, field);
    __ push(ftos);
    // Rewrite bytecode to be faster
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notFloat);
--- 2388,2398 ----
    __ jcc(Assembler::notEqual, notFloat);
    // ftos
    __ movflt(xmm0, field);
    __ push(ftos);
    // Rewrite bytecode to be faster
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
    }
    __ jmp(Done);
  
    __ bind(notFloat);
*** 2378,2388 ****
  #endif
    // dtos
    __ movdbl(xmm0, field);
    __ push(dtos);
    // Rewrite bytecode to be faster
!   if (!is_static) {
      patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
    }
  
  #ifdef ASSERT
    __ jmp(Done);
--- 2402,2412 ----
  #endif
    // dtos
    __ movdbl(xmm0, field);
    __ push(dtos);
    // Rewrite bytecode to be faster
!   if (!is_static && rc == MAY_REWRITE) {
      patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
    }
  
  #ifdef ASSERT
    __ jmp(Done);
*** 2399,2408 ****
--- 2423,2436 ----
  
  void TemplateTable::getfield(int byte_no) {
    getfield_or_static(byte_no, false);
  }
  
+ void TemplateTable::nofast_getfield(int byte_no) {
+   getfield_or_static(byte_no, false, MAY_NOT_REWRITE);
+ }
+ 
  void TemplateTable::getstatic(int byte_no) {
    getfield_or_static(byte_no, true);
  }
  
  // The registers cache and index expected to be set before call.
*** 2462,2472 ****
      __ get_cache_and_index_at_bcp(cache, index, 1);
      __ bind(L1);
    }
  }
  
! void TemplateTable::putfield_or_static(int byte_no, bool is_static) {
    transition(vtos, vtos);
  
    const Register cache = rcx;
    const Register index = rdx;
    const Register obj = rcx;
--- 2490,2500 ----
      __ get_cache_and_index_at_bcp(cache, index, 1);
      __ bind(L1);
    }
  }
  
! void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
    transition(vtos, vtos);
  
    const Register cache = rcx;
    const Register index = rdx;
    const Register obj = rcx;
*** 2502,2512 ****
    // btos
    {
      __ pop(btos);
      if (!is_static) pop_and_check_object(obj);
      __ movb(field, rax);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
--- 2530,2540 ----
    // btos
    {
      __ pop(btos);
      if (!is_static) pop_and_check_object(obj);
      __ movb(field, rax);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
*** 2518,2528 ****
    {
      __ pop(atos);
      if (!is_static) pop_and_check_object(obj);
      // Store into the field
      do_oop_store(_masm, field, rax, _bs->kind(), false);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
--- 2546,2556 ----
    {
      __ pop(atos);
      if (!is_static) pop_and_check_object(obj);
      // Store into the field
      do_oop_store(_masm, field, rax, _bs->kind(), false);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
*** 2533,2543 ****
    // itos
    {
      __ pop(itos);
      if (!is_static) pop_and_check_object(obj);
      __ movl(field, rax);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
--- 2561,2571 ----
    // itos
    {
      __ pop(itos);
      if (!is_static) pop_and_check_object(obj);
      __ movl(field, rax);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
*** 2548,2558 ****
    // ctos
    {
      __ pop(ctos);
      if (!is_static) pop_and_check_object(obj);
      __ movw(field, rax);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
--- 2576,2586 ----
    // ctos
    {
      __ pop(ctos);
      if (!is_static) pop_and_check_object(obj);
      __ movw(field, rax);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
*** 2563,2573 ****
    // stos
    {
      __ pop(stos);
      if (!is_static) pop_and_check_object(obj);
      __ movw(field, rax);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
--- 2591,2601 ----
    // stos
    {
      __ pop(stos);
      if (!is_static) pop_and_check_object(obj);
      __ movw(field, rax);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
*** 2578,2588 ****
    // ltos
    {
      __ pop(ltos);
      if (!is_static) pop_and_check_object(obj);
      __ movq(field, rax);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
--- 2606,2616 ----
    // ltos
    {
      __ pop(ltos);
      if (!is_static) pop_and_check_object(obj);
      __ movq(field, rax);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
*** 2593,2603 ****
    // ftos
    {
      __ pop(ftos);
      if (!is_static) pop_and_check_object(obj);
      __ movflt(field, xmm0);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
--- 2621,2631 ----
    // ftos
    {
      __ pop(ftos);
      if (!is_static) pop_and_check_object(obj);
      __ movflt(field, xmm0);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
  
*** 2610,2620 ****
    // dtos
    {
      __ pop(dtos);
      if (!is_static) pop_and_check_object(obj);
      __ movdbl(field, xmm0);
!     if (!is_static) {
        patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
      }
    }
  
  #ifdef ASSERT
--- 2638,2648 ----
    // dtos
    {
      __ pop(dtos);
      if (!is_static) pop_and_check_object(obj);
      __ movdbl(field, xmm0);
!     if (!is_static && rc == MAY_REWRITE) {
        patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
      }
    }
  
  #ifdef ASSERT
*** 2636,2645 ****
--- 2664,2677 ----
  
  void TemplateTable::putfield(int byte_no) {
    putfield_or_static(byte_no, false);
  }
  
+ void TemplateTable::nofast_putfield(int byte_no) {
+   putfield_or_static(byte_no, false, MAY_NOT_REWRITE);
+ }
+ 
  void TemplateTable::putstatic(int byte_no) {
    putfield_or_static(byte_no, true);
  }
  
  void TemplateTable::jvmti_post_fast_field_mod() {
*** 2913,2923 ****
                          Register index,  // itable index, MethodType, etc.
                          Register recv,   // if caller wants to see it
                          Register flags   // if caller wants to test it
                          ) {
    // determine flags
!   const Bytecodes::Code code = bytecode();
    const bool is_invokeinterface = code == Bytecodes::_invokeinterface;
    const bool is_invokedynamic   = code == Bytecodes::_invokedynamic;
    const bool is_invokehandle    = code == Bytecodes::_invokehandle;
    const bool is_invokevirtual   = code == Bytecodes::_invokevirtual;
    const bool is_invokespecial   = code == Bytecodes::_invokespecial;
--- 2945,2955 ----
                          Register index,  // itable index, MethodType, etc.
                          Register recv,   // if caller wants to see it
                          Register flags   // if caller wants to test it
                          ) {
    // determine flags
!   const Bytecodes::Code code = bytecode() == Bytecodes::_nofast_invokevirtual ? Bytecodes::_invokevirtual : bytecode();
    const bool is_invokeinterface = code == Bytecodes::_invokeinterface;
    const bool is_invokedynamic   = code == Bytecodes::_invokedynamic;
    const bool is_invokehandle    = code == Bytecodes::_invokehandle;
    const bool is_invokevirtual   = code == Bytecodes::_invokevirtual;
    const bool is_invokespecial   = code == Bytecodes::_invokespecial;
*** 3038,3049 ****
    __ lookup_virtual_method(rax, index, method);
    __ profile_arguments_type(rdx, method, r13, true);
    __ jump_from_interpreted(method, rdx);
  }
  
- 
  void TemplateTable::invokevirtual(int byte_no) {
    transition(vtos, vtos);
    assert(byte_no == f2_byte, "use this argument");
    prepare_invoke(byte_no,
                   rbx,    // method or vtable index
                   noreg,  // unused itable index
--- 3070,3088 ----
    __ lookup_virtual_method(rax, index, method);
    __ profile_arguments_type(rdx, method, r13, true);
    __ jump_from_interpreted(method, rdx);
  }
  
  void TemplateTable::invokevirtual(int byte_no) {
+   invokevirtual_internal(byte_no);
+ }
+ 
+ void TemplateTable::nofast_invokevirtual(int byte_no) {
+   invokevirtual_internal(byte_no, MAY_NOT_REWRITE);
+ }
+ 
+ void TemplateTable::invokevirtual_internal(int byte_no, RewriteControl rc) {
    transition(vtos, vtos);
    assert(byte_no == f2_byte, "use this argument");
    prepare_invoke(byte_no,
                   rbx,    // method or vtable index
                   noreg,  // unused itable index
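Note: the wrapper/internal split in this file implies a set of new declarations in the shared templateTable.hpp, which is not part of this page. The excerpt below is a hypothetical reconstruction based only on the call sites above (the default arguments in particular are assumptions), not the actual header hunk.

    // Hypothetical reconstruction of the templateTable.hpp additions implied
    // by the call sites in this file; declarations only, compiles as a
    // standalone translation unit.
    enum RewriteControl { MAY_REWRITE, MAY_NOT_REWRITE };

    class TemplateTable {
     public:
      // no-rewrite templates registered for the _nofast_* bytecodes
      static void nofast_iload();
      static void nofast_aload_0();
      static void nofast_getfield(int byte_no);
      static void nofast_putfield(int byte_no);
      static void nofast_invokevirtual(int byte_no);

      // shared generators; the defaults are assumed from calls such as
      // iload_internal() and getfield_or_static(byte_no, false)
      static void iload_internal(RewriteControl rc = MAY_REWRITE);
      static void aload_0_internal(RewriteControl rc = MAY_REWRITE);
      static void invokevirtual_internal(int byte_no, RewriteControl rc = MAY_REWRITE);
      static void getfield_or_static(int byte_no, bool is_static, RewriteControl rc = MAY_REWRITE);
      static void putfield_or_static(int byte_no, bool is_static, RewriteControl rc = MAY_REWRITE);
    };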