src/hotspot/cpu/s390/templateTable_s390.cpp

rev 48251 : 8193257: PPC64, s390 implementation for Thread-local handshakes
Reviewed-by:


1836   if (is_wide) {
1837     __ get_4_byte_integer_at_bcp(disp, 1);
1838   } else {
1839     __ get_2_byte_integer_at_bcp(disp, 1, InterpreterMacroAssembler::Signed);
1840   }
1841 
1842   // Handle all the JSR stuff here, then exit.
1843   // It's much shorter and cleaner than intermingling with the
1844   // non-JSR normal-branch stuff occurring below.
1845   if (is_jsr) {
1846     // Compute return address as bci in Z_tos.
1847     __ z_lgr(Z_R1_scratch, Z_bcp);
1848     __ z_sg(Z_R1_scratch, Address(method, Method::const_offset()));
1849     __ add2reg(Z_tos, (is_wide ? 5 : 3) - in_bytes(ConstMethod::codes_offset()), Z_R1_scratch);
1850 
1851     // Bump bcp to target of JSR.
1852     __ z_agr(Z_bcp, disp);
1853     // Push return address for "ret" on stack.
1854     __ push_ptr(Z_tos);
1855     // And away we go!
1856     __ dispatch_next(vtos);
1857     return;
1858   }
1859 
1860   // Normal (non-jsr) branch handling.
1861 
1862   // Bump bytecode pointer by displacement (take the branch).
1863   __ z_agr(Z_bcp, disp);
1864 
1865   assert(UseLoopCounter || !UseOnStackReplacement,
1866          "on-stack-replacement requires loop counters");
1867 
1868   NearLabel backedge_counter_overflow;
1869   NearLabel profile_method;
1870   NearLabel dispatch;
1871   int       increment = InvocationCounter::count_increment;
1872 
1873   if (UseLoopCounter) {
1874     // Increment backedge counter for backward branches.
1875     // disp: target offset
1876     // Z_bcp: target bcp


1944         if (UseOnStackReplacement) {
1945           // Check for overflow against 'counter', which is the sum of the
1946           // counters.
1947           __ z_cl(counter, Address(m_counters, MethodCounters::interpreter_backward_branch_limit_offset()));
1948           __ z_brh(backedge_counter_overflow);
1949         }
1950       }
1951       __ bind(noCounters);
1952     }
1953 
1954     __ bind(dispatch);
1955   }
1956 
1957   // Pre-load the next target bytecode into Z_bytecode.
1958   __ z_llgc(Z_bytecode, Address(Z_bcp, (intptr_t) 0));
1959 
1960   // Continue with the bytecode @ target.
1961   // Z_tos: Return bci for jsr's, unused otherwise.
1962   // Z_bytecode: target bytecode
1963   // Z_bcp: target bcp
1964   __ dispatch_only(vtos);
1965 
1966   // Out-of-line code runtime calls.
1967   if (UseLoopCounter) {
1968     if (ProfileInterpreter) {
1969       // Out-of-line code to allocate method data oop.
1970       __ bind(profile_method);
1971 
1972       __ call_VM(noreg,
1973                  CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method));
1974       __ z_llgc(Z_bytecode, Address(Z_bcp, (intptr_t) 0));  // Restore target bytecode.
1975       __ set_method_data_pointer_for_bcp();
1976       __ z_bru(dispatch);
1977     }
1978 
1979     if (UseOnStackReplacement) {
1980 
1981       // backedge counter overflow
1982       __ bind(backedge_counter_overflow);
1983 
1984       __ z_lcgr(Z_ARG2, disp); // Z_ARG2 := -disp


2055   NearLabel not_taken;
2056   __ pop_ptr(Z_ARG2);
2057   __ verify_oop(Z_ARG2);
2058   __ verify_oop(Z_tos);
2059   __ compareU64_and_branch(Z_tos, Z_ARG2, j_not(cc), not_taken);
2060   branch(false, false);
2061   __ bind(not_taken);
2062   __ profile_not_taken_branch(Z_ARG3);
2063 }
2064 
2065 void TemplateTable::ret() {
2066   transition(vtos, vtos);
2067 
2068   locals_index(Z_tmp_1);
2069   // Get return bci, compute return bcp. Must load 64 bits.
2070   __ mem2reg_opt(Z_tmp_1, iaddress(_masm, Z_tmp_1));
2071   __ profile_ret(Z_tmp_1, Z_tmp_2);
2072   __ get_method(Z_tos);
2073   __ mem2reg_opt(Z_R1_scratch, Address(Z_tos, Method::const_offset()));
2074   __ load_address(Z_bcp, Address(Z_R1_scratch, Z_tmp_1, ConstMethod::codes_offset()));
2075   __ dispatch_next(vtos);
2076 }
2077 
2078 void TemplateTable::wide_ret() {
2079   transition(vtos, vtos);
2080 
2081   locals_index_wide(Z_tmp_1);
2082   // Get return bci, compute return bcp.
2083   __ mem2reg_opt(Z_tmp_1, aaddress(_masm, Z_tmp_1));
2084   __ profile_ret(Z_tmp_1, Z_tmp_2);
2085   __ get_method(Z_tos);
2086   __ mem2reg_opt(Z_R1_scratch, Address(Z_tos, Method::const_offset()));
2087   __ load_address(Z_bcp, Address(Z_R1_scratch, Z_tmp_1, ConstMethod::codes_offset()));
2088   __ dispatch_next(vtos);
2089 }
2090 
2091 void TemplateTable::tableswitch () {
2092   transition(itos, vtos);
2093 
2094   NearLabel default_case, continue_execution;
2095   Register  bcp = Z_ARG5;
2096   // Align bcp.
2097   __ load_address(bcp, at_bcp(BytesPerInt));
2098   __ z_nill(bcp, (-BytesPerInt) & 0xffff);
2099 
2100   // Load lo & hi.
2101   Register low  = Z_tmp_1;
2102   Register high = Z_tmp_2;
2103 
2104   // Load low into 64 bits, since used for address calculation.
2105   __ mem2reg_signed_opt(low, Address(bcp, BytesPerInt));
2106   __ mem2reg_opt(high, Address(bcp, 2 * BytesPerInt), false);
2107   // Sign extend "label" value for address calculation.
2108   __ z_lgfr(Z_tos, Z_tos);


2112   __ compare32_and_branch(Z_tos, high, Assembler::bcondHigh, default_case);
2113 
2114   // Lookup dispatch offset.
2115   __ z_sgr(Z_tos, low);
2116   Register jump_table_offset = Z_ARG3;
2117   // Index2offset; index in Z_tos is killed by profile_switch_case.
2118   __ z_sllg(jump_table_offset, Z_tos, LogBytesPerInt);
2119   __ profile_switch_case(Z_tos, Z_ARG4 /*tmp for mdp*/, low/*tmp*/, Z_bytecode/*tmp*/);
2120 
2121   Register index = Z_tmp_2;
2122 
2123   // Load index sign extended for addressing.
2124   __ mem2reg_signed_opt(index, Address(bcp, jump_table_offset, 3 * BytesPerInt));
2125 
2126   // Continue execution.
2127   __ bind(continue_execution);
2128 
2129   // Load next bytecode.
2130   __ z_llgc(Z_bytecode, Address(Z_bcp, index));
2131   __ z_agr(Z_bcp, index); // Advance bcp.
2132   __ dispatch_only(vtos);
2133 
2134   // Handle default.
2135   __ bind(default_case);
2136 
2137   __ profile_switch_default(Z_tos);
2138   __ mem2reg_signed_opt(index, Address(bcp));
2139   __ z_bru(continue_execution);
2140 }
2141 
2142 void TemplateTable::lookupswitch () {
2143   transition(itos, itos);
2144   __ stop("lookupswitch bytecode should have been rewritten");
2145 }
2146 
2147 void TemplateTable::fast_linearswitch () {
2148   transition(itos, vtos);
2149 
2150   Label    loop_entry, loop, found, continue_execution;
2151   Register bcp = Z_ARG5;
2152 


2176 
2177   __ profile_switch_default(Z_tos);
2178   // Load offset sign extended for addressing.
2179   __ mem2reg_signed_opt(offset, Address(bcp));
2180   __ z_bru(continue_execution);
2181 
2182   // Entry found -> get offset.
2183   __ bind(found);
2184   __ mem2reg_signed_opt(offset, Address(bcp, current_case_offset, 3 * BytesPerInt));
2185   // Profile that this case was taken.
2186   Register current_case_idx = Z_ARG4;
2187   __ z_srlg(current_case_idx, current_case_offset, LogBytesPerWord); // bytes2index
2188   __ profile_switch_case(current_case_idx, Z_tos, bcp, Z_bytecode);
2189 
2190   // Continue execution.
2191   __ bind(continue_execution);
2192 
2193   // Load next bytecode.
2194   __ z_llgc(Z_bytecode, Address(Z_bcp, offset, 0));
2195   __ z_agr(Z_bcp, offset); // Advance bcp.
2196   __ dispatch_only(vtos);
2197 }
2198 
2199 
2200 void TemplateTable::fast_binaryswitch() {
2201 
2202   transition(itos, vtos);
2203 
2204   // Implementation using the following core algorithm:
2205   //
2206   // int binary_search(int key, LookupswitchPair* array, int n) {
2207   //   // Binary search according to "Methodik des Programmierens" by
2208   //   // Edsger W. Dijkstra and W.H.J. Feijen, Addison Wesley Germany 1985.
2209   //   int i = 0;
2210   //   int j = n;
2211   //   while (i+1 < j) {
2212   //     // invariant P: 0 <= i < j <= n and (a[i] <= key < a[j] or Q)
2213   //     // with      Q: for all i: 0 <= i < n: key < a[i]
2214   //     // where a stands for the array and assuming that the (nonexistent)
2215   //     // element a[n] is infinitely big.
2216   //     int h = (i + j) >> 1;


2285 
2286     // if (i + 1 < j) continue search
2287     __ add2reg(h, 1, i);
2288     __ compare64_and_branch(h, j, Assembler::bcondLow, loop);
2289   }
2290 
2291   // End of binary search, result index is i (must check again!).
2292   NearLabel default_case;
2293 
2294   // h is no longer needed, so use it to hold the byte offset.
2295   __ z_sllg(h, i, LogBytesPerWord);   // index2bytes
2296   __ mem2reg_opt(temp, Address(array, h), false);
2297   __ compare32_and_branch(key, temp, Assembler::bcondNotEqual, default_case);
2298 
2299   // entry found -> j = offset
2300   __ mem2reg_signed_opt(j, Address(array, h, BytesPerInt));
2301   __ profile_switch_case(i, key, array, Z_bytecode);
2302   // Load next bytecode.
2303   __ z_llgc(Z_bytecode, Address(Z_bcp, j));
2304   __ z_agr(Z_bcp, j);       // Advance bcp.
2305   __ dispatch_only(vtos);
2306 
2307   // default case -> j = default offset
2308   __ bind(default_case);
2309 
2310   __ profile_switch_default(i);
2311   __ mem2reg_signed_opt(j, Address(array, -2 * BytesPerInt));
2312   // Load next bytecode.
2313   __ z_llgc(Z_bytecode, Address(Z_bcp, j));
2314   __ z_agr(Z_bcp, j);       // Advance bcp.
2315   __ dispatch_only(vtos);
2316 }
2317 
2318 void TemplateTable::_return(TosState state) {
2319   transition(state, state);
2320   assert(_desc->calls_vm(),
2321          "inconsistent calls_vm information"); // call in remove_activation
2322 
2323   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2324     Register Rthis  = Z_ARG2;
2325     Register Rklass = Z_ARG5;
2326     Label skip_register_finalizer;
2327     assert(state == vtos, "only valid state");
2328     __ z_lg(Rthis, aaddress(0));
2329     __ load_klass(Rklass, Rthis);
2330     __ testbit(Address(Rklass, Klass::access_flags_offset()), exact_log2(JVM_ACC_HAS_FINALIZER));
2331     __ z_bfalse(skip_register_finalizer);
2332     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), Rthis);
2333     __ bind(skip_register_finalizer);
2334   }
2335 
2336   if (state == itos) {
2337     // Narrow result if state is itos but result type is smaller.
2338     // Need to narrow in the return bytecode rather than in generate_return_entry
2339     // since compiled code callers expect the result to already be narrowed.
2340     __ narrow(Z_tos, Z_tmp_1); /* fall through */
2341   }
2342 
2343   __ remove_activation(state, Z_R14);
2344   __ z_br(Z_R14);
2345 }
2346 
2347 // ----------------------------------------------------------------------------
2348 // NOTE: Cpe_offset is already computed as byte offset, so we must not
2349 // shift it afterwards!
2350 void TemplateTable::resolve_cache_and_index(int byte_no,
2351                                             Register Rcache,
2352                                             Register cpe_offset,
2353                                             size_t index_size) {




1836   if (is_wide) {
1837     __ get_4_byte_integer_at_bcp(disp, 1);
1838   } else {
1839     __ get_2_byte_integer_at_bcp(disp, 1, InterpreterMacroAssembler::Signed);
1840   }
1841 
1842   // Handle all the JSR stuff here, then exit.
1843   // It's much shorter and cleaner than intermingling with the
1844   // non-JSR normal-branch stuff occurring below.
1845   if (is_jsr) {
1846     // Compute return address as bci in Z_tos.
1847     __ z_lgr(Z_R1_scratch, Z_bcp);
1848     __ z_sg(Z_R1_scratch, Address(method, Method::const_offset()));
1849     __ add2reg(Z_tos, (is_wide ? 5 : 3) - in_bytes(ConstMethod::codes_offset()), Z_R1_scratch);
1850 
1851     // Bump bcp to target of JSR.
1852     __ z_agr(Z_bcp, disp);
1853     // Push return address for "ret" on stack.
1854     __ push_ptr(Z_tos);
1855     // And away we go!
1856     __ dispatch_next(vtos, 0, true);
1857     return;
1858   }
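For the bci arithmetic in the jsr path above: the value pushed for "ret" is the bci of the bytecode following the jsr, i.e. (bcp - ConstMethod base - codes offset) + 3, or + 5 for jsr_w; the generated code folds the codes-offset subtraction into the add2reg immediate. A minimal sketch with hypothetical parameter names:

#include <cstddef>

// Sketch only: 'const_method_base' stands for the pointer loaded via
// Method::const_offset(); 'codes_offset' for in_bytes(ConstMethod::codes_offset()).
inline size_t jsr_return_bci(const unsigned char* bcp,
                             const unsigned char* const_method_base,
                             size_t codes_offset,
                             bool is_wide) {
  size_t current_bci = size_t(bcp - const_method_base) - codes_offset;
  return current_bci + (is_wide ? 5 : 3);  // bci of the bytecode after jsr/jsr_w
}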
1859 
1860   // Normal (non-jsr) branch handling.
1861 
1862   // Bump bytecode pointer by displacement (take the branch).
1863   __ z_agr(Z_bcp, disp);
1864 
1865   assert(UseLoopCounter || !UseOnStackReplacement,
1866          "on-stack-replacement requires loop counters");
1867 
1868   NearLabel backedge_counter_overflow;
1869   NearLabel profile_method;
1870   NearLabel dispatch;
1871   int       increment = InvocationCounter::count_increment;
1872 
1873   if (UseLoopCounter) {
1874     // Increment backedge counter for backward branches.
1875     // disp: target offset
1876     // Z_bcp: target bcp


1944         if (UseOnStackReplacement) {
1945           // Check for overflow against 'counter', which is the sum of the
1946           // counters.
1947           __ z_cl(counter, Address(m_counters, MethodCounters::interpreter_backward_branch_limit_offset()));
1948           __ z_brh(backedge_counter_overflow);
1949         }
1950       }
1951       __ bind(noCounters);
1952     }
1953 
1954     __ bind(dispatch);
1955   }
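Most of the counter-update code is elided in this view; the visible tail compares the combined counter against the interpreter backward-branch limit and takes backedge_counter_overflow on an unsigned "high" result. Conceptually, ignoring the status bits HotSpot keeps in its counter words (a sketch):

#include <cstdint>

// Mirrors the z_cl/z_brh pair above: request OSR once the sum of the
// invocation and backedge counters exceeds the backward-branch limit.
inline bool backedge_overflow(uint32_t invocation_counter,
                              uint32_t backedge_counter,
                              uint32_t backward_branch_limit) {
  uint32_t counter = invocation_counter + backedge_counter;  // "sum of the counters"
  return counter > backward_branch_limit;                    // unsigned compare
}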
1956 
1957   // Pre-load the next target bytecode into Z_bytecode.
1958   __ z_llgc(Z_bytecode, Address(Z_bcp, (intptr_t) 0));
1959 
1960   // Continue with the bytecode @ target.
1961   // Z_tos: Return bci for jsr's, unused otherwise.
1962   // Z_bytecode: target bytecode
1963   // Z_bcp: target bcp
1964   __ dispatch_only(vtos, true);
1965 
1966   // Out-of-line code runtime calls.
1967   if (UseLoopCounter) {
1968     if (ProfileInterpreter) {
1969       // Out-of-line code to allocate method data oop.
1970       __ bind(profile_method);
1971 
1972       __ call_VM(noreg,
1973                  CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method));
1974       __ z_llgc(Z_bytecode, Address(Z_bcp, (intptr_t) 0));  // Restore target bytecode.
1975       __ set_method_data_pointer_for_bcp();
1976       __ z_bru(dispatch);
1977     }
1978 
1979     if (UseOnStackReplacement) {
1980 
1981       // backedge counter overflow
1982       __ bind(backedge_counter_overflow);
1983 
1984       __ z_lcgr(Z_ARG2, disp); // Z_ARG2 := -disp


2055   NearLabel not_taken;
2056   __ pop_ptr(Z_ARG2);
2057   __ verify_oop(Z_ARG2);
2058   __ verify_oop(Z_tos);
2059   __ compareU64_and_branch(Z_tos, Z_ARG2, j_not(cc), not_taken);
2060   branch(false, false);
2061   __ bind(not_taken);
2062   __ profile_not_taken_branch(Z_ARG3);
2063 }
2064 
2065 void TemplateTable::ret() {
2066   transition(vtos, vtos);
2067 
2068   locals_index(Z_tmp_1);
2069   // Get return bci, compute return bcp. Must load 64 bits.
2070   __ mem2reg_opt(Z_tmp_1, iaddress(_masm, Z_tmp_1));
2071   __ profile_ret(Z_tmp_1, Z_tmp_2);
2072   __ get_method(Z_tos);
2073   __ mem2reg_opt(Z_R1_scratch, Address(Z_tos, Method::const_offset()));
2074   __ load_address(Z_bcp, Address(Z_R1_scratch, Z_tmp_1, ConstMethod::codes_offset()));
2075   __ dispatch_next(vtos, 0, true);
2076 }
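ret is the inverse of the jsr arithmetic sketched above: the local holds the return bci, and the new bcp is the ConstMethod's bytecode base plus that bci. A sketch, using the same hypothetical names as before:

#include <cstddef>

inline const unsigned char* ret_target_bcp(const unsigned char* const_method_base,
                                           size_t codes_offset,
                                           size_t return_bci) {
  // Matches load_address(Z_bcp, Address(const_method, bci, codes_offset())).
  return const_method_base + codes_offset + return_bci;
}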
2077 
2078 void TemplateTable::wide_ret() {
2079   transition(vtos, vtos);
2080 
2081   locals_index_wide(Z_tmp_1);
2082   // Get return bci, compute return bcp.
2083   __ mem2reg_opt(Z_tmp_1, aaddress(_masm, Z_tmp_1));
2084   __ profile_ret(Z_tmp_1, Z_tmp_2);
2085   __ get_method(Z_tos);
2086   __ mem2reg_opt(Z_R1_scratch, Address(Z_tos, Method::const_offset()));
2087   __ load_address(Z_bcp, Address(Z_R1_scratch, Z_tmp_1, ConstMethod::codes_offset()));
2088   __ dispatch_next(vtos, 0, true);
2089 }
2090 
2091 void TemplateTable::tableswitch () {
2092   transition(itos, vtos);
2093 
2094   NearLabel default_case, continue_execution;
2095   Register  bcp = Z_ARG5;
2096   // Align bcp.
2097   __ load_address(bcp, at_bcp(BytesPerInt));
2098   __ z_nill(bcp, (-BytesPerInt) & 0xffff);
2099 
2100   // Load lo & hi.
2101   Register low  = Z_tmp_1;
2102   Register high = Z_tmp_2;
2103 
2104   // Load low into 64 bits, since used for address calculation.
2105   __ mem2reg_signed_opt(low, Address(bcp, BytesPerInt));
2106   __ mem2reg_opt(high, Address(bcp, 2 * BytesPerInt), false);
2107   // Sign extend "label" value for address calculation.
2108   __ z_lgfr(Z_tos, Z_tos);


2112   __ compare32_and_branch(Z_tos, high, Assembler::bcondHigh, default_case);
2113 
2114   // Lookup dispatch offset.
2115   __ z_sgr(Z_tos, low);
2116   Register jump_table_offset = Z_ARG3;
2117   // Index2offset; index in Z_tos is killed by profile_switch_case.
2118   __ z_sllg(jump_table_offset, Z_tos, LogBytesPerInt);
2119   __ profile_switch_case(Z_tos, Z_ARG4 /*tmp for mdp*/, low/*tmp*/, Z_bytecode/*tmp*/);
2120 
2121   Register index = Z_tmp_2;
2122 
2123   // Load index sign extended for addressing.
2124   __ mem2reg_signed_opt(index, Address(bcp, jump_table_offset, 3 * BytesPerInt));
2125 
2126   // Continue execution.
2127   __ bind(continue_execution);
2128 
2129   // Load next bytecode.
2130   __ z_llgc(Z_bytecode, Address(Z_bcp, index));
2131   __ z_agr(Z_bcp, index); // Advance bcp.
2132   __ dispatch_only(vtos, true);
2133 
2134   // Handle default.
2135   __ bind(default_case);
2136 
2137   __ profile_switch_default(Z_tos);
2138   __ mem2reg_signed_opt(index, Address(bcp));
2139   __ z_bru(continue_execution);
2140 }
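The aligned tableswitch payload in the bytecode stream is (default, low, high, jump_offsets[high - low + 1]), all 32-bit values; the code above performs the following selection. A sketch that assumes the payload has already been decoded into host-order ints:

#include <cstdint>

inline int32_t tableswitch_displacement(const int32_t* payload, int32_t key) {
  const int32_t def  = payload[0];
  const int32_t low  = payload[1];
  const int32_t high = payload[2];
  if (key < low || key > high) {
    return def;                      // default_case
  }
  return payload[3 + (key - low)];   // entry at 3 * BytesPerInt + index * BytesPerInt
}
// The interpreter adds the selected displacement to Z_bcp and dispatches there.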
2141 
2142 void TemplateTable::lookupswitch () {
2143   transition(itos, itos);
2144   __ stop("lookupswitch bytecode should have been rewritten");
2145 }
2146 
2147 void TemplateTable::fast_linearswitch () {
2148   transition(itos, vtos);
2149 
2150   Label    loop_entry, loop, found, continue_execution;
2151   Register bcp = Z_ARG5;
2152 


2176 
2177   __ profile_switch_default(Z_tos);
2178   // Load offset sign extended for addressing.
2179   __ mem2reg_signed_opt(offset, Address(bcp));
2180   __ z_bru(continue_execution);
2181 
2182   // Entry found -> get offset.
2183   __ bind(found);
2184   __ mem2reg_signed_opt(offset, Address(bcp, current_case_offset, 3 * BytesPerInt));
2185   // Profile that this case was taken.
2186   Register current_case_idx = Z_ARG4;
2187   __ z_srlg(current_case_idx, current_case_offset, LogBytesPerWord); // bytes2index
2188   __ profile_switch_case(current_case_idx, Z_tos, bcp, Z_bytecode);
2189 
2190   // Continue execution.
2191   __ bind(continue_execution);
2192 
2193   // Load next bytecode.
2194   __ z_llgc(Z_bytecode, Address(Z_bcp, offset, 0));
2195   __ z_agr(Z_bcp, offset); // Advance bcp.
2196   __ dispatch_only(vtos, true);
2197 }
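fast_linearswitch is the rewritten form of lookupswitch that scans the (match, offset) pairs linearly. Ignoring byte order and the profiling calls, the selection amounts to (sketch):

#include <cstdint>

// Aligned payload: default, npairs, then npairs pairs of (match, offset).
inline int32_t lookupswitch_displacement_linear(const int32_t* payload, int32_t key) {
  const int32_t def    = payload[0];
  const int32_t npairs = payload[1];
  for (int32_t i = 0; i < npairs; i++) {
    if (key == payload[2 + 2 * i]) {
      return payload[3 + 2 * i];   // "found": offset of the matching case
    }
  }
  return def;                      // default case
}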
2198 
2199 
2200 void TemplateTable::fast_binaryswitch() {
2201 
2202   transition(itos, vtos);
2203 
2204   // Implementation using the following core algorithm:
2205   //
2206   // int binary_search(int key, LookupswitchPair* array, int n) {
2207   //   // Binary search according to "Methodik des Programmierens" by
2208   //   // Edsger W. Dijkstra and W.H.J. Feijen, Addison Wesley Germany 1985.
2209   //   int i = 0;
2210   //   int j = n;
2211   //   while (i+1 < j) {
2212   //     // invariant P: 0 <= i < j <= n and (a[i] <= key < a[j] or Q)
2213   //     // with      Q: for all i: 0 <= i < n: key < a[i]
2214   //     // where a stands for the array and assuming that the (nonexistent)
2215   //     // element a[n] is infinitely big.
2216   //     int h = (i + j) >> 1;


2285 
2286     // if (i + 1 < j) continue search
2287     __ add2reg(h, 1, i);
2288     __ compare64_and_branch(h, j, Assembler::bcondLow, loop);
2289   }
2290 
2291   // End of binary search, result index is i (must check again!).
2292   NearLabel default_case;
2293 
2294   // h is no longer needed, so use it to hold the byte offset.
2295   __ z_sllg(h, i, LogBytesPerWord);   // index2bytes
2296   __ mem2reg_opt(temp, Address(array, h), false);
2297   __ compare32_and_branch(key, temp, Assembler::bcondNotEqual, default_case);
2298 
2299   // entry found -> j = offset
2300   __ mem2reg_signed_opt(j, Address(array, h, BytesPerInt));
2301   __ profile_switch_case(i, key, array, Z_bytecode);
2302   // Load next bytecode.
2303   __ z_llgc(Z_bytecode, Address(Z_bcp, j));
2304   __ z_agr(Z_bcp, j);       // Advance bcp.
2305   __ dispatch_only(vtos, true);
2306 
2307   // default case -> j = default offset
2308   __ bind(default_case);
2309 
2310   __ profile_switch_default(i);
2311   __ mem2reg_signed_opt(j, Address(array, -2 * BytesPerInt));
2312   // Load next bytecode.
2313   __ z_llgc(Z_bytecode, Address(Z_bcp, j));
2314   __ z_agr(Z_bcp, j);       // Advance bcp.
2315   __ dispatch_only(vtos, true);
2316 }
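The search loop referenced in the comment at the top of fast_binaryswitch (largely elided in this view) corresponds to the following self-contained sketch; the generated code then re-checks array[i] against the key before taking the matched offset, otherwise it falls back to the default offset stored just before the pairs:

struct LookupswitchPairSketch { int match; int offset; };  // illustration only

int binary_search_case(int key, const LookupswitchPairSketch* a, int n) {
  int i = 0;
  int j = n;
  while (i + 1 < j) {
    // invariant: 0 <= i < j <= n and (a[i].match <= key < a[j].match or key < a[0].match)
    int h = (i + j) >> 1;
    if (key < a[h].match) {
      j = h;
    } else {
      i = h;
    }
  }
  return i;  // candidate index; caller verifies a[i].match == key
}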
2317 
2318 void TemplateTable::_return(TosState state) {
2319   transition(state, state);
2320   assert(_desc->calls_vm(),
2321          "inconsistent calls_vm information"); // call in remove_activation
2322 
2323   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2324     Register Rthis  = Z_ARG2;
2325     Register Rklass = Z_ARG5;
2326     Label skip_register_finalizer;
2327     assert(state == vtos, "only valid state");
2328     __ z_lg(Rthis, aaddress(0));
2329     __ load_klass(Rklass, Rthis);
2330     __ testbit(Address(Rklass, Klass::access_flags_offset()), exact_log2(JVM_ACC_HAS_FINALIZER));
2331     __ z_bfalse(skip_register_finalizer);
2332     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), Rthis);
2333     __ bind(skip_register_finalizer);
2334   }
2335 
2336   if (SafepointMechanism::uses_thread_local_poll() && _desc->bytecode() != Bytecodes::_return_register_finalizer) {
2337     Label no_safepoint;
2338     const Address poll_byte_addr(Z_thread, in_bytes(Thread::polling_page_offset()) + 7 /* Big Endian */);
2339     __ z_tm(poll_byte_addr, SafepointMechanism::poll_bit());
2340     __ z_braz(no_safepoint);
2341     __ push(state);
2342     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2343     __ pop(state);
2344     __ bind(no_safepoint);
2345   }
2346 
2347   if (state == itos) {
2348     // Narrow result if state is itos but result type is smaller.
2349     // Need to narrow in the return bytecode rather than in generate_return_entry
2350     // since compiled code callers expect the result to already be narrowed.
2351     __ narrow(Z_tos, Z_tmp_1); /* fall through */
2352   }
2353 
2354   __ remove_activation(state, Z_R14);
2355   __ z_br(Z_R14);
2356 }
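The block added between the finalizer handling and the result narrowing is the thread-local handshake poll introduced by this change: it tests the low byte of the thread's polling word (offset +7 because s390 is big-endian) and, when the poll bit is armed, calls InterpreterRuntime::at_safepoint with the TOS value saved and restored around the call. A conceptual sketch; the type and constant below are illustrative, not the HotSpot declarations:

#include <cstdint>

struct ThreadSketch {
  volatile uint64_t polling_word;  // armed by the VM by setting the low poll bit
};

constexpr uint8_t kPollBit = 1;    // assumed value of SafepointMechanism::poll_bit()

// On a big-endian target, byte 7 of the 8-byte word is its least significant
// byte, which is what polling_page_offset() + 7 addresses and z_tm tests.
inline bool poll_armed(const ThreadSketch* t) {
  const volatile uint8_t* b = reinterpret_cast<const volatile uint8_t*>(&t->polling_word);
  return (b[7] & kPollBit) != 0;
}

void return_poll(ThreadSketch* self) {
  if (poll_armed(self)) {
    // push(state); call_VM(InterpreterRuntime::at_safepoint); pop(state);
  }
}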
2357 
2358 // ----------------------------------------------------------------------------
2359 // NOTE: Cpe_offset is already computed as byte offset, so we must not
2360 // shift it afterwards!
2361 void TemplateTable::resolve_cache_and_index(int byte_no,
2362                                             Register Rcache,
2363                                             Register cpe_offset,
2364                                             size_t index_size) {

