
src/cpu/aarch64/vm/templateTable_aarch64.cpp

--- old/src/cpu/aarch64/vm/templateTable_aarch64.cpp

2178 void TemplateTable::_return(TosState state)
2179 {
2180   transition(state, state);
2181   assert(_desc->calls_vm(),
2182          "inconsistent calls_vm information"); // call in remove_activation
2183 
2184   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2185     assert(state == vtos, "only valid state");
2186 
2187     __ ldr(c_rarg1, aaddress(0));
2188     __ load_klass(r3, c_rarg1);
2189     __ ldrw(r3, Address(r3, Klass::access_flags_offset()));
2190     Label skip_register_finalizer;
2191     __ tbz(r3, exact_log2(JVM_ACC_HAS_FINALIZER), skip_register_finalizer);
2192 
2193     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2194 
2195     __ bind(skip_register_finalizer);
2196   }
2197 
2198   // Issue a StoreStore barrier after all stores but before return
2199   // from any constructor for any class with a final field.  We don't
2200   // know if this is a finalizer, so we always do so.
2201   if (_desc->bytecode() == Bytecodes::_return)
2202     __ membar(MacroAssembler::StoreStore);
2203 
2204   // Narrow result if state is itos but result type is smaller.
2205   // Need to narrow in the return bytecode rather than in generate_return_entry
2206   // since compiled code callers expect the result to already be narrowed.
2207   if (state == itos) {
2208     __ narrow(r0);
2209   }
2210 
2211   __ remove_activation(state);
2212   __ ret(lr);
2213 }
2214 
2215 // ----------------------------------------------------------------------------
2216 // Volatile variables demand their effects be made known to all CPU's
2217 // in order.  Store buffers on most chips allow reads & writes to

+++ new/src/cpu/aarch64/vm/templateTable_aarch64.cpp

2178 void TemplateTable::_return(TosState state)
2179 {
2180   transition(state, state);
2181   assert(_desc->calls_vm(),
2182          "inconsistent calls_vm information"); // call in remove_activation
2183 
2184   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2185     assert(state == vtos, "only valid state");
2186 
2187     __ ldr(c_rarg1, aaddress(0));
2188     __ load_klass(r3, c_rarg1);
2189     __ ldrw(r3, Address(r3, Klass::access_flags_offset()));
2190     Label skip_register_finalizer;
2191     __ tbz(r3, exact_log2(JVM_ACC_HAS_FINALIZER), skip_register_finalizer);
2192 
2193     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2194 
2195     __ bind(skip_register_finalizer);
2196   }
2197 
2198   // Explicitly reset last_sp, for handling special case in TemplateInterpreter::deopt_reexecute_entry
2199 #ifdef ASSERT
2200   if (state == vtos) {
2201     __ str(zr, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
2202   }
2203 #endif
2204 
2205   // Issue a StoreStore barrier after all stores but before return
2206   // from any constructor for any class with a final field.  We don't
2207   // know if this is a finalizer, so we always do so.
2208   if (_desc->bytecode() == Bytecodes::_return)
2209     __ membar(MacroAssembler::StoreStore);
2210 
2211   // Narrow result if state is itos but result type is smaller.
2212   // Need to narrow in the return bytecode rather than in generate_return_entry
2213   // since compiled code callers expect the result to already be narrowed.
2214   if (state == itos) {
2215     __ narrow(r0);
2216   }
2217 
2218   __ remove_activation(state);
2219   __ ret(lr);
2220 }
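
For context on the _return_register_finalizer path above: with
RegisterFinalizersAtInit enabled (the default), HotSpot's rewriter replaces
the return bytecode at the end of java.lang.Object.<init> with
_return_register_finalizer, so every newly constructed object passes through
this code; the JVM_ACC_HAS_FINALIZER test on the receiver's klass then
decides at runtime whether registration is needed. A minimal Java-level
sketch (the class name is made up):

    // Overriding finalize() sets JVM_ACC_HAS_FINALIZER on the class, so the
    // tbz above falls through and InterpreterRuntime::register_finalizer is
    // called with `this` when an instance finishes Object.<init>.
    class NeedsCleanup {
        @Override
        protected void finalize() throws Throwable {
            super.finalize();  // run later on the finalizer thread
        }
    }

    // `new NeedsCleanup()` registers the instance; `new Object()` takes the
    // skip_register_finalizer branch instead.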
2221 
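The unconditional StoreStore emitted for a plain return above backs the Java
memory model's final-field guarantee: field stores made inside a constructor
must become visible no later than the store that publishes the object
reference. A minimal Java-level sketch of the pattern being protected (class
and field names are illustrative):

    class Point {
        final int x;
        Point(int x) { this.x = x; }  // field store inside the constructor
    }

    class Publisher {
        static Point shared;          // deliberately not volatile
        static void publish() {
            // Without a StoreStore between the constructor's store to x and
            // this publication store, another core could observe a non-null
            // shared yet read a stale zero from shared.x. The membar in
            // _return orders the two stores on the writer side.
            shared = new Point(42);
        }
    }
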
2222 // ----------------------------------------------------------------------------
2223 // Volatile variables demand their effects be made known to all CPU's
2224 // in order.  Store buffers on most chips allow reads & writes to
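
Returning to the narrow(r0) step in _return above: the interpreter keeps
boolean, byte, char and short results as 32-bit ints (itos), and the
verifier accepts any int for an ireturn even when the declared return type
is narrower, so the return path has to canonicalize the value before a
compiled caller reads r0. A Java-level illustration (hand-written or
tool-generated bytecode, rather than javac output, is the usual source of
un-normalized values):

    // Declared boolean, but at the bytecode level ireturn may deliver any
    // 32-bit int. narrow(r0) reduces the result to a canonical 0/1 (and
    // sign- or zero-extends for byte/short/char) so the caller can use r0
    // directly without re-masking it.
    static boolean flag(int x) {
        return x != 0;
    }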