
src/hotspot/share/interpreter/interpreterRuntime.cpp

@@ -85,30 +85,32 @@
 
 //------------------------------------------------------------------------------------------------------------------------
 // State accessors
 
 void InterpreterRuntime::set_bcp_and_mdp(address bcp, JavaThread *thread) {
-  last_frame(thread).interpreter_frame_set_bcp(bcp);
+  LastFrameAccessor last_frame(thread);
+  last_frame.set_bcp(bcp);
   if (ProfileInterpreter) {
     // ProfileTraps uses MDOs independently of ProfileInterpreter.
     // That is why we must check both ProfileInterpreter and mdo != NULL.
-    MethodData* mdo = last_frame(thread).interpreter_frame_method()->method_data();
+    MethodData* mdo = last_frame.method()->method_data();
     if (mdo != NULL) {
       NEEDS_CLEANUP;
-      last_frame(thread).interpreter_frame_set_mdp(mdo->bci_to_dp(last_frame(thread).interpreter_frame_bci()));
+      last_frame.set_mdp(mdo->bci_to_dp(last_frame.bci()));
     }
   }
 }
 
 //------------------------------------------------------------------------------------------------------------------------
 // Constants
 
 
 IRT_ENTRY(void, InterpreterRuntime::ldc(JavaThread* thread, bool wide))
   // access constant pool
-  ConstantPool* pool = method(thread)->constants();
-  int index = wide ? get_index_u2(thread, Bytecodes::_ldc_w) : get_index_u1(thread, Bytecodes::_ldc);
+  LastFrameAccessor last_frame(thread);
+  ConstantPool* pool = last_frame.method()->constants();
+  int index = wide ? last_frame.get_index_u2(Bytecodes::_ldc_w) : last_frame.get_index_u1(Bytecodes::_ldc);
   constantTag tag = pool->tag_at(index);
 
   assert (tag.is_unresolved_klass() || tag.is_klass(), "wrong ldc call");
   Klass* klass = pool->klass_at(index, CHECK);
     oop java_class = klass->java_mirror();

@@ -117,17 +119,18 @@
 
 IRT_ENTRY(void, InterpreterRuntime::resolve_ldc(JavaThread* thread, Bytecodes::Code bytecode)) {
   assert(bytecode == Bytecodes::_fast_aldc ||
          bytecode == Bytecodes::_fast_aldc_w, "wrong bc");
   ResourceMark rm(thread);
-  methodHandle m (thread, method(thread));
-  Bytecode_loadconstant ldc(m, bci(thread));
+  LastFrameAccessor last_frame(thread);
+  methodHandle m (thread, last_frame.method());
+  Bytecode_loadconstant ldc(m, last_frame.bci());
   oop result = ldc.resolve_constant(CHECK);
 #ifdef ASSERT
   {
     // The bytecode wrappers aren't GC-safe so construct a new one
-    Bytecode_loadconstant ldc2(m, bci(thread));
+    Bytecode_loadconstant ldc2(m, last_frame.bci());
     oop coop = m->constants()->resolved_references()->obj_at(ldc2.cache_index());
     assert(result == coop, "expected result for assembly code");
   }
 #endif
   thread->set_vm_result(result);

@@ -180,14 +183,15 @@
 IRT_END
 
 
 IRT_ENTRY(void, InterpreterRuntime::multianewarray(JavaThread* thread, jint* first_size_address))
   // We may want to pass in more arguments - could make this slightly faster
-  ConstantPool* constants = method(thread)->constants();
-  int          i = get_index_u2(thread, Bytecodes::_multianewarray);
+  LastFrameAccessor last_frame(thread);
+  ConstantPool* constants = last_frame.method()->constants();
+  int          i = last_frame.get_index_u2(Bytecodes::_multianewarray);
   Klass* klass = constants->klass_at(i, CHECK);
-  int   nof_dims = number_of_dimensions(thread);
+  int   nof_dims = last_frame.number_of_dimensions();
   assert(klass->is_klass(), "not a class");
   assert(nof_dims >= 1, "multianewarray rank must be nonzero");
 
   // We must create an array of jints to pass to multi_allocate.
   ResourceMark rm(thread);

@@ -215,12 +219,13 @@
 
 
 // Quicken instance-of and check-cast bytecodes
 IRT_ENTRY(void, InterpreterRuntime::quicken_io_cc(JavaThread* thread))
   // Force resolving; quicken the bytecode
-  int which = get_index_u2(thread, Bytecodes::_checkcast);
-  ConstantPool* cpool = method(thread)->constants();
+  LastFrameAccessor last_frame(thread);
+  int which = last_frame.get_index_u2(Bytecodes::_checkcast);
+  ConstantPool* cpool = last_frame.method()->constants();
   // We'd expect to assert that we're only here to quicken bytecodes, but in a multithreaded
   // program we might have seen an unquick'd bytecode in the interpreter but have another
   // thread quicken the bytecode before we get here.
   // assert( cpool->tag_at(which).is_unresolved_klass(), "should only come here to quicken bytecodes" );
   Klass* klass = cpool->klass_at(which, CHECK);

@@ -255,12 +260,13 @@
 
 // Assume the compiler is (or will be) interested in this event.
 // If necessary, create an MDO to hold the information, and record it.
 void InterpreterRuntime::note_trap(JavaThread* thread, int reason, TRAPS) {
   assert(ProfileTraps, "call me only if profiling");
-  methodHandle trap_method(thread, method(thread));
-  int trap_bci = trap_method->bci_from(bcp(thread));
+  LastFrameAccessor last_frame(thread);
+  methodHandle trap_method(thread, last_frame.method());
+  int trap_bci = trap_method->bci_from(last_frame.bcp());
   note_trap_inner(thread, reason, trap_method, trap_bci, THREAD);
 }
 
 #ifdef CC_INTERP
 // As legacy note_trap, but we have more arguments.

@@ -389,16 +395,17 @@
 // bci where the exception happened. If the exception was propagated back
 // from a call, the expression stack contains the values for the bci at the
 // invoke w/o arguments (i.e., as if one were inside the call).
 IRT_ENTRY(address, InterpreterRuntime::exception_handler_for_exception(JavaThread* thread, oopDesc* exception))
 
+  LastFrameAccessor last_frame(thread);
   Handle             h_exception(thread, exception);
-  methodHandle       h_method   (thread, method(thread));
+  methodHandle       h_method   (thread, last_frame.method());
   constantPoolHandle h_constants(thread, h_method->constants());
   bool               should_repeat;
   int                handler_bci;
-  int                current_bci = bci(thread);
+  int                current_bci = last_frame.bci();
 
   if (thread->frames_to_pop_failed_realloc() > 0) {
     // Allocation of scalar replaced object used in this frame
     // failed. Unconditionally pop the frame.
     thread->dec_frames_to_pop_failed_realloc();

@@ -491,11 +498,11 @@
 #endif
 
   // notify JVMTI of an exception throw; JVMTI will detect if this is a first
   // time throw or a stack unwinding throw and accordingly notify the debugger
   if (JvmtiExport::can_post_on_exceptions()) {
-    JvmtiExport::post_exception_throw(thread, h_method(), bcp(thread), h_exception());
+    JvmtiExport::post_exception_throw(thread, h_method(), last_frame.bcp(), h_exception());
   }
 
 #ifdef CC_INTERP
   address continuation = (address)(intptr_t) handler_bci;
 #else

@@ -554,24 +561,25 @@
 
 void InterpreterRuntime::resolve_get_put(JavaThread* thread, Bytecodes::Code bytecode) {
   Thread* THREAD = thread;
   // resolve field
   fieldDescriptor info;
-  constantPoolHandle pool(thread, method(thread)->constants());
-  methodHandle m(thread, method(thread));
+  LastFrameAccessor last_frame(thread);
+  constantPoolHandle pool(thread, last_frame.method()->constants());
+  methodHandle m(thread, last_frame.method());
   bool is_put    = (bytecode == Bytecodes::_putfield  || bytecode == Bytecodes::_nofast_putfield ||
                     bytecode == Bytecodes::_putstatic);
   bool is_static = (bytecode == Bytecodes::_getstatic || bytecode == Bytecodes::_putstatic);
 
   {
     JvmtiHideSingleStepping jhss(thread);
-    LinkResolver::resolve_field_access(info, pool, get_index_u2_cpcache(thread, bytecode),
+    LinkResolver::resolve_field_access(info, pool, last_frame.get_index_u2_cpcache(bytecode),
                                        m, bytecode, CHECK);
   } // end JvmtiHideSingleStepping
 
   // check if link resolution caused cpCache to be updated
-  ConstantPoolCacheEntry* cp_cache_entry = cache_entry(thread);
+  ConstantPoolCacheEntry* cp_cache_entry = last_frame.cache_entry();
   if (cp_cache_entry->is_resolved(bytecode)) return;
 
   // compute auxiliary field attributes
   TosState state  = as_TosState(info.field_type());
 

@@ -716,35 +724,36 @@
   JvmtiExport::post_raw_breakpoint(thread, method, bcp);
 IRT_END
 
 void InterpreterRuntime::resolve_invoke(JavaThread* thread, Bytecodes::Code bytecode) {
   Thread* THREAD = thread;
+  LastFrameAccessor last_frame(thread);
   // extract receiver from the outgoing argument list if necessary
   Handle receiver(thread, NULL);
   if (bytecode == Bytecodes::_invokevirtual || bytecode == Bytecodes::_invokeinterface ||
       bytecode == Bytecodes::_invokespecial) {
     ResourceMark rm(thread);
-    methodHandle m (thread, method(thread));
-    Bytecode_invoke call(m, bci(thread));
+    methodHandle m (thread, last_frame.method());
+    Bytecode_invoke call(m, last_frame.bci());
     Symbol* signature = call.signature();
-    receiver = Handle(thread,
-                  thread->last_frame().interpreter_callee_receiver(signature));
+    receiver = Handle(thread, last_frame.callee_receiver(signature));
+
     assert(Universe::heap()->is_in_reserved_or_null(receiver()),
            "sanity check");
     assert(receiver.is_null() ||
            !Universe::heap()->is_in_reserved(receiver->klass()),
            "sanity check");
   }
 
   // resolve method
   CallInfo info;
-  constantPoolHandle pool(thread, method(thread)->constants());
+  constantPoolHandle pool(thread, last_frame.method()->constants());
 
   {
     JvmtiHideSingleStepping jhss(thread);
     LinkResolver::resolve_invoke(info, receiver, pool,
-                                 get_index_u2_cpcache(thread, bytecode), bytecode,
+                                 last_frame.get_index_u2_cpcache(bytecode), bytecode,
                                  CHECK);
     if (JvmtiExport::can_hotswap_or_post_breakpoint()) {
       int retry_count = 0;
       while (info.resolved_method()->is_old()) {
         // It is very unlikely that method is redefined more than 100 times

@@ -752,18 +761,18 @@
         // means then there could be a bug here.
         guarantee((retry_count++ < 100),
                   "Could not resolve to latest version of redefined method");
         // method is redefined in the middle of resolve so re-try.
         LinkResolver::resolve_invoke(info, receiver, pool,
-                                     get_index_u2_cpcache(thread, bytecode), bytecode,
+                                     last_frame.get_index_u2_cpcache(bytecode), bytecode,
                                      CHECK);
       }
     }
   } // end JvmtiHideSingleStepping
 
   // check if link resolution caused cpCache to be updated
-  ConstantPoolCacheEntry* cp_cache_entry = cache_entry(thread);
+  ConstantPoolCacheEntry* cp_cache_entry = last_frame.cache_entry();
   if (cp_cache_entry->is_resolved(bytecode)) return;
 
 #ifdef ASSERT
   if (bytecode == Bytecodes::_invokeinterface) {
     if (info.resolved_method()->method_holder() ==

@@ -823,37 +832,39 @@
 
 // First time execution:  Resolve symbols, create a permanent MethodType object.
 void InterpreterRuntime::resolve_invokehandle(JavaThread* thread) {
   Thread* THREAD = thread;
   const Bytecodes::Code bytecode = Bytecodes::_invokehandle;
+  LastFrameAccessor last_frame(thread);
 
   // resolve method
   CallInfo info;
-  constantPoolHandle pool(thread, method(thread)->constants());
+  constantPoolHandle pool(thread, last_frame.method()->constants());
   {
     JvmtiHideSingleStepping jhss(thread);
     LinkResolver::resolve_invoke(info, Handle(), pool,
-                                 get_index_u2_cpcache(thread, bytecode), bytecode,
+                                 last_frame.get_index_u2_cpcache(bytecode), bytecode,
                                  CHECK);
   } // end JvmtiHideSingleStepping
 
-  ConstantPoolCacheEntry* cp_cache_entry = cache_entry(thread);
+  ConstantPoolCacheEntry* cp_cache_entry = last_frame.cache_entry();
   cp_cache_entry->set_method_handle(pool, info);
 }
 
 // First time execution:  Resolve symbols, create a permanent CallSite object.
 void InterpreterRuntime::resolve_invokedynamic(JavaThread* thread) {
   Thread* THREAD = thread;
+  LastFrameAccessor last_frame(thread);
   const Bytecodes::Code bytecode = Bytecodes::_invokedynamic;
 
   //TO DO: consider passing BCI to Java.
-  //  int caller_bci = method(thread)->bci_from(bcp(thread));
+  //  int caller_bci = last_frame.method()->bci_from(last_frame.bcp());
 
   // resolve method
   CallInfo info;
-  constantPoolHandle pool(thread, method(thread)->constants());
-  int index = get_index_u4(thread, bytecode);
+  constantPoolHandle pool(thread, last_frame.method()->constants());
+  int index = last_frame.get_index_u4(bytecode);
   {
     JvmtiHideSingleStepping jhss(thread);
     LinkResolver::resolve_invoke(info, Handle(), pool,
                                  index, bytecode, CHECK);
   } // end JvmtiHideSingleStepping

@@ -903,13 +914,13 @@
     // This was a successful request for an OSR nmethod.  Because
     // frequency_counter_overflow_inner ends with a safepoint check,
     // nm could have been unloaded so look it up again.  It's unsafe
     // to examine nm directly since it might have been freed and used
     // for something else.
-    frame fr = thread->last_frame();
-    Method* method =  fr.interpreter_frame_method();
-    int bci = method->bci_from(fr.interpreter_frame_bcp());
+    LastFrameAccessor last_frame(thread);
+    Method* method =  last_frame.method();
+    int bci = method->bci_from(last_frame.bcp());
     nm = method->lookup_osr_nmethod_for(bci, CompLevel_none, false);
   }
 #ifndef PRODUCT
   if (TraceOnStackReplacement) {
     if (nm != NULL) {

@@ -925,15 +936,15 @@
           InterpreterRuntime::frequency_counter_overflow_inner(JavaThread* thread, address branch_bcp))
   // use UnlockFlagSaver to clear and restore the _do_not_unlock_if_synchronized
   // flag, in case this method triggers classloading which will call into Java.
   UnlockFlagSaver fs(thread);
 
-  frame fr = thread->last_frame();
-  assert(fr.is_interpreted_frame(), "must come from interpreter");
-  methodHandle method(thread, fr.interpreter_frame_method());
+  LastFrameAccessor last_frame(thread);
+  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
+  methodHandle method(thread, last_frame.method());
   const int branch_bci = branch_bcp != NULL ? method->bci_from(branch_bcp) : InvocationEntryBci;
-  const int bci = branch_bcp != NULL ? method->bci_from(fr.interpreter_frame_bcp()) : InvocationEntryBci;
+  const int bci = branch_bcp != NULL ? method->bci_from(last_frame.bcp()) : InvocationEntryBci;
 
   assert(!HAS_PENDING_EXCEPTION, "Should not have any exceptions pending");
   nmethod* osr_nm = CompilationPolicy::policy()->event(method, method, branch_bci, bci, CompLevel_none, NULL, thread);
   assert(!HAS_PENDING_EXCEPTION, "Event handler should not throw any exceptions");
 

@@ -945,13 +956,13 @@
     // the OSR nmethod might be invalidated) because we don't have a
     // safepoint opportunity later once the migration begins.
     if (UseBiasedLocking) {
       ResourceMark rm;
       GrowableArray<Handle>* objects_to_revoke = new GrowableArray<Handle>();
-      for( BasicObjectLock *kptr = fr.interpreter_frame_monitor_end();
-           kptr < fr.interpreter_frame_monitor_begin();
-           kptr = fr.next_monitor_in_interpreter_frame(kptr) ) {
+      for( BasicObjectLock *kptr = last_frame.monitor_end();
+           kptr < last_frame.monitor_begin();
+           kptr = last_frame.next_monitor(kptr) ) {
         if( kptr->obj() != NULL ) {
           objects_to_revoke->append(Handle(THREAD, kptr->obj()));
         }
       }
       BiasedLocking::revoke(objects_to_revoke);

@@ -972,13 +983,13 @@
   // use UnlockFlagSaver to clear and restore the _do_not_unlock_if_synchronized
   // flag, in case this method triggers classloading which will call into Java.
   UnlockFlagSaver fs(thread);
 
   assert(ProfileInterpreter, "must be profiling interpreter");
-  frame fr = thread->last_frame();
-  assert(fr.is_interpreted_frame(), "must come from interpreter");
-  methodHandle method(thread, fr.interpreter_frame_method());
+  LastFrameAccessor last_frame(thread);
+  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
+  methodHandle method(thread, last_frame.method());
   Method::build_interpreter_method_data(method, THREAD);
   if (HAS_PENDING_EXCEPTION) {
     assert((PENDING_EXCEPTION->is_a(SystemDictionary::OutOfMemoryError_klass())), "we expect only an OOM error here");
     CLEAR_PENDING_EXCEPTION;
     // and fall through...

@@ -1019,25 +1030,25 @@
 
 IRT_ENTRY(void, InterpreterRuntime::update_mdp_for_ret(JavaThread* thread, int return_bci))
   assert(ProfileInterpreter, "must be profiling interpreter");
   ResourceMark rm(thread);
   HandleMark hm(thread);
-  frame fr = thread->last_frame();
-  assert(fr.is_interpreted_frame(), "must come from interpreter");
-  MethodData* h_mdo = fr.interpreter_frame_method()->method_data();
+  LastFrameAccessor last_frame(thread);
+  assert(last_frame.is_interpreted_frame(), "must come from interpreter");
+  MethodData* h_mdo = last_frame.method()->method_data();
 
   // Grab a lock to ensure atomic access to setting the return bci and
   // the displacement.  This can block and GC, invalidating all naked oops.
   MutexLocker ml(RetData_lock);
 
   // ProfileData is essentially a wrapper around a derived oop, so we
   // need to take the lock before making any ProfileData structures.
-  ProfileData* data = h_mdo->data_at(h_mdo->dp_to_di(fr.interpreter_frame_mdp()));
+  ProfileData* data = h_mdo->data_at(h_mdo->dp_to_di(last_frame.mdp()));
   guarantee(data != NULL, "profile data must be valid");
   RetData* rdata = data->as_RetData();
   address new_mdp = rdata->fixup_ret(return_bci, h_mdo);
-  fr.interpreter_frame_set_mdp(new_mdp);
+  last_frame.set_mdp(new_mdp);
 IRT_END
 
 IRT_ENTRY(MethodCounters*, InterpreterRuntime::build_method_counters(JavaThread* thread, Method* m))
   MethodCounters* mcs = Method::build_method_counters(m, thread);
   if (HAS_PENDING_EXCEPTION) {

@@ -1058,11 +1069,12 @@
 
   if (JvmtiExport::should_post_single_step()) {
     // We are called during regular safepoints and when the VM is
     // single stepping. If any thread is marked for single stepping,
     // then we may have JVMTI work to do.
-    JvmtiExport::at_single_stepping_point(thread, method(thread), bcp(thread));
+    LastFrameAccessor last_frame(thread);
+    JvmtiExport::at_single_stepping_point(thread, last_frame.method(), last_frame.bcp());
   }
 IRT_END
 
 IRT_ENTRY(void, InterpreterRuntime::post_field_access(JavaThread *thread, oopDesc* obj,
 ConstantPoolCacheEntry *cp_entry))

@@ -1081,11 +1093,12 @@
     // non-static field accessors have an object, but we need a handle
     h_obj = Handle(thread, obj);
   }
   InstanceKlass* cp_entry_f1 = InstanceKlass::cast(cp_entry->f1_as_klass());
   jfieldID fid = jfieldIDWorkaround::to_jfieldID(cp_entry_f1, cp_entry->f2_as_index(), is_static);
-  JvmtiExport::post_field_access(thread, method(thread), bcp(thread), cp_entry_f1, h_obj, fid);
+  LastFrameAccessor last_frame(thread);
+  JvmtiExport::post_field_access(thread, last_frame.method(), last_frame.bcp(), cp_entry_f1, h_obj, fid);
 IRT_END
 
 IRT_ENTRY(void, InterpreterRuntime::post_field_modification(JavaThread *thread,
   oopDesc* obj, ConstantPoolCacheEntry *cp_entry, jvalue *value))
 

@@ -1136,21 +1149,24 @@
   if (!is_static) {
     // non-static field accessors have an object, but we need a handle
     h_obj = Handle(thread, obj);
   }
 
-  JvmtiExport::post_raw_field_modification(thread, method(thread), bcp(thread), ik, h_obj,
+  LastFrameAccessor last_frame(thread);
+  JvmtiExport::post_raw_field_modification(thread, last_frame.method(), last_frame.bcp(), ik, h_obj,
                                            fid, sig_type, &fvalue);
 IRT_END
 
 IRT_ENTRY(void, InterpreterRuntime::post_method_entry(JavaThread *thread))
-  JvmtiExport::post_method_entry(thread, InterpreterRuntime::method(thread), InterpreterRuntime::last_frame(thread));
+  LastFrameAccessor last_frame(thread);
+  JvmtiExport::post_method_entry(thread, last_frame.method(), last_frame.get_frame());
 IRT_END
 
 
 IRT_ENTRY(void, InterpreterRuntime::post_method_exit(JavaThread *thread))
-  JvmtiExport::post_method_exit(thread, InterpreterRuntime::method(thread), InterpreterRuntime::last_frame(thread));
+  LastFrameAccessor last_frame(thread);
+  JvmtiExport::post_method_exit(thread, last_frame.method(), last_frame.get_frame());
 IRT_END
 
 IRT_LEAF(int, InterpreterRuntime::interpreter_contains(address pc))
 {
   return (Interpreter::contains(pc) ? 1 : 0);

@@ -1370,14 +1386,14 @@
     return;
   }
   ResetNoHandleMark rnm; // In a LEAF entry.
   HandleMark hm;
   ResourceMark rm;
-  frame fr = thread->last_frame();
-  assert(fr.is_interpreted_frame(), "");
-  jint bci = fr.interpreter_frame_bci();
-  methodHandle mh(thread, fr.interpreter_frame_method());
+  LastFrameAccessor last_frame(thread);
+  assert(last_frame.is_interpreted_frame(), "");
+  jint bci = last_frame.bci();
+  methodHandle mh(thread, last_frame.method());
   Bytecode_invoke invoke(mh, bci);
   ArgumentSizeComputer asc(invoke.signature());
   int size_of_arguments = (asc.size() + (invoke.has_receiver() ? 1 : 0)); // receiver
   Copy::conjoint_jbytes(src_address, dest_address,
                        size_of_arguments * Interpreter::stackElementSize);

@@ -1419,12 +1435,12 @@
 // This must be a IRT_LEAF function because the interpreter must save registers on x86 to
 // call this, which changes rsp and makes the interpreter's expression stack not walkable.
 // The generated code still uses call_VM because that will set up the frame pointer for
 // bcp and method.
 IRT_LEAF(intptr_t, InterpreterRuntime::trace_bytecode(JavaThread* thread, intptr_t preserve_this_value, intptr_t tos, intptr_t tos2))
-  const frame f = thread->last_frame();
-  assert(f.is_interpreted_frame(), "must be an interpreted frame");
-  methodHandle mh(thread, f.interpreter_frame_method());
-  BytecodeTracer::trace(mh, f.interpreter_frame_bcp(), tos, tos2);
+  LastFrameAccessor last_frame(thread);
+  assert(last_frame.is_interpreted_frame(), "must be an interpreted frame");
+  methodHandle mh(thread, last_frame.method());
+  BytecodeTracer::trace(mh, last_frame.bcp(), tos, tos2);
   return preserve_this_value;
 IRT_END
 #endif // !PRODUCT
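
Note: the LastFrameAccessor class introduced by this change is declared outside this file (presumably in interpreterRuntime.hpp), so its definition does not appear in the diff above. The sketch below is inferred purely from the call sites in this change: it shows the shape such a stack-allocated wrapper around thread->last_frame() would take, with one member per accessor used here. The exact signatures, the bytecode() helper, and the index-decoding bodies are assumptions, not the actual implementation.

// Sketch only -- inferred from the call sites in this change, not the real declaration.
// A stack-allocated wrapper that caches thread->last_frame() once and forwards the
// interpreter_frame_* queries, so each runtime entry walks the stack a single time
// instead of re-fetching the last frame for every method()/bcp()/bci() lookup.
class LastFrameAccessor : public StackObj {
  frame _last_frame;
 public:
  LastFrameAccessor(JavaThread* thread) {
    assert(thread == Thread::current(), "sanity");
    _last_frame = thread->last_frame();
  }

  bool    is_interpreted_frame() const { return _last_frame.is_interpreted_frame(); }
  Method* method() const               { return _last_frame.interpreter_frame_method(); }
  address bcp() const                  { return _last_frame.interpreter_frame_bcp(); }
  int     bci() const                  { return _last_frame.interpreter_frame_bci(); }
  address mdp() const                  { return _last_frame.interpreter_frame_mdp(); }

  void set_bcp(address bcp)            { _last_frame.interpreter_frame_set_bcp(bcp); }
  void set_mdp(address dp)             { _last_frame.interpreter_frame_set_mdp(dp); }

  // Bytecode at the current bcp; assumed helper backing the index accessors below.
  Bytecode bytecode() const            { return Bytecode(method(), bcp()); }
  int get_index_u1(Bytecodes::Code bc) const         { return bytecode().get_index_u1(bc); }
  int get_index_u2(Bytecodes::Code bc) const         { return bytecode().get_index_u2(bc); }
  int get_index_u2_cpcache(Bytecodes::Code bc) const { return bytecode().get_index_u2_cpcache(bc); }
  int get_index_u4(Bytecodes::Code bc) const         { return bytecode().get_index_u4(bc); }
  int number_of_dimensions() const     { return bcp()[3]; }  // u1 operand of multianewarray

  ConstantPoolCacheEntry* cache_entry_at(int i) const {
    return method()->constants()->cache()->entry_at(i);
  }
  ConstantPoolCacheEntry* cache_entry() const {
    return cache_entry_at(Bytes::get_native_u2(bcp() + 1));
  }

  oop callee_receiver(Symbol* signature) { return _last_frame.interpreter_callee_receiver(signature); }
  BasicObjectLock* monitor_begin() const { return _last_frame.interpreter_frame_monitor_begin(); }
  BasicObjectLock* monitor_end() const   { return _last_frame.interpreter_frame_monitor_end(); }
  BasicObjectLock* next_monitor(BasicObjectLock* current) const {
    return _last_frame.next_monitor_in_interpreter_frame(current);
  }

  frame& get_frame()                   { return _last_frame; }
};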