src/cpu/x86/vm/templateTable_x86.cpp

@@ -2102,10 +2102,40 @@
 #endif // _LP64
   }
 }
 
 void TemplateTable::branch(bool is_jsr, bool is_wide) {
+  if (ValueTypesThreadLocalRecycling) {
+    Label no_vt_recycling, no_fixing_required;
+    const Register thread1 = NOT_LP64(rbx) LP64_ONLY(r15_thread);
+    NOT_LP64(__ get_thread(thread1));
+    __ movptr(rbx, Address(thread1, in_bytes(JavaThread::vt_alloc_ptr_offset())));
+    __ testptr(rbx, rbx);
+    __ jcc(Assembler::zero, no_vt_recycling);
+    __ movptr(rcx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
+    __ testptr(rcx, rcx);
+    __ jcc(Assembler::notZero, no_fixing_required);
+    // vt_alloc_ptr in JavaThread is non-null but the frame's vt_alloc_ptr is null,
+    // which means the frame's vt_alloc_ptr needs to be initialized
+    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::fix_frame_vt_alloc_ptr));
+    __ movptr(rcx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
+    __ bind(no_fixing_required);
+    __ testptr(rcx, rbx);
+    __ jcc(Assembler::equal, no_vt_recycling);
+    __ andptr(rcx, VTBufferChunk::chunk_mask());
+    __ movl(rcx, Address(rcx, VTBufferChunk::index_offset()));
+    __ andptr(rbx, VTBufferChunk::chunk_mask());
+    __ movl(rbx, Address(rbx, VTBufferChunk::index_offset()));
+    __ subl(rbx, rcx);
+    __ get_method(rcx);
+    __ movl(rcx, Address(rcx, Method::max_vt_buffer_offset()));
+    __ cmpl(rbx, rcx);
+    __ jcc(Assembler::lessEqual, no_vt_recycling);
+    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::recycle_buffered_values));
+    __ bind(no_vt_recycling);
+  }
+
   __ get_method(rcx); // rcx holds method
   __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
                                      // holds bumped taken count
 
   const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
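
As a reading aid, the following plain C++ sketch models the decision the emitted prologue makes at a taken branch: if the thread has an active value-type allocation pointer, make sure the frame's saved pointer is initialized, estimate how many buffer chunks have been filled since the frame was entered, and call the recycling runtime once that exceeds the method's budget. VTChunkModel, kChunkSize and the helper names are assumptions standing in for VTBufferChunk, Method::max_vt_buffer and the InterpreterRuntime entry points; only the control flow mirrors the generated code.

#include <cstdint>

namespace vt_recycling_sketch {

constexpr uintptr_t kChunkSize = 64 * 1024;           // assumed chunk granularity
constexpr uintptr_t kChunkMask = ~(kChunkSize - 1);   // analogue of VTBufferChunk::chunk_mask()

struct VTChunkModel {        // analogue of VTBufferChunk
  int index;                 // position of this chunk in the thread-local buffer
};

// Mask an allocation pointer down to its chunk header and read the chunk index,
// as the andptr/movl pairs above do for rbx and rcx.
inline int chunk_index_of(uintptr_t alloc_ptr) {
  return reinterpret_cast<const VTChunkModel*>(alloc_ptr & kChunkMask)->index;
}

// Decide whether the branch should call InterpreterRuntime::recycle_buffered_values.
// fix_frame_ptr models InterpreterRuntime::fix_frame_vt_alloc_ptr.
inline bool should_recycle(uintptr_t thread_vt_alloc_ptr,
                           uintptr_t* frame_vt_alloc_ptr,
                           int max_vt_buffer,
                           void (*fix_frame_ptr)(uintptr_t*)) {
  if (thread_vt_alloc_ptr == 0) {
    return false;                               // no thread-local buffering in use
  }
  if (*frame_vt_alloc_ptr == 0) {
    fix_frame_ptr(frame_vt_alloc_ptr);          // frame slot still needs initializing
  }
  if (*frame_vt_alloc_ptr == thread_vt_alloc_ptr) {
    return false;                               // nothing buffered since the frame was entered
  }
  int chunks_used = chunk_index_of(thread_vt_alloc_ptr)
                  - chunk_index_of(*frame_vt_alloc_ptr);
  return chunks_used > max_vt_buffer;           // over the per-method budget: recycle
}

} // namespace vt_recycling_sketch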

@@ -2618,16 +2648,34 @@
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), robj);
 
     __ bind(skip_register_finalizer);
   }
 
+  if (state == qtos) {
+    const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
+    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::return_value), rax);
+    NOT_LP64(__ get_thread(thread1));
+    __ get_vm_result(rax, thread1);
+  }
   // Narrow result if state is itos but result type is smaller.
   // Need to narrow in the return bytecode rather than in generate_return_entry
   // since compiled code callers expect the result to already be narrowed.
   if (state == itos) {
     __ narrow(rax);
   }
+
+#ifdef ASSERT
+  if (EnableMVT || EnableValhalla) {
+    if (state == atos) {
+      const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
+      __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::check_areturn), rax);
+      NOT_LP64(__ get_thread(thread1));
+      __ get_vm_result(rax, thread1);
+    }
+  }
+#endif // ASSERT
+
   __ remove_activation(state, rbcp, true, true, true, state == qtos && ValueTypeReturnedAsFields);
 
   __ jmp(rbcp);
 }
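
A short model of the new return handling, for orientation only: qtos values go through InterpreterRuntime::return_value (which may replace a thread-locally buffered value with a heap copy), and under ASSERT an atos return is additionally checked by InterpreterRuntime::check_areturn; in both cases the result register is reloaded from the thread's vm_result slot before the activation is removed. ThreadModel and the *_rt placeholders below are assumptions, not HotSpot API, and the EnableMVT/EnableValhalla flag test is folded into the ASSERT block.

#include <cstdint>

namespace return_sketch {

struct Oop { uintptr_t bits = 0; };

struct ThreadModel {
  Oop vm_result;          // analogue of the slot read by get_vm_result()
};

enum class TosState { itos, atos, qtos };

// Placeholder for InterpreterRuntime::return_value: in the VM this copies a
// thread-locally buffered value to the heap if needed and publishes the
// (possibly new) oop through vm_result.
inline void return_value_rt(ThreadModel& thread, Oop value) {
  thread.vm_result = value;
}

// Placeholder for InterpreterRuntime::check_areturn: debug-only check that an
// areturn never returns a buffered value; here it just republishes the oop.
inline void check_areturn_rt(ThreadModel& thread, Oop value) {
  thread.vm_result = value;
}

// Mirrors the order of operations in the return bytecode: qtos values go through
// the runtime first, then (under ASSERT) atos values are checked, and in both
// cases rax is reloaded from vm_result before remove_activation.
inline Oop process_return(ThreadModel& thread, TosState state, Oop value) {
  if (state == TosState::qtos) {
    return_value_rt(thread, value);        // call_VM(..., return_value, rax)
    value = thread.vm_result;              // get_vm_result(rax, thread)
  }
#ifdef ASSERT
  if (state == TosState::atos) {
    check_areturn_rt(thread, value);       // call_VM(..., check_areturn, rax)
    value = thread.vm_result;
  }
#endif
  return value;
}

} // namespace return_sketch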
 

@@ -2843,24 +2891,25 @@
 
   __ cmpl(flags, qtos);
   __ jcc(Assembler::notEqual, notValueType);
   // qtos
   if (is_static) {
+    Label initialized;
+    // Call into the runtime below if the static value field has not been initialized yet
     __ load_heap_oop(rax, field);
+    __ testptr(rax, rax);
+    __ jcc(Assembler::notZero, initialized);
+    __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::initialize_static_value_field),
+         obj, off);
+    __ verify_oop(rax);
+    __ bind(initialized);
     __ push(qtos);
     //    if (!is_static && !is_vgetfield) {
     //      patch_bytecode(Bytecodes::_fast_qgetfield, bc, rbx);
     //    }
   } else {
-
-    // cp cache entry pointer
-//    __ addptr(cache, in_bytes(ConstantPoolCache::base_offset()));
-//    __ shll(index, LogBytesPerWord);
-//    __ addptr(cache, index);
-
     pop_and_check_object(obj);
-
     call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::qgetfield),
         obj, off);
     __ verify_oop(rax);
     __ push(qtos);
     // Bytecode rewrite?
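
The static qtos path above is a plain lazy-initialization pattern: read the field, and only call into the runtime when it is still null. The sketch below restates it in C++; allocate_default is a hypothetical stand-in for InterpreterRuntime::initialize_static_value_field.

namespace getstatic_sketch {

struct Oop { void* p = nullptr; };

// Read a static value field, initializing it through the runtime on first use.
inline Oop load_static_value_field(Oop* field_addr,
                                   Oop (*allocate_default)(Oop* field_addr)) {
  Oop value = *field_addr;                 // __ load_heap_oop(rax, field)
  if (value.p != nullptr) {
    return value;                          // fast path: field already initialized
  }
  return allocate_default(field_addr);     // slow path: call_VM(..., initialize_static_value_field, obj, off)
}

} // namespace getstatic_sketch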

@@ -3204,11 +3253,13 @@
           rbx, rax, rcx);
       __ jmp(notVolatile); // value types are never volatile
     } else {
       // Store into the static field
       // Value types in static fields are currently handled with indirection
-      do_oop_store(_masm, field, rax, _bs->kind(), false);
+      // but a copy to the Java heap might be required if the value is currently
+      // stored in a thread-local buffer
+      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::qputstatic), rax);
     }
     __ jmp(Done);
   }
 
   __ bind(notValueType);
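
The putstatic change above replaces a direct oop store with a runtime call because the incoming value may still live in a thread-local buffer, and a buffered value must be copied to the Java heap before it can be published through a static field. A rough model follows, with is_buffered and heap_copy_of as assumed helpers rather than real runtime functions:

namespace putstatic_sketch {

struct Oop { void* p = nullptr; };

// Model of InterpreterRuntime::qputstatic: make sure the stored reference points
// into the heap, copying out of the thread-local buffer if necessary, then do the
// store that do_oop_store previously performed directly.
inline void qputstatic_model(Oop value,
                             Oop* static_field,
                             bool (*is_buffered)(Oop),
                             Oop (*heap_copy_of)(Oop)) {
  if (is_buffered(value)) {
    value = heap_copy_of(value);   // relocate the buffered value to the heap
  }
  *static_field = value;           // publish the heap-resident value
}

} // namespace putstatic_sketch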