< prev index next >

src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp

Print this page

        

@@ -160,20 +160,30 @@
   }
 
   // Creation also verifies the object.
   NativeMovConstReg* method_holder
     = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
-#ifndef PRODUCT
+
+#ifdef ASSERT
   NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());
 
-  // read the value once
-  volatile intptr_t data = method_holder->data();
-  assert(data == 0 || data == (intptr_t)callee(),
+  // A generated LambdaForm might be deleted from the LambdaForm
+  // cache in MethodTypeForm.  If a JIT-compiled LambdaForm method
+  // becomes non-entrant and the cache access returns null, the next
+  // resolve will lead to a newly generated LambdaForm.
+  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
+  assert(old_method == NULL || old_method == callee() ||
+         callee->is_compiled_lambda_form() ||
+         !old_method->method_holder()->is_loader_alive() ||
+         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
          "a) MT-unsafe modification of inline cache");
-  assert(data == 0 || jump->jump_destination() == entry,
+
+  address destination = jump->jump_destination();
+  assert(destination == (address)-1 || destination == entry,
          "b) MT-unsafe modification of inline cache");
 #endif
+
   // Update stub.
   method_holder->set_data((intptr_t)callee());
   NativeGeneralJump::insert_unconditional(method_holder->next_instruction_address(), entry);
   ICache::invalidate_range(stub, to_interp_stub_size());
   // Update jump to call.
< prev index next >