/*
 * Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "asm/codeBuffer.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "code/codeCache.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/disassembler.hpp"
#include "jvmci/jvmciRuntime.hpp"
#include "jvmci/jvmciCompilerToVM.hpp"
#include "jvmci/jvmciCompiler.hpp"
#include "jvmci/jvmciJavaClasses.hpp"
#include "jvmci/jvmciEnv.hpp"
#include "logging/log.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/oop.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "prims/jvm.h"
#include "runtime/biasedLocking.hpp"
#include "runtime/interfaceSupport.hpp"
#include "runtime/reflection.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/debug.hpp"
#include "utilities/defaultStream.hpp"

#if defined(_MSC_VER)
#define strtoll _strtoi64
#endif

jobject JVMCIRuntime::_HotSpotJVMCIRuntime_instance = NULL;
bool JVMCIRuntime::_HotSpotJVMCIRuntime_initialized = false;
bool JVMCIRuntime::_well_known_classes_initialized = false;
int JVMCIRuntime::_trivial_prefixes_count = 0;
char** JVMCIRuntime::_trivial_prefixes = NULL;
bool JVMCIRuntime::_shutdown_called = false;

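// Maps the type character of a jdk.vm.ci.meta.JavaKind object to the corresponding
// HotSpot BasicType. A null kind raises NullPointerException; an unexpected type
// character is reported as a JVMCI error.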
BasicType JVMCIRuntime::kindToBasicType(Handle kind, TRAPS) {
  if (kind.is_null()) {
    THROW_(vmSymbols::java_lang_NullPointerException(), T_ILLEGAL);
  }
  jchar ch = JavaKind::typeChar(kind);
  switch(ch) {
    case 'z': return T_BOOLEAN;
    case 'b': return T_BYTE;
    case 's': return T_SHORT;
    case 'c': return T_CHAR;
    case 'i': return T_INT;
    case 'f': return T_FLOAT;
    case 'j': return T_LONG;
    case 'd': return T_DOUBLE;
    case 'a': return T_OBJECT;
    case '-': return T_ILLEGAL;
    default:
      JVMCI_ERROR_(T_ILLEGAL, "unexpected Kind: %c", ch);
  }
}

// Simple helper to see if the caller of a runtime stub which
// entered the VM has been deoptimized

static bool caller_is_deopted() {
  JavaThread* thread = JavaThread::current();
  RegisterMap reg_map(thread, false);
  frame runtime_frame = thread->last_frame();
  frame caller_frame = runtime_frame.sender(&reg_map);
  assert(caller_frame.is_compiled_frame(), "must be compiled");
  return caller_frame.is_deoptimized_frame();
}

// Stress deoptimization
static void deopt_caller() {
  if (!caller_is_deopted()) {
    JavaThread* thread = JavaThread::current();
    RegisterMap reg_map(thread, false);
    frame runtime_frame = thread->last_frame();
    frame caller_frame = runtime_frame.sender(&reg_map);
    Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint);
    assert(caller_is_deopted(), "Must be deoptimized");
  }
}

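// Slow-path allocation of an instance of the given klass, called from
// JVMCI-compiled code. The new object is returned to the caller through the
// thread-local vm_result field rather than as a C++ return value.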
JRT_BLOCK_ENTRY(void, JVMCIRuntime::new_instance(JavaThread* thread, Klass* klass))
  JRT_BLOCK;
  assert(klass->is_klass(), "not a class");
  Handle holder(THREAD, klass->klass_holder()); // keep the klass alive
  instanceKlassHandle h(thread, klass);
  h->check_valid_for_instantiation(true, CHECK);
  // make sure klass is initialized
  h->initialize(CHECK);
  // allocate instance and return via TLS
  oop obj = h->allocate_instance(CHECK);
  thread->set_vm_result(obj);
  JRT_BLOCK_END;

  if (ReduceInitialCardMarks) {
    new_store_pre_barrier(thread);
  }
JRT_END

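// Slow-path allocation of a type or object array of the given length.
// As with new_instance, the result is passed back via thread->set_vm_result().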
JRT_BLOCK_ENTRY(void, JVMCIRuntime::new_array(JavaThread* thread, Klass* array_klass, jint length))
  JRT_BLOCK;
  // Note: no handle for array_klass is needed since it is not used
  //       again after new_typeArray()/new_objArray() and no GC can happen before then.
  //       (This may have to change if this code changes!)
  assert(array_klass->is_klass(), "not a class");
  oop obj;
  if (array_klass->is_typeArray_klass()) {
    BasicType elt_type = TypeArrayKlass::cast(array_klass)->element_type();
    obj = oopFactory::new_typeArray(elt_type, length, CHECK);
  } else {
    Handle holder(THREAD, array_klass->klass_holder()); // keep the klass alive
    Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
    obj = oopFactory::new_objArray(elem_klass, length, CHECK);
  }
  thread->set_vm_result(obj);
  // This path is pretty rare, but deoptimizing here is stressful to the
  // deoptimization machinery, so force a deopt to stress that path.
  if (DeoptimizeALot) {
    static int deopts = 0;
    // Alternate between deoptimizing and raising an error (which will also cause a deopt)
    if (deopts++ % 2 == 0) {
      ResourceMark rm(THREAD);
      THROW(vmSymbols::java_lang_OutOfMemoryError());
    } else {
      deopt_caller();
    }
  }
  JRT_BLOCK_END;

  if (ReduceInitialCardMarks) {
    new_store_pre_barrier(thread);
  }
JRT_END

void JVMCIRuntime::new_store_pre_barrier(JavaThread* thread) {
  // After any safepoint, just before going back to compiled code,
  // we inform the GC that we will be doing initializing writes to
  // this object in the future without emitting card-marks, so
  // GC may take any compensating steps.
  // NOTE: Keep this code consistent with GraphKit::store_barrier.

  oop new_obj = thread->vm_result();
  if (new_obj == NULL)  return;

  assert(Universe::heap()->can_elide_tlab_store_barriers(),
         "compiler must check this first");
  // GC may decide to give back a safer copy of new_obj.
  new_obj = Universe::heap()->new_store_pre_barrier(thread, new_obj);
  thread->set_vm_result(new_obj);
}

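// Allocates a multi-dimensional array; 'dims' points to 'rank' jint dimension values.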
JRT_ENTRY(void, JVMCIRuntime::new_multi_array(JavaThread* thread, Klass* klass, int rank, jint* dims))
  assert(klass->is_klass(), "not a class");
  assert(rank >= 1, "rank must be nonzero");
  Handle holder(THREAD, klass->klass_holder()); // keep the klass alive
  oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
  thread->set_vm_result(obj);
JRT_END

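// Allocates an array whose element type is only known at run time, supplied as a
// java.lang.Class mirror.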
JRT_ENTRY(void, JVMCIRuntime::dynamic_new_array(JavaThread* thread, oopDesc* element_mirror, jint length))
  oop obj = Reflection::reflect_new_array(element_mirror, length, CHECK);
  thread->set_vm_result(obj);
JRT_END

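// Allocates an instance of a class that is only known at run time, supplied as a
// java.lang.Class mirror. Throws InstantiationException if the mirror has no
// associated Klass (e.g. a primitive type mirror).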
JRT_ENTRY(void, JVMCIRuntime::dynamic_new_instance(JavaThread* thread, oopDesc* type_mirror))
  instanceKlassHandle klass(THREAD, java_lang_Class::as_Klass(type_mirror));

  if (klass == NULL) {
    ResourceMark rm(THREAD);
    THROW(vmSymbols::java_lang_InstantiationException());
  }

  // Create new instance (the receiver)
  klass->check_valid_for_instantiation(false, CHECK);

  // Make sure klass gets initialized
  klass->initialize(CHECK);

  oop obj = klass->allocate_instance(CHECK);
  thread->set_vm_result(obj);
JRT_END

extern void vm_exit(int code);

// Enter this method from compiled code handler below. This is where we transition
// to VM mode. This is done as a helper routine so that the method called directly
// from compiled code does not have to transition to VM. This allows the entry
// method to see if the nmethod that we have just looked up a handler for has
// been deoptimized while we were in the vm. This simplifies the assembly code
// in the cpu directories.
//
// We are entering here from exception stub (via the entry method below)
// If there is a compiled exception handler in this method, we will continue there;
// otherwise we will unwind the stack and continue at the caller of top frame method
// Note: we enter in Java using a special JRT wrapper. This wrapper allows us to
// control the area where we can allow a safepoint. After we exit the safepoint area we can
// check to see if the handler we are going to return is now in a nmethod that has
// been deoptimized. If that is the case we return the deopt blob
// unpack_with_exception entry instead. This makes life for the exception blob easier
// because making that same check and diverting is painful from assembly language.
JRT_ENTRY_NO_ASYNC(static address, exception_handler_for_pc_helper(JavaThread* thread, oopDesc* ex, address pc, CompiledMethod*& cm))
  // Reset method handle flag.
  thread->set_is_method_handle_return(false);

  Handle exception(thread, ex);
  cm = CodeCache::find_compiled(pc);
  assert(cm != NULL, "this is not a compiled method");
  // Adjust the pc as needed.
  if (cm->is_deopt_pc(pc)) {
    RegisterMap map(thread, false);
    frame exception_frame = thread->last_frame().sender(&map);
    // if the frame isn't deopted then pc must not correspond to the caller of last_frame
    assert(exception_frame.is_deoptimized_frame(), "must be deopted");
    pc = exception_frame.pc();
  }
#ifdef ASSERT
  assert(exception.not_null(), "NULL exceptions should be handled by throw_exception");
  assert(exception->is_oop(), "just checking");
  // Check that exception is a subclass of Throwable, otherwise we have a VerifyError
  if (!(exception->is_a(SystemDictionary::Throwable_klass()))) {
    if (ExitVMOnVerifyError) vm_exit(-1);
    ShouldNotReachHere();
  }
#endif

  // Check the stack guard pages and reenable them if necessary and there is
  // enough space on the stack to do so.  Use fast exceptions only if the guard
  // pages are enabled.
  bool guard_pages_enabled = thread->stack_guards_enabled();
  if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack();

  if (JvmtiExport::can_post_on_exceptions()) {
    // To ensure correct notification of exception catches and throws
    // we have to deoptimize here.  If we attempted to notify the
    // catches and throws during this exception lookup it's possible
    // we could deoptimize on the way out of the VM and end back in
    // the interpreter at the throw site.  This would result in double
    // notifications since the interpreter would also notify about
    // these same catches and throws as it unwound the frame.

    RegisterMap reg_map(thread);
    frame stub_frame = thread->last_frame();
    frame caller_frame = stub_frame.sender(&reg_map);

    // We don't really want to deoptimize the nmethod itself since we
    // can actually continue in the exception handler ourselves but I
    // don't see an easy way to have the desired effect.
    Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint);
    assert(caller_is_deopted(), "Must be deoptimized");

    return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
  }

  // ExceptionCache is used only for exceptions at call sites and not for implicit exceptions
  if (guard_pages_enabled) {
    address fast_continuation = cm->handler_for_exception_and_pc(exception, pc);
    if (fast_continuation != NULL) {
      // Set flag if return address is a method handle call site.
      thread->set_is_method_handle_return(cm->is_method_handle_return(pc));
      return fast_continuation;
    }
  }

  // If the stack guard pages are enabled, check whether there is a handler in
  // the current method.  Otherwise (guard pages disabled), force an unwind and
  // skip the exception cache update (i.e., just leave continuation==NULL).
  address continuation = NULL;
  if (guard_pages_enabled) {

    // New exception handling mechanism can support inlined methods
    // with exception handlers since the mappings are from PC to PC

    // debugging support
    // tracing
    if (log_is_enabled(Info, exceptions)) {
      ResourceMark rm;
      stringStream tempst;
      tempst.print("compiled method <%s>\n"
                   " at PC" INTPTR_FORMAT " for thread " INTPTR_FORMAT,
                   cm->method()->print_value_string(), p2i(pc), p2i(thread));
      Exceptions::log_exception(exception, tempst);
    }
    // for AbortVMOnException flag
    NOT_PRODUCT(Exceptions::debug_check_abort(exception));

    // Clear out the exception oop and pc since looking up an
    // exception handler can cause class loading, which might throw an
    // exception and those fields are expected to be clear during
    // normal bytecode execution.
    thread->clear_exception_oop_and_pc();

    continuation = SharedRuntime::compute_compiled_exc_handler(cm, pc, exception, false, false);
    // If an exception was thrown during exception dispatch, the exception oop may have changed
    thread->set_exception_oop(exception());
    thread->set_exception_pc(pc);

    // the exception cache is used only by non-implicit exceptions
    if (continuation != NULL && !SharedRuntime::deopt_blob()->contains(continuation)) {
      cm->add_handler_for_exception_and_pc(exception, pc, continuation);
    }
  }

  // Set flag if return address is a method handle call site.
  thread->set_is_method_handle_return(cm->is_method_handle_return(pc));

  if (log_is_enabled(Info, exceptions)) {
    ResourceMark rm;
    log_info(exceptions)("Thread " PTR_FORMAT " continuing at PC " PTR_FORMAT
                         " for exception thrown at PC " PTR_FORMAT,
                         p2i(thread), p2i(continuation), p2i(pc));
  }

  return continuation;
JRT_END

// Enter this method from compiled code only if there is a Java exception handler
// in the method handling the exception.
// We are entering here from exception stub. We don't do a normal VM transition here.
// We do it in a helper. This is so we can check to see if the nmethod we have just
// searched for an exception handler has been deoptimized in the meantime.
address JVMCIRuntime::exception_handler_for_pc(JavaThread* thread) {
  oop exception = thread->exception_oop();
  address pc = thread->exception_pc();
  // Still in Java mode
  DEBUG_ONLY(ResetNoHandleMark rnhm);
  CompiledMethod* cm = NULL;
  address continuation = NULL;
  {
    // Enter VM mode by calling the helper
    ResetNoHandleMark rnhm;
    continuation = exception_handler_for_pc_helper(thread, exception, pc, cm);
  }
  // Back in Java: use no oops, DON'T safepoint.

  // Now check to see if the compiled method we were called from is now deoptimized.
  // If so we must return to the deopt blob and deoptimize the nmethod
  if (cm != NULL && caller_is_deopted()) {
    continuation = SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
  }

  assert(continuation != NULL, "no handler found");
  return continuation;
}

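// Slow-path monitor enter for JVMCI-compiled code: falls back to
// ObjectSynchronizer, honoring biased locking and the JVMCIUseFastLocking flag.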
JRT_ENTRY_NO_ASYNC(void, JVMCIRuntime::monitorenter(JavaThread* thread, oopDesc* obj, BasicLock* lock))
  IF_TRACE_jvmci_3 {
    char type[O_BUFLEN];
    obj->klass()->name()->as_C_string(type, O_BUFLEN);
    markOop mark = obj->mark();
    TRACE_jvmci_3("%s: entered locking slow case with obj=" INTPTR_FORMAT ", type=%s, mark=" INTPTR_FORMAT ", lock=" INTPTR_FORMAT, thread->name(), p2i(obj), type, p2i(mark), p2i(lock));
    tty->flush();
  }
#ifdef ASSERT
  if (PrintBiasedLockingStatistics) {
    Atomic::inc(BiasedLocking::slow_path_entry_count_addr());
  }
#endif
  Handle h_obj(thread, obj);
  assert(h_obj()->is_oop(), "must be NULL or an object");
  if (UseBiasedLocking) {
    // Retry fast entry if bias is revoked to avoid unnecessary inflation
    ObjectSynchronizer::fast_enter(h_obj, lock, true, CHECK);
  } else {
    if (JVMCIUseFastLocking) {
      // When using fast locking, the compiled code has already tried the fast case
      ObjectSynchronizer::slow_enter(h_obj, lock, THREAD);
    } else {
      ObjectSynchronizer::fast_enter(h_obj, lock, false, THREAD);
    }
  }
  TRACE_jvmci_3("%s: exiting locking slow with obj=" INTPTR_FORMAT, thread->name(), p2i(obj));
JRT_END

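// Slow-path monitor exit. This is a leaf call: no safepoint may occur and no
// exception may be thrown while the monitor is released.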
JRT_LEAF(void, JVMCIRuntime::monitorexit(JavaThread* thread, oopDesc* obj, BasicLock* lock))
  assert(thread == JavaThread::current(), "threads must correspond");
  assert(thread->last_Java_sp(), "last_Java_sp must be set");
  // monitorexit is non-blocking (leaf routine) => no exceptions can be thrown
  EXCEPTION_MARK;

#ifdef DEBUG
  if (!obj->is_oop()) {
    ResetNoHandleMark rhm;
    nmethod* method = thread->last_frame().cb()->as_nmethod_or_null();
    if (method != NULL) {
      tty->print_cr("ERROR in monitorexit in method %s wrong obj " INTPTR_FORMAT, method->name(), p2i(obj));
    }
    thread->print_stack_on(tty);
    assert(false, "invalid lock object pointer detected");
  }
#endif

  if (JVMCIUseFastLocking) {
    // When using fast locking, the compiled code has already tried the fast case
    ObjectSynchronizer::slow_exit(obj, lock, THREAD);
  } else {
    ObjectSynchronizer::fast_exit(obj, lock, THREAD);
  }
  IF_TRACE_jvmci_3 {
    char type[O_BUFLEN];
    obj->klass()->name()->as_C_string(type, O_BUFLEN);
    TRACE_jvmci_3("%s: exited locking slow case with obj=" INTPTR_FORMAT ", type=%s, mark=" INTPTR_FORMAT ", lock=" INTPTR_FORMAT, thread->name(), p2i(obj), type, p2i(obj->mark()), p2i(lock));
    tty->flush();
  }
JRT_END

JRT_ENTRY(void, JVMCIRuntime::throw_and_post_jvmti_exception(JavaThread* thread, const char* exception, const char* message))
  TempNewSymbol symbol = SymbolTable::new_symbol(exception, CHECK);
  SharedRuntime::throw_and_post_jvmti_exception(thread, symbol, message);
JRT_END

JRT_ENTRY(void, JVMCIRuntime::throw_klass_external_name_exception(JavaThread* thread, const char* exception, Klass* klass))
  ResourceMark rm(thread);
  TempNewSymbol symbol = SymbolTable::new_symbol(exception, CHECK);
  SharedRuntime::throw_and_post_jvmti_exception(thread, symbol, klass->external_name());
JRT_END

JRT_ENTRY(void, JVMCIRuntime::throw_class_cast_exception(JavaThread* thread, const char* exception, Klass* caster_klass, Klass* target_klass))
  ResourceMark rm(thread);
  const char* message = SharedRuntime::generate_class_cast_message(caster_klass, target_klass);
  TempNewSymbol symbol = SymbolTable::new_symbol(exception, CHECK);
  SharedRuntime::throw_and_post_jvmti_exception(thread, symbol, message);
JRT_END

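// Debug logging helper: prints either the object's class name and address or,
// if as_string is true and the object is a java.lang.String, its UTF-8 contents.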
JRT_LEAF(void, JVMCIRuntime::log_object(JavaThread* thread, oopDesc* obj, bool as_string, bool newline))
  ttyLocker ttyl;

  if (obj == NULL) {
    tty->print("NULL");
  } else if (obj->is_oop_or_null(true) && (!as_string || !java_lang_String::is_instance(obj))) {
    if (obj->is_oop_or_null(true)) {
      char buf[O_BUFLEN];
      tty->print("%s@" INTPTR_FORMAT, obj->klass()->name()->as_C_string(buf, O_BUFLEN), p2i(obj));
    } else {
      tty->print(INTPTR_FORMAT, p2i(obj));
    }
  } else {
    ResourceMark rm;
    assert(obj != NULL && java_lang_String::is_instance(obj), "must be");
    char *buf = java_lang_String::as_utf8_string(obj);
    tty->print_raw(buf);
  }
  if (newline) {
    tty->cr();
  }
JRT_END

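// G1 pre-barrier slow path: records the previous value of a reference field in
// the thread's SATB (snapshot-at-the-beginning) mark queue.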
JRT_LEAF(void, JVMCIRuntime::write_barrier_pre(JavaThread* thread, oopDesc* obj))
  thread->satb_mark_queue().enqueue(obj);
JRT_END

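// G1 post-barrier slow path: enqueues the card containing the updated reference
// on the thread's dirty card queue.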
JRT_LEAF(void, JVMCIRuntime::write_barrier_post(JavaThread* thread, void* card_addr))
  thread->dirty_card_queue().enqueue(card_addr);
JRT_END

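// Debug check that both parent and child oops lie within the heap; prints the
// offending object(s) and returns false if either does not.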
JRT_LEAF(jboolean, JVMCIRuntime::validate_object(JavaThread* thread, oopDesc* parent, oopDesc* child))
  bool ret = true;
  if (!Universe::heap()->is_in_closed_subset(parent)) {
    tty->print_cr("Parent Object " INTPTR_FORMAT " not in heap", p2i(parent));
    parent->print();
    ret = false;
  }
  if (!Universe::heap()->is_in_closed_subset(child)) {
    tty->print_cr("Child Object " INTPTR_FORMAT " not in heap", p2i(child));
    child->print();
    ret = false;
  }
  return (jint)ret;
JRT_END

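// Reports a fatal VM error on behalf of compiled code. 'where' and 'format' are
// raw addresses of C strings passed as jlongs; 'value' is substituted into the
// format string to build the detail message.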
JRT_ENTRY(void, JVMCIRuntime::vm_error(JavaThread* thread, jlong where, jlong format, jlong value))
  ResourceMark rm;
  const char *error_msg = where == 0L ? "<internal JVMCI error>" : (char*) (address) where;
  char *detail_msg = NULL;
  if (format != 0L) {
    const char* buf = (char*) (address) format;
    size_t detail_msg_length = strlen(buf) * 2;
    detail_msg = (char *) NEW_RESOURCE_ARRAY(u_char, detail_msg_length);
    jio_snprintf(detail_msg, detail_msg_length, buf, value);
    report_vm_error(__FILE__, __LINE__, error_msg, "%s", detail_msg);
  } else {
    report_vm_error(__FILE__, __LINE__, error_msg);
  }
JRT_END

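// Returns the thread's pending exception oop and clears the thread-local
// exception oop and pc so compiled code can take over exception dispatch.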
JRT_LEAF(oopDesc*, JVMCIRuntime::load_and_clear_exception(JavaThread* thread))
  oop exception = thread->exception_oop();
  assert(exception != NULL, "npe");
  thread->set_exception_oop(NULL);
  thread->set_exception_pc(0);
  return exception;
JRT_END

PRAGMA_DIAG_PUSH
PRAGMA_FORMAT_NONLITERAL_IGNORED
JRT_LEAF(void, JVMCIRuntime::log_printf(JavaThread* thread, oopDesc* format, jlong v1, jlong v2, jlong v3))
  ResourceMark rm;
  assert(format != NULL && java_lang_String::is_instance(format), "must be");
  char *buf = java_lang_String::as_utf8_string(format);
  tty->print((const char*)buf, v1, v2, v3);
JRT_END
PRAGMA_DIAG_POP

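// Best-effort interpretation of a raw jlong for logging: a code cache address is
// printed as its blob or method, a heap address as the oop it points to, and
// anything else as raw integer/double/char values.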
static void decipher(jlong v, bool ignoreZero) {
  if (v != 0 || !ignoreZero) {
    void* p = (void *)(address) v;
    CodeBlob* cb = CodeCache::find_blob(p);
    if (cb) {
      if (cb->is_nmethod()) {
        char buf[O_BUFLEN];
        tty->print("%s [" INTPTR_FORMAT "+" JLONG_FORMAT "]", cb->as_nmethod_or_null()->method()->name_and_sig_as_C_string(buf, O_BUFLEN), p2i(cb->code_begin()), (jlong)((address)v - cb->code_begin()));
        return;
      }
      cb->print_value_on(tty);
      return;
    }
    if (Universe::heap()->is_in(p)) {
      oop obj = oop(p);
      obj->print_value_on(tty);
      return;
    }
    tty->print(INTPTR_FORMAT " [long: " JLONG_FORMAT ", double %lf, char %c]", p2i((void *)v), (jlong)v, (jdouble)v, (char)v);
  }
}

PRAGMA_DIAG_PUSH
PRAGMA_FORMAT_NONLITERAL_IGNORED
JRT_LEAF(void, JVMCIRuntime::vm_message(jboolean vmError, jlong format, jlong v1, jlong v2, jlong v3))
  ResourceMark rm;
  const char *buf = (const char*) (address) format;
  if (vmError) {
    if (buf != NULL) {
      fatal(buf, v1, v2, v3);
    } else {
      fatal("<anonymous error>");
    }
  } else if (buf != NULL) {
    tty->print(buf, v1, v2, v3);
  } else {
    assert(v2 == 0, "v2 != 0");
    assert(v3 == 0, "v3 != 0");
    decipher(v1, false);
  }
JRT_END
PRAGMA_DIAG_POP

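// Prints a primitive value for logging, formatted according to the JavaKind type
// character; the jlong payload carries the raw bits for float and double values.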
JRT_LEAF(void, JVMCIRuntime::log_primitive(JavaThread* thread, jchar typeChar, jlong value, jboolean newline))
  union {
      jlong l;
      jdouble d;
      jfloat f;
  } uu;
  uu.l = value;
  switch (typeChar) {
    case 'z': tty->print(value == 0 ? "false" : "true"); break;
    case 'b': tty->print("%d", (jbyte) value); break;
    case 'c': tty->print("%c", (jchar) value); break;
    case 's': tty->print("%d", (jshort) value); break;
    case 'i': tty->print("%d", (jint) value); break;
    case 'f': tty->print("%f", uu.f); break;
    case 'j': tty->print(JLONG_FORMAT, value); break;
    case 'd': tty->print("%lf", uu.d); break;
    default: assert(false, "unknown typeChar"); break;
  }
  if (newline) {
    tty->cr();
  }
JRT_END

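// Returns the identity hash code of obj. This is a full JRT_ENTRY (not a leaf)
// because installing a hash in the object header can require inflating its monitor.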
JRT_ENTRY(jint, JVMCIRuntime::identity_hash_code(JavaThread* thread, oopDesc* obj))
  return (jint) obj->identity_hash();
JRT_END

JRT_ENTRY(jboolean, JVMCIRuntime::thread_is_interrupted(JavaThread* thread, oopDesc* receiver, jboolean clear_interrupted))
  // Ensure that the C++ Thread and OSThread structures aren't freed before we operate.
  // This locking requires thread_in_vm which is why this method cannot be JRT_LEAF.
  Handle receiverHandle(thread, receiver);
  MutexLockerEx ml(thread->threadObj() == (void*)receiver ? NULL : Threads_lock);
  JavaThread* receiverThread = java_lang_Thread::thread(receiverHandle());
  if (receiverThread == NULL) {
    // The other thread may exit during this process, which is ok so return false.
    return JNI_FALSE;
  } else {
    return (jint) Thread::is_interrupted(receiverThread, clear_interrupted != 0);
  }
JRT_END

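// Test-only entry point: forces deoptimization of the calling compiled frame and
// returns its argument unchanged.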
JRT_ENTRY(jint, JVMCIRuntime::test_deoptimize_call_int(JavaThread* thread, int value))
  deopt_caller();
  return value;
JRT_END

// private static JVMCIRuntime JVMCI.initializeRuntime()
JVM_ENTRY(jobject, JVM_GetJVMCIRuntime(JNIEnv *env, jclass c))
  if (!EnableJVMCI) {
    THROW_MSG_NULL(vmSymbols::java_lang_InternalError(), "JVMCI is not enabled")
  }
  JVMCIRuntime::initialize_HotSpotJVMCIRuntime(CHECK_NULL);
  jobject ret = JVMCIRuntime::get_HotSpotJVMCIRuntime_jobject(CHECK_NULL);
  return ret;
JVM_END

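// Invokes a static Java method identified by class name, method name and JVM
// signature, passing 'args' (may be NULL for a no-arg call), and returns the
// result as a Handle. For example, initialize_JVMCI() below calls:
//   callStatic("jdk/vm/ci/runtime/JVMCI", "getRuntime",
//              "()Ljdk/vm/ci/runtime/JVMCIRuntime;", NULL, CHECK);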
Handle JVMCIRuntime::callStatic(const char* className, const char* methodName, const char* signature, JavaCallArguments* args, TRAPS) {
  guarantee(!_HotSpotJVMCIRuntime_initialized, "cannot reinitialize HotSpotJVMCIRuntime");

  TempNewSymbol name = SymbolTable::new_symbol(className, CHECK_(Handle()));
  KlassHandle klass = SystemDictionary::resolve_or_fail(name, true, CHECK_(Handle()));
  TempNewSymbol runtime = SymbolTable::new_symbol(methodName, CHECK_(Handle()));
  TempNewSymbol sig = SymbolTable::new_symbol(signature, CHECK_(Handle()));
  JavaValue result(T_OBJECT);
  if (args == NULL) {
    JavaCalls::call_static(&result, klass, runtime, sig, CHECK_(Handle()));
  } else {
    JavaCalls::call_static(&result, klass, runtime, sig, args, CHECK_(Handle()));
  }
  return Handle((oop)result.get_jobject());
}

void JVMCIRuntime::initialize_HotSpotJVMCIRuntime(TRAPS) {
  if (JNIHandles::resolve(_HotSpotJVMCIRuntime_instance) == NULL) {
    ResourceMark rm;
#ifdef ASSERT
    // This should only be called in the context of the JVMCI class being initialized
    TempNewSymbol name = SymbolTable::new_symbol("jdk/vm/ci/runtime/JVMCI", CHECK);
    Klass* k = SystemDictionary::resolve_or_null(name, CHECK);
    instanceKlassHandle klass = InstanceKlass::cast(k);
    assert(klass->is_being_initialized() && klass->is_reentrant_initialization(THREAD),
           "HotSpotJVMCIRuntime initialization should only be triggered through JVMCI initialization");
#endif

    Handle result = callStatic("jdk/vm/ci/hotspot/HotSpotJVMCIRuntime",
                               "runtime",
                               "()Ljdk/vm/ci/hotspot/HotSpotJVMCIRuntime;", NULL, CHECK);
    objArrayOop trivial_prefixes = HotSpotJVMCIRuntime::trivialPrefixes(result);
    if (trivial_prefixes != NULL) {
      char** prefixes = NEW_C_HEAP_ARRAY(char*, trivial_prefixes->length(), mtCompiler);
      for (int i = 0; i < trivial_prefixes->length(); i++) {
        oop str = trivial_prefixes->obj_at(i);
        if (str == NULL) {
          THROW(vmSymbols::java_lang_NullPointerException());
        } else {
          prefixes[i] = strdup(java_lang_String::as_utf8_string(str));
        }
      }
      _trivial_prefixes = prefixes;
      _trivial_prefixes_count = trivial_prefixes->length();
    }
    _HotSpotJVMCIRuntime_initialized = true;
    _HotSpotJVMCIRuntime_instance = JNIHandles::make_global(result());
  }
}

void JVMCIRuntime::initialize_JVMCI(TRAPS) {
  if (JNIHandles::resolve(_HotSpotJVMCIRuntime_instance) == NULL) {
    callStatic("jdk/vm/ci/runtime/JVMCI",
               "getRuntime",
               "()Ljdk/vm/ci/runtime/JVMCIRuntime;", NULL, CHECK);
  }
  assert(_HotSpotJVMCIRuntime_initialized == true, "what?");
}

void JVMCIRuntime::initialize_well_known_classes(TRAPS) {
  if (JVMCIRuntime::_well_known_classes_initialized == false) {
    SystemDictionary::WKID scan = SystemDictionary::FIRST_JVMCI_WKID;
    SystemDictionary::initialize_wk_klasses_through(SystemDictionary::LAST_JVMCI_WKID, scan, CHECK);
    JVMCIJavaClasses::compute_offsets(CHECK);
    JVMCIRuntime::_well_known_classes_initialized = true;
  }
}

void JVMCIRuntime::metadata_do(void f(Metadata*)) {
  // For simplicity, the existence of HotSpotJVMCIMetaAccessContext in
  // the SystemDictionary well known classes should ensure the other
  // classes have already been loaded, so make sure their order in the
  // table enforces that.
  assert(SystemDictionary::WK_KLASS_ENUM_NAME(jdk_vm_ci_hotspot_HotSpotResolvedJavaMethodImpl) <
         SystemDictionary::WK_KLASS_ENUM_NAME(jdk_vm_ci_hotspot_HotSpotJVMCIMetaAccessContext), "must be loaded earlier");
  assert(SystemDictionary::WK_KLASS_ENUM_NAME(jdk_vm_ci_hotspot_HotSpotConstantPool) <
         SystemDictionary::WK_KLASS_ENUM_NAME(jdk_vm_ci_hotspot_HotSpotJVMCIMetaAccessContext), "must be loaded earlier");
  assert(SystemDictionary::WK_KLASS_ENUM_NAME(jdk_vm_ci_hotspot_HotSpotResolvedObjectTypeImpl) <
         SystemDictionary::WK_KLASS_ENUM_NAME(jdk_vm_ci_hotspot_HotSpotJVMCIMetaAccessContext), "must be loaded earlier");

  if (HotSpotJVMCIMetaAccessContext::klass() == NULL ||
      !HotSpotJVMCIMetaAccessContext::klass()->is_linked()) {
    // Nothing could be registered yet
    return;
  }

  // WeakReference<HotSpotJVMCIMetaAccessContext>[]
  objArrayOop allContexts = HotSpotJVMCIMetaAccessContext::allContexts();
  if (allContexts == NULL) {
    return;
  }

  // These must be loaded at this point but the linking state doesn't matter.
  assert(SystemDictionary::HotSpotResolvedJavaMethodImpl_klass() != NULL, "must be loaded");
  assert(SystemDictionary::HotSpotConstantPool_klass() != NULL, "must be loaded");
  assert(SystemDictionary::HotSpotResolvedObjectTypeImpl_klass() != NULL, "must be loaded");

  for (int i = 0; i < allContexts->length(); i++) {
    oop ref = allContexts->obj_at(i);
    if (ref != NULL) {
      oop referent = java_lang_ref_Reference::referent(ref);
      if (referent != NULL) {
        // Chunked Object[] with last element pointing to next chunk
        objArrayOop metadataRoots = HotSpotJVMCIMetaAccessContext::metadataRoots(referent);
        while (metadataRoots != NULL) {
          for (int typeIndex = 0; typeIndex < metadataRoots->length() - 1; typeIndex++) {
            oop reference = metadataRoots->obj_at(typeIndex);
            if (reference == NULL) {
              continue;
            }
            oop metadataRoot = java_lang_ref_Reference::referent(reference);
            if (metadataRoot == NULL) {
              continue;
            }
            if (metadataRoot->is_a(SystemDictionary::HotSpotResolvedJavaMethodImpl_klass())) {
              Method* method = CompilerToVM::asMethod(metadataRoot);
              f(method);
            } else if (metadataRoot->is_a(SystemDictionary::HotSpotConstantPool_klass())) {
              ConstantPool* constantPool = CompilerToVM::asConstantPool(metadataRoot);
              f(constantPool);
            } else if (metadataRoot->is_a(SystemDictionary::HotSpotResolvedObjectTypeImpl_klass())) {
              Klass* klass = CompilerToVM::asKlass(metadataRoot);
              f(klass);
            } else {
              metadataRoot->print();
              ShouldNotReachHere();
            }
          }
          metadataRoots = (objArrayOop)metadataRoots->obj_at(metadataRoots->length() - 1);
          assert(metadataRoots == NULL || metadataRoots->is_objArray(), "wrong type");
        }
      }
    }
  }
}

// private static void CompilerToVM.registerNatives()
JVM_ENTRY(void, JVM_RegisterJVMCINatives(JNIEnv *env, jclass c2vmClass))
  if (!EnableJVMCI) {
    THROW_MSG(vmSymbols::java_lang_InternalError(), "JVMCI is not enabled");
  }

#ifdef _LP64
#ifndef TARGET_ARCH_sparc
  uintptr_t heap_end = (uintptr_t) Universe::heap()->reserved_region().end();
  uintptr_t allocation_end = heap_end + ((uintptr_t)16) * 1024 * 1024 * 1024;
  guarantee(heap_end < allocation_end, "heap end too close to end of address space (might lead to erroneous TLAB allocations)");
#endif // TARGET_ARCH_sparc
#else
  fatal("check TLAB allocation code for address space conflicts");
#endif

  JVMCIRuntime::initialize_well_known_classes(CHECK);

  {
    ThreadToNativeFromVM trans(thread);
    env->RegisterNatives(c2vmClass, CompilerToVM::methods, CompilerToVM::methods_count());
  }
JVM_END

#define CHECK_WARN_ABORT_(message) THREAD); \
  if (HAS_PENDING_EXCEPTION) { \
    warning(message); \
    char buf[512]; \
    jio_snprintf(buf, 512, "Uncaught exception at %s:%d", __FILE__, __LINE__); \
    JVMCIRuntime::abort_on_pending_exception(PENDING_EXCEPTION, buf); \
    return; \
  } \
  (void)(0

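// Notifies the Java side of VM shutdown by calling the shutdown() method on the
// HotSpotJVMCIRuntime instance, if one was created.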
void JVMCIRuntime::shutdown(TRAPS) {
  if (_HotSpotJVMCIRuntime_instance != NULL) {
    _shutdown_called = true;
    HandleMark hm(THREAD);
    Handle receiver = get_HotSpotJVMCIRuntime(CHECK);
    JavaValue result(T_VOID);
    JavaCallArguments args;
    args.push_oop(receiver);
    JavaCalls::call_special(&result, receiver->klass(), vmSymbols::shutdown_method_name(), vmSymbols::void_method_signature(), &args, CHECK);
  }
}

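// Returns true if the holder class name of 'method' starts with one of the
// trivial prefixes supplied by HotSpotJVMCIRuntime (see
// initialize_HotSpotJVMCIRuntime above).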
bool JVMCIRuntime::treat_as_trivial(Method* method) {
  if (_HotSpotJVMCIRuntime_initialized) {
    for (int i = 0; i < _trivial_prefixes_count; i++) {
      if (method->method_holder()->name()->starts_with(_trivial_prefixes[i])) {
        return true;
      }
    }
  }
  return false;
}