1 /* 2 * Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"

#include "memory/metadataFactory.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"

#include "classfile/symbolTable.hpp"
#include "classfile/classLoaderData.hpp"

#include "prims/whitebox.hpp"
#include "prims/wbtestmethods/parserTests.hpp"

#include "runtime/arguments.hpp"
#include "runtime/interfaceSupport.hpp"
#include "runtime/os.hpp"
#include "utilities/array.hpp"
#include "utilities/debug.hpp"
#include "utilities/macros.hpp"
#include "utilities/exceptions.hpp"

#if INCLUDE_ALL_GCS
#include "gc_implementation/parallelScavenge/parallelScavengeHeap.inline.hpp"
#include "gc_implementation/g1/concurrentMark.hpp"
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/heapRegionRemSet.hpp"
#endif // INCLUDE_ALL_GCS

#if INCLUDE_NMT
#include "services/mallocSiteTable.hpp"
#include "services/memTracker.hpp"
#include "utilities/nativeCallStack.hpp"
#endif // INCLUDE_NMT

#include "compiler/compileBroker.hpp"
#include "runtime/compilationPolicy.hpp"

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

// Largest value representable in a size_t; used below to guard 32-bit
// truncation when narrowing jlong arguments.
#define SIZE_T_MAX_VALUE ((size_t) -1)

// Set to true once any WhiteBox API has been registered (see register_methods
// and its callers elsewhere in this file).
bool WhiteBox::_used = false;

// NOTE(review): the WB_ENTRY/WB_END macros (declared in whitebox.hpp) wrap
// each function as a JNI entry point; they evidently introduce the locals
// 'thread'/'THREAD' used throughout the bodies below — confirm in the header.

// Returns the raw heap address of the given object as a jlong.
// Only meaningful for testing; the address may change at the next GC.
WB_ENTRY(jlong, WB_GetObjectAddress(JNIEnv* env, jobject o, jobject obj))
  return (jlong)(void*)JNIHandles::resolve(obj);
WB_END

// Returns the size in bytes of an oop field in the heap (compressed or not).
WB_ENTRY(jint, WB_GetHeapOopSize(JNIEnv* env, jobject o))
  return heapOopSize;
WB_END


// Klass iterator that records whether any loaded class has the given name.
class WBIsKlassAliveClosure : public KlassClosure {
    Symbol* _name;
    bool _found;
public:
    WBIsKlassAliveClosure(Symbol* name) : _name(name), _found(false) {}

    void do_klass(Klass* k) {
      if (_found) return;  // already found; skip remaining klasses cheaply
      Symbol* ksym = k->name();
      if (ksym->fast_compare(_name) == 0) {
        _found = true;
      }
    }

    bool found() const {
        return _found;
    }
};

// Returns true if a class with the given name is currently loaded in any
// class loader (walks the whole ClassLoaderDataGraph).
WB_ENTRY(jboolean, WB_IsClassAlive(JNIEnv* env, jobject target, jstring name))
  Handle h_name = JNIHandles::resolve(name);
  if (h_name.is_null()) return false;
  Symbol* sym = java_lang_String::as_symbol(h_name, CHECK_false);
  TempNewSymbol tsym(sym); // Make sure to decrement reference count on sym on return

  WBIsKlassAliveClosure closure(sym);
  ClassLoaderDataGraph::classes_do(&closure);

  return closure.found();
WB_END

// Asks the VM whether the named class is known not to exist for the given
// loader (delegates to JVM_KnownToNotExist).
WB_ENTRY(jboolean, WB_ClassKnownToNotExist(JNIEnv* env, jobject o, jobject loader, jstring name))
  ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
  const char* class_name = env->GetStringUTFChars(name, NULL);
  jboolean result = JVM_KnownToNotExist(env, loader, class_name);
  env->ReleaseStringUTFChars(name, class_name);
  return result;
WB_END

// Exposes the resource lookup cache URLs for a loader (test hook).
WB_ENTRY(jobjectArray, WB_GetLookupCacheURLs(JNIEnv* env, jobject o, jobject loader))
  ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
  return JVM_GetResourceLookupCacheURLs(env, loader);
WB_END

// Exposes the resource lookup cache matches for (loader, resource name).
WB_ENTRY(jintArray, WB_GetLookupCacheMatches(JNIEnv* env, jobject o, jobject loader, jstring name))
  ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
  const char* resource_name = env->GetStringUTFChars(name, NULL);
  jintArray result = JVM_GetResourceLookupCache(env, loader, resource_name);

  env->ReleaseStringUTFChars(name, resource_name);
  return result;
WB_END

// Returns the maximum heap size usable with compressed oops on this platform.
WB_ENTRY(jlong, WB_GetCompressedOopsMaxHeapSize(JNIEnv* env, jobject o)) {
  return (jlong)Arguments::max_heap_for_compressed_oops();
}
WB_END

// Prints the collector policy's heap sizing/alignment values to the GC log.
WB_ENTRY(void, WB_PrintHeapSizes(JNIEnv* env, jobject o)) {
  CollectorPolicy * p = Universe::heap()->collector_policy();
  gclog_or_tty->print_cr("Minimum heap "SIZE_FORMAT" Initial heap "
    SIZE_FORMAT" Maximum heap "SIZE_FORMAT" Min alignment "SIZE_FORMAT" Max alignment "SIZE_FORMAT,
    p->min_heap_byte_size(), p->initial_heap_byte_size(), p->max_heap_byte_size(),
    p->space_alignment(), p->heap_alignment());
}
WB_END

#ifndef PRODUCT
// Forward declaration
// (implementations live in the corresponding runtime/memory source files)
void TestReservedSpace_test();
void TestReserveMemorySpecial_test();
void TestVirtualSpace_test();
void TestMetaspaceAux_test();
#endif

// Runs the native memory-subsystem unit tests. No-op in PRODUCT builds.
WB_ENTRY(void, WB_RunMemoryUnitTests(JNIEnv* env, jobject o))
#ifndef PRODUCT
  TestReservedSpace_test();
  TestReserveMemorySpecial_test();
  TestVirtualSpace_test();
  TestMetaspaceAux_test();
#endif
WB_END

// Deliberately reads from the no-access prefix of a reserved heap space.
// Only useful (and only performs the faulting read) when compressed oops
// with implicit null checks are active; otherwise prints why it bailed out.
WB_ENTRY(void, WB_ReadFromNoaccessArea(JNIEnv* env, jobject o))
  size_t granularity = os::vm_allocation_granularity();
  ReservedHeapSpace rhs(100 * granularity, granularity, false, NULL);
  VirtualSpace vs;
  vs.initialize(rhs, 50 * granularity);

  //Check if constraints are complied
  if (!( UseCompressedOops && rhs.base() != NULL &&
         Universe::narrow_oop_base() != NULL &&
         Universe::narrow_oop_use_implicit_null_checks() )) {
    tty->print_cr("WB_ReadFromNoaccessArea method is useless:\n "
                  "\tUseCompressedOops is %d\n"
                  "\trhs.base() is "PTR_FORMAT"\n"
                  "\tUniverse::narrow_oop_base() is "PTR_FORMAT"\n"
                  "\tUniverse::narrow_oop_use_implicit_null_checks() is %d",
                  UseCompressedOops,
                  rhs.base(),
                  Universe::narrow_oop_base(),
                  Universe::narrow_oop_use_implicit_null_checks());
    return;
  }
  tty->print_cr("Reading from no access area... ");
  // The read below is expected to trap (that is the point of the test).
  tty->print_cr("*(vs.low_boundary() - rhs.noaccess_prefix() / 2 ) = %c",
                *(vs.low_boundary() - rhs.noaccess_prefix() / 2 ));
WB_END

// Randomly grows/shrinks a VirtualSpace 'iterations' times with deltas in
// [0, magnitude). Returns 0 on success, 3 if the space cannot be initialized.
// All sizes are in units of the VM allocation granularity.
static jint wb_stress_virtual_space_resize(size_t reserved_space_size,
                                           size_t magnitude, size_t iterations) {
  size_t granularity = os::vm_allocation_granularity();
  ReservedHeapSpace rhs(reserved_space_size * granularity, granularity, false, NULL);
  VirtualSpace vs;
  if (!vs.initialize(rhs, 0)) {
    tty->print_cr("Failed to initialize VirtualSpace. Can't proceed.");
    return 3;
  }

  // Seed is printed so a failing run can be reproduced.
  long seed = os::random();
  tty->print_cr("Random seed is %ld", seed);
  os::init_random(seed);

  for (size_t i = 0; i < iterations; i++) {

    // Whether we will shrink or grow
    bool shrink = os::random() % 2L == 0;

    // Get random delta to resize virtual space
    size_t delta = (size_t)os::random() % magnitude;

    // If we are about to shrink virtual space below zero, then expand instead
    if (shrink && vs.committed_size() < delta) {
      shrink = false;
    }

    // Resizing by delta
    if (shrink) {
      vs.shrink_by(delta);
    } else {
      // If expanding fails expand_by will silently return false
      vs.expand_by(delta, true);
    }
  }
  return 0;
}

// JNI wrapper: validates the jlong arguments (non-negative, representable as
// size_t on 32-bit) before delegating to wb_stress_virtual_space_resize.
// Returns 1 for negative input, 2 for size_t overflow, else the helper's code.
WB_ENTRY(jint, WB_StressVirtualSpaceResize(JNIEnv* env, jobject o,
        jlong reserved_space_size, jlong magnitude, jlong iterations))
  tty->print_cr("reservedSpaceSize="JLONG_FORMAT", magnitude="JLONG_FORMAT", "
                "iterations="JLONG_FORMAT"\n", reserved_space_size, magnitude,
                iterations);
  if (reserved_space_size < 0 || magnitude < 0 || iterations < 0) {
    tty->print_cr("One of variables printed above is negative. Can't proceed.\n");
    return 1;
  }

  // sizeof(size_t) depends on whether OS is 32bit or 64bit. sizeof(jlong) is
  // always 8 byte. That's why we should avoid overflow in case of 32bit platform.
  if (sizeof(size_t) < sizeof(jlong)) {
    jlong size_t_max_value = (jlong) SIZE_T_MAX_VALUE;
    if (reserved_space_size > size_t_max_value || magnitude > size_t_max_value
        || iterations > size_t_max_value) {
      tty->print_cr("One of variables printed above overflows size_t. Can't proceed.\n");
      return 2;
    }
  }

  return wb_stress_virtual_space_resize((size_t) reserved_space_size,
                                        (size_t) magnitude, (size_t) iterations);
WB_END

// Returns true if 'obj' resides outside the young generation of whichever
// collector is active (G1, ParallelGC, or the generational heap).
WB_ENTRY(jboolean, WB_isObjectInOldGen(JNIEnv* env, jobject o, jobject obj))
  oop p = JNIHandles::resolve(obj);
#if INCLUDE_ALL_GCS
  if (UseG1GC) {
    G1CollectedHeap* g1 = G1CollectedHeap::heap();
    const HeapRegion* hr = g1->heap_region_containing(p);
    if (hr == NULL) {
      return false;
    }
    return !(hr->is_young());
  } else if (UseParallelGC) {
    ParallelScavengeHeap* psh = ParallelScavengeHeap::heap();
    return !psh->is_in_young(p);
  }
#endif // INCLUDE_ALL_GCS
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  return !gch->is_in_young(p);
WB_END

// Returns the heap size of 'obj' in bytes.
// NOTE(review): 'obj' is dereferenced without a null check — callers must
// pass a live object reference.
WB_ENTRY(jlong, WB_GetObjectSize(JNIEnv* env, jobject o, jobject obj))
  oop p = JNIHandles::resolve(obj);
  return p->size() * HeapWordSize;
WB_END

#if INCLUDE_ALL_GCS
// True if the G1 region containing 'obj' is a humongous region.
WB_ENTRY(jboolean, WB_G1IsHumongous(JNIEnv* env, jobject o, jobject obj))
  G1CollectedHeap* g1 = G1CollectedHeap::heap();
  oop result = JNIHandles::resolve(obj);
  const HeapRegion* hr = g1->heap_region_containing(result);
  return hr->isHumongous();
WB_END

// Number of currently free G1 regions.
WB_ENTRY(jlong, WB_G1NumFreeRegions(JNIEnv* env, jobject o))
  G1CollectedHeap* g1 = G1CollectedHeap::heap();
  size_t nr = g1->num_free_regions();
  return (jlong)nr;
WB_END

// True while G1 concurrent marking is in progress.
WB_ENTRY(jboolean, WB_G1InConcurrentMark(JNIEnv* env, jobject o))
  G1CollectedHeap* g1 = G1CollectedHeap::heap();
  ConcurrentMark* cm = g1->concurrent_mark();
  return cm->concurrent_marking_in_progress();
WB_END

// G1 region size (GrainBytes) in bytes.
WB_ENTRY(jint, WB_G1RegionSize(JNIEnv* env, jobject o))
  return (jint)HeapRegion::GrainBytes;
WB_END
#endif // INCLUDE_ALL_GCS

#if INCLUDE_NMT
// Alloc memory using the test memory type so that we can use that to see if
// NMT picks it up correctly
WB_ENTRY(jlong, WB_NMTMalloc(JNIEnv* env, jobject o, jlong size))
  jlong addr = 0;
  addr = (jlong)(uintptr_t)os::malloc(size, mtTest);
  return addr;
WB_END

// Alloc memory with pseudo call stack. The test can create psudo malloc
// allocation site to stress the malloc tracking.
WB_ENTRY(jlong, WB_NMTMallocWithPseudoStack(JNIEnv* env, jobject o, jlong size, jint pseudo_stack))
  address pc = (address)(size_t)pseudo_stack;
  NativeCallStack stack(&pc, 1);
  return (jlong)os::malloc(size, mtTest, stack);
WB_END

// Free the memory allocated by NMTAllocTest
WB_ENTRY(void, WB_NMTFree(JNIEnv* env, jobject o, jlong mem))
  os::free((void*)(uintptr_t)mem, mtTest);
WB_END

// Reserves (but does not commit) virtual memory tagged with the test type.
WB_ENTRY(jlong, WB_NMTReserveMemory(JNIEnv* env, jobject o, jlong size))
  jlong addr = 0;

  addr = (jlong)(uintptr_t)os::reserve_memory(size);
  MemTracker::record_virtual_memory_type((address)addr, mtTest);

  return addr;
WB_END


// Commits previously reserved memory and tags it with the test type.
WB_ENTRY(void, WB_NMTCommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  os::commit_memory((char *)(uintptr_t)addr, size, !ExecMem);
  MemTracker::record_virtual_memory_type((address)(uintptr_t)addr, mtTest);
WB_END

WB_ENTRY(void, WB_NMTUncommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  os::uncommit_memory((char *)(uintptr_t)addr, size);
WB_END

WB_ENTRY(void, WB_NMTReleaseMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  os::release_memory((char *)(uintptr_t)addr, size);
WB_END

// True if NMT is currently running at detail tracking level.
WB_ENTRY(jboolean, WB_NMTIsDetailSupported(JNIEnv* env))
  return MemTracker::tracking_level() == NMT_detail;
WB_END

// Performs 'num' zero-byte mallocs, each from a distinct synthetic pc spaced
// by the hash bucket count, to force collisions/overflow in the NMT malloc
// site table.
WB_ENTRY(void, WB_NMTOverflowHashBucket(JNIEnv* env, jobject o, jlong num))
  address pc = (address)1;
  for (jlong index = 0; index < num; index ++) {
    NativeCallStack stack(&pc, 1);
    os::malloc(0, mtTest, stack);
    pc += MallocSiteTable::hash_buckets();
  }
WB_END

// Exercises NMT level transitions: levels may only be lowered, never raised.
WB_ENTRY(jboolean, WB_NMTChangeTrackingLevel(JNIEnv* env))
  // Test that we can downgrade NMT levels but not upgrade them.
  if (MemTracker::tracking_level() == NMT_off) {
    MemTracker::transition_to(NMT_off);
    return MemTracker::tracking_level() == NMT_off;
  } else {
    assert(MemTracker::tracking_level() == NMT_detail, "Should start out as detail tracking");
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_summary, "Should be summary now");

    // Can't go to detail once NMT is set to summary.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_summary, "Should still be summary now");

    // Shutdown sets tracking level to minimal.
    MemTracker::shutdown();
    assert(MemTracker::tracking_level() == NMT_minimal, "Should be minimal now");

    // Once the tracking level is minimal, we cannot increase to summary.
    // The code ignores this request instead of asserting because if the malloc site
    // table overflows in another thread, it tries to change the code to summary.
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");

    // Really can never go up to detail, verify that the code would never do this.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");
    return MemTracker::tracking_level() == NMT_minimal;
  }
WB_END
#endif // INCLUDE_NMT

// Converts a java.lang.reflect method object into a jmethodID.
// Must leave the VM state to call JNI, hence ThreadToNativeFromVM.
static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) {
  assert(method != NULL, "method should not be null");
  ThreadToNativeFromVM ttn(thread);
  return env->FromReflectedMethod(method);
}

// Marks every nmethod for deoptimization and runs a VM_Deoptimize operation.
WB_ENTRY(void, WB_DeoptimizeAll(JNIEnv* env, jobject o))
  MutexLockerEx mu(Compile_lock);
  CodeCache::mark_all_nmethods_for_deoptimization();
  VM_Deoptimize op;
  VMThread::execute(&op);
WB_END

// Deoptimizes the given method (its OSR nmethods when is_osr, else its
// regular code) plus any dependent code; returns the number of marks made.
WB_ENTRY(jint, WB_DeoptimizeMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  int result = 0;
  CHECK_JNI_EXCEPTION_(env, result);   // bail out (returning 0) on pending JNI exception
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  if (is_osr) {
    result += mh->mark_osr_nmethods();
  } else if (mh->code() != NULL) {
    mh->code()->mark_for_deoptimization();
    ++result;
  }
  result += CodeCache::mark_for_deoptimization(mh());
  if (result > 0) {
    VM_Deoptimize op;
    VMThread::execute(&op);
  }
  return result;
WB_END

// True if the method currently has live compiled code (OSR or normal) that
// is not already marked for deoptimization.
WB_ENTRY(jboolean, WB_IsMethodCompiled(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
  if (code == NULL) {
    return JNI_FALSE;
  }
  return (code->is_alive() && !code->is_marked_for_deoptimization());
WB_END

// Asks the compilation policy whether the method may be (OSR-)compiled at
// the given level.
WB_ENTRY(jboolean, WB_IsMethodCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  if (is_osr) {
    return CompilationPolicy::can_be_osr_compiled(mh, comp_level);
  } else {
    return CompilationPolicy::can_be_compiled(mh, comp_level);
  }
WB_END

// True if the method is currently sitting in a compile queue.
WB_ENTRY(jboolean, WB_IsMethodQueuedForCompilation(JNIEnv* env, jobject o, jobject method))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  return mh->queued_for_compilation();
WB_END

// Returns the compilation level of the method's current (OSR or normal)
// code, or CompLevel_none if it has none.
WB_ENTRY(jint, WB_GetMethodCompilationLevel(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, CompLevel_none);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
  return (code != NULL ? code->comp_level() : CompLevel_none);
WB_END

// Marks the method not (OSR-)compilable at the given level, recording
// "WhiteBox" as the reason.
WB_ENTRY(void, WB_MakeMethodNotCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION(env);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  if (is_osr) {
    mh->set_not_osr_compilable(comp_level, true /* report */, "WhiteBox");
  } else {
    mh->set_not_compilable(comp_level, true /* report */, "WhiteBox");
  }
WB_END

// Returns the entry BCI of the method's OSR nmethod, or InvocationEntryBci
// when there is no OSR code.
WB_ENTRY(jint, WB_GetMethodEntryBci(JNIEnv* env, jobject o, jobject method))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, InvocationEntryBci);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code = mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false);
  return (code != NULL && code->is_osr_method() ? code->osr_entry_bci() : InvocationEntryBci);
WB_END

// Sets the method's dont_inline flag and returns its previous value.
WB_ENTRY(jboolean, WB_TestSetDontInlineMethod(JNIEnv* env, jobject o, jobject method, jboolean value))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  bool result = mh->dont_inline();
  mh->set_dont_inline(value == JNI_TRUE);
  return result;
WB_END

// Size of the compile queue at the given level; CompLevel_any sums the
// C1 (full profile) and C2 (full optimization) queues.
WB_ENTRY(jint, WB_GetCompileQueueSize(JNIEnv* env, jobject o, jint comp_level))
  if (comp_level == CompLevel_any) {
    return CompileBroker::queue_size(CompLevel_full_optimization) /* C2 */ +
        CompileBroker::queue_size(CompLevel_full_profile) /* C1 */;
  } else {
    return CompileBroker::queue_size(comp_level);
  }
WB_END

// Sets the method's force_inline flag and returns its previous value.
WB_ENTRY(jboolean, WB_TestSetForceInlineMethod(JNIEnv* env, jobject o, jobject method, jboolean value))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  bool result = mh->force_inline();
  mh->set_force_inline(value == JNI_TRUE);
  return result;
WB_END

// Submits the method for compilation at comp_level/bci; returns true if it
// was queued or compiled synchronously.
WB_ENTRY(jboolean, WB_EnqueueMethodForCompilation(JNIEnv* env, jobject o, jobject method, jint comp_level, jint bci))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* nm = CompileBroker::compile_method(mh, bci, comp_level, mh, mh->invocation_count(), "WhiteBox", THREAD);
  MutexLockerEx mu(Compile_lock);
  return (mh->queued_for_compilation() || nm != NULL);
WB_END

// Base class for WhiteBox-initiated VM operations; nesting is allowed so
// they can run from within other VM ops during tests.
class VM_WhiteBoxOperation : public VM_Operation {
 public:
  VM_WhiteBoxOperation()                         { }
  VMOp_Type type()                  const        { return VMOp_WhiteBoxOperation; }
  bool allow_nested_vm_operations() const        { return true; }
};

// Closure that reports every object as dead; used to scrub all profile data.
class AlwaysFalseClosure : public BoolObjectClosure {
 public:
  bool do_object_b(oop p) { return false; }
};

static AlwaysFalseClosure always_false;

// VM operation that clears an MDO's profiling data at a safepoint.
class VM_WhiteBoxCleanMethodData : public VM_WhiteBoxOperation {
 public:
  VM_WhiteBoxCleanMethodData(MethodData* mdo) : _mdo(mdo) { }
  void doit() {
    _mdo->clean_method_data(&always_false);
  }
 private:
  MethodData* _mdo;
};

// Resets all compilation state for the method: MDO profile, not-compilable
// flags, invocation/backedge counters, and (TIERED) rate/event history.
WB_ENTRY(void, WB_ClearMethodState(JNIEnv* env, jobject o, jobject method))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION(env);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  MutexLockerEx mu(Compile_lock);
  MethodData* mdo = mh->method_data();
  MethodCounters* mcs = mh->method_counters();

  if (mdo != NULL) {
    mdo->init();
    ResourceMark rm;
    int arg_count = mdo->method()->size_of_parameters();
    for (int i = 0; i < arg_count; i++) {
      mdo->set_arg_modified(i, 0);
    }
    VM_WhiteBoxCleanMethodData op(mdo);
    VMThread::execute(&op);
  }

  mh->clear_not_c1_compilable();
  mh->clear_not_c2_compilable();
  mh->clear_not_c2_osr_compilable();
  NOT_PRODUCT(mh->set_compiled_invocation_count(0));
  if (mcs != NULL) {
    mcs->backedge_counter()->init();
    mcs->invocation_counter()->init();
    mcs->set_interpreter_invocation_count(0);
    mcs->set_interpreter_throwout_count(0);

#ifdef TIERED
    mcs->set_rate(0.0F);
    mh->set_prev_event_count(0);
    mh->set_prev_time(0);
#endif
  }
WB_END

// Reads a VM flag of type T by name via the supplied accessor ('TAt').
// Returns false for a null name or an unknown flag.
template <typename T>
static bool GetVMFlag(JavaThread* thread, JNIEnv* env, jstring name, T* value, bool (*TAt)(const char*, T*)) {
  if (name == NULL) {
    return false;
  }
  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
  const char* flag_name = env->GetStringUTFChars(name, NULL);
  bool result = (*TAt)(flag_name, value);
  env->ReleaseStringUTFChars(name, flag_name);
  return result;
}

// Writes a VM flag of type T by name via the supplied mutator ('TAtPut'),
// attributing the change to Flag::INTERNAL. Returns false for a null name
// or an unknown/unwritable flag.
template <typename T>
static bool SetVMFlag(JavaThread* thread, JNIEnv* env, jstring name, T* value, bool (*TAtPut)(const char*, T*, Flag::Flags)) {
  if (name == NULL) {
    return false;
  }
  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
  const char* flag_name = env->GetStringUTFChars(name, NULL);
  bool result = (*TAtPut)(flag_name, value, Flag::INTERNAL);
  env->ReleaseStringUTFChars(name, flag_name);
  return result;
}

// Boxes a primitive 'value' into the wrapper class named by 'name' by calling
// its static valueOf(sig) method through JNI. Returns NULL on JNI exception.
template <typename T>
static jobject box(JavaThread* thread, JNIEnv* env, Symbol* name, Symbol* sig, T value) {
  ResourceMark rm(thread);
  jclass clazz = env->FindClass(name->as_C_string());
  CHECK_JNI_EXCEPTION_(env, NULL);
  jmethodID methodID = env->GetStaticMethodID(clazz,
        vmSymbols::valueOf_name()->as_C_string(),
        sig->as_C_string());
  CHECK_JNI_EXCEPTION_(env, NULL);
  jobject result = env->CallStaticObjectMethod(clazz, methodID, value);
  CHECK_JNI_EXCEPTION_(env, NULL);
  return result;
}

// Convenience wrappers around box() for the concrete wrapper types.
static jobject booleanBox(JavaThread* thread, JNIEnv* env, jboolean value) {
  return box(thread, env, vmSymbols::java_lang_Boolean(), vmSymbols::Boolean_valueOf_signature(), value);
}
static jobject integerBox(JavaThread* thread, JNIEnv* env, jint value) {
  return box(thread, env, vmSymbols::java_lang_Integer(), vmSymbols::Integer_valueOf_signature(), value);
}
static jobject longBox(JavaThread* thread, JNIEnv* env, jlong value) {
  return box(thread, env, vmSymbols::java_lang_Long(), vmSymbols::Long_valueOf_signature(), value);
}
/* static jobject floatBox(JavaThread* thread, JNIEnv* env, jfloat value) {
  return box(thread, env, vmSymbols::java_lang_Float(), vmSymbols::Float_valueOf_signature(), value);
}*/
static jobject doubleBox(JavaThread* thread, JNIEnv* env, jdouble value) {
  return box(thread, env, vmSymbols::java_lang_Double(), vmSymbols::Double_valueOf_signature(), value);
}

// Each WB_Get*VMFlag below returns the boxed flag value, or NULL when the
// flag does not exist (so Java callers can distinguish "unset" from a value).

WB_ENTRY(jobject, WB_GetBooleanVMFlag(JNIEnv* env, jobject o, jstring name))
  bool result;
  if (GetVMFlag <bool> (thread, env, name, &result, &CommandLineFlags::boolAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return booleanBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetIntxVMFlag(JNIEnv* env, jobject o, jstring name))
  intx result;
  if (GetVMFlag <intx> (thread, env, name, &result, &CommandLineFlags::intxAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return longBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetUintxVMFlag(JNIEnv* env, jobject o, jstring name))
  uintx result;
  if (GetVMFlag <uintx> (thread, env, name, &result, &CommandLineFlags::uintxAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return longBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetUint64VMFlag(JNIEnv* env, jobject o, jstring name))
  uint64_t result;
  if (GetVMFlag <uint64_t> (thread, env, name, &result, &CommandLineFlags::uint64_tAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return longBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetDoubleVMFlag(JNIEnv* env, jobject o, jstring name))
  double result;
  if (GetVMFlag <double> (thread, env, name, &result, &CommandLineFlags::doubleAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return doubleBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jstring, WB_GetStringVMFlag(JNIEnv* env, jobject o, jstring name))
  ccstr ccstrResult;
  if (GetVMFlag <ccstr> (thread, env, name, &ccstrResult, &CommandLineFlags::ccstrAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    jstring result = env->NewStringUTF(ccstrResult);
    CHECK_JNI_EXCEPTION_(env, NULL);
    return result;
  }
  return NULL;
WB_END

// Each WB_Set*VMFlag below silently ignores unknown/unwritable flags
// (SetVMFlag's return value is deliberately not checked).

WB_ENTRY(void, WB_SetBooleanVMFlag(JNIEnv* env, jobject o, jstring name, jboolean value))
  bool result = value == JNI_TRUE ? true : false;
  SetVMFlag <bool> (thread, env, name, &result, &CommandLineFlags::boolAtPut);
WB_END

WB_ENTRY(void, WB_SetIntxVMFlag(JNIEnv* env, jobject o, jstring name, jlong value))
  intx result = value;
  SetVMFlag <intx> (thread, env, name, &result, &CommandLineFlags::intxAtPut);
WB_END

WB_ENTRY(void, WB_SetUintxVMFlag(JNIEnv* env, jobject o, jstring name, jlong value))
  uintx result = value;
  SetVMFlag <uintx> (thread, env, name, &result, &CommandLineFlags::uintxAtPut);
WB_END

WB_ENTRY(void, WB_SetUint64VMFlag(JNIEnv* env, jobject o, jstring name, jlong value))
  uint64_t result = value;
  SetVMFlag <uint64_t> (thread, env, name, &result, &CommandLineFlags::uint64_tAtPut);
WB_END

WB_ENTRY(void, WB_SetDoubleVMFlag(JNIEnv* env, jobject o, jstring name, jdouble value))
  double result = value;
  SetVMFlag <double> (thread, env, name, &result, &CommandLineFlags::doubleAtPut);
WB_END

// Sets a ccstr flag. ccstrAtPut replaces the flag's value with its own copy;
// when it signals that the previous value was heap-allocated ('needFree'),
// that old string (returned through ccstrResult) is freed here.
WB_ENTRY(void, WB_SetStringVMFlag(JNIEnv* env, jobject o, jstring name, jstring value))
  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
  const char* ccstrValue = (value == NULL) ? NULL : env->GetStringUTFChars(value, NULL);
  ccstr ccstrResult = ccstrValue;
  bool needFree;
  {
    ThreadInVMfromNative ttvfn(thread); // back to VM
    needFree = SetVMFlag <ccstr> (thread, env, name, &ccstrResult, &CommandLineFlags::ccstrAtPut);
  }
  if (value != NULL) {
    env->ReleaseStringUTFChars(value, ccstrValue);
  }
  if (needFree) {
    FREE_C_HEAP_ARRAY(char, ccstrResult, mtInternal);
  }
WB_END


// True if the given string is currently interned in the VM StringTable.
WB_ENTRY(jboolean, WB_IsInStringTable(JNIEnv* env, jobject o, jstring javaString))
  ResourceMark rm(THREAD);
  int len;
  jchar* name = java_lang_String::as_unicode_string(JNIHandles::resolve(javaString), len, CHECK_false);
  return (StringTable::lookup(name, len) != NULL);
WB_END

// Forces a full GC that also clears all soft references.
WB_ENTRY(void, WB_FullGC(JNIEnv* env, jobject o))
  Universe::heap()->collector_policy()->set_should_clear_all_soft_refs(true);
  Universe::heap()->collect(GCCause::_last_ditch_collection);
#if INCLUDE_ALL_GCS
  if (UseG1GC) {
    // Needs to be cleared explicitly for G1
    Universe::heap()->collector_policy()->set_should_clear_all_soft_refs(false);
  }
#endif // INCLUDE_ALL_GCS
WB_END

// Requests a young-generation collection (WhiteBox GC cause).
WB_ENTRY(void, WB_YoungGC(JNIEnv* env, jobject o))
  Universe::heap()->collect(GCCause::_wb_young_gc);
WB_END

// Reads from freshly reserved-but-uncommitted memory; expected to trap.
// Throws OutOfMemoryError only when the reservation itself fails.
WB_ENTRY(void, WB_ReadReservedMemory(JNIEnv* env, jobject o))
  // static+volatile in order to force the read to happen
  // (not be eliminated by the compiler)
  static char c;
  static volatile char* p;

  p = os::reserve_memory(os::vm_allocation_granularity(), NULL, 0);
  if (p == NULL) {
    THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Failed to reserve memory");
  }

  c = *p;
WB_END

// Returns the CPU feature string reported by VM_Version as a Java String.
WB_ENTRY(jstring, WB_GetCPUFeatures(JNIEnv* env, jobject o))
  const char* cpu_features = VM_Version::cpu_features();
  ThreadToNativeFromVM ttn(thread);
  jstring features_string = env->NewStringUTF(cpu_features);

  CHECK_JNI_EXCEPTION_(env, NULL);

return features_string; 774 WB_END 775 776 777 WB_ENTRY(jobjectArray, WB_GetNMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr)) 778 ResourceMark rm(THREAD); 779 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 780 CHECK_JNI_EXCEPTION_(env, NULL); 781 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 782 nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code(); 783 jobjectArray result = NULL; 784 if (code == NULL) { 785 return result; 786 } 787 int insts_size = code->insts_size(); 788 789 ThreadToNativeFromVM ttn(thread); 790 jclass clazz = env->FindClass(vmSymbols::java_lang_Object()->as_C_string()); 791 CHECK_JNI_EXCEPTION_(env, NULL); 792 result = env->NewObjectArray(2, clazz, NULL); 793 if (result == NULL) { 794 return result; 795 } 796 797 jobject obj = integerBox(thread, env, code->comp_level()); 798 CHECK_JNI_EXCEPTION_(env, NULL); 799 env->SetObjectArrayElement(result, 0, obj); 800 801 jbyteArray insts = env->NewByteArray(insts_size); 802 CHECK_JNI_EXCEPTION_(env, NULL); 803 env->SetByteArrayRegion(insts, 0, insts_size, (jbyte*) code->insts_begin()); 804 env->SetObjectArrayElement(result, 1, insts); 805 806 return result; 807 WB_END 808 809 810 int WhiteBox::array_bytes_to_length(size_t bytes) { 811 return Array<u1>::bytes_to_length(bytes); 812 } 813 814 WB_ENTRY(jlong, WB_AllocateMetaspace(JNIEnv* env, jobject wb, jobject class_loader, jlong size)) 815 if (size < 0) { 816 THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), 817 err_msg("WB_AllocateMetaspace: size is negative: " JLONG_FORMAT, size)); 818 } 819 820 oop class_loader_oop = JNIHandles::resolve(class_loader); 821 ClassLoaderData* cld = class_loader_oop != NULL 822 ? 
java_lang_ClassLoader::loader_data(class_loader_oop) 823 : ClassLoaderData::the_null_class_loader_data(); 824 825 void* metadata = MetadataFactory::new_writeable_array<u1>(cld, WhiteBox::array_bytes_to_length((size_t)size), thread); 826 827 return (jlong)(uintptr_t)metadata; 828 WB_END 829 830 WB_ENTRY(void, WB_FreeMetaspace(JNIEnv* env, jobject wb, jobject class_loader, jlong addr, jlong size)) 831 oop class_loader_oop = JNIHandles::resolve(class_loader); 832 ClassLoaderData* cld = class_loader_oop != NULL 833 ? java_lang_ClassLoader::loader_data(class_loader_oop) 834 : ClassLoaderData::the_null_class_loader_data(); 835 836 MetadataFactory::free_array(cld, (Array<u1>*)(uintptr_t)addr); 837 WB_END 838 839 WB_ENTRY(jlong, WB_IncMetaspaceCapacityUntilGC(JNIEnv* env, jobject wb, jlong inc)) 840 if (inc < 0) { 841 THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), 842 err_msg("WB_IncMetaspaceCapacityUntilGC: inc is negative: " JLONG_FORMAT, inc)); 843 } 844 845 jlong max_size_t = (jlong) ((size_t) -1); 846 if (inc > max_size_t) { 847 THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), 848 err_msg("WB_IncMetaspaceCapacityUntilGC: inc does not fit in size_t: " JLONG_FORMAT, inc)); 849 } 850 851 size_t new_cap_until_GC = 0; 852 size_t aligned_inc = align_size_down((size_t) inc, Metaspace::commit_alignment()); 853 bool success = MetaspaceGC::inc_capacity_until_GC(aligned_inc, &new_cap_until_GC); 854 if (!success) { 855 THROW_MSG_0(vmSymbols::java_lang_IllegalStateException(), 856 "WB_IncMetaspaceCapacityUntilGC: could not increase capacity until GC " 857 "due to contention with another thread"); 858 } 859 return (jlong) new_cap_until_GC; 860 WB_END 861 862 WB_ENTRY(jlong, WB_MetaspaceCapacityUntilGC(JNIEnv* env, jobject wb)) 863 return (jlong) MetaspaceGC::capacity_until_GC(); 864 WB_END 865 866 //Some convenience methods to deal with objects from java 867 int WhiteBox::offset_for_field(const char* field_name, oop object, 868 Symbol* signature_symbol) { 
// (continued) WhiteBox::offset_for_field
  assert(field_name != NULL && strlen(field_name) > 0, "Field name not valid");
  Thread* THREAD = Thread::current();

  //Get the class of our object
  Klass* arg_klass = object->klass();
  //Turn it into an instance-klass
  InstanceKlass* ik = InstanceKlass::cast(arg_klass);

  //Create symbols to look for in the class
  TempNewSymbol name_symbol = SymbolTable::lookup(field_name, (int) strlen(field_name),
                                                  THREAD);

  //To be filled in with an offset of the field we're looking for
  fieldDescriptor fd;

  // find_field fills in 'fd'; a NULL result means no such field in 'ik'.
  Klass* res = ik->find_field(name_symbol, signature_symbol, &fd);
  if (res == NULL) {
    tty->print_cr("Invalid layout of %s at %s", ik->external_name(),
        name_symbol->as_C_string());
    // A missing field is fatal here: callers only probe fields that are
    // expected to exist in preloaded classes.
    fatal("Invalid layout of preloaded class");
  }

  //fetch the field at the offset we've found
  int dest_offset = fd.offset();

  return dest_offset;
}


// Reads the String field 'field_name' of 'object' and returns it as a
// UTF-8 C string, or NULL when the field is null.
const char* WhiteBox::lookup_jstring(const char* field_name, oop object) {
  int offset = offset_for_field(field_name, object,
      vmSymbols::string_signature());
  oop string = object->obj_field(offset);
  if (string == NULL) {
    return NULL;
  }
  const char* ret = java_lang_String::as_utf8_string(string);
  return ret;
}

// Reads the boolean field 'field_name' of 'object'.
bool WhiteBox::lookup_bool(const char* field_name, oop object) {
  int offset =
      offset_for_field(field_name, object, vmSymbols::bool_signature());
  bool ret = (object->bool_field(offset) == JNI_TRUE);
  return ret;
}

// Registers 'method_count' entries of 'method_array' on 'wbclass' one at a
// time, so a single method that is missing on the Java side does not prevent
// the rest of the WhiteBox API from being installed.
void WhiteBox::register_methods(JNIEnv* env, jclass wbclass, JavaThread* thread, JNINativeMethod* method_array, int method_count) {
  ResourceMark rm;
  ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI

  // one by one registration natives for exception catching
  jclass no_such_method_error_klass = env->FindClass(vmSymbols::java_lang_NoSuchMethodError()->as_C_string());
  CHECK_JNI_EXCEPTION(env);
  for (int i = 0, n = method_count; i <
// (continued) WhiteBox::register_methods: per-entry registration loop.
      n; ++i) {
    // Skip dummy entries
    if (method_array[i].fnPtr == NULL) continue;
    if (env->RegisterNatives(wbclass, &method_array[i], 1) != 0) {
      jthrowable throwable_obj = env->ExceptionOccurred();
      if (throwable_obj != NULL) {
        env->ExceptionClear();
        if (env->IsInstanceOf(throwable_obj, no_such_method_error_klass)) {
          // NoSuchMethodError is thrown when a method can't be found or a method is not native.
          // Ignoring the exception since it is not preventing use of other WhiteBox methods.
          tty->print_cr("Warning: 'NoSuchMethodError' on register of sun.hotspot.WhiteBox::%s%s",
              method_array[i].name, method_array[i].signature);
        }
        // NOTE(review): any other pending exception type is cleared and
        // silently dropped here as well -- confirm this is intentional.
      } else {
        // Registration failed unexpectedly.
        tty->print_cr("Warning: unexpected error on register of sun.hotspot.WhiteBox::%s%s. All methods will be unregistered",
            method_array[i].name, method_array[i].signature);
        env->UnregisterNatives(wbclass);
        break;
      }
    }
  }
}

// Shorthand cast so the table below can initialize the non-const char*
// name/signature fields of JNINativeMethod from string literals.
#define CC (char*)

// Table mapping sun.hotspot.WhiteBox Java method names/signatures to the
// WB_* implementations in this file.
static JNINativeMethod methods[] = {
  {CC"getObjectAddress",   CC"(Ljava/lang/Object;)J", (void*)&WB_GetObjectAddress },
  {CC"getObjectSize",      CC"(Ljava/lang/Object;)J", (void*)&WB_GetObjectSize    },
  {CC"isObjectInOldGen",   CC"(Ljava/lang/Object;)Z", (void*)&WB_isObjectInOldGen },
  {CC"getHeapOopSize",     CC"()I",                   (void*)&WB_GetHeapOopSize   },
  {CC"isClassAlive0",      CC"(Ljava/lang/String;)Z", (void*)&WB_IsClassAlive     },
  {CC"classKnownToNotExist",
      CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)Z",(void*)&WB_ClassKnownToNotExist},
  {CC"getLookupCacheURLs", CC"(Ljava/lang/ClassLoader;)[Ljava/net/URL;", (void*)&WB_GetLookupCacheURLs},
  {CC"getLookupCacheMatches", CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)[I",
      (void*)&WB_GetLookupCacheMatches},
  {CC"parseCommandLine",
      CC"(Ljava/lang/String;[Lsun/hotspot/parser/DiagnosticCommand;)[Ljava/lang/Object;",
      (void*) &WB_ParseCommandLine
  },
  {CC"getCompressedOopsMaxHeapSize", CC"()J",
// (continued) methods[] registration table
      (void*)&WB_GetCompressedOopsMaxHeapSize},
  // Heap / virtual-space test hooks
  {CC"printHeapSizes",     CC"()V",                   (void*)&WB_PrintHeapSizes    },
  {CC"runMemoryUnitTests", CC"()V",                   (void*)&WB_RunMemoryUnitTests},
  {CC"readFromNoaccessArea",CC"()V",                  (void*)&WB_ReadFromNoaccessArea},
  {CC"stressVirtualSpaceResize",CC"(JJJ)I",           (void*)&WB_StressVirtualSpaceResize},
#if INCLUDE_ALL_GCS
  // G1-specific probes (only built when all GCs are included)
  {CC"g1InConcurrentMark", CC"()Z",                   (void*)&WB_G1InConcurrentMark},
  {CC"g1IsHumongous",      CC"(Ljava/lang/Object;)Z", (void*)&WB_G1IsHumongous     },
  {CC"g1NumFreeRegions",   CC"()J",                   (void*)&WB_G1NumFreeRegions  },
  {CC"g1RegionSize",       CC"()I",                   (void*)&WB_G1RegionSize      },
#endif // INCLUDE_ALL_GCS
#if INCLUDE_NMT
  // Native Memory Tracking test hooks
  {CC"NMTMalloc",           CC"(J)J",                 (void*)&WB_NMTMalloc          },
  {CC"NMTMallocWithPseudoStack", CC"(JI)J",           (void*)&WB_NMTMallocWithPseudoStack},
  {CC"NMTFree",             CC"(J)V",                 (void*)&WB_NMTFree            },
  {CC"NMTReserveMemory",    CC"(J)J",                 (void*)&WB_NMTReserveMemory   },
  {CC"NMTCommitMemory",     CC"(JJ)V",                (void*)&WB_NMTCommitMemory    },
  {CC"NMTUncommitMemory",   CC"(JJ)V",                (void*)&WB_NMTUncommitMemory  },
  {CC"NMTReleaseMemory",    CC"(JJ)V",                (void*)&WB_NMTReleaseMemory   },
  {CC"NMTOverflowHashBucket", CC"(J)V",               (void*)&WB_NMTOverflowHashBucket},
  {CC"NMTIsDetailSupported",CC"()Z",                  (void*)&WB_NMTIsDetailSupported},
  {CC"NMTChangeTrackingLevel", CC"()Z",               (void*)&WB_NMTChangeTrackingLevel},
#endif // INCLUDE_NMT
  // Compiler / deoptimization test hooks
  {CC"deoptimizeAll",      CC"()V",                   (void*)&WB_DeoptimizeAll     },
  {CC"deoptimizeMethod",   CC"(Ljava/lang/reflect/Executable;Z)I",
      (void*)&WB_DeoptimizeMethod  },
  {CC"isMethodCompiled",   CC"(Ljava/lang/reflect/Executable;Z)Z",
      (void*)&WB_IsMethodCompiled  },
  {CC"isMethodCompilable", CC"(Ljava/lang/reflect/Executable;IZ)Z",
      (void*)&WB_IsMethodCompilable},
  {CC"isMethodQueuedForCompilation",
      CC"(Ljava/lang/reflect/Executable;)Z", (void*)&WB_IsMethodQueuedForCompilation},
  {CC"makeMethodNotCompilable",
      CC"(Ljava/lang/reflect/Executable;IZ)V",
      (void*)&WB_MakeMethodNotCompilable},
  {CC"testSetDontInlineMethod",
      CC"(Ljava/lang/reflect/Executable;Z)Z", (void*)&WB_TestSetDontInlineMethod},
  {CC"getMethodCompilationLevel",
      CC"(Ljava/lang/reflect/Executable;Z)I", (void*)&WB_GetMethodCompilationLevel},
  {CC"getMethodEntryBci",
      CC"(Ljava/lang/reflect/Executable;)I", (void*)&WB_GetMethodEntryBci},
  {CC"getCompileQueueSize",
      CC"(I)I", (void*)&WB_GetCompileQueueSize},
  {CC"testSetForceInlineMethod",
      CC"(Ljava/lang/reflect/Executable;Z)Z", (void*)&WB_TestSetForceInlineMethod},
  {CC"enqueueMethodForCompilation",
      CC"(Ljava/lang/reflect/Executable;II)Z", (void*)&WB_EnqueueMethodForCompilation},
  {CC"clearMethodState",
      CC"(Ljava/lang/reflect/Executable;)V", (void*)&WB_ClearMethodState},
  // VM flag setters and getters
  {CC"setBooleanVMFlag",   CC"(Ljava/lang/String;Z)V",(void*)&WB_SetBooleanVMFlag},
  {CC"setIntxVMFlag",      CC"(Ljava/lang/String;J)V",(void*)&WB_SetIntxVMFlag},
  {CC"setUintxVMFlag",     CC"(Ljava/lang/String;J)V",(void*)&WB_SetUintxVMFlag},
  {CC"setUint64VMFlag",    CC"(Ljava/lang/String;J)V",(void*)&WB_SetUint64VMFlag},
  {CC"setDoubleVMFlag",    CC"(Ljava/lang/String;D)V",(void*)&WB_SetDoubleVMFlag},
  {CC"setStringVMFlag",    CC"(Ljava/lang/String;Ljava/lang/String;)V",
      (void*)&WB_SetStringVMFlag},
  {CC"getBooleanVMFlag",   CC"(Ljava/lang/String;)Ljava/lang/Boolean;",
      (void*)&WB_GetBooleanVMFlag},
  {CC"getIntxVMFlag",      CC"(Ljava/lang/String;)Ljava/lang/Long;",
      (void*)&WB_GetIntxVMFlag},
  {CC"getUintxVMFlag",     CC"(Ljava/lang/String;)Ljava/lang/Long;",
      (void*)&WB_GetUintxVMFlag},
  {CC"getUint64VMFlag",    CC"(Ljava/lang/String;)Ljava/lang/Long;",
      (void*)&WB_GetUint64VMFlag},
  {CC"getDoubleVMFlag",    CC"(Ljava/lang/String;)Ljava/lang/Double;",
      (void*)&WB_GetDoubleVMFlag},
  {CC"getStringVMFlag",    CC"(Ljava/lang/String;)Ljava/lang/String;",
      (void*)&WB_GetStringVMFlag},
  {CC"isInStringTable",    CC"(Ljava/lang/String;)Z",
// (continued) methods[] registration table
      (void*)&WB_IsInStringTable },
  // GC triggers and metaspace test hooks
  {CC"fullGC",             CC"()V",                   (void*)&WB_FullGC },
  {CC"youngGC",            CC"()V",                   (void*)&WB_YoungGC },
  {CC"readReservedMemory", CC"()V",                   (void*)&WB_ReadReservedMemory },
  {CC"allocateMetaspace",
      CC"(Ljava/lang/ClassLoader;J)J",                (void*)&WB_AllocateMetaspace },
  {CC"freeMetaspace",
      CC"(Ljava/lang/ClassLoader;JJ)V",               (void*)&WB_FreeMetaspace },
  {CC"incMetaspaceCapacityUntilGC", CC"(J)J",         (void*)&WB_IncMetaspaceCapacityUntilGC },
  {CC"metaspaceCapacityUntilGC", CC"()J",             (void*)&WB_MetaspaceCapacityUntilGC },
  {CC"getCPUFeatures",     CC"()Ljava/lang/String;",  (void*)&WB_GetCPUFeatures },
  {CC"getNMethod",         CC"(Ljava/lang/reflect/Executable;Z)[Ljava/lang/Object;",
      (void*)&WB_GetNMethod },
};

#undef CC

// Entry point that installs the WhiteBox native methods on 'wbclass'.
// Registration happens only when the WhiteBoxAPI flag is on AND the class
// was loaded by the boot (null) class loader, so arbitrary user-supplied
// classes cannot bind to these test hooks.
// NOTE(review): presumably invoked from WhiteBox.registerNatives on the
// Java side -- confirm against the caller.
JVM_ENTRY(void, JVM_RegisterWhiteBoxMethods(JNIEnv* env, jclass wbclass))
{
  if (WhiteBoxAPI) {
    // Make sure that wbclass is loaded by the null classloader
    instanceKlassHandle ikh = instanceKlassHandle(JNIHandles::resolve(wbclass)->klass());
    Handle loader(ikh->class_loader());
    if (loader.is_null()) {
      // Register the base table plus any platform/feature-specific extras.
      WhiteBox::register_methods(env, wbclass, thread, methods, sizeof(methods) / sizeof(methods[0]));
      WhiteBox::register_extended(env, wbclass, thread);
      WhiteBox::set_used();
    }
  }
}
JVM_END