/*
 * Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
22 * 23 */ 24 25 #include "precompiled.hpp" 26 27 #include "memory/universe.hpp" 28 #include "oops/oop.inline.hpp" 29 30 #include "classfile/stringTable.hpp" 31 #include "classfile/classLoaderData.hpp" 32 33 #include "prims/whitebox.hpp" 34 #include "prims/wbtestmethods/parserTests.hpp" 35 36 #include "runtime/thread.hpp" 37 #include "runtime/arguments.hpp" 38 #include "runtime/interfaceSupport.hpp" 39 #include "runtime/os.hpp" 40 41 #include "utilities/debug.hpp" 42 #include "utilities/macros.hpp" 43 #include "utilities/exceptions.hpp" 44 45 #if INCLUDE_ALL_GCS 46 #include "gc_implementation/g1/concurrentMark.hpp" 47 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp" 48 #include "gc_implementation/g1/heapRegionRemSet.hpp" 49 #endif // INCLUDE_ALL_GCS 50 51 #ifdef INCLUDE_NMT 52 #include "services/memTracker.hpp" 53 #endif // INCLUDE_NMT 54 55 #include "compiler/compileBroker.hpp" 56 #include "runtime/compilationPolicy.hpp" 57 58 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC 59 60 #define SIZE_T_MAX_VALUE ((size_t) -1) 61 62 bool WhiteBox::_used = false; 63 64 WB_ENTRY(jlong, WB_GetObjectAddress(JNIEnv* env, jobject o, jobject obj)) 65 return (jlong)(void*)JNIHandles::resolve(obj); 66 WB_END 67 68 WB_ENTRY(jint, WB_GetHeapOopSize(JNIEnv* env, jobject o)) 69 return heapOopSize; 70 WB_END 71 72 73 class WBIsKlassAliveClosure : public KlassClosure { 74 Symbol* _name; 75 bool _found; 76 public: 77 WBIsKlassAliveClosure(Symbol* name) : _name(name), _found(false) {} 78 79 void do_klass(Klass* k) { 80 if (_found) return; 81 Symbol* ksym = k->name(); 82 if (ksym->fast_compare(_name) == 0) { 83 _found = true; 84 } 85 } 86 87 bool found() const { 88 return _found; 89 } 90 }; 91 92 WB_ENTRY(jboolean, WB_IsClassAlive(JNIEnv* env, jobject target, jstring name)) 93 Handle h_name = JNIHandles::resolve(name); 94 if (h_name.is_null()) return false; 95 Symbol* sym = java_lang_String::as_symbol(h_name, CHECK_false); 96 TempNewSymbol tsym(sym); // Make sure to decrement reference count 
on sym on return 97 98 WBIsKlassAliveClosure closure(sym); 99 ClassLoaderDataGraph::classes_do(&closure); 100 101 return closure.found(); 102 WB_END 103 104 WB_ENTRY(jlong, WB_GetCompressedOopsMaxHeapSize(JNIEnv* env, jobject o)) { 105 return (jlong)Arguments::max_heap_for_compressed_oops(); 106 } 107 WB_END 108 109 WB_ENTRY(void, WB_PrintHeapSizes(JNIEnv* env, jobject o)) { 110 CollectorPolicy * p = Universe::heap()->collector_policy(); 111 gclog_or_tty->print_cr("Minimum heap "SIZE_FORMAT" Initial heap " 112 SIZE_FORMAT" Maximum heap "SIZE_FORMAT" Space alignment "SIZE_FORMAT" Heap alignment "SIZE_FORMAT, 113 p->min_heap_byte_size(), p->initial_heap_byte_size(), p->max_heap_byte_size(), 114 p->space_alignment(), p->heap_alignment()); 115 } 116 WB_END 117 118 #ifndef PRODUCT 119 // Forward declaration 120 void TestReservedSpace_test(); 121 void TestReserveMemorySpecial_test(); 122 void TestVirtualSpace_test(); 123 void TestMetaspaceAux_test(); 124 #endif 125 126 WB_ENTRY(void, WB_RunMemoryUnitTests(JNIEnv* env, jobject o)) 127 #ifndef PRODUCT 128 TestReservedSpace_test(); 129 TestReserveMemorySpecial_test(); 130 TestVirtualSpace_test(); 131 TestMetaspaceAux_test(); 132 #endif 133 WB_END 134 135 WB_ENTRY(void, WB_ReadFromNoaccessArea(JNIEnv* env, jobject o)) 136 size_t granularity = os::vm_allocation_granularity(); 137 ReservedHeapSpace rhs(100 * granularity, granularity, false, NULL); 138 VirtualSpace vs; 139 vs.initialize(rhs, 50 * granularity); 140 141 //Check if constraints are complied 142 if (!( UseCompressedOops && rhs.base() != NULL && 143 Universe::narrow_oop_base() != NULL && 144 Universe::narrow_oop_use_implicit_null_checks() )) { 145 tty->print_cr("WB_ReadFromNoaccessArea method is useless:\n " 146 "\tUseCompressedOops is %d\n" 147 "\trhs.base() is "PTR_FORMAT"\n" 148 "\tUniverse::narrow_oop_base() is "PTR_FORMAT"\n" 149 "\tUniverse::narrow_oop_use_implicit_null_checks() is %d", 150 UseCompressedOops, 151 rhs.base(), 152 Universe::narrow_oop_base(), 153 
Universe::narrow_oop_use_implicit_null_checks()); 154 return; 155 } 156 tty->print_cr("Reading from no access area... "); 157 tty->print_cr("*(vs.low_boundary() - rhs.noaccess_prefix() / 2 ) = %c", 158 *(vs.low_boundary() - rhs.noaccess_prefix() / 2 )); 159 WB_END 160 161 static jint wb_stress_virtual_space_resize(size_t reserved_space_size, 162 size_t magnitude, size_t iterations) { 163 size_t granularity = os::vm_allocation_granularity(); 164 ReservedHeapSpace rhs(reserved_space_size * granularity, granularity, false, NULL); 165 VirtualSpace vs; 166 if (!vs.initialize(rhs, 0)) { 167 tty->print_cr("Failed to initialize VirtualSpace. Can't proceed."); 168 return 3; 169 } 170 171 long seed = os::random(); 172 tty->print_cr("Random seed is %ld", seed); 173 os::init_random(seed); 174 175 for (size_t i = 0; i < iterations; i++) { 176 177 // Whether we will shrink or grow 178 bool shrink = os::random() % 2L == 0; 179 180 // Get random delta to resize virtual space 181 size_t delta = (size_t)os::random() % magnitude; 182 183 // If we are about to shrink virtual space below zero, then expand instead 184 if (shrink && vs.committed_size() < delta) { 185 shrink = false; 186 } 187 188 // Resizing by delta 189 if (shrink) { 190 vs.shrink_by(delta); 191 } else { 192 // If expanding fails expand_by will silently return false 193 vs.expand_by(delta, true); 194 } 195 } 196 return 0; 197 } 198 199 WB_ENTRY(jint, WB_StressVirtualSpaceResize(JNIEnv* env, jobject o, 200 jlong reserved_space_size, jlong magnitude, jlong iterations)) 201 tty->print_cr("reservedSpaceSize="JLONG_FORMAT", magnitude="JLONG_FORMAT", " 202 "iterations="JLONG_FORMAT"\n", reserved_space_size, magnitude, 203 iterations); 204 if (reserved_space_size < 0 || magnitude < 0 || iterations < 0) { 205 tty->print_cr("One of variables printed above is negative. Can't proceed.\n"); 206 return 1; 207 } 208 209 // sizeof(size_t) depends on whether OS is 32bit or 64bit. sizeof(jlong) is 210 // always 8 byte. 
That's why we should avoid overflow in case of 32bit platform. 211 if (sizeof(size_t) < sizeof(jlong)) { 212 jlong size_t_max_value = (jlong) SIZE_T_MAX_VALUE; 213 if (reserved_space_size > size_t_max_value || magnitude > size_t_max_value 214 || iterations > size_t_max_value) { 215 tty->print_cr("One of variables printed above overflows size_t. Can't proceed.\n"); 216 return 2; 217 } 218 } 219 220 return wb_stress_virtual_space_resize((size_t) reserved_space_size, 221 (size_t) magnitude, (size_t) iterations); 222 WB_END 223 224 #if INCLUDE_ALL_GCS 225 WB_ENTRY(jboolean, WB_G1IsHumongous(JNIEnv* env, jobject o, jobject obj)) 226 G1CollectedHeap* g1 = G1CollectedHeap::heap(); 227 oop result = JNIHandles::resolve(obj); 228 const HeapRegion* hr = g1->heap_region_containing(result); 229 return hr->isHumongous(); 230 WB_END 231 232 WB_ENTRY(jlong, WB_G1NumFreeRegions(JNIEnv* env, jobject o)) 233 G1CollectedHeap* g1 = G1CollectedHeap::heap(); 234 size_t nr = g1->free_regions(); 235 return (jlong)nr; 236 WB_END 237 238 WB_ENTRY(jboolean, WB_G1InConcurrentMark(JNIEnv* env, jobject o)) 239 G1CollectedHeap* g1 = G1CollectedHeap::heap(); 240 ConcurrentMark* cm = g1->concurrent_mark(); 241 return cm->concurrent_marking_in_progress(); 242 WB_END 243 244 WB_ENTRY(jint, WB_G1RegionSize(JNIEnv* env, jobject o)) 245 return (jint)HeapRegion::GrainBytes; 246 WB_END 247 #endif // INCLUDE_ALL_GCS 248 249 #if INCLUDE_NMT 250 // Alloc memory using the test memory type so that we can use that to see if 251 // NMT picks it up correctly 252 WB_ENTRY(jlong, WB_NMTMalloc(JNIEnv* env, jobject o, jlong size)) 253 jlong addr = 0; 254 255 if (MemTracker::is_on() && !MemTracker::shutdown_in_progress()) { 256 addr = (jlong)(uintptr_t)os::malloc(size, mtTest); 257 } 258 259 return addr; 260 WB_END 261 262 // Free the memory allocated by NMTAllocTest 263 WB_ENTRY(void, WB_NMTFree(JNIEnv* env, jobject o, jlong mem)) 264 os::free((void*)(uintptr_t)mem, mtTest); 265 WB_END 266 267 WB_ENTRY(jlong, 
WB_NMTReserveMemory(JNIEnv* env, jobject o, jlong size)) 268 jlong addr = 0; 269 270 if (MemTracker::is_on() && !MemTracker::shutdown_in_progress()) { 271 addr = (jlong)(uintptr_t)os::reserve_memory(size); 272 MemTracker::record_virtual_memory_type((address)addr, mtTest); 273 } 274 275 return addr; 276 WB_END 277 278 279 WB_ENTRY(void, WB_NMTCommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size)) 280 os::commit_memory((char *)(uintptr_t)addr, size, !ExecMem); 281 MemTracker::record_virtual_memory_type((address)(uintptr_t)addr, mtTest); 282 WB_END 283 284 WB_ENTRY(void, WB_NMTUncommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size)) 285 os::uncommit_memory((char *)(uintptr_t)addr, size); 286 WB_END 287 288 WB_ENTRY(void, WB_NMTReleaseMemory(JNIEnv* env, jobject o, jlong addr, jlong size)) 289 os::release_memory((char *)(uintptr_t)addr, size); 290 WB_END 291 292 // Block until the current generation of NMT data to be merged, used to reliably test the NMT feature 293 WB_ENTRY(jboolean, WB_NMTWaitForDataMerge(JNIEnv* env)) 294 295 if (!MemTracker::is_on() || MemTracker::shutdown_in_progress()) { 296 return false; 297 } 298 299 return MemTracker::wbtest_wait_for_data_merge(); 300 WB_END 301 302 WB_ENTRY(jboolean, WB_NMTIsDetailSupported(JNIEnv* env)) 303 return MemTracker::tracking_level() == MemTracker::NMT_detail; 304 WB_END 305 306 #endif // INCLUDE_NMT 307 308 static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) { 309 assert(method != NULL, "method should not be null"); 310 ThreadToNativeFromVM ttn(thread); 311 return env->FromReflectedMethod(method); 312 } 313 314 WB_ENTRY(void, WB_DeoptimizeAll(JNIEnv* env, jobject o)) 315 MutexLockerEx mu(Compile_lock); 316 CodeCache::mark_all_nmethods_for_deoptimization(); 317 VM_Deoptimize op; 318 VMThread::execute(&op); 319 WB_END 320 321 WB_ENTRY(jint, WB_DeoptimizeMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr)) 322 jmethodID jmid = 
reflected_method_to_jmid(thread, env, method); 323 int result = 0; 324 CHECK_JNI_EXCEPTION_(env, result); 325 MutexLockerEx mu(Compile_lock); 326 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 327 nmethod* code; 328 if (is_osr) { 329 int bci = InvocationEntryBci; 330 while ((code = mh->lookup_osr_nmethod_for(bci, CompLevel_none, false)) != NULL) { 331 code->mark_for_deoptimization(); 332 ++result; 333 bci = code->osr_entry_bci() + 1; 334 } 335 } else { 336 code = mh->code(); 337 } 338 if (code != NULL) { 339 code->mark_for_deoptimization(); 340 ++result; 341 } 342 result += CodeCache::mark_for_deoptimization(mh()); 343 if (result > 0) { 344 VM_Deoptimize op; 345 VMThread::execute(&op); 346 } 347 return result; 348 WB_END 349 350 WB_ENTRY(jboolean, WB_IsMethodCompiled(JNIEnv* env, jobject o, jobject method, jboolean is_osr)) 351 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 352 CHECK_JNI_EXCEPTION_(env, JNI_FALSE); 353 MutexLockerEx mu(Compile_lock); 354 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 355 nmethod* code = is_osr ? 
mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code(); 356 if (code == NULL) { 357 return JNI_FALSE; 358 } 359 return (code->is_alive() && !code->is_marked_for_deoptimization()); 360 WB_END 361 362 WB_ENTRY(jboolean, WB_IsMethodCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr)) 363 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 364 CHECK_JNI_EXCEPTION_(env, JNI_FALSE); 365 MutexLockerEx mu(Compile_lock); 366 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 367 if (is_osr) { 368 return CompilationPolicy::can_be_osr_compiled(mh, comp_level); 369 } else { 370 return CompilationPolicy::can_be_compiled(mh, comp_level); 371 } 372 WB_END 373 374 WB_ENTRY(jboolean, WB_IsMethodQueuedForCompilation(JNIEnv* env, jobject o, jobject method)) 375 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 376 CHECK_JNI_EXCEPTION_(env, JNI_FALSE); 377 MutexLockerEx mu(Compile_lock); 378 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 379 return mh->queued_for_compilation(); 380 WB_END 381 382 WB_ENTRY(jint, WB_GetMethodCompilationLevel(JNIEnv* env, jobject o, jobject method, jboolean is_osr)) 383 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 384 CHECK_JNI_EXCEPTION_(env, CompLevel_none); 385 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 386 nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code(); 387 return (code != NULL ? 
code->comp_level() : CompLevel_none); 388 WB_END 389 390 WB_ENTRY(void, WB_MakeMethodNotCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr)) 391 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 392 CHECK_JNI_EXCEPTION(env); 393 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 394 if (is_osr) { 395 mh->set_not_osr_compilable(comp_level, true /* report */, "WhiteBox"); 396 } else { 397 mh->set_not_compilable(comp_level, true /* report */, "WhiteBox"); 398 } 399 WB_END 400 401 WB_ENTRY(jint, WB_GetMethodEntryBci(JNIEnv* env, jobject o, jobject method)) 402 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 403 CHECK_JNI_EXCEPTION_(env, InvocationEntryBci); 404 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 405 nmethod* code = mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false); 406 return (code != NULL && code->is_osr_method() ? code->osr_entry_bci() : InvocationEntryBci); 407 WB_END 408 409 WB_ENTRY(jboolean, WB_TestSetDontInlineMethod(JNIEnv* env, jobject o, jobject method, jboolean value)) 410 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 411 CHECK_JNI_EXCEPTION_(env, JNI_FALSE); 412 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 413 bool result = mh->dont_inline(); 414 mh->set_dont_inline(value == JNI_TRUE); 415 return result; 416 WB_END 417 418 WB_ENTRY(jint, WB_GetCompileQueueSize(JNIEnv* env, jobject o, jint comp_level)) 419 if (comp_level == CompLevel_any) { 420 return CompileBroker::queue_size(CompLevel_full_optimization) /* C2 */ + 421 CompileBroker::queue_size(CompLevel_full_profile) /* C1 */; 422 } else { 423 return CompileBroker::queue_size(comp_level); 424 } 425 WB_END 426 427 WB_ENTRY(jboolean, WB_TestSetForceInlineMethod(JNIEnv* env, jobject o, jobject method, jboolean value)) 428 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 429 CHECK_JNI_EXCEPTION_(env, JNI_FALSE); 430 
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 431 bool result = mh->force_inline(); 432 mh->set_force_inline(value == JNI_TRUE); 433 return result; 434 WB_END 435 436 WB_ENTRY(jboolean, WB_EnqueueMethodForCompilation(JNIEnv* env, jobject o, jobject method, jint comp_level, jint bci)) 437 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 438 CHECK_JNI_EXCEPTION_(env, JNI_FALSE); 439 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 440 nmethod* nm = CompileBroker::compile_method(mh, bci, comp_level, mh, mh->invocation_count(), "WhiteBox", THREAD); 441 MutexLockerEx mu(Compile_lock); 442 return (mh->queued_for_compilation() || nm != NULL); 443 WB_END 444 445 class VM_WhiteBoxOperation : public VM_Operation { 446 public: 447 VM_WhiteBoxOperation() { } 448 VMOp_Type type() const { return VMOp_WhiteBoxOperation; } 449 bool allow_nested_vm_operations() const { return true; } 450 }; 451 452 class AlwaysFalseClosure : public BoolObjectClosure { 453 public: 454 bool do_object_b(oop p) { return false; } 455 }; 456 457 static AlwaysFalseClosure always_false; 458 459 class VM_WhiteBoxCleanMethodData : public VM_WhiteBoxOperation { 460 public: 461 VM_WhiteBoxCleanMethodData(MethodData* mdo) : _mdo(mdo) { } 462 void doit() { 463 _mdo->clean_method_data(&always_false); 464 } 465 private: 466 MethodData* _mdo; 467 }; 468 469 WB_ENTRY(void, WB_ClearMethodState(JNIEnv* env, jobject o, jobject method)) 470 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 471 CHECK_JNI_EXCEPTION(env); 472 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 473 MutexLockerEx mu(Compile_lock); 474 MethodData* mdo = mh->method_data(); 475 MethodCounters* mcs = mh->method_counters(); 476 477 if (mdo != NULL) { 478 mdo->init(); 479 ResourceMark rm; 480 int arg_count = mdo->method()->size_of_parameters(); 481 for (int i = 0; i < arg_count; i++) { 482 mdo->set_arg_modified(i, 0); 483 } 484 VM_WhiteBoxCleanMethodData op(mdo); 
485 VMThread::execute(&op); 486 } 487 488 mh->clear_not_c1_compilable(); 489 mh->clear_not_c2_compilable(); 490 mh->clear_not_c2_osr_compilable(); 491 NOT_PRODUCT(mh->set_compiled_invocation_count(0)); 492 if (mcs != NULL) { 493 mcs->backedge_counter()->init(); 494 mcs->invocation_counter()->init(); 495 mcs->set_interpreter_invocation_count(0); 496 mcs->set_interpreter_throwout_count(0); 497 498 #ifdef TIERED 499 mcs->set_rate(0.0F); 500 mh->set_prev_event_count(0); 501 mh->set_prev_time(0); 502 #endif 503 } 504 WB_END 505 506 WB_ENTRY(jboolean, WB_IsInStringTable(JNIEnv* env, jobject o, jstring javaString)) 507 ResourceMark rm(THREAD); 508 int len; 509 jchar* name = java_lang_String::as_unicode_string(JNIHandles::resolve(javaString), len, CHECK_false); 510 return (StringTable::lookup(name, len) != NULL); 511 WB_END 512 513 WB_ENTRY(void, WB_FullGC(JNIEnv* env, jobject o)) 514 Universe::heap()->collector_policy()->set_should_clear_all_soft_refs(true); 515 Universe::heap()->collect(GCCause::_last_ditch_collection); 516 WB_END 517 518 519 WB_ENTRY(void, WB_ReadReservedMemory(JNIEnv* env, jobject o)) 520 // static+volatile in order to force the read to happen 521 // (not be eliminated by the compiler) 522 static char c; 523 static volatile char* p; 524 525 p = os::reserve_memory(os::vm_allocation_granularity(), NULL, 0); 526 if (p == NULL) { 527 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Failed to reserve memory"); 528 } 529 530 c = *p; 531 WB_END 532 533 WB_ENTRY(jstring, WB_GetCPUFeatures(JNIEnv* env, jobject o)) 534 const char* cpu_features = VM_Version::cpu_features(); 535 ThreadToNativeFromVM ttn(thread); 536 jstring features_string = env->NewStringUTF(cpu_features); 537 538 CHECK_JNI_EXCEPTION_(env, NULL); 539 540 return features_string; 541 WB_END 542 543 544 WB_ENTRY(jobjectArray, WB_GetNMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr)) 545 ResourceMark rm(THREAD); 546 jmethodID jmid = reflected_method_to_jmid(thread, env, method); 547 
CHECK_JNI_EXCEPTION_(env, NULL); 548 methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); 549 nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code(); 550 jobjectArray result = NULL; 551 if (code == NULL) { 552 return result; 553 } 554 int insts_size = code->insts_size(); 555 556 ThreadToNativeFromVM ttn(thread); 557 jclass clazz = env->FindClass(vmSymbols::java_lang_Object()->as_C_string()); 558 CHECK_JNI_EXCEPTION_(env, NULL); 559 result = env->NewObjectArray(2, clazz, NULL); 560 if (result == NULL) { 561 return result; 562 } 563 564 clazz = env->FindClass(vmSymbols::java_lang_Integer()->as_C_string()); 565 CHECK_JNI_EXCEPTION_(env, NULL); 566 jmethodID constructor = env->GetMethodID(clazz, vmSymbols::object_initializer_name()->as_C_string(), vmSymbols::int_void_signature()->as_C_string()); 567 CHECK_JNI_EXCEPTION_(env, NULL); 568 jobject obj = env->NewObject(clazz, constructor, code->comp_level()); 569 CHECK_JNI_EXCEPTION_(env, NULL); 570 env->SetObjectArrayElement(result, 0, obj); 571 572 jbyteArray insts = env->NewByteArray(insts_size); 573 CHECK_JNI_EXCEPTION_(env, NULL); 574 env->SetByteArrayRegion(insts, 0, insts_size, (jbyte*) code->insts_begin()); 575 env->SetObjectArrayElement(result, 1, insts); 576 577 return result; 578 WB_END 579 580 581 WB_ENTRY(jlong, WB_GetThreadFullStackSize(JNIEnv* env, jobject o)) 582 return (jlong) Thread::current()->stack_size(); 583 WB_END 584 585 WB_ENTRY(jlong, WB_GetThreadRemainingStackSize(JNIEnv* env, jobject o)) 586 JavaThread* t = JavaThread::current(); 587 return (jlong) t->stack_available(os::current_stack_pointer()) - (jlong) StackShadowPages * os::vm_page_size(); 588 WB_END 589 590 //Some convenience methods to deal with objects from java 591 int WhiteBox::offset_for_field(const char* field_name, oop object, 592 Symbol* signature_symbol) { 593 assert(field_name != NULL && strlen(field_name) > 0, "Field name not valid"); 594 Thread* THREAD = 
Thread::current(); 595 596 //Get the class of our object 597 Klass* arg_klass = object->klass(); 598 //Turn it into an instance-klass 599 InstanceKlass* ik = InstanceKlass::cast(arg_klass); 600 601 //Create symbols to look for in the class 602 TempNewSymbol name_symbol = SymbolTable::lookup(field_name, (int) strlen(field_name), 603 THREAD); 604 605 //To be filled in with an offset of the field we're looking for 606 fieldDescriptor fd; 607 608 Klass* res = ik->find_field(name_symbol, signature_symbol, &fd); 609 if (res == NULL) { 610 tty->print_cr("Invalid layout of %s at %s", ik->external_name(), 611 name_symbol->as_C_string()); 612 fatal("Invalid layout of preloaded class"); 613 } 614 615 //fetch the field at the offset we've found 616 int dest_offset = fd.offset(); 617 618 return dest_offset; 619 } 620 621 622 const char* WhiteBox::lookup_jstring(const char* field_name, oop object) { 623 int offset = offset_for_field(field_name, object, 624 vmSymbols::string_signature()); 625 oop string = object->obj_field(offset); 626 if (string == NULL) { 627 return NULL; 628 } 629 const char* ret = java_lang_String::as_utf8_string(string); 630 return ret; 631 } 632 633 bool WhiteBox::lookup_bool(const char* field_name, oop object) { 634 int offset = 635 offset_for_field(field_name, object, vmSymbols::bool_signature()); 636 bool ret = (object->bool_field(offset) == JNI_TRUE); 637 return ret; 638 } 639 640 641 #define CC (char*) 642 643 static JNINativeMethod methods[] = { 644 {CC"getObjectAddress", CC"(Ljava/lang/Object;)J", (void*)&WB_GetObjectAddress }, 645 {CC"getHeapOopSize", CC"()I", (void*)&WB_GetHeapOopSize }, 646 {CC"isClassAlive0", CC"(Ljava/lang/String;)Z", (void*)&WB_IsClassAlive }, 647 {CC"parseCommandLine", 648 CC"(Ljava/lang/String;[Lsun/hotspot/parser/DiagnosticCommand;)[Ljava/lang/Object;", 649 (void*) &WB_ParseCommandLine 650 }, 651 {CC"getCompressedOopsMaxHeapSize", CC"()J", 652 (void*)&WB_GetCompressedOopsMaxHeapSize}, 653 {CC"printHeapSizes", CC"()V", 
(void*)&WB_PrintHeapSizes }, 654 {CC"runMemoryUnitTests", CC"()V", (void*)&WB_RunMemoryUnitTests}, 655 {CC"readFromNoaccessArea",CC"()V", (void*)&WB_ReadFromNoaccessArea}, 656 {CC"stressVirtualSpaceResize",CC"(JJJ)I", (void*)&WB_StressVirtualSpaceResize}, 657 #if INCLUDE_ALL_GCS 658 {CC"g1InConcurrentMark", CC"()Z", (void*)&WB_G1InConcurrentMark}, 659 {CC"g1IsHumongous", CC"(Ljava/lang/Object;)Z", (void*)&WB_G1IsHumongous }, 660 {CC"g1NumFreeRegions", CC"()J", (void*)&WB_G1NumFreeRegions }, 661 {CC"g1RegionSize", CC"()I", (void*)&WB_G1RegionSize }, 662 #endif // INCLUDE_ALL_GCS 663 #if INCLUDE_NMT 664 {CC"NMTMalloc", CC"(J)J", (void*)&WB_NMTMalloc }, 665 {CC"NMTFree", CC"(J)V", (void*)&WB_NMTFree }, 666 {CC"NMTReserveMemory", CC"(J)J", (void*)&WB_NMTReserveMemory }, 667 {CC"NMTCommitMemory", CC"(JJ)V", (void*)&WB_NMTCommitMemory }, 668 {CC"NMTUncommitMemory", CC"(JJ)V", (void*)&WB_NMTUncommitMemory }, 669 {CC"NMTReleaseMemory", CC"(JJ)V", (void*)&WB_NMTReleaseMemory }, 670 {CC"NMTWaitForDataMerge", CC"()Z", (void*)&WB_NMTWaitForDataMerge}, 671 {CC"NMTIsDetailSupported",CC"()Z", (void*)&WB_NMTIsDetailSupported}, 672 #endif // INCLUDE_NMT 673 {CC"deoptimizeAll", CC"()V", (void*)&WB_DeoptimizeAll }, 674 {CC"deoptimizeMethod", CC"(Ljava/lang/reflect/Executable;Z)I", 675 (void*)&WB_DeoptimizeMethod }, 676 {CC"isMethodCompiled", CC"(Ljava/lang/reflect/Executable;Z)Z", 677 (void*)&WB_IsMethodCompiled }, 678 {CC"isMethodCompilable", CC"(Ljava/lang/reflect/Executable;IZ)Z", 679 (void*)&WB_IsMethodCompilable}, 680 {CC"isMethodQueuedForCompilation", 681 CC"(Ljava/lang/reflect/Executable;)Z", (void*)&WB_IsMethodQueuedForCompilation}, 682 {CC"makeMethodNotCompilable", 683 CC"(Ljava/lang/reflect/Executable;IZ)V", (void*)&WB_MakeMethodNotCompilable}, 684 {CC"testSetDontInlineMethod", 685 CC"(Ljava/lang/reflect/Executable;Z)Z", (void*)&WB_TestSetDontInlineMethod}, 686 {CC"getMethodCompilationLevel", 687 CC"(Ljava/lang/reflect/Executable;Z)I", (void*)&WB_GetMethodCompilationLevel}, 
688 {CC"getMethodEntryBci", 689 CC"(Ljava/lang/reflect/Executable;)I", (void*)&WB_GetMethodEntryBci}, 690 {CC"getCompileQueueSize", 691 CC"(I)I", (void*)&WB_GetCompileQueueSize}, 692 {CC"testSetForceInlineMethod", 693 CC"(Ljava/lang/reflect/Executable;Z)Z", (void*)&WB_TestSetForceInlineMethod}, 694 {CC"enqueueMethodForCompilation", 695 CC"(Ljava/lang/reflect/Executable;II)Z", (void*)&WB_EnqueueMethodForCompilation}, 696 {CC"clearMethodState", 697 CC"(Ljava/lang/reflect/Executable;)V", (void*)&WB_ClearMethodState}, 698 {CC"isInStringTable", CC"(Ljava/lang/String;)Z", (void*)&WB_IsInStringTable }, 699 {CC"fullGC", CC"()V", (void*)&WB_FullGC }, 700 {CC"readReservedMemory", CC"()V", (void*)&WB_ReadReservedMemory }, 701 {CC"getCPUFeatures", CC"()Ljava/lang/String;", (void*)&WB_GetCPUFeatures }, 702 {CC"getNMethod", CC"(Ljava/lang/reflect/Executable;Z)[Ljava/lang/Object;", 703 (void*)&WB_GetNMethod }, 704 {CC"getThreadFullStackSize", CC"()J", (void*)&WB_GetThreadFullStackSize }, 705 {CC"getThreadRemainingStackSize", CC"()J", (void*)&WB_GetThreadRemainingStackSize }, 706 }; 707 708 #undef CC 709 710 JVM_ENTRY(void, JVM_RegisterWhiteBoxMethods(JNIEnv* env, jclass wbclass)) 711 { 712 if (WhiteBoxAPI) { 713 // Make sure that wbclass is loaded by the null classloader 714 instanceKlassHandle ikh = instanceKlassHandle(JNIHandles::resolve(wbclass)->klass()); 715 Handle loader(ikh->class_loader()); 716 if (loader.is_null()) { 717 ResourceMark rm; 718 ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI 719 bool result = true; 720 // one by one registration natives for exception catching 721 jclass exceptionKlass = env->FindClass(vmSymbols::java_lang_NoSuchMethodError()->as_C_string()); 722 CHECK_JNI_EXCEPTION(env); 723 for (int i = 0, n = sizeof(methods) / sizeof(methods[0]); i < n; ++i) { 724 if (env->RegisterNatives(wbclass, methods + i, 1) != 0) { 725 result = false; 726 jthrowable throwable_obj = env->ExceptionOccurred(); 727 if (throwable_obj != NULL) { 728 
env->ExceptionClear(); 729 if (env->IsInstanceOf(throwable_obj, exceptionKlass)) { 730 // j.l.NoSuchMethodError is thrown when a method can't be found or a method is not native 731 // ignoring the exception 732 tty->print_cr("Warning: 'NoSuchMethodError' on register of sun.hotspot.WhiteBox::%s%s", methods[i].name, methods[i].signature); 733 } 734 } else { 735 // register is failed w/o exception or w/ unexpected exception 736 tty->print_cr("Warning: unexpected error on register of sun.hotspot.WhiteBox::%s%s. All methods will be unregistered", methods[i].name, methods[i].signature); 737 env->UnregisterNatives(wbclass); 738 break; 739 } 740 } 741 } 742 743 if (result) { 744 WhiteBox::set_used(); 745 } 746 } 747 } 748 } 749 JVM_END