292
// Returns the G1 heap region size (HeapRegion::GrainBytes) as a jint so
// Java-side tests can reason about region boundaries.
WB_ENTRY(jint, WB_G1RegionSize(JNIEnv* env, jobject o))
  return (jint)HeapRegion::GrainBytes;
WB_END
296 #endif // INCLUDE_ALL_GCS
297
298 #if INCLUDE_NMT
299 // Alloc memory using the test memory type so that we can use that to see if
300 // NMT picks it up correctly
301 WB_ENTRY(jlong, WB_NMTMalloc(JNIEnv* env, jobject o, jlong size))
302 jlong addr = 0;
303 addr = (jlong)(uintptr_t)os::malloc(size, mtTest);
304 return addr;
305 WB_END
306
307 // Alloc memory with pseudo call stack. The test can create psudo malloc
308 // allocation site to stress the malloc tracking.
309 WB_ENTRY(jlong, WB_NMTMallocWithPseudoStack(JNIEnv* env, jobject o, jlong size, jint pseudo_stack))
310 address pc = (address)(size_t)pseudo_stack;
311 NativeCallStack stack(&pc, 1);
312 return (jlong)os::malloc(size, mtTest, stack);
313 WB_END
314
// Free the memory allocated by NMTAllocTest
WB_ENTRY(void, WB_NMTFree(JNIEnv* env, jobject o, jlong mem))
  // mem is a jlong-encoded address previously returned by an NMT malloc entry.
  os::free((void*)(uintptr_t)mem, mtTest);
WB_END
319
320 WB_ENTRY(jlong, WB_NMTReserveMemory(JNIEnv* env, jobject o, jlong size))
321 jlong addr = 0;
322
323 addr = (jlong)(uintptr_t)os::reserve_memory(size);
324 MemTracker::record_virtual_memory_type((address)addr, mtTest);
325
326 return addr;
327 WB_END
328
329
WB_ENTRY(void, WB_NMTCommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  // Commit the given range as non-executable, then tag it with the test
  // memory type so NMT accounts the committed pages under mtTest.
  os::commit_memory((char *)(uintptr_t)addr, size, !ExecMem);
  MemTracker::record_virtual_memory_type((address)(uintptr_t)addr, mtTest);
WB_END
334
WB_ENTRY(void, WB_NMTUncommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  // Uncommit the given range; the underlying reservation stays in place.
  os::uncommit_memory((char *)(uintptr_t)addr, size);
WB_END
338
WB_ENTRY(void, WB_NMTReleaseMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  // Release the reservation entirely (counterpart of WB_NMTReserveMemory).
  os::release_memory((char *)(uintptr_t)addr, size);
WB_END
342
WB_ENTRY(jboolean, WB_NMTIsDetailSupported(JNIEnv* env))
  // True only when NMT is currently running at detail tracking level.
  return MemTracker::tracking_level() == NMT_detail;
WB_END
346
// Stress one chain of the malloc-site hash table by creating 'num' pseudo
// allocation sites. Advancing the fake pc by hash_buckets() each iteration
// is intended to make every site hash to the same bucket — assumes the
// table hashes on pc modulo the bucket count (verify against
// MallocSiteTable's hash function).
WB_ENTRY(void, WB_NMTOverflowHashBucket(JNIEnv* env, jobject o, jlong num))
  address pc = (address)1;
  for (jlong index = 0; index < num; index ++) {
    NativeCallStack stack(&pc, 1);
    os::malloc(0, mtTest, stack);
    pc += MallocSiteTable::hash_buckets();
  }
WB_END
355
// Verifies the NMT level state machine: levels may only be lowered, never
// raised. Returns true when every transition behaved as expected. The
// assertion order below IS the test — do not reorder.
WB_ENTRY(jboolean, WB_NMTChangeTrackingLevel(JNIEnv* env))
  // Test that we can downgrade NMT levels but not upgrade them.
  if (MemTracker::tracking_level() == NMT_off) {
    // Off is terminal: transitioning to off must leave it off.
    MemTracker::transition_to(NMT_off);
    return MemTracker::tracking_level() == NMT_off;
  } else {
    assert(MemTracker::tracking_level() == NMT_detail, "Should start out as detail tracking");
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_summary, "Should be summary now");

    // Can't go to detail once NMT is set to summary.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_summary, "Should still be summary now");

    // Shutdown sets tracking level to minimal.
    MemTracker::shutdown();
    assert(MemTracker::tracking_level() == NMT_minimal, "Should be minimal now");

    // Once the tracking level is minimal, we cannot increase to summary.
    // The code ignores this request instead of asserting because if the malloc site
    // table overflows in another thread, it tries to change the code to summary.
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");

    // Really can never go up to detail, verify that the code would never do this.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");
    return MemTracker::tracking_level() == NMT_minimal;
  }
WB_END
386 #endif // INCLUDE_NMT
387
// Converts a java.lang.reflect.Executable object into a jmethodID.
// The thread must leave the VM state before making the JNI call.
static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) {
  assert(method != NULL, "method should not be null");
  // RAII thread-state transition to native; must precede FromReflectedMethod.
  ThreadToNativeFromVM ttn(thread);
  return env->FromReflectedMethod(method);
}
393
WB_ENTRY(void, WB_DeoptimizeAll(JNIEnv* env, jobject o))
  // Under the Compile_lock, mark every nmethod for deoptimization, then run
  // the actual deoptimization as a VM operation on the VM thread.
  MutexLockerEx mu(Compile_lock);
  CodeCache::mark_all_nmethods_for_deoptimization();
  VM_Deoptimize op;
  VMThread::execute(&op);
WB_END
400
401 WB_ENTRY(jint, WB_DeoptimizeMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
402 jmethodID jmid = reflected_method_to_jmid(thread, env, method);
403 int result = 0;
404 CHECK_JNI_EXCEPTION_(env, result);
405 MutexLockerEx mu(Compile_lock);
964 {CC"getCompressedOopsMaxHeapSize", CC"()J",
965 (void*)&WB_GetCompressedOopsMaxHeapSize},
966 {CC"printHeapSizes", CC"()V", (void*)&WB_PrintHeapSizes },
967 {CC"runMemoryUnitTests", CC"()V", (void*)&WB_RunMemoryUnitTests},
968 {CC"readFromNoaccessArea",CC"()V", (void*)&WB_ReadFromNoaccessArea},
969 {CC"stressVirtualSpaceResize",CC"(JJJ)I", (void*)&WB_StressVirtualSpaceResize},
970 #if INCLUDE_ALL_GCS
971 {CC"g1InConcurrentMark", CC"()Z", (void*)&WB_G1InConcurrentMark},
972 {CC"g1IsHumongous", CC"(Ljava/lang/Object;)Z", (void*)&WB_G1IsHumongous },
973 {CC"g1NumFreeRegions", CC"()J", (void*)&WB_G1NumFreeRegions },
974 {CC"g1RegionSize", CC"()I", (void*)&WB_G1RegionSize },
975 #endif // INCLUDE_ALL_GCS
976 #if INCLUDE_NMT
977 {CC"NMTMalloc", CC"(J)J", (void*)&WB_NMTMalloc },
978 {CC"NMTMallocWithPseudoStack", CC"(JI)J", (void*)&WB_NMTMallocWithPseudoStack},
979 {CC"NMTFree", CC"(J)V", (void*)&WB_NMTFree },
980 {CC"NMTReserveMemory", CC"(J)J", (void*)&WB_NMTReserveMemory },
981 {CC"NMTCommitMemory", CC"(JJ)V", (void*)&WB_NMTCommitMemory },
982 {CC"NMTUncommitMemory", CC"(JJ)V", (void*)&WB_NMTUncommitMemory },
983 {CC"NMTReleaseMemory", CC"(JJ)V", (void*)&WB_NMTReleaseMemory },
984 {CC"NMTOverflowHashBucket", CC"(J)V", (void*)&WB_NMTOverflowHashBucket},
985 {CC"NMTIsDetailSupported",CC"()Z", (void*)&WB_NMTIsDetailSupported},
986 {CC"NMTChangeTrackingLevel", CC"()Z", (void*)&WB_NMTChangeTrackingLevel},
987 #endif // INCLUDE_NMT
988 {CC"deoptimizeAll", CC"()V", (void*)&WB_DeoptimizeAll },
989 {CC"deoptimizeMethod", CC"(Ljava/lang/reflect/Executable;Z)I",
990 (void*)&WB_DeoptimizeMethod },
991 {CC"isMethodCompiled", CC"(Ljava/lang/reflect/Executable;Z)Z",
992 (void*)&WB_IsMethodCompiled },
993 {CC"isMethodCompilable", CC"(Ljava/lang/reflect/Executable;IZ)Z",
994 (void*)&WB_IsMethodCompilable},
995 {CC"isMethodQueuedForCompilation",
996 CC"(Ljava/lang/reflect/Executable;)Z", (void*)&WB_IsMethodQueuedForCompilation},
997 {CC"makeMethodNotCompilable",
998 CC"(Ljava/lang/reflect/Executable;IZ)V", (void*)&WB_MakeMethodNotCompilable},
999 {CC"testSetDontInlineMethod",
1000 CC"(Ljava/lang/reflect/Executable;Z)Z", (void*)&WB_TestSetDontInlineMethod},
1001 {CC"getMethodCompilationLevel",
1002 CC"(Ljava/lang/reflect/Executable;Z)I", (void*)&WB_GetMethodCompilationLevel},
1003 {CC"getMethodEntryBci",
1004 CC"(Ljava/lang/reflect/Executable;)I", (void*)&WB_GetMethodEntryBci},
1005 {CC"getCompileQueueSize",
1006 CC"(I)I", (void*)&WB_GetCompileQueueSize},
|
292
// Returns the G1 heap region size (HeapRegion::GrainBytes) as a jint so
// Java-side tests can reason about region boundaries.
WB_ENTRY(jint, WB_G1RegionSize(JNIEnv* env, jobject o))
  return (jint)HeapRegion::GrainBytes;
WB_END
296 #endif // INCLUDE_ALL_GCS
297
298 #if INCLUDE_NMT
299 // Alloc memory using the test memory type so that we can use that to see if
300 // NMT picks it up correctly
301 WB_ENTRY(jlong, WB_NMTMalloc(JNIEnv* env, jobject o, jlong size))
302 jlong addr = 0;
303 addr = (jlong)(uintptr_t)os::malloc(size, mtTest);
304 return addr;
305 WB_END
306
// Alloc memory with a pseudo call stack. The test can create a pseudo malloc
// allocation site to stress the malloc tracking.
WB_ENTRY(jlong, WB_NMTMallocWithPseudoStack(JNIEnv* env, jobject o, jlong size, jint pseudo_stack))
  // Reinterpret the jint as a fake pc: each distinct value yields a distinct
  // single-frame allocation site.
  address pc = (address)(size_t)pseudo_stack;
  NativeCallStack stack(&pc, 1);
  return (jlong)(uintptr_t)os::malloc(size, mtTest, stack);
WB_END
314
// Free the memory allocated by NMTAllocTest
WB_ENTRY(void, WB_NMTFree(JNIEnv* env, jobject o, jlong mem))
  // mem is a jlong-encoded address previously returned by an NMT malloc entry.
  os::free((void*)(uintptr_t)mem, mtTest);
WB_END
319
320 WB_ENTRY(jlong, WB_NMTReserveMemory(JNIEnv* env, jobject o, jlong size))
321 jlong addr = 0;
322
323 addr = (jlong)(uintptr_t)os::reserve_memory(size);
324 MemTracker::record_virtual_memory_type((address)addr, mtTest);
325
326 return addr;
327 WB_END
328
329
WB_ENTRY(void, WB_NMTCommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  // Commit the given range as non-executable, then tag it with the test
  // memory type so NMT accounts the committed pages under mtTest.
  os::commit_memory((char *)(uintptr_t)addr, size, !ExecMem);
  MemTracker::record_virtual_memory_type((address)(uintptr_t)addr, mtTest);
WB_END
334
WB_ENTRY(void, WB_NMTUncommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  // Uncommit the given range; the underlying reservation stays in place.
  os::uncommit_memory((char *)(uintptr_t)addr, size);
WB_END
338
WB_ENTRY(void, WB_NMTReleaseMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  // Release the reservation entirely (counterpart of WB_NMTReserveMemory).
  os::release_memory((char *)(uintptr_t)addr, size);
WB_END
342
WB_ENTRY(jboolean, WB_NMTIsDetailSupported(JNIEnv* env))
  // True only when NMT is currently running at detail tracking level.
  return MemTracker::tracking_level() == NMT_detail;
WB_END
346
// Verifies the NMT level state machine: levels may only be lowered, never
// raised. Returns true when every transition behaved as expected. The
// assertion order below IS the test — do not reorder.
WB_ENTRY(jboolean, WB_NMTChangeTrackingLevel(JNIEnv* env))
  // Test that we can downgrade NMT levels but not upgrade them.
  if (MemTracker::tracking_level() == NMT_off) {
    // Off is terminal: transitioning to off must leave it off.
    MemTracker::transition_to(NMT_off);
    return MemTracker::tracking_level() == NMT_off;
  } else {
    assert(MemTracker::tracking_level() == NMT_detail, "Should start out as detail tracking");
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_summary, "Should be summary now");

    // Can't go to detail once NMT is set to summary.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_summary, "Should still be summary now");

    // Shutdown sets tracking level to minimal.
    MemTracker::shutdown();
    assert(MemTracker::tracking_level() == NMT_minimal, "Should be minimal now");

    // Once the tracking level is minimal, we cannot increase to summary.
    // The code ignores this request instead of asserting because if the malloc site
    // table overflows in another thread, it tries to change the code to summary.
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");

    // Really can never go up to detail, verify that the code would never do this.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");
    return MemTracker::tracking_level() == NMT_minimal;
  }
WB_END
377
378 WB_ENTRY(jint, WB_NMTGetHashSize(JNIEnv* env, jobject o))
379 int hash_size = MallocSiteTable::hash_buckets();
380 assert(hash_size > 0, "NMT hash_size should be > 0");
381 return (jint)hash_size;
382 WB_END
383 #endif // INCLUDE_NMT
384
// Converts a java.lang.reflect.Executable object into a jmethodID.
// The thread must leave the VM state before making the JNI call.
static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) {
  assert(method != NULL, "method should not be null");
  // RAII thread-state transition to native; must precede FromReflectedMethod.
  ThreadToNativeFromVM ttn(thread);
  return env->FromReflectedMethod(method);
}
390
WB_ENTRY(void, WB_DeoptimizeAll(JNIEnv* env, jobject o))
  // Under the Compile_lock, mark every nmethod for deoptimization, then run
  // the actual deoptimization as a VM operation on the VM thread.
  MutexLockerEx mu(Compile_lock);
  CodeCache::mark_all_nmethods_for_deoptimization();
  VM_Deoptimize op;
  VMThread::execute(&op);
WB_END
397
398 WB_ENTRY(jint, WB_DeoptimizeMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
399 jmethodID jmid = reflected_method_to_jmid(thread, env, method);
400 int result = 0;
401 CHECK_JNI_EXCEPTION_(env, result);
402 MutexLockerEx mu(Compile_lock);
961 {CC"getCompressedOopsMaxHeapSize", CC"()J",
962 (void*)&WB_GetCompressedOopsMaxHeapSize},
963 {CC"printHeapSizes", CC"()V", (void*)&WB_PrintHeapSizes },
964 {CC"runMemoryUnitTests", CC"()V", (void*)&WB_RunMemoryUnitTests},
965 {CC"readFromNoaccessArea",CC"()V", (void*)&WB_ReadFromNoaccessArea},
966 {CC"stressVirtualSpaceResize",CC"(JJJ)I", (void*)&WB_StressVirtualSpaceResize},
967 #if INCLUDE_ALL_GCS
968 {CC"g1InConcurrentMark", CC"()Z", (void*)&WB_G1InConcurrentMark},
969 {CC"g1IsHumongous", CC"(Ljava/lang/Object;)Z", (void*)&WB_G1IsHumongous },
970 {CC"g1NumFreeRegions", CC"()J", (void*)&WB_G1NumFreeRegions },
971 {CC"g1RegionSize", CC"()I", (void*)&WB_G1RegionSize },
972 #endif // INCLUDE_ALL_GCS
973 #if INCLUDE_NMT
974 {CC"NMTMalloc", CC"(J)J", (void*)&WB_NMTMalloc },
975 {CC"NMTMallocWithPseudoStack", CC"(JI)J", (void*)&WB_NMTMallocWithPseudoStack},
976 {CC"NMTFree", CC"(J)V", (void*)&WB_NMTFree },
977 {CC"NMTReserveMemory", CC"(J)J", (void*)&WB_NMTReserveMemory },
978 {CC"NMTCommitMemory", CC"(JJ)V", (void*)&WB_NMTCommitMemory },
979 {CC"NMTUncommitMemory", CC"(JJ)V", (void*)&WB_NMTUncommitMemory },
980 {CC"NMTReleaseMemory", CC"(JJ)V", (void*)&WB_NMTReleaseMemory },
981 {CC"NMTIsDetailSupported",CC"()Z", (void*)&WB_NMTIsDetailSupported},
982 {CC"NMTChangeTrackingLevel", CC"()Z", (void*)&WB_NMTChangeTrackingLevel},
983 {CC"NMTGetHashSize", CC"()I", (void*)&WB_NMTGetHashSize },
984 #endif // INCLUDE_NMT
985 {CC"deoptimizeAll", CC"()V", (void*)&WB_DeoptimizeAll },
986 {CC"deoptimizeMethod", CC"(Ljava/lang/reflect/Executable;Z)I",
987 (void*)&WB_DeoptimizeMethod },
988 {CC"isMethodCompiled", CC"(Ljava/lang/reflect/Executable;Z)Z",
989 (void*)&WB_IsMethodCompiled },
990 {CC"isMethodCompilable", CC"(Ljava/lang/reflect/Executable;IZ)Z",
991 (void*)&WB_IsMethodCompilable},
992 {CC"isMethodQueuedForCompilation",
993 CC"(Ljava/lang/reflect/Executable;)Z", (void*)&WB_IsMethodQueuedForCompilation},
994 {CC"makeMethodNotCompilable",
995 CC"(Ljava/lang/reflect/Executable;IZ)V", (void*)&WB_MakeMethodNotCompilable},
996 {CC"testSetDontInlineMethod",
997 CC"(Ljava/lang/reflect/Executable;Z)Z", (void*)&WB_TestSetDontInlineMethod},
998 {CC"getMethodCompilationLevel",
999 CC"(Ljava/lang/reflect/Executable;Z)I", (void*)&WB_GetMethodCompilationLevel},
1000 {CC"getMethodEntryBci",
1001 CC"(Ljava/lang/reflect/Executable;)I", (void*)&WB_GetMethodEntryBci},
1002 {CC"getCompileQueueSize",
1003 CC"(I)I", (void*)&WB_GetCompileQueueSize},
|