# HG changeset patch # User amurillo # Date 1414779221 25200 # Fri Oct 31 11:13:41 2014 -0700 # Node ID 063338b89a56a14b5a296e5d93fe732677d283d4 # Parent 1d2ac45722d4dbfa676d28200aac7f7727603ea6 8062589: new hotspot build - hs25.40-b18 Reviewed-by: jcoomes diff --git a/make/hotspot_version b/make/hotspot_version --- a/make/hotspot_version +++ b/make/hotspot_version @@ -35,7 +35,7 @@ HS_MAJOR_VER=25 HS_MINOR_VER=40 -HS_BUILD_NUMBER=17 +HS_BUILD_NUMBER=18 JDK_MAJOR_VER=1 JDK_MINOR_VER=8 # HG changeset patch # User thartmann # Date 1413196237 -7200 # Mon Oct 13 12:30:37 2014 +0200 # Node ID c88a4554854cc2b60073402349ed62aaf8a3f4ee # Parent 063338b89a56a14b5a296e5d93fe732677d283d4 8046268: compiler/whitebox/ tests fail : must be osr_compiled Summary: Added code to 'warm up' the methods before triggering OSR compilation by executing them a limited number of times. Like this, the profile information marks the loop exit as taken and we don't add an uncommon trap. Reviewed-by: kvn, dlong, iignatyev diff --git a/test/compiler/whitebox/CompilerWhiteBoxTest.java b/test/compiler/whitebox/CompilerWhiteBoxTest.java --- a/test/compiler/whitebox/CompilerWhiteBoxTest.java +++ b/test/compiler/whitebox/CompilerWhiteBoxTest.java @@ -72,10 +72,12 @@ /** Flag for verbose output, true if {@code -Dverbose} specified */ protected static final boolean IS_VERBOSE = System.getProperty("verbose") != null; - /** count of invocation to triger compilation */ + /** invocation count to trigger compilation */ protected static final int THRESHOLD; - /** count of invocation to triger OSR compilation */ + /** invocation count to trigger OSR compilation */ protected static final long BACKEDGE_THRESHOLD; + /** invocation count to warm up method before triggering OSR compilation */ + protected static final long OSR_WARMUP = 2000; /** Value of {@code java.vm.info} (interpreted|mixed|comp mode) */ protected static final String MODE = System.getProperty("java.vm.info"); @@ -227,15 +229,19 @@ * compilation level. */ protected final void checkNotCompiled() { - if (WHITE_BOX.isMethodQueuedForCompilation(method)) { - throw new RuntimeException(method + " must not be in queue"); - } if (WHITE_BOX.isMethodCompiled(method, false)) { throw new RuntimeException(method + " must be not compiled"); } if (WHITE_BOX.getMethodCompilationLevel(method, false) != 0) { throw new RuntimeException(method + " comp_level must be == 0"); } + checkNotOsrCompiled(); + } + + protected final void checkNotOsrCompiled() { + if (WHITE_BOX.isMethodQueuedForCompilation(method)) { + throw new RuntimeException(method + " must not be in queue"); + } if (WHITE_BOX.isMethodCompiled(method, true)) { throw new RuntimeException(method + " must be not osr_compiled"); } @@ -306,12 +312,21 @@ * Waits for completion of background compilation of {@linkplain #method}. */ protected final void waitBackgroundCompilation() { + waitBackgroundCompilation(method); + } + + /** + * Waits for completion of background compilation of the given executable. 
+ * + * @param executable Executable + */ + protected static final void waitBackgroundCompilation(Executable executable) { if (!BACKGROUND_COMPILATION) { return; } final Object obj = new Object(); for (int i = 0; i < 10 - && WHITE_BOX.isMethodQueuedForCompilation(method); ++i) { + && WHITE_BOX.isMethodQueuedForCompilation(executable); ++i) { synchronized (obj) { try { obj.wait(1000); @@ -425,14 +440,14 @@ /** constructor test case */ CONSTRUCTOR_TEST(Helper.CONSTRUCTOR, Helper.CONSTRUCTOR_CALLABLE, false), /** method test case */ - METOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false), + METHOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false), /** static method test case */ STATIC_TEST(Helper.STATIC, Helper.STATIC_CALLABLE, false), /** OSR constructor test case */ OSR_CONSTRUCTOR_TEST(Helper.OSR_CONSTRUCTOR, Helper.OSR_CONSTRUCTOR_CALLABLE, true), /** OSR method test case */ - OSR_METOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true), + OSR_METHOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true), /** OSR static method test case */ OSR_STATIC_TEST(Helper.OSR_STATIC, Helper.OSR_STATIC_CALLABLE, true); @@ -494,7 +509,8 @@ = new Callable() { @Override public Integer call() throws Exception { - return new Helper(null).hashCode(); + int result = warmup(OSR_CONSTRUCTOR); + return result + new Helper(null, CompilerWhiteBoxTest.BACKEDGE_THRESHOLD).hashCode(); } }; @@ -504,7 +520,8 @@ @Override public Integer call() throws Exception { - return helper.osrMethod(); + int result = warmup(OSR_METHOD); + return result + helper.osrMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; @@ -512,10 +529,66 @@ = new Callable() { @Override public Integer call() throws Exception { - return osrStaticMethod(); + int result = warmup(OSR_STATIC); + return result + osrStaticMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; + /** + * Deoptimizes all non-osr versions of the given executable after + * compilation finished. + * + * @param e Executable + * @throws Exception + */ + private static void waitAndDeoptimize(Executable e) throws Exception { + CompilerWhiteBoxTest.waitBackgroundCompilation(e); + if (WhiteBox.getWhiteBox().isMethodQueuedForCompilation(e)) { + throw new RuntimeException(e + " must not be in queue"); + } + // Deoptimize non-osr versions of executable + WhiteBox.getWhiteBox().deoptimizeMethod(e, false); + } + + /** + * Executes the method multiple times to make sure we have + * enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. + * + * @param m Method to be executed + * @return Number of times the method was executed + * @throws Exception + */ + private static int warmup(Method m) throws Exception { + Helper helper = new Helper(); + int result = 0; + for (long i = 0; i < CompilerWhiteBoxTest.OSR_WARMUP; ++i) { + result += (int)m.invoke(helper, 1); + } + // Deoptimize non-osr versions + waitAndDeoptimize(m); + return result; + } + + /** + * Executes the constructor multiple times to make sure we + * have enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. 
+ * + * @param c Constructor to be executed + * @return Number of times the constructor was executed + * @throws Exception + */ + private static int warmup(Constructor c) throws Exception { + int result = 0; + for (long i = 0; i < CompilerWhiteBoxTest.OSR_WARMUP; ++i) { + result += c.newInstance(null, 1).hashCode(); + } + // Deoptimize non-osr versions + waitAndDeoptimize(c); + return result; + } + private static final Constructor CONSTRUCTOR; private static final Constructor OSR_CONSTRUCTOR; private static final Method METHOD; @@ -532,25 +605,24 @@ } try { OSR_CONSTRUCTOR = Helper.class.getDeclaredConstructor( - Object.class); + Object.class, long.class); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( - "exception on getting method Helper.(Object)", e); + "exception on getting method Helper.(Object, long)", e); } METHOD = getMethod("method"); STATIC = getMethod("staticMethod"); - OSR_METHOD = getMethod("osrMethod"); - OSR_STATIC = getMethod("osrStaticMethod"); + OSR_METHOD = getMethod("osrMethod", long.class); + OSR_STATIC = getMethod("osrStaticMethod", long.class); } - private static Method getMethod(String name) { + private static Method getMethod(String name, Class... parameterTypes) { try { - return Helper.class.getDeclaredMethod(name); + return Helper.class.getDeclaredMethod(name, parameterTypes); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( "exception on getting method Helper." + name, e); } - } private static int staticMethod() { @@ -561,17 +633,17 @@ return 42; } - private static int osrStaticMethod() { + private static int osrStaticMethod(long limit) { int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + for (long i = 0; i < limit; ++i) { result += staticMethod(); } return result; } - private int osrMethod() { + private int osrMethod(long limit) { int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + for (long i = 0; i < limit; ++i) { result += method(); } return result; @@ -585,9 +657,9 @@ } // for OSR constructor test case - private Helper(Object o) { + private Helper(Object o, long limit) { int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + for (long i = 0; i < limit; ++i) { result += method(); } x = result; diff --git a/test/compiler/whitebox/MakeMethodNotCompilableTest.java b/test/compiler/whitebox/MakeMethodNotCompilableTest.java --- a/test/compiler/whitebox/MakeMethodNotCompilableTest.java +++ b/test/compiler/whitebox/MakeMethodNotCompilableTest.java @@ -138,7 +138,7 @@ } compile(); - checkNotCompiled(); + checkNotOsrCompiled(); if (isCompilable()) { throw new RuntimeException(method + " must be not compilable"); } # HG changeset patch # User thartmann # Date 1413533085 -7200 # Fri Oct 17 10:04:45 2014 +0200 # Node ID 564d9799706451788113cbfc5da60b9915b8b28f # Parent c88a4554854cc2b60073402349ed62aaf8a3f4ee 8060454: [TESTBUG] Whitebox tests fail with -XX:CompileThreshold=100 Summary: Move the call to 'waitAndDeoptimize' from the warmup methods to the osr triggering methods to make sure that no non-osr compilation is in the queue after warmup. 
Reviewed-by: kvn diff --git a/test/compiler/whitebox/CompilerWhiteBoxTest.java b/test/compiler/whitebox/CompilerWhiteBoxTest.java --- a/test/compiler/whitebox/CompilerWhiteBoxTest.java +++ b/test/compiler/whitebox/CompilerWhiteBoxTest.java @@ -541,7 +541,7 @@ * @param e Executable * @throws Exception */ - private static void waitAndDeoptimize(Executable e) throws Exception { + private static void waitAndDeoptimize(Executable e) { CompilerWhiteBoxTest.waitBackgroundCompilation(e); if (WhiteBox.getWhiteBox().isMethodQueuedForCompilation(e)) { throw new RuntimeException(e + " must not be in queue"); @@ -565,8 +565,6 @@ for (long i = 0; i < CompilerWhiteBoxTest.OSR_WARMUP; ++i) { result += (int)m.invoke(helper, 1); } - // Deoptimize non-osr versions - waitAndDeoptimize(m); return result; } @@ -584,8 +582,6 @@ for (long i = 0; i < CompilerWhiteBoxTest.OSR_WARMUP; ++i) { result += c.newInstance(null, 1).hashCode(); } - // Deoptimize non-osr versions - waitAndDeoptimize(c); return result; } @@ -634,6 +630,11 @@ } private static int osrStaticMethod(long limit) { + if (limit != 1) { + // Make sure there is no compiled version after warmup + waitAndDeoptimize(OSR_STATIC); + } + // Trigger osr compilation int result = 0; for (long i = 0; i < limit; ++i) { result += staticMethod(); @@ -642,6 +643,11 @@ } private int osrMethod(long limit) { + if (limit != 1) { + // Make sure there is no compiled version after warmup + waitAndDeoptimize(OSR_METHOD); + } + // Trigger osr compilation int result = 0; for (long i = 0; i < limit; ++i) { result += method(); @@ -658,6 +664,11 @@ // for OSR constructor test case private Helper(Object o, long limit) { + if (limit != 1) { + // Make sure there is no compiled version after warmup + waitAndDeoptimize(OSR_CONSTRUCTOR); + } + // Trigger osr compilation int result = 0; for (long i = 0; i < limit; ++i) { result += method(); # HG changeset patch # User thartmann # Date 1414132529 -7200 # Fri Oct 24 08:35:29 2014 +0200 # Node ID d1e732bbe4e593862c9b201784584c043ee0b7bd # Parent 564d9799706451788113cbfc5da60b9915b8b28f 8061486: [TESTBUG] compiler/whitebox/ tests fail : must be osr_compiled (reappeared in nightlies) Summary: Call warmup code from OSR triggering method to make sure no non-OSR compilation is triggered in the loop. 
Reviewed-by: kvn diff --git a/test/compiler/whitebox/CompilerWhiteBoxTest.java b/test/compiler/whitebox/CompilerWhiteBoxTest.java --- a/test/compiler/whitebox/CompilerWhiteBoxTest.java +++ b/test/compiler/whitebox/CompilerWhiteBoxTest.java @@ -76,8 +76,6 @@ protected static final int THRESHOLD; /** invocation count to trigger OSR compilation */ protected static final long BACKEDGE_THRESHOLD; - /** invocation count to warm up method before triggering OSR compilation */ - protected static final long OSR_WARMUP = 2000; /** Value of {@code java.vm.info} (interpreted|mixed|comp mode) */ protected static final String MODE = System.getProperty("java.vm.info"); @@ -509,8 +507,7 @@ = new Callable() { @Override public Integer call() throws Exception { - int result = warmup(OSR_CONSTRUCTOR); - return result + new Helper(null, CompilerWhiteBoxTest.BACKEDGE_THRESHOLD).hashCode(); + return new Helper(null, CompilerWhiteBoxTest.BACKEDGE_THRESHOLD).hashCode(); } }; @@ -520,8 +517,7 @@ @Override public Integer call() throws Exception { - int result = warmup(OSR_METHOD); - return result + helper.osrMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); + return helper.osrMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; @@ -529,62 +525,10 @@ = new Callable() { @Override public Integer call() throws Exception { - int result = warmup(OSR_STATIC); - return result + osrStaticMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); + return osrStaticMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; - /** - * Deoptimizes all non-osr versions of the given executable after - * compilation finished. - * - * @param e Executable - * @throws Exception - */ - private static void waitAndDeoptimize(Executable e) { - CompilerWhiteBoxTest.waitBackgroundCompilation(e); - if (WhiteBox.getWhiteBox().isMethodQueuedForCompilation(e)) { - throw new RuntimeException(e + " must not be in queue"); - } - // Deoptimize non-osr versions of executable - WhiteBox.getWhiteBox().deoptimizeMethod(e, false); - } - - /** - * Executes the method multiple times to make sure we have - * enough profiling information before triggering an OSR - * compilation. Otherwise the C2 compiler may add uncommon traps. - * - * @param m Method to be executed - * @return Number of times the method was executed - * @throws Exception - */ - private static int warmup(Method m) throws Exception { - Helper helper = new Helper(); - int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.OSR_WARMUP; ++i) { - result += (int)m.invoke(helper, 1); - } - return result; - } - - /** - * Executes the constructor multiple times to make sure we - * have enough profiling information before triggering an OSR - * compilation. Otherwise the C2 compiler may add uncommon traps. - * - * @param c Constructor to be executed - * @return Number of times the constructor was executed - * @throws Exception - */ - private static int warmup(Constructor c) throws Exception { - int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.OSR_WARMUP; ++i) { - result += c.newInstance(null, 1).hashCode(); - } - return result; - } - private static final Constructor CONSTRUCTOR; private static final Constructor OSR_CONSTRUCTOR; private static final Method METHOD; @@ -629,26 +573,83 @@ return 42; } - private static int osrStaticMethod(long limit) { + /** + * Deoptimizes all non-osr versions of the given executable after + * compilation finished. 
+ * + * @param e Executable + * @throws Exception + */ + private static void waitAndDeoptimize(Executable e) { + CompilerWhiteBoxTest.waitBackgroundCompilation(e); + if (WhiteBox.getWhiteBox().isMethodQueuedForCompilation(e)) { + throw new RuntimeException(e + " must not be in queue"); + } + // Deoptimize non-osr versions of executable + WhiteBox.getWhiteBox().deoptimizeMethod(e, false); + } + + /** + * Executes the method multiple times to make sure we have + * enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. + * + * @param m Method to be executed + * @return Number of times the method was executed + * @throws Exception + */ + private static int warmup(Method m) throws Exception { + waitAndDeoptimize(m); + Helper helper = new Helper(); + int result = 0; + for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) { + result += (int)m.invoke(helper, 1); + } + // Wait to make sure OSR compilation is not blocked by + // non-OSR compilation in the compile queue + CompilerWhiteBoxTest.waitBackgroundCompilation(m); + return result; + } + + /** + * Executes the constructor multiple times to make sure we + * have enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. + * + * @param c Constructor to be executed + * @return Number of times the constructor was executed + * @throws Exception + */ + private static int warmup(Constructor c) throws Exception { + waitAndDeoptimize(c); + int result = 0; + for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) { + result += c.newInstance(null, 1).hashCode(); + } + // Wait to make sure OSR compilation is not blocked by + // non-OSR compilation in the compile queue + CompilerWhiteBoxTest.waitBackgroundCompilation(c); + return result; + } + + private static int osrStaticMethod(long limit) throws Exception { + int result = 0; if (limit != 1) { - // Make sure there is no compiled version after warmup - waitAndDeoptimize(OSR_STATIC); + result = warmup(OSR_STATIC); } // Trigger osr compilation - int result = 0; for (long i = 0; i < limit; ++i) { result += staticMethod(); } return result; } - private int osrMethod(long limit) { + private int osrMethod(long limit) throws Exception { + int result = 0; if (limit != 1) { - // Make sure there is no compiled version after warmup - waitAndDeoptimize(OSR_METHOD); + result = warmup(OSR_METHOD); } // Trigger osr compilation - int result = 0; for (long i = 0; i < limit; ++i) { result += method(); } @@ -663,13 +664,12 @@ } // for OSR constructor test case - private Helper(Object o, long limit) { + private Helper(Object o, long limit) throws Exception { + int result = 0; if (limit != 1) { - // Make sure there is no compiled version after warmup - waitAndDeoptimize(OSR_CONSTRUCTOR); + result = warmup(OSR_CONSTRUCTOR); } // Trigger osr compilation - int result = 0; for (long i = 0; i < limit; ++i) { result += method(); } # HG changeset patch # User thartmann # Date 1414484256 -3600 # Tue Oct 28 09:17:36 2014 +0100 # Node ID f5f752e748401dc4470a950351b5c69744cc7885 # Parent d1e732bbe4e593862c9b201784584c043ee0b7bd 8061983: [TESTBUG] compiler/whitebox/MakeMethodNotCompilableTest.java fails with "must not be in queue" Summary: Added a method checkNotCompiled(boolean isOsr) to either check if the method is OSR compiled or to check if it is non-OSR compiled. 
Reviewed-by: kvn diff --git a/test/compiler/whitebox/CompilerWhiteBoxTest.java b/test/compiler/whitebox/CompilerWhiteBoxTest.java --- a/test/compiler/whitebox/CompilerWhiteBoxTest.java +++ b/test/compiler/whitebox/CompilerWhiteBoxTest.java @@ -206,7 +206,6 @@ * is compiled, or if {@linkplain #method} has zero * compilation level. */ - protected final void checkNotCompiled(int compLevel) { if (WHITE_BOX.isMethodQueuedForCompilation(method)) { throw new RuntimeException(method + " must not be in queue"); @@ -227,24 +226,30 @@ * compilation level. */ protected final void checkNotCompiled() { - if (WHITE_BOX.isMethodCompiled(method, false)) { - throw new RuntimeException(method + " must be not compiled"); - } - if (WHITE_BOX.getMethodCompilationLevel(method, false) != 0) { - throw new RuntimeException(method + " comp_level must be == 0"); - } - checkNotOsrCompiled(); + checkNotCompiled(true); + checkNotCompiled(false); } - protected final void checkNotOsrCompiled() { + /** + * Checks, that {@linkplain #method} is not (OSR-)compiled. + * + * @param isOsr Check for OSR compilation if true + * @throws RuntimeException if {@linkplain #method} is in compiler queue or + * is compiled, or if {@linkplain #method} has zero + * compilation level. + */ + protected final void checkNotCompiled(boolean isOsr) { + waitBackgroundCompilation(); if (WHITE_BOX.isMethodQueuedForCompilation(method)) { throw new RuntimeException(method + " must not be in queue"); } - if (WHITE_BOX.isMethodCompiled(method, true)) { - throw new RuntimeException(method + " must be not osr_compiled"); + if (WHITE_BOX.isMethodCompiled(method, isOsr)) { + throw new RuntimeException(method + " must not be " + + (isOsr ? "osr_" : "") + "compiled"); } - if (WHITE_BOX.getMethodCompilationLevel(method, true) != 0) { - throw new RuntimeException(method + " osr_comp_level must be == 0"); + if (WHITE_BOX.getMethodCompilationLevel(method, isOsr) != 0) { + throw new RuntimeException(method + (isOsr ? 
" osr_" : " ") + + "comp_level must be == 0"); } } diff --git a/test/compiler/whitebox/MakeMethodNotCompilableTest.java b/test/compiler/whitebox/MakeMethodNotCompilableTest.java --- a/test/compiler/whitebox/MakeMethodNotCompilableTest.java +++ b/test/compiler/whitebox/MakeMethodNotCompilableTest.java @@ -131,14 +131,15 @@ throw new RuntimeException(method + " is not compilable after clearMethodState()"); } - + // Make method not (OSR-)compilable (depending on testCase.isOsr()) makeNotCompilable(); if (isCompilable()) { throw new RuntimeException(method + " must be not compilable"); } - + // Try to (OSR-)compile method compile(); - checkNotOsrCompiled(); + // Method should not be (OSR-)compiled + checkNotCompiled(testCase.isOsr()); if (isCompilable()) { throw new RuntimeException(method + " must be not compilable"); } # HG changeset patch # User iklam # Date 1413241797 25200 # Mon Oct 13 16:09:57 2014 -0700 # Node ID 4cb90023bf2bd4fdef1d160868bbc1b734a67d28 # Parent f5f752e748401dc4470a950351b5c69744cc7885 8061651: Interface to the Lookup Index Cache to improve URLClassPath search time Summary: Implemented the interface in sun.misc.URLClassPath and corresponding JVM_XXX APIs Reviewed-by: mchung, acorn, jiangli, dholmes diff --git a/make/bsd/makefiles/mapfile-vers-debug b/make/bsd/makefiles/mapfile-vers-debug --- a/make/bsd/makefiles/mapfile-vers-debug +++ b/make/bsd/makefiles/mapfile-vers-debug @@ -187,6 +187,9 @@ _JVM_IsSupportedJNIVersion _JVM_IsThreadAlive _JVM_IsVMGeneratedMethodIx + _JVM_KnownToNotExist + _JVM_GetResourceLookupCacheURLs + _JVM_GetResourceLookupCache _JVM_LatestUserDefinedLoader _JVM_Listen _JVM_LoadClass0 diff --git a/make/bsd/makefiles/mapfile-vers-product b/make/bsd/makefiles/mapfile-vers-product --- a/make/bsd/makefiles/mapfile-vers-product +++ b/make/bsd/makefiles/mapfile-vers-product @@ -187,6 +187,9 @@ _JVM_IsSupportedJNIVersion _JVM_IsThreadAlive _JVM_IsVMGeneratedMethodIx + _JVM_KnownToNotExist + _JVM_GetResourceLookupCacheURLs + _JVM_GetResourceLookupCache _JVM_LatestUserDefinedLoader _JVM_Listen _JVM_LoadClass0 diff --git a/make/linux/makefiles/mapfile-vers-debug b/make/linux/makefiles/mapfile-vers-debug --- a/make/linux/makefiles/mapfile-vers-debug +++ b/make/linux/makefiles/mapfile-vers-debug @@ -217,6 +217,9 @@ JVM_RegisterSignal; JVM_ReleaseUTF; JVM_ResolveClass; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_ResumeThread; JVM_Send; JVM_SendTo; diff --git a/make/linux/makefiles/mapfile-vers-product b/make/linux/makefiles/mapfile-vers-product --- a/make/linux/makefiles/mapfile-vers-product +++ b/make/linux/makefiles/mapfile-vers-product @@ -217,6 +217,9 @@ JVM_RegisterSignal; JVM_ReleaseUTF; JVM_ResolveClass; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_ResumeThread; JVM_Send; JVM_SendTo; diff --git a/make/solaris/makefiles/mapfile-vers b/make/solaris/makefiles/mapfile-vers --- a/make/solaris/makefiles/mapfile-vers +++ b/make/solaris/makefiles/mapfile-vers @@ -189,6 +189,9 @@ JVM_IsSupportedJNIVersion; JVM_IsThreadAlive; JVM_IsVMGeneratedMethodIx; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_LatestUserDefinedLoader; JVM_Listen; JVM_LoadClass0; diff --git a/src/share/vm/classfile/classLoader.cpp b/src/share/vm/classfile/classLoader.cpp --- a/src/share/vm/classfile/classLoader.cpp +++ b/src/share/vm/classfile/classLoader.cpp @@ -610,7 +610,7 @@ } #endif -void ClassLoader::setup_search_path(const char *class_path) { +void 
ClassLoader::setup_search_path(const char *class_path, bool canonicalize) { int offset = 0; int len = (int)strlen(class_path); int end = 0; @@ -625,7 +625,13 @@ char* path = NEW_RESOURCE_ARRAY(char, end - start + 1); strncpy(path, &class_path[start], end - start); path[end - start] = '\0'; - update_class_path_entry_list(path, false); + if (canonicalize) { + char* canonical_path = NEW_RESOURCE_ARRAY(char, JVM_MAXPATHLEN + 1); + if (get_canonical_path(path, canonical_path, JVM_MAXPATHLEN)) { + path = canonical_path; + } + } + update_class_path_entry_list(path, /*check_for_duplicates=*/canonicalize); #if INCLUDE_CDS if (DumpSharedSpaces) { check_shared_classpath(path); diff --git a/src/share/vm/classfile/classLoader.hpp b/src/share/vm/classfile/classLoader.hpp --- a/src/share/vm/classfile/classLoader.hpp +++ b/src/share/vm/classfile/classLoader.hpp @@ -129,8 +129,8 @@ bool _has_error; bool _throw_exception; volatile ClassPathEntry* _resolved_entry; + public: ClassPathEntry* resolve_entry(TRAPS); - public: bool is_jar_file(); const char* name() { return _path; } LazyClassPathEntry(const char* path, const struct stat* st, bool throw_exception); @@ -218,7 +218,7 @@ static void setup_meta_index(const char* meta_index_path, const char* meta_index_dir, int start_index); static void setup_bootstrap_search_path(); - static void setup_search_path(const char *class_path); + static void setup_search_path(const char *class_path, bool canonicalize=false); static void load_zip_library(); static ClassPathEntry* create_class_path_entry(const char *path, const struct stat* st, @@ -329,6 +329,10 @@ return e; } + static int num_classpath_entries() { + return _num_entries; + } + #if INCLUDE_CDS // Sharing dump and restore static void copy_package_info_buckets(char** top, char* end); diff --git a/src/share/vm/classfile/classLoaderExt.hpp b/src/share/vm/classfile/classLoaderExt.hpp --- a/src/share/vm/classfile/classLoaderExt.hpp +++ b/src/share/vm/classfile/classLoaderExt.hpp @@ -64,6 +64,15 @@ ClassLoader::add_to_list(new_entry); } static void setup_search_paths() {} + + static void init_lookup_cache(TRAPS) {} + static void copy_lookup_cache_to_archive(char** top, char* end) {} + static char* restore_lookup_cache_from_archive(char* buffer) {return buffer;} + static inline bool is_lookup_cache_enabled() {return false;} + + static bool known_to_not_exist(JNIEnv *env, jobject loader, const char *classname, TRAPS) {return false;} + static jobjectArray get_lookup_cache_urls(JNIEnv *env, jobject loader, TRAPS) {return NULL;} + static jintArray get_lookup_cache(JNIEnv *env, jobject loader, const char *pkgname, TRAPS) {return NULL;} }; #endif // SHARE_VM_CLASSFILE_CLASSLOADEREXT_HPP diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp --- a/src/share/vm/classfile/systemDictionary.hpp +++ b/src/share/vm/classfile/systemDictionary.hpp @@ -175,6 +175,8 @@ do_klass(URL_klass, java_net_URL, Pre ) \ do_klass(Jar_Manifest_klass, java_util_jar_Manifest, Pre ) \ do_klass(sun_misc_Launcher_klass, sun_misc_Launcher, Pre ) \ + do_klass(sun_misc_Launcher_AppClassLoader_klass, sun_misc_Launcher_AppClassLoader, Pre ) \ + do_klass(sun_misc_Launcher_ExtClassLoader_klass, sun_misc_Launcher_ExtClassLoader, Pre ) \ do_klass(CodeSource_klass, java_security_CodeSource, Pre ) \ \ /* It's NULL in non-1.4 JDKs. 
*/ \ diff --git a/src/share/vm/classfile/vmSymbols.hpp b/src/share/vm/classfile/vmSymbols.hpp --- a/src/share/vm/classfile/vmSymbols.hpp +++ b/src/share/vm/classfile/vmSymbols.hpp @@ -116,6 +116,7 @@ template(java_lang_AssertionStatusDirectives, "java/lang/AssertionStatusDirectives") \ template(getBootClassPathEntryForClass_name, "getBootClassPathEntryForClass") \ template(sun_misc_PostVMInitHook, "sun/misc/PostVMInitHook") \ + template(sun_misc_Launcher_AppClassLoader, "sun/misc/Launcher$AppClassLoader") \ template(sun_misc_Launcher_ExtClassLoader, "sun/misc/Launcher$ExtClassLoader") \ \ /* Java runtime version access */ \ diff --git a/src/share/vm/memory/metadataFactory.hpp b/src/share/vm/memory/metadataFactory.hpp --- a/src/share/vm/memory/metadataFactory.hpp +++ b/src/share/vm/memory/metadataFactory.hpp @@ -64,6 +64,12 @@ template static void free_array(ClassLoaderData* loader_data, Array* data) { + if (DumpSharedSpaces) { + // FIXME: the freeing code is buggy, especially when PrintSharedSpaces is enabled. + // Disable for now -- this means if you specify bad classes in your classlist you + // may have wasted space inside the archive. + return; + } if (data != NULL) { assert(loader_data != NULL, "shouldn't pass null"); assert(!data->is_shared(), "cannot deallocate array in shared spaces"); diff --git a/src/share/vm/memory/metaspaceShared.cpp b/src/share/vm/memory/metaspaceShared.cpp --- a/src/share/vm/memory/metaspaceShared.cpp +++ b/src/share/vm/memory/metaspaceShared.cpp @@ -24,6 +24,7 @@ #include "precompiled.hpp" #include "classfile/dictionary.hpp" +#include "classfile/classLoaderExt.hpp" #include "classfile/loaderConstraints.hpp" #include "classfile/placeholders.hpp" #include "classfile/sharedClassUtil.hpp" @@ -39,6 +40,7 @@ #include "runtime/signature.hpp" #include "runtime/vm_operations.hpp" #include "runtime/vmThread.hpp" +#include "utilities/hashtable.hpp" #include "utilities/hashtable.inline.hpp" PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC @@ -533,6 +535,8 @@ ClassLoader::copy_package_info_table(&md_top, md_end); ClassLoader::verify(); + ClassLoaderExt::copy_lookup_cache_to_archive(&md_top, md_end); + // Write the other data to the output array. 
WriteClosure wc(md_top, md_end); MetaspaceShared::serialize(&wc); @@ -745,6 +749,8 @@ } tty->print_cr("Loading classes to share: done."); + ClassLoaderExt::init_lookup_cache(THREAD); + if (PrintSharedSpaces) { tty->print_cr("Shared spaces: preloaded %d classes", class_count); } @@ -1056,6 +1062,8 @@ buffer += sizeof(intptr_t); buffer += len; + buffer = ClassLoaderExt::restore_lookup_cache_from_archive(buffer); + intptr_t* array = (intptr_t*)buffer; ReadClosure rc(&array); serialize(&rc); diff --git a/src/share/vm/memory/metaspaceShared.hpp b/src/share/vm/memory/metaspaceShared.hpp --- a/src/share/vm/memory/metaspaceShared.hpp +++ b/src/share/vm/memory/metaspaceShared.hpp @@ -32,9 +32,9 @@ #define LargeSharedArchiveSize (300*M) #define HugeSharedArchiveSize (800*M) -#define ReadOnlyRegionPercentage 0.4 -#define ReadWriteRegionPercentage 0.55 -#define MiscDataRegionPercentage 0.03 +#define ReadOnlyRegionPercentage 0.39 +#define ReadWriteRegionPercentage 0.50 +#define MiscDataRegionPercentage 0.09 #define MiscCodeRegionPercentage 0.02 #define LargeThresholdClassCount 5000 #define HugeThresholdClassCount 40000 diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp --- a/src/share/vm/prims/jvm.cpp +++ b/src/share/vm/prims/jvm.cpp @@ -24,6 +24,7 @@ #include "precompiled.hpp" #include "classfile/classLoader.hpp" +#include "classfile/classLoaderExt.hpp" #include "classfile/javaAssertions.hpp" #include "classfile/javaClasses.hpp" #include "classfile/symbolTable.hpp" @@ -393,6 +394,14 @@ } } + const char* enableSharedLookupCache = "false"; +#if INCLUDE_CDS + if (ClassLoaderExt::is_lookup_cache_enabled()) { + enableSharedLookupCache = "true"; + } +#endif + PUTPROP(props, "sun.cds.enableSharedLookupCache", enableSharedLookupCache); + return properties; JVM_END @@ -766,6 +775,36 @@ JVM_END +JVM_ENTRY(jboolean, JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname)) + JVMWrapper("JVM_KnownToNotExist"); +#if INCLUDE_CDS + return ClassLoaderExt::known_to_not_exist(env, loader, classname, CHECK_(false)); +#else + return false; +#endif +JVM_END + + +JVM_ENTRY(jobjectArray, JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader)) + JVMWrapper("JVM_GetResourceLookupCacheURLs"); +#if INCLUDE_CDS + return ClassLoaderExt::get_lookup_cache_urls(env, loader, CHECK_NULL); +#else + return NULL; +#endif +JVM_END + + +JVM_ENTRY(jintArray, JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name)) + JVMWrapper("JVM_GetResourceLookupCache"); +#if INCLUDE_CDS + return ClassLoaderExt::get_lookup_cache(env, loader, resource_name, CHECK_NULL); +#else + return NULL; +#endif +JVM_END + + // Returns a class loaded by the bootstrap class loader; or null // if not found. ClassNotFoundException is not thrown. // diff --git a/src/share/vm/prims/jvm.h b/src/share/vm/prims/jvm.h --- a/src/share/vm/prims/jvm.h +++ b/src/share/vm/prims/jvm.h @@ -1548,6 +1548,31 @@ JNIEXPORT jobjectArray JNICALL JVM_GetThreadStateNames(JNIEnv* env, jint javaThreadState, jintArray values); +/* + * Returns true if the JVM's lookup cache indicates that this class is + * known to NOT exist for the given loader. + */ +JNIEXPORT jboolean JNICALL +JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname); + +/* + * Returns an array of all URLs that are stored in the JVM's lookup cache + * for the given loader. NULL if the lookup cache is unavailable. 
+ */ +JNIEXPORT jobjectArray JNICALL +JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader); + +/* + * Returns an array of all URLs that *may* contain the resource_name for the + * given loader. This function returns an integer array, each element + * of which can be used to index into the array returned by + * JVM_GetResourceLookupCacheURLs of the same loader to determine the + * URLs. + */ +JNIEXPORT jintArray JNICALL +JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name); + + /* ========================================================================= * The following defines a private JVM interface that the JDK can query * for the JVM version and capabilities. sun.misc.Version defines diff --git a/src/share/vm/prims/whitebox.cpp b/src/share/vm/prims/whitebox.cpp --- a/src/share/vm/prims/whitebox.cpp +++ b/src/share/vm/prims/whitebox.cpp @@ -104,6 +104,28 @@ return closure.found(); WB_END +WB_ENTRY(jboolean, WB_ClassKnownToNotExist(JNIEnv* env, jobject o, jobject loader, jstring name)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + const char* class_name = env->GetStringUTFChars(name, NULL); + jboolean result = JVM_KnownToNotExist(env, loader, class_name); + env->ReleaseStringUTFChars(name, class_name); + return result; +WB_END + +WB_ENTRY(jobjectArray, WB_GetLookupCacheURLs(JNIEnv* env, jobject o, jobject loader)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + return JVM_GetResourceLookupCacheURLs(env, loader); +WB_END + +WB_ENTRY(jintArray, WB_GetLookupCacheMatches(JNIEnv* env, jobject o, jobject loader, jstring name)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + const char* resource_name = env->GetStringUTFChars(name, NULL); + jintArray result = JVM_GetResourceLookupCache(env, loader, resource_name); + + env->ReleaseStringUTFChars(name, resource_name); + return result; +WB_END + WB_ENTRY(jlong, WB_GetCompressedOopsMaxHeapSize(JNIEnv* env, jobject o)) { return (jlong)Arguments::max_heap_for_compressed_oops(); } @@ -939,6 +961,11 @@ {CC"isObjectInOldGen", CC"(Ljava/lang/Object;)Z", (void*)&WB_isObjectInOldGen }, {CC"getHeapOopSize", CC"()I", (void*)&WB_GetHeapOopSize }, {CC"isClassAlive0", CC"(Ljava/lang/String;)Z", (void*)&WB_IsClassAlive }, + {CC"classKnownToNotExist", + CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)Z",(void*)&WB_ClassKnownToNotExist}, + {CC"getLookupCacheURLs", CC"(Ljava/lang/ClassLoader;)[Ljava/net/URL;", (void*)&WB_GetLookupCacheURLs}, + {CC"getLookupCacheMatches", CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)[I", + (void*)&WB_GetLookupCacheMatches}, {CC"parseCommandLine", CC"(Ljava/lang/String;[Lsun/hotspot/parser/DiagnosticCommand;)[Ljava/lang/Object;", (void*) &WB_ParseCommandLine diff --git a/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java b/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java --- a/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java +++ b/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java @@ -30,6 +30,7 @@ import java.util.function.Function; import java.util.stream.Stream; import java.security.BasicPermission; +import java.net.URL; import sun.hotspot.parser.DiagnosticCommand; @@ -84,6 +85,11 @@ } private native boolean isClassAlive0(String name); + // Resource/Class Lookup Cache + public native boolean classKnownToNotExist(ClassLoader loader, String name); + public native URL[] getLookupCacheURLs(ClassLoader loader); + public native int[] getLookupCacheMatches(ClassLoader loader, String name); + // G1 public native 
boolean g1InConcurrentMark(); public native boolean g1IsHumongous(Object o); # HG changeset patch # User thartmann # Date 1414754180 -3600 # Fri Oct 31 12:16:20 2014 +0100 # Node ID 468850e35e481e7c7433be3d3b4c090bf3be702a # Parent 063338b89a56a14b5a296e5d93fe732677d283d4 8062169: Multiple OSR compilations issued for same bci Summary: Fixed 'SimpleThresholdPolicy::event' to always perform OSR if an OSR nmethod is available. Reviewed-by: kvn, iveresov diff --git a/src/share/vm/runtime/simpleThresholdPolicy.cpp b/src/share/vm/runtime/simpleThresholdPolicy.cpp --- a/src/share/vm/runtime/simpleThresholdPolicy.cpp +++ b/src/share/vm/runtime/simpleThresholdPolicy.cpp @@ -196,7 +196,6 @@ // Don't trigger other compiles in testing mode return NULL; } - nmethod *osr_nm = NULL; handle_counter_overflow(method()); if (method() != inlinee()) { @@ -210,14 +209,16 @@ if (bci == InvocationEntryBci) { method_invocation_event(method, inlinee, comp_level, nm, thread); } else { + // method == inlinee if the event originated in the main method method_back_branch_event(method, inlinee, bci, comp_level, nm, thread); - // method == inlinee if the event originated in the main method - int highest_level = inlinee->highest_osr_comp_level(); - if (highest_level > comp_level) { - osr_nm = inlinee->lookup_osr_nmethod_for(bci, highest_level, false); + // Check if event led to a higher level OSR compilation + nmethod* osr_nm = inlinee->lookup_osr_nmethod_for(bci, comp_level, false); + if (osr_nm != NULL && osr_nm->comp_level() > comp_level) { + // Perform OSR with new nmethod + return osr_nm; } } - return osr_nm; + return NULL; } // Check if the method can be compiled, change level if necessary # HG changeset patch # User thartmann # Date 1415176502 0 # Wed Nov 05 08:35:02 2014 +0000 # Node ID d4562805b0304240e47386dede429562f691cc60 # Parent 468850e35e481e7c7433be3d3b4c090bf3be702a # Parent 4cb90023bf2bd4fdef1d160868bbc1b734a67d28 Merge diff --git a/make/bsd/makefiles/mapfile-vers-debug b/make/bsd/makefiles/mapfile-vers-debug --- a/make/bsd/makefiles/mapfile-vers-debug +++ b/make/bsd/makefiles/mapfile-vers-debug @@ -187,6 +187,9 @@ _JVM_IsSupportedJNIVersion _JVM_IsThreadAlive _JVM_IsVMGeneratedMethodIx + _JVM_KnownToNotExist + _JVM_GetResourceLookupCacheURLs + _JVM_GetResourceLookupCache _JVM_LatestUserDefinedLoader _JVM_Listen _JVM_LoadClass0 diff --git a/make/bsd/makefiles/mapfile-vers-product b/make/bsd/makefiles/mapfile-vers-product --- a/make/bsd/makefiles/mapfile-vers-product +++ b/make/bsd/makefiles/mapfile-vers-product @@ -187,6 +187,9 @@ _JVM_IsSupportedJNIVersion _JVM_IsThreadAlive _JVM_IsVMGeneratedMethodIx + _JVM_KnownToNotExist + _JVM_GetResourceLookupCacheURLs + _JVM_GetResourceLookupCache _JVM_LatestUserDefinedLoader _JVM_Listen _JVM_LoadClass0 diff --git a/make/linux/makefiles/mapfile-vers-debug b/make/linux/makefiles/mapfile-vers-debug --- a/make/linux/makefiles/mapfile-vers-debug +++ b/make/linux/makefiles/mapfile-vers-debug @@ -217,6 +217,9 @@ JVM_RegisterSignal; JVM_ReleaseUTF; JVM_ResolveClass; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_ResumeThread; JVM_Send; JVM_SendTo; diff --git a/make/linux/makefiles/mapfile-vers-product b/make/linux/makefiles/mapfile-vers-product --- a/make/linux/makefiles/mapfile-vers-product +++ b/make/linux/makefiles/mapfile-vers-product @@ -217,6 +217,9 @@ JVM_RegisterSignal; JVM_ReleaseUTF; JVM_ResolveClass; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_ResumeThread; JVM_Send; 
JVM_SendTo; diff --git a/make/solaris/makefiles/mapfile-vers b/make/solaris/makefiles/mapfile-vers --- a/make/solaris/makefiles/mapfile-vers +++ b/make/solaris/makefiles/mapfile-vers @@ -189,6 +189,9 @@ JVM_IsSupportedJNIVersion; JVM_IsThreadAlive; JVM_IsVMGeneratedMethodIx; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_LatestUserDefinedLoader; JVM_Listen; JVM_LoadClass0; diff --git a/src/share/vm/classfile/classLoader.cpp b/src/share/vm/classfile/classLoader.cpp --- a/src/share/vm/classfile/classLoader.cpp +++ b/src/share/vm/classfile/classLoader.cpp @@ -610,7 +610,7 @@ } #endif -void ClassLoader::setup_search_path(const char *class_path) { +void ClassLoader::setup_search_path(const char *class_path, bool canonicalize) { int offset = 0; int len = (int)strlen(class_path); int end = 0; @@ -625,7 +625,13 @@ char* path = NEW_RESOURCE_ARRAY(char, end - start + 1); strncpy(path, &class_path[start], end - start); path[end - start] = '\0'; - update_class_path_entry_list(path, false); + if (canonicalize) { + char* canonical_path = NEW_RESOURCE_ARRAY(char, JVM_MAXPATHLEN + 1); + if (get_canonical_path(path, canonical_path, JVM_MAXPATHLEN)) { + path = canonical_path; + } + } + update_class_path_entry_list(path, /*check_for_duplicates=*/canonicalize); #if INCLUDE_CDS if (DumpSharedSpaces) { check_shared_classpath(path); diff --git a/src/share/vm/classfile/classLoader.hpp b/src/share/vm/classfile/classLoader.hpp --- a/src/share/vm/classfile/classLoader.hpp +++ b/src/share/vm/classfile/classLoader.hpp @@ -129,8 +129,8 @@ bool _has_error; bool _throw_exception; volatile ClassPathEntry* _resolved_entry; + public: ClassPathEntry* resolve_entry(TRAPS); - public: bool is_jar_file(); const char* name() { return _path; } LazyClassPathEntry(const char* path, const struct stat* st, bool throw_exception); @@ -218,7 +218,7 @@ static void setup_meta_index(const char* meta_index_path, const char* meta_index_dir, int start_index); static void setup_bootstrap_search_path(); - static void setup_search_path(const char *class_path); + static void setup_search_path(const char *class_path, bool canonicalize=false); static void load_zip_library(); static ClassPathEntry* create_class_path_entry(const char *path, const struct stat* st, @@ -329,6 +329,10 @@ return e; } + static int num_classpath_entries() { + return _num_entries; + } + #if INCLUDE_CDS // Sharing dump and restore static void copy_package_info_buckets(char** top, char* end); diff --git a/src/share/vm/classfile/classLoaderExt.hpp b/src/share/vm/classfile/classLoaderExt.hpp --- a/src/share/vm/classfile/classLoaderExt.hpp +++ b/src/share/vm/classfile/classLoaderExt.hpp @@ -64,6 +64,15 @@ ClassLoader::add_to_list(new_entry); } static void setup_search_paths() {} + + static void init_lookup_cache(TRAPS) {} + static void copy_lookup_cache_to_archive(char** top, char* end) {} + static char* restore_lookup_cache_from_archive(char* buffer) {return buffer;} + static inline bool is_lookup_cache_enabled() {return false;} + + static bool known_to_not_exist(JNIEnv *env, jobject loader, const char *classname, TRAPS) {return false;} + static jobjectArray get_lookup_cache_urls(JNIEnv *env, jobject loader, TRAPS) {return NULL;} + static jintArray get_lookup_cache(JNIEnv *env, jobject loader, const char *pkgname, TRAPS) {return NULL;} }; #endif // SHARE_VM_CLASSFILE_CLASSLOADEREXT_HPP diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp --- a/src/share/vm/classfile/systemDictionary.hpp 
+++ b/src/share/vm/classfile/systemDictionary.hpp @@ -175,6 +175,8 @@ do_klass(URL_klass, java_net_URL, Pre ) \ do_klass(Jar_Manifest_klass, java_util_jar_Manifest, Pre ) \ do_klass(sun_misc_Launcher_klass, sun_misc_Launcher, Pre ) \ + do_klass(sun_misc_Launcher_AppClassLoader_klass, sun_misc_Launcher_AppClassLoader, Pre ) \ + do_klass(sun_misc_Launcher_ExtClassLoader_klass, sun_misc_Launcher_ExtClassLoader, Pre ) \ do_klass(CodeSource_klass, java_security_CodeSource, Pre ) \ \ /* It's NULL in non-1.4 JDKs. */ \ diff --git a/src/share/vm/classfile/vmSymbols.hpp b/src/share/vm/classfile/vmSymbols.hpp --- a/src/share/vm/classfile/vmSymbols.hpp +++ b/src/share/vm/classfile/vmSymbols.hpp @@ -116,6 +116,7 @@ template(java_lang_AssertionStatusDirectives, "java/lang/AssertionStatusDirectives") \ template(getBootClassPathEntryForClass_name, "getBootClassPathEntryForClass") \ template(sun_misc_PostVMInitHook, "sun/misc/PostVMInitHook") \ + template(sun_misc_Launcher_AppClassLoader, "sun/misc/Launcher$AppClassLoader") \ template(sun_misc_Launcher_ExtClassLoader, "sun/misc/Launcher$ExtClassLoader") \ \ /* Java runtime version access */ \ diff --git a/src/share/vm/memory/metadataFactory.hpp b/src/share/vm/memory/metadataFactory.hpp --- a/src/share/vm/memory/metadataFactory.hpp +++ b/src/share/vm/memory/metadataFactory.hpp @@ -64,6 +64,12 @@ template static void free_array(ClassLoaderData* loader_data, Array* data) { + if (DumpSharedSpaces) { + // FIXME: the freeing code is buggy, especially when PrintSharedSpaces is enabled. + // Disable for now -- this means if you specify bad classes in your classlist you + // may have wasted space inside the archive. + return; + } if (data != NULL) { assert(loader_data != NULL, "shouldn't pass null"); assert(!data->is_shared(), "cannot deallocate array in shared spaces"); diff --git a/src/share/vm/memory/metaspaceShared.cpp b/src/share/vm/memory/metaspaceShared.cpp --- a/src/share/vm/memory/metaspaceShared.cpp +++ b/src/share/vm/memory/metaspaceShared.cpp @@ -24,6 +24,7 @@ #include "precompiled.hpp" #include "classfile/dictionary.hpp" +#include "classfile/classLoaderExt.hpp" #include "classfile/loaderConstraints.hpp" #include "classfile/placeholders.hpp" #include "classfile/sharedClassUtil.hpp" @@ -39,6 +40,7 @@ #include "runtime/signature.hpp" #include "runtime/vm_operations.hpp" #include "runtime/vmThread.hpp" +#include "utilities/hashtable.hpp" #include "utilities/hashtable.inline.hpp" PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC @@ -533,6 +535,8 @@ ClassLoader::copy_package_info_table(&md_top, md_end); ClassLoader::verify(); + ClassLoaderExt::copy_lookup_cache_to_archive(&md_top, md_end); + // Write the other data to the output array. 
WriteClosure wc(md_top, md_end); MetaspaceShared::serialize(&wc); @@ -745,6 +749,8 @@ } tty->print_cr("Loading classes to share: done."); + ClassLoaderExt::init_lookup_cache(THREAD); + if (PrintSharedSpaces) { tty->print_cr("Shared spaces: preloaded %d classes", class_count); } @@ -1056,6 +1062,8 @@ buffer += sizeof(intptr_t); buffer += len; + buffer = ClassLoaderExt::restore_lookup_cache_from_archive(buffer); + intptr_t* array = (intptr_t*)buffer; ReadClosure rc(&array); serialize(&rc); diff --git a/src/share/vm/memory/metaspaceShared.hpp b/src/share/vm/memory/metaspaceShared.hpp --- a/src/share/vm/memory/metaspaceShared.hpp +++ b/src/share/vm/memory/metaspaceShared.hpp @@ -32,9 +32,9 @@ #define LargeSharedArchiveSize (300*M) #define HugeSharedArchiveSize (800*M) -#define ReadOnlyRegionPercentage 0.4 -#define ReadWriteRegionPercentage 0.55 -#define MiscDataRegionPercentage 0.03 +#define ReadOnlyRegionPercentage 0.39 +#define ReadWriteRegionPercentage 0.50 +#define MiscDataRegionPercentage 0.09 #define MiscCodeRegionPercentage 0.02 #define LargeThresholdClassCount 5000 #define HugeThresholdClassCount 40000 diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp --- a/src/share/vm/prims/jvm.cpp +++ b/src/share/vm/prims/jvm.cpp @@ -24,6 +24,7 @@ #include "precompiled.hpp" #include "classfile/classLoader.hpp" +#include "classfile/classLoaderExt.hpp" #include "classfile/javaAssertions.hpp" #include "classfile/javaClasses.hpp" #include "classfile/symbolTable.hpp" @@ -393,6 +394,14 @@ } } + const char* enableSharedLookupCache = "false"; +#if INCLUDE_CDS + if (ClassLoaderExt::is_lookup_cache_enabled()) { + enableSharedLookupCache = "true"; + } +#endif + PUTPROP(props, "sun.cds.enableSharedLookupCache", enableSharedLookupCache); + return properties; JVM_END @@ -766,6 +775,36 @@ JVM_END +JVM_ENTRY(jboolean, JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname)) + JVMWrapper("JVM_KnownToNotExist"); +#if INCLUDE_CDS + return ClassLoaderExt::known_to_not_exist(env, loader, classname, CHECK_(false)); +#else + return false; +#endif +JVM_END + + +JVM_ENTRY(jobjectArray, JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader)) + JVMWrapper("JVM_GetResourceLookupCacheURLs"); +#if INCLUDE_CDS + return ClassLoaderExt::get_lookup_cache_urls(env, loader, CHECK_NULL); +#else + return NULL; +#endif +JVM_END + + +JVM_ENTRY(jintArray, JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name)) + JVMWrapper("JVM_GetResourceLookupCache"); +#if INCLUDE_CDS + return ClassLoaderExt::get_lookup_cache(env, loader, resource_name, CHECK_NULL); +#else + return NULL; +#endif +JVM_END + + // Returns a class loaded by the bootstrap class loader; or null // if not found. ClassNotFoundException is not thrown. // diff --git a/src/share/vm/prims/jvm.h b/src/share/vm/prims/jvm.h --- a/src/share/vm/prims/jvm.h +++ b/src/share/vm/prims/jvm.h @@ -1548,6 +1548,31 @@ JNIEXPORT jobjectArray JNICALL JVM_GetThreadStateNames(JNIEnv* env, jint javaThreadState, jintArray values); +/* + * Returns true if the JVM's lookup cache indicates that this class is + * known to NOT exist for the given loader. + */ +JNIEXPORT jboolean JNICALL +JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname); + +/* + * Returns an array of all URLs that are stored in the JVM's lookup cache + * for the given loader. NULL if the lookup cache is unavailable. 
+ */ +JNIEXPORT jobjectArray JNICALL +JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader); + +/* + * Returns an array of all URLs that *may* contain the resource_name for the + * given loader. This function returns an integer array, each element + * of which can be used to index into the array returned by + * JVM_GetResourceLookupCacheURLs of the same loader to determine the + * URLs. + */ +JNIEXPORT jintArray JNICALL +JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name); + + /* ========================================================================= * The following defines a private JVM interface that the JDK can query * for the JVM version and capabilities. sun.misc.Version defines diff --git a/src/share/vm/prims/whitebox.cpp b/src/share/vm/prims/whitebox.cpp --- a/src/share/vm/prims/whitebox.cpp +++ b/src/share/vm/prims/whitebox.cpp @@ -104,6 +104,28 @@ return closure.found(); WB_END +WB_ENTRY(jboolean, WB_ClassKnownToNotExist(JNIEnv* env, jobject o, jobject loader, jstring name)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + const char* class_name = env->GetStringUTFChars(name, NULL); + jboolean result = JVM_KnownToNotExist(env, loader, class_name); + env->ReleaseStringUTFChars(name, class_name); + return result; +WB_END + +WB_ENTRY(jobjectArray, WB_GetLookupCacheURLs(JNIEnv* env, jobject o, jobject loader)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + return JVM_GetResourceLookupCacheURLs(env, loader); +WB_END + +WB_ENTRY(jintArray, WB_GetLookupCacheMatches(JNIEnv* env, jobject o, jobject loader, jstring name)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + const char* resource_name = env->GetStringUTFChars(name, NULL); + jintArray result = JVM_GetResourceLookupCache(env, loader, resource_name); + + env->ReleaseStringUTFChars(name, resource_name); + return result; +WB_END + WB_ENTRY(jlong, WB_GetCompressedOopsMaxHeapSize(JNIEnv* env, jobject o)) { return (jlong)Arguments::max_heap_for_compressed_oops(); } @@ -939,6 +961,11 @@ {CC"isObjectInOldGen", CC"(Ljava/lang/Object;)Z", (void*)&WB_isObjectInOldGen }, {CC"getHeapOopSize", CC"()I", (void*)&WB_GetHeapOopSize }, {CC"isClassAlive0", CC"(Ljava/lang/String;)Z", (void*)&WB_IsClassAlive }, + {CC"classKnownToNotExist", + CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)Z",(void*)&WB_ClassKnownToNotExist}, + {CC"getLookupCacheURLs", CC"(Ljava/lang/ClassLoader;)[Ljava/net/URL;", (void*)&WB_GetLookupCacheURLs}, + {CC"getLookupCacheMatches", CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)[I", + (void*)&WB_GetLookupCacheMatches}, {CC"parseCommandLine", CC"(Ljava/lang/String;[Lsun/hotspot/parser/DiagnosticCommand;)[Ljava/lang/Object;", (void*) &WB_ParseCommandLine diff --git a/test/compiler/whitebox/CompilerWhiteBoxTest.java b/test/compiler/whitebox/CompilerWhiteBoxTest.java --- a/test/compiler/whitebox/CompilerWhiteBoxTest.java +++ b/test/compiler/whitebox/CompilerWhiteBoxTest.java @@ -72,9 +72,9 @@ /** Flag for verbose output, true if {@code -Dverbose} specified */ protected static final boolean IS_VERBOSE = System.getProperty("verbose") != null; - /** count of invocation to triger compilation */ + /** invocation count to trigger compilation */ protected static final int THRESHOLD; - /** count of invocation to triger OSR compilation */ + /** invocation count to trigger OSR compilation */ protected static final long BACKEDGE_THRESHOLD; /** Value of {@code java.vm.info} (interpreted|mixed|comp mode) */ protected static final 
String MODE = System.getProperty("java.vm.info"); @@ -206,7 +206,6 @@ * is compiled, or if {@linkplain #method} has zero * compilation level. */ - protected final void checkNotCompiled(int compLevel) { if (WHITE_BOX.isMethodQueuedForCompilation(method)) { throw new RuntimeException(method + " must not be in queue"); @@ -227,20 +226,30 @@ * compilation level. */ protected final void checkNotCompiled() { + checkNotCompiled(true); + checkNotCompiled(false); + } + + /** + * Checks, that {@linkplain #method} is not (OSR-)compiled. + * + * @param isOsr Check for OSR compilation if true + * @throws RuntimeException if {@linkplain #method} is in compiler queue or + * is compiled, or if {@linkplain #method} has zero + * compilation level. + */ + protected final void checkNotCompiled(boolean isOsr) { + waitBackgroundCompilation(); if (WHITE_BOX.isMethodQueuedForCompilation(method)) { throw new RuntimeException(method + " must not be in queue"); } - if (WHITE_BOX.isMethodCompiled(method, false)) { - throw new RuntimeException(method + " must be not compiled"); + if (WHITE_BOX.isMethodCompiled(method, isOsr)) { + throw new RuntimeException(method + " must not be " + + (isOsr ? "osr_" : "") + "compiled"); } - if (WHITE_BOX.getMethodCompilationLevel(method, false) != 0) { - throw new RuntimeException(method + " comp_level must be == 0"); - } - if (WHITE_BOX.isMethodCompiled(method, true)) { - throw new RuntimeException(method + " must be not osr_compiled"); - } - if (WHITE_BOX.getMethodCompilationLevel(method, true) != 0) { - throw new RuntimeException(method + " osr_comp_level must be == 0"); + if (WHITE_BOX.getMethodCompilationLevel(method, isOsr) != 0) { + throw new RuntimeException(method + (isOsr ? " osr_" : " ") + + "comp_level must be == 0"); } } @@ -306,12 +315,21 @@ * Waits for completion of background compilation of {@linkplain #method}. */ protected final void waitBackgroundCompilation() { + waitBackgroundCompilation(method); + } + + /** + * Waits for completion of background compilation of the given executable. 
+ * + * @param executable Executable + */ + protected static final void waitBackgroundCompilation(Executable executable) { if (!BACKGROUND_COMPILATION) { return; } final Object obj = new Object(); for (int i = 0; i < 10 - && WHITE_BOX.isMethodQueuedForCompilation(method); ++i) { + && WHITE_BOX.isMethodQueuedForCompilation(executable); ++i) { synchronized (obj) { try { obj.wait(1000); @@ -425,14 +443,14 @@ /** constructor test case */ CONSTRUCTOR_TEST(Helper.CONSTRUCTOR, Helper.CONSTRUCTOR_CALLABLE, false), /** method test case */ - METOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false), + METHOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false), /** static method test case */ STATIC_TEST(Helper.STATIC, Helper.STATIC_CALLABLE, false), /** OSR constructor test case */ OSR_CONSTRUCTOR_TEST(Helper.OSR_CONSTRUCTOR, Helper.OSR_CONSTRUCTOR_CALLABLE, true), /** OSR method test case */ - OSR_METOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true), + OSR_METHOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true), /** OSR static method test case */ OSR_STATIC_TEST(Helper.OSR_STATIC, Helper.OSR_STATIC_CALLABLE, true); @@ -494,7 +512,7 @@ = new Callable() { @Override public Integer call() throws Exception { - return new Helper(null).hashCode(); + return new Helper(null, CompilerWhiteBoxTest.BACKEDGE_THRESHOLD).hashCode(); } }; @@ -504,7 +522,7 @@ @Override public Integer call() throws Exception { - return helper.osrMethod(); + return helper.osrMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; @@ -512,7 +530,7 @@ = new Callable() { @Override public Integer call() throws Exception { - return osrStaticMethod(); + return osrStaticMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; @@ -532,25 +550,24 @@ } try { OSR_CONSTRUCTOR = Helper.class.getDeclaredConstructor( - Object.class); + Object.class, long.class); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( - "exception on getting method Helper.(Object)", e); + "exception on getting method Helper.(Object, long)", e); } METHOD = getMethod("method"); STATIC = getMethod("staticMethod"); - OSR_METHOD = getMethod("osrMethod"); - OSR_STATIC = getMethod("osrStaticMethod"); + OSR_METHOD = getMethod("osrMethod", long.class); + OSR_STATIC = getMethod("osrStaticMethod", long.class); } - private static Method getMethod(String name) { + private static Method getMethod(String name, Class... parameterTypes) { try { - return Helper.class.getDeclaredMethod(name); + return Helper.class.getDeclaredMethod(name, parameterTypes); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( "exception on getting method Helper." + name, e); } - } private static int staticMethod() { @@ -561,17 +578,84 @@ return 42; } - private static int osrStaticMethod() { + /** + * Deoptimizes all non-osr versions of the given executable after + * compilation finished. + * + * @param e Executable + * @throws Exception + */ + private static void waitAndDeoptimize(Executable e) { + CompilerWhiteBoxTest.waitBackgroundCompilation(e); + if (WhiteBox.getWhiteBox().isMethodQueuedForCompilation(e)) { + throw new RuntimeException(e + " must not be in queue"); + } + // Deoptimize non-osr versions of executable + WhiteBox.getWhiteBox().deoptimizeMethod(e, false); + } + + /** + * Executes the method multiple times to make sure we have + * enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. 
+ * + * @param m Method to be executed + * @return Number of times the method was executed + * @throws Exception + */ + private static int warmup(Method m) throws Exception { + waitAndDeoptimize(m); + Helper helper = new Helper(); int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) { + result += (int)m.invoke(helper, 1); + } + // Wait to make sure OSR compilation is not blocked by + // non-OSR compilation in the compile queue + CompilerWhiteBoxTest.waitBackgroundCompilation(m); + return result; + } + + /** + * Executes the constructor multiple times to make sure we + * have enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. + * + * @param c Constructor to be executed + * @return Number of times the constructor was executed + * @throws Exception + */ + private static int warmup(Constructor c) throws Exception { + waitAndDeoptimize(c); + int result = 0; + for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) { + result += c.newInstance(null, 1).hashCode(); + } + // Wait to make sure OSR compilation is not blocked by + // non-OSR compilation in the compile queue + CompilerWhiteBoxTest.waitBackgroundCompilation(c); + return result; + } + + private static int osrStaticMethod(long limit) throws Exception { + int result = 0; + if (limit != 1) { + result = warmup(OSR_STATIC); + } + // Trigger osr compilation + for (long i = 0; i < limit; ++i) { result += staticMethod(); } return result; } - private int osrMethod() { + private int osrMethod(long limit) throws Exception { int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + if (limit != 1) { + result = warmup(OSR_METHOD); + } + // Trigger osr compilation + for (long i = 0; i < limit; ++i) { result += method(); } return result; @@ -585,9 +669,13 @@ } // for OSR constructor test case - private Helper(Object o) { + private Helper(Object o, long limit) throws Exception { int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + if (limit != 1) { + result = warmup(OSR_CONSTRUCTOR); + } + // Trigger osr compilation + for (long i = 0; i < limit; ++i) { result += method(); } x = result; diff --git a/test/compiler/whitebox/MakeMethodNotCompilableTest.java b/test/compiler/whitebox/MakeMethodNotCompilableTest.java --- a/test/compiler/whitebox/MakeMethodNotCompilableTest.java +++ b/test/compiler/whitebox/MakeMethodNotCompilableTest.java @@ -131,14 +131,15 @@ throw new RuntimeException(method + " is not compilable after clearMethodState()"); } - + // Make method not (OSR-)compilable (depending on testCase.isOsr()) makeNotCompilable(); if (isCompilable()) { throw new RuntimeException(method + " must be not compilable"); } - + // Try to (OSR-)compile method compile(); - checkNotCompiled(); + // Method should not be (OSR-)compiled + checkNotCompiled(testCase.isOsr()); if (isCompilable()) { throw new RuntimeException(method + " must be not compilable"); } diff --git a/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java b/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java --- a/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java +++ b/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java @@ -30,6 +30,7 @@ import java.util.function.Function; import java.util.stream.Stream; import java.security.BasicPermission; +import java.net.URL; import sun.hotspot.parser.DiagnosticCommand; @@ -84,6 +85,11 @@ } private native boolean 
isClassAlive0(String name); + // Resource/Class Lookup Cache + public native boolean classKnownToNotExist(ClassLoader loader, String name); + public native URL[] getLookupCacheURLs(ClassLoader loader); + public native int[] getLookupCacheMatches(ClassLoader loader, String name); + // G1 public native boolean g1InConcurrentMark(); public native boolean g1IsHumongous(Object o); # HG changeset patch # User thartmann # Date 1414670610 -3600 # Thu Oct 30 13:03:30 2014 +0100 # Node ID 3c87c13918fb5e4e918f55fc06faca0eb736951d # Parent d4562805b0304240e47386dede429562f691cc60 8061817: Whitebox.deoptimizeMethod() does not deoptimize all OSR versions of method Summary: Fixed Whitebox.deoptimizeMethod() to deoptimize all OSR versions of the method. Reviewed-by: kvn, iignatyev diff --git a/src/share/vm/oops/instanceKlass.cpp b/src/share/vm/oops/instanceKlass.cpp --- a/src/share/vm/oops/instanceKlass.cpp +++ b/src/share/vm/oops/instanceKlass.cpp @@ -2890,6 +2890,22 @@ OsrList_lock->unlock(); } +int InstanceKlass::mark_osr_nmethods(const Method* m) { + // This is a short non-blocking critical region, so the no safepoint check is ok. + MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); + nmethod* osr = osr_nmethods_head(); + int found = 0; + while (osr != NULL) { + assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); + if (osr->method() == m) { + osr->mark_for_deoptimization(); + found++; + } + osr = osr->osr_link(); + } + return found; +} + nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const { // This is a short non-blocking critical region, so the no safepoint check is ok. OsrList_lock->lock_without_safepoint_check(); diff --git a/src/share/vm/oops/instanceKlass.hpp b/src/share/vm/oops/instanceKlass.hpp --- a/src/share/vm/oops/instanceKlass.hpp +++ b/src/share/vm/oops/instanceKlass.hpp @@ -783,6 +783,7 @@ void set_osr_nmethods_head(nmethod* h) { _osr_nmethods_head = h; }; void add_osr_nmethod(nmethod* n); void remove_osr_nmethod(nmethod* n); + int mark_osr_nmethods(const Method* m); nmethod* lookup_osr_nmethod(const Method* m, int bci, int level, bool match_level) const; // Breakpoint support (see methods on Method* for details) diff --git a/src/share/vm/oops/method.hpp b/src/share/vm/oops/method.hpp --- a/src/share/vm/oops/method.hpp +++ b/src/share/vm/oops/method.hpp @@ -793,6 +793,10 @@ return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL; } + int mark_osr_nmethods() { + return method_holder()->mark_osr_nmethods(this); + } + nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) { return method_holder()->lookup_osr_nmethod(this, bci, level, match_level); } diff --git a/src/share/vm/prims/whitebox.cpp b/src/share/vm/prims/whitebox.cpp --- a/src/share/vm/prims/whitebox.cpp +++ b/src/share/vm/prims/whitebox.cpp @@ -404,19 +404,10 @@ CHECK_JNI_EXCEPTION_(env, result); MutexLockerEx mu(Compile_lock); methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); - nmethod* code; if (is_osr) { - int bci = InvocationEntryBci; - while ((code = mh->lookup_osr_nmethod_for(bci, CompLevel_none, false)) != NULL) { - code->mark_for_deoptimization(); - ++result; - bci = code->osr_entry_bci() + 1; - } - } else { - code = mh->code(); - } - if (code != NULL) { - code->mark_for_deoptimization(); + result += mh->mark_osr_nmethods(); + } else if (mh->code() != NULL) { + mh->code()->mark_for_deoptimization(); ++result; } result += CodeCache::mark_for_deoptimization(mh()); 
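For illustration only, a minimal sketch (not taken from the patch) of the contract the whitebox.cpp change above establishes: after delegating to the new InstanceKlass::mark_osr_nmethods(), a single deoptimizeMethod(m, true) call is expected to mark every OSR nmethod of m for deoptimization, not just the one found per osr_entry_bci lookup. The sketch uses only WhiteBox calls that already appear in this series (deoptimizeMethod and isMethodCompiled); the class and method names are hypothetical, and the VM is assumed to be started with -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI. The actual regression test, DeoptimizeMultipleOSRTest, follows in the next hunk.

import java.lang.reflect.Method;
import sun.hotspot.WhiteBox;

class OsrDeoptContractSketch {
    private static final WhiteBox WB = WhiteBox.getWhiteBox();

    // Assumes 'm' has already been OSR-compiled at one or more bcis,
    // e.g. by running its loops past the OSR threshold as the test below does.
    static void assertAllOsrVersionsDeoptimized(Method m) {
        WB.deoptimizeMethod(m, true);        // true: deoptimize the OSR versions of m
        if (WB.isMethodCompiled(m, true)) {  // true: query OSR code only
            throw new AssertionError(m + ": an OSR compiled version survived deoptimization");
        }
    }
}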
diff --git a/test/compiler/whitebox/DeoptimizeMultipleOSRTest.java b/test/compiler/whitebox/DeoptimizeMultipleOSRTest.java new file mode 100644 --- /dev/null +++ b/test/compiler/whitebox/DeoptimizeMultipleOSRTest.java @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +import sun.hotspot.WhiteBox; +import java.lang.reflect.Executable; +import java.lang.reflect.Method; + +/* + * @test DeoptimizeMultipleOSRTest + * @bug 8061817 + * @library /testlibrary /testlibrary/whitebox + * @build DeoptimizeMultipleOSRTest + * @run main ClassFileInstaller sun.hotspot.WhiteBox + * sun.hotspot.WhiteBox$WhiteBoxPermission + * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI -XX:CompileCommand=compileonly,DeoptimizeMultipleOSRTest::triggerOSR DeoptimizeMultipleOSRTest + * @summary testing of WB::deoptimizeMethod() + */ +public class DeoptimizeMultipleOSRTest { + private static final WhiteBox WHITE_BOX = WhiteBox.getWhiteBox(); + private static final long BACKEDGE_THRESHOLD = 150000; + private Method method; + private int counter = 0; + + public static void main(String[] args) throws Exception { + DeoptimizeMultipleOSRTest test = new DeoptimizeMultipleOSRTest(); + test.test(); + } + + /** + * Triggers two different OSR compilations for the same method and + * checks if WhiteBox.deoptimizeMethod() deoptimizes both. + * + * @throws Exception + */ + public void test() throws Exception { + method = DeoptimizeMultipleOSRTest.class.getDeclaredMethod("triggerOSR", boolean.class, long.class); + // Trigger two OSR compiled versions + triggerOSR(true, BACKEDGE_THRESHOLD); + triggerOSR(false, BACKEDGE_THRESHOLD); + // Wait for compilation + CompilerWhiteBoxTest.waitBackgroundCompilation(method); + // Deoptimize + WHITE_BOX.deoptimizeMethod(method, true); + if (WHITE_BOX.isMethodCompiled(method, true)) { + throw new AssertionError("Not all OSR compiled versions were deoptimized"); + } + } + + /** + * Triggers OSR compilations by executing loops. 
+ * + * @param first Determines which loop to execute + * @param limit The number of loop iterations + */ + public void triggerOSR(boolean first, long limit) { + if (limit != 1) { + // Warmup method to avoid uncommon traps + for (int i = 0; i < limit; ++i) { + triggerOSR(first, 1); + } + CompilerWhiteBoxTest.waitBackgroundCompilation(method); + } + if (first) { + // Trigger OSR compilation 1 + for (int i = 0; i < limit; ++i) { + counter++; + } + } else { + // Trigger OSR compilation 2 + for (int i = 0; i < limit; ++i) { + counter++; + } + } + } +} # HG changeset patch # User vkempik # Date 1414507294 -14400 # Tue Oct 28 18:41:34 2014 +0400 # Node ID 6f06ebb090805c6eb961ada13d05ccd67c34f78b # Parent 3c87c13918fb5e4e918f55fc06faca0eb736951d 8059216: Make PrintGCApplicationStoppedTime print information about stopping threads Reviewed-by: dholmes, brutisso diff --git a/src/share/vm/services/runtimeService.cpp b/src/share/vm/services/runtimeService.cpp --- a/src/share/vm/services/runtimeService.cpp +++ b/src/share/vm/services/runtimeService.cpp @@ -46,6 +46,7 @@ PerfCounter* RuntimeService::_thread_interrupt_signaled_count = NULL; PerfCounter* RuntimeService::_interrupted_before_count = NULL; PerfCounter* RuntimeService::_interrupted_during_count = NULL; +double RuntimeService::_last_safepoint_sync_time_sec = 0.0; void RuntimeService::init() { // Make sure the VM version is initialized @@ -128,6 +129,7 @@ // update the time stamp to begin recording safepoint time _safepoint_timer.update(); + _last_safepoint_sync_time_sec = 0.0; if (UsePerfData) { _total_safepoints->inc(); if (_app_timer.is_updated()) { @@ -140,6 +142,9 @@ if (UsePerfData) { _sync_time_ticks->inc(_safepoint_timer.ticks_since_update()); } + if (PrintGCApplicationStoppedTime) { + _last_safepoint_sync_time_sec = last_safepoint_time_sec(); + } } void RuntimeService::record_safepoint_end() { @@ -155,8 +160,10 @@ gclog_or_tty->date_stamp(PrintGCDateStamps); gclog_or_tty->stamp(PrintGCTimeStamps); gclog_or_tty->print_cr("Total time for which application threads " - "were stopped: %3.7f seconds", - last_safepoint_time_sec()); + "were stopped: %3.7f seconds, " + "Stopping threads took: %3.7f seconds", + last_safepoint_time_sec(), + _last_safepoint_sync_time_sec); } // update the time stamp to begin recording app time diff --git a/src/share/vm/services/runtimeService.hpp b/src/share/vm/services/runtimeService.hpp --- a/src/share/vm/services/runtimeService.hpp +++ b/src/share/vm/services/runtimeService.hpp @@ -40,6 +40,7 @@ static TimeStamp _safepoint_timer; static TimeStamp _app_timer; + static double _last_safepoint_sync_time_sec; public: static void init(); # HG changeset patch # User aeriksso # Date 1413979196 -7200 # Wed Oct 22 13:59:56 2014 +0200 # Node ID 50054b63f0aa0ec0c63c191356ebd2c91927a7a8 # Parent 3c87c13918fb5e4e918f55fc06faca0eb736951d 8057043: Type annotations not retained during class redefine / retransform Reviewed-by: coleenp, sspitsyn, jfranck diff --git a/src/share/vm/prims/jvmtiClassFileReconstituter.cpp b/src/share/vm/prims/jvmtiClassFileReconstituter.cpp --- a/src/share/vm/prims/jvmtiClassFileReconstituter.cpp +++ b/src/share/vm/prims/jvmtiClassFileReconstituter.cpp @@ -54,6 +54,7 @@ void JvmtiClassFileReconstituter::write_field_infos() { HandleMark hm(thread()); Array* fields_anno = ikh()->fields_annotations(); + Array* fields_type_anno = ikh()->fields_type_annotations(); // Compute the real number of Java fields int java_fields = ikh()->java_fields_count(); @@ -68,6 +69,7 @@ // int offset = ikh()->field_offset( 
index ); int generic_signature_index = fs.generic_signature_index(); AnnotationArray* anno = fields_anno == NULL ? NULL : fields_anno->at(fs.index()); + AnnotationArray* type_anno = fields_type_anno == NULL ? NULL : fields_type_anno->at(fs.index()); // JVMSpec| field_info { // JVMSpec| u2 access_flags; @@ -93,6 +95,9 @@ if (anno != NULL) { ++attr_count; // has RuntimeVisibleAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } write_u2(attr_count); @@ -110,6 +115,9 @@ if (anno != NULL) { write_annotations_attribute("RuntimeVisibleAnnotations", anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } } } @@ -550,6 +558,7 @@ AnnotationArray* anno = method->annotations(); AnnotationArray* param_anno = method->parameter_annotations(); AnnotationArray* default_anno = method->annotation_default(); + AnnotationArray* type_anno = method->type_annotations(); // skip generated default interface methods if (method->is_overpass()) { @@ -585,6 +594,9 @@ if (param_anno != NULL) { ++attr_count; // has RuntimeVisibleParameterAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } write_u2(attr_count); if (const_method->code_size() > 0) { @@ -609,6 +621,9 @@ if (param_anno != NULL) { write_annotations_attribute("RuntimeVisibleParameterAnnotations", param_anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } } // Write the class attributes portion of ClassFile structure @@ -618,6 +633,7 @@ u2 inner_classes_length = inner_classes_attribute_length(); Symbol* generic_signature = ikh()->generic_signature(); AnnotationArray* anno = ikh()->class_annotations(); + AnnotationArray* type_anno = ikh()->class_type_annotations(); int attr_count = 0; if (generic_signature != NULL) { @@ -635,6 +651,9 @@ if (anno != NULL) { ++attr_count; // has RuntimeVisibleAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } if (cpool()->operands() != NULL) { ++attr_count; } @@ -656,6 +675,9 @@ if (anno != NULL) { write_annotations_attribute("RuntimeVisibleAnnotations", anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } if (cpool()->operands() != NULL) { write_bootstrapmethod_attribute(); } diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp --- a/src/share/vm/prims/jvmtiRedefineClasses.cpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp @@ -1569,6 +1569,29 @@ return false; } + // rewrite constant pool references in the class_type_annotations: + if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // rewrite constant pool references in the fields_type_annotations: + if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // rewrite constant pool references in the methods_type_annotations: + if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // There can be type annotations in the Code part of a method_info attribute. + // These annotations are not accessible, even by reflection. + // Currently they are not even parsed by the ClassFileParser. 
+ // If runtime access is added they will also need to be rewritten. + // rewrite source file name index: u2 source_file_name_idx = scratch_class->source_file_name_index(); if (source_file_name_idx != 0) { @@ -2239,6 +2262,588 @@ } // end rewrite_cp_refs_in_methods_default_annotations() +// Rewrite constant pool references in a class_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + AnnotationArray* class_type_annotations = scratch_class->class_type_annotations(); + if (class_type_annotations == NULL || class_type_annotations->length() == 0) { + // no class_type_annotations so nothing to do + return true; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("class_type_annotations length=%d", class_type_annotations->length())); + + int byte_i = 0; // byte index into class_type_annotations + return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations, + byte_i, "ClassFile", THREAD); +} // end rewrite_cp_refs_in_class_type_annotations() + + +// Rewrite constant pool references in a fields_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + Array* fields_type_annotations = scratch_class->fields_type_annotations(); + if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) { + // no fields_type_annotations so nothing to do + return true; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("fields_type_annotations length=%d", fields_type_annotations->length())); + + for (int i = 0; i < fields_type_annotations->length(); i++) { + AnnotationArray* field_type_annotations = fields_type_annotations->at(i); + if (field_type_annotations == NULL || field_type_annotations->length() == 0) { + // this field does not have any annotations so skip it + continue; + } + + int byte_i = 0; // byte index into field_type_annotations + if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations, + byte_i, "field_info", THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad field_type_annotations at %d", i)); + // propagate failure back to caller + return false; + } + } + + return true; +} // end rewrite_cp_refs_in_fields_type_annotations() + + +// Rewrite constant pool references in a methods_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + for (int i = 0; i < scratch_class->methods()->length(); i++) { + Method* m = scratch_class->methods()->at(i); + AnnotationArray* method_type_annotations = m->constMethod()->type_annotations(); + + if (method_type_annotations == NULL || method_type_annotations->length() == 0) { + // this method does not have any annotations so skip it + continue; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("methods type_annotations length=%d", method_type_annotations->length())); + + int byte_i = 0; // byte index into method_type_annotations + if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations, + byte_i, "method_info", THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad method_type_annotations at %d", i)); + // propagate failure back to caller + return false; + } + } + + return true; +} // end rewrite_cp_refs_in_methods_type_annotations() + + +// Rewrite constant pool references in a type_annotations +// field. 
This "structure" is adapted from the +// RuntimeVisibleTypeAnnotations_attribute described in +// section 4.7.20 of the Java SE 8 Edition of the VM spec: +// +// type_annotations_typeArray { +// u2 num_annotations; +// type_annotation annotations[num_annotations]; +// } +// +bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + // not enough room for num_annotations field + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for num_annotations field")); + return false; + } + + u2 num_annotations = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("num_type_annotations=%d", num_annotations)); + + int calc_num_annotations = 0; + for (; calc_num_annotations < num_annotations; calc_num_annotations++) { + if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray, + byte_i_ref, location_mesg, THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad type_annotation_struct at %d", calc_num_annotations)); + // propagate failure back to caller + return false; + } + } + assert(num_annotations == calc_num_annotations, "sanity check"); + + if (byte_i_ref != type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("read wrong amount of bytes at end of processing " + "type_annotations_typeArray (%d of %d bytes were read)", + byte_i_ref, type_annotations_typeArray->length())); + return false; + } + + return true; +} // end rewrite_cp_refs_in_type_annotations_typeArray() + + +// Rewrite constant pool references in a type_annotation +// field. This "structure" is adapted from the +// RuntimeVisibleTypeAnnotations_attribute described in +// section 4.7.20 of the Java SE 8 Edition of the VM spec: +// +// type_annotation { +// u1 target_type; +// union { +// type_parameter_target; +// supertype_target; +// type_parameter_bound_target; +// empty_target; +// method_formal_parameter_target; +// throws_target; +// localvar_target; +// catch_target; +// offset_target; +// type_argument_target; +// } target_info; +// type_path target_path; +// annotation anno; +// } +// +bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if (!skip_type_annotation_target(type_annotations_typeArray, + byte_i_ref, location_mesg, THREAD)) { + return false; + } + + if (!skip_type_annotation_type_path(type_annotations_typeArray, + byte_i_ref, THREAD)) { + return false; + } + + if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray, + byte_i_ref, THREAD)) { + return false; + } + + return true; +} // end rewrite_cp_refs_in_type_annotation_struct() + + +// Read, verify and skip over the target_type and target_info part +// so that rewriting can continue in the later parts of the struct. 
+// +// u1 target_type; +// union { +// type_parameter_target; +// supertype_target; +// type_parameter_bound_target; +// empty_target; +// method_formal_parameter_target; +// throws_target; +// localvar_target; +// catch_target; +// offset_target; +// type_argument_target; +// } target_info; +// +bool VM_RedefineClasses::skip_type_annotation_target( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + // not enough room for a target_type let alone the rest of a type_annotation + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a target_type")); + return false; + } + + u1 target_type = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("target_type=0x%.2x", target_type)); + RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("location=%s", location_mesg)); + + // Skip over target_info + switch (target_type) { + case 0x00: + // kind: type parameter declaration of generic class or interface + // location: ClassFile + case 0x01: + // kind: type parameter declaration of generic method or constructor + // location: method_info + + { + // struct: + // type_parameter_target { + // u1 type_parameter_index; + // } + // + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_parameter_target")); + return false; + } + + u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_parameter_target: type_parameter_index=%d", + type_parameter_index)); + } break; + + case 0x10: + // kind: type in extends clause of class or interface declaration + // (including the direct superclass of an anonymous class declaration), + // or in implements clause of interface declaration + // location: ClassFile + + { + // struct: + // supertype_target { + // u2 supertype_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a supertype_target")); + return false; + } + + u2 supertype_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("supertype_target: supertype_index=%d", supertype_index)); + } break; + + case 0x11: + // kind: type in bound of type parameter declaration of generic class or interface + // location: ClassFile + case 0x12: + // kind: type in bound of type parameter declaration of generic method or constructor + // location: method_info + + { + // struct: + // type_parameter_bound_target { + // u1 type_parameter_index; + // u1 bound_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_parameter_bound_target")); + return false; + } + + u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + u1 bound_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d", + type_parameter_index, bound_index)); + } break; + + case 0x13: + // kind: type in field declaration + // location: field_info + case 0x14: + // kind: return type of method, or type of newly constructed object + // location: 
method_info + case 0x15: + // kind: receiver type of method or constructor + // location: method_info + + { + // struct: + // empty_target { + // } + // + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("empty_target")); + } break; + + case 0x16: + // kind: type in formal parameter declaration of method, constructor, or lambda expression + // location: method_info + + { + // struct: + // formal_parameter_target { + // u1 formal_parameter_index; + // } + // + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a formal_parameter_target")); + return false; + } + + u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("formal_parameter_target: formal_parameter_index=%d", + formal_parameter_index)); + } break; + + case 0x17: + // kind: type in throws clause of method or constructor + // location: method_info + + { + // struct: + // throws_target { + // u2 throws_type_index + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a throws_target")); + return false; + } + + u2 throws_type_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("throws_target: throws_type_index=%d", throws_type_index)); + } break; + + case 0x40: + // kind: type in local variable declaration + // location: Code + case 0x41: + // kind: type in resource variable declaration + // location: Code + + { + // struct: + // localvar_target { + // u2 table_length; + // struct { + // u2 start_pc; + // u2 length; + // u2 index; + // } table[table_length]; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + // not enough room for a table_length let alone the rest of a localvar_target + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a localvar_target table_length")); + return false; + } + + u2 table_length = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("localvar_target: table_length=%d", table_length)); + + int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry + int table_size = table_length * table_struct_size; + + if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) { + // not enough room for a table + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a table array of length %d", table_length)); + return false; + } + + // Skip over table + byte_i_ref += table_size; + } break; + + case 0x42: + // kind: type in exception parameter declaration + // location: Code + + { + // struct: + // catch_target { + // u2 exception_table_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a catch_target")); + return false; + } + + u2 exception_table_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("catch_target: exception_table_index=%d", exception_table_index)); + } break; + + case 0x43: + // kind: type in instanceof expression + // location: Code + case 0x44: + // kind: type in new expression + // location: Code + case 0x45: + // kind: type in method reference expression using 
::new + // location: Code + case 0x46: + // kind: type in method reference expression using ::Identifier + // location: Code + + { + // struct: + // offset_target { + // u2 offset; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a offset_target")); + return false; + } + + u2 offset = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("offset_target: offset=%d", offset)); + } break; + + case 0x47: + // kind: type in cast expression + // location: Code + case 0x48: + // kind: type argument for generic constructor in new expression or + // explicit constructor invocation statement + // location: Code + case 0x49: + // kind: type argument for generic method in method invocation expression + // location: Code + case 0x4A: + // kind: type argument for generic constructor in method reference expression using ::new + // location: Code + case 0x4B: + // kind: type argument for generic method in method reference expression using ::Identifier + // location: Code + + { + // struct: + // type_argument_target { + // u2 offset; + // u1 type_argument_index; + // } + // + if ((byte_i_ref + 3) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_argument_target")); + return false; + } + + u2 offset = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_argument_target: offset=%d, type_argument_index=%d", + offset, type_argument_index)); + } break; + + default: + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("unknown target_type")); +#ifdef ASSERT + ShouldNotReachHere(); +#endif + return false; + } + + return true; +} // end skip_type_annotation_target() + + +// Read, verify and skip over the type_path part so that rewriting +// can continue in the later parts of the struct. 
+// +// type_path { +// u1 path_length; +// { +// u1 type_path_kind; +// u1 type_argument_index; +// } path[path_length]; +// } +// +bool VM_RedefineClasses::skip_type_annotation_type_path( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) { + + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + // not enough room for a path_length let alone the rest of the type_path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_path")); + return false; + } + + u1 path_length = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_path: path_length=%d", path_length)); + + int calc_path_length = 0; + for (; calc_path_length < path_length; calc_path_length++) { + if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) { + // not enough room for a path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for path entry %d of %d", + calc_path_length, path_length)); + return false; + } + + u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d", + calc_path_length, type_path_kind, type_argument_index)); + + if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) { + // not enough room for a path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("inconsistent type_path values")); + return false; + } + } + assert(path_length == calc_path_length, "sanity check"); + + return true; +} // end skip_type_annotation_type_path() + + // Rewrite constant pool references in the method's stackmap table. // These "structures" are adapted from the StackMapTable_attribute that // is described in section 4.8.4 of the 6.0 version of the VM spec @@ -3223,23 +3828,6 @@ void VM_RedefineClasses::swap_annotations(instanceKlassHandle the_class, instanceKlassHandle scratch_class) { - // Since there is currently no rewriting of type annotations indexes - // into the CP, we null out type annotations on scratch_class before - // we swap annotations with the_class rather than facing the - // possibility of shipping annotations with broken indexes to - // Java-land. 
- ClassLoaderData* loader_data = scratch_class->class_loader_data(); - AnnotationArray* new_class_type_annotations = scratch_class->class_type_annotations(); - if (new_class_type_annotations != NULL) { - MetadataFactory::free_array(loader_data, new_class_type_annotations); - scratch_class->annotations()->set_class_type_annotations(NULL); - } - Array* new_field_type_annotations = scratch_class->fields_type_annotations(); - if (new_field_type_annotations != NULL) { - Annotations::free_contents(loader_data, new_field_type_annotations); - scratch_class->annotations()->set_fields_type_annotations(NULL); - } - // Swap annotation fields values Annotations* old_annotations = the_class->annotations(); the_class->set_annotations(scratch_class->annotations()); diff --git a/src/share/vm/prims/jvmtiRedefineClasses.hpp b/src/share/vm/prims/jvmtiRedefineClasses.hpp --- a/src/share/vm/prims/jvmtiRedefineClasses.hpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp @@ -457,6 +457,17 @@ instanceKlassHandle scratch_class, TRAPS); bool rewrite_cp_refs_in_element_value( AnnotationArray* class_annotations, int &byte_i_ref, TRAPS); + bool rewrite_cp_refs_in_type_annotations_typeArray( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool rewrite_cp_refs_in_type_annotation_struct( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool skip_type_annotation_target( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool skip_type_annotation_type_path( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS); bool rewrite_cp_refs_in_fields_annotations( instanceKlassHandle scratch_class, TRAPS); void rewrite_cp_refs_in_method(methodHandle method, @@ -468,6 +479,12 @@ instanceKlassHandle scratch_class, TRAPS); bool rewrite_cp_refs_in_methods_parameter_annotations( instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_class_type_annotations( + instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_fields_type_annotations( + instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_methods_type_annotations( + instanceKlassHandle scratch_class, TRAPS); void rewrite_cp_refs_in_stack_map_table(methodHandle method, TRAPS); void rewrite_cp_refs_in_verification_type_info( address& stackmap_addr_ref, address stackmap_end, u2 frame_i, diff --git a/test/runtime/RedefineTests/RedefineAnnotations.java b/test/runtime/RedefineTests/RedefineAnnotations.java new file mode 100644 --- /dev/null +++ b/test/runtime/RedefineTests/RedefineAnnotations.java @@ -0,0 +1,410 @@ +/* + * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). 
+ * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +/* + * @test + * @library /testlibrary + * @summary Test that type annotations are retained after a retransform + * @run main RedefineAnnotations buildagent + * @run main/othervm -javaagent:redefineagent.jar RedefineAnnotations + */ + +import static com.oracle.java.testlibrary.Asserts.assertTrue; +import java.io.FileNotFoundException; +import java.io.PrintWriter; +import java.lang.NoSuchFieldException; +import java.lang.NoSuchMethodException; +import java.lang.RuntimeException; +import java.lang.annotation.Annotation; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.lang.instrument.ClassFileTransformer; +import java.lang.instrument.IllegalClassFormatException; +import java.lang.instrument.Instrumentation; +import java.lang.instrument.UnmodifiableClassException; +import java.lang.reflect.AnnotatedArrayType; +import java.lang.reflect.AnnotatedParameterizedType; +import java.lang.reflect.AnnotatedType; +import java.lang.reflect.AnnotatedWildcardType; +import java.lang.reflect.Executable; +import java.lang.reflect.TypeVariable; +import java.security.ProtectionDomain; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import jdk.internal.org.objectweb.asm.ClassReader; +import jdk.internal.org.objectweb.asm.ClassVisitor; +import jdk.internal.org.objectweb.asm.ClassWriter; +import jdk.internal.org.objectweb.asm.FieldVisitor; +import static jdk.internal.org.objectweb.asm.Opcodes.ASM5; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE_USE) +@interface TestAnn { + String site(); +} + +public class RedefineAnnotations { + static Instrumentation inst; + public static void premain(String agentArgs, Instrumentation inst) { + RedefineAnnotations.inst = inst; + } + + static class Transformer implements ClassFileTransformer { + + public byte[] asm(ClassLoader loader, String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, byte[] classfileBuffer) + throws IllegalClassFormatException { + + ClassWriter cw = new ClassWriter(0); + ClassVisitor cv = new ReAddDummyFieldsClassVisitor(ASM5, cw) { }; + ClassReader cr = new ClassReader(classfileBuffer); + cr.accept(cv, 0); + return cw.toByteArray(); + } + + public class ReAddDummyFieldsClassVisitor extends ClassVisitor { + + LinkedList fields = new LinkedList<>(); + + public ReAddDummyFieldsClassVisitor(int api, ClassVisitor cv) { + super(api, cv); + } + + @Override public FieldVisitor visitField(int access, String name, + String desc, String signature, Object value) { + if (name.startsWith("dummy")) { + // Remove dummy field + fields.addLast(new F(access, name, desc, signature, value)); + return null; + } + return cv.visitField(access, name, desc, signature, value); + } + + @Override public void visitEnd() { + F f; + while ((f = fields.pollFirst()) != null) { + // Re-add dummy fields + cv.visitField(f.access, f.name, f.desc, f.signature, f.value); + } + } + + private class F { + private int access; + private String name; + private String desc; + 
private String signature; + private Object value; + F(int access, String name, String desc, String signature, Object value) { + this.access = access; + this.name = name; + this.desc = desc; + this.signature = signature; + this.value = value; + } + } + } + + @Override public byte[] transform(ClassLoader loader, String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, byte[] classfileBuffer) + throws IllegalClassFormatException { + + if (className.contains("TypeAnnotatedTestClass")) { + try { + // Here we remove and re-add the dummy fields. This shuffles the constant pool + return asm(loader, className, classBeingRedefined, protectionDomain, classfileBuffer); + } catch (Throwable e) { + // The retransform native code that called this method does not propagate + // exceptions. Instead of getting an uninformative generic error, catch + // problems here and print it, then exit. + e.printStackTrace(); + System.exit(1); + } + } + return null; + } + } + + private static void buildAgent() { + try { + ClassFileInstaller.main("RedefineAnnotations"); + } catch (Exception e) { + throw new RuntimeException("Could not write agent classfile", e); + } + + try { + PrintWriter pw = new PrintWriter("MANIFEST.MF"); + pw.println("Premain-Class: RedefineAnnotations"); + pw.println("Agent-Class: RedefineAnnotations"); + pw.println("Can-Retransform-Classes: true"); + pw.close(); + } catch (FileNotFoundException e) { + throw new RuntimeException("Could not write manifest file for the agent", e); + } + + sun.tools.jar.Main jarTool = new sun.tools.jar.Main(System.out, System.err, "jar"); + if (!jarTool.run(new String[] { "-cmf", "MANIFEST.MF", "redefineagent.jar", "RedefineAnnotations.class" })) { + throw new RuntimeException("Could not write the agent jar file"); + } + } + + public static void main(String argv[]) throws NoSuchFieldException, NoSuchMethodException { + if (argv.length == 1 && argv[0].equals("buildagent")) { + buildAgent(); + return; + } + + if (inst == null) { + throw new RuntimeException("Instrumentation object was null"); + } + + RedefineAnnotations test = new RedefineAnnotations(); + test.testTransformAndVerify(); + } + + // Class type annotations + private Annotation classTypeParameterTA; + private Annotation extendsTA; + private Annotation implementsTA; + + // Field type annotations + private Annotation fieldTA; + private Annotation innerTA; + private Annotation[] arrayTA = new Annotation[4]; + private Annotation[] mapTA = new Annotation[5]; + + // Method type annotations + private Annotation returnTA, methodTypeParameterTA, formalParameterTA, throwsTA; + + private void testTransformAndVerify() + throws NoSuchFieldException, NoSuchMethodException { + + Class c = TypeAnnotatedTestClass.class; + Class myClass = c; + + /* + * Verify that the expected annotations are where they should be before transform. + */ + verifyClassTypeAnnotations(c); + verifyFieldTypeAnnotations(c); + verifyMethodTypeAnnotations(c); + + try { + inst.addTransformer(new Transformer(), true); + inst.retransformClasses(myClass); + } catch (UnmodifiableClassException e) { + throw new RuntimeException(e); + } + + /* + * Verify that the expected annotations are where they should be after transform. + * Also verify that before and after are equal. 
+ */ + verifyClassTypeAnnotations(c); + verifyFieldTypeAnnotations(c); + verifyMethodTypeAnnotations(c); + } + + private void verifyClassTypeAnnotations(Class c) { + Annotation anno; + + anno = c.getTypeParameters()[0].getAnnotations()[0]; + verifyTestAnn(classTypeParameterTA, anno, "classTypeParameter"); + classTypeParameterTA = anno; + + anno = c.getAnnotatedSuperclass().getAnnotations()[0]; + verifyTestAnn(extendsTA, anno, "extends"); + extendsTA = anno; + + anno = c.getAnnotatedInterfaces()[0].getAnnotations()[0]; + verifyTestAnn(implementsTA, anno, "implements"); + implementsTA = anno; + } + + private void verifyFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + verifyBasicFieldTypeAnnotations(c); + verifyInnerFieldTypeAnnotations(c); + verifyArrayFieldTypeAnnotations(c); + verifyMapFieldTypeAnnotations(c); + } + + private void verifyBasicFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno = c.getDeclaredField("typeAnnotatedBoolean").getAnnotatedType().getAnnotations()[0]; + verifyTestAnn(fieldTA, anno, "field"); + fieldTA = anno; + } + + private void verifyInnerFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + AnnotatedType at = c.getDeclaredField("typeAnnotatedInner").getAnnotatedType(); + Annotation anno = at.getAnnotations()[0]; + verifyTestAnn(innerTA, anno, "inner"); + innerTA = anno; + } + + private void verifyArrayFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno; + AnnotatedType at; + + at = c.getDeclaredField("typeAnnotatedArray").getAnnotatedType(); + anno = at.getAnnotations()[0]; + verifyTestAnn(arrayTA[0], anno, "array1"); + arrayTA[0] = anno; + + for (int i = 1; i <= 3; i++) { + at = ((AnnotatedArrayType) at).getAnnotatedGenericComponentType(); + anno = at.getAnnotations()[0]; + verifyTestAnn(arrayTA[i], anno, "array" + (i + 1)); + arrayTA[i] = anno; + } + } + + private void verifyMapFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno; + AnnotatedType atBase; + AnnotatedType atParameter; + atBase = c.getDeclaredField("typeAnnotatedMap").getAnnotatedType(); + + anno = atBase.getAnnotations()[0]; + verifyTestAnn(mapTA[0], anno, "map1"); + mapTA[0] = anno; + + atParameter = + ((AnnotatedParameterizedType) atBase). + getAnnotatedActualTypeArguments()[0]; + anno = ((AnnotatedWildcardType) atParameter).getAnnotations()[0]; + verifyTestAnn(mapTA[1], anno, "map2"); + mapTA[1] = anno; + + anno = + ((AnnotatedWildcardType) atParameter). + getAnnotatedUpperBounds()[0].getAnnotations()[0]; + verifyTestAnn(mapTA[2], anno, "map3"); + mapTA[2] = anno; + + atParameter = + ((AnnotatedParameterizedType) atBase). + getAnnotatedActualTypeArguments()[1]; + anno = ((AnnotatedParameterizedType) atParameter).getAnnotations()[0]; + verifyTestAnn(mapTA[3], anno, "map4"); + mapTA[3] = anno; + + anno = + ((AnnotatedParameterizedType) atParameter). 
+ getAnnotatedActualTypeArguments()[0].getAnnotations()[0]; + verifyTestAnn(mapTA[4], anno, "map5"); + mapTA[4] = anno; + } + + private void verifyMethodTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + Annotation anno; + Executable typeAnnotatedMethod = + c.getDeclaredMethod("typeAnnotatedMethod", TypeAnnotatedTestClass.class); + + anno = typeAnnotatedMethod.getAnnotatedReturnType().getAnnotations()[0]; + verifyTestAnn(returnTA, anno, "return"); + returnTA = anno; + + anno = typeAnnotatedMethod.getTypeParameters()[0].getAnnotations()[0]; + verifyTestAnn(methodTypeParameterTA, anno, "methodTypeParameter"); + methodTypeParameterTA = anno; + + anno = typeAnnotatedMethod.getAnnotatedParameterTypes()[0].getAnnotations()[0]; + verifyTestAnn(formalParameterTA, anno, "formalParameter"); + formalParameterTA = anno; + + anno = typeAnnotatedMethod.getAnnotatedExceptionTypes()[0].getAnnotations()[0]; + verifyTestAnn(throwsTA, anno, "throws"); + throwsTA = anno; + } + + private static void verifyTestAnn(Annotation verifyAgainst, Annotation anno, String expectedSite) { + verifyTestAnnSite(anno, expectedSite); + + // When called before transform verifyAgainst will be null, when called + // after transform it will be the annotation from before the transform + if (verifyAgainst != null) { + assertTrue(anno.equals(verifyAgainst), + "Annotations do not match before and after." + + " Before: \"" + verifyAgainst + "\", After: \"" + anno + "\""); + } + } + + private static void verifyTestAnnSite(Annotation testAnn, String expectedSite) { + String expectedAnn = "@TestAnn(site=" + expectedSite + ")"; + assertTrue(testAnn.toString().equals(expectedAnn), + "Expected \"" + expectedAnn + "\", got \"" + testAnn + "\""); + } + + public static class TypeAnnotatedTestClass <@TestAnn(site="classTypeParameter") S,T> + extends @TestAnn(site="extends") Thread + implements @TestAnn(site="implements") Runnable { + + public @TestAnn(site="field") boolean typeAnnotatedBoolean; + + public + RedefineAnnotations. + @TestAnn(site="inner") TypeAnnotatedTestClass + typeAnnotatedInner; + + public + @TestAnn(site="array4") boolean + @TestAnn(site="array1") [] + @TestAnn(site="array2") [] + @TestAnn(site="array3") [] + typeAnnotatedArray; + + public @TestAnn(site="map1") Map + <@TestAnn(site="map2") ? 
extends @TestAnn(site="map3") String, + @TestAnn(site="map4") List<@TestAnn(site="map5") Object>> typeAnnotatedMap; + + public int dummy1; + public int dummy2; + public int dummy3; + + @TestAnn(site="return") <@TestAnn(site="methodTypeParameter") U,V> Class + typeAnnotatedMethod(@TestAnn(site="formalParameter") TypeAnnotatedTestClass arg) + throws @TestAnn(site="throws") ClassNotFoundException { + + @TestAnn(site="local_variable_type") int foo = 0; + throw new ClassNotFoundException(); + } + + public void run() {} + } +} # HG changeset patch # User brutisso # Date 1415270136 0 # Thu Nov 06 10:35:36 2014 +0000 # Node ID 5217eef2497f8d2fb9a96f17833410f0416f8e39 # Parent 6f06ebb090805c6eb961ada13d05ccd67c34f78b # Parent 50054b63f0aa0ec0c63c191356ebd2c91927a7a8 Merge diff --git a/src/share/vm/prims/jvmtiClassFileReconstituter.cpp b/src/share/vm/prims/jvmtiClassFileReconstituter.cpp --- a/src/share/vm/prims/jvmtiClassFileReconstituter.cpp +++ b/src/share/vm/prims/jvmtiClassFileReconstituter.cpp @@ -54,6 +54,7 @@ void JvmtiClassFileReconstituter::write_field_infos() { HandleMark hm(thread()); Array* fields_anno = ikh()->fields_annotations(); + Array* fields_type_anno = ikh()->fields_type_annotations(); // Compute the real number of Java fields int java_fields = ikh()->java_fields_count(); @@ -68,6 +69,7 @@ // int offset = ikh()->field_offset( index ); int generic_signature_index = fs.generic_signature_index(); AnnotationArray* anno = fields_anno == NULL ? NULL : fields_anno->at(fs.index()); + AnnotationArray* type_anno = fields_type_anno == NULL ? NULL : fields_type_anno->at(fs.index()); // JVMSpec| field_info { // JVMSpec| u2 access_flags; @@ -93,6 +95,9 @@ if (anno != NULL) { ++attr_count; // has RuntimeVisibleAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } write_u2(attr_count); @@ -110,6 +115,9 @@ if (anno != NULL) { write_annotations_attribute("RuntimeVisibleAnnotations", anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } } } @@ -550,6 +558,7 @@ AnnotationArray* anno = method->annotations(); AnnotationArray* param_anno = method->parameter_annotations(); AnnotationArray* default_anno = method->annotation_default(); + AnnotationArray* type_anno = method->type_annotations(); // skip generated default interface methods if (method->is_overpass()) { @@ -585,6 +594,9 @@ if (param_anno != NULL) { ++attr_count; // has RuntimeVisibleParameterAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } write_u2(attr_count); if (const_method->code_size() > 0) { @@ -609,6 +621,9 @@ if (param_anno != NULL) { write_annotations_attribute("RuntimeVisibleParameterAnnotations", param_anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } } // Write the class attributes portion of ClassFile structure @@ -618,6 +633,7 @@ u2 inner_classes_length = inner_classes_attribute_length(); Symbol* generic_signature = ikh()->generic_signature(); AnnotationArray* anno = ikh()->class_annotations(); + AnnotationArray* type_anno = ikh()->class_type_annotations(); int attr_count = 0; if (generic_signature != NULL) { @@ -635,6 +651,9 @@ if (anno != NULL) { ++attr_count; // has RuntimeVisibleAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } if (cpool()->operands() != NULL) { ++attr_count; } @@ -656,6 +675,9 @@ if 
(anno != NULL) { write_annotations_attribute("RuntimeVisibleAnnotations", anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } if (cpool()->operands() != NULL) { write_bootstrapmethod_attribute(); } diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp --- a/src/share/vm/prims/jvmtiRedefineClasses.cpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp @@ -1569,6 +1569,29 @@ return false; } + // rewrite constant pool references in the class_type_annotations: + if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // rewrite constant pool references in the fields_type_annotations: + if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // rewrite constant pool references in the methods_type_annotations: + if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // There can be type annotations in the Code part of a method_info attribute. + // These annotations are not accessible, even by reflection. + // Currently they are not even parsed by the ClassFileParser. + // If runtime access is added they will also need to be rewritten. + // rewrite source file name index: u2 source_file_name_idx = scratch_class->source_file_name_index(); if (source_file_name_idx != 0) { @@ -2239,6 +2262,588 @@ } // end rewrite_cp_refs_in_methods_default_annotations() +// Rewrite constant pool references in a class_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + AnnotationArray* class_type_annotations = scratch_class->class_type_annotations(); + if (class_type_annotations == NULL || class_type_annotations->length() == 0) { + // no class_type_annotations so nothing to do + return true; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("class_type_annotations length=%d", class_type_annotations->length())); + + int byte_i = 0; // byte index into class_type_annotations + return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations, + byte_i, "ClassFile", THREAD); +} // end rewrite_cp_refs_in_class_type_annotations() + + +// Rewrite constant pool references in a fields_type_annotations field. 
+bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + Array* fields_type_annotations = scratch_class->fields_type_annotations(); + if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) { + // no fields_type_annotations so nothing to do + return true; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("fields_type_annotations length=%d", fields_type_annotations->length())); + + for (int i = 0; i < fields_type_annotations->length(); i++) { + AnnotationArray* field_type_annotations = fields_type_annotations->at(i); + if (field_type_annotations == NULL || field_type_annotations->length() == 0) { + // this field does not have any annotations so skip it + continue; + } + + int byte_i = 0; // byte index into field_type_annotations + if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations, + byte_i, "field_info", THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad field_type_annotations at %d", i)); + // propagate failure back to caller + return false; + } + } + + return true; +} // end rewrite_cp_refs_in_fields_type_annotations() + + +// Rewrite constant pool references in a methods_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + for (int i = 0; i < scratch_class->methods()->length(); i++) { + Method* m = scratch_class->methods()->at(i); + AnnotationArray* method_type_annotations = m->constMethod()->type_annotations(); + + if (method_type_annotations == NULL || method_type_annotations->length() == 0) { + // this method does not have any annotations so skip it + continue; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("methods type_annotations length=%d", method_type_annotations->length())); + + int byte_i = 0; // byte index into method_type_annotations + if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations, + byte_i, "method_info", THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad method_type_annotations at %d", i)); + // propagate failure back to caller + return false; + } + } + + return true; +} // end rewrite_cp_refs_in_methods_type_annotations() + + +// Rewrite constant pool references in a type_annotations +// field. 
This "structure" is adapted from the +// RuntimeVisibleTypeAnnotations_attribute described in +// section 4.7.20 of the Java SE 8 Edition of the VM spec: +// +// type_annotations_typeArray { +// u2 num_annotations; +// type_annotation annotations[num_annotations]; +// } +// +bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + // not enough room for num_annotations field + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for num_annotations field")); + return false; + } + + u2 num_annotations = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("num_type_annotations=%d", num_annotations)); + + int calc_num_annotations = 0; + for (; calc_num_annotations < num_annotations; calc_num_annotations++) { + if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray, + byte_i_ref, location_mesg, THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad type_annotation_struct at %d", calc_num_annotations)); + // propagate failure back to caller + return false; + } + } + assert(num_annotations == calc_num_annotations, "sanity check"); + + if (byte_i_ref != type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("read wrong amount of bytes at end of processing " + "type_annotations_typeArray (%d of %d bytes were read)", + byte_i_ref, type_annotations_typeArray->length())); + return false; + } + + return true; +} // end rewrite_cp_refs_in_type_annotations_typeArray() + + +// Rewrite constant pool references in a type_annotation +// field. This "structure" is adapted from the +// RuntimeVisibleTypeAnnotations_attribute described in +// section 4.7.20 of the Java SE 8 Edition of the VM spec: +// +// type_annotation { +// u1 target_type; +// union { +// type_parameter_target; +// supertype_target; +// type_parameter_bound_target; +// empty_target; +// method_formal_parameter_target; +// throws_target; +// localvar_target; +// catch_target; +// offset_target; +// type_argument_target; +// } target_info; +// type_path target_path; +// annotation anno; +// } +// +bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if (!skip_type_annotation_target(type_annotations_typeArray, + byte_i_ref, location_mesg, THREAD)) { + return false; + } + + if (!skip_type_annotation_type_path(type_annotations_typeArray, + byte_i_ref, THREAD)) { + return false; + } + + if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray, + byte_i_ref, THREAD)) { + return false; + } + + return true; +} // end rewrite_cp_refs_in_type_annotation_struct() + + +// Read, verify and skip over the target_type and target_info part +// so that rewriting can continue in the later parts of the struct. 
+// +// u1 target_type; +// union { +// type_parameter_target; +// supertype_target; +// type_parameter_bound_target; +// empty_target; +// method_formal_parameter_target; +// throws_target; +// localvar_target; +// catch_target; +// offset_target; +// type_argument_target; +// } target_info; +// +bool VM_RedefineClasses::skip_type_annotation_target( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + // not enough room for a target_type let alone the rest of a type_annotation + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a target_type")); + return false; + } + + u1 target_type = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("target_type=0x%.2x", target_type)); + RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("location=%s", location_mesg)); + + // Skip over target_info + switch (target_type) { + case 0x00: + // kind: type parameter declaration of generic class or interface + // location: ClassFile + case 0x01: + // kind: type parameter declaration of generic method or constructor + // location: method_info + + { + // struct: + // type_parameter_target { + // u1 type_parameter_index; + // } + // + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_parameter_target")); + return false; + } + + u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_parameter_target: type_parameter_index=%d", + type_parameter_index)); + } break; + + case 0x10: + // kind: type in extends clause of class or interface declaration + // (including the direct superclass of an anonymous class declaration), + // or in implements clause of interface declaration + // location: ClassFile + + { + // struct: + // supertype_target { + // u2 supertype_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a supertype_target")); + return false; + } + + u2 supertype_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("supertype_target: supertype_index=%d", supertype_index)); + } break; + + case 0x11: + // kind: type in bound of type parameter declaration of generic class or interface + // location: ClassFile + case 0x12: + // kind: type in bound of type parameter declaration of generic method or constructor + // location: method_info + + { + // struct: + // type_parameter_bound_target { + // u1 type_parameter_index; + // u1 bound_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_parameter_bound_target")); + return false; + } + + u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + u1 bound_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d", + type_parameter_index, bound_index)); + } break; + + case 0x13: + // kind: type in field declaration + // location: field_info + case 0x14: + // kind: return type of method, or type of newly constructed object + // location: 
method_info + case 0x15: + // kind: receiver type of method or constructor + // location: method_info + + { + // struct: + // empty_target { + // } + // + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("empty_target")); + } break; + + case 0x16: + // kind: type in formal parameter declaration of method, constructor, or lambda expression + // location: method_info + + { + // struct: + // formal_parameter_target { + // u1 formal_parameter_index; + // } + // + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a formal_parameter_target")); + return false; + } + + u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("formal_parameter_target: formal_parameter_index=%d", + formal_parameter_index)); + } break; + + case 0x17: + // kind: type in throws clause of method or constructor + // location: method_info + + { + // struct: + // throws_target { + // u2 throws_type_index + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a throws_target")); + return false; + } + + u2 throws_type_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("throws_target: throws_type_index=%d", throws_type_index)); + } break; + + case 0x40: + // kind: type in local variable declaration + // location: Code + case 0x41: + // kind: type in resource variable declaration + // location: Code + + { + // struct: + // localvar_target { + // u2 table_length; + // struct { + // u2 start_pc; + // u2 length; + // u2 index; + // } table[table_length]; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + // not enough room for a table_length let alone the rest of a localvar_target + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a localvar_target table_length")); + return false; + } + + u2 table_length = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("localvar_target: table_length=%d", table_length)); + + int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry + int table_size = table_length * table_struct_size; + + if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) { + // not enough room for a table + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a table array of length %d", table_length)); + return false; + } + + // Skip over table + byte_i_ref += table_size; + } break; + + case 0x42: + // kind: type in exception parameter declaration + // location: Code + + { + // struct: + // catch_target { + // u2 exception_table_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a catch_target")); + return false; + } + + u2 exception_table_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("catch_target: exception_table_index=%d", exception_table_index)); + } break; + + case 0x43: + // kind: type in instanceof expression + // location: Code + case 0x44: + // kind: type in new expression + // location: Code + case 0x45: + // kind: type in method reference expression using 
::new + // location: Code + case 0x46: + // kind: type in method reference expression using ::Identifier + // location: Code + + { + // struct: + // offset_target { + // u2 offset; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a offset_target")); + return false; + } + + u2 offset = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("offset_target: offset=%d", offset)); + } break; + + case 0x47: + // kind: type in cast expression + // location: Code + case 0x48: + // kind: type argument for generic constructor in new expression or + // explicit constructor invocation statement + // location: Code + case 0x49: + // kind: type argument for generic method in method invocation expression + // location: Code + case 0x4A: + // kind: type argument for generic constructor in method reference expression using ::new + // location: Code + case 0x4B: + // kind: type argument for generic method in method reference expression using ::Identifier + // location: Code + + { + // struct: + // type_argument_target { + // u2 offset; + // u1 type_argument_index; + // } + // + if ((byte_i_ref + 3) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_argument_target")); + return false; + } + + u2 offset = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_argument_target: offset=%d, type_argument_index=%d", + offset, type_argument_index)); + } break; + + default: + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("unknown target_type")); +#ifdef ASSERT + ShouldNotReachHere(); +#endif + return false; + } + + return true; +} // end skip_type_annotation_target() + + +// Read, verify and skip over the type_path part so that rewriting +// can continue in the later parts of the struct. 
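Summarizing the switch above: once the u1 target_type is known, every target_info variant except localvar_target has a fixed size, so the walker can step straight to the type_path part. A standalone sketch of that size table, assuming the JVMS 4.7.20 target_type values quoted in the comments; the helper name is illustrative:

public class TargetInfoSize {
    // Bytes of target_info that follow the u1 target_type, per JVMS 4.7.20.
    // tableLength is only consulted for localvar_target (0x40/0x41), whose
    // table entries are three u2 fields each.
    static int targetInfoSize(int targetType, int tableLength) {
        switch (targetType) {
            case 0x00: case 0x01:            return 1;                 // type_parameter_target
            case 0x10:                       return 2;                 // supertype_target
            case 0x11: case 0x12:            return 2;                 // type_parameter_bound_target
            case 0x13: case 0x14: case 0x15: return 0;                 // empty_target
            case 0x16:                       return 1;                 // formal_parameter_target
            case 0x17:                       return 2;                 // throws_target
            case 0x40: case 0x41:            return 2 + 6 * tableLength; // localvar_target
            case 0x42:                       return 2;                 // catch_target
            case 0x43: case 0x44: case 0x45: case 0x46: return 2;      // offset_target
            case 0x47: case 0x48: case 0x49: case 0x4A: case 0x4B: return 3; // type_argument_target
            default: throw new IllegalArgumentException("unknown target_type " + targetType);
        }
    }

    public static void main(String[] args) {
        System.out.println(targetInfoSize(0x40, 3)); // 20: u2 table_length + 3 entries of 6 bytes
        System.out.println(targetInfoSize(0x47, 0)); // 3
    }
}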
+// +// type_path { +// u1 path_length; +// { +// u1 type_path_kind; +// u1 type_argument_index; +// } path[path_length]; +// } +// +bool VM_RedefineClasses::skip_type_annotation_type_path( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) { + + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + // not enough room for a path_length let alone the rest of the type_path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_path")); + return false; + } + + u1 path_length = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_path: path_length=%d", path_length)); + + int calc_path_length = 0; + for (; calc_path_length < path_length; calc_path_length++) { + if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) { + // not enough room for a path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for path entry %d of %d", + calc_path_length, path_length)); + return false; + } + + u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d", + calc_path_length, type_path_kind, type_argument_index)); + + if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) { + // not enough room for a path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("inconsistent type_path values")); + return false; + } + } + assert(path_length == calc_path_length, "sanity check"); + + return true; +} // end skip_type_annotation_type_path() + + // Rewrite constant pool references in the method's stackmap table. // These "structures" are adapted from the StackMapTable_attribute that // is described in section 4.8.4 of the 6.0 version of the VM spec @@ -3223,23 +3828,6 @@ void VM_RedefineClasses::swap_annotations(instanceKlassHandle the_class, instanceKlassHandle scratch_class) { - // Since there is currently no rewriting of type annotations indexes - // into the CP, we null out type annotations on scratch_class before - // we swap annotations with the_class rather than facing the - // possibility of shipping annotations with broken indexes to - // Java-land. 
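For reference, the type_path layout walked by skip_type_annotation_type_path above is small enough to sketch standalone: a u1 path_length followed by path_length (type_path_kind, type_argument_index) pairs, rejecting kinds above 3 and a non-zero argument index for any kind other than 3. Plain Java arrays stand in for the VM's AnnotationArray; the helper name is illustrative:

public class TypePathSkip {
    // Returns the new offset just past the type_path, or -1 on malformed input.
    static int skipTypePath(byte[] a, int off) {
        if (off + 1 > a.length) return -1;          // no room for path_length
        int pathLength = a[off++] & 0xFF;
        for (int i = 0; i < pathLength; i++) {
            if (off + 2 > a.length) return -1;      // no room for this path entry
            int kind = a[off++] & 0xFF;             // type_path_kind
            int argIndex = a[off++] & 0xFF;         // type_argument_index
            if (kind > 3 || (kind != 3 && argIndex != 0)) {
                return -1;                          // inconsistent type_path values
            }
        }
        return off;
    }

    public static void main(String[] args) {
        // path_length=1, one entry: kind=3 (type argument of parameterized type), index=0
        System.out.println(skipTypePath(new byte[] {1, 3, 0}, 0)); // 3
        // kind=5 is out of range
        System.out.println(skipTypePath(new byte[] {1, 5, 0}, 0)); // -1
    }
}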
- ClassLoaderData* loader_data = scratch_class->class_loader_data(); - AnnotationArray* new_class_type_annotations = scratch_class->class_type_annotations(); - if (new_class_type_annotations != NULL) { - MetadataFactory::free_array(loader_data, new_class_type_annotations); - scratch_class->annotations()->set_class_type_annotations(NULL); - } - Array* new_field_type_annotations = scratch_class->fields_type_annotations(); - if (new_field_type_annotations != NULL) { - Annotations::free_contents(loader_data, new_field_type_annotations); - scratch_class->annotations()->set_fields_type_annotations(NULL); - } - // Swap annotation fields values Annotations* old_annotations = the_class->annotations(); the_class->set_annotations(scratch_class->annotations()); diff --git a/src/share/vm/prims/jvmtiRedefineClasses.hpp b/src/share/vm/prims/jvmtiRedefineClasses.hpp --- a/src/share/vm/prims/jvmtiRedefineClasses.hpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp @@ -457,6 +457,17 @@ instanceKlassHandle scratch_class, TRAPS); bool rewrite_cp_refs_in_element_value( AnnotationArray* class_annotations, int &byte_i_ref, TRAPS); + bool rewrite_cp_refs_in_type_annotations_typeArray( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool rewrite_cp_refs_in_type_annotation_struct( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool skip_type_annotation_target( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool skip_type_annotation_type_path( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS); bool rewrite_cp_refs_in_fields_annotations( instanceKlassHandle scratch_class, TRAPS); void rewrite_cp_refs_in_method(methodHandle method, @@ -468,6 +479,12 @@ instanceKlassHandle scratch_class, TRAPS); bool rewrite_cp_refs_in_methods_parameter_annotations( instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_class_type_annotations( + instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_fields_type_annotations( + instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_methods_type_annotations( + instanceKlassHandle scratch_class, TRAPS); void rewrite_cp_refs_in_stack_map_table(methodHandle method, TRAPS); void rewrite_cp_refs_in_verification_type_info( address& stackmap_addr_ref, address stackmap_end, u2 frame_i, diff --git a/test/runtime/RedefineTests/RedefineAnnotations.java b/test/runtime/RedefineTests/RedefineAnnotations.java new file mode 100644 --- /dev/null +++ b/test/runtime/RedefineTests/RedefineAnnotations.java @@ -0,0 +1,410 @@ +/* + * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). 
+ * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +/* + * @test + * @library /testlibrary + * @summary Test that type annotations are retained after a retransform + * @run main RedefineAnnotations buildagent + * @run main/othervm -javaagent:redefineagent.jar RedefineAnnotations + */ + +import static com.oracle.java.testlibrary.Asserts.assertTrue; +import java.io.FileNotFoundException; +import java.io.PrintWriter; +import java.lang.NoSuchFieldException; +import java.lang.NoSuchMethodException; +import java.lang.RuntimeException; +import java.lang.annotation.Annotation; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.lang.instrument.ClassFileTransformer; +import java.lang.instrument.IllegalClassFormatException; +import java.lang.instrument.Instrumentation; +import java.lang.instrument.UnmodifiableClassException; +import java.lang.reflect.AnnotatedArrayType; +import java.lang.reflect.AnnotatedParameterizedType; +import java.lang.reflect.AnnotatedType; +import java.lang.reflect.AnnotatedWildcardType; +import java.lang.reflect.Executable; +import java.lang.reflect.TypeVariable; +import java.security.ProtectionDomain; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import jdk.internal.org.objectweb.asm.ClassReader; +import jdk.internal.org.objectweb.asm.ClassVisitor; +import jdk.internal.org.objectweb.asm.ClassWriter; +import jdk.internal.org.objectweb.asm.FieldVisitor; +import static jdk.internal.org.objectweb.asm.Opcodes.ASM5; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE_USE) +@interface TestAnn { + String site(); +} + +public class RedefineAnnotations { + static Instrumentation inst; + public static void premain(String agentArgs, Instrumentation inst) { + RedefineAnnotations.inst = inst; + } + + static class Transformer implements ClassFileTransformer { + + public byte[] asm(ClassLoader loader, String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, byte[] classfileBuffer) + throws IllegalClassFormatException { + + ClassWriter cw = new ClassWriter(0); + ClassVisitor cv = new ReAddDummyFieldsClassVisitor(ASM5, cw) { }; + ClassReader cr = new ClassReader(classfileBuffer); + cr.accept(cv, 0); + return cw.toByteArray(); + } + + public class ReAddDummyFieldsClassVisitor extends ClassVisitor { + + LinkedList fields = new LinkedList<>(); + + public ReAddDummyFieldsClassVisitor(int api, ClassVisitor cv) { + super(api, cv); + } + + @Override public FieldVisitor visitField(int access, String name, + String desc, String signature, Object value) { + if (name.startsWith("dummy")) { + // Remove dummy field + fields.addLast(new F(access, name, desc, signature, value)); + return null; + } + return cv.visitField(access, name, desc, signature, value); + } + + @Override public void visitEnd() { + F f; + while ((f = fields.pollFirst()) != null) { + // Re-add dummy fields + cv.visitField(f.access, f.name, f.desc, f.signature, f.value); + } + } + + private class F { + private int access; + private String name; + private String desc; + 
private String signature; + private Object value; + F(int access, String name, String desc, String signature, Object value) { + this.access = access; + this.name = name; + this.desc = desc; + this.signature = signature; + this.value = value; + } + } + } + + @Override public byte[] transform(ClassLoader loader, String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, byte[] classfileBuffer) + throws IllegalClassFormatException { + + if (className.contains("TypeAnnotatedTestClass")) { + try { + // Here we remove and re-add the dummy fields. This shuffles the constant pool + return asm(loader, className, classBeingRedefined, protectionDomain, classfileBuffer); + } catch (Throwable e) { + // The retransform native code that called this method does not propagate + // exceptions. Instead of getting an uninformative generic error, catch + // problems here and print it, then exit. + e.printStackTrace(); + System.exit(1); + } + } + return null; + } + } + + private static void buildAgent() { + try { + ClassFileInstaller.main("RedefineAnnotations"); + } catch (Exception e) { + throw new RuntimeException("Could not write agent classfile", e); + } + + try { + PrintWriter pw = new PrintWriter("MANIFEST.MF"); + pw.println("Premain-Class: RedefineAnnotations"); + pw.println("Agent-Class: RedefineAnnotations"); + pw.println("Can-Retransform-Classes: true"); + pw.close(); + } catch (FileNotFoundException e) { + throw new RuntimeException("Could not write manifest file for the agent", e); + } + + sun.tools.jar.Main jarTool = new sun.tools.jar.Main(System.out, System.err, "jar"); + if (!jarTool.run(new String[] { "-cmf", "MANIFEST.MF", "redefineagent.jar", "RedefineAnnotations.class" })) { + throw new RuntimeException("Could not write the agent jar file"); + } + } + + public static void main(String argv[]) throws NoSuchFieldException, NoSuchMethodException { + if (argv.length == 1 && argv[0].equals("buildagent")) { + buildAgent(); + return; + } + + if (inst == null) { + throw new RuntimeException("Instrumentation object was null"); + } + + RedefineAnnotations test = new RedefineAnnotations(); + test.testTransformAndVerify(); + } + + // Class type annotations + private Annotation classTypeParameterTA; + private Annotation extendsTA; + private Annotation implementsTA; + + // Field type annotations + private Annotation fieldTA; + private Annotation innerTA; + private Annotation[] arrayTA = new Annotation[4]; + private Annotation[] mapTA = new Annotation[5]; + + // Method type annotations + private Annotation returnTA, methodTypeParameterTA, formalParameterTA, throwsTA; + + private void testTransformAndVerify() + throws NoSuchFieldException, NoSuchMethodException { + + Class c = TypeAnnotatedTestClass.class; + Class myClass = c; + + /* + * Verify that the expected annotations are where they should be before transform. + */ + verifyClassTypeAnnotations(c); + verifyFieldTypeAnnotations(c); + verifyMethodTypeAnnotations(c); + + try { + inst.addTransformer(new Transformer(), true); + inst.retransformClasses(myClass); + } catch (UnmodifiableClassException e) { + throw new RuntimeException(e); + } + + /* + * Verify that the expected annotations are where they should be after transform. + * Also verify that before and after are equal. 
+ */ + verifyClassTypeAnnotations(c); + verifyFieldTypeAnnotations(c); + verifyMethodTypeAnnotations(c); + } + + private void verifyClassTypeAnnotations(Class c) { + Annotation anno; + + anno = c.getTypeParameters()[0].getAnnotations()[0]; + verifyTestAnn(classTypeParameterTA, anno, "classTypeParameter"); + classTypeParameterTA = anno; + + anno = c.getAnnotatedSuperclass().getAnnotations()[0]; + verifyTestAnn(extendsTA, anno, "extends"); + extendsTA = anno; + + anno = c.getAnnotatedInterfaces()[0].getAnnotations()[0]; + verifyTestAnn(implementsTA, anno, "implements"); + implementsTA = anno; + } + + private void verifyFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + verifyBasicFieldTypeAnnotations(c); + verifyInnerFieldTypeAnnotations(c); + verifyArrayFieldTypeAnnotations(c); + verifyMapFieldTypeAnnotations(c); + } + + private void verifyBasicFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno = c.getDeclaredField("typeAnnotatedBoolean").getAnnotatedType().getAnnotations()[0]; + verifyTestAnn(fieldTA, anno, "field"); + fieldTA = anno; + } + + private void verifyInnerFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + AnnotatedType at = c.getDeclaredField("typeAnnotatedInner").getAnnotatedType(); + Annotation anno = at.getAnnotations()[0]; + verifyTestAnn(innerTA, anno, "inner"); + innerTA = anno; + } + + private void verifyArrayFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno; + AnnotatedType at; + + at = c.getDeclaredField("typeAnnotatedArray").getAnnotatedType(); + anno = at.getAnnotations()[0]; + verifyTestAnn(arrayTA[0], anno, "array1"); + arrayTA[0] = anno; + + for (int i = 1; i <= 3; i++) { + at = ((AnnotatedArrayType) at).getAnnotatedGenericComponentType(); + anno = at.getAnnotations()[0]; + verifyTestAnn(arrayTA[i], anno, "array" + (i + 1)); + arrayTA[i] = anno; + } + } + + private void verifyMapFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno; + AnnotatedType atBase; + AnnotatedType atParameter; + atBase = c.getDeclaredField("typeAnnotatedMap").getAnnotatedType(); + + anno = atBase.getAnnotations()[0]; + verifyTestAnn(mapTA[0], anno, "map1"); + mapTA[0] = anno; + + atParameter = + ((AnnotatedParameterizedType) atBase). + getAnnotatedActualTypeArguments()[0]; + anno = ((AnnotatedWildcardType) atParameter).getAnnotations()[0]; + verifyTestAnn(mapTA[1], anno, "map2"); + mapTA[1] = anno; + + anno = + ((AnnotatedWildcardType) atParameter). + getAnnotatedUpperBounds()[0].getAnnotations()[0]; + verifyTestAnn(mapTA[2], anno, "map3"); + mapTA[2] = anno; + + atParameter = + ((AnnotatedParameterizedType) atBase). + getAnnotatedActualTypeArguments()[1]; + anno = ((AnnotatedParameterizedType) atParameter).getAnnotations()[0]; + verifyTestAnn(mapTA[3], anno, "map4"); + mapTA[3] = anno; + + anno = + ((AnnotatedParameterizedType) atParameter). 
+ getAnnotatedActualTypeArguments()[0].getAnnotations()[0]; + verifyTestAnn(mapTA[4], anno, "map5"); + mapTA[4] = anno; + } + + private void verifyMethodTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + Annotation anno; + Executable typeAnnotatedMethod = + c.getDeclaredMethod("typeAnnotatedMethod", TypeAnnotatedTestClass.class); + + anno = typeAnnotatedMethod.getAnnotatedReturnType().getAnnotations()[0]; + verifyTestAnn(returnTA, anno, "return"); + returnTA = anno; + + anno = typeAnnotatedMethod.getTypeParameters()[0].getAnnotations()[0]; + verifyTestAnn(methodTypeParameterTA, anno, "methodTypeParameter"); + methodTypeParameterTA = anno; + + anno = typeAnnotatedMethod.getAnnotatedParameterTypes()[0].getAnnotations()[0]; + verifyTestAnn(formalParameterTA, anno, "formalParameter"); + formalParameterTA = anno; + + anno = typeAnnotatedMethod.getAnnotatedExceptionTypes()[0].getAnnotations()[0]; + verifyTestAnn(throwsTA, anno, "throws"); + throwsTA = anno; + } + + private static void verifyTestAnn(Annotation verifyAgainst, Annotation anno, String expectedSite) { + verifyTestAnnSite(anno, expectedSite); + + // When called before transform verifyAgainst will be null, when called + // after transform it will be the annotation from before the transform + if (verifyAgainst != null) { + assertTrue(anno.equals(verifyAgainst), + "Annotations do not match before and after." + + " Before: \"" + verifyAgainst + "\", After: \"" + anno + "\""); + } + } + + private static void verifyTestAnnSite(Annotation testAnn, String expectedSite) { + String expectedAnn = "@TestAnn(site=" + expectedSite + ")"; + assertTrue(testAnn.toString().equals(expectedAnn), + "Expected \"" + expectedAnn + "\", got \"" + testAnn + "\""); + } + + public static class TypeAnnotatedTestClass <@TestAnn(site="classTypeParameter") S,T> + extends @TestAnn(site="extends") Thread + implements @TestAnn(site="implements") Runnable { + + public @TestAnn(site="field") boolean typeAnnotatedBoolean; + + public + RedefineAnnotations. + @TestAnn(site="inner") TypeAnnotatedTestClass + typeAnnotatedInner; + + public + @TestAnn(site="array4") boolean + @TestAnn(site="array1") [] + @TestAnn(site="array2") [] + @TestAnn(site="array3") [] + typeAnnotatedArray; + + public @TestAnn(site="map1") Map + <@TestAnn(site="map2") ? 
extends @TestAnn(site="map3") String, + @TestAnn(site="map4") List<@TestAnn(site="map5") Object>> typeAnnotatedMap; + + public int dummy1; + public int dummy2; + public int dummy3; + + @TestAnn(site="return") <@TestAnn(site="methodTypeParameter") U,V> Class + typeAnnotatedMethod(@TestAnn(site="formalParameter") TypeAnnotatedTestClass arg) + throws @TestAnn(site="throws") ClassNotFoundException { + + @TestAnn(site="local_variable_type") int foo = 0; + throw new ClassNotFoundException(); + } + + public void run() {} + } +} # HG changeset patch # User dbuck # Date 1415270041 28800 # Thu Nov 06 02:34:01 2014 -0800 # Node ID ef6b27d844cc0fa6724ca658cf0f70a6bf932e20 # Parent 3c87c13918fb5e4e918f55fc06faca0eb736951d 8058715: stability issues when being launched as an embedded JVM via JNI Summary: Use mmap call without MAP_FIXED so we avoid corrupting already allocated memory Reviewed-by: coleenp, dsimms diff --git a/src/os_cpu/linux_x86/vm/os_linux_x86.cpp b/src/os_cpu/linux_x86/vm/os_linux_x86.cpp --- a/src/os_cpu/linux_x86/vm/os_linux_x86.cpp +++ b/src/os_cpu/linux_x86/vm/os_linux_x86.cpp @@ -909,7 +909,7 @@ */ char* hint = (char*) (Linux::initial_thread_stack_bottom() - ((StackYellowPages + StackRedPages + 1) * page_size)); - char* codebuf = os::reserve_memory(page_size, hint); + char* codebuf = os::attempt_reserve_memory_at(page_size, hint); if ( (codebuf == NULL) || (!os::commit_memory(codebuf, page_size, true)) ) { return; // No matter, we tried, best effort. } # HG changeset patch # User dbuck # Date 1415270461 0 # Thu Nov 06 10:41:01 2014 +0000 # Node ID 0d754e6851d3dd53b7c2f1db9c46edc8dbcb0b0a # Parent ef6b27d844cc0fa6724ca658cf0f70a6bf932e20 # Parent 6f06ebb090805c6eb961ada13d05ccd67c34f78b Merge diff --git a/src/share/vm/services/runtimeService.cpp b/src/share/vm/services/runtimeService.cpp --- a/src/share/vm/services/runtimeService.cpp +++ b/src/share/vm/services/runtimeService.cpp @@ -46,6 +46,7 @@ PerfCounter* RuntimeService::_thread_interrupt_signaled_count = NULL; PerfCounter* RuntimeService::_interrupted_before_count = NULL; PerfCounter* RuntimeService::_interrupted_during_count = NULL; +double RuntimeService::_last_safepoint_sync_time_sec = 0.0; void RuntimeService::init() { // Make sure the VM version is initialized @@ -128,6 +129,7 @@ // update the time stamp to begin recording safepoint time _safepoint_timer.update(); + _last_safepoint_sync_time_sec = 0.0; if (UsePerfData) { _total_safepoints->inc(); if (_app_timer.is_updated()) { @@ -140,6 +142,9 @@ if (UsePerfData) { _sync_time_ticks->inc(_safepoint_timer.ticks_since_update()); } + if (PrintGCApplicationStoppedTime) { + _last_safepoint_sync_time_sec = last_safepoint_time_sec(); + } } void RuntimeService::record_safepoint_end() { @@ -155,8 +160,10 @@ gclog_or_tty->date_stamp(PrintGCDateStamps); gclog_or_tty->stamp(PrintGCTimeStamps); gclog_or_tty->print_cr("Total time for which application threads " - "were stopped: %3.7f seconds", - last_safepoint_time_sec()); + "were stopped: %3.7f seconds, " + "Stopping threads took: %3.7f seconds", + last_safepoint_time_sec(), + _last_safepoint_sync_time_sec); } // update the time stamp to begin recording app time diff --git a/src/share/vm/services/runtimeService.hpp b/src/share/vm/services/runtimeService.hpp --- a/src/share/vm/services/runtimeService.hpp +++ b/src/share/vm/services/runtimeService.hpp @@ -40,6 +40,7 @@ static TimeStamp _safepoint_timer; static TimeStamp _app_timer; + static double _last_safepoint_sync_time_sec; public: static void init(); # HG changeset patch # User 
dbuck # Date 1415275543 0 # Thu Nov 06 12:05:43 2014 +0000 # Node ID 28ab1380ec65d0425da9b33316e38a2966058297 # Parent 5217eef2497f8d2fb9a96f17833410f0416f8e39 # Parent 0d754e6851d3dd53b7c2f1db9c46edc8dbcb0b0a Merge diff --git a/src/os_cpu/linux_x86/vm/os_linux_x86.cpp b/src/os_cpu/linux_x86/vm/os_linux_x86.cpp --- a/src/os_cpu/linux_x86/vm/os_linux_x86.cpp +++ b/src/os_cpu/linux_x86/vm/os_linux_x86.cpp @@ -909,7 +909,7 @@ */ char* hint = (char*) (Linux::initial_thread_stack_bottom() - ((StackYellowPages + StackRedPages + 1) * page_size)); - char* codebuf = os::reserve_memory(page_size, hint); + char* codebuf = os::attempt_reserve_memory_at(page_size, hint); if ( (codebuf == NULL) || (!os::commit_memory(codebuf, page_size, true)) ) { return; // No matter, we tried, best effort. } # HG changeset patch # User kvn # Date 1415044960 28800 # Mon Nov 03 12:02:40 2014 -0800 # Node ID 7024b693c8f9f3fdc75454c040ca0d2bdfe36bfc # Parent 28ab1380ec65d0425da9b33316e38a2966058297 8059780: SPECjvm2008-MPEG performance regressions on x64 platforms Summary: Back-out 8052081 changes made in lcm.cpp. Reviewed-by: iveresov, roland diff --git a/src/share/vm/opto/lcm.cpp b/src/share/vm/opto/lcm.cpp --- a/src/share/vm/opto/lcm.cpp +++ b/src/share/vm/opto/lcm.cpp @@ -484,9 +484,7 @@ iop == Op_CreateEx || // Create-exception must start block iop == Op_CheckCastPP ) { - // select the node n - // remove n from worklist and retain the order of remaining nodes - worklist.remove((uint)i); + worklist.map(i,worklist.pop()); return n; } @@ -572,9 +570,7 @@ assert(idx >= 0, "index should be set"); Node *n = worklist[(uint)idx]; // Get the winner - // select the node n - // remove n from worklist and retain the order of remaining nodes - worklist.remove((uint)idx); + worklist.map((uint)idx, worklist.pop()); // Compress worklist return n; } # HG changeset patch # User stefank # Date 1412240136 -7200 # Thu Oct 02 10:55:36 2014 +0200 # Node ID b12a2a9b05ca00a9493736d2dcae7e2ee2389b37 # Parent 7024b693c8f9f3fdc75454c040ca0d2bdfe36bfc 8056240: Investigate increased GC remark time after class unloading changes in CRM Fuse Reviewed-by: mgerdin, coleenp, bdelsart diff --git a/src/share/vm/classfile/classLoaderData.cpp b/src/share/vm/classfile/classLoaderData.cpp --- a/src/share/vm/classfile/classLoaderData.cpp +++ b/src/share/vm/classfile/classLoaderData.cpp @@ -747,7 +747,7 @@ // Move class loader data from main list to the unloaded list for unloading // and deallocation later. -bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure) { +bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure, bool clean_alive) { ClassLoaderData* data = _head; ClassLoaderData* prev = NULL; bool seen_dead_loader = false; @@ -756,16 +756,8 @@ // purging and we don't want to rewalk the previously unloaded class loader data. _saved_unloading = _unloading; - // mark metadata seen on the stack and code cache so we can delete - // unneeded entries. - bool has_redefined_a_class = JvmtiExport::has_redefined_a_class(); - MetadataOnStackMark md_on_stack; while (data != NULL) { if (data->is_alive(is_alive_closure)) { - if (has_redefined_a_class) { - data->classes_do(InstanceKlass::purge_previous_versions); - } - data->free_deallocate_list(); prev = data; data = data->next(); continue; @@ -787,6 +779,11 @@ _unloading = dead; } + if (clean_alive) { + // Clean previous versions and the deallocate list. 
+ ClassLoaderDataGraph::clean_metaspaces(); + } + if (seen_dead_loader) { post_class_unload_events(); } @@ -794,6 +791,26 @@ return seen_dead_loader; } +void ClassLoaderDataGraph::clean_metaspaces() { + // mark metadata seen on the stack and code cache so we can delete unneeded entries. + bool has_redefined_a_class = JvmtiExport::has_redefined_a_class(); + MetadataOnStackMark md_on_stack(has_redefined_a_class); + + if (has_redefined_a_class) { + // purge_previous_versions also cleans weak method links. Because + // one method's MDO can reference another method from another + // class loader, we need to first clean weak method links for all + // class loaders here. Below, we can then free redefined methods + // for all class loaders. + for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { + data->classes_do(InstanceKlass::purge_previous_versions); + } + } + + // Need to purge the previous version before deallocating. + free_deallocate_lists(); +} + void ClassLoaderDataGraph::purge() { assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); ClassLoaderData* list = _unloading; @@ -821,6 +838,14 @@ #endif } +void ClassLoaderDataGraph::free_deallocate_lists() { + for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { + // We need to keep this data until InstanceKlass::purge_previous_version has been + // called on all alive classes. See the comment in ClassLoaderDataGraph::clean_metaspaces. + cld->free_deallocate_list(); + } +} + // CDS support // Global metaspaces for writing information to the shared archive. When diff --git a/src/share/vm/classfile/classLoaderData.hpp b/src/share/vm/classfile/classLoaderData.hpp --- a/src/share/vm/classfile/classLoaderData.hpp +++ b/src/share/vm/classfile/classLoaderData.hpp @@ -71,6 +71,7 @@ static ClassLoaderData* add(Handle class_loader, bool anonymous, TRAPS); static void post_class_unload_events(void); + static void clean_metaspaces(); public: static ClassLoaderData* find_or_create(Handle class_loader, TRAPS); static void purge(); @@ -89,7 +90,7 @@ static void classes_do(void f(Klass* const)); static void loaded_classes_do(KlassClosure* klass_closure); static void classes_unloading_do(void f(Klass* const)); - static bool do_unloading(BoolObjectClosure* is_alive); + static bool do_unloading(BoolObjectClosure* is_alive, bool clean_alive); // CMS support. static void remember_new_clds(bool remember) { _saved_head = (remember ? _head : NULL); } @@ -105,6 +106,8 @@ } } + static void free_deallocate_lists(); + static void dump_on(outputStream * const out) PRODUCT_RETURN; static void dump() { dump_on(tty); } static void verify(); diff --git a/src/share/vm/classfile/metadataOnStackMark.cpp b/src/share/vm/classfile/metadataOnStackMark.cpp --- a/src/share/vm/classfile/metadataOnStackMark.cpp +++ b/src/share/vm/classfile/metadataOnStackMark.cpp @@ -31,25 +31,23 @@ #include "runtime/synchronizer.hpp" #include "runtime/thread.hpp" #include "services/threadService.hpp" -#include "utilities/growableArray.hpp" +#include "utilities/chunkedList.hpp" +volatile MetadataOnStackBuffer* MetadataOnStackMark::_used_buffers = NULL; +volatile MetadataOnStackBuffer* MetadataOnStackMark::_free_buffers = NULL; -// Keep track of marked on-stack metadata so it can be cleared. -GrowableArray* _marked_objects = NULL; NOT_PRODUCT(bool MetadataOnStackMark::_is_active = false;) // Walk metadata on the stack and mark it so that redefinition doesn't delete // it. 
Class unloading also walks the previous versions and might try to // delete it, so this class is used by class unloading also. -MetadataOnStackMark::MetadataOnStackMark() { +MetadataOnStackMark::MetadataOnStackMark(bool visit_code_cache) { assert(SafepointSynchronize::is_at_safepoint(), "sanity check"); + assert(_used_buffers == NULL, "sanity check"); NOT_PRODUCT(_is_active = true;) - if (_marked_objects == NULL) { - _marked_objects = new (ResourceObj::C_HEAP, mtClass) GrowableArray(1000, true); - } Threads::metadata_do(Metadata::mark_on_stack); - if (JvmtiExport::has_redefined_a_class()) { + if (visit_code_cache) { CodeCache::alive_nmethods_do(nmethod::mark_on_stack); } CompileBroker::mark_on_stack(); @@ -62,15 +60,93 @@ // Unmark everything that was marked. Can't do the same walk because // redefine classes messes up the code cache so the set of methods // might not be the same. - for (int i = 0; i< _marked_objects->length(); i++) { - _marked_objects->at(i)->set_on_stack(false); + + retire_buffer_for_thread(Thread::current()); + + MetadataOnStackBuffer* buffer = const_cast(_used_buffers); + while (buffer != NULL) { + // Clear on stack state for all metadata. + size_t size = buffer->size(); + for (size_t i = 0; i < size; i++) { + Metadata* md = buffer->at(i); + md->set_on_stack(false); + } + + MetadataOnStackBuffer* next = buffer->next_used(); + + // Move the buffer to the free list. + buffer->clear(); + buffer->set_next_used(NULL); + buffer->set_next_free(const_cast(_free_buffers)); + _free_buffers = buffer; + + // Step to next used buffer. + buffer = next; } - _marked_objects->clear(); // reuse growable array for next time. + + _used_buffers = NULL; + NOT_PRODUCT(_is_active = false;) } +void MetadataOnStackMark::retire_buffer(MetadataOnStackBuffer* buffer) { + if (buffer == NULL) { + return; + } + + MetadataOnStackBuffer* old_head; + + do { + old_head = const_cast(_used_buffers); + buffer->set_next_used(old_head); + } while (Atomic::cmpxchg_ptr(buffer, &_used_buffers, old_head) != old_head); +} + +void MetadataOnStackMark::retire_buffer_for_thread(Thread* thread) { + retire_buffer(thread->metadata_on_stack_buffer()); + thread->set_metadata_on_stack_buffer(NULL); +} + +bool MetadataOnStackMark::has_buffer_for_thread(Thread* thread) { + return thread->metadata_on_stack_buffer() != NULL; +} + +MetadataOnStackBuffer* MetadataOnStackMark::allocate_buffer() { + MetadataOnStackBuffer* allocated; + MetadataOnStackBuffer* new_head; + + do { + allocated = const_cast(_free_buffers); + if (allocated == NULL) { + break; + } + new_head = allocated->next_free(); + } while (Atomic::cmpxchg_ptr(new_head, &_free_buffers, allocated) != allocated); + + if (allocated == NULL) { + allocated = new MetadataOnStackBuffer(); + } + + assert(!allocated->is_full(), err_msg("Should not be full: " PTR_FORMAT, p2i(allocated))); + + return allocated; +} + // Record which objects are marked so we can unmark the same objects. 
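The retire path above publishes each full per-thread buffer onto a shared used-list with a cmpxchg loop rather than a lock. A minimal standalone sketch of that lock-free push, using java.util.concurrent atomics in place of Atomic::cmpxchg_ptr; the Buffer type and field names are illustrative:

import java.util.concurrent.atomic.AtomicReference;

public class RetireBufferSketch {
    static final class Buffer {
        Buffer nextUsed;                       // intrusive link, like set_next_used()
    }

    static final AtomicReference<Buffer> usedBuffers = new AtomicReference<>();

    // Push a retired buffer onto the shared used-list without taking a lock.
    static void retire(Buffer buffer) {
        if (buffer == null) {
            return;
        }
        Buffer oldHead;
        do {
            oldHead = usedBuffers.get();
            buffer.nextUsed = oldHead;
        } while (!usedBuffers.compareAndSet(oldHead, buffer));
    }

    public static void main(String[] args) {
        retire(new Buffer());
        retire(new Buffer());
        int n = 0;
        for (Buffer b = usedBuffers.get(); b != null; b = b.nextUsed) {
            n++;
        }
        System.out.println(n);                 // 2
    }
}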
-void MetadataOnStackMark::record(Metadata* m) { +void MetadataOnStackMark::record(Metadata* m, Thread* thread) { assert(_is_active, "metadata on stack marking is active"); - _marked_objects->push(m); + + MetadataOnStackBuffer* buffer = thread->metadata_on_stack_buffer(); + + if (buffer != NULL && buffer->is_full()) { + retire_buffer(buffer); + buffer = NULL; + } + + if (buffer == NULL) { + buffer = allocate_buffer(); + thread->set_metadata_on_stack_buffer(buffer); + } + + buffer->push(m); } diff --git a/src/share/vm/classfile/metadataOnStackMark.hpp b/src/share/vm/classfile/metadataOnStackMark.hpp --- a/src/share/vm/classfile/metadataOnStackMark.hpp +++ b/src/share/vm/classfile/metadataOnStackMark.hpp @@ -26,9 +26,12 @@ #define SHARE_VM_CLASSFILE_METADATAONSTACKMARK_HPP #include "memory/allocation.hpp" +#include "utilities/chunkedList.hpp" class Metadata; +typedef ChunkedList MetadataOnStackBuffer; + // Helper class to mark and unmark metadata used on the stack as either handles // or executing methods, so that it can't be deleted during class redefinition // and class unloading. @@ -36,10 +39,20 @@ // metadata during parsing, relocated methods, and methods in backtraces. class MetadataOnStackMark : public StackObj { NOT_PRODUCT(static bool _is_active;) + + static volatile MetadataOnStackBuffer* _used_buffers; + static volatile MetadataOnStackBuffer* _free_buffers; + + static MetadataOnStackBuffer* allocate_buffer(); + static void retire_buffer(MetadataOnStackBuffer* buffer); + public: - MetadataOnStackMark(); - ~MetadataOnStackMark(); - static void record(Metadata* m); + MetadataOnStackMark(bool visit_code_cache); + ~MetadataOnStackMark(); + + static void record(Metadata* m, Thread* thread); + static void retire_buffer_for_thread(Thread* thread); + static bool has_buffer_for_thread(Thread* thread); }; #endif // SHARE_VM_CLASSFILE_METADATAONSTACKMARK_HPP diff --git a/src/share/vm/classfile/systemDictionary.cpp b/src/share/vm/classfile/systemDictionary.cpp --- a/src/share/vm/classfile/systemDictionary.cpp +++ b/src/share/vm/classfile/systemDictionary.cpp @@ -1691,9 +1691,9 @@ // Assumes classes in the SystemDictionary are only unloaded at a safepoint // Note: anonymous classes are not in the SD. -bool SystemDictionary::do_unloading(BoolObjectClosure* is_alive) { +bool SystemDictionary::do_unloading(BoolObjectClosure* is_alive, bool clean_alive) { // First, mark for unload all ClassLoaderData referencing a dead class loader. - bool unloading_occurred = ClassLoaderDataGraph::do_unloading(is_alive); + bool unloading_occurred = ClassLoaderDataGraph::do_unloading(is_alive, clean_alive); if (unloading_occurred) { dictionary()->do_unloading(); constraints()->purge_loader_constraints(); diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp --- a/src/share/vm/classfile/systemDictionary.hpp +++ b/src/share/vm/classfile/systemDictionary.hpp @@ -341,7 +341,7 @@ // Unload (that is, break root links to) all unmarked classes and // loaders. Returns "true" iff something was unloaded. 
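The new record(Metadata*, Thread*) above appends into the calling thread's buffer, retires the buffer once it is full, and only then takes a fresh one. A standalone sketch of that flow, with a ThreadLocal standing in for Thread::metadata_on_stack_buffer and a plain list standing in for the chunked buffer (all names illustrative):

import java.util.ArrayList;
import java.util.List;

public class RecordSketch {
    static final int BUFFER_SIZE = 4;          // tiny capacity for illustration
    // Stand-in for Thread::metadata_on_stack_buffer(); the VM keeps the
    // pointer directly on the Thread object rather than in a ThreadLocal.
    static final ThreadLocal<List<Object>> buffer = new ThreadLocal<>();
    static final List<List<Object>> retired = new ArrayList<>();

    static synchronized void retire(List<Object> full) {
        retired.add(full);
    }

    // Record one marked object into the current thread's buffer,
    // retiring the buffer and starting a fresh one when it fills up.
    static void record(Object m) {
        List<Object> b = buffer.get();
        if (b != null && b.size() == BUFFER_SIZE) {
            retire(b);
            b = null;
        }
        if (b == null) {
            b = new ArrayList<>(BUFFER_SIZE);
            buffer.set(b);
        }
        b.add(m);
    }

    public static void main(String[] args) {
        for (int i = 0; i < 10; i++) record(i);
        System.out.println(retired.size() + " retired, "
                + buffer.get().size() + " pending");   // 2 retired, 2 pending
    }
}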
- static bool do_unloading(BoolObjectClosure* is_alive); + static bool do_unloading(BoolObjectClosure* is_alive, bool clean_alive = true); // Used by DumpSharedSpaces only to remove classes that failed verification static void remove_classes_in_error_state(); diff --git a/src/share/vm/code/nmethod.cpp b/src/share/vm/code/nmethod.cpp --- a/src/share/vm/code/nmethod.cpp +++ b/src/share/vm/code/nmethod.cpp @@ -1720,11 +1720,17 @@ set_unload_reported(); } -void static clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) { +void static clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive, bool mark_on_stack) { if (ic->is_icholder_call()) { // The only exception is compiledICHolder oops which may // yet be marked below. (We check this further below). CompiledICHolder* cichk_oop = ic->cached_icholder(); + + if (mark_on_stack) { + Metadata::mark_on_stack(cichk_oop->holder_method()); + Metadata::mark_on_stack(cichk_oop->holder_klass()); + } + if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) && cichk_oop->holder_klass()->is_loader_alive(is_alive)) { return; @@ -1732,6 +1738,10 @@ } else { Metadata* ic_oop = ic->cached_metadata(); if (ic_oop != NULL) { + if (mark_on_stack) { + Metadata::mark_on_stack(ic_oop); + } + if (ic_oop->is_klass()) { if (((Klass*)ic_oop)->is_loader_alive(is_alive)) { return; @@ -1792,7 +1802,7 @@ while(iter.next()) { if (iter.type() == relocInfo::virtual_call_type) { CompiledIC *ic = CompiledIC_at(&iter); - clean_ic_if_metadata_is_dead(ic, is_alive); + clean_ic_if_metadata_is_dead(ic, is_alive, false); } } } @@ -1860,6 +1870,53 @@ return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from); } +bool nmethod::unload_if_dead_at(RelocIterator* iter_at_oop, BoolObjectClosure *is_alive, bool unloading_occurred) { + assert(iter_at_oop->type() == relocInfo::oop_type, "Wrong relocation type"); + + oop_Relocation* r = iter_at_oop->oop_reloc(); + // Traverse those oops directly embedded in the code. + // Other oops (oop_index>0) are seen as part of scopes_oops. + assert(1 == (r->oop_is_immediate()) + + (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()), + "oop must be found in exactly one place"); + if (r->oop_is_immediate() && r->oop_value() != NULL) { + // Unload this nmethod if the oop is dead. + if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) { + return true;; + } + } + + return false; +} + +void nmethod::mark_metadata_on_stack_at(RelocIterator* iter_at_metadata) { + assert(iter_at_metadata->type() == relocInfo::metadata_type, "Wrong relocation type"); + + metadata_Relocation* r = iter_at_metadata->metadata_reloc(); + // In this metadata, we must only follow those metadatas directly embedded in + // the code. Other metadatas (oop_index>0) are seen as part of + // the metadata section below. + assert(1 == (r->metadata_is_immediate()) + + (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()), + "metadata must be found in exactly one place"); + if (r->metadata_is_immediate() && r->metadata_value() != NULL) { + Metadata* md = r->metadata_value(); + if (md != _method) Metadata::mark_on_stack(md); + } +} + +void nmethod::mark_metadata_on_stack_non_relocs() { + // Visit the metadata section + for (Metadata** p = metadata_begin(); p < metadata_end(); p++) { + if (*p == Universe::non_oop_word() || *p == NULL) continue; // skip non-oops + Metadata* md = *p; + Metadata::mark_on_stack(md); + } + + // Visit metadata not embedded in the other places. 
+ if (_method != NULL) Metadata::mark_on_stack(_method); +} + bool nmethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) { ResourceMark rm; @@ -1889,6 +1946,11 @@ unloading_occurred = true; } + // When class redefinition is used all metadata in the CodeCache has to be recorded, + // so that unused "previous versions" can be purged. Since walking the CodeCache can + // be expensive, the "mark on stack" is piggy-backed on this parallel unloading code. + bool mark_metadata_on_stack = a_class_was_redefined; + // Exception cache clean_exception_cache(is_alive); @@ -1904,7 +1966,7 @@ if (unloading_occurred) { // If class unloading occurred we first iterate over all inline caches and // clear ICs where the cached oop is referring to an unloaded klass or method. - clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive); + clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive, mark_metadata_on_stack); } postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this); @@ -1920,24 +1982,21 @@ case relocInfo::oop_type: if (!is_unloaded) { - // Unload check - oop_Relocation* r = iter.oop_reloc(); - // Traverse those oops directly embedded in the code. - // Other oops (oop_index>0) are seen as part of scopes_oops. - assert(1 == (r->oop_is_immediate()) + - (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()), - "oop must be found in exactly one place"); - if (r->oop_is_immediate() && r->oop_value() != NULL) { - if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) { - is_unloaded = true; - } - } + is_unloaded = unload_if_dead_at(&iter, is_alive, unloading_occurred); } break; + case relocInfo::metadata_type: + if (mark_metadata_on_stack) { + mark_metadata_on_stack_at(&iter); + } } } + if (mark_metadata_on_stack) { + mark_metadata_on_stack_non_relocs(); + } + if (is_unloaded) { return postponed; } @@ -2085,7 +2144,7 @@ while (iter.next()) { if (iter.type() == relocInfo::metadata_type ) { metadata_Relocation* r = iter.metadata_reloc(); - // In this lmetadata, we must only follow those metadatas directly embedded in + // In this metadata, we must only follow those metadatas directly embedded in // the code. Other metadatas (oop_index>0) are seen as part of // the metadata section below. assert(1 == (r->metadata_is_immediate()) + @@ -2119,7 +2178,7 @@ f(md); } - // Call function Method*, not embedded in these other places. + // Visit metadata not embedded in the other places. if (_method != NULL) f(_method); } diff --git a/src/share/vm/code/nmethod.hpp b/src/share/vm/code/nmethod.hpp --- a/src/share/vm/code/nmethod.hpp +++ b/src/share/vm/code/nmethod.hpp @@ -614,9 +614,16 @@ // The parallel versions are used by G1. bool do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred); void do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred); + + private: // Unload a nmethod if the *root object is dead. 
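The do_unloading_parallel changes above piggy-back the mark-on-stack recording on the relocation walk that the unloading pass already performs, so the code cache is not traversed a second time. A loose standalone sketch of that idea only: one pass both answers the liveness question and, when requested, records every reference it sees (generic names, not the nmethod API):

import java.util.List;
import java.util.function.Consumer;
import java.util.function.Predicate;

public class CombinedWalkSketch {
    // One walk over the embedded references does double duty: it answers
    // "does this unit point at dead metadata?" and, when a class was
    // redefined, also records every reference so unused previous versions
    // can be purged later without a second expensive pass.
    static <T> boolean walk(List<T> refs, Predicate<T> isAlive,
                            boolean recordAll, Consumer<T> record) {
        boolean isUnloaded = false;
        for (T ref : refs) {
            if (recordAll) {
                record.accept(ref);
            }
            if (!isAlive.test(ref)) {
                isUnloaded = true;
            }
        }
        return isUnloaded;
    }

    public static void main(String[] args) {
        List<String> refs = List.of("alive", "dead", "alive");
        boolean unloaded = walk(refs, r -> !r.equals("dead"), true, System.out::println);
        System.out.println("unloaded=" + unloaded);   // true
    }
}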
bool can_unload(BoolObjectClosure* is_alive, oop* root, bool unloading_occurred); + bool unload_if_dead_at(RelocIterator *iter_at_oop, BoolObjectClosure* is_alive, bool unloading_occurred); + void mark_metadata_on_stack_at(RelocIterator* iter_at_metadata); + void mark_metadata_on_stack_non_relocs(); + + public: void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f); void oops_do(OopClosure* f) { oops_do(f, false); } diff --git a/src/share/vm/gc_implementation/g1/concurrentMark.cpp b/src/share/vm/gc_implementation/g1/concurrentMark.cpp --- a/src/share/vm/gc_implementation/g1/concurrentMark.cpp +++ b/src/share/vm/gc_implementation/g1/concurrentMark.cpp @@ -23,6 +23,7 @@ */ #include "precompiled.hpp" +#include "classfile/metadataOnStackMark.hpp" #include "classfile/symbolTable.hpp" #include "code/codeCache.hpp" #include "gc_implementation/g1/concurrentMark.inline.hpp" @@ -2602,17 +2603,27 @@ G1RemarkGCTraceTime trace("Unloading", G1Log::finer()); if (ClassUnloadingWithConcurrentMark) { + // Cleaning of klasses depends on correct information from MetadataMarkOnStack. The CodeCache::mark_on_stack + // part is too slow to be done serially, so it is handled during the weakRefsWorkParallelPart phase. + // Defer the cleaning until we have complete on_stack data. + MetadataOnStackMark md_on_stack(false /* Don't visit the code cache at this point */); + bool purged_classes; { G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest()); - purged_classes = SystemDictionary::do_unloading(&g1_is_alive); + purged_classes = SystemDictionary::do_unloading(&g1_is_alive, false /* Defer klass cleaning */); } { G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest()); weakRefsWorkParallelPart(&g1_is_alive, purged_classes); } + + { + G1RemarkGCTraceTime trace("Deallocate Metadata", G1Log::finest()); + ClassLoaderDataGraph::free_deallocate_lists(); + } } if (G1StringDedup::is_enabled()) { diff --git a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp --- a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp +++ b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp @@ -27,6 +27,7 @@ #endif #include "precompiled.hpp" +#include "classfile/metadataOnStackMark.hpp" #include "code/codeCache.hpp" #include "code/icBuffer.hpp" #include "gc_implementation/g1/bufferingOopClosure.hpp" @@ -5133,6 +5134,10 @@ clean_nmethod(claimed_nmethods[i]); } } + + // The nmethod cleaning helps out and does the CodeCache part of MetadataOnStackMark. + // Need to retire the buffers now that this thread has stopped cleaning nmethods. + MetadataOnStackMark::retire_buffer_for_thread(Thread::current()); } void work_second_pass(uint worker_id) { @@ -5185,6 +5190,9 @@ // G1 specific cleanup work that has // been moved here to be done in parallel. ik->clean_dependent_nmethods(); + if (JvmtiExport::has_redefined_a_class()) { + InstanceKlass::purge_previous_versions(ik); + } } void work() { @@ -5219,8 +5227,18 @@ _klass_cleaning_task(is_alive) { } + void pre_work_verification() { + assert(!MetadataOnStackMark::has_buffer_for_thread(Thread::current()), "Should be empty"); + } + + void post_work_verification() { + assert(!MetadataOnStackMark::has_buffer_for_thread(Thread::current()), "Should be empty"); + } + // The parallel work done by all worker threads. void work(uint worker_id) { + pre_work_verification(); + // Do first pass of code cache cleaning. 
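Because each cleaning worker may have filled a thread-local buffer while helping with the code cache, it must hand that buffer back before the sweep over the used-list runs; the pre/post assertions above check exactly that. A small standalone sketch of the retire-before-finishing discipline, with a ThreadLocal and a finally block standing in for the VM's per-thread buffer pointer (names illustrative):

public class WorkerRetireSketch {
    static final ThreadLocal<StringBuilder> localBuffer = new ThreadLocal<>();

    static void workerBody(int id) {
        try {
            localBuffer.set(new StringBuilder("worker-" + id));
            // ... cleaning work that appends to the buffer would go here ...
        } finally {
            retireBufferForCurrentThread();   // nothing may remain on the thread afterwards
        }
    }

    static void retireBufferForCurrentThread() {
        StringBuilder b = localBuffer.get();
        if (b != null) {
            localBuffer.remove();             // detach the buffer from the thread
            System.out.println("retired " + b);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Thread t = new Thread(() -> workerBody(1));
        t.start();
        t.join();
    }
}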
_code_cache_task.work_first_pass(worker_id); @@ -5239,6 +5257,8 @@ // Clean all klasses that were not unloaded. _klass_cleaning_task.work(); + + post_work_verification(); } }; diff --git a/src/share/vm/oops/constantPool.cpp b/src/share/vm/oops/constantPool.cpp --- a/src/share/vm/oops/constantPool.cpp +++ b/src/share/vm/oops/constantPool.cpp @@ -1817,11 +1817,22 @@ void ConstantPool::set_on_stack(const bool value) { if (value) { - _flags |= _on_stack; + int old_flags = *const_cast(&_flags); + while ((old_flags & _on_stack) == 0) { + int new_flags = old_flags | _on_stack; + int result = Atomic::cmpxchg(new_flags, &_flags, old_flags); + + if (result == old_flags) { + // Succeeded. + MetadataOnStackMark::record(this, Thread::current()); + return; + } + old_flags = result; + } } else { + // Clearing is done single-threadedly. _flags &= ~_on_stack; } - if (value) MetadataOnStackMark::record(this); } // JSR 292 support for patching constant pool oops after the class is linked and diff --git a/src/share/vm/oops/method.cpp b/src/share/vm/oops/method.cpp --- a/src/share/vm/oops/method.cpp +++ b/src/share/vm/oops/method.cpp @@ -1863,9 +1863,12 @@ void Method::set_on_stack(const bool value) { // Set both the method itself and its constant pool. The constant pool // on stack means some method referring to it is also on the stack. - _access_flags.set_on_stack(value); constants()->set_on_stack(value); - if (value) MetadataOnStackMark::record(this); + + bool succeeded = _access_flags.set_on_stack(value); + if (value && succeeded) { + MetadataOnStackMark::record(this, Thread::current()); + } } // Called when the class loader is unloaded to make all methods weak. diff --git a/src/share/vm/prims/jni.cpp b/src/share/vm/prims/jni.cpp --- a/src/share/vm/prims/jni.cpp +++ b/src/share/vm/prims/jni.cpp @@ -5080,6 +5080,7 @@ void TestNewSize_test(); void TestKlass_test(); void Test_linked_list(); +void TestChunkedList_test(); #if INCLUDE_ALL_GCS void TestOldFreeSpaceCalculation_test(); void TestG1BiasedArray_test(); @@ -5108,6 +5109,7 @@ run_unit_test(TestNewSize_test()); run_unit_test(TestKlass_test()); run_unit_test(Test_linked_list()); + run_unit_test(TestChunkedList_test()); #if INCLUDE_VM_STRUCTS run_unit_test(VMStructs::test()); #endif diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp --- a/src/share/vm/prims/jvmtiRedefineClasses.cpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp @@ -135,7 +135,7 @@ // Mark methods seen on stack and everywhere else so old methods are not // cleaned up if they're on the stack. - MetadataOnStackMark md_on_stack; + MetadataOnStackMark md_on_stack(true); HandleMark hm(thread); // make sure any handles created are deleted // before the stack walk again. diff --git a/src/share/vm/runtime/thread.cpp b/src/share/vm/runtime/thread.cpp --- a/src/share/vm/runtime/thread.cpp +++ b/src/share/vm/runtime/thread.cpp @@ -234,6 +234,8 @@ // This initial value ==> never claimed. 
_oops_do_parity = 0; + _metadata_on_stack_buffer = NULL; + // the handle mark links itself to last_handle_mark new HandleMark(this); diff --git a/src/share/vm/runtime/thread.hpp b/src/share/vm/runtime/thread.hpp --- a/src/share/vm/runtime/thread.hpp +++ b/src/share/vm/runtime/thread.hpp @@ -42,11 +42,10 @@ #include "runtime/threadLocalStorage.hpp" #include "runtime/thread_ext.hpp" #include "runtime/unhandledOops.hpp" -#include "utilities/macros.hpp" - #include "trace/traceBackend.hpp" #include "trace/traceMacros.hpp" #include "utilities/exceptions.hpp" +#include "utilities/macros.hpp" #include "utilities/top.hpp" #if INCLUDE_ALL_GCS #include "gc_implementation/g1/dirtyCardQueue.hpp" @@ -83,6 +82,10 @@ class ThreadClosure; class IdealGraphPrinter; +class Metadata; +template class ChunkedList; +typedef ChunkedList MetadataOnStackBuffer; + DEBUG_ONLY(class ResourceMark;) class WorkerThread; @@ -256,6 +259,9 @@ jlong _allocated_bytes; // Cumulative number of bytes allocated on // the Java heap + // Thread-local buffer used by MetadataOnStackMark. + MetadataOnStackBuffer* _metadata_on_stack_buffer; + TRACE_DATA _trace_data; // Thread-local data for tracing ThreadExt _ext; @@ -517,7 +523,10 @@ // creation fails due to lack of memory, too many threads etc. bool set_as_starting_thread(); - protected: + void set_metadata_on_stack_buffer(MetadataOnStackBuffer* buffer) { _metadata_on_stack_buffer = buffer; } + MetadataOnStackBuffer* metadata_on_stack_buffer() const { return _metadata_on_stack_buffer; } + +protected: // OS data associated with the thread OSThread* _osthread; // Platform-specific thread information diff --git a/src/share/vm/utilities/accessFlags.cpp b/src/share/vm/utilities/accessFlags.cpp --- a/src/share/vm/utilities/accessFlags.cpp +++ b/src/share/vm/utilities/accessFlags.cpp @@ -62,6 +62,21 @@ } while(f != old_flags); } +// Returns true iff this thread succeeded setting the bit. +bool AccessFlags::atomic_set_one_bit(jint bit) { + // Atomically update the flags with the bit given + jint old_flags, new_flags, f; + bool is_setting_bit = false; + do { + old_flags = _flags; + new_flags = old_flags | bit; + is_setting_bit = old_flags != new_flags; + f = Atomic::cmpxchg(new_flags, &_flags, old_flags); + } while(f != old_flags); + + return is_setting_bit; +} + #if !defined(PRODUCT) || INCLUDE_JVMTI void AccessFlags::print_on(outputStream* st) const { diff --git a/src/share/vm/utilities/accessFlags.hpp b/src/share/vm/utilities/accessFlags.hpp --- a/src/share/vm/utilities/accessFlags.hpp +++ b/src/share/vm/utilities/accessFlags.hpp @@ -170,6 +170,7 @@ // Atomic update of flags void atomic_set_bits(jint bits); + bool atomic_set_one_bit(jint bit); void atomic_clear_bits(jint bits); private: @@ -230,12 +231,13 @@ atomic_set_bits(JVM_ACC_FIELD_HAS_GENERIC_SIGNATURE); } - void set_on_stack(const bool value) + bool set_on_stack(const bool value) { if (value) { - atomic_set_bits(JVM_ACC_ON_STACK); + return atomic_set_one_bit(JVM_ACC_ON_STACK); } else { atomic_clear_bits(JVM_ACC_ON_STACK); + return true; // Ignored } } // Conversion diff --git a/src/share/vm/utilities/chunkedList.cpp b/src/share/vm/utilities/chunkedList.cpp new file mode 100644 --- /dev/null +++ b/src/share/vm/utilities/chunkedList.cpp @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
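The new AccessFlags::atomic_set_one_bit() above, like the CAS loop added to ConstantPool::set_on_stack(), reports whether the calling thread was the one that actually flipped the bit, so MetadataOnStackMark::record() runs at most once per object even when several threads race to mark it. A minimal standalone sketch of that pattern, using std::atomic rather than HotSpot's Atomic class (SimpleFlags and try_set_bit are illustrative names):

#include <atomic>
#include <cassert>

class SimpleFlags {
  std::atomic<int> _flags{0};
 public:
  // Returns true iff this call transitioned the bit from 0 to 1.
  bool try_set_bit(int bit) {
    int old_flags = _flags.load();
    while ((old_flags & bit) == 0) {
      // On failure, compare_exchange_weak refreshes old_flags with the current value.
      if (_flags.compare_exchange_weak(old_flags, old_flags | bit)) {
        return true;   // this thread set the bit
      }
    }
    return false;      // the bit was already set, possibly by another thread
  }
};

int main() {
  SimpleFlags f;
  assert(f.try_set_bit(0x1));    // first caller wins and would record the metadata
  assert(!f.try_set_bit(0x1));   // later callers see the bit already set and skip recording
  return 0;
}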
+ * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + * + */ + +#include "precompiled.hpp" +#include "utilities/chunkedList.hpp" +#include "utilities/debug.hpp" + +/////////////// Unit tests /////////////// + +#ifndef PRODUCT + +template +class TestChunkedList { + typedef ChunkedList ChunkedListT; + + public: + static void testEmpty() { + ChunkedListT buffer; + assert(buffer.size() == 0, "assert"); + } + + static void testFull() { + ChunkedListT buffer; + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + buffer.push((T)i); + } + assert(buffer.size() == ChunkedListT::BufferSize, "assert"); + assert(buffer.is_full(), "assert"); + } + + static void testSize() { + ChunkedListT buffer; + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + assert(buffer.size() == i, "assert"); + buffer.push((T)i); + assert(buffer.size() == i + 1, "assert"); + } + } + + static void testClear() { + ChunkedListT buffer; + + buffer.clear(); + assert(buffer.size() == 0, "assert"); + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize / 2; i++) { + buffer.push((T)i); + } + buffer.clear(); + assert(buffer.size() == 0, "assert"); + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + buffer.push((T)i); + } + buffer.clear(); + assert(buffer.size() == 0, "assert"); + } + + static void testAt() { + ChunkedListT buffer; + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + buffer.push((T)i); + assert(buffer.at(i) == (T)i, "assert"); + } + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + assert(buffer.at(i) == (T)i, "assert"); + } + } + + static void test() { + testEmpty(); + testFull(); + testSize(); + testClear(); + testAt(); + } +}; + +class Metadata; + +void TestChunkedList_test() { + TestChunkedList::test(); + TestChunkedList::test(); +} + +#endif diff --git a/src/share/vm/utilities/chunkedList.hpp b/src/share/vm/utilities/chunkedList.hpp new file mode 100644 --- /dev/null +++ b/src/share/vm/utilities/chunkedList.hpp @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). 
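The ChunkedList added by this patch is a fixed-size chunk that a thread fills privately, so the common recording path needs no synchronization; shared state is only touched when a chunk fills up and is retired (see MetadataOnStackMark::record() later in this patch). A rough standalone sketch of that idea, with LocalRecorder and record standing in for the HotSpot types:

#include <cstddef>

// Fixed-capacity, thread-private recording buffer (illustrative, not ChunkedList itself).
template <typename T, std::size_t N = 64>
class LocalRecorder {
  T _values[N];
  std::size_t _top = 0;
 public:
  bool is_full() const { return _top == N; }
  void push(T v) { _values[_top++] = v; }   // caller must check is_full() first
  std::size_t size() const { return _top; }
};

// The common path touches only the thread-private chunk; a full chunk would be
// retired to a shared list in the real code (elided here, so the old chunk leaks).
void record(LocalRecorder<int>*& chunk, int value) {
  if (chunk == nullptr || chunk->is_full()) {
    chunk = new LocalRecorder<int>();
  }
  chunk->push(value);
}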
+ * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + * + */ + +#ifndef SHARE_VM_UTILITIES_CHUNKED_LIST_HPP +#define SHARE_VM_UTILITIES_CHUNKED_LIST_HPP + +#include "memory/allocation.hpp" +#include "utilities/debug.hpp" + +template class ChunkedList : public CHeapObj { + template friend class TestChunkedList; + + static const size_t BufferSize = 64; + + T _values[BufferSize]; + T* _top; + + ChunkedList* _next_used; + ChunkedList* _next_free; + + T const * end() const { + return &_values[BufferSize]; + } + + public: + ChunkedList() : _top(_values), _next_used(NULL), _next_free(NULL) {} + + bool is_full() const { + return _top == end(); + } + + void clear() { + _top = _values; + // Don't clear the next pointers since that would interfere + // with other threads trying to iterate through the lists. + } + + void push(T m) { + assert(!is_full(), "Buffer is full"); + *_top = m; + _top++; + } + + void set_next_used(ChunkedList* buffer) { _next_used = buffer; } + void set_next_free(ChunkedList* buffer) { _next_free = buffer; } + + ChunkedList* next_used() const { return _next_used; } + ChunkedList* next_free() const { return _next_free; } + + size_t size() const { + return pointer_delta(_top, _values, sizeof(T)); + } + + T at(size_t i) { + assert(i < size(), err_msg("IOOBE i: " SIZE_FORMAT " size(): " SIZE_FORMAT, i, size())); + return _values[i]; + } +}; + +#endif // SHARE_VM_UTILITIES_CHUNKED_LIST_HPP # HG changeset patch # User amurillo # Date 1415380977 28800 # Fri Nov 07 09:22:57 2014 -0800 # Node ID 4d5dc0d0f8799fafa1135d51d85edd4edd566501 # Parent b0c7e7f1bbbe879ebcb9425551ed718e734997b6 # Parent b12a2a9b05ca00a9493736d2dcae7e2ee2389b37 Merge diff --git a/make/bsd/makefiles/mapfile-vers-debug b/make/bsd/makefiles/mapfile-vers-debug --- a/make/bsd/makefiles/mapfile-vers-debug +++ b/make/bsd/makefiles/mapfile-vers-debug @@ -187,6 +187,9 @@ _JVM_IsSupportedJNIVersion _JVM_IsThreadAlive _JVM_IsVMGeneratedMethodIx + _JVM_KnownToNotExist + _JVM_GetResourceLookupCacheURLs + _JVM_GetResourceLookupCache _JVM_LatestUserDefinedLoader _JVM_Listen _JVM_LoadClass0 diff --git a/make/bsd/makefiles/mapfile-vers-product b/make/bsd/makefiles/mapfile-vers-product --- a/make/bsd/makefiles/mapfile-vers-product +++ b/make/bsd/makefiles/mapfile-vers-product @@ -187,6 +187,9 @@ _JVM_IsSupportedJNIVersion _JVM_IsThreadAlive _JVM_IsVMGeneratedMethodIx + _JVM_KnownToNotExist + _JVM_GetResourceLookupCacheURLs + _JVM_GetResourceLookupCache _JVM_LatestUserDefinedLoader _JVM_Listen _JVM_LoadClass0 diff --git a/make/hotspot_version b/make/hotspot_version --- a/make/hotspot_version +++ b/make/hotspot_version @@ -35,7 +35,7 @@ HS_MAJOR_VER=25 HS_MINOR_VER=40 -HS_BUILD_NUMBER=17 +HS_BUILD_NUMBER=18 JDK_MAJOR_VER=1 JDK_MINOR_VER=8 diff --git a/make/linux/makefiles/mapfile-vers-debug b/make/linux/makefiles/mapfile-vers-debug --- a/make/linux/makefiles/mapfile-vers-debug +++ b/make/linux/makefiles/mapfile-vers-debug @@ -217,6 +217,9 @@ JVM_RegisterSignal; JVM_ReleaseUTF; JVM_ResolveClass; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_ResumeThread; JVM_Send; JVM_SendTo; diff --git a/make/linux/makefiles/mapfile-vers-product 
b/make/linux/makefiles/mapfile-vers-product --- a/make/linux/makefiles/mapfile-vers-product +++ b/make/linux/makefiles/mapfile-vers-product @@ -217,6 +217,9 @@ JVM_RegisterSignal; JVM_ReleaseUTF; JVM_ResolveClass; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_ResumeThread; JVM_Send; JVM_SendTo; diff --git a/make/solaris/makefiles/mapfile-vers b/make/solaris/makefiles/mapfile-vers --- a/make/solaris/makefiles/mapfile-vers +++ b/make/solaris/makefiles/mapfile-vers @@ -189,6 +189,9 @@ JVM_IsSupportedJNIVersion; JVM_IsThreadAlive; JVM_IsVMGeneratedMethodIx; + JVM_KnownToNotExist; + JVM_GetResourceLookupCacheURLs; + JVM_GetResourceLookupCache; JVM_LatestUserDefinedLoader; JVM_Listen; JVM_LoadClass0; diff --git a/src/os_cpu/linux_x86/vm/os_linux_x86.cpp b/src/os_cpu/linux_x86/vm/os_linux_x86.cpp --- a/src/os_cpu/linux_x86/vm/os_linux_x86.cpp +++ b/src/os_cpu/linux_x86/vm/os_linux_x86.cpp @@ -909,7 +909,7 @@ */ char* hint = (char*) (Linux::initial_thread_stack_bottom() - ((StackYellowPages + StackRedPages + 1) * page_size)); - char* codebuf = os::reserve_memory(page_size, hint); + char* codebuf = os::attempt_reserve_memory_at(page_size, hint); if ( (codebuf == NULL) || (!os::commit_memory(codebuf, page_size, true)) ) { return; // No matter, we tried, best effort. } diff --git a/src/share/vm/classfile/classLoader.cpp b/src/share/vm/classfile/classLoader.cpp --- a/src/share/vm/classfile/classLoader.cpp +++ b/src/share/vm/classfile/classLoader.cpp @@ -610,7 +610,7 @@ } #endif -void ClassLoader::setup_search_path(const char *class_path) { +void ClassLoader::setup_search_path(const char *class_path, bool canonicalize) { int offset = 0; int len = (int)strlen(class_path); int end = 0; @@ -625,7 +625,13 @@ char* path = NEW_RESOURCE_ARRAY(char, end - start + 1); strncpy(path, &class_path[start], end - start); path[end - start] = '\0'; - update_class_path_entry_list(path, false); + if (canonicalize) { + char* canonical_path = NEW_RESOURCE_ARRAY(char, JVM_MAXPATHLEN + 1); + if (get_canonical_path(path, canonical_path, JVM_MAXPATHLEN)) { + path = canonical_path; + } + } + update_class_path_entry_list(path, /*check_for_duplicates=*/canonicalize); #if INCLUDE_CDS if (DumpSharedSpaces) { check_shared_classpath(path); diff --git a/src/share/vm/classfile/classLoader.hpp b/src/share/vm/classfile/classLoader.hpp --- a/src/share/vm/classfile/classLoader.hpp +++ b/src/share/vm/classfile/classLoader.hpp @@ -129,8 +129,8 @@ bool _has_error; bool _throw_exception; volatile ClassPathEntry* _resolved_entry; + public: ClassPathEntry* resolve_entry(TRAPS); - public: bool is_jar_file(); const char* name() { return _path; } LazyClassPathEntry(const char* path, const struct stat* st, bool throw_exception); @@ -218,7 +218,7 @@ static void setup_meta_index(const char* meta_index_path, const char* meta_index_dir, int start_index); static void setup_bootstrap_search_path(); - static void setup_search_path(const char *class_path); + static void setup_search_path(const char *class_path, bool canonicalize=false); static void load_zip_library(); static ClassPathEntry* create_class_path_entry(const char *path, const struct stat* st, @@ -329,6 +329,10 @@ return e; } + static int num_classpath_entries() { + return _num_entries; + } + #if INCLUDE_CDS // Sharing dump and restore static void copy_package_info_buckets(char** top, char* end); diff --git a/src/share/vm/classfile/classLoaderData.cpp b/src/share/vm/classfile/classLoaderData.cpp --- a/src/share/vm/classfile/classLoaderData.cpp +++ 
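setup_search_path() above only turns on duplicate checking when it also canonicalizes, since two different spellings of the same path only compare equal after canonicalization. A standalone sketch of that canonicalize-then-deduplicate step, using POSIX realpath() in place of ClassLoader::get_canonical_path() (add_entry, seen and entries are illustrative):

#include <climits>
#include <cstdlib>
#include <set>
#include <string>
#include <vector>

// Canonicalize a class path element, then add it only if it was not seen before.
void add_entry(std::vector<std::string>& entries,
               std::set<std::string>& seen,
               const std::string& path) {
  char buf[PATH_MAX];
  std::string canonical = path;
  if (::realpath(path.c_str(), buf) != nullptr) {
    canonical = buf;                 // fall back to the raw path if realpath fails
  }
  if (seen.insert(canonical).second) {
    entries.push_back(canonical);    // first time this (canonical) path is seen
  }
}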
b/src/share/vm/classfile/classLoaderData.cpp @@ -747,7 +747,7 @@ // Move class loader data from main list to the unloaded list for unloading // and deallocation later. -bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure) { +bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure, bool clean_alive) { ClassLoaderData* data = _head; ClassLoaderData* prev = NULL; bool seen_dead_loader = false; @@ -756,16 +756,8 @@ // purging and we don't want to rewalk the previously unloaded class loader data. _saved_unloading = _unloading; - // mark metadata seen on the stack and code cache so we can delete - // unneeded entries. - bool has_redefined_a_class = JvmtiExport::has_redefined_a_class(); - MetadataOnStackMark md_on_stack; while (data != NULL) { if (data->is_alive(is_alive_closure)) { - if (has_redefined_a_class) { - data->classes_do(InstanceKlass::purge_previous_versions); - } - data->free_deallocate_list(); prev = data; data = data->next(); continue; @@ -787,6 +779,11 @@ _unloading = dead; } + if (clean_alive) { + // Clean previous versions and the deallocate list. + ClassLoaderDataGraph::clean_metaspaces(); + } + if (seen_dead_loader) { post_class_unload_events(); } @@ -794,6 +791,26 @@ return seen_dead_loader; } +void ClassLoaderDataGraph::clean_metaspaces() { + // mark metadata seen on the stack and code cache so we can delete unneeded entries. + bool has_redefined_a_class = JvmtiExport::has_redefined_a_class(); + MetadataOnStackMark md_on_stack(has_redefined_a_class); + + if (has_redefined_a_class) { + // purge_previous_versions also cleans weak method links. Because + // one method's MDO can reference another method from another + // class loader, we need to first clean weak method links for all + // class loaders here. Below, we can then free redefined methods + // for all class loaders. + for (ClassLoaderData* data = _head; data != NULL; data = data->next()) { + data->classes_do(InstanceKlass::purge_previous_versions); + } + } + + // Need to purge the previous version before deallocating. + free_deallocate_lists(); +} + void ClassLoaderDataGraph::purge() { assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!"); ClassLoaderData* list = _unloading; @@ -821,6 +838,14 @@ #endif } +void ClassLoaderDataGraph::free_deallocate_lists() { + for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) { + // We need to keep this data until InstanceKlass::purge_previous_version has been + // called on all alive classes. See the comment in ClassLoaderDataGraph::clean_metaspaces. + cld->free_deallocate_list(); + } +} + // CDS support // Global metaspaces for writing information to the shared archive. When diff --git a/src/share/vm/classfile/classLoaderData.hpp b/src/share/vm/classfile/classLoaderData.hpp --- a/src/share/vm/classfile/classLoaderData.hpp +++ b/src/share/vm/classfile/classLoaderData.hpp @@ -71,6 +71,7 @@ static ClassLoaderData* add(Handle class_loader, bool anonymous, TRAPS); static void post_class_unload_events(void); + static void clean_metaspaces(); public: static ClassLoaderData* find_or_create(Handle class_loader, TRAPS); static void purge(); @@ -89,7 +90,7 @@ static void classes_do(void f(Klass* const)); static void loaded_classes_do(KlassClosure* klass_closure); static void classes_unloading_do(void f(Klass* const)); - static bool do_unloading(BoolObjectClosure* is_alive); + static bool do_unloading(BoolObjectClosure* is_alive, bool clean_alive); // CMS support. 
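clean_metaspaces() above spells out the ordering constraint: because metadata owned by one class loader can reference methods of another loader, purge_previous_versions must have run for all loaders before any loader frees its deallocate list. A compressed standalone sketch of that two-pass ordering (Loader and its methods are stand-ins, not the HotSpot types):

#include <vector>

struct Loader {
  void purge_previous_versions() { /* drop weak links to redefined methods */ }
  void free_deallocate_list()    { /* release metadata nothing references anymore */ }
};

void clean_metaspaces_sketch(std::vector<Loader>& loaders, bool has_redefined_a_class) {
  if (has_redefined_a_class) {
    for (Loader& l : loaders) l.purge_previous_versions();  // pass 1: every loader first
  }
  for (Loader& l : loaders) l.free_deallocate_list();       // pass 2: only after pass 1 finished
}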
static void remember_new_clds(bool remember) { _saved_head = (remember ? _head : NULL); } @@ -105,6 +106,8 @@ } } + static void free_deallocate_lists(); + static void dump_on(outputStream * const out) PRODUCT_RETURN; static void dump() { dump_on(tty); } static void verify(); diff --git a/src/share/vm/classfile/classLoaderExt.hpp b/src/share/vm/classfile/classLoaderExt.hpp --- a/src/share/vm/classfile/classLoaderExt.hpp +++ b/src/share/vm/classfile/classLoaderExt.hpp @@ -64,6 +64,15 @@ ClassLoader::add_to_list(new_entry); } static void setup_search_paths() {} + + static void init_lookup_cache(TRAPS) {} + static void copy_lookup_cache_to_archive(char** top, char* end) {} + static char* restore_lookup_cache_from_archive(char* buffer) {return buffer;} + static inline bool is_lookup_cache_enabled() {return false;} + + static bool known_to_not_exist(JNIEnv *env, jobject loader, const char *classname, TRAPS) {return false;} + static jobjectArray get_lookup_cache_urls(JNIEnv *env, jobject loader, TRAPS) {return NULL;} + static jintArray get_lookup_cache(JNIEnv *env, jobject loader, const char *pkgname, TRAPS) {return NULL;} }; #endif // SHARE_VM_CLASSFILE_CLASSLOADEREXT_HPP diff --git a/src/share/vm/classfile/metadataOnStackMark.cpp b/src/share/vm/classfile/metadataOnStackMark.cpp --- a/src/share/vm/classfile/metadataOnStackMark.cpp +++ b/src/share/vm/classfile/metadataOnStackMark.cpp @@ -31,25 +31,23 @@ #include "runtime/synchronizer.hpp" #include "runtime/thread.hpp" #include "services/threadService.hpp" -#include "utilities/growableArray.hpp" +#include "utilities/chunkedList.hpp" +volatile MetadataOnStackBuffer* MetadataOnStackMark::_used_buffers = NULL; +volatile MetadataOnStackBuffer* MetadataOnStackMark::_free_buffers = NULL; -// Keep track of marked on-stack metadata so it can be cleared. -GrowableArray* _marked_objects = NULL; NOT_PRODUCT(bool MetadataOnStackMark::_is_active = false;) // Walk metadata on the stack and mark it so that redefinition doesn't delete // it. Class unloading also walks the previous versions and might try to // delete it, so this class is used by class unloading also. -MetadataOnStackMark::MetadataOnStackMark() { +MetadataOnStackMark::MetadataOnStackMark(bool visit_code_cache) { assert(SafepointSynchronize::is_at_safepoint(), "sanity check"); + assert(_used_buffers == NULL, "sanity check"); NOT_PRODUCT(_is_active = true;) - if (_marked_objects == NULL) { - _marked_objects = new (ResourceObj::C_HEAP, mtClass) GrowableArray(1000, true); - } Threads::metadata_do(Metadata::mark_on_stack); - if (JvmtiExport::has_redefined_a_class()) { + if (visit_code_cache) { CodeCache::alive_nmethods_do(nmethod::mark_on_stack); } CompileBroker::mark_on_stack(); @@ -62,15 +60,93 @@ // Unmark everything that was marked. Can't do the same walk because // redefine classes messes up the code cache so the set of methods // might not be the same. - for (int i = 0; i< _marked_objects->length(); i++) { - _marked_objects->at(i)->set_on_stack(false); + + retire_buffer_for_thread(Thread::current()); + + MetadataOnStackBuffer* buffer = const_cast(_used_buffers); + while (buffer != NULL) { + // Clear on stack state for all metadata. + size_t size = buffer->size(); + for (size_t i = 0; i < size; i++) { + Metadata* md = buffer->at(i); + md->set_on_stack(false); + } + + MetadataOnStackBuffer* next = buffer->next_used(); + + // Move the buffer to the free list. 
+ buffer->clear(); + buffer->set_next_used(NULL); + buffer->set_next_free(const_cast(_free_buffers)); + _free_buffers = buffer; + + // Step to next used buffer. + buffer = next; } - _marked_objects->clear(); // reuse growable array for next time. + + _used_buffers = NULL; + NOT_PRODUCT(_is_active = false;) } +void MetadataOnStackMark::retire_buffer(MetadataOnStackBuffer* buffer) { + if (buffer == NULL) { + return; + } + + MetadataOnStackBuffer* old_head; + + do { + old_head = const_cast(_used_buffers); + buffer->set_next_used(old_head); + } while (Atomic::cmpxchg_ptr(buffer, &_used_buffers, old_head) != old_head); +} + +void MetadataOnStackMark::retire_buffer_for_thread(Thread* thread) { + retire_buffer(thread->metadata_on_stack_buffer()); + thread->set_metadata_on_stack_buffer(NULL); +} + +bool MetadataOnStackMark::has_buffer_for_thread(Thread* thread) { + return thread->metadata_on_stack_buffer() != NULL; +} + +MetadataOnStackBuffer* MetadataOnStackMark::allocate_buffer() { + MetadataOnStackBuffer* allocated; + MetadataOnStackBuffer* new_head; + + do { + allocated = const_cast(_free_buffers); + if (allocated == NULL) { + break; + } + new_head = allocated->next_free(); + } while (Atomic::cmpxchg_ptr(new_head, &_free_buffers, allocated) != allocated); + + if (allocated == NULL) { + allocated = new MetadataOnStackBuffer(); + } + + assert(!allocated->is_full(), err_msg("Should not be full: " PTR_FORMAT, p2i(allocated))); + + return allocated; +} + // Record which objects are marked so we can unmark the same objects. -void MetadataOnStackMark::record(Metadata* m) { +void MetadataOnStackMark::record(Metadata* m, Thread* thread) { assert(_is_active, "metadata on stack marking is active"); - _marked_objects->push(m); + + MetadataOnStackBuffer* buffer = thread->metadata_on_stack_buffer(); + + if (buffer != NULL && buffer->is_full()) { + retire_buffer(buffer); + buffer = NULL; + } + + if (buffer == NULL) { + buffer = allocate_buffer(); + thread->set_metadata_on_stack_buffer(buffer); + } + + buffer->push(m); } diff --git a/src/share/vm/classfile/metadataOnStackMark.hpp b/src/share/vm/classfile/metadataOnStackMark.hpp --- a/src/share/vm/classfile/metadataOnStackMark.hpp +++ b/src/share/vm/classfile/metadataOnStackMark.hpp @@ -26,9 +26,12 @@ #define SHARE_VM_CLASSFILE_METADATAONSTACKMARK_HPP #include "memory/allocation.hpp" +#include "utilities/chunkedList.hpp" class Metadata; +typedef ChunkedList MetadataOnStackBuffer; + // Helper class to mark and unmark metadata used on the stack as either handles // or executing methods, so that it can't be deleted during class redefinition // and class unloading. @@ -36,10 +39,20 @@ // metadata during parsing, relocated methods, and methods in backtraces. 
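retire_buffer() and allocate_buffer() above maintain the global used and free lists with CAS loops: retiring pushes a buffer onto _used_buffers, and allocating pops from _free_buffers, falling back to new when that list is empty. Below is a simplified standalone sketch of those two operations with std::atomic instead of Atomic::cmpxchg_ptr; nodes are never deleted in this sketch, which sidesteps the reclamation and ABA issues a general-purpose lock-free stack would have to handle (Node, push_used, pop_free_or_new are illustrative names).

#include <atomic>

struct Node {
  Node* next  = nullptr;
  int payload = 0;
};

static std::atomic<Node*> g_used{nullptr};  // cf. _used_buffers
static std::atomic<Node*> g_free{nullptr};  // cf. _free_buffers

// Publish a filled node on the used list (cf. retire_buffer()).
void push_used(Node* n) {
  Node* old_head = g_used.load();
  do {
    n->next = old_head;
  } while (!g_used.compare_exchange_weak(old_head, n));
}

// Reuse a node from the free list if possible, else allocate (cf. allocate_buffer()).
Node* pop_free_or_new() {
  Node* head = g_free.load();
  while (head != nullptr &&
         !g_free.compare_exchange_weak(head, head->next)) {
    // compare_exchange_weak refreshed 'head' with the current value; retry.
  }
  return (head != nullptr) ? head : new Node();
}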
class MetadataOnStackMark : public StackObj { NOT_PRODUCT(static bool _is_active;) + + static volatile MetadataOnStackBuffer* _used_buffers; + static volatile MetadataOnStackBuffer* _free_buffers; + + static MetadataOnStackBuffer* allocate_buffer(); + static void retire_buffer(MetadataOnStackBuffer* buffer); + public: - MetadataOnStackMark(); - ~MetadataOnStackMark(); - static void record(Metadata* m); + MetadataOnStackMark(bool visit_code_cache); + ~MetadataOnStackMark(); + + static void record(Metadata* m, Thread* thread); + static void retire_buffer_for_thread(Thread* thread); + static bool has_buffer_for_thread(Thread* thread); }; #endif // SHARE_VM_CLASSFILE_METADATAONSTACKMARK_HPP diff --git a/src/share/vm/classfile/systemDictionary.cpp b/src/share/vm/classfile/systemDictionary.cpp --- a/src/share/vm/classfile/systemDictionary.cpp +++ b/src/share/vm/classfile/systemDictionary.cpp @@ -1691,9 +1691,9 @@ // Assumes classes in the SystemDictionary are only unloaded at a safepoint // Note: anonymous classes are not in the SD. -bool SystemDictionary::do_unloading(BoolObjectClosure* is_alive) { +bool SystemDictionary::do_unloading(BoolObjectClosure* is_alive, bool clean_alive) { // First, mark for unload all ClassLoaderData referencing a dead class loader. - bool unloading_occurred = ClassLoaderDataGraph::do_unloading(is_alive); + bool unloading_occurred = ClassLoaderDataGraph::do_unloading(is_alive, clean_alive); if (unloading_occurred) { dictionary()->do_unloading(); constraints()->purge_loader_constraints(); diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp --- a/src/share/vm/classfile/systemDictionary.hpp +++ b/src/share/vm/classfile/systemDictionary.hpp @@ -175,6 +175,8 @@ do_klass(URL_klass, java_net_URL, Pre ) \ do_klass(Jar_Manifest_klass, java_util_jar_Manifest, Pre ) \ do_klass(sun_misc_Launcher_klass, sun_misc_Launcher, Pre ) \ + do_klass(sun_misc_Launcher_AppClassLoader_klass, sun_misc_Launcher_AppClassLoader, Pre ) \ + do_klass(sun_misc_Launcher_ExtClassLoader_klass, sun_misc_Launcher_ExtClassLoader, Pre ) \ do_klass(CodeSource_klass, java_security_CodeSource, Pre ) \ \ /* It's NULL in non-1.4 JDKs. */ \ @@ -339,7 +341,7 @@ // Unload (that is, break root links to) all unmarked classes and // loaders. Returns "true" iff something was unloaded. 
- static bool do_unloading(BoolObjectClosure* is_alive); + static bool do_unloading(BoolObjectClosure* is_alive, bool clean_alive = true); // Used by DumpSharedSpaces only to remove classes that failed verification static void remove_classes_in_error_state(); diff --git a/src/share/vm/classfile/vmSymbols.hpp b/src/share/vm/classfile/vmSymbols.hpp --- a/src/share/vm/classfile/vmSymbols.hpp +++ b/src/share/vm/classfile/vmSymbols.hpp @@ -116,6 +116,7 @@ template(java_lang_AssertionStatusDirectives, "java/lang/AssertionStatusDirectives") \ template(getBootClassPathEntryForClass_name, "getBootClassPathEntryForClass") \ template(sun_misc_PostVMInitHook, "sun/misc/PostVMInitHook") \ + template(sun_misc_Launcher_AppClassLoader, "sun/misc/Launcher$AppClassLoader") \ template(sun_misc_Launcher_ExtClassLoader, "sun/misc/Launcher$ExtClassLoader") \ \ /* Java runtime version access */ \ diff --git a/src/share/vm/code/nmethod.cpp b/src/share/vm/code/nmethod.cpp --- a/src/share/vm/code/nmethod.cpp +++ b/src/share/vm/code/nmethod.cpp @@ -1720,11 +1720,17 @@ set_unload_reported(); } -void static clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) { +void static clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive, bool mark_on_stack) { if (ic->is_icholder_call()) { // The only exception is compiledICHolder oops which may // yet be marked below. (We check this further below). CompiledICHolder* cichk_oop = ic->cached_icholder(); + + if (mark_on_stack) { + Metadata::mark_on_stack(cichk_oop->holder_method()); + Metadata::mark_on_stack(cichk_oop->holder_klass()); + } + if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) && cichk_oop->holder_klass()->is_loader_alive(is_alive)) { return; @@ -1732,6 +1738,10 @@ } else { Metadata* ic_oop = ic->cached_metadata(); if (ic_oop != NULL) { + if (mark_on_stack) { + Metadata::mark_on_stack(ic_oop); + } + if (ic_oop->is_klass()) { if (((Klass*)ic_oop)->is_loader_alive(is_alive)) { return; @@ -1792,7 +1802,7 @@ while(iter.next()) { if (iter.type() == relocInfo::virtual_call_type) { CompiledIC *ic = CompiledIC_at(&iter); - clean_ic_if_metadata_is_dead(ic, is_alive); + clean_ic_if_metadata_is_dead(ic, is_alive, false); } } } @@ -1860,6 +1870,53 @@ return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from); } +bool nmethod::unload_if_dead_at(RelocIterator* iter_at_oop, BoolObjectClosure *is_alive, bool unloading_occurred) { + assert(iter_at_oop->type() == relocInfo::oop_type, "Wrong relocation type"); + + oop_Relocation* r = iter_at_oop->oop_reloc(); + // Traverse those oops directly embedded in the code. + // Other oops (oop_index>0) are seen as part of scopes_oops. + assert(1 == (r->oop_is_immediate()) + + (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()), + "oop must be found in exactly one place"); + if (r->oop_is_immediate() && r->oop_value() != NULL) { + // Unload this nmethod if the oop is dead. + if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) { + return true;; + } + } + + return false; +} + +void nmethod::mark_metadata_on_stack_at(RelocIterator* iter_at_metadata) { + assert(iter_at_metadata->type() == relocInfo::metadata_type, "Wrong relocation type"); + + metadata_Relocation* r = iter_at_metadata->metadata_reloc(); + // In this metadata, we must only follow those metadatas directly embedded in + // the code. Other metadatas (oop_index>0) are seen as part of + // the metadata section below. 
+ assert(1 == (r->metadata_is_immediate()) + + (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()), + "metadata must be found in exactly one place"); + if (r->metadata_is_immediate() && r->metadata_value() != NULL) { + Metadata* md = r->metadata_value(); + if (md != _method) Metadata::mark_on_stack(md); + } +} + +void nmethod::mark_metadata_on_stack_non_relocs() { + // Visit the metadata section + for (Metadata** p = metadata_begin(); p < metadata_end(); p++) { + if (*p == Universe::non_oop_word() || *p == NULL) continue; // skip non-oops + Metadata* md = *p; + Metadata::mark_on_stack(md); + } + + // Visit metadata not embedded in the other places. + if (_method != NULL) Metadata::mark_on_stack(_method); +} + bool nmethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) { ResourceMark rm; @@ -1889,6 +1946,11 @@ unloading_occurred = true; } + // When class redefinition is used all metadata in the CodeCache has to be recorded, + // so that unused "previous versions" can be purged. Since walking the CodeCache can + // be expensive, the "mark on stack" is piggy-backed on this parallel unloading code. + bool mark_metadata_on_stack = a_class_was_redefined; + // Exception cache clean_exception_cache(is_alive); @@ -1904,7 +1966,7 @@ if (unloading_occurred) { // If class unloading occurred we first iterate over all inline caches and // clear ICs where the cached oop is referring to an unloaded klass or method. - clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive); + clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive, mark_metadata_on_stack); } postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this); @@ -1920,24 +1982,21 @@ case relocInfo::oop_type: if (!is_unloaded) { - // Unload check - oop_Relocation* r = iter.oop_reloc(); - // Traverse those oops directly embedded in the code. - // Other oops (oop_index>0) are seen as part of scopes_oops. - assert(1 == (r->oop_is_immediate()) + - (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()), - "oop must be found in exactly one place"); - if (r->oop_is_immediate() && r->oop_value() != NULL) { - if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) { - is_unloaded = true; - } - } + is_unloaded = unload_if_dead_at(&iter, is_alive, unloading_occurred); } break; + case relocInfo::metadata_type: + if (mark_metadata_on_stack) { + mark_metadata_on_stack_at(&iter); + } } } + if (mark_metadata_on_stack) { + mark_metadata_on_stack_non_relocs(); + } + if (is_unloaded) { return postponed; } @@ -2085,7 +2144,7 @@ while (iter.next()) { if (iter.type() == relocInfo::metadata_type ) { metadata_Relocation* r = iter.metadata_reloc(); - // In this lmetadata, we must only follow those metadatas directly embedded in + // In this metadata, we must only follow those metadatas directly embedded in // the code. Other metadatas (oop_index>0) are seen as part of // the metadata section below. assert(1 == (r->metadata_is_immediate()) + @@ -2119,7 +2178,7 @@ f(md); } - // Call function Method*, not embedded in these other places. + // Visit metadata not embedded in the other places. if (_method != NULL) f(_method); } diff --git a/src/share/vm/code/nmethod.hpp b/src/share/vm/code/nmethod.hpp --- a/src/share/vm/code/nmethod.hpp +++ b/src/share/vm/code/nmethod.hpp @@ -614,9 +614,16 @@ // The parallel versions are used by G1. 
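As the comment in do_unloading_parallel() above notes, walking the code cache is expensive, so the CodeCache part of MetadataOnStackMark is piggy-backed on the unloading walk: when redefinition has happened, the same pass that cleans each entry also marks its metadata. A deliberately tiny standalone sketch of that one-pass-does-both shape (Entry, clean_entry, unloading_walk are placeholders):

#include <vector>

struct Entry { bool dead = false; bool marked = false; };

void clean_entry(Entry& e) { if (e.dead) { /* clear the inline cache, etc. */ } }

void unloading_walk(std::vector<Entry>& entries, bool mark_metadata_on_stack) {
  for (Entry& e : entries) {
    if (mark_metadata_on_stack) {
      e.marked = true;   // the marking work, folded into the existing pass
    }
    clean_entry(e);      // the original unloading work
  }
}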
bool do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred); void do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred); + + private: // Unload a nmethod if the *root object is dead. bool can_unload(BoolObjectClosure* is_alive, oop* root, bool unloading_occurred); + bool unload_if_dead_at(RelocIterator *iter_at_oop, BoolObjectClosure* is_alive, bool unloading_occurred); + void mark_metadata_on_stack_at(RelocIterator* iter_at_metadata); + void mark_metadata_on_stack_non_relocs(); + + public: void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f); void oops_do(OopClosure* f) { oops_do(f, false); } diff --git a/src/share/vm/gc_implementation/g1/concurrentMark.cpp b/src/share/vm/gc_implementation/g1/concurrentMark.cpp --- a/src/share/vm/gc_implementation/g1/concurrentMark.cpp +++ b/src/share/vm/gc_implementation/g1/concurrentMark.cpp @@ -23,6 +23,7 @@ */ #include "precompiled.hpp" +#include "classfile/metadataOnStackMark.hpp" #include "classfile/symbolTable.hpp" #include "code/codeCache.hpp" #include "gc_implementation/g1/concurrentMark.inline.hpp" @@ -2602,17 +2603,27 @@ G1RemarkGCTraceTime trace("Unloading", G1Log::finer()); if (ClassUnloadingWithConcurrentMark) { + // Cleaning of klasses depends on correct information from MetadataMarkOnStack. The CodeCache::mark_on_stack + // part is too slow to be done serially, so it is handled during the weakRefsWorkParallelPart phase. + // Defer the cleaning until we have complete on_stack data. + MetadataOnStackMark md_on_stack(false /* Don't visit the code cache at this point */); + bool purged_classes; { G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest()); - purged_classes = SystemDictionary::do_unloading(&g1_is_alive); + purged_classes = SystemDictionary::do_unloading(&g1_is_alive, false /* Defer klass cleaning */); } { G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest()); weakRefsWorkParallelPart(&g1_is_alive, purged_classes); } + + { + G1RemarkGCTraceTime trace("Deallocate Metadata", G1Log::finest()); + ClassLoaderDataGraph::free_deallocate_lists(); + } } if (G1StringDedup::is_enabled()) { diff --git a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp --- a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp +++ b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp @@ -27,6 +27,7 @@ #endif #include "precompiled.hpp" +#include "classfile/metadataOnStackMark.hpp" #include "code/codeCache.hpp" #include "code/icBuffer.hpp" #include "gc_implementation/g1/bufferingOopClosure.hpp" @@ -5133,6 +5134,10 @@ clean_nmethod(claimed_nmethods[i]); } } + + // The nmethod cleaning helps out and does the CodeCache part of MetadataOnStackMark. + // Need to retire the buffers now that this thread has stopped cleaning nmethods. + MetadataOnStackMark::retire_buffer_for_thread(Thread::current()); } void work_second_pass(uint worker_id) { @@ -5185,6 +5190,9 @@ // G1 specific cleanup work that has // been moved here to be done in parallel. 
ik->clean_dependent_nmethods(); + if (JvmtiExport::has_redefined_a_class()) { + InstanceKlass::purge_previous_versions(ik); + } } void work() { @@ -5219,8 +5227,18 @@ _klass_cleaning_task(is_alive) { } + void pre_work_verification() { + assert(!MetadataOnStackMark::has_buffer_for_thread(Thread::current()), "Should be empty"); + } + + void post_work_verification() { + assert(!MetadataOnStackMark::has_buffer_for_thread(Thread::current()), "Should be empty"); + } + // The parallel work done by all worker threads. void work(uint worker_id) { + pre_work_verification(); + // Do first pass of code cache cleaning. _code_cache_task.work_first_pass(worker_id); @@ -5239,6 +5257,8 @@ // Clean all klasses that were not unloaded. _klass_cleaning_task.work(); + + post_work_verification(); } }; diff --git a/src/share/vm/memory/metadataFactory.hpp b/src/share/vm/memory/metadataFactory.hpp --- a/src/share/vm/memory/metadataFactory.hpp +++ b/src/share/vm/memory/metadataFactory.hpp @@ -64,6 +64,12 @@ template static void free_array(ClassLoaderData* loader_data, Array* data) { + if (DumpSharedSpaces) { + // FIXME: the freeing code is buggy, especially when PrintSharedSpaces is enabled. + // Disable for now -- this means if you specify bad classes in your classlist you + // may have wasted space inside the archive. + return; + } if (data != NULL) { assert(loader_data != NULL, "shouldn't pass null"); assert(!data->is_shared(), "cannot deallocate array in shared spaces"); diff --git a/src/share/vm/memory/metaspaceShared.cpp b/src/share/vm/memory/metaspaceShared.cpp --- a/src/share/vm/memory/metaspaceShared.cpp +++ b/src/share/vm/memory/metaspaceShared.cpp @@ -24,6 +24,7 @@ #include "precompiled.hpp" #include "classfile/dictionary.hpp" +#include "classfile/classLoaderExt.hpp" #include "classfile/loaderConstraints.hpp" #include "classfile/placeholders.hpp" #include "classfile/sharedClassUtil.hpp" @@ -39,6 +40,7 @@ #include "runtime/signature.hpp" #include "runtime/vm_operations.hpp" #include "runtime/vmThread.hpp" +#include "utilities/hashtable.hpp" #include "utilities/hashtable.inline.hpp" PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC @@ -533,6 +535,8 @@ ClassLoader::copy_package_info_table(&md_top, md_end); ClassLoader::verify(); + ClassLoaderExt::copy_lookup_cache_to_archive(&md_top, md_end); + // Write the other data to the output array. 
WriteClosure wc(md_top, md_end); MetaspaceShared::serialize(&wc); @@ -745,6 +749,8 @@ } tty->print_cr("Loading classes to share: done."); + ClassLoaderExt::init_lookup_cache(THREAD); + if (PrintSharedSpaces) { tty->print_cr("Shared spaces: preloaded %d classes", class_count); } @@ -1056,6 +1062,8 @@ buffer += sizeof(intptr_t); buffer += len; + buffer = ClassLoaderExt::restore_lookup_cache_from_archive(buffer); + intptr_t* array = (intptr_t*)buffer; ReadClosure rc(&array); serialize(&rc); diff --git a/src/share/vm/memory/metaspaceShared.hpp b/src/share/vm/memory/metaspaceShared.hpp --- a/src/share/vm/memory/metaspaceShared.hpp +++ b/src/share/vm/memory/metaspaceShared.hpp @@ -32,9 +32,9 @@ #define LargeSharedArchiveSize (300*M) #define HugeSharedArchiveSize (800*M) -#define ReadOnlyRegionPercentage 0.4 -#define ReadWriteRegionPercentage 0.55 -#define MiscDataRegionPercentage 0.03 +#define ReadOnlyRegionPercentage 0.39 +#define ReadWriteRegionPercentage 0.50 +#define MiscDataRegionPercentage 0.09 #define MiscCodeRegionPercentage 0.02 #define LargeThresholdClassCount 5000 #define HugeThresholdClassCount 40000 diff --git a/src/share/vm/oops/constantPool.cpp b/src/share/vm/oops/constantPool.cpp --- a/src/share/vm/oops/constantPool.cpp +++ b/src/share/vm/oops/constantPool.cpp @@ -1817,11 +1817,22 @@ void ConstantPool::set_on_stack(const bool value) { if (value) { - _flags |= _on_stack; + int old_flags = *const_cast(&_flags); + while ((old_flags & _on_stack) == 0) { + int new_flags = old_flags | _on_stack; + int result = Atomic::cmpxchg(new_flags, &_flags, old_flags); + + if (result == old_flags) { + // Succeeded. + MetadataOnStackMark::record(this, Thread::current()); + return; + } + old_flags = result; + } } else { + // Clearing is done single-threadedly. _flags &= ~_on_stack; } - if (value) MetadataOnStackMark::record(this); } // JSR 292 support for patching constant pool oops after the class is linked and diff --git a/src/share/vm/oops/instanceKlass.cpp b/src/share/vm/oops/instanceKlass.cpp --- a/src/share/vm/oops/instanceKlass.cpp +++ b/src/share/vm/oops/instanceKlass.cpp @@ -2890,6 +2890,22 @@ OsrList_lock->unlock(); } +int InstanceKlass::mark_osr_nmethods(const Method* m) { + // This is a short non-blocking critical region, so the no safepoint check is ok. + MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); + nmethod* osr = osr_nmethods_head(); + int found = 0; + while (osr != NULL) { + assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); + if (osr->method() == m) { + osr->mark_for_deoptimization(); + found++; + } + osr = osr->osr_link(); + } + return found; +} + nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const { // This is a short non-blocking critical region, so the no safepoint check is ok. 
OsrList_lock->lock_without_safepoint_check(); diff --git a/src/share/vm/oops/instanceKlass.hpp b/src/share/vm/oops/instanceKlass.hpp --- a/src/share/vm/oops/instanceKlass.hpp +++ b/src/share/vm/oops/instanceKlass.hpp @@ -783,6 +783,7 @@ void set_osr_nmethods_head(nmethod* h) { _osr_nmethods_head = h; }; void add_osr_nmethod(nmethod* n); void remove_osr_nmethod(nmethod* n); + int mark_osr_nmethods(const Method* m); nmethod* lookup_osr_nmethod(const Method* m, int bci, int level, bool match_level) const; // Breakpoint support (see methods on Method* for details) diff --git a/src/share/vm/oops/method.cpp b/src/share/vm/oops/method.cpp --- a/src/share/vm/oops/method.cpp +++ b/src/share/vm/oops/method.cpp @@ -1863,9 +1863,12 @@ void Method::set_on_stack(const bool value) { // Set both the method itself and its constant pool. The constant pool // on stack means some method referring to it is also on the stack. - _access_flags.set_on_stack(value); constants()->set_on_stack(value); - if (value) MetadataOnStackMark::record(this); + + bool succeeded = _access_flags.set_on_stack(value); + if (value && succeeded) { + MetadataOnStackMark::record(this, Thread::current()); + } } // Called when the class loader is unloaded to make all methods weak. diff --git a/src/share/vm/oops/method.hpp b/src/share/vm/oops/method.hpp --- a/src/share/vm/oops/method.hpp +++ b/src/share/vm/oops/method.hpp @@ -793,6 +793,10 @@ return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL; } + int mark_osr_nmethods() { + return method_holder()->mark_osr_nmethods(this); + } + nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) { return method_holder()->lookup_osr_nmethod(this, bci, level, match_level); } diff --git a/src/share/vm/opto/lcm.cpp b/src/share/vm/opto/lcm.cpp --- a/src/share/vm/opto/lcm.cpp +++ b/src/share/vm/opto/lcm.cpp @@ -484,9 +484,7 @@ iop == Op_CreateEx || // Create-exception must start block iop == Op_CheckCastPP ) { - // select the node n - // remove n from worklist and retain the order of remaining nodes - worklist.remove((uint)i); + worklist.map(i,worklist.pop()); return n; } @@ -572,9 +570,7 @@ assert(idx >= 0, "index should be set"); Node *n = worklist[(uint)idx]; // Get the winner - // select the node n - // remove n from worklist and retain the order of remaining nodes - worklist.remove((uint)idx); + worklist.map((uint)idx, worklist.pop()); // Compress worklist return n; } diff --git a/src/share/vm/prims/jni.cpp b/src/share/vm/prims/jni.cpp --- a/src/share/vm/prims/jni.cpp +++ b/src/share/vm/prims/jni.cpp @@ -5080,6 +5080,7 @@ void TestNewSize_test(); void TestKlass_test(); void Test_linked_list(); +void TestChunkedList_test(); #if INCLUDE_ALL_GCS void TestOldFreeSpaceCalculation_test(); void TestG1BiasedArray_test(); @@ -5108,6 +5109,7 @@ run_unit_test(TestNewSize_test()); run_unit_test(TestKlass_test()); run_unit_test(Test_linked_list()); + run_unit_test(TestChunkedList_test()); #if INCLUDE_VM_STRUCTS run_unit_test(VMStructs::test()); #endif diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp --- a/src/share/vm/prims/jvm.cpp +++ b/src/share/vm/prims/jvm.cpp @@ -24,6 +24,7 @@ #include "precompiled.hpp" #include "classfile/classLoader.hpp" +#include "classfile/classLoaderExt.hpp" #include "classfile/javaAssertions.hpp" #include "classfile/javaClasses.hpp" #include "classfile/symbolTable.hpp" @@ -393,6 +394,14 @@ } } + const char* enableSharedLookupCache = "false"; +#if INCLUDE_CDS + if (ClassLoaderExt::is_lookup_cache_enabled()) { 
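The lcm.cpp change above replaces an order-preserving worklist.remove() with worklist.map(i, worklist.pop()): since the order of the remaining entries does not matter at that point, slot i is simply overwritten with the last element and the list shrinks by one, turning an O(n) shift into an O(1) swap. The same idiom with a std::vector, for illustration (take_at is a made-up name):

#include <cstddef>
#include <vector>

// Remove and return element i without preserving the order of the rest.
template <typename T>
T take_at(std::vector<T>& worklist, std::size_t i) {
  T selected = worklist[i];
  worklist[i] = worklist.back();  // "map(i, pop())": drop the last entry into slot i
  worklist.pop_back();
  return selected;
}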
+ enableSharedLookupCache = "true"; + } +#endif + PUTPROP(props, "sun.cds.enableSharedLookupCache", enableSharedLookupCache); + return properties; JVM_END @@ -766,6 +775,36 @@ JVM_END +JVM_ENTRY(jboolean, JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname)) + JVMWrapper("JVM_KnownToNotExist"); +#if INCLUDE_CDS + return ClassLoaderExt::known_to_not_exist(env, loader, classname, CHECK_(false)); +#else + return false; +#endif +JVM_END + + +JVM_ENTRY(jobjectArray, JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader)) + JVMWrapper("JVM_GetResourceLookupCacheURLs"); +#if INCLUDE_CDS + return ClassLoaderExt::get_lookup_cache_urls(env, loader, CHECK_NULL); +#else + return NULL; +#endif +JVM_END + + +JVM_ENTRY(jintArray, JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name)) + JVMWrapper("JVM_GetResourceLookupCache"); +#if INCLUDE_CDS + return ClassLoaderExt::get_lookup_cache(env, loader, resource_name, CHECK_NULL); +#else + return NULL; +#endif +JVM_END + + // Returns a class loaded by the bootstrap class loader; or null // if not found. ClassNotFoundException is not thrown. // diff --git a/src/share/vm/prims/jvm.h b/src/share/vm/prims/jvm.h --- a/src/share/vm/prims/jvm.h +++ b/src/share/vm/prims/jvm.h @@ -1548,6 +1548,31 @@ JNIEXPORT jobjectArray JNICALL JVM_GetThreadStateNames(JNIEnv* env, jint javaThreadState, jintArray values); +/* + * Returns true if the JVM's lookup cache indicates that this class is + * known to NOT exist for the given loader. + */ +JNIEXPORT jboolean JNICALL +JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname); + +/* + * Returns an array of all URLs that are stored in the JVM's lookup cache + * for the given loader. NULL if the lookup cache is unavailable. + */ +JNIEXPORT jobjectArray JNICALL +JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader); + +/* + * Returns an array of all URLs that *may* contain the resource_name for the + * given loader. This function returns an integer array, each element + * of which can be used to index into the array returned by + * JVM_GetResourceLookupCacheURLs of the same loader to determine the + * URLs. + */ +JNIEXPORT jintArray JNICALL +JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name); + + /* ========================================================================= * The following defines a private JVM interface that the JDK can query * for the JVM version and capabilities. sun.misc.Version defines diff --git a/src/share/vm/prims/jvmtiClassFileReconstituter.cpp b/src/share/vm/prims/jvmtiClassFileReconstituter.cpp --- a/src/share/vm/prims/jvmtiClassFileReconstituter.cpp +++ b/src/share/vm/prims/jvmtiClassFileReconstituter.cpp @@ -54,6 +54,7 @@ void JvmtiClassFileReconstituter::write_field_infos() { HandleMark hm(thread()); Array* fields_anno = ikh()->fields_annotations(); + Array* fields_type_anno = ikh()->fields_type_annotations(); // Compute the real number of Java fields int java_fields = ikh()->java_fields_count(); @@ -68,6 +69,7 @@ // int offset = ikh()->field_offset( index ); int generic_signature_index = fs.generic_signature_index(); AnnotationArray* anno = fields_anno == NULL ? NULL : fields_anno->at(fs.index()); + AnnotationArray* type_anno = fields_type_anno == NULL ? 
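The jvm.h comments above describe how the two lookup-cache queries fit together: JVM_GetResourceLookupCacheURLs returns the loader's cached URL array, and JVM_GetResourceLookupCache returns indices into that array for a given resource name. A hedged sketch of how a caller could combine the two results, with plain C++ containers standing in for the JNI array types (candidate_urls is a made-up helper, not part of the interface):

#include <string>
#include <vector>

// Resolve lookup-cache indices against the cached URL array.
std::vector<std::string> candidate_urls(const std::vector<std::string>& cache_urls,
                                        const std::vector<int>& indices) {
  std::vector<std::string> result;
  for (int idx : indices) {
    result.push_back(cache_urls.at(idx));  // each index selects one cached URL
  }
  return result;
}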
NULL : fields_type_anno->at(fs.index()); // JVMSpec| field_info { // JVMSpec| u2 access_flags; @@ -93,6 +95,9 @@ if (anno != NULL) { ++attr_count; // has RuntimeVisibleAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } write_u2(attr_count); @@ -110,6 +115,9 @@ if (anno != NULL) { write_annotations_attribute("RuntimeVisibleAnnotations", anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } } } @@ -550,6 +558,7 @@ AnnotationArray* anno = method->annotations(); AnnotationArray* param_anno = method->parameter_annotations(); AnnotationArray* default_anno = method->annotation_default(); + AnnotationArray* type_anno = method->type_annotations(); // skip generated default interface methods if (method->is_overpass()) { @@ -585,6 +594,9 @@ if (param_anno != NULL) { ++attr_count; // has RuntimeVisibleParameterAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } write_u2(attr_count); if (const_method->code_size() > 0) { @@ -609,6 +621,9 @@ if (param_anno != NULL) { write_annotations_attribute("RuntimeVisibleParameterAnnotations", param_anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } } // Write the class attributes portion of ClassFile structure @@ -618,6 +633,7 @@ u2 inner_classes_length = inner_classes_attribute_length(); Symbol* generic_signature = ikh()->generic_signature(); AnnotationArray* anno = ikh()->class_annotations(); + AnnotationArray* type_anno = ikh()->class_type_annotations(); int attr_count = 0; if (generic_signature != NULL) { @@ -635,6 +651,9 @@ if (anno != NULL) { ++attr_count; // has RuntimeVisibleAnnotations attribute } + if (type_anno != NULL) { + ++attr_count; // has RuntimeVisibleTypeAnnotations attribute + } if (cpool()->operands() != NULL) { ++attr_count; } @@ -656,6 +675,9 @@ if (anno != NULL) { write_annotations_attribute("RuntimeVisibleAnnotations", anno); } + if (type_anno != NULL) { + write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno); + } if (cpool()->operands() != NULL) { write_bootstrapmethod_attribute(); } diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp --- a/src/share/vm/prims/jvmtiRedefineClasses.cpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp @@ -135,7 +135,7 @@ // Mark methods seen on stack and everywhere else so old methods are not // cleaned up if they're on the stack. - MetadataOnStackMark md_on_stack; + MetadataOnStackMark md_on_stack(true); HandleMark hm(thread); // make sure any handles created are deleted // before the stack walk again. @@ -1569,6 +1569,29 @@ return false; } + // rewrite constant pool references in the class_type_annotations: + if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // rewrite constant pool references in the fields_type_annotations: + if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // rewrite constant pool references in the methods_type_annotations: + if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) { + // propagate failure back to caller + return false; + } + + // There can be type annotations in the Code part of a method_info attribute. + // These annotations are not accessible, even by reflection. 
+ // Currently they are not even parsed by the ClassFileParser. + // If runtime access is added they will also need to be rewritten. + // rewrite source file name index: u2 source_file_name_idx = scratch_class->source_file_name_index(); if (source_file_name_idx != 0) { @@ -2239,6 +2262,588 @@ } // end rewrite_cp_refs_in_methods_default_annotations() +// Rewrite constant pool references in a class_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + AnnotationArray* class_type_annotations = scratch_class->class_type_annotations(); + if (class_type_annotations == NULL || class_type_annotations->length() == 0) { + // no class_type_annotations so nothing to do + return true; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("class_type_annotations length=%d", class_type_annotations->length())); + + int byte_i = 0; // byte index into class_type_annotations + return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations, + byte_i, "ClassFile", THREAD); +} // end rewrite_cp_refs_in_class_type_annotations() + + +// Rewrite constant pool references in a fields_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + Array* fields_type_annotations = scratch_class->fields_type_annotations(); + if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) { + // no fields_type_annotations so nothing to do + return true; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("fields_type_annotations length=%d", fields_type_annotations->length())); + + for (int i = 0; i < fields_type_annotations->length(); i++) { + AnnotationArray* field_type_annotations = fields_type_annotations->at(i); + if (field_type_annotations == NULL || field_type_annotations->length() == 0) { + // this field does not have any annotations so skip it + continue; + } + + int byte_i = 0; // byte index into field_type_annotations + if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations, + byte_i, "field_info", THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad field_type_annotations at %d", i)); + // propagate failure back to caller + return false; + } + } + + return true; +} // end rewrite_cp_refs_in_fields_type_annotations() + + +// Rewrite constant pool references in a methods_type_annotations field. +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations( + instanceKlassHandle scratch_class, TRAPS) { + + for (int i = 0; i < scratch_class->methods()->length(); i++) { + Method* m = scratch_class->methods()->at(i); + AnnotationArray* method_type_annotations = m->constMethod()->type_annotations(); + + if (method_type_annotations == NULL || method_type_annotations->length() == 0) { + // this method does not have any annotations so skip it + continue; + } + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("methods type_annotations length=%d", method_type_annotations->length())); + + int byte_i = 0; // byte index into method_type_annotations + if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations, + byte_i, "method_info", THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad method_type_annotations at %d", i)); + // propagate failure back to caller + return false; + } + } + + return true; +} // end rewrite_cp_refs_in_methods_type_annotations() + + +// Rewrite constant pool references in a type_annotations +// field. 
This "structure" is adapted from the +// RuntimeVisibleTypeAnnotations_attribute described in +// section 4.7.20 of the Java SE 8 Edition of the VM spec: +// +// type_annotations_typeArray { +// u2 num_annotations; +// type_annotation annotations[num_annotations]; +// } +// +bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + // not enough room for num_annotations field + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for num_annotations field")); + return false; + } + + u2 num_annotations = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("num_type_annotations=%d", num_annotations)); + + int calc_num_annotations = 0; + for (; calc_num_annotations < num_annotations; calc_num_annotations++) { + if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray, + byte_i_ref, location_mesg, THREAD)) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("bad type_annotation_struct at %d", calc_num_annotations)); + // propagate failure back to caller + return false; + } + } + assert(num_annotations == calc_num_annotations, "sanity check"); + + if (byte_i_ref != type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("read wrong amount of bytes at end of processing " + "type_annotations_typeArray (%d of %d bytes were read)", + byte_i_ref, type_annotations_typeArray->length())); + return false; + } + + return true; +} // end rewrite_cp_refs_in_type_annotations_typeArray() + + +// Rewrite constant pool references in a type_annotation +// field. This "structure" is adapted from the +// RuntimeVisibleTypeAnnotations_attribute described in +// section 4.7.20 of the Java SE 8 Edition of the VM spec: +// +// type_annotation { +// u1 target_type; +// union { +// type_parameter_target; +// supertype_target; +// type_parameter_bound_target; +// empty_target; +// method_formal_parameter_target; +// throws_target; +// localvar_target; +// catch_target; +// offset_target; +// type_argument_target; +// } target_info; +// type_path target_path; +// annotation anno; +// } +// +bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if (!skip_type_annotation_target(type_annotations_typeArray, + byte_i_ref, location_mesg, THREAD)) { + return false; + } + + if (!skip_type_annotation_type_path(type_annotations_typeArray, + byte_i_ref, THREAD)) { + return false; + } + + if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray, + byte_i_ref, THREAD)) { + return false; + } + + return true; +} // end rewrite_cp_refs_in_type_annotation_struct() + + +// Read, verify and skip over the target_type and target_info part +// so that rewriting can continue in the later parts of the struct. 
+// +// u1 target_type; +// union { +// type_parameter_target; +// supertype_target; +// type_parameter_bound_target; +// empty_target; +// method_formal_parameter_target; +// throws_target; +// localvar_target; +// catch_target; +// offset_target; +// type_argument_target; +// } target_info; +// +bool VM_RedefineClasses::skip_type_annotation_target( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS) { + + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + // not enough room for a target_type let alone the rest of a type_annotation + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a target_type")); + return false; + } + + u1 target_type = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("target_type=0x%.2x", target_type)); + RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("location=%s", location_mesg)); + + // Skip over target_info + switch (target_type) { + case 0x00: + // kind: type parameter declaration of generic class or interface + // location: ClassFile + case 0x01: + // kind: type parameter declaration of generic method or constructor + // location: method_info + + { + // struct: + // type_parameter_target { + // u1 type_parameter_index; + // } + // + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_parameter_target")); + return false; + } + + u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_parameter_target: type_parameter_index=%d", + type_parameter_index)); + } break; + + case 0x10: + // kind: type in extends clause of class or interface declaration + // (including the direct superclass of an anonymous class declaration), + // or in implements clause of interface declaration + // location: ClassFile + + { + // struct: + // supertype_target { + // u2 supertype_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a supertype_target")); + return false; + } + + u2 supertype_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("supertype_target: supertype_index=%d", supertype_index)); + } break; + + case 0x11: + // kind: type in bound of type parameter declaration of generic class or interface + // location: ClassFile + case 0x12: + // kind: type in bound of type parameter declaration of generic method or constructor + // location: method_info + + { + // struct: + // type_parameter_bound_target { + // u1 type_parameter_index; + // u1 bound_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_parameter_bound_target")); + return false; + } + + u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + u1 bound_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d", + type_parameter_index, bound_index)); + } break; + + case 0x13: + // kind: type in field declaration + // location: field_info + case 0x14: + // kind: return type of method, or type of newly constructed object + // location: 
method_info + case 0x15: + // kind: receiver type of method or constructor + // location: method_info + + { + // struct: + // empty_target { + // } + // + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("empty_target")); + } break; + + case 0x16: + // kind: type in formal parameter declaration of method, constructor, or lambda expression + // location: method_info + + { + // struct: + // formal_parameter_target { + // u1 formal_parameter_index; + // } + // + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a formal_parameter_target")); + return false; + } + + u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("formal_parameter_target: formal_parameter_index=%d", + formal_parameter_index)); + } break; + + case 0x17: + // kind: type in throws clause of method or constructor + // location: method_info + + { + // struct: + // throws_target { + // u2 throws_type_index + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a throws_target")); + return false; + } + + u2 throws_type_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("throws_target: throws_type_index=%d", throws_type_index)); + } break; + + case 0x40: + // kind: type in local variable declaration + // location: Code + case 0x41: + // kind: type in resource variable declaration + // location: Code + + { + // struct: + // localvar_target { + // u2 table_length; + // struct { + // u2 start_pc; + // u2 length; + // u2 index; + // } table[table_length]; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + // not enough room for a table_length let alone the rest of a localvar_target + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a localvar_target table_length")); + return false; + } + + u2 table_length = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("localvar_target: table_length=%d", table_length)); + + int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry + int table_size = table_length * table_struct_size; + + if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) { + // not enough room for a table + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a table array of length %d", table_length)); + return false; + } + + // Skip over table + byte_i_ref += table_size; + } break; + + case 0x42: + // kind: type in exception parameter declaration + // location: Code + + { + // struct: + // catch_target { + // u2 exception_table_index; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a catch_target")); + return false; + } + + u2 exception_table_index = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("catch_target: exception_table_index=%d", exception_table_index)); + } break; + + case 0x43: + // kind: type in instanceof expression + // location: Code + case 0x44: + // kind: type in new expression + // location: Code + case 0x45: + // kind: type in method reference expression using 
::new + // location: Code + case 0x46: + // kind: type in method reference expression using ::Identifier + // location: Code + + { + // struct: + // offset_target { + // u2 offset; + // } + // + if ((byte_i_ref + 2) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a offset_target")); + return false; + } + + u2 offset = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("offset_target: offset=%d", offset)); + } break; + + case 0x47: + // kind: type in cast expression + // location: Code + case 0x48: + // kind: type argument for generic constructor in new expression or + // explicit constructor invocation statement + // location: Code + case 0x49: + // kind: type argument for generic method in method invocation expression + // location: Code + case 0x4A: + // kind: type argument for generic constructor in method reference expression using ::new + // location: Code + case 0x4B: + // kind: type argument for generic method in method reference expression using ::Identifier + // location: Code + + { + // struct: + // type_argument_target { + // u2 offset; + // u1 type_argument_index; + // } + // + if ((byte_i_ref + 3) > type_annotations_typeArray->length()) { + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_argument_target")); + return false; + } + + u2 offset = Bytes::get_Java_u2((address) + type_annotations_typeArray->adr_at(byte_i_ref)); + byte_i_ref += 2; + u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_argument_target: offset=%d, type_argument_index=%d", + offset, type_argument_index)); + } break; + + default: + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("unknown target_type")); +#ifdef ASSERT + ShouldNotReachHere(); +#endif + return false; + } + + return true; +} // end skip_type_annotation_target() + + +// Read, verify and skip over the type_path part so that rewriting +// can continue in the later parts of the struct. 
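
The switch above only needs to know how many target_info bytes follow each target_type; the actual constant pool rewriting happens in the annotation that trails the target and type_path. A minimal standalone Java sketch of that same size computation, using only the JVMS section 4.7.20 layout quoted in the comments above (the class and method names are hypothetical, and this is not HotSpot code):

import java.nio.ByteBuffer;

public class TargetInfoSize {

    // Returns the number of target_info bytes that follow the given
    // target_type, reading the variable-length localvar_target table length
    // from buf at offset pos; returns -1 for an unknown target_type.
    static int targetInfoSize(int targetType, ByteBuffer buf, int pos) {
        switch (targetType) {
            case 0x00: case 0x01:                       // type_parameter_target
                return 1;
            case 0x10:                                  // supertype_target
                return 2;
            case 0x11: case 0x12:                       // type_parameter_bound_target
                return 2;
            case 0x13: case 0x14: case 0x15:            // empty_target
                return 0;
            case 0x16:                                  // formal_parameter_target
                return 1;
            case 0x17:                                  // throws_target
                return 2;
            case 0x40: case 0x41: {                     // localvar_target
                int tableLength = buf.getShort(pos) & 0xFFFF;
                return 2 + tableLength * 6;             // u2 table_length + 3 u2 per entry
            }
            case 0x42:                                  // catch_target
                return 2;
            case 0x43: case 0x44: case 0x45: case 0x46: // offset_target
                return 2;
            case 0x47: case 0x48: case 0x49:
            case 0x4A: case 0x4B:                       // type_argument_target
                return 3;
            default:
                return -1;
        }
    }

    public static void main(String[] args) {
        // localvar_target with table_length == 2 occupies 2 + 2 * 6 = 14 bytes
        ByteBuffer buf = ByteBuffer.wrap(new byte[] {
            0x00, 0x02,                 // table_length
            0, 0, 0, 5, 0, 1,           // start_pc, length, index
            0, 5, 0, 9, 0, 2 });
        System.out.println(targetInfoSize(0x40, buf, 0));   // prints 14
    }
}
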
+// +// type_path { +// u1 path_length; +// { +// u1 type_path_kind; +// u1 type_argument_index; +// } path[path_length]; +// } +// +bool VM_RedefineClasses::skip_type_annotation_type_path( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) { + + if ((byte_i_ref + 1) > type_annotations_typeArray->length()) { + // not enough room for a path_length let alone the rest of the type_path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for a type_path")); + return false; + } + + u1 path_length = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_path: path_length=%d", path_length)); + + int calc_path_length = 0; + for (; calc_path_length < path_length; calc_path_length++) { + if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) { + // not enough room for a path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("length() is too small for path entry %d of %d", + calc_path_length, path_length)); + return false; + } + + u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref); + byte_i_ref += 1; + + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d", + calc_path_length, type_path_kind, type_argument_index)); + + if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) { + // not enough room for a path + RC_TRACE_WITH_THREAD(0x02000000, THREAD, + ("inconsistent type_path values")); + return false; + } + } + assert(path_length == calc_path_length, "sanity check"); + + return true; +} // end skip_type_annotation_type_path() + + // Rewrite constant pool references in the method's stackmap table. // These "structures" are adapted from the StackMapTable_attribute that // is described in section 4.8.4 of the 6.0 version of the VM spec @@ -3223,23 +3828,6 @@ void VM_RedefineClasses::swap_annotations(instanceKlassHandle the_class, instanceKlassHandle scratch_class) { - // Since there is currently no rewriting of type annotations indexes - // into the CP, we null out type annotations on scratch_class before - // we swap annotations with the_class rather than facing the - // possibility of shipping annotations with broken indexes to - // Java-land. 
- ClassLoaderData* loader_data = scratch_class->class_loader_data(); - AnnotationArray* new_class_type_annotations = scratch_class->class_type_annotations(); - if (new_class_type_annotations != NULL) { - MetadataFactory::free_array(loader_data, new_class_type_annotations); - scratch_class->annotations()->set_class_type_annotations(NULL); - } - Array* new_field_type_annotations = scratch_class->fields_type_annotations(); - if (new_field_type_annotations != NULL) { - Annotations::free_contents(loader_data, new_field_type_annotations); - scratch_class->annotations()->set_fields_type_annotations(NULL); - } - // Swap annotation fields values Annotations* old_annotations = the_class->annotations(); the_class->set_annotations(scratch_class->annotations()); diff --git a/src/share/vm/prims/jvmtiRedefineClasses.hpp b/src/share/vm/prims/jvmtiRedefineClasses.hpp --- a/src/share/vm/prims/jvmtiRedefineClasses.hpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp @@ -457,6 +457,17 @@ instanceKlassHandle scratch_class, TRAPS); bool rewrite_cp_refs_in_element_value( AnnotationArray* class_annotations, int &byte_i_ref, TRAPS); + bool rewrite_cp_refs_in_type_annotations_typeArray( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool rewrite_cp_refs_in_type_annotation_struct( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool skip_type_annotation_target( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, + const char * location_mesg, TRAPS); + bool skip_type_annotation_type_path( + AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS); bool rewrite_cp_refs_in_fields_annotations( instanceKlassHandle scratch_class, TRAPS); void rewrite_cp_refs_in_method(methodHandle method, @@ -468,6 +479,12 @@ instanceKlassHandle scratch_class, TRAPS); bool rewrite_cp_refs_in_methods_parameter_annotations( instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_class_type_annotations( + instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_fields_type_annotations( + instanceKlassHandle scratch_class, TRAPS); + bool rewrite_cp_refs_in_methods_type_annotations( + instanceKlassHandle scratch_class, TRAPS); void rewrite_cp_refs_in_stack_map_table(methodHandle method, TRAPS); void rewrite_cp_refs_in_verification_type_info( address& stackmap_addr_ref, address stackmap_end, u2 frame_i, diff --git a/src/share/vm/prims/whitebox.cpp b/src/share/vm/prims/whitebox.cpp --- a/src/share/vm/prims/whitebox.cpp +++ b/src/share/vm/prims/whitebox.cpp @@ -104,6 +104,28 @@ return closure.found(); WB_END +WB_ENTRY(jboolean, WB_ClassKnownToNotExist(JNIEnv* env, jobject o, jobject loader, jstring name)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + const char* class_name = env->GetStringUTFChars(name, NULL); + jboolean result = JVM_KnownToNotExist(env, loader, class_name); + env->ReleaseStringUTFChars(name, class_name); + return result; +WB_END + +WB_ENTRY(jobjectArray, WB_GetLookupCacheURLs(JNIEnv* env, jobject o, jobject loader)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + return JVM_GetResourceLookupCacheURLs(env, loader); +WB_END + +WB_ENTRY(jintArray, WB_GetLookupCacheMatches(JNIEnv* env, jobject o, jobject loader, jstring name)) + ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI + const char* resource_name = env->GetStringUTFChars(name, NULL); + jintArray result = JVM_GetResourceLookupCache(env, 
loader, resource_name); + + env->ReleaseStringUTFChars(name, resource_name); + return result; +WB_END + WB_ENTRY(jlong, WB_GetCompressedOopsMaxHeapSize(JNIEnv* env, jobject o)) { return (jlong)Arguments::max_heap_for_compressed_oops(); } @@ -382,19 +404,10 @@ CHECK_JNI_EXCEPTION_(env, result); MutexLockerEx mu(Compile_lock); methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid)); - nmethod* code; if (is_osr) { - int bci = InvocationEntryBci; - while ((code = mh->lookup_osr_nmethod_for(bci, CompLevel_none, false)) != NULL) { - code->mark_for_deoptimization(); - ++result; - bci = code->osr_entry_bci() + 1; - } - } else { - code = mh->code(); - } - if (code != NULL) { - code->mark_for_deoptimization(); + result += mh->mark_osr_nmethods(); + } else if (mh->code() != NULL) { + mh->code()->mark_for_deoptimization(); ++result; } result += CodeCache::mark_for_deoptimization(mh()); @@ -939,6 +952,11 @@ {CC"isObjectInOldGen", CC"(Ljava/lang/Object;)Z", (void*)&WB_isObjectInOldGen }, {CC"getHeapOopSize", CC"()I", (void*)&WB_GetHeapOopSize }, {CC"isClassAlive0", CC"(Ljava/lang/String;)Z", (void*)&WB_IsClassAlive }, + {CC"classKnownToNotExist", + CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)Z",(void*)&WB_ClassKnownToNotExist}, + {CC"getLookupCacheURLs", CC"(Ljava/lang/ClassLoader;)[Ljava/net/URL;", (void*)&WB_GetLookupCacheURLs}, + {CC"getLookupCacheMatches", CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)[I", + (void*)&WB_GetLookupCacheMatches}, {CC"parseCommandLine", CC"(Ljava/lang/String;[Lsun/hotspot/parser/DiagnosticCommand;)[Ljava/lang/Object;", (void*) &WB_ParseCommandLine diff --git a/src/share/vm/runtime/simpleThresholdPolicy.cpp b/src/share/vm/runtime/simpleThresholdPolicy.cpp --- a/src/share/vm/runtime/simpleThresholdPolicy.cpp +++ b/src/share/vm/runtime/simpleThresholdPolicy.cpp @@ -196,7 +196,6 @@ // Don't trigger other compiles in testing mode return NULL; } - nmethod *osr_nm = NULL; handle_counter_overflow(method()); if (method() != inlinee()) { @@ -210,14 +209,16 @@ if (bci == InvocationEntryBci) { method_invocation_event(method, inlinee, comp_level, nm, thread); } else { + // method == inlinee if the event originated in the main method method_back_branch_event(method, inlinee, bci, comp_level, nm, thread); - // method == inlinee if the event originated in the main method - int highest_level = inlinee->highest_osr_comp_level(); - if (highest_level > comp_level) { - osr_nm = inlinee->lookup_osr_nmethod_for(bci, highest_level, false); + // Check if event led to a higher level OSR compilation + nmethod* osr_nm = inlinee->lookup_osr_nmethod_for(bci, comp_level, false); + if (osr_nm != NULL && osr_nm->comp_level() > comp_level) { + // Perform OSR with new nmethod + return osr_nm; } } - return osr_nm; + return NULL; } // Check if the method can be compiled, change level if necessary diff --git a/src/share/vm/runtime/thread.cpp b/src/share/vm/runtime/thread.cpp --- a/src/share/vm/runtime/thread.cpp +++ b/src/share/vm/runtime/thread.cpp @@ -234,6 +234,8 @@ // This initial value ==> never claimed. 
_oops_do_parity = 0; + _metadata_on_stack_buffer = NULL; + // the handle mark links itself to last_handle_mark new HandleMark(this); diff --git a/src/share/vm/runtime/thread.hpp b/src/share/vm/runtime/thread.hpp --- a/src/share/vm/runtime/thread.hpp +++ b/src/share/vm/runtime/thread.hpp @@ -42,11 +42,10 @@ #include "runtime/threadLocalStorage.hpp" #include "runtime/thread_ext.hpp" #include "runtime/unhandledOops.hpp" -#include "utilities/macros.hpp" - #include "trace/traceBackend.hpp" #include "trace/traceMacros.hpp" #include "utilities/exceptions.hpp" +#include "utilities/macros.hpp" #include "utilities/top.hpp" #if INCLUDE_ALL_GCS #include "gc_implementation/g1/dirtyCardQueue.hpp" @@ -83,6 +82,10 @@ class ThreadClosure; class IdealGraphPrinter; +class Metadata; +template class ChunkedList; +typedef ChunkedList MetadataOnStackBuffer; + DEBUG_ONLY(class ResourceMark;) class WorkerThread; @@ -256,6 +259,9 @@ jlong _allocated_bytes; // Cumulative number of bytes allocated on // the Java heap + // Thread-local buffer used by MetadataOnStackMark. + MetadataOnStackBuffer* _metadata_on_stack_buffer; + TRACE_DATA _trace_data; // Thread-local data for tracing ThreadExt _ext; @@ -517,7 +523,10 @@ // creation fails due to lack of memory, too many threads etc. bool set_as_starting_thread(); - protected: + void set_metadata_on_stack_buffer(MetadataOnStackBuffer* buffer) { _metadata_on_stack_buffer = buffer; } + MetadataOnStackBuffer* metadata_on_stack_buffer() const { return _metadata_on_stack_buffer; } + +protected: // OS data associated with the thread OSThread* _osthread; // Platform-specific thread information diff --git a/src/share/vm/services/runtimeService.cpp b/src/share/vm/services/runtimeService.cpp --- a/src/share/vm/services/runtimeService.cpp +++ b/src/share/vm/services/runtimeService.cpp @@ -46,6 +46,7 @@ PerfCounter* RuntimeService::_thread_interrupt_signaled_count = NULL; PerfCounter* RuntimeService::_interrupted_before_count = NULL; PerfCounter* RuntimeService::_interrupted_during_count = NULL; +double RuntimeService::_last_safepoint_sync_time_sec = 0.0; void RuntimeService::init() { // Make sure the VM version is initialized @@ -128,6 +129,7 @@ // update the time stamp to begin recording safepoint time _safepoint_timer.update(); + _last_safepoint_sync_time_sec = 0.0; if (UsePerfData) { _total_safepoints->inc(); if (_app_timer.is_updated()) { @@ -140,6 +142,9 @@ if (UsePerfData) { _sync_time_ticks->inc(_safepoint_timer.ticks_since_update()); } + if (PrintGCApplicationStoppedTime) { + _last_safepoint_sync_time_sec = last_safepoint_time_sec(); + } } void RuntimeService::record_safepoint_end() { @@ -155,8 +160,10 @@ gclog_or_tty->date_stamp(PrintGCDateStamps); gclog_or_tty->stamp(PrintGCTimeStamps); gclog_or_tty->print_cr("Total time for which application threads " - "were stopped: %3.7f seconds", - last_safepoint_time_sec()); + "were stopped: %3.7f seconds, " + "Stopping threads took: %3.7f seconds", + last_safepoint_time_sec(), + _last_safepoint_sync_time_sec); } // update the time stamp to begin recording app time diff --git a/src/share/vm/services/runtimeService.hpp b/src/share/vm/services/runtimeService.hpp --- a/src/share/vm/services/runtimeService.hpp +++ b/src/share/vm/services/runtimeService.hpp @@ -40,6 +40,7 @@ static TimeStamp _safepoint_timer; static TimeStamp _app_timer; + static double _last_safepoint_sync_time_sec; public: static void init(); diff --git a/src/share/vm/utilities/accessFlags.cpp b/src/share/vm/utilities/accessFlags.cpp --- 
a/src/share/vm/utilities/accessFlags.cpp +++ b/src/share/vm/utilities/accessFlags.cpp @@ -62,6 +62,21 @@ } while(f != old_flags); } +// Returns true iff this thread succeeded setting the bit. +bool AccessFlags::atomic_set_one_bit(jint bit) { + // Atomically update the flags with the bit given + jint old_flags, new_flags, f; + bool is_setting_bit = false; + do { + old_flags = _flags; + new_flags = old_flags | bit; + is_setting_bit = old_flags != new_flags; + f = Atomic::cmpxchg(new_flags, &_flags, old_flags); + } while(f != old_flags); + + return is_setting_bit; +} + #if !defined(PRODUCT) || INCLUDE_JVMTI void AccessFlags::print_on(outputStream* st) const { diff --git a/src/share/vm/utilities/accessFlags.hpp b/src/share/vm/utilities/accessFlags.hpp --- a/src/share/vm/utilities/accessFlags.hpp +++ b/src/share/vm/utilities/accessFlags.hpp @@ -170,6 +170,7 @@ // Atomic update of flags void atomic_set_bits(jint bits); + bool atomic_set_one_bit(jint bit); void atomic_clear_bits(jint bits); private: @@ -230,12 +231,13 @@ atomic_set_bits(JVM_ACC_FIELD_HAS_GENERIC_SIGNATURE); } - void set_on_stack(const bool value) + bool set_on_stack(const bool value) { if (value) { - atomic_set_bits(JVM_ACC_ON_STACK); + return atomic_set_one_bit(JVM_ACC_ON_STACK); } else { atomic_clear_bits(JVM_ACC_ON_STACK); + return true; // Ignored } } // Conversion diff --git a/src/share/vm/utilities/chunkedList.cpp b/src/share/vm/utilities/chunkedList.cpp new file mode 100644 --- /dev/null +++ b/src/share/vm/utilities/chunkedList.cpp @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ * + */ + +#include "precompiled.hpp" +#include "utilities/chunkedList.hpp" +#include "utilities/debug.hpp" + +/////////////// Unit tests /////////////// + +#ifndef PRODUCT + +template +class TestChunkedList { + typedef ChunkedList ChunkedListT; + + public: + static void testEmpty() { + ChunkedListT buffer; + assert(buffer.size() == 0, "assert"); + } + + static void testFull() { + ChunkedListT buffer; + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + buffer.push((T)i); + } + assert(buffer.size() == ChunkedListT::BufferSize, "assert"); + assert(buffer.is_full(), "assert"); + } + + static void testSize() { + ChunkedListT buffer; + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + assert(buffer.size() == i, "assert"); + buffer.push((T)i); + assert(buffer.size() == i + 1, "assert"); + } + } + + static void testClear() { + ChunkedListT buffer; + + buffer.clear(); + assert(buffer.size() == 0, "assert"); + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize / 2; i++) { + buffer.push((T)i); + } + buffer.clear(); + assert(buffer.size() == 0, "assert"); + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + buffer.push((T)i); + } + buffer.clear(); + assert(buffer.size() == 0, "assert"); + } + + static void testAt() { + ChunkedListT buffer; + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + buffer.push((T)i); + assert(buffer.at(i) == (T)i, "assert"); + } + + for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) { + assert(buffer.at(i) == (T)i, "assert"); + } + } + + static void test() { + testEmpty(); + testFull(); + testSize(); + testClear(); + testAt(); + } +}; + +class Metadata; + +void TestChunkedList_test() { + TestChunkedList::test(); + TestChunkedList::test(); +} + +#endif diff --git a/src/share/vm/utilities/chunkedList.hpp b/src/share/vm/utilities/chunkedList.hpp new file mode 100644 --- /dev/null +++ b/src/share/vm/utilities/chunkedList.hpp @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ * + */ + +#ifndef SHARE_VM_UTILITIES_CHUNKED_LIST_HPP +#define SHARE_VM_UTILITIES_CHUNKED_LIST_HPP + +#include "memory/allocation.hpp" +#include "utilities/debug.hpp" + +template class ChunkedList : public CHeapObj { + template friend class TestChunkedList; + + static const size_t BufferSize = 64; + + T _values[BufferSize]; + T* _top; + + ChunkedList* _next_used; + ChunkedList* _next_free; + + T const * end() const { + return &_values[BufferSize]; + } + + public: + ChunkedList() : _top(_values), _next_used(NULL), _next_free(NULL) {} + + bool is_full() const { + return _top == end(); + } + + void clear() { + _top = _values; + // Don't clear the next pointers since that would interfere + // with other threads trying to iterate through the lists. + } + + void push(T m) { + assert(!is_full(), "Buffer is full"); + *_top = m; + _top++; + } + + void set_next_used(ChunkedList* buffer) { _next_used = buffer; } + void set_next_free(ChunkedList* buffer) { _next_free = buffer; } + + ChunkedList* next_used() const { return _next_used; } + ChunkedList* next_free() const { return _next_free; } + + size_t size() const { + return pointer_delta(_top, _values, sizeof(T)); + } + + T at(size_t i) { + assert(i < size(), err_msg("IOOBE i: " SIZE_FORMAT " size(): " SIZE_FORMAT, i, size())); + return _values[i]; + } +}; + +#endif // SHARE_VM_UTILITIES_CHUNKED_LIST_HPP diff --git a/test/compiler/whitebox/CompilerWhiteBoxTest.java b/test/compiler/whitebox/CompilerWhiteBoxTest.java --- a/test/compiler/whitebox/CompilerWhiteBoxTest.java +++ b/test/compiler/whitebox/CompilerWhiteBoxTest.java @@ -72,9 +72,9 @@ /** Flag for verbose output, true if {@code -Dverbose} specified */ protected static final boolean IS_VERBOSE = System.getProperty("verbose") != null; - /** count of invocation to triger compilation */ + /** invocation count to trigger compilation */ protected static final int THRESHOLD; - /** count of invocation to triger OSR compilation */ + /** invocation count to trigger OSR compilation */ protected static final long BACKEDGE_THRESHOLD; /** Value of {@code java.vm.info} (interpreted|mixed|comp mode) */ protected static final String MODE = System.getProperty("java.vm.info"); @@ -206,7 +206,6 @@ * is compiled, or if {@linkplain #method} has zero * compilation level. */ - protected final void checkNotCompiled(int compLevel) { if (WHITE_BOX.isMethodQueuedForCompilation(method)) { throw new RuntimeException(method + " must not be in queue"); @@ -227,20 +226,30 @@ * compilation level. */ protected final void checkNotCompiled() { + checkNotCompiled(true); + checkNotCompiled(false); + } + + /** + * Checks, that {@linkplain #method} is not (OSR-)compiled. + * + * @param isOsr Check for OSR compilation if true + * @throws RuntimeException if {@linkplain #method} is in compiler queue or + * is compiled, or if {@linkplain #method} has zero + * compilation level. + */ + protected final void checkNotCompiled(boolean isOsr) { + waitBackgroundCompilation(); if (WHITE_BOX.isMethodQueuedForCompilation(method)) { throw new RuntimeException(method + " must not be in queue"); } - if (WHITE_BOX.isMethodCompiled(method, false)) { - throw new RuntimeException(method + " must be not compiled"); + if (WHITE_BOX.isMethodCompiled(method, isOsr)) { + throw new RuntimeException(method + " must not be " + + (isOsr ? 
"osr_" : "") + "compiled"); } - if (WHITE_BOX.getMethodCompilationLevel(method, false) != 0) { - throw new RuntimeException(method + " comp_level must be == 0"); - } - if (WHITE_BOX.isMethodCompiled(method, true)) { - throw new RuntimeException(method + " must be not osr_compiled"); - } - if (WHITE_BOX.getMethodCompilationLevel(method, true) != 0) { - throw new RuntimeException(method + " osr_comp_level must be == 0"); + if (WHITE_BOX.getMethodCompilationLevel(method, isOsr) != 0) { + throw new RuntimeException(method + (isOsr ? " osr_" : " ") + + "comp_level must be == 0"); } } @@ -306,12 +315,21 @@ * Waits for completion of background compilation of {@linkplain #method}. */ protected final void waitBackgroundCompilation() { + waitBackgroundCompilation(method); + } + + /** + * Waits for completion of background compilation of the given executable. + * + * @param executable Executable + */ + protected static final void waitBackgroundCompilation(Executable executable) { if (!BACKGROUND_COMPILATION) { return; } final Object obj = new Object(); for (int i = 0; i < 10 - && WHITE_BOX.isMethodQueuedForCompilation(method); ++i) { + && WHITE_BOX.isMethodQueuedForCompilation(executable); ++i) { synchronized (obj) { try { obj.wait(1000); @@ -425,14 +443,14 @@ /** constructor test case */ CONSTRUCTOR_TEST(Helper.CONSTRUCTOR, Helper.CONSTRUCTOR_CALLABLE, false), /** method test case */ - METOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false), + METHOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false), /** static method test case */ STATIC_TEST(Helper.STATIC, Helper.STATIC_CALLABLE, false), /** OSR constructor test case */ OSR_CONSTRUCTOR_TEST(Helper.OSR_CONSTRUCTOR, Helper.OSR_CONSTRUCTOR_CALLABLE, true), /** OSR method test case */ - OSR_METOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true), + OSR_METHOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true), /** OSR static method test case */ OSR_STATIC_TEST(Helper.OSR_STATIC, Helper.OSR_STATIC_CALLABLE, true); @@ -494,7 +512,7 @@ = new Callable() { @Override public Integer call() throws Exception { - return new Helper(null).hashCode(); + return new Helper(null, CompilerWhiteBoxTest.BACKEDGE_THRESHOLD).hashCode(); } }; @@ -504,7 +522,7 @@ @Override public Integer call() throws Exception { - return helper.osrMethod(); + return helper.osrMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; @@ -512,7 +530,7 @@ = new Callable() { @Override public Integer call() throws Exception { - return osrStaticMethod(); + return osrStaticMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD); } }; @@ -532,25 +550,24 @@ } try { OSR_CONSTRUCTOR = Helper.class.getDeclaredConstructor( - Object.class); + Object.class, long.class); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( - "exception on getting method Helper.(Object)", e); + "exception on getting method Helper.(Object, long)", e); } METHOD = getMethod("method"); STATIC = getMethod("staticMethod"); - OSR_METHOD = getMethod("osrMethod"); - OSR_STATIC = getMethod("osrStaticMethod"); + OSR_METHOD = getMethod("osrMethod", long.class); + OSR_STATIC = getMethod("osrStaticMethod", long.class); } - private static Method getMethod(String name) { + private static Method getMethod(String name, Class... parameterTypes) { try { - return Helper.class.getDeclaredMethod(name); + return Helper.class.getDeclaredMethod(name, parameterTypes); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( "exception on getting method Helper." 
+ name, e); } - } private static int staticMethod() { @@ -561,17 +578,84 @@ return 42; } - private static int osrStaticMethod() { + /** + * Deoptimizes all non-osr versions of the given executable after + * compilation finished. + * + * @param e Executable + * @throws Exception + */ + private static void waitAndDeoptimize(Executable e) { + CompilerWhiteBoxTest.waitBackgroundCompilation(e); + if (WhiteBox.getWhiteBox().isMethodQueuedForCompilation(e)) { + throw new RuntimeException(e + " must not be in queue"); + } + // Deoptimize non-osr versions of executable + WhiteBox.getWhiteBox().deoptimizeMethod(e, false); + } + + /** + * Executes the method multiple times to make sure we have + * enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. + * + * @param m Method to be executed + * @return Number of times the method was executed + * @throws Exception + */ + private static int warmup(Method m) throws Exception { + waitAndDeoptimize(m); + Helper helper = new Helper(); int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) { + result += (int)m.invoke(helper, 1); + } + // Wait to make sure OSR compilation is not blocked by + // non-OSR compilation in the compile queue + CompilerWhiteBoxTest.waitBackgroundCompilation(m); + return result; + } + + /** + * Executes the constructor multiple times to make sure we + * have enough profiling information before triggering an OSR + * compilation. Otherwise the C2 compiler may add uncommon traps. + * + * @param c Constructor to be executed + * @return Number of times the constructor was executed + * @throws Exception + */ + private static int warmup(Constructor c) throws Exception { + waitAndDeoptimize(c); + int result = 0; + for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) { + result += c.newInstance(null, 1).hashCode(); + } + // Wait to make sure OSR compilation is not blocked by + // non-OSR compilation in the compile queue + CompilerWhiteBoxTest.waitBackgroundCompilation(c); + return result; + } + + private static int osrStaticMethod(long limit) throws Exception { + int result = 0; + if (limit != 1) { + result = warmup(OSR_STATIC); + } + // Trigger osr compilation + for (long i = 0; i < limit; ++i) { result += staticMethod(); } return result; } - private int osrMethod() { + private int osrMethod(long limit) throws Exception { int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + if (limit != 1) { + result = warmup(OSR_METHOD); + } + // Trigger osr compilation + for (long i = 0; i < limit; ++i) { result += method(); } return result; @@ -585,9 +669,13 @@ } // for OSR constructor test case - private Helper(Object o) { + private Helper(Object o, long limit) throws Exception { int result = 0; - for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) { + if (limit != 1) { + result = warmup(OSR_CONSTRUCTOR); + } + // Trigger osr compilation + for (long i = 0; i < limit; ++i) { result += method(); } x = result; diff --git a/test/compiler/whitebox/DeoptimizeMultipleOSRTest.java b/test/compiler/whitebox/DeoptimizeMultipleOSRTest.java new file mode 100644 --- /dev/null +++ b/test/compiler/whitebox/DeoptimizeMultipleOSRTest.java @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
+ * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ + +import sun.hotspot.WhiteBox; +import java.lang.reflect.Executable; +import java.lang.reflect.Method; + +/* + * @test DeoptimizeMultipleOSRTest + * @bug 8061817 + * @library /testlibrary /testlibrary/whitebox + * @build DeoptimizeMultipleOSRTest + * @run main ClassFileInstaller sun.hotspot.WhiteBox + * sun.hotspot.WhiteBox$WhiteBoxPermission + * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI -XX:CompileCommand=compileonly,DeoptimizeMultipleOSRTest::triggerOSR DeoptimizeMultipleOSRTest + * @summary testing of WB::deoptimizeMethod() + */ +public class DeoptimizeMultipleOSRTest { + private static final WhiteBox WHITE_BOX = WhiteBox.getWhiteBox(); + private static final long BACKEDGE_THRESHOLD = 150000; + private Method method; + private int counter = 0; + + public static void main(String[] args) throws Exception { + DeoptimizeMultipleOSRTest test = new DeoptimizeMultipleOSRTest(); + test.test(); + } + + /** + * Triggers two different OSR compilations for the same method and + * checks if WhiteBox.deoptimizeMethod() deoptimizes both. + * + * @throws Exception + */ + public void test() throws Exception { + method = DeoptimizeMultipleOSRTest.class.getDeclaredMethod("triggerOSR", boolean.class, long.class); + // Trigger two OSR compiled versions + triggerOSR(true, BACKEDGE_THRESHOLD); + triggerOSR(false, BACKEDGE_THRESHOLD); + // Wait for compilation + CompilerWhiteBoxTest.waitBackgroundCompilation(method); + // Deoptimize + WHITE_BOX.deoptimizeMethod(method, true); + if (WHITE_BOX.isMethodCompiled(method, true)) { + throw new AssertionError("Not all OSR compiled versions were deoptimized"); + } + } + + /** + * Triggers OSR compilations by executing loops. 
+ * + * @param first Determines which loop to execute + * @param limit The number of loop iterations + */ + public void triggerOSR(boolean first, long limit) { + if (limit != 1) { + // Warmup method to avoid uncommon traps + for (int i = 0; i < limit; ++i) { + triggerOSR(first, 1); + } + CompilerWhiteBoxTest.waitBackgroundCompilation(method); + } + if (first) { + // Trigger OSR compilation 1 + for (int i = 0; i < limit; ++i) { + counter++; + } + } else { + // Trigger OSR compilation 2 + for (int i = 0; i < limit; ++i) { + counter++; + } + } + } +} diff --git a/test/compiler/whitebox/MakeMethodNotCompilableTest.java b/test/compiler/whitebox/MakeMethodNotCompilableTest.java --- a/test/compiler/whitebox/MakeMethodNotCompilableTest.java +++ b/test/compiler/whitebox/MakeMethodNotCompilableTest.java @@ -131,14 +131,15 @@ throw new RuntimeException(method + " is not compilable after clearMethodState()"); } - + // Make method not (OSR-)compilable (depending on testCase.isOsr()) makeNotCompilable(); if (isCompilable()) { throw new RuntimeException(method + " must be not compilable"); } - + // Try to (OSR-)compile method compile(); - checkNotCompiled(); + // Method should not be (OSR-)compiled + checkNotCompiled(testCase.isOsr()); if (isCompilable()) { throw new RuntimeException(method + " must be not compilable"); } diff --git a/test/runtime/RedefineTests/RedefineAnnotations.java b/test/runtime/RedefineTests/RedefineAnnotations.java new file mode 100644 --- /dev/null +++ b/test/runtime/RedefineTests/RedefineAnnotations.java @@ -0,0 +1,410 @@ +/* + * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ */ + +/* + * @test + * @library /testlibrary + * @summary Test that type annotations are retained after a retransform + * @run main RedefineAnnotations buildagent + * @run main/othervm -javaagent:redefineagent.jar RedefineAnnotations + */ + +import static com.oracle.java.testlibrary.Asserts.assertTrue; +import java.io.FileNotFoundException; +import java.io.PrintWriter; +import java.lang.NoSuchFieldException; +import java.lang.NoSuchMethodException; +import java.lang.RuntimeException; +import java.lang.annotation.Annotation; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.lang.instrument.ClassFileTransformer; +import java.lang.instrument.IllegalClassFormatException; +import java.lang.instrument.Instrumentation; +import java.lang.instrument.UnmodifiableClassException; +import java.lang.reflect.AnnotatedArrayType; +import java.lang.reflect.AnnotatedParameterizedType; +import java.lang.reflect.AnnotatedType; +import java.lang.reflect.AnnotatedWildcardType; +import java.lang.reflect.Executable; +import java.lang.reflect.TypeVariable; +import java.security.ProtectionDomain; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import jdk.internal.org.objectweb.asm.ClassReader; +import jdk.internal.org.objectweb.asm.ClassVisitor; +import jdk.internal.org.objectweb.asm.ClassWriter; +import jdk.internal.org.objectweb.asm.FieldVisitor; +import static jdk.internal.org.objectweb.asm.Opcodes.ASM5; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE_USE) +@interface TestAnn { + String site(); +} + +public class RedefineAnnotations { + static Instrumentation inst; + public static void premain(String agentArgs, Instrumentation inst) { + RedefineAnnotations.inst = inst; + } + + static class Transformer implements ClassFileTransformer { + + public byte[] asm(ClassLoader loader, String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, byte[] classfileBuffer) + throws IllegalClassFormatException { + + ClassWriter cw = new ClassWriter(0); + ClassVisitor cv = new ReAddDummyFieldsClassVisitor(ASM5, cw) { }; + ClassReader cr = new ClassReader(classfileBuffer); + cr.accept(cv, 0); + return cw.toByteArray(); + } + + public class ReAddDummyFieldsClassVisitor extends ClassVisitor { + + LinkedList fields = new LinkedList<>(); + + public ReAddDummyFieldsClassVisitor(int api, ClassVisitor cv) { + super(api, cv); + } + + @Override public FieldVisitor visitField(int access, String name, + String desc, String signature, Object value) { + if (name.startsWith("dummy")) { + // Remove dummy field + fields.addLast(new F(access, name, desc, signature, value)); + return null; + } + return cv.visitField(access, name, desc, signature, value); + } + + @Override public void visitEnd() { + F f; + while ((f = fields.pollFirst()) != null) { + // Re-add dummy fields + cv.visitField(f.access, f.name, f.desc, f.signature, f.value); + } + } + + private class F { + private int access; + private String name; + private String desc; + private String signature; + private Object value; + F(int access, String name, String desc, String signature, Object value) { + this.access = access; + this.name = name; + this.desc = desc; + this.signature = signature; + this.value = value; + } + } + } + + @Override public byte[] transform(ClassLoader loader, String className, + Class classBeingRedefined, + ProtectionDomain protectionDomain, 
byte[] classfileBuffer) + throws IllegalClassFormatException { + + if (className.contains("TypeAnnotatedTestClass")) { + try { + // Here we remove and re-add the dummy fields. This shuffles the constant pool + return asm(loader, className, classBeingRedefined, protectionDomain, classfileBuffer); + } catch (Throwable e) { + // The retransform native code that called this method does not propagate + // exceptions. Instead of getting an uninformative generic error, catch + // problems here and print it, then exit. + e.printStackTrace(); + System.exit(1); + } + } + return null; + } + } + + private static void buildAgent() { + try { + ClassFileInstaller.main("RedefineAnnotations"); + } catch (Exception e) { + throw new RuntimeException("Could not write agent classfile", e); + } + + try { + PrintWriter pw = new PrintWriter("MANIFEST.MF"); + pw.println("Premain-Class: RedefineAnnotations"); + pw.println("Agent-Class: RedefineAnnotations"); + pw.println("Can-Retransform-Classes: true"); + pw.close(); + } catch (FileNotFoundException e) { + throw new RuntimeException("Could not write manifest file for the agent", e); + } + + sun.tools.jar.Main jarTool = new sun.tools.jar.Main(System.out, System.err, "jar"); + if (!jarTool.run(new String[] { "-cmf", "MANIFEST.MF", "redefineagent.jar", "RedefineAnnotations.class" })) { + throw new RuntimeException("Could not write the agent jar file"); + } + } + + public static void main(String argv[]) throws NoSuchFieldException, NoSuchMethodException { + if (argv.length == 1 && argv[0].equals("buildagent")) { + buildAgent(); + return; + } + + if (inst == null) { + throw new RuntimeException("Instrumentation object was null"); + } + + RedefineAnnotations test = new RedefineAnnotations(); + test.testTransformAndVerify(); + } + + // Class type annotations + private Annotation classTypeParameterTA; + private Annotation extendsTA; + private Annotation implementsTA; + + // Field type annotations + private Annotation fieldTA; + private Annotation innerTA; + private Annotation[] arrayTA = new Annotation[4]; + private Annotation[] mapTA = new Annotation[5]; + + // Method type annotations + private Annotation returnTA, methodTypeParameterTA, formalParameterTA, throwsTA; + + private void testTransformAndVerify() + throws NoSuchFieldException, NoSuchMethodException { + + Class c = TypeAnnotatedTestClass.class; + Class myClass = c; + + /* + * Verify that the expected annotations are where they should be before transform. + */ + verifyClassTypeAnnotations(c); + verifyFieldTypeAnnotations(c); + verifyMethodTypeAnnotations(c); + + try { + inst.addTransformer(new Transformer(), true); + inst.retransformClasses(myClass); + } catch (UnmodifiableClassException e) { + throw new RuntimeException(e); + } + + /* + * Verify that the expected annotations are where they should be after transform. + * Also verify that before and after are equal. 
+ */ + verifyClassTypeAnnotations(c); + verifyFieldTypeAnnotations(c); + verifyMethodTypeAnnotations(c); + } + + private void verifyClassTypeAnnotations(Class c) { + Annotation anno; + + anno = c.getTypeParameters()[0].getAnnotations()[0]; + verifyTestAnn(classTypeParameterTA, anno, "classTypeParameter"); + classTypeParameterTA = anno; + + anno = c.getAnnotatedSuperclass().getAnnotations()[0]; + verifyTestAnn(extendsTA, anno, "extends"); + extendsTA = anno; + + anno = c.getAnnotatedInterfaces()[0].getAnnotations()[0]; + verifyTestAnn(implementsTA, anno, "implements"); + implementsTA = anno; + } + + private void verifyFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + verifyBasicFieldTypeAnnotations(c); + verifyInnerFieldTypeAnnotations(c); + verifyArrayFieldTypeAnnotations(c); + verifyMapFieldTypeAnnotations(c); + } + + private void verifyBasicFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno = c.getDeclaredField("typeAnnotatedBoolean").getAnnotatedType().getAnnotations()[0]; + verifyTestAnn(fieldTA, anno, "field"); + fieldTA = anno; + } + + private void verifyInnerFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + AnnotatedType at = c.getDeclaredField("typeAnnotatedInner").getAnnotatedType(); + Annotation anno = at.getAnnotations()[0]; + verifyTestAnn(innerTA, anno, "inner"); + innerTA = anno; + } + + private void verifyArrayFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno; + AnnotatedType at; + + at = c.getDeclaredField("typeAnnotatedArray").getAnnotatedType(); + anno = at.getAnnotations()[0]; + verifyTestAnn(arrayTA[0], anno, "array1"); + arrayTA[0] = anno; + + for (int i = 1; i <= 3; i++) { + at = ((AnnotatedArrayType) at).getAnnotatedGenericComponentType(); + anno = at.getAnnotations()[0]; + verifyTestAnn(arrayTA[i], anno, "array" + (i + 1)); + arrayTA[i] = anno; + } + } + + private void verifyMapFieldTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + + Annotation anno; + AnnotatedType atBase; + AnnotatedType atParameter; + atBase = c.getDeclaredField("typeAnnotatedMap").getAnnotatedType(); + + anno = atBase.getAnnotations()[0]; + verifyTestAnn(mapTA[0], anno, "map1"); + mapTA[0] = anno; + + atParameter = + ((AnnotatedParameterizedType) atBase). + getAnnotatedActualTypeArguments()[0]; + anno = ((AnnotatedWildcardType) atParameter).getAnnotations()[0]; + verifyTestAnn(mapTA[1], anno, "map2"); + mapTA[1] = anno; + + anno = + ((AnnotatedWildcardType) atParameter). + getAnnotatedUpperBounds()[0].getAnnotations()[0]; + verifyTestAnn(mapTA[2], anno, "map3"); + mapTA[2] = anno; + + atParameter = + ((AnnotatedParameterizedType) atBase). + getAnnotatedActualTypeArguments()[1]; + anno = ((AnnotatedParameterizedType) atParameter).getAnnotations()[0]; + verifyTestAnn(mapTA[3], anno, "map4"); + mapTA[3] = anno; + + anno = + ((AnnotatedParameterizedType) atParameter). 
+ getAnnotatedActualTypeArguments()[0].getAnnotations()[0]; + verifyTestAnn(mapTA[4], anno, "map5"); + mapTA[4] = anno; + } + + private void verifyMethodTypeAnnotations(Class c) + throws NoSuchFieldException, NoSuchMethodException { + Annotation anno; + Executable typeAnnotatedMethod = + c.getDeclaredMethod("typeAnnotatedMethod", TypeAnnotatedTestClass.class); + + anno = typeAnnotatedMethod.getAnnotatedReturnType().getAnnotations()[0]; + verifyTestAnn(returnTA, anno, "return"); + returnTA = anno; + + anno = typeAnnotatedMethod.getTypeParameters()[0].getAnnotations()[0]; + verifyTestAnn(methodTypeParameterTA, anno, "methodTypeParameter"); + methodTypeParameterTA = anno; + + anno = typeAnnotatedMethod.getAnnotatedParameterTypes()[0].getAnnotations()[0]; + verifyTestAnn(formalParameterTA, anno, "formalParameter"); + formalParameterTA = anno; + + anno = typeAnnotatedMethod.getAnnotatedExceptionTypes()[0].getAnnotations()[0]; + verifyTestAnn(throwsTA, anno, "throws"); + throwsTA = anno; + } + + private static void verifyTestAnn(Annotation verifyAgainst, Annotation anno, String expectedSite) { + verifyTestAnnSite(anno, expectedSite); + + // When called before transform verifyAgainst will be null, when called + // after transform it will be the annotation from before the transform + if (verifyAgainst != null) { + assertTrue(anno.equals(verifyAgainst), + "Annotations do not match before and after." + + " Before: \"" + verifyAgainst + "\", After: \"" + anno + "\""); + } + } + + private static void verifyTestAnnSite(Annotation testAnn, String expectedSite) { + String expectedAnn = "@TestAnn(site=" + expectedSite + ")"; + assertTrue(testAnn.toString().equals(expectedAnn), + "Expected \"" + expectedAnn + "\", got \"" + testAnn + "\""); + } + + public static class TypeAnnotatedTestClass <@TestAnn(site="classTypeParameter") S,T> + extends @TestAnn(site="extends") Thread + implements @TestAnn(site="implements") Runnable { + + public @TestAnn(site="field") boolean typeAnnotatedBoolean; + + public + RedefineAnnotations. + @TestAnn(site="inner") TypeAnnotatedTestClass + typeAnnotatedInner; + + public + @TestAnn(site="array4") boolean + @TestAnn(site="array1") [] + @TestAnn(site="array2") [] + @TestAnn(site="array3") [] + typeAnnotatedArray; + + public @TestAnn(site="map1") Map + <@TestAnn(site="map2") ? 
extends @TestAnn(site="map3") String, + @TestAnn(site="map4") List<@TestAnn(site="map5") Object>> typeAnnotatedMap; + + public int dummy1; + public int dummy2; + public int dummy3; + + @TestAnn(site="return") <@TestAnn(site="methodTypeParameter") U,V> Class + typeAnnotatedMethod(@TestAnn(site="formalParameter") TypeAnnotatedTestClass arg) + throws @TestAnn(site="throws") ClassNotFoundException { + + @TestAnn(site="local_variable_type") int foo = 0; + throw new ClassNotFoundException(); + } + + public void run() {} + } +} diff --git a/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java b/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java --- a/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java +++ b/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java @@ -30,6 +30,7 @@ import java.util.function.Function; import java.util.stream.Stream; import java.security.BasicPermission; +import java.net.URL; import sun.hotspot.parser.DiagnosticCommand; @@ -84,6 +85,11 @@ } private native boolean isClassAlive0(String name); + // Resource/Class Lookup Cache + public native boolean classKnownToNotExist(ClassLoader loader, String name); + public native URL[] getLookupCacheURLs(ClassLoader loader); + public native int[] getLookupCacheMatches(ClassLoader loader, String name); + // G1 public native boolean g1InConcurrentMark(); public native boolean g1IsHumongous(Object o); # HG changeset patch # User amurillo # Date 1415380979 28800 # Fri Nov 07 09:22:59 2014 -0800 # Node ID b8ca8ec1daea70f7c0d519e866f9f147ec247055 # Parent 4d5dc0d0f8799fafa1135d51d85edd4edd566501 Added tag hs25.40-b18 for changeset 4d5dc0d0f879 diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -551,3 +551,4 @@ 6b93bf9ea3ea57ed0fe53cfedb2f9ab912c324e5 jdk8u40-b12 521e269ae1daa9df1cb0835b97aa76bdf340fcb2 hs25.40-b17 86307d47790785398d0695acc361bccaefe25f94 jdk8u40-b13 +4d5dc0d0f8799fafa1135d51d85edd4edd566501 hs25.40-b18
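
The RedefineAnnotations test above relies on core reflection to observe the retained type annotations before and after the retransform. A small standalone sketch of that read path, using the standard java.lang.reflect.AnnotatedType API; the class and annotation names below are made up for illustration and are not part of the test:

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.AnnotatedType;
import java.util.Arrays;

public class TypeAnnotationReadDemo {

    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.TYPE_USE)
    @interface Site { String value(); }

    // Type-use annotations on the superclass and on a field's type,
    // mirroring the "extends" and "field" sites checked by the test.
    static class Sample extends @Site("extends") Thread {
        @Site("field") boolean flag;
    }

    public static void main(String[] args) throws Exception {
        // Annotation on the superclass use
        AnnotatedType sup = Sample.class.getAnnotatedSuperclass();
        System.out.println(Arrays.toString(sup.getAnnotations()));

        // Annotation on the declared field's type
        AnnotatedType fieldType =
            Sample.class.getDeclaredField("flag").getAnnotatedType();
        System.out.println(Arrays.toString(fieldType.getAnnotations()));
    }
}
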