--- old/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp 2018-03-20 09:04:59.272034985 +0000
+++ new/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp 2018-03-20 09:04:58.752034985 +0000
@@ -79,8 +79,8 @@
   void call_VM_helper(Register oop_result, address entry_point, int number_of_arguments, bool check_exceptions = true);
 
-  // Maximum size of class area in Metaspace when compressed
-  uint64_t use_XOR_for_compressed_class_base;
+  // True if an XOR can be used to expand narrow klass references.
+  bool use_XOR_for_compressed_class_base;
 
  public:
   MacroAssembler(CodeBuffer* code) : Assembler(code) {
@@ -88,7 +88,7 @@
       = (operand_valid_for_logical_immediate(false /*is32*/,
                                              (uint64_t)Universe::narrow_klass_base())
          && ((uint64_t)Universe::narrow_klass_base()
-             > (1u << log2_intptr(CompressedClassSpaceSize))));
+             > (1UL << log2_intptr(Universe::narrow_klass_range()))));
   }
 
   // These routines should emit JVMTI PopFrame and ForceEarlyReturn handling code.
--- old/src/hotspot/share/memory/metaspaceShared.cpp 2018-03-20 09:05:01.222034985 +0000
+++ new/src/hotspot/share/memory/metaspaceShared.cpp 2018-03-20 09:05:00.702034985 +0000
@@ -233,6 +233,7 @@
       // with the archived ones, so it must be done after all encodings are determined.
       mapinfo->map_heap_regions();
     }
+    Universe::set_narrow_klass_range(CompressedClassSpaceSize);
 #endif // _LP64
   } else {
     assert(!mapinfo->is_open() && !UseSharedSpaces,
@@ -298,6 +299,8 @@
     // Set narrow_klass_shift to be LogKlassAlignmentInBytes. This is consistent
     // with AOT.
     Universe::set_narrow_klass_shift(LogKlassAlignmentInBytes);
+    // Set the range of klass addresses to 4GB.
+    Universe::set_narrow_klass_range(cds_total);
     Metaspace::initialize_class_space(tmp_class_space);
     tty->print_cr("narrow_klass_base = " PTR_FORMAT ", narrow_klass_shift = %d",
--- old/src/hotspot/share/memory/universe.cpp 2018-03-20 09:05:03.142034985 +0000
+++ new/src/hotspot/share/memory/universe.cpp 2018-03-20 09:05:02.632034985 +0000
@@ -165,6 +165,7 @@
 NarrowPtrStruct Universe::_narrow_oop = { NULL, 0, true };
 NarrowPtrStruct Universe::_narrow_klass = { NULL, 0, true };
 address Universe::_narrow_ptrs_base;
+uint64_t Universe::_narrow_klass_range = (uint64_t(max_juint)+1);
 
 void Universe::basic_type_classes_do(void f(Klass*)) {
   f(boolArrayKlassObj());
--- old/src/hotspot/share/memory/universe.hpp 2018-03-20 09:05:05.022034985 +0000
+++ new/src/hotspot/share/memory/universe.hpp 2018-03-20 09:05:04.512034985 +0000
@@ -194,7 +194,8 @@
   // For UseCompressedClassPointers.
   static struct NarrowPtrStruct _narrow_klass;
   static address _narrow_ptrs_base;
-
+  // CompressedClassSpaceSize is set to 1GB, but Klasses may appear up to 3GB away from _narrow_ptrs_base during CDS dump.
+  static uint64_t _narrow_klass_range;
   // array of dummy objects used with +FullGCAlot
   debug_only(static objArrayOop _fullgc_alot_dummy_array;)
   // index of next entry to clear
@@ -244,6 +245,10 @@
     assert(UseCompressedClassPointers, "no compressed klass ptrs?");
     _narrow_klass._base = base;
   }
+  static void set_narrow_klass_range(uint64_t range) {
+    assert(UseCompressedClassPointers, "no compressed klass ptrs?");
+    _narrow_klass_range = range;
+  }
   static void set_narrow_oop_use_implicit_null_checks(bool use) {
     assert(UseCompressedOops, "no compressed ptrs?");
     _narrow_oop._use_implicit_null_checks = use;
   }
@@ -429,6 +434,7 @@
   // For UseCompressedClassPointers
   static address narrow_klass_base()                   { return  _narrow_klass._base; }
   static bool  is_narrow_klass_base(void* addr)        { return (narrow_klass_base() == (address)addr); }
+  static uint64_t narrow_klass_range()                 { return  _narrow_klass_range; }
   static int   narrow_klass_shift()                    { return  _narrow_klass._shift; }
   static bool  narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }
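
Background on why the range matters for the aarch64 check above: the XOR fast path gated by use_XOR_for_compressed_class_base combines a (shifted) narrow-klass offset with narrow_klass_base() using a single EOR, which is only correct if no offset in the encoding range shares a set bit with the base, since only then does base ^ offset equal base + offset. The standalone sketch below is not HotSpot code; the base and offset values are made up for illustration. It demonstrates that invariant and shows how a base that is safe against a 1GB CompressedClassSpaceSize can stop being safe once Klasses appear 3GB away from the encoding base during a CDS dump, which is what Universe::narrow_klass_range() is meant to capture.

// Minimal sketch, assuming only standard C++; the constants are illustrative,
// not addresses the VM is guaranteed to choose.
#include <cassert>
#include <cstdint>

int main() {
  // XOR-decoding is safe when the base has no bits inside [0, range):
  // then base ^ offset == base | offset == base + offset for every offset.
  const uint64_t good_base = UINT64_C(0x800000000);   // bit 35 only
  const uint64_t range     = UINT64_C(0x100000000);   // 4GB, the CDS dump-time range
  assert((good_base & (range - 1)) == 0);
  for (uint64_t offset = 0; offset < range; offset += UINT64_C(0x10000000)) {
    assert((good_base ^ offset) == (good_base + offset));
  }

  // A base like this passes a check against a 1GB CompressedClassSpaceSize
  // (it is above 1GB and encodable as an aarch64 logical immediate), and
  // EOR-combining works for offsets below 1GB ...
  const uint64_t base     = UINT64_C(0x7c0000000);    // bits 30-34 set
  const uint64_t near_off = UINT64_C(0x20000000);     // 512MB
  assert((base ^ near_off) == (base + near_off));

  // ... but not once a Klass sits 3GB away, as it can during a CDS dump,
  // because that offset overlaps bits 30 and 31 of the base.
  const uint64_t far_off = UINT64_C(0xc0000000);      // 3GB
  assert((base ^ far_off) != (base + far_off));
  return 0;
}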