--- old/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp 2018-06-13 04:02:13.672929954 -0400
+++ new/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp 2018-06-13 04:02:13.408916094 -0400
@@ -33,13 +33,13 @@
   // LR is live. It must be saved around calls.
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
       if (UseCompressedOops) {
         __ ldrw(dst, src);
         if (oop_not_null) {
@@ -51,7 +51,7 @@
         __ ldr(dst, src);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ ldr(dst, src);
     }
     break;
@@ -71,13 +71,13 @@
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Address dst, Register val, Register tmp1, Register tmp2) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
     val = val == noreg ? zr : val;
-    if (on_heap) {
+    if (in_heap) {
       if (UseCompressedOops) {
         assert(!dst.uses(val), "not enough registers");
         if (val != zr) {
@@ -88,7 +88,7 @@
         __ str(val, dst);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ str(val, dst);
     }
     break;
--- old/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp 2018-06-13 04:02:14.484972583 -0400
+++ new/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp 2018-06-13 04:02:14.216958511 -0400
@@ -2120,7 +2120,7 @@
   tbz(r0, 0, not_weak);    // Test for jweak tag.
   // Resolve jweak.
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF, value,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF, value,
                  Address(value, -JNIHandles::weak_tag_value), tmp, thread);
   verify_oop(value);
   b(done);
--- old/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp 2018-06-13 04:02:15.413021288 -0400
+++ new/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp 2018-06-13 04:02:15.101004912 -0400
@@ -29,12 +29,12 @@
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register dst, Address src, Register tmp1, Register tmp2, Register tmp3) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
 #ifdef AARCH64
       if (UseCompressedOops) {
         __ ldr_w(dst, src);
@@ -45,7 +45,7 @@
         __ ldr(dst, src);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ ldr(dst, src);
     }
     break;
@@ -57,12 +57,12 @@
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Address obj, Register val, Register tmp1, Register tmp2, Register tmp3, bool is_null) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
 #ifdef AARCH64
       if (UseCompressedOops) {
         assert(!dst.uses(src), "not enough registers");
@@ -76,7 +76,7 @@
         __ str(val, obj);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ str(val, obj);
     }
     break;
--- old/src/hotspot/cpu/arm/macroAssembler_arm.cpp 2018-06-13 04:02:16.285067066 -0400
+++ new/src/hotspot/cpu/arm/macroAssembler_arm.cpp 2018-06-13 04:02:15.969050474 -0400
@@ -2135,7 +2135,7 @@
   tbz(value, 0, not_weak); // Test for jweak tag.
   // Resolve jweak.
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                  Address(value, -JNIHandles::weak_tag_value), value, tmp1, tmp2, noreg);
   b(done);
   bind(not_weak);
--- old/src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp 2018-06-13 04:02:17.185114297 -0400
+++ new/src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp 2018-06-13 04:02:16.869097726 -0400
@@ -335,7 +335,7 @@
   __ beq(CCR0, not_weak); // Test for jweak tag.
   __ verify_oop(value);
-  g1_write_barrier_pre(masm, IN_ROOT | ON_PHANTOM_OOP_REF,
+  g1_write_barrier_pre(masm, IN_NATIVE | ON_PHANTOM_OOP_REF,
                        noreg, noreg, value, tmp1, tmp2, needs_frame);
   __ bind(not_weak);
--- old/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp 2018-06-13 04:02:18.053159875 -0400
+++ new/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp 2018-06-13 04:02:17.741143481 -0400
@@ -33,16 +33,16 @@
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register base, RegisterOrConstant ind_or_offs, Register val, Register tmp1, Register tmp2, Register tmp3, bool needs_frame) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
   assert_different_registers(base, val, tmp1, tmp2, R0);
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       Register co = tmp1;
       if (val == noreg) {
         __ li(co, 0);
@@ -66,16 +66,16 @@
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register base, RegisterOrConstant ind_or_offs, Register dst, Register tmp1, Register tmp2, bool needs_frame, Label *L_handle_null) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
   assert_different_registers(ind_or_offs.register_or_noreg(), dst, R0);
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       if (L_handle_null != NULL) { // Label provided.
         __ lwz(dst, ind_or_offs, base);
         __ cmpwi(CCR0, dst, 0);
--- old/src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp 2018-06-13 04:02:18.925205631 -0400
+++ new/src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp 2018-06-13 04:02:18.613189270 -0400
@@ -329,7 +329,7 @@
 inline void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Register base, RegisterOrConstant ind_or_offs, Register val, Register tmp1, Register tmp2, Register tmp3, bool needs_frame) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_UNKNOWN_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   bool as_raw = (decorators & AS_RAW) != 0;
@@ -348,7 +348,7 @@
 inline void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register base, RegisterOrConstant ind_or_offs, Register dst, Register tmp1, Register tmp2, bool needs_frame, Label *L_handle_null) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_PHANTOM_OOP_REF | ON_WEAK_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   decorators = AccessInternal::decorator_fixup(decorators);
--- old/src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp 2018-06-13 04:02:19.801251624 -0400
+++ new/src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp 2018-06-13 04:02:19.485235023 -0400
@@ -403,7 +403,7 @@
   __ z_tmll(tmp1, JNIHandles::weak_tag_mask); // Test for jweak tag.
   __ z_braz(Lnot_weak);
   __ verify_oop(value);
-  DecoratorSet decorators = IN_ROOT | ON_PHANTOM_OOP_REF;
+  DecoratorSet decorators = IN_NATIVE | ON_PHANTOM_OOP_REF;
   g1_write_barrier_pre(masm, decorators, (const Address*)NULL, value, noreg, tmp1, tmp2, true);
   __ bind(Lnot_weak);
   __ verify_oop(value);
--- old/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp 2018-06-13 04:02:20.677297607 -0400
+++ new/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp 2018-06-13 04:02:20.365281229 -0400
@@ -37,15 +37,15 @@
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, const Address& addr, Register dst, Register tmp1, Register tmp2, Label *L_handle_null) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       __ z_llgf(dst, addr);
       if (L_handle_null != NULL) { // Label provided.
         __ compareU32_and_branch(dst, (intptr_t)0, Assembler::bcondEqual, *L_handle_null);
@@ -67,16 +67,16 @@
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, const Address& addr, Register val, Register tmp1, Register tmp2, Register tmp3) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
   assert_different_registers(val, tmp1, tmp2);
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       if (val == noreg) {
         __ clear_mem(addr, 4);
       } else if (Universe::narrow_oop_mode() == Universe::UnscaledNarrowOop) {
--- old/src/hotspot/cpu/s390/macroAssembler_s390.cpp 2018-06-13 04:02:21.561344001 -0400
+++ new/src/hotspot/cpu/s390/macroAssembler_s390.cpp 2018-06-13 04:02:21.245327416 -0400
@@ -4051,7 +4051,7 @@
 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, const Address& addr, Register val, Register tmp1, Register tmp2, Register tmp3) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_UNKNOWN_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   decorators = AccessInternal::decorator_fixup(decorators);
@@ -4070,7 +4070,7 @@
 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, const Address& addr, Register dst, Register tmp1, Register tmp2, Label *is_null) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_PHANTOM_OOP_REF | ON_WEAK_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   decorators = AccessInternal::decorator_fixup(decorators);
--- old/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp 2018-06-13 04:02:22.517394185 -0400
+++ new/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp 2018-06-13 04:02:22.205377808 -0400
@@ -32,14 +32,14 @@
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register val, Address dst, Register tmp) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (on_heap) {
+    if (in_heap) {
       if (dst.has_disp() && !Assembler::is_simm13(dst.disp())) {
         assert(!dst.has_index(), "not supported yet");
         __ set(dst.disp(), tmp);
@@ -57,7 +57,7 @@
         __ st_ptr(val, dst);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ st_ptr(val, dst);
     }
     break;
@@ -68,14 +68,14 @@
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Address src, Register dst, Register tmp) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (on_heap) {
+    if (in_heap) {
      if (src.has_disp() && !Assembler::is_simm13(src.disp())) {
        assert(!src.has_index(), "not supported yet");
        __ set(src.disp(), tmp);
@@ -92,7 +92,7 @@
        __ ld_ptr(src, dst);
      }
    } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
      __ ld_ptr(src, dst);
    }
    break;
--- old/src/hotspot/cpu/sparc/macroAssembler_sparc.cpp 2018-06-13 04:02:23.393440167 -0400
+++ new/src/hotspot/cpu/sparc/macroAssembler_sparc.cpp 2018-06-13 04:02:23.081423790 -0400
@@ -176,7 +176,7 @@
   delayed()->andcc(value, JNIHandles::weak_tag_mask, G0); // Test for jweak
   brx(Assembler::zero, true, Assembler::pt, not_weak);
   delayed()->nop();
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                  Address(value, -JNIHandles::weak_tag_value), value, tmp);
   verify_oop(value);
   br (Assembler::always, true, Assembler::pt, done);
--- old/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp 2018-06-13 04:02:24.321488880 -0400
+++ new/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp 2018-06-13 04:02:23.997471870 -0400
@@ -31,15 +31,15 @@
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register dst, Address src, Register tmp1, Register tmp_thread) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   bool atomic = (decorators & MO_RELAXED) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
 #ifdef _LP64
       if (UseCompressedOops) {
         __ movl(dst, src);
@@ -54,7 +54,7 @@
         __ movptr(dst, src);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ movptr(dst, src);
     }
     break;
@@ -96,15 +96,15 @@
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Address dst, Register val, Register tmp1, Register tmp2) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   bool atomic = (decorators & MO_RELAXED) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
       if (val == noreg) {
         assert(!oop_not_null, "inconsistent access");
 #ifdef _LP64
@@ -133,7 +133,7 @@
         }
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       assert(val != noreg, "not supported");
       __ movptr(dst, val);
     }
--- old/src/hotspot/cpu/x86/macroAssembler_x86.cpp 2018-06-13 04:02:25.221536126 -0400
+++ new/src/hotspot/cpu/x86/macroAssembler_x86.cpp 2018-06-13 04:02:24.905519531 -0400
@@ -5253,7 +5253,7 @@
   testptr(value, JNIHandles::weak_tag_mask); // Test for jweak tag.
   jcc(Assembler::zero, not_weak);
   // Resolve jweak.
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                  value, Address(value, -JNIHandles::weak_tag_value), tmp, thread);
   verify_oop(value);
   jmp(done);
--- old/src/hotspot/share/classfile/classLoaderData.cpp 2018-06-13 04:02:26.209587980 -0400
+++ new/src/hotspot/share/classfile/classLoaderData.cpp 2018-06-13 04:02:25.893571394 -0400
@@ -815,7 +815,7 @@
     assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
     // This root is not walked in safepoints, and hence requires an appropriate
     // decorator that e.g. maintains the SATB invariant in SATB collectors.
-    RootAccess::oop_store(ptr, oop(NULL));
+    NativeAccess::oop_store(ptr, oop(NULL));
   }
 }
--- old/src/hotspot/share/gc/g1/c2/g1BarrierSetC2.cpp 2018-06-13 04:02:27.093634384 -0400
+++ new/src/hotspot/share/gc/g1/c2/g1BarrierSetC2.cpp 2018-06-13 04:02:26.781618006 -0400
@@ -601,10 +601,10 @@
   bool mismatched = (decorators & C2_MISMATCHED) != 0;
   bool unknown = (decorators & ON_UNKNOWN_OOP_REF) != 0;
-  bool on_heap = (decorators & IN_HEAP) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
   bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
   bool is_unordered = (decorators & MO_UNORDERED) != 0;
-  bool need_cpu_mem_bar = !is_unordered || mismatched || !on_heap;
+  bool need_cpu_mem_bar = !is_unordered || mismatched || !in_heap;
   Node* offset = adr->is_AddP() ? adr->in(AddPNode::Offset) : kit->top();
   Node* load = CardTableBarrierSetC2::load_at_resolved(access, val_type);
@@ -615,7 +615,7 @@
   // SATB log buffer using the pre-barrier mechanism.
   // Also we need to add memory barrier to prevent commoning reads
   // from this field across safepoint since GC can change its value.
-  bool need_read_barrier = on_heap && (on_weak ||
+  bool need_read_barrier = in_heap && (on_weak ||
                                        (unknown && offset != kit->top() && obj != kit->top()));
   if (!access.is_oop() || !need_read_barrier) {
--- old/src/hotspot/share/gc/shared/barrierSet.hpp 2018-06-13 04:02:27.977680791 -0400
+++ new/src/hotspot/share/gc/shared/barrierSet.hpp 2018-06-13 04:02:27.661664196 -0400
@@ -270,7 +270,7 @@
   }
   // Off-heap oop accesses. These accessors get resolved when
-  // IN_HEAP is not set (e.g. when using the RootAccess API), it is
+  // IN_HEAP is not set (e.g. when using the NativeAccess API), it is
   // an oop* overload, and the barrier strength is AS_NORMAL.
   template
   static oop oop_load_not_in_heap(T* addr) {
--- old/src/hotspot/share/gc/shared/c2/barrierSetC2.cpp 2018-06-13 04:02:28.853726772 -0400
+++ new/src/hotspot/share/gc/shared/c2/barrierSetC2.cpp 2018-06-13 04:02:28.533709970 -0400
@@ -41,7 +41,7 @@
   bool mismatched = (_decorators & C2_MISMATCHED) != 0;
   bool is_unordered = (_decorators & MO_UNORDERED) != 0;
   bool anonymous = (_decorators & C2_UNSAFE_ACCESS) != 0;
-  bool on_heap = (_decorators & IN_HEAP) != 0;
+  bool in_heap = (_decorators & IN_HEAP) != 0;
   bool is_write = (_decorators & C2_WRITE_ACCESS) != 0;
   bool is_read = (_decorators & C2_READ_ACCESS) != 0;
@@ -58,7 +58,7 @@
     // the barriers get omitted and the unsafe reference begins to "pollute"
     // the alias analysis of the rest of the graph, either Compile::can_alias
     // or Compile::must_alias will throw a diagnostic assert.)
-    if (!on_heap || !is_unordered || (mismatched && !_addr.type()->isa_aryptr())) {
+    if (!in_heap || !is_unordered || (mismatched && !_addr.type()->isa_aryptr())) {
       return true;
     }
   }
@@ -74,8 +74,8 @@
   bool unaligned = (decorators & C2_UNALIGNED) != 0;
   bool requires_atomic_access = (decorators & MO_UNORDERED) == 0;
-  bool in_root = (decorators & IN_ROOT) != 0;
-  assert(!in_root, "not supported yet");
+  bool in_native = (decorators & IN_NATIVE) != 0;
+  assert(!in_native, "not supported yet");
   if (access.type() == T_DOUBLE) {
     Node* new_val = kit->dstore_rounding(val.node());
@@ -103,8 +103,8 @@
   bool control_dependent = (decorators & C2_CONTROL_DEPENDENT_LOAD) != 0;
   bool pinned = (decorators & C2_PINNED_LOAD) != 0;
-  bool in_root = (decorators & IN_ROOT) != 0;
-  assert(!in_root, "not supported yet");
+  bool in_native = (decorators & IN_NATIVE) != 0;
+  assert(!in_native, "not supported yet");
   MemNode::MemOrd mo = access.mem_node_mo();
   LoadNode::ControlDependency dep = pinned ? LoadNode::Pinned : LoadNode::DependsOnlyOnTest;
--- old/src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp 2018-06-13 04:02:29.733772959 -0400
+++ new/src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp 2018-06-13 04:02:29.421756582 -0400
@@ -39,10 +39,10 @@
   bool on_array = (decorators & IN_HEAP_ARRAY) != 0;
   bool anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
-  bool on_heap = (decorators & IN_HEAP) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
   bool use_precise = on_array || anonymous;
-  if (!access.is_oop() || (!on_heap && !anonymous)) {
+  if (!access.is_oop() || (!in_heap && !anonymous)) {
     return BarrierSetC2::store_at_resolved(access, val);
   }
--- old/src/hotspot/share/gc/z/zHeapIterator.cpp 2018-06-13 04:02:30.601818519 -0400
+++ new/src/hotspot/share/gc/z/zHeapIterator.cpp 2018-06-13 04:02:30.285801934 -0400
@@ -63,7 +63,7 @@
   virtual void do_oop(oop* p) {
     // Load barrier needed here for the same reason we
    // need fixup_partial_loads() in ZHeap::mark_end()
-    const oop obj = RootAccess<>::oop_load(p);
+    const oop obj = NativeAccess<>::oop_load(p);
     _iter->push(obj);
     _iter->drain(_cl);
   }
--- old/src/hotspot/share/gc/z/zOopClosures.cpp 2018-06-13 04:02:31.469864081 -0400
+++ new/src/hotspot/share/gc/z/zOopClosures.cpp 2018-06-13 04:02:31.157847711 -0400
@@ -57,7 +57,7 @@
 void ZVerifyRootOopClosure::do_oop(oop* p) {
   guarantee(!ZHeap::heap()->is_in((uintptr_t)p), "oop* " PTR_FORMAT " in heap", p2i(p));
-  const oop obj = RootAccess<>::oop_load(p);
+  const oop obj = NativeAccess<>::oop_load(p);
   z_verify_loaded_object(p, obj);
 }
--- old/src/hotspot/share/oops/access.hpp 2018-06-13 04:02:32.341909861 -0400
+++ new/src/hotspot/share/oops/access.hpp 2018-06-13 04:02:32.025893266 -0400
@@ -121,7 +121,7 @@
   static void verify_heap_oop_decorators() {
     const DecoratorSet heap_oop_decorators = AS_DECORATOR_MASK | ON_DECORATOR_MASK |
                                              OOP_DECORATOR_MASK | (IN_DECORATOR_MASK ^
-                                             (IN_ROOT | IN_CONCURRENT_ROOT)); // no root accesses in the heap
+                                             (IN_NATIVE | IN_CONCURRENT_ROOT)); // no root accesses in the heap
     verify_decorators();
   }
@@ -296,7 +296,7 @@
 // Helper for performing normal accesses in roots. These accesses
 // may resolve an accessor on a GC barrier set
 template
-class RootAccess: public Access {};
+class NativeAccess: public Access {};
 // Helper for array access.
 template
@@ -376,10 +376,10 @@
   ));
   const DecoratorSet location_decorators = decorators & IN_DECORATOR_MASK;
   STATIC_ASSERT(location_decorators == 0 || ( // make sure location decorators are disjoint if set
-    (location_decorators ^ IN_ROOT) == 0 ||
+    (location_decorators ^ IN_NATIVE) == 0 ||
     (location_decorators ^ IN_HEAP) == 0 ||
     (location_decorators ^ (IN_HEAP | IN_HEAP_ARRAY)) == 0 ||
-    (location_decorators ^ (IN_ROOT | IN_CONCURRENT_ROOT)) == 0
+    (location_decorators ^ (IN_NATIVE | IN_CONCURRENT_ROOT)) == 0
   ));
 }
--- old/src/hotspot/share/oops/accessDecorators.hpp 2018-06-13 04:02:33.217955836 -0400
+++ new/src/hotspot/share/oops/accessDecorators.hpp 2018-06-13 04:02:32.901939255 -0400
@@ -184,16 +184,16 @@
 //   be omitted if this decorator is not set.
 // * IN_HEAP_ARRAY: The access is performed on a heap allocated array. This is sometimes a special case
 //   for some GCs, and implies that it is an IN_HEAP.
-// * IN_ROOT: The access is performed in an off-heap data structure pointing into the Java heap.
+// * IN_NATIVE: The access is performed in an off-heap data structure pointing into the Java heap.
 // * IN_CONCURRENT_ROOT: The access is performed in an off-heap data structure pointing into the Java heap,
 //   but is notably not scanned during safepoints. This is sometimes a special case for some GCs and
-//   implies that it is also an IN_ROOT.
+//   implies that it is also an IN_NATIVE.
 const DecoratorSet IN_HEAP = UCONST64(1) << 20;
 const DecoratorSet IN_HEAP_ARRAY = UCONST64(1) << 21;
-const DecoratorSet IN_ROOT = UCONST64(1) << 22;
+const DecoratorSet IN_NATIVE = UCONST64(1) << 22;
 const DecoratorSet IN_CONCURRENT_ROOT = UCONST64(1) << 23;
 const DecoratorSet IN_DECORATOR_MASK = IN_HEAP | IN_HEAP_ARRAY |
-                                       IN_ROOT | IN_CONCURRENT_ROOT;
+                                       IN_NATIVE | IN_CONCURRENT_ROOT;
 // == Value Decorators ==
 // * OOP_NOT_NULL: This property can make certain barriers faster such as compressing oops.
@@ -242,7 +242,7 @@
   static const DecoratorSet heap_array_is_in_heap = barrier_strength_default |
     ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
   static const DecoratorSet conc_root_is_root = heap_array_is_in_heap |
-    ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_ROOT : INTERNAL_EMPTY);
+    ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
   static const DecoratorSet value = conc_root_is_root | BT_BUILDTIME_DECORATORS;
 };
@@ -263,7 +263,7 @@
   DecoratorSet heap_array_is_in_heap = barrier_strength_default |
     ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
   DecoratorSet conc_root_is_root = heap_array_is_in_heap |
-    ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_ROOT : INTERNAL_EMPTY);
+    ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
   DecoratorSet value = conc_root_is_root | BT_BUILDTIME_DECORATORS;
   return value;
 }
--- old/src/hotspot/share/oops/oopHandle.inline.hpp 2018-06-13 04:02:34.098002029 -0400
+++ new/src/hotspot/share/oops/oopHandle.inline.hpp 2018-06-13 04:02:33.781985440 -0400
@@ -29,7 +29,7 @@
 #include "oops/oopHandle.hpp"
 inline oop OopHandle::resolve() const {
-  return (_obj == NULL) ? (oop)NULL : RootAccess::oop_load(_obj);
+  return (_obj == NULL) ? (oop)NULL : NativeAccess::oop_load(_obj);
 }
 #endif // SHARE_VM_OOPS_OOPHANDLE_INLINE_HPP
--- old/src/hotspot/share/oops/weakHandle.cpp 2018-06-13 04:02:34.982048436 -0400
+++ new/src/hotspot/share/oops/weakHandle.cpp 2018-06-13 04:02:34.666031842 -0400
@@ -48,7 +48,7 @@
     vm_exit_out_of_memory(sizeof(oop*), OOM_MALLOC_ERROR, "Unable to create new weak oop handle in OopStorage");
   }
   // Create WeakHandle with address returned and store oop into it.
-  RootAccess::oop_store(oop_addr, obj());
+  NativeAccess::oop_store(oop_addr, obj());
   return WeakHandle(oop_addr);
 }
@@ -58,7 +58,7 @@
   if (_obj != NULL) {
     // Clear the WeakHandle. For race in creating ClassLoaderData, we can release this
     // WeakHandle before it is cleared by GC.
-    RootAccess::oop_store(_obj, (oop)NULL);
+    NativeAccess::oop_store(_obj, (oop)NULL);
     get_storage()->release(_obj);
   }
 }
--- old/src/hotspot/share/oops/weakHandle.inline.hpp 2018-06-13 04:02:35.846093781 -0400
+++ new/src/hotspot/share/oops/weakHandle.inline.hpp 2018-06-13 04:02:35.530077199 -0400
@@ -31,18 +31,18 @@
 template
 oop WeakHandle::resolve() const {
   assert(!is_null(), "Must be created");
-  return RootAccess::oop_load(_obj);
+  return NativeAccess::oop_load(_obj);
 }
 template
 oop WeakHandle::peek() const {
   assert(!is_null(), "Must be created");
-  return RootAccess::oop_load(_obj);
+  return NativeAccess::oop_load(_obj);
 }
 template
 void WeakHandle::replace(oop with_obj) {
-  RootAccess::oop_store(_obj, with_obj);
+  NativeAccess::oop_store(_obj, with_obj);
 }
 #endif // SHARE_VM_OOPS_WEAKHANDLE_INLINE_HPP
--- old/src/hotspot/share/prims/jvmtiTagMap.cpp 2018-06-13 04:02:36.714139342 -0400
+++ new/src/hotspot/share/prims/jvmtiTagMap.cpp 2018-06-13 04:02:36.398122756 -0400
@@ -90,11 +90,11 @@
   // accessor methods
   inline oop* object_addr() { return &_object; }
-  inline oop object() { return RootAccess::oop_load(object_addr()); }
+  inline oop object() { return NativeAccess::oop_load(object_addr()); }
   // Peek at the object without keeping it alive. The returned object must be
   // kept alive using a normal access if it leaks out of a thread transition from VM.
   inline oop object_peek() {
-    return RootAccess::oop_load(object_addr());
+    return NativeAccess::oop_load(object_addr());
   }
   inline jlong tag() const { return _tag; }
--- old/src/hotspot/share/runtime/jniHandles.cpp 2018-06-13 04:02:37.618186798 -0400
+++ new/src/hotspot/share/runtime/jniHandles.cpp 2018-06-13 04:02:37.306170422 -0400
@@ -109,7 +109,7 @@
   // Return NULL on allocation failure.
   if (ptr != NULL) {
     assert(*ptr == NULL, "invariant");
-    RootAccess::oop_store(ptr, obj());
+    NativeAccess::oop_store(ptr, obj());
     res = reinterpret_cast(ptr);
   } else {
     report_handle_allocation_failure(alloc_failmode, "global");
@@ -133,7 +133,7 @@
   // Return NULL on allocation failure.
   if (ptr != NULL) {
     assert(*ptr == NULL, "invariant");
-    RootAccess::oop_store(ptr, obj());
+    NativeAccess::oop_store(ptr, obj());
     char* tptr = reinterpret_cast(ptr) + weak_tag_value;
     res = reinterpret_cast(tptr);
   } else {
@@ -160,14 +160,14 @@
 oop JNIHandles::resolve_jweak(jweak handle) {
   assert(handle != NULL, "precondition");
   assert(is_jweak(handle), "precondition");
-  return RootAccess::oop_load(jweak_ptr(handle));
+  return NativeAccess::oop_load(jweak_ptr(handle));
 }
 bool JNIHandles::is_global_weak_cleared(jweak handle) {
   assert(handle != NULL, "precondition");
   assert(is_jweak(handle), "not a weak handle");
   oop* oop_ptr = jweak_ptr(handle);
-  oop value = RootAccess::oop_load(oop_ptr);
+  oop value = NativeAccess::oop_load(oop_ptr);
   return value == NULL;
 }
@@ -175,7 +175,7 @@
   if (handle != NULL) {
     assert(!is_jweak(handle), "wrong method for detroying jweak");
     oop* oop_ptr = jobject_ptr(handle);
-    RootAccess::oop_store(oop_ptr, (oop)NULL);
+    NativeAccess::oop_store(oop_ptr, (oop)NULL);
     global_handles()->release(oop_ptr);
   }
 }
@@ -185,7 +185,7 @@
   if (handle != NULL) {
     assert(is_jweak(handle), "JNI handle not jweak");
     oop* oop_ptr = jweak_ptr(handle);
-    RootAccess::oop_store(oop_ptr, (oop)NULL);
+    NativeAccess::oop_store(oop_ptr, (oop)NULL);
     weak_global_handles()->release(oop_ptr);
   }
 }
@@ -517,7 +517,7 @@
   // Try last block
   if (_last->_top < block_size_in_oops) {
     oop* handle = &(_last->_handles)[_last->_top++];
-    RootAccess::oop_store(handle, obj);
+    NativeAccess::oop_store(handle, obj);
     return (jobject) handle;
   }
@@ -525,7 +525,7 @@
   if (_free_list != NULL) {
     oop* handle = _free_list;
     _free_list = (oop*) *_free_list;
-    RootAccess::oop_store(handle, obj);
+    NativeAccess::oop_store(handle, obj);
     return (jobject) handle;
   }
   // Check if unused block follow last
--- old/src/hotspot/share/runtime/jniHandles.inline.hpp 2018-06-13 04:02:38.506233407 -0400
+++ new/src/hotspot/share/runtime/jniHandles.inline.hpp 2018-06-13 04:02:38.190216824 -0400
@@ -57,7 +57,7 @@
   if (is_jweak(handle)) { // Unlikely
     result = resolve_jweak(handle);
   } else {
-    result = RootAccess::oop_load(jobject_ptr(handle));
+    result = NativeAccess::oop_load(jobject_ptr(handle));
     // Construction of jobjects canonicalize a null value into a null
     // jobject, so for non-jweak the pointee should never be null.
     assert(external_guard || result != NULL, "Invalid JNI handle");
@@ -83,7 +83,7 @@
 inline void JNIHandles::destroy_local(jobject handle) {
   if (handle != NULL) {
     assert(!is_jweak(handle), "Invalid JNI local handle");
-    RootAccess<>::oop_store(jobject_ptr(handle), (oop)NULL);
+    NativeAccess<>::oop_store(jobject_ptr(handle), (oop)NULL);
   }
 }
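The in_heap/in_native flags tested throughout the BarrierSetAssembler and C2 changes above are bits in a DecoratorSet bitmask; the standalone C++ sketch below models that dispatch only. It is not part of the patch and not HotSpot code: the bit positions are taken from accessDecorators.hpp above, but classify_access and the printed strings are illustrative placeholders.

// Standalone sketch: how a DecoratorSet bitmask selects between a heap access
// and a native (off-heap) access, mirroring the renamed in_heap/in_native
// checks in the patch. Simplified, illustrative definitions only.
#include <cassert>
#include <cstdint>
#include <cstdio>

typedef uint64_t DecoratorSet;

const DecoratorSet IN_HEAP   = DecoratorSet(1) << 20;
const DecoratorSet IN_NATIVE = DecoratorSet(1) << 22;  // formerly IN_ROOT

// Hypothetical helper (not a HotSpot function): branches on the location
// decorator the way BarrierSetAssembler::load_at/store_at do above.
const char* classify_access(DecoratorSet decorators) {
  bool in_heap   = (decorators & IN_HEAP) != 0;
  bool in_native = (decorators & IN_NATIVE) != 0;
  assert(in_heap || in_native);  // "where?"
  return in_heap ? "heap access (GC barriers apply)"
                 : "native/off-heap access (e.g. JNI handle or OopStorage slot)";
}

int main() {
  printf("%s\n", classify_access(IN_HEAP));
  printf("%s\n", classify_access(IN_NATIVE));
  return 0;
}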