8204939: Change Access nomenclature: root to native
author kbarrett
Fri, 15 Jun 2018 16:53:58 -0400
changeset 50599 ecc2af326b5f
parent 50598 8d9d4d91be7f
child 50600 8e17fffa0a4b
8204939: Change Access nomenclature: root to native
Summary: Rename RootAccess => NativeAccess, IN_ROOT => IN_NATIVE, and related cleanups.
Reviewed-by: pliden, stefank, coleenp
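
Note: the whole change is the rename described in the summary: RootAccess becomes NativeAccess and the IN_ROOT location decorator becomes IN_NATIVE, with local variables (on_root/on_heap) renamed to match (in_native/in_heap). The following is a rough, self-contained sketch of the shape of the renamed API, not the real access.hpp or accessDecorators.hpp: the real Access dispatches through the GC barrier set, and the ON_PHANTOM_OOP_REF bit value used below is illustrative only.

    // Simplified stand-in for HotSpot's decorator constants and Access template.
    #include <cassert>
    #include <cstdint>

    typedef uint64_t DecoratorSet;
    const DecoratorSet INTERNAL_EMPTY     = 0;
    const DecoratorSet IN_HEAP            = DecoratorSet(1) << 20;
    const DecoratorSet IN_HEAP_ARRAY      = DecoratorSet(1) << 21;
    const DecoratorSet IN_NATIVE          = DecoratorSet(1) << 22;  // was IN_ROOT
    const DecoratorSet IN_CONCURRENT_ROOT = DecoratorSet(1) << 23;
    const DecoratorSet ON_PHANTOM_OOP_REF = DecoratorSet(1) << 15;  // illustrative bit

    typedef void* oop;  // stand-in for HotSpot's oop

    template <DecoratorSet decorators>
    class Access {
    public:
      // Stand-ins for the real dispatch through the GC barrier set.
      static oop  oop_load(oop* addr)           { return *addr; }
      static void oop_store(oop* addr, oop val) { *addr = val; }
    };

    // After this patch: RootAccess<decorators> becomes NativeAccess<decorators>,
    // still just Access with the (renamed) native location decorator or'ed in.
    template <DecoratorSet decorators = INTERNAL_EMPTY>
    class NativeAccess : public Access<IN_NATIVE | decorators> {};

    int main() {
      int dummy = 0;
      oop obj  = &dummy;    // pretend Java object
      oop slot = nullptr;   // pretend OopStorage / JNI handle slot
      // Call-site shape used throughout the patch, e.g. in jniHandles.cpp:
      NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(&slot, obj);
      oop resolved = NativeAccess<ON_PHANTOM_OOP_REF>::oop_load(&slot);
      assert(resolved == obj);
      return 0;
    }

Call sites in the patch (jniHandles.cpp, weakHandle.*, oopHandle.inline.hpp, jvmtiTagMap.cpp) follow exactly this pattern, with the real oop type and real decorator values.
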
src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp
src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp
src/hotspot/cpu/arm/macroAssembler_arm.cpp
src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp
src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp
src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp
src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp
src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp
src/hotspot/cpu/s390/macroAssembler_s390.cpp
src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp
src/hotspot/cpu/sparc/macroAssembler_sparc.cpp
src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp
src/hotspot/cpu/x86/macroAssembler_x86.cpp
src/hotspot/share/classfile/classLoaderData.cpp
src/hotspot/share/gc/g1/c2/g1BarrierSetC2.cpp
src/hotspot/share/gc/shared/barrierSet.hpp
src/hotspot/share/gc/shared/c2/barrierSetC2.cpp
src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp
src/hotspot/share/gc/shared/stringdedup/stringDedupTable.cpp
src/hotspot/share/gc/z/zOopClosures.cpp
src/hotspot/share/oops/access.hpp
src/hotspot/share/oops/accessDecorators.hpp
src/hotspot/share/oops/oopHandle.inline.hpp
src/hotspot/share/oops/weakHandle.cpp
src/hotspot/share/oops/weakHandle.inline.hpp
src/hotspot/share/prims/jvmtiTagMap.cpp
src/hotspot/share/runtime/jniHandles.cpp
src/hotspot/share/runtime/jniHandles.inline.hpp
--- a/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -33,13 +33,13 @@
 
   // LR is live.  It must be saved around calls.
 
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
       if (UseCompressedOops) {
         __ ldrw(dst, src);
         if (oop_not_null) {
@@ -51,7 +51,7 @@
         __ ldr(dst, src);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ ldr(dst, src);
     }
     break;
@@ -71,13 +71,13 @@
 
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    Address dst, Register val, Register tmp1, Register tmp2) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
     val = val == noreg ? zr : val;
-    if (on_heap) {
+    if (in_heap) {
       if (UseCompressedOops) {
         assert(!dst.uses(val), "not enough registers");
         if (val != zr) {
@@ -88,7 +88,7 @@
         __ str(val, dst);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ str(val, dst);
     }
     break;
--- a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -2120,7 +2120,7 @@
   tbz(r0, 0, not_weak);    // Test for jweak tag.
 
   // Resolve jweak.
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF, value,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF, value,
                  Address(value, -JNIHandles::weak_tag_value), tmp, thread);
   verify_oop(value);
   b(done);
--- a/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -29,12 +29,12 @@
 
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Register dst, Address src, Register tmp1, Register tmp2, Register tmp3) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
 #ifdef AARCH64
       if (UseCompressedOops) {
         __ ldr_w(dst, src);
@@ -45,7 +45,7 @@
         __ ldr(dst, src);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ ldr(dst, src);
     }
     break;
@@ -57,12 +57,12 @@
 
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    Address obj, Register val, Register tmp1, Register tmp2, Register tmp3, bool is_null) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
 #ifdef AARCH64
       if (UseCompressedOops) {
         assert(!dst.uses(src), "not enough registers");
@@ -76,7 +76,7 @@
         __ str(val, obj);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ str(val, obj);
     }
     break;
--- a/src/hotspot/cpu/arm/macroAssembler_arm.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/arm/macroAssembler_arm.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -2135,7 +2135,7 @@
   tbz(value, 0, not_weak);      // Test for jweak tag.
 
   // Resolve jweak.
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                  Address(value, -JNIHandles::weak_tag_value), value, tmp1, tmp2, noreg);
   b(done);
   bind(not_weak);
--- a/src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/ppc/gc/g1/g1BarrierSetAssembler_ppc.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -335,7 +335,7 @@
 
   __ beq(CCR0, not_weak);     // Test for jweak tag.
   __ verify_oop(value);
-  g1_write_barrier_pre(masm, IN_ROOT | ON_PHANTOM_OOP_REF,
+  g1_write_barrier_pre(masm, IN_NATIVE | ON_PHANTOM_OOP_REF,
                        noreg, noreg, value,
                        tmp1, tmp2, needs_frame);
   __ bind(not_weak);
--- a/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -33,16 +33,16 @@
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    Register base, RegisterOrConstant ind_or_offs, Register val,
                                    Register tmp1, Register tmp2, Register tmp3, bool needs_frame) {
-  bool on_heap  = (decorators & IN_HEAP) != 0;
-  bool on_root  = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
   assert_different_registers(base, val, tmp1, tmp2, R0);
 
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       Register co = tmp1;
       if (val == noreg) {
         __ li(co, 0);
@@ -66,16 +66,16 @@
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Register base, RegisterOrConstant ind_or_offs, Register dst,
                                   Register tmp1, Register tmp2, bool needs_frame, Label *L_handle_null) {
-  bool on_heap  = (decorators & IN_HEAP) != 0;
-  bool on_root  = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
   assert_different_registers(ind_or_offs.register_or_noreg(), dst, R0);
 
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       if (L_handle_null != NULL) { // Label provided.
         __ lwz(dst, ind_or_offs, base);
         __ cmpwi(CCR0, dst, 0);
--- a/src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/ppc/macroAssembler_ppc.inline.hpp	Fri Jun 15 16:53:58 2018 -0400
@@ -329,7 +329,7 @@
 inline void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators,
                                             Register base, RegisterOrConstant ind_or_offs, Register val,
                                             Register tmp1, Register tmp2, Register tmp3, bool needs_frame) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_UNKNOWN_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   bool as_raw = (decorators & AS_RAW) != 0;
@@ -348,7 +348,7 @@
 inline void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators,
                                            Register base, RegisterOrConstant ind_or_offs, Register dst,
                                            Register tmp1, Register tmp2, bool needs_frame, Label *L_handle_null) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_PHANTOM_OOP_REF | ON_WEAK_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   decorators = AccessInternal::decorator_fixup(decorators);
--- a/src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -403,7 +403,7 @@
   __ z_tmll(tmp1, JNIHandles::weak_tag_mask); // Test for jweak tag.
   __ z_braz(Lnot_weak);
   __ verify_oop(value);
-  DecoratorSet decorators = IN_ROOT | ON_PHANTOM_OOP_REF;
+  DecoratorSet decorators = IN_NATIVE | ON_PHANTOM_OOP_REF;
   g1_write_barrier_pre(masm, decorators, (const Address*)NULL, value, noreg, tmp1, tmp2, true);
   __ bind(Lnot_weak);
   __ verify_oop(value);
--- a/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -37,15 +37,15 @@
 
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   const Address& addr, Register dst, Register tmp1, Register tmp2, Label *L_handle_null) {
-  bool on_heap  = (decorators & IN_HEAP) != 0;
-  bool on_root  = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
 
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       __ z_llgf(dst, addr);
       if (L_handle_null != NULL) { // Label provided.
         __ compareU32_and_branch(dst, (intptr_t)0, Assembler::bcondEqual, *L_handle_null);
@@ -67,16 +67,16 @@
 
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    const Address& addr, Register val, Register tmp1, Register tmp2, Register tmp3) {
-  bool on_heap  = (decorators & IN_HEAP) != 0;
-  bool on_root  = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool not_null = (decorators & OOP_NOT_NULL) != 0;
-  assert(on_heap || on_root, "where?");
+  assert(in_heap || in_native, "where?");
   assert_different_registers(val, tmp1, tmp2);
 
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (UseCompressedOops && on_heap) {
+    if (UseCompressedOops && in_heap) {
       if (val == noreg) {
         __ clear_mem(addr, 4);
       } else if (Universe::narrow_oop_mode() == Universe::UnscaledNarrowOop) {
--- a/src/hotspot/cpu/s390/macroAssembler_s390.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/s390/macroAssembler_s390.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -4051,7 +4051,7 @@
 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators,
                                      const Address& addr, Register val,
                                      Register tmp1, Register tmp2, Register tmp3) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_UNKNOWN_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   decorators = AccessInternal::decorator_fixup(decorators);
@@ -4070,7 +4070,7 @@
 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators,
                                     const Address& addr, Register dst,
                                     Register tmp1, Register tmp2, Label *is_null) {
-  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_ROOT | OOP_NOT_NULL |
+  assert((decorators & ~(AS_RAW | IN_HEAP | IN_HEAP_ARRAY | IN_NATIVE | OOP_NOT_NULL |
                          ON_PHANTOM_OOP_REF | ON_WEAK_OOP_REF)) == 0, "unsupported decorator");
   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
   decorators = AccessInternal::decorator_fixup(decorators);
--- a/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -32,14 +32,14 @@
 
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    Register val, Address dst, Register tmp) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
 
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (on_heap) {
+    if (in_heap) {
       if (dst.has_disp() && !Assembler::is_simm13(dst.disp())) {
         assert(!dst.has_index(), "not supported yet");
         __ set(dst.disp(), tmp);
@@ -57,7 +57,7 @@
         __ st_ptr(val, dst);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ st_ptr(val, dst);
     }
     break;
@@ -68,14 +68,14 @@
 
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Address src, Register dst, Register tmp) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
 
   switch (type) {
   case T_ARRAY:
   case T_OBJECT: {
-    if (on_heap) {
+    if (in_heap) {
       if (src.has_disp() && !Assembler::is_simm13(src.disp())) {
         assert(!src.has_index(), "not supported yet");
         __ set(src.disp(), tmp);
@@ -92,7 +92,7 @@
         __ ld_ptr(src, dst);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ ld_ptr(src, dst);
     }
     break;
--- a/src/hotspot/cpu/sparc/macroAssembler_sparc.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/sparc/macroAssembler_sparc.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -176,7 +176,7 @@
   delayed()->andcc(value, JNIHandles::weak_tag_mask, G0); // Test for jweak
   brx(Assembler::zero, true, Assembler::pt, not_weak);
   delayed()->nop();
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                  Address(value, -JNIHandles::weak_tag_value), value, tmp);
   verify_oop(value);
   br (Assembler::always, true, Assembler::pt, done);
--- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -31,15 +31,15 @@
 
 void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Register dst, Address src, Register tmp1, Register tmp_thread) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   bool atomic = (decorators & MO_RELAXED) != 0;
 
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
 #ifdef _LP64
       if (UseCompressedOops) {
         __ movl(dst, src);
@@ -54,7 +54,7 @@
         __ movptr(dst, src);
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       __ movptr(dst, src);
     }
     break;
@@ -96,15 +96,15 @@
 
 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    Address dst, Register val, Register tmp1, Register tmp2) {
-  bool on_heap = (decorators & IN_HEAP) != 0;
-  bool on_root = (decorators & IN_ROOT) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
+  bool in_native = (decorators & IN_NATIVE) != 0;
   bool oop_not_null = (decorators & OOP_NOT_NULL) != 0;
   bool atomic = (decorators & MO_RELAXED) != 0;
 
   switch (type) {
   case T_OBJECT:
   case T_ARRAY: {
-    if (on_heap) {
+    if (in_heap) {
       if (val == noreg) {
         assert(!oop_not_null, "inconsistent access");
 #ifdef _LP64
@@ -133,7 +133,7 @@
         }
       }
     } else {
-      assert(on_root, "why else?");
+      assert(in_native, "why else?");
       assert(val != noreg, "not supported");
       __ movptr(dst, val);
     }
--- a/src/hotspot/cpu/x86/macroAssembler_x86.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/cpu/x86/macroAssembler_x86.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -5266,7 +5266,7 @@
   testptr(value, JNIHandles::weak_tag_mask); // Test for jweak tag.
   jcc(Assembler::zero, not_weak);
   // Resolve jweak.
-  access_load_at(T_OBJECT, IN_ROOT | ON_PHANTOM_OOP_REF,
+  access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
                  value, Address(value, -JNIHandles::weak_tag_value), tmp, thread);
   verify_oop(value);
   jmp(done);
--- a/src/hotspot/share/classfile/classLoaderData.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/classfile/classLoaderData.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -815,7 +815,7 @@
     assert(_handles.owner_of(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
     // This root is not walked in safepoints, and hence requires an appropriate
     // decorator that e.g. maintains the SATB invariant in SATB collectors.
-    RootAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL));
+    NativeAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL));
   }
 }
 
--- a/src/hotspot/share/gc/g1/c2/g1BarrierSetC2.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/gc/g1/c2/g1BarrierSetC2.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -601,10 +601,10 @@
 
   bool mismatched = (decorators & C2_MISMATCHED) != 0;
   bool unknown = (decorators & ON_UNKNOWN_OOP_REF) != 0;
-  bool on_heap = (decorators & IN_HEAP) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
   bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
   bool is_unordered = (decorators & MO_UNORDERED) != 0;
-  bool need_cpu_mem_bar = !is_unordered || mismatched || !on_heap;
+  bool need_cpu_mem_bar = !is_unordered || mismatched || !in_heap;
 
   Node* offset = adr->is_AddP() ? adr->in(AddPNode::Offset) : kit->top();
   Node* load = CardTableBarrierSetC2::load_at_resolved(access, val_type);
@@ -615,7 +615,7 @@
   // SATB log buffer using the pre-barrier mechanism.
   // Also we need to add memory barrier to prevent commoning reads
   // from this field across safepoint since GC can change its value.
-  bool need_read_barrier = on_heap && (on_weak ||
+  bool need_read_barrier = in_heap && (on_weak ||
                                        (unknown && offset != kit->top() && obj != kit->top()));
 
   if (!access.is_oop() || !need_read_barrier) {
--- a/src/hotspot/share/gc/shared/barrierSet.hpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/gc/shared/barrierSet.hpp	Fri Jun 15 16:53:58 2018 -0400
@@ -270,7 +270,7 @@
     }
 
     // Off-heap oop accesses. These accessors get resolved when
-    // IN_HEAP is not set (e.g. when using the RootAccess API), it is
+    // IN_HEAP is not set (e.g. when using the NativeAccess API), it is
     // an oop* overload, and the barrier strength is AS_NORMAL.
     template <typename T>
     static oop oop_load_not_in_heap(T* addr) {
--- a/src/hotspot/share/gc/shared/c2/barrierSetC2.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/gc/shared/c2/barrierSetC2.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -41,7 +41,7 @@
   bool mismatched = (_decorators & C2_MISMATCHED) != 0;
   bool is_unordered = (_decorators & MO_UNORDERED) != 0;
   bool anonymous = (_decorators & C2_UNSAFE_ACCESS) != 0;
-  bool on_heap = (_decorators & IN_HEAP) != 0;
+  bool in_heap = (_decorators & IN_HEAP) != 0;
 
   bool is_write = (_decorators & C2_WRITE_ACCESS) != 0;
   bool is_read = (_decorators & C2_READ_ACCESS) != 0;
@@ -58,7 +58,7 @@
     // the barriers get omitted and the unsafe reference begins to "pollute"
     // the alias analysis of the rest of the graph, either Compile::can_alias
     // or Compile::must_alias will throw a diagnostic assert.)
-    if (!on_heap || !is_unordered || (mismatched && !_addr.type()->isa_aryptr())) {
+    if (!in_heap || !is_unordered || (mismatched && !_addr.type()->isa_aryptr())) {
       return true;
     }
   }
@@ -74,8 +74,8 @@
   bool unaligned = (decorators & C2_UNALIGNED) != 0;
   bool requires_atomic_access = (decorators & MO_UNORDERED) == 0;
 
-  bool in_root = (decorators & IN_ROOT) != 0;
-  assert(!in_root, "not supported yet");
+  bool in_native = (decorators & IN_NATIVE) != 0;
+  assert(!in_native, "not supported yet");
 
   if (access.type() == T_DOUBLE) {
     Node* new_val = kit->dstore_rounding(val.node());
@@ -103,8 +103,8 @@
   bool control_dependent = (decorators & C2_CONTROL_DEPENDENT_LOAD) != 0;
   bool pinned = (decorators & C2_PINNED_LOAD) != 0;
 
-  bool in_root = (decorators & IN_ROOT) != 0;
-  assert(!in_root, "not supported yet");
+  bool in_native = (decorators & IN_NATIVE) != 0;
+  assert(!in_native, "not supported yet");
 
   MemNode::MemOrd mo = access.mem_node_mo();
   LoadNode::ControlDependency dep = pinned ? LoadNode::Pinned : LoadNode::DependsOnlyOnTest;
--- a/src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/gc/shared/c2/modRefBarrierSetC2.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -39,10 +39,10 @@
 
   bool on_array = (decorators & IN_HEAP_ARRAY) != 0;
   bool anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
-  bool on_heap = (decorators & IN_HEAP) != 0;
+  bool in_heap = (decorators & IN_HEAP) != 0;
   bool use_precise = on_array || anonymous;
 
-  if (!access.is_oop() || (!on_heap && !anonymous)) {
+  if (!access.is_oop() || (!in_heap && !anonymous)) {
     return BarrierSetC2::store_at_resolved(access, val);
   }
 
--- a/src/hotspot/share/gc/shared/stringdedup/stringDedupTable.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/gc/shared/stringdedup/stringDedupTable.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -286,7 +286,7 @@
         // Apply proper barrier to make sure it is kept alive. Concurrent mark might
         // otherwise declare it dead if there are no other strong references to this object.
         oop* obj_addr = (oop*)entry->obj_addr();
-        oop obj = RootAccess<IN_CONCURRENT_ROOT | ON_WEAK_OOP_REF>::oop_load(obj_addr);
+        oop obj = NativeAccess<IN_CONCURRENT_ROOT | ON_WEAK_OOP_REF>::oop_load(obj_addr);
         return typeArrayOop(obj);
       }
     }
--- a/src/hotspot/share/gc/z/zOopClosures.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/gc/z/zOopClosures.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -57,7 +57,7 @@
 void ZVerifyRootOopClosure::do_oop(oop* p) {
   guarantee(!ZHeap::heap()->is_in((uintptr_t)p), "oop* " PTR_FORMAT " in heap", p2i(p));
 
-  const oop obj = RootAccess<>::oop_load(p);
+  const oop obj = NativeAccess<>::oop_load(p);
   z_verify_loaded_object(p, obj);
 }
 
--- a/src/hotspot/share/oops/access.hpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/oops/access.hpp	Fri Jun 15 16:53:58 2018 -0400
@@ -121,7 +121,7 @@
   static void verify_heap_oop_decorators() {
     const DecoratorSet heap_oop_decorators = AS_DECORATOR_MASK | ON_DECORATOR_MASK |
                                              OOP_DECORATOR_MASK | (IN_DECORATOR_MASK ^
-                                                                   (IN_ROOT | IN_CONCURRENT_ROOT)); // no root accesses in the heap
+                                                                   (IN_NATIVE | IN_CONCURRENT_ROOT)); // no root accesses in the heap
     verify_decorators<expected_mo_decorators | heap_oop_decorators>();
   }
 
@@ -296,7 +296,7 @@
 // Helper for performing normal accesses in roots. These accesses
 // may resolve an accessor on a GC barrier set
 template <DecoratorSet decorators = INTERNAL_EMPTY>
-class RootAccess: public Access<IN_ROOT | decorators> {};
+class NativeAccess: public Access<IN_NATIVE | decorators> {};
 
 // Helper for array access.
 template <DecoratorSet decorators = INTERNAL_EMPTY>
@@ -376,10 +376,10 @@
   ));
   const DecoratorSet location_decorators = decorators & IN_DECORATOR_MASK;
   STATIC_ASSERT(location_decorators == 0 || ( // make sure location decorators are disjoint if set
-    (location_decorators ^ IN_ROOT) == 0 ||
+    (location_decorators ^ IN_NATIVE) == 0 ||
     (location_decorators ^ IN_HEAP) == 0 ||
     (location_decorators ^ (IN_HEAP | IN_HEAP_ARRAY)) == 0 ||
-    (location_decorators ^ (IN_ROOT | IN_CONCURRENT_ROOT)) == 0
+    (location_decorators ^ (IN_NATIVE | IN_CONCURRENT_ROOT)) == 0
   ));
 }
 
--- a/src/hotspot/share/oops/accessDecorators.hpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/oops/accessDecorators.hpp	Fri Jun 15 16:53:58 2018 -0400
@@ -184,16 +184,16 @@
 //   be omitted if this decorator is not set.
 // * IN_HEAP_ARRAY: The access is performed on a heap allocated array. This is sometimes a special case
 //   for some GCs, and implies that it is an IN_HEAP.
-// * IN_ROOT: The access is performed in an off-heap data structure pointing into the Java heap.
+// * IN_NATIVE: The access is performed in an off-heap data structure pointing into the Java heap.
 // * IN_CONCURRENT_ROOT: The access is performed in an off-heap data structure pointing into the Java heap,
 //   but is notably not scanned during safepoints. This is sometimes a special case for some GCs and
-//   implies that it is also an IN_ROOT.
+//   implies that it is also an IN_NATIVE.
 const DecoratorSet IN_HEAP            = UCONST64(1) << 20;
 const DecoratorSet IN_HEAP_ARRAY      = UCONST64(1) << 21;
-const DecoratorSet IN_ROOT            = UCONST64(1) << 22;
+const DecoratorSet IN_NATIVE          = UCONST64(1) << 22;
 const DecoratorSet IN_CONCURRENT_ROOT = UCONST64(1) << 23;
 const DecoratorSet IN_DECORATOR_MASK  = IN_HEAP | IN_HEAP_ARRAY |
-                                        IN_ROOT | IN_CONCURRENT_ROOT;
+                                        IN_NATIVE | IN_CONCURRENT_ROOT;
 
 // == Value Decorators ==
 // * OOP_NOT_NULL: This property can make certain barriers faster such as compressing oops.
@@ -242,7 +242,7 @@
     static const DecoratorSet heap_array_is_in_heap = barrier_strength_default |
       ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
     static const DecoratorSet conc_root_is_root = heap_array_is_in_heap |
-      ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_ROOT : INTERNAL_EMPTY);
+      ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
     static const DecoratorSet value = conc_root_is_root | BT_BUILDTIME_DECORATORS;
   };
 
@@ -263,7 +263,7 @@
     DecoratorSet heap_array_is_in_heap = barrier_strength_default |
       ((IN_HEAP_ARRAY & barrier_strength_default) != 0 ? IN_HEAP : INTERNAL_EMPTY);
     DecoratorSet conc_root_is_root = heap_array_is_in_heap |
-      ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_ROOT : INTERNAL_EMPTY);
+      ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
     DecoratorSet value = conc_root_is_root | BT_BUILDTIME_DECORATORS;
     return value;
   }
--- a/src/hotspot/share/oops/oopHandle.inline.hpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/oops/oopHandle.inline.hpp	Fri Jun 15 16:53:58 2018 -0400
@@ -29,7 +29,7 @@
 #include "oops/oopHandle.hpp"
 
 inline oop OopHandle::resolve() const {
-  return (_obj == NULL) ? (oop)NULL : RootAccess<IN_CONCURRENT_ROOT>::oop_load(_obj);
+  return (_obj == NULL) ? (oop)NULL : NativeAccess<IN_CONCURRENT_ROOT>::oop_load(_obj);
 }
 
 #endif //  SHARE_VM_OOPS_OOPHANDLE_INLINE_HPP
--- a/src/hotspot/share/oops/weakHandle.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/oops/weakHandle.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -48,7 +48,7 @@
     vm_exit_out_of_memory(sizeof(oop*), OOM_MALLOC_ERROR, "Unable to create new weak oop handle in OopStorage");
   }
   // Create WeakHandle with address returned and store oop into it.
-  RootAccess<ON_PHANTOM_OOP_REF>::oop_store(oop_addr, obj());
+  NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(oop_addr, obj());
   return WeakHandle(oop_addr);
 }
 
@@ -58,7 +58,7 @@
   if (_obj != NULL) {
     // Clear the WeakHandle.  For race in creating ClassLoaderData, we can release this
     // WeakHandle before it is cleared by GC.
-    RootAccess<ON_PHANTOM_OOP_REF>::oop_store(_obj, (oop)NULL);
+    NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(_obj, (oop)NULL);
     get_storage()->release(_obj);
   }
 }
--- a/src/hotspot/share/oops/weakHandle.inline.hpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/oops/weakHandle.inline.hpp	Fri Jun 15 16:53:58 2018 -0400
@@ -31,18 +31,18 @@
 template <WeakHandleType T>
 oop WeakHandle<T>::resolve() const {
   assert(!is_null(), "Must be created");
-  return RootAccess<ON_PHANTOM_OOP_REF>::oop_load(_obj);
+  return NativeAccess<ON_PHANTOM_OOP_REF>::oop_load(_obj);
 }
 
 template <WeakHandleType T>
 oop WeakHandle<T>::peek() const {
   assert(!is_null(), "Must be created");
-  return RootAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(_obj);
+  return NativeAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(_obj);
 }
 
 template <WeakHandleType T>
 void WeakHandle<T>::replace(oop with_obj) {
-  RootAccess<ON_PHANTOM_OOP_REF>::oop_store(_obj, with_obj);
+  NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(_obj, with_obj);
 }
 
 #endif // SHARE_VM_OOPS_WEAKHANDLE_INLINE_HPP
--- a/src/hotspot/share/prims/jvmtiTagMap.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/prims/jvmtiTagMap.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -90,11 +90,11 @@
 
   // accessor methods
   inline oop* object_addr() { return &_object; }
-  inline oop object()       { return RootAccess<ON_PHANTOM_OOP_REF>::oop_load(object_addr()); }
+  inline oop object()       { return NativeAccess<ON_PHANTOM_OOP_REF>::oop_load(object_addr()); }
   // Peek at the object without keeping it alive. The returned object must be
   // kept alive using a normal access if it leaks out of a thread transition from VM.
   inline oop object_peek()  {
-    return RootAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(object_addr());
+    return NativeAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(object_addr());
   }
   inline jlong tag() const  { return _tag; }
 
--- a/src/hotspot/share/runtime/jniHandles.cpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/runtime/jniHandles.cpp	Fri Jun 15 16:53:58 2018 -0400
@@ -109,7 +109,7 @@
     // Return NULL on allocation failure.
     if (ptr != NULL) {
       assert(*ptr == NULL, "invariant");
-      RootAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, obj());
+      NativeAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, obj());
       res = reinterpret_cast<jobject>(ptr);
     } else {
       report_handle_allocation_failure(alloc_failmode, "global");
@@ -133,7 +133,7 @@
     // Return NULL on allocation failure.
     if (ptr != NULL) {
       assert(*ptr == NULL, "invariant");
-      RootAccess<ON_PHANTOM_OOP_REF>::oop_store(ptr, obj());
+      NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(ptr, obj());
       char* tptr = reinterpret_cast<char*>(ptr) + weak_tag_value;
       res = reinterpret_cast<jobject>(tptr);
     } else {
@@ -160,14 +160,14 @@
 oop JNIHandles::resolve_jweak(jweak handle) {
   assert(handle != NULL, "precondition");
   assert(is_jweak(handle), "precondition");
-  return RootAccess<ON_PHANTOM_OOP_REF>::oop_load(jweak_ptr(handle));
+  return NativeAccess<ON_PHANTOM_OOP_REF>::oop_load(jweak_ptr(handle));
 }
 
 bool JNIHandles::is_global_weak_cleared(jweak handle) {
   assert(handle != NULL, "precondition");
   assert(is_jweak(handle), "not a weak handle");
   oop* oop_ptr = jweak_ptr(handle);
-  oop value = RootAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(oop_ptr);
+  oop value = NativeAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(oop_ptr);
   return value == NULL;
 }
 
@@ -175,7 +175,7 @@
   if (handle != NULL) {
     assert(!is_jweak(handle), "wrong method for detroying jweak");
     oop* oop_ptr = jobject_ptr(handle);
-    RootAccess<IN_CONCURRENT_ROOT>::oop_store(oop_ptr, (oop)NULL);
+    NativeAccess<IN_CONCURRENT_ROOT>::oop_store(oop_ptr, (oop)NULL);
     global_handles()->release(oop_ptr);
   }
 }
@@ -185,7 +185,7 @@
   if (handle != NULL) {
     assert(is_jweak(handle), "JNI handle not jweak");
     oop* oop_ptr = jweak_ptr(handle);
-    RootAccess<ON_PHANTOM_OOP_REF>::oop_store(oop_ptr, (oop)NULL);
+    NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(oop_ptr, (oop)NULL);
     weak_global_handles()->release(oop_ptr);
   }
 }
@@ -517,7 +517,7 @@
   // Try last block
   if (_last->_top < block_size_in_oops) {
     oop* handle = &(_last->_handles)[_last->_top++];
-    RootAccess<AS_DEST_NOT_INITIALIZED>::oop_store(handle, obj);
+    NativeAccess<AS_DEST_NOT_INITIALIZED>::oop_store(handle, obj);
     return (jobject) handle;
   }
 
@@ -525,7 +525,7 @@
   if (_free_list != NULL) {
     oop* handle = _free_list;
     _free_list = (oop*) *_free_list;
-    RootAccess<AS_DEST_NOT_INITIALIZED>::oop_store(handle, obj);
+    NativeAccess<AS_DEST_NOT_INITIALIZED>::oop_store(handle, obj);
     return (jobject) handle;
   }
   // Check if unused block follow last
--- a/src/hotspot/share/runtime/jniHandles.inline.hpp	Fri Jun 15 13:08:32 2018 -0700
+++ b/src/hotspot/share/runtime/jniHandles.inline.hpp	Fri Jun 15 16:53:58 2018 -0400
@@ -57,7 +57,7 @@
   if (is_jweak(handle)) {       // Unlikely
     result = resolve_jweak(handle);
   } else {
-    result = RootAccess<IN_CONCURRENT_ROOT>::oop_load(jobject_ptr(handle));
+    result = NativeAccess<IN_CONCURRENT_ROOT>::oop_load(jobject_ptr(handle));
     // Construction of jobjects canonicalize a null value into a null
     // jobject, so for non-jweak the pointee should never be null.
     assert(external_guard || result != NULL, "Invalid JNI handle");
@@ -83,7 +83,7 @@
 inline void JNIHandles::destroy_local(jobject handle) {
   if (handle != NULL) {
     assert(!is_jweak(handle), "Invalid JNI local handle");
-    RootAccess<>::oop_store(jobject_ptr(handle), (oop)NULL);
+    NativeAccess<>::oop_store(jobject_ptr(handle), (oop)NULL);
   }
 }
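
For reference, a minimal sketch of the location-decorator implication that the accessDecorators.hpp hunk above keeps intact under the new name: IN_HEAP_ARRAY still implies IN_HEAP, and IN_CONCURRENT_ROOT now implies IN_NATIVE (formerly IN_ROOT). Only these two rules are modeled here; the real AccessInternal::decorator_fixup also resolves barrier strength, memory ordering, and build-time decorators.

    #include <cassert>
    #include <cstdint>

    typedef uint64_t DecoratorSet;
    const DecoratorSet INTERNAL_EMPTY     = 0;
    const DecoratorSet IN_HEAP            = DecoratorSet(1) << 20;
    const DecoratorSet IN_HEAP_ARRAY      = DecoratorSet(1) << 21;
    const DecoratorSet IN_NATIVE          = DecoratorSet(1) << 22;
    const DecoratorSet IN_CONCURRENT_ROOT = DecoratorSet(1) << 23;

    // Assumed simplification of the location part of decorator_fixup.
    static DecoratorSet fixup_location_decorators(DecoratorSet decorators) {
      DecoratorSet heap_array_is_in_heap = decorators |
        ((IN_HEAP_ARRAY & decorators) != 0 ? IN_HEAP : INTERNAL_EMPTY);
      DecoratorSet conc_root_is_root = heap_array_is_in_heap |
        ((IN_CONCURRENT_ROOT & heap_array_is_in_heap) != 0 ? IN_NATIVE : INTERNAL_EMPTY);
      return conc_root_is_root;
    }

    int main() {
      assert((fixup_location_decorators(IN_HEAP_ARRAY) & IN_HEAP) != 0);
      assert((fixup_location_decorators(IN_CONCURRENT_ROOT) & IN_NATIVE) != 0);
      assert(fixup_location_decorators(IN_NATIVE) == IN_NATIVE);
      return 0;
    }
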