8200235: Generalize jniFastGetField jobject/jweak resolve
author: eosterlund
date:   Thu, 26 Apr 2018 11:56:24 +0200
changeset 49898 4745598b307f
parent 49897 117501815bed
child 49899 b99fcb855107
Reviewed-by: kbarrett, dholmes
src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp
src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.hpp
src/hotspot/cpu/aarch64/jniFastGetField_aarch64.cpp
src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp
src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.hpp
src/hotspot/cpu/sparc/jniFastGetField_sparc.cpp
src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp
src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.hpp
src/hotspot/cpu/x86/jniFastGetField_x86_64.cpp
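
This changeset replaces the inline jobject/jweak resolution in each platform's jniFastGetField stub with a virtual BarrierSetAssembler::try_resolve_jobject_in_native hook, so a collector that needs load barriers can supply its own resolution code and divert to the stub's slow path. As a rough sketch of the intended extension point (not part of this changeset: MyGCBarrierSetAssembler and bad_oop_mask() are illustrative names), an x86 override might look roughly like this:

    #include "precompiled.hpp"
    #include "gc/shared/barrierSetAssembler.hpp"
    #include "runtime/jniHandles.hpp"

    #define __ masm->

    // Hypothetical GC-specific override (sketch only): do the default resolve,
    // then test the loaded oop against a GC-private bad-bits mask and take the
    // slow path if the oop would need healing before it can be used.
    void MyGCBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                                Register robj,
                                                                Register tmp,
                                                                Label& slowpath) {
      BarrierSetAssembler::try_resolve_jobject_in_native(masm, robj, tmp, slowpath);
      __ movptr(tmp, bad_oop_mask());           // illustrative: GC-specific mask value
      __ testptr(robj, tmp);
      __ jcc(Assembler::notZero, slowpath);     // bad bits set -> slow path
    }

The tmp register and slowpath label are exactly what such an override needs; the default implementations added below ignore both.
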
--- a/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp	Thu Apr 26 11:56:24 2018 +0200
@@ -24,6 +24,7 @@
 
 #include "precompiled.hpp"
 #include "gc/shared/barrierSetAssembler.hpp"
+#include "runtime/jniHandles.hpp"
 
 #define __ masm->
 
@@ -64,3 +65,10 @@
   default: Unimplemented();
   }
 }
+
+void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register robj, Register tmp, Label& slowpath) {
+  // If mask changes we need to ensure that the inverse is still encodable as an immediate
+  STATIC_ASSERT(JNIHandles::weak_tag_mask == 1);
+  __ andr(robj, robj, ~JNIHandles::weak_tag_mask);
+  __ ldr(robj, Address(robj, 0));             // *obj
+}
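
For reference, the default implementation added above (and its sparc/x86 counterparts below) is just the assembly form of a plain handle dereference. A minimal C++ analogue, assuming the shared JNIHandles tagging scheme and written only for illustration, would be:

    #include "oops/oopsHierarchy.hpp"
    #include "runtime/jniHandles.hpp"

    // Sketch of what the generated code computes for the default barrier set:
    // clear the low jweak tag bit, then load the oop stored in the handle slot.
    static oop resolve_jobject_in_native_sketch(jobject handle) {
      uintptr_t addr = reinterpret_cast<uintptr_t>(handle);
      addr &= ~JNIHandles::weak_tag_mask;   // same path for jobject and jweak
      return *reinterpret_cast<oop*>(addr); // *obj
    }
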
--- a/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.hpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.hpp	Thu Apr 26 11:56:24 2018 +0200
@@ -40,6 +40,8 @@
   virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                         Address dst, Register val, Register tmp1, Register tmp2);
 
+  virtual void try_resolve_jobject_in_native(MacroAssembler* masm, Register robj, Register tmp, Label& slowpath);
+
   virtual void barrier_stubs_init() {}
 };
 
--- a/src/hotspot/cpu/aarch64/jniFastGetField_aarch64.cpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/aarch64/jniFastGetField_aarch64.cpp	Thu Apr 26 11:56:24 2018 +0200
@@ -25,6 +25,8 @@
 
 #include "precompiled.hpp"
 #include "asm/macroAssembler.hpp"
+#include "gc/shared/barrierSet.hpp"
+#include "gc/shared/barrierSetAssembler.hpp"
 #include "memory/resourceArea.hpp"
 #include "prims/jniFastGetField.hpp"
 #include "prims/jvm_misc.hpp"
@@ -82,11 +84,9 @@
                                               // robj ^ rcounter ^ rcounter == robj
                                               // robj is address dependent on rcounter.
 
-  // If mask changes we need to ensure that the inverse is still encodable as an immediate
-  STATIC_ASSERT(JNIHandles::weak_tag_mask == 1);
-  __ andr(robj, robj, ~JNIHandles::weak_tag_mask);
+  BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
+  bs->try_resolve_jobject_in_native(masm, robj, rscratch1, slow);
 
-  __ ldr(robj, Address(robj, 0));             // *obj
   __ lsr(roffset, c_rarg2, 2);                // offset
 
   assert(count < LIST_CAPACITY, "LIST_CAPACITY too small");
@@ -177,4 +177,3 @@
 address JNI_FastGetField::generate_fast_get_double_field() {
   return generate_fast_get_int_field0(T_DOUBLE);
 }
-
--- a/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.cpp	Thu Apr 26 11:56:24 2018 +0200
@@ -25,6 +25,7 @@
 #include "precompiled.hpp"
 #include "gc/shared/barrierSetAssembler.hpp"
 #include "interpreter/interp_masm.hpp"
+#include "runtime/jniHandles.hpp"
 
 #define __ masm->
 
@@ -98,3 +99,8 @@
   default: Unimplemented();
   }
 }
+
+void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register robj, Register tmp, Label& slowpath) {
+  __ andn (robj, JNIHandles::weak_tag_mask, robj);
+  __ ld_ptr(robj, 0, robj);
+}
--- a/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.hpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/sparc/gc/shared/barrierSetAssembler_sparc.hpp	Thu Apr 26 11:56:24 2018 +0200
@@ -44,6 +44,9 @@
   virtual void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                        Address src, Register dst, Register tmp);
 
+  // Support for jniFastGetField to try resolving a jobject/jweak in native
+  virtual void try_resolve_jobject_in_native(MacroAssembler* masm, Register robj, Register tmp, Label& slowpath);
+
   virtual void barrier_stubs_init() {}
 };
 
--- a/src/hotspot/cpu/sparc/jniFastGetField_sparc.cpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/sparc/jniFastGetField_sparc.cpp	Thu Apr 26 11:56:24 2018 +0200
@@ -24,6 +24,8 @@
 
 #include "precompiled.hpp"
 #include "asm/macroAssembler.inline.hpp"
+#include "gc/shared/barrierSet.hpp"
+#include "gc/shared/barrierSetAssembler.hpp"
 #include "memory/resourceArea.hpp"
 #include "prims/jniFastGetField.hpp"
 #include "prims/jvm_misc.hpp"
@@ -68,17 +70,18 @@
   __ andcc (G4, 1, G0);
   __ br (Assembler::notZero, false, Assembler::pn, label1);
   __ delayed()->srl (O2, 2, O4);
-  __ andn (O1, JNIHandles::weak_tag_mask, O1);
-  __ ld_ptr (O1, 0, O5);
+
+  BarrierSetAssembler *bs = BarrierSet::barrier_set()->barrier_set_assembler();
+  bs->try_resolve_jobject_in_native(masm, O1, G3_scratch, label1);
 
   assert(count < LIST_CAPACITY, "LIST_CAPACITY too small");
   speculative_load_pclist[count] = __ pc();
   switch (type) {
-    case T_BOOLEAN: __ ldub (O5, O4, G3);  break;
-    case T_BYTE:    __ ldsb (O5, O4, G3);  break;
-    case T_CHAR:    __ lduh (O5, O4, G3);  break;
-    case T_SHORT:   __ ldsh (O5, O4, G3);  break;
-    case T_INT:     __ ld (O5, O4, G3);    break;
+    case T_BOOLEAN: __ ldub (O1, O4, G3);  break;
+    case T_BYTE:    __ ldsb (O1, O4, G3);  break;
+    case T_CHAR:    __ lduh (O1, O4, G3);  break;
+    case T_SHORT:   __ ldsh (O1, O4, G3);  break;
+    case T_INT:     __ ld (O1, O4, G3);    break;
     default:        ShouldNotReachHere();
   }
 
--- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp	Thu Apr 26 11:56:24 2018 +0200
@@ -25,6 +25,7 @@
 #include "precompiled.hpp"
 #include "gc/shared/barrierSetAssembler.hpp"
 #include "interpreter/interp_masm.hpp"
+#include "runtime/jniHandles.hpp"
 
 #define __ masm->
 
@@ -108,3 +109,8 @@
   default: Unimplemented();
   }
 }
+
+void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register robj, Register tmp, Label& slowpath) {
+  __ clear_jweak_tag(robj);
+  __ movptr(robj, Address(robj, 0));
+}
--- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.hpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.hpp	Thu Apr 26 11:56:24 2018 +0200
@@ -44,6 +44,9 @@
   virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                         Address dst, Register val, Register tmp1, Register tmp2);
 
+  // Support for jniFastGetField to try resolving a jobject/jweak in native
+  virtual void try_resolve_jobject_in_native(MacroAssembler* masm, Register robj, Register tmp, Label& slowpath);
+
   virtual void barrier_stubs_init() {}
 };
 
--- a/src/hotspot/cpu/x86/jniFastGetField_x86_64.cpp	Thu Apr 26 09:04:18 2018 +0100
+++ b/src/hotspot/cpu/x86/jniFastGetField_x86_64.cpp	Thu Apr 26 11:56:24 2018 +0200
@@ -24,6 +24,8 @@
 
 #include "precompiled.hpp"
 #include "asm/macroAssembler.hpp"
+#include "gc/shared/barrierSet.hpp"
+#include "gc/shared/barrierSetAssembler.hpp"
 #include "memory/resourceArea.hpp"
 #include "prims/jniFastGetField.hpp"
 #include "prims/jvm_misc.hpp"
@@ -81,12 +83,12 @@
                                                 // robj is data dependent on rcounter.
   }
 
-  __ clear_jweak_tag(robj);
-
-  __ movptr(robj, Address(robj, 0));             // *obj
   __ mov   (roffset, c_rarg2);
   __ shrptr(roffset, 2);                         // offset
 
+  BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
+  bs->try_resolve_jobject_in_native(masm, robj, rscratch1, slow);
+
   assert(count < LIST_CAPACITY, "LIST_CAPACITY too small");
   speculative_load_pclist[count] = __ pc();
   switch (type) {