--- a/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.hpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.hpp Fri Jun 22 16:07:15 2018 +0200
@@ -48,6 +48,10 @@
virtual void obj_equals(MacroAssembler* masm,
Register obj1, Register obj2);
+ virtual void resolve(MacroAssembler* masm, DecoratorSet decorators, Register obj) {
+ // Default implementation does not need to do anything.
+ }
+
virtual void try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
Register obj, Register tmp, Label& slowpath);
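
The default resolve() above is a no-op; a collector-specific BarrierSetAssembler is expected to override it so that the register holds a pointer that is safe to use for the access indicated by the decorators. The sketch below is illustration only and not part of this patch: the class name and the emit_read_barrier/emit_write_barrier helpers are hypothetical, and merely show how the ACCESS_READ/ACCESS_WRITE decorators introduced at the end of this patch are meant to be consulted.

// Illustration only -- a hypothetical concrete barrier set assembler.
class MyBarrierSetAssembler : public BarrierSetAssembler {
private:
  // Hypothetical helpers; a real collector emits its own barrier code here.
  void emit_read_barrier(MacroAssembler* masm, Register obj)  { /* collector-specific */ }
  void emit_write_barrier(MacroAssembler* masm, Register obj) { /* collector-specific */ }
public:
  virtual void resolve(MacroAssembler* masm, DecoratorSet decorators, Register obj) {
    if ((decorators & ACCESS_WRITE) != 0) {
      // The resolved object may be written through, so emit the stronger barrier.
      emit_write_barrier(masm, obj);
    } else {
      // Read-only access can use a weaker, cheaper barrier.
      emit_read_barrier(masm, obj);
    }
  }
};
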
--- a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -3990,6 +3990,15 @@
}
}
+void MacroAssembler::resolve(DecoratorSet decorators, Register obj) {
+ // Default to the stronger ACCESS_READ | ACCESS_WRITE if the caller specified neither.
+ if ((decorators & (ACCESS_READ | ACCESS_WRITE)) == 0) {
+ decorators |= ACCESS_READ | ACCESS_WRITE;
+ }
+ BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
+ return bs->resolve(this, decorators, obj);
+}
+
void MacroAssembler::load_heap_oop(Register dst, Address src, Register tmp1,
Register thread_tmp, DecoratorSet decorators) {
access_load_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, thread_tmp);
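
The decorator defaulting in MacroAssembler::resolve() above means that a caller which states neither ACCESS_READ nor ACCESS_WRITE gets the conservative read-plus-write resolve, while a caller that knows it only reads can ask for the weaker variant. The two lines below simply restate call shapes that appear later in this patch (monitor locking and the CRC32 intrinsics); they are not additional changes.

  // Locking writes the object header; with no hint the call defaults to ACCESS_READ | ACCESS_WRITE.
  __ resolve(IS_NOT_NULL, obj_reg);

  // The CRC32 intrinsic only reads the byte[]; ACCESS_READ allows a weaker barrier.
  __ resolve(IS_NOT_NULL | ACCESS_READ, buf);
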
--- a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp Fri Jun 22 16:07:15 2018 +0200
@@ -795,6 +795,10 @@
void access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register src,
Register tmp1, Register tmp_thread);
+ // Resolves obj for access. Result is placed in the same register.
+ // All other registers are preserved.
+ void resolve(DecoratorSet decorators, Register obj);
+
void load_heap_oop(Register dst, Address src, Register tmp1 = noreg,
Register thread_tmp = noreg, DecoratorSet decorators = 0);
--- a/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -1839,6 +1839,8 @@
// Load the oop from the handle
__ ldr(obj_reg, Address(oop_handle_reg, 0));
+ __ resolve(IS_NOT_NULL, obj_reg);
+
if (UseBiasedLocking) {
__ biased_locking_enter(lock_reg, obj_reg, swap_reg, tmp, false, lock_done, &slow_path_lock);
}
@@ -2001,6 +2003,8 @@
// Get locked oop from the handle we passed to jni
__ ldr(obj_reg, Address(oop_handle_reg, 0));
+ __ resolve(IS_NOT_NULL, obj_reg);
+
Label done;
if (UseBiasedLocking) {
--- a/src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -836,6 +836,7 @@
#endif // ASSERT
__ bind(done);
+ __ resolve(IS_NOT_NULL, r0);
}
// add space for monitor & lock
@@ -1062,6 +1063,7 @@
__ ldrw(crc, Address(esp, 4*wordSize)); // Initial CRC
} else {
__ ldr(buf, Address(esp, 2*wordSize)); // byte[] array
+ __ resolve(IS_NOT_NULL | ACCESS_READ, buf);
__ add(buf, buf, arrayOopDesc::base_offset_in_bytes(T_BYTE)); // + header size
__ ldrw(off, Address(esp, wordSize)); // offset
__ add(buf, buf, off); // + offset
@@ -1106,6 +1108,9 @@
__ ldrw(off, Address(esp, wordSize)); // int offset
__ sub(len, end, off);
__ ldr(buf, Address(esp, 2*wordSize)); // byte[] buf | long buf
+ if (kind == Interpreter::java_util_zip_CRC32C_updateBytes) {
+ __ resolve(IS_NOT_NULL | ACCESS_READ, buf);
+ }
__ add(buf, buf, off); // + offset
if (kind == Interpreter::java_util_zip_CRC32C_updateDirectByteBuffer) {
__ ldrw(crc, Address(esp, 4*wordSize)); // long crc
--- a/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/aarch64/templateTable_aarch64.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -3840,6 +3840,8 @@
// check for NULL object
__ null_check(r0);
+ __ resolve(IS_NOT_NULL, r0);
+
const Address monitor_block_top(
rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
const Address monitor_block_bot(
@@ -3939,6 +3941,8 @@
// check for NULL object
__ null_check(r0);
+ __ resolve(IS_NOT_NULL, r0);
+
const Address monitor_block_top(
rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
const Address monitor_block_bot(
--- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.hpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.hpp Fri Jun 22 16:07:15 2018 +0200
@@ -61,6 +61,10 @@
virtual void obj_equals(MacroAssembler* masm,
Register obj1, Address obj2);
+ virtual void resolve(MacroAssembler* masm, DecoratorSet decorators, Register obj) {
+ // Default implementation does not need to do anything.
+ }
+
// Support for jniFastGetField to try resolving a jobject/jweak in native
virtual void try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
Register obj, Register tmp, Label& slowpath);
--- a/src/hotspot/cpu/x86/macroAssembler_x86.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/x86/macroAssembler_x86.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -6287,6 +6287,15 @@
}
}
+void MacroAssembler::resolve(DecoratorSet decorators, Register obj) {
+ // Default to the stronger ACCESS_READ | ACCESS_WRITE if the caller specified neither.
+ if ((decorators & (ACCESS_READ | ACCESS_WRITE)) == 0) {
+ decorators |= ACCESS_READ | ACCESS_WRITE;
+ }
+ BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
+ return bs->resolve(this, decorators, obj);
+}
+
void MacroAssembler::load_heap_oop(Register dst, Address src, Register tmp1,
Register thread_tmp, DecoratorSet decorators) {
access_load_at(T_OBJECT, IN_HEAP | decorators, dst, src, tmp1, thread_tmp);
--- a/src/hotspot/cpu/x86/macroAssembler_x86.hpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/x86/macroAssembler_x86.hpp Fri Jun 22 16:07:15 2018 +0200
@@ -319,6 +319,10 @@
void access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register src,
Register tmp1, Register tmp2);
+ // Resolves obj for access. Result is placed in the same register.
+ // All other registers are preserved.
+ void resolve(DecoratorSet decorators, Register obj);
+
void load_heap_oop(Register dst, Address src, Register tmp1 = noreg,
Register thread_tmp = noreg, DecoratorSet decorators = 0);
void load_heap_oop_not_null(Register dst, Address src, Register tmp1 = noreg,
--- a/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -2450,6 +2450,7 @@
// Load the oop from the handle
__ movptr(obj_reg, Address(oop_handle_reg, 0));
+ __ resolve(IS_NOT_NULL, obj_reg);
if (UseBiasedLocking) {
__ biased_locking_enter(lock_reg, obj_reg, swap_reg, rscratch1, false, lock_done, &slow_path_lock);
}
@@ -2635,6 +2636,7 @@
// Get locked oop from the handle we passed to jni
__ movptr(obj_reg, Address(oop_handle_reg, 0));
+ __ resolve(IS_NOT_NULL, obj_reg);
Label done;
--- a/src/hotspot/cpu/x86/templateInterpreterGenerator_x86.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/x86/templateInterpreterGenerator_x86.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -635,6 +635,7 @@
#endif // ASSERT
__ bind(done);
+ __ resolve(IS_NOT_NULL, rax);
}
// add space for monitor & lock
--- a/src/hotspot/cpu/x86/templateInterpreterGenerator_x86_64.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/x86/templateInterpreterGenerator_x86_64.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -257,6 +257,7 @@
__ movl(crc, Address(rsp, 5*wordSize)); // Initial CRC
} else {
__ movptr(buf, Address(rsp, 3*wordSize)); // byte[] array
+ __ resolve(IS_NOT_NULL | ACCESS_READ, buf);
__ addptr(buf, arrayOopDesc::base_offset_in_bytes(T_BYTE)); // + header size
__ movl2ptr(off, Address(rsp, 2*wordSize)); // offset
__ addq(buf, off); // + offset
@@ -312,6 +313,7 @@
// "When calculating operand stack length, values of type long and double have length two."
} else {
__ movptr(buf, Address(rsp, 3 * wordSize)); // byte[] array
+ __ resolve(IS_NOT_NULL | ACCESS_READ, buf);
__ addptr(buf, arrayOopDesc::base_offset_in_bytes(T_BYTE)); // + header size
__ movl2ptr(off, Address(rsp, 2 * wordSize)); // offset
__ addq(buf, off); // + offset
--- a/src/hotspot/cpu/x86/templateTable_x86.cpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/cpu/x86/templateTable_x86.cpp Fri Jun 22 16:07:15 2018 +0200
@@ -4357,6 +4357,8 @@
// check for NULL object
__ null_check(rax);
+ __ resolve(IS_NOT_NULL, rax);
+
const Address monitor_block_top(
rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
const Address monitor_block_bot(
@@ -4454,6 +4456,8 @@
// check for NULL object
__ null_check(rax);
+ __ resolve(IS_NOT_NULL, rax);
+
const Address monitor_block_top(
rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
const Address monitor_block_bot(
--- a/src/hotspot/share/oops/accessDecorators.hpp Thu Aug 09 17:11:11 2018 +0200
+++ b/src/hotspot/share/oops/accessDecorators.hpp Fri Jun 22 16:07:15 2018 +0200
@@ -212,8 +212,15 @@
ARRAYCOPY_DISJOINT | ARRAYCOPY_ARRAYOF |
ARRAYCOPY_ATOMIC | ARRAYCOPY_ALIGNED;
+// == Resolve barrier decorators ==
+// * ACCESS_READ: Indicates that the resolved object is accessed read-only. This allows the GC
+//   backend to use weaker and more efficient barriers.
+// * ACCESS_WRITE: Indicates that the resolved object is used for write access.
+const DecoratorSet ACCESS_READ = UCONST64(1) << 29;
+const DecoratorSet ACCESS_WRITE = UCONST64(1) << 30;
+
// Keep track of the last decorator.
-const DecoratorSet DECORATOR_LAST = UCONST64(1) << 28;
+const DecoratorSet DECORATOR_LAST = UCONST64(1) << 30;
namespace AccessInternal {
// This class adds implied decorators that follow according to decorator rules.
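
As a closing cross-check of the bit assignments above: ACCESS_READ and ACCESS_WRITE occupy the two bits immediately above the previous DECORATOR_LAST (1 << 28), which is why DECORATOR_LAST advances to 1 << 30. The assertions below are illustration only, not part of the patch, and assume HotSpot's STATIC_ASSERT macro (utilities/debug.hpp) is visible in the including translation unit.

  // The two new decorator bits do not overlap, and DECORATOR_LAST now names the highest one.
  STATIC_ASSERT((ACCESS_READ & ACCESS_WRITE) == 0);
  STATIC_ASSERT(DECORATOR_LAST == ACCESS_WRITE);
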