8217874: Shenandoah: Clobbered register in ShenandoahBarrierSetAssembler::cmpxchg_oop()
author rkennke
date Mon, 11 Feb 2019 16:49:08 +0100
changeset 53719 3a56e823d843
parent 53718 066d23ce545e
child 53720 3e451bff6f7f
8217874: Shenandoah: Clobbered register in ShenandoahBarrierSetAssembler::cmpxchg_oop() Reviewed-by: adinn
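The gist of the fix, as the diff below shows: the old code used the caller-supplied result register as scratch storage throughout the LL/SC loop and the fail path, which is why the old assert_different_registers() had to list result. The new code routes every intermediate value through rscratch1/rscratch2 (tmp1/tmp2) and writes result exactly once, after the done label, so result may now alias one of the inputs without a live value being clobbered. What follows is a minimal C++ sketch of that discipline only; the names are illustrative, not HotSpot code.

#include <cassert>

// Minimal sketch (illustrative, not HotSpot code) of the discipline the
// patch adopts: do every intermediate step in temporaries and write the
// output exactly once, so the output may safely alias an input.
static void cas_shape(long* addr, long* expected, long new_val,
                      long* result /* may alias 'expected' */) {
  long tmp1 = *addr;              // stand-in for load_exclusive into tmp1
  if (tmp1 == *expected) {        // last read of 'expected'
    *addr = new_val;              // stand-in for store_exclusive
  }
  *result = tmp1;                 // single, final write to the output
}

int main() {
  long cell = 42;
  long expected = 42;
  cas_shape(&cell, &expected, 7, &expected);  // result aliases expected
  assert(cell == 7 && expected == 42);        // swap happened; nothing corrupted
  return 0;
}

Writing through result any earlier, as the old code's load_exclusive(result, addr, size, acquire) did, would corrupt expected whenever the two registers coincide.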
src/hotspot/cpu/aarch64/gc/shenandoah/shenandoahBarrierSetAssembler_aarch64.cpp
--- a/src/hotspot/cpu/aarch64/gc/shenandoah/shenandoahBarrierSetAssembler_aarch64.cpp	Mon Feb 11 07:19:32 2019 -0500
+++ b/src/hotspot/cpu/aarch64/gc/shenandoah/shenandoahBarrierSetAssembler_aarch64.cpp	Mon Feb 11 16:49:08 2019 +0100
@@ -425,54 +425,56 @@
 void ShenandoahBarrierSetAssembler::cmpxchg_oop(MacroAssembler* masm, Register addr, Register expected, Register new_val,
                                                 bool acquire, bool release, bool weak, bool is_cae,
                                                 Register result) {
-
-  Register tmp = rscratch2;
+  Register tmp1 = rscratch1;
+  Register tmp2 = rscratch2;
   bool is_narrow = UseCompressedOops;
   Assembler::operand_size size = is_narrow ? Assembler::word : Assembler::xword;
 
-  assert_different_registers(addr, expected, new_val, result, tmp);
+  assert_different_registers(addr, expected, new_val, tmp1, tmp2);
 
   Label retry, done, fail;
 
   // CAS, using LL/SC pair.
   __ bind(retry);
-  __ load_exclusive(result, addr, size, acquire);
+  __ load_exclusive(tmp1, addr, size, acquire);
   if (is_narrow) {
-    __ cmpw(result, expected);
+    __ cmpw(tmp1, expected);
   } else {
-    __ cmp(result, expected);
+    __ cmp(tmp1, expected);
   }
   __ br(Assembler::NE, fail);
-  __ store_exclusive(tmp, new_val, addr, size, release);
+  __ store_exclusive(tmp2, new_val, addr, size, release);
   if (weak) {
-    __ cmpw(tmp, 0u); // If the store fails, return NE to our caller
+    __ cmpw(tmp2, 0u); // If the store fails, return NE to our caller
   } else {
-    __ cbnzw(tmp, retry);
+    __ cbnzw(tmp2, retry);
   }
   __ b(done);
 
   __ bind(fail);
-  // Check if rb(expected)==rb(result)
+  // Check if rb(expected)==rb(tmp1)
   // Shuffle registers so that we have memory value ready for next expected.
-  __ mov(tmp, expected);
-  __ mov(expected, result);
+  __ mov(tmp2, expected);
+  __ mov(expected, tmp1);
   if (is_narrow) {
-    __ decode_heap_oop(result, result);
-    __ decode_heap_oop(tmp, tmp);
+    __ decode_heap_oop(tmp1, tmp1);
+    __ decode_heap_oop(tmp2, tmp2);
   }
-  read_barrier_impl(masm, result);
-  read_barrier_impl(masm, tmp);
-  __ cmp(result, tmp);
+  read_barrier_impl(masm, tmp1);
+  read_barrier_impl(masm, tmp2);
+  __ cmp(tmp1, tmp2);
   // Retry with expected now being the value we just loaded from addr.
   __ br(Assembler::EQ, retry);
   if (is_cae && is_narrow) {
     // For cmp-and-exchange and narrow oops, we need to restore
     // the compressed old-value. We moved it to 'expected' a few lines up.
-    __ mov(result, expected);
+    __ mov(tmp1, expected);
   }
   __ bind(done);
 
-  if (!is_cae) {
+  if (is_cae) {
+    __ mov(result, tmp1);
+  } else {
     __ cset(result, Assembler::EQ);
   }
 }
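A note on the two result conventions the tail of the function implements: with is_cae the caller receives the value actually witnessed in memory (now moved from tmp1 into result), otherwise cset(result, Assembler::EQ) materializes a success flag. The sketch below mirrors those conventions with std::atomic; the names are illustrative, not HotSpot code, and it deliberately omits the Shenandoah fail path, where read barriers on both values let a false negative (two copies of the same object) trigger a retry.

#include <atomic>
#include <cstdio>

// is_cae == true: compare-and-exchange convention -- return the value
// actually witnessed at the address, whether or not the swap happened.
static long cas_exchange(std::atomic<long>& addr, long expected, long new_val) {
  long witnessed = expected;
  // On failure, compare_exchange_strong writes the current memory value
  // into 'witnessed', much as the patch leaves it in tmp1 for 'result'.
  addr.compare_exchange_strong(witnessed, new_val);
  return witnessed;
}

// is_cae == false: boolean convention -- only success/failure, the
// equivalent of the final __ cset(result, Assembler::EQ).
static bool cas_bool(std::atomic<long>& addr, long expected, long new_val) {
  return addr.compare_exchange_strong(expected, new_val);
  // (A 'weak' variant would use compare_exchange_weak, which, like a
  // failed store_exclusive reported back to the caller, may fail spuriously.)
}

int main() {
  std::atomic<long> cell{42};
  printf("witnessed=%ld\n", cas_exchange(cell, 42, 7)); // prints 42; cell now 7
  printf("ok=%d\n", cas_bool(cell, 42, 9));             // prints 0; cell stays 7
  return 0;
}

As in the patched assembler, only the boolean convention reduces the outcome to a flag; the exchange convention hands back the witnessed value whether or not the swap succeeded.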