--- a/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.cpp Mon Jul 20 11:41:34 2015 +0100
+++ b/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.cpp Thu Jul 23 17:29:44 2015 +0100
@@ -2314,6 +2314,28 @@
}
}
+Address MacroAssembler::spill_address(int size, int offset, Register tmp)
+{
+  assert(offset >= 0, "spill to negative address?");
+  // Is the offset reachable with a single load/store addressing mode?
+  //   Unaligned access: 9-bit signed unscaled immediate (ldur/stur form)
+  //   Aligned access:   12-bit unsigned immediate, scaled by the access size (ldr/str form)
+  Register base = sp;
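+  // A 9-bit signed offset only reaches [-256, 255].  If the offset is not
+  // aligned to the access size and is out of that range, fold its low 12
+  // bits into tmp and address relative to tmp instead of sp.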
+  if ((offset & (size-1)) && offset >= (1<<8)) {
+    add(tmp, base, offset & ((1<<12)-1));
+    base = tmp;
+    offset &= ~((1<<12)-1);
+  }
+
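+  // The scaled, unsigned form cannot encode offsets of (1<<12) * size or
+  // more, so fold bits [23:12] of any such offset into tmp as well.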
+  if (offset >= (1<<12) * size) {
+    add(tmp, base, offset & (((1<<12)-1)<<12));
+    base = tmp;
+    offset &= ~(((1<<12)-1)<<12);
+  }
+
+  return Address(base, offset);
+}
+
/**
* Multiply 64 bit by 64 bit first loop.
*/
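
For reference (not part of the changeset): callers consume the returned
Address as the operand of an ordinary str/ldr, letting spill_address
materialise any unreachable part of the offset in the temporary register.
The helper names spill/unspill below and the choice of rscratch1 as the
temporary are illustrative assumptions, not taken from this patch:

    // Sketch of hypothetical helpers: store/reload a 64-bit register
    // to/from a stack slot at byte offset 'offset'.
    void spill(Register r, int offset)   { str(r, spill_address(8, offset, rscratch1)); }
    void unspill(Register r, int offset) { ldr(r, spill_address(8, offset, rscratch1)); }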
@@ -3799,8 +3821,8 @@
br(Assembler::HI, slow_case);
// If heap_top hasn't been changed by some other thread, update it.
- stlxr(rscratch1, end, rscratch1);
- cbnzw(rscratch1, retry);
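+    // The stlxr status register must not be the same as its data or address
+    // register (the result is CONSTRAINED UNPREDICTABLE), and rscratch1 is
+    // the heap_top address here, so write the exclusive-store status to
+    // rscratch2 instead.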
+ stlxr(rscratch2, end, rscratch1);
+ cbnzw(rscratch2, retry);
}
}
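
The second hunk fixes eden_allocate's exclusive store: the old code used
rscratch1 both as the stlxr status register and as its address register,
which the architecture leaves unpredictable for store-exclusive instructions.
Below is a minimal sketch of the load-exclusive/store-exclusive retry shape
this code relies on; the retry label, the obj and end registers and the size
computation are assumed for illustration, not copied from this file:

    Label retry;
    bind(retry);
    ldaxr(obj, rscratch1);             // obj = *heap_top (load-acquire exclusive)
    // ... compute end = obj + allocation size, branch to slow_case if the
    // ... new top does not fit in the heap ...
    stlxr(rscratch2, end, rscratch1);  // try *heap_top = end; status -> rscratch2
    cbnzw(rscratch2, retry);           // non-zero status: store-exclusive failed, retry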