8148328: aarch64: redundant lsr instructions in stub code.
Summary: avoid redundant lsr instructions in jbyte_arraycopy and jbyte_disjoint_arraycopy.
Reviewed-by: aph
Contributed-by: felix.yang@linaro.org
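
Background for the change: in the patched code, rscratch2 holds the byte adjustment needed to align the source, and it is shifted right by exact_log2(granularity) to convert it to an element count before being subtracted from count. For the byte copy stubs (jbyte_arraycopy, jbyte_disjoint_arraycopy) the granularity is 1 byte, so the shift amount is 0 and the emitted lsr is a no-op; the patch computes the shift once and only emits the lsr when it is non-zero. The following is a minimal standalone C++ sketch of that reasoning, not part of the changeset; exact_log2_sketch() is a hypothetical stand-in for HotSpot's exact_log2().

    #include <cassert>
    #include <cstdio>
    #include <initializer_list>

    // Hypothetical stand-in for HotSpot's exact_log2(): returns log2 of an
    // exact power of two.
    static int exact_log2_sketch(int x) {
      assert(x > 0 && (x & (x - 1)) == 0);
      int shift = 0;
      while ((1 << shift) != x) {
        shift++;
      }
      return shift;
    }

    int main() {
      // granularity is the element size in bytes: jbyte=1, jshort=2, jint=4, jlong=8.
      for (int granularity : {1, 2, 4, 8}) {
        int shift = exact_log2_sketch(granularity);
        // Only the byte copies hit shift == 0, where "lsr reg, reg, #0" would be
        // emitted to no effect; guarding the emission on a non-zero shift drops it.
        printf("granularity %d -> shift %d%s\n", granularity, shift,
               shift == 0 ? " (lsr would be redundant)" : "");
      }
      return 0;
    }
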
--- a/hotspot/src/cpu/aarch64/vm/stubGenerator_aarch64.cpp Thu Jan 28 08:33:45 2016 +0100
+++ b/hotspot/src/cpu/aarch64/vm/stubGenerator_aarch64.cpp Wed Jan 27 12:20:53 2016 +0800
@@ -962,7 +962,7 @@
       __ lea(d, Address(d, count, Address::lsl(exact_log2(-step))));
     }
 
-    Label done, tail;
+    Label tail;
     __ cmp(count, 16/granularity);
     __ br(Assembler::LO, tail);
 
@@ -987,7 +987,8 @@
     }
     // rscratch2 is the byte adjustment needed to align s.
     __ cbz(rscratch2, aligned);
-    __ lsr(rscratch2, rscratch2, exact_log2(granularity));
+    int shift = exact_log2(granularity);
+    if (shift) __ lsr(rscratch2, rscratch2, shift);
     __ sub(count, count, rscratch2);
 
 #if 0