43972
|
1 |
/*
|
|
2 |
* Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
|
|
3 |
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
|
|
4 |
*
|
|
5 |
* This code is free software; you can redistribute it and/or modify it
|
|
6 |
* under the terms of the GNU General Public License version 2 only, as
|
|
7 |
* published by the Free Software Foundation.
|
|
8 |
*
|
|
9 |
* This code is distributed in the hope that it will be useful, but WITHOUT
|
|
10 |
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
11 |
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
12 |
* version 2 for more details (a copy is included in the LICENSE file that
|
|
13 |
* accompanied this code).
|
|
14 |
*
|
|
15 |
* You should have received a copy of the GNU General Public License version
|
|
16 |
* 2 along with this work; if not, write to the Free Software Foundation,
|
|
17 |
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
18 |
*
|
|
19 |
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
|
|
20 |
* or visit www.oracle.com if you need additional information or have any
|
|
21 |
* questions.
|
|
22 |
*/
|
|
23 |
package org.graalvm.compiler.lir.aarch64;
|
|
24 |
|
|
25 |
import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
|
|
26 |
import static jdk.vm.ci.code.ValueUtil.asRegister;
|
|
27 |
|
|
28 |
import java.util.function.Function;
|
|
29 |
|
|
30 |
import org.graalvm.compiler.asm.Label;
|
|
31 |
import org.graalvm.compiler.asm.NumUtil;
|
|
32 |
import org.graalvm.compiler.asm.aarch64.AArch64Address;
|
|
33 |
import org.graalvm.compiler.asm.aarch64.AArch64Assembler;
|
|
34 |
import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ConditionFlag;
|
|
35 |
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
|
|
36 |
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler.PatchLabelKind;
|
|
37 |
import org.graalvm.compiler.code.CompilationResult.JumpTable;
|
|
38 |
import org.graalvm.compiler.core.common.LIRKind;
|
|
39 |
import org.graalvm.compiler.core.common.calc.Condition;
|
|
40 |
import org.graalvm.compiler.debug.GraalError;
|
|
41 |
import org.graalvm.compiler.lir.ConstantValue;
|
|
42 |
import org.graalvm.compiler.lir.LIRInstructionClass;
|
|
43 |
import org.graalvm.compiler.lir.LabelRef;
|
|
44 |
import org.graalvm.compiler.lir.Opcode;
|
|
45 |
import org.graalvm.compiler.lir.StandardOp;
|
|
46 |
import org.graalvm.compiler.lir.SwitchStrategy;
|
|
47 |
import org.graalvm.compiler.lir.SwitchStrategy.BaseSwitchClosure;
|
|
48 |
import org.graalvm.compiler.lir.Variable;
|
|
49 |
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;
|
|
50 |
|
|
51 |
import jdk.vm.ci.aarch64.AArch64Kind;
|
|
52 |
import jdk.vm.ci.code.Register;
|
|
53 |
import jdk.vm.ci.meta.Constant;
|
|
54 |
import jdk.vm.ci.meta.JavaConstant;
|
|
55 |
import jdk.vm.ci.meta.Value;
|
|
56 |
|
|
57 |
public class AArch64ControlFlow {
|
|
58 |
|
|
59 |
/**
|
|
60 |
* Compares integer register to 0 and branches if condition is true. Condition may only be equal
|
|
61 |
* or non-equal.
|
|
62 |
*/
|
|
63 |
// TODO (das) where do we need this?
|
|
64 |
// public static class CompareAndBranchOp extends AArch64LIRInstruction implements
|
|
65 |
// StandardOp.BranchOp {
|
|
66 |
// private final ConditionFlag condition;
|
|
67 |
// private final LabelRef destination;
|
|
68 |
// @Use({REG}) private Value x;
|
|
69 |
//
|
|
70 |
// public CompareAndBranchOp(Condition condition, LabelRef destination, Value x) {
|
|
71 |
// assert condition == Condition.EQ || condition == Condition.NE;
|
|
72 |
// assert ARMv8.isGpKind(x.getKind());
|
|
73 |
// this.condition = condition == Condition.EQ ? ConditionFlag.EQ : ConditionFlag.NE;
|
|
74 |
// this.destination = destination;
|
|
75 |
// this.x = x;
|
|
76 |
// }
|
|
77 |
//
|
|
78 |
// @Override
|
|
79 |
// public void emitCode(CompilationResultBuilder crb, ARMv8MacroAssembler masm) {
|
|
80 |
// int size = ARMv8.bitsize(x.getKind());
|
|
81 |
// if (condition == ConditionFlag.EQ) {
|
|
82 |
// masm.cbz(size, asRegister(x), destination.label());
|
|
83 |
// } else {
|
|
84 |
// masm.cbnz(size, asRegister(x), destination.label());
|
|
85 |
// }
|
|
86 |
// }
|
|
87 |
// }
|
|
88 |
|
|
89 |
public static class BranchOp extends AArch64BlockEndOp implements StandardOp.BranchOp {
|
|
90 |
public static final LIRInstructionClass<BranchOp> TYPE = LIRInstructionClass.create(BranchOp.class);
|
|
91 |
|
|
92 |
private final AArch64Assembler.ConditionFlag condition;
|
|
93 |
private final LabelRef trueDestination;
|
|
94 |
private final LabelRef falseDestination;
|
|
95 |
|
|
96 |
private final double trueDestinationProbability;
|
|
97 |
|
|
98 |
public BranchOp(AArch64Assembler.ConditionFlag condition, LabelRef trueDestination, LabelRef falseDestination, double trueDestinationProbability) {
|
|
99 |
super(TYPE);
|
|
100 |
this.condition = condition;
|
|
101 |
this.trueDestination = trueDestination;
|
|
102 |
this.falseDestination = falseDestination;
|
|
103 |
this.trueDestinationProbability = trueDestinationProbability;
|
|
104 |
}
|
|
105 |
|
|
106 |
@Override
|
|
107 |
public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
|
|
108 |
/*
|
|
109 |
* Explanation: Depending on what the successor edge is, we can use the fall-through to
|
|
110 |
* optimize the generated code. If neither is a successor edge, use the branch
|
|
111 |
* probability to try to take the conditional jump as often as possible to avoid
|
|
112 |
* executing two instructions instead of one.
|
|
113 |
*/
|
|
114 |
if (crb.isSuccessorEdge(trueDestination)) {
|
|
115 |
masm.branchConditionally(condition.negate(), falseDestination.label());
|
|
116 |
} else if (crb.isSuccessorEdge(falseDestination)) {
|
|
117 |
masm.branchConditionally(condition, trueDestination.label());
|
|
118 |
} else if (trueDestinationProbability < 0.5) {
|
|
119 |
masm.branchConditionally(condition.negate(), falseDestination.label());
|
|
120 |
masm.jmp(trueDestination.label());
|
|
121 |
} else {
|
|
122 |
masm.branchConditionally(condition, trueDestination.label());
|
|
123 |
masm.jmp(falseDestination.label());
|
|
124 |
}
|
|
125 |
}
|
|
126 |
|
|
127 |
}
|
|
128 |
|
|
129 |
@Opcode("CMOVE")
|
|
130 |
public static class CondMoveOp extends AArch64LIRInstruction {
|
|
131 |
public static final LIRInstructionClass<CondMoveOp> TYPE = LIRInstructionClass.create(CondMoveOp.class);
|
|
132 |
|
|
133 |
@Def protected Value result;
|
|
134 |
@Use protected Value trueValue;
|
|
135 |
@Use protected Value falseValue;
|
|
136 |
private final AArch64Assembler.ConditionFlag condition;
|
|
137 |
|
|
138 |
public CondMoveOp(Variable result, AArch64Assembler.ConditionFlag condition, Value trueValue, Value falseValue) {
|
|
139 |
super(TYPE);
|
|
140 |
assert trueValue.getPlatformKind() == falseValue.getPlatformKind() && trueValue.getPlatformKind() == result.getPlatformKind();
|
|
141 |
this.result = result;
|
|
142 |
this.condition = condition;
|
|
143 |
this.trueValue = trueValue;
|
|
144 |
this.falseValue = falseValue;
|
|
145 |
}
|
|
146 |
|
|
147 |
@Override
|
|
148 |
public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
|
|
149 |
AArch64Kind kind = (AArch64Kind) trueValue.getPlatformKind();
|
|
150 |
int size = kind.getSizeInBytes() * Byte.SIZE;
|
|
151 |
if (kind.isInteger()) {
|
|
152 |
masm.cmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
|
|
153 |
} else {
|
|
154 |
masm.fcmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
|
|
155 |
}
|
|
156 |
}
|
|
157 |
}
|
|
158 |
|
|
159 |
public static class StrategySwitchOp extends AArch64BlockEndOp implements StandardOp.BlockEndOp {
|
|
160 |
public static final LIRInstructionClass<StrategySwitchOp> TYPE = LIRInstructionClass.create(StrategySwitchOp.class);
|
|
161 |
|
|
162 |
private final Constant[] keyConstants;
|
|
163 |
protected final SwitchStrategy strategy;
|
|
164 |
private final Function<Condition, ConditionFlag> converter;
|
|
165 |
private final LabelRef[] keyTargets;
|
|
166 |
private final LabelRef defaultTarget;
|
|
167 |
@Alive protected Value key;
|
|
168 |
// TODO (das) This could be optimized: We only need the scratch register in case of a
|
|
169 |
// datapatch, or too large immediates.
|
|
170 |
@Temp protected Value scratch;
|
|
171 |
|
|
172 |
public StrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
|
|
173 |
Function<Condition, ConditionFlag> converter) {
|
|
174 |
this(TYPE, strategy, keyTargets, defaultTarget, key, scratch, converter);
|
|
175 |
}
|
|
176 |
|
|
177 |
protected StrategySwitchOp(LIRInstructionClass<? extends StrategySwitchOp> c, SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
|
|
178 |
Function<Condition, ConditionFlag> converter) {
|
|
179 |
super(c);
|
|
180 |
this.strategy = strategy;
|
|
181 |
this.converter = converter;
|
|
182 |
this.keyConstants = strategy.getKeyConstants();
|
|
183 |
this.keyTargets = keyTargets;
|
|
184 |
this.defaultTarget = defaultTarget;
|
|
185 |
this.key = key;
|
|
186 |
this.scratch = scratch;
|
|
187 |
assert keyConstants.length == keyTargets.length;
|
|
188 |
assert keyConstants.length == strategy.keyProbabilities.length;
|
|
189 |
}
|
|
190 |
|
|
191 |
@Override
|
|
192 |
public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
|
|
193 |
strategy.run(new SwitchClosure(asRegister(key), crb, masm));
|
|
194 |
}
|
|
195 |
|
|
196 |
public class SwitchClosure extends BaseSwitchClosure {
|
|
197 |
|
|
198 |
protected final Register keyRegister;
|
|
199 |
protected final CompilationResultBuilder crb;
|
|
200 |
protected final AArch64MacroAssembler masm;
|
|
201 |
|
|
202 |
protected SwitchClosure(Register keyRegister, CompilationResultBuilder crb, AArch64MacroAssembler masm) {
|
|
203 |
super(crb, masm, keyTargets, defaultTarget);
|
|
204 |
this.keyRegister = keyRegister;
|
|
205 |
this.crb = crb;
|
|
206 |
this.masm = masm;
|
|
207 |
}
|
|
208 |
|
|
209 |
protected void emitComparison(Constant c) {
|
|
210 |
JavaConstant jc = (JavaConstant) c;
|
|
211 |
ConstantValue constVal = new ConstantValue(LIRKind.value(key.getPlatformKind()), c);
|
|
212 |
switch (jc.getJavaKind()) {
|
|
213 |
case Int:
|
|
214 |
long lc = jc.asLong();
|
|
215 |
assert NumUtil.isInt(lc);
|
|
216 |
emitCompare(crb, masm, key, scratch, constVal);
|
|
217 |
break;
|
|
218 |
case Long:
|
|
219 |
emitCompare(crb, masm, key, scratch, constVal);
|
|
220 |
break;
|
|
221 |
case Object:
|
|
222 |
emitCompare(crb, masm, key, scratch, constVal);
|
|
223 |
break;
|
|
224 |
default:
|
|
225 |
throw new GraalError("switch only supported for int, long and object");
|
|
226 |
}
|
|
227 |
}
|
|
228 |
|
|
229 |
@Override
|
|
230 |
protected void conditionalJump(int index, Condition condition, Label target) {
|
|
231 |
emitComparison(keyConstants[index]);
|
|
232 |
masm.branchConditionally(converter.apply(condition), target);
|
|
233 |
}
|
|
234 |
}
|
|
235 |
}
|
|
236 |
|
|
237 |
    /**
     * Switch implemented as a jump table: the (biased) key indexes into a table of 4-byte entries
     * emitted directly after the dispatch sequence. An optional range check branches to the
     * default target when the key lies outside {@code [lowKey, lowKey + targets.length)}.
     */
    public static class TableSwitchOp extends AArch64BlockEndOp implements StandardOp.BlockEndOp {
        public static final LIRInstructionClass<TableSwitchOp> TYPE = LIRInstructionClass.create(TableSwitchOp.class);

        // Smallest key in the table; subtracted from the key to form a zero-based index.
        private final int lowKey;
        // May be null; in that case no range check is emitted.
        private final LabelRef defaultTarget;
        private final LabelRef[] targets;
        @Alive protected Variable keyValue;
        @Temp protected Variable scratchValue;

        public TableSwitchOp(int lowKey, LabelRef defaultTarget, LabelRef[] targets, Variable key, Variable scratch) {
            super(TYPE);
            this.lowKey = lowKey;
            this.defaultTarget = defaultTarget;
            this.targets = targets;
            this.keyValue = key;
            this.scratchValue = scratch;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register key = asRegister(keyValue);
            Register scratch = asRegister(scratchValue);
            // Bias the key so it becomes a zero-based table index. NOTE: this mutates the key
            // register in place (keyValue is @Alive, not @Use — TODO confirm this is intended).
            if (lowKey != 0) {
                if (AArch64MacroAssembler.isArithmeticImmediate(lowKey)) {
                    masm.sub(32, key, key, lowKey);
                } else {
                    // lowKey does not fit into an arithmetic immediate: materialize it first.
                    ConstantValue constVal = new ConstantValue(LIRKind.value(AArch64Kind.WORD), JavaConstant.forInt(lowKey));
                    AArch64Move.move(crb, masm, scratchValue, constVal);
                    masm.sub(32, key, key, scratch);
                }
            }
            if (defaultTarget != null) {
                // if key is not in table range, jump to default target if it exists.
                // HS = unsigned >=, which also catches biased keys that wrapped below zero.
                ConstantValue constVal = new ConstantValue(LIRKind.value(AArch64Kind.WORD), JavaConstant.forInt(targets.length));
                emitCompare(crb, masm, keyValue, scratchValue, constVal);
                masm.branchConditionally(AArch64Assembler.ConditionFlag.HS, defaultTarget.label());
            }

            // Load the start address of the jump table - which starts 3 instructions after the adr
            // - into scratch.
            // NOTE(review): the "4 * 3" offset assumes exactly three 4-byte instructions (adr,
            // ldr, jmp) precede the table — keep the instruction count in sync if this changes.
            masm.adr(scratch, 4 * 3);
            // Load the 4-byte table entry selected by the key (scaled register-offset addressing).
            masm.ldr(32, scratch, AArch64Address.createRegisterOffsetAddress(scratch, key, /* scaled */true));
            masm.jmp(scratch);
            int jumpTablePos = masm.position();
            // emit jump table entries
            for (LabelRef target : targets) {
                Label label = target.label();
                if (label.isBound()) {
                    masm.emitInt(target.label().position());
                } else {
                    // Label not yet bound: record a patch site and emit a placeholder that the
                    // assembler rewrites once the label's position is known.
                    label.addPatchAt(masm.position());
                    masm.emitInt(PatchLabelKind.JUMP_ADDRESS.encoding);
                }
            }
            // Record the table (position, key range, 4-byte entry size) in the compilation result.
            JumpTable jt = new JumpTable(jumpTablePos, lowKey, lowKey + targets.length - 1, 4);
            crb.compilationResult.addAnnotation(jt);
        }
    }
|
|
295 |
|
|
296 |
private static void emitCompare(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value key, Value scratchValue, ConstantValue c) {
|
|
297 |
long imm = c.getJavaConstant().asLong();
|
|
298 |
final int size = key.getPlatformKind().getSizeInBytes() * Byte.SIZE;
|
|
299 |
if (AArch64MacroAssembler.isComparisonImmediate(imm)) {
|
|
300 |
masm.cmp(size, asRegister(key), (int) imm);
|
|
301 |
} else {
|
|
302 |
AArch64Move.move(crb, masm, asAllocatableValue(scratchValue), c);
|
|
303 |
masm.cmp(size, asRegister(key), asRegister(scratchValue));
|
|
304 |
}
|
|
305 |
}
|
|
306 |
|
|
307 |
}
|