/*
 * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
|
23 |
|
24 |
|
25 package org.graalvm.compiler.lir.alloc.trace.lsra; |
|
26 |
|
27 import static jdk.vm.ci.code.ValueUtil.asRegister; |
|
28 import static jdk.vm.ci.code.ValueUtil.asStackSlot; |
|
29 import static jdk.vm.ci.code.ValueUtil.isIllegal; |
|
30 import static jdk.vm.ci.code.ValueUtil.isRegister; |
|
31 import static jdk.vm.ci.code.ValueUtil.isStackSlot; |
|
32 import static org.graalvm.compiler.lir.LIRValueUtil.asVirtualStackSlot; |
|
33 import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue; |
|
34 import static org.graalvm.compiler.lir.LIRValueUtil.isVirtualStackSlot; |
|
35 |
|
36 import java.util.ArrayList; |
|
37 import java.util.Arrays; |
|
38 import java.util.HashSet; |
|
39 import java.util.List; |
|
40 |
|
41 import org.graalvm.compiler.core.common.LIRKind; |
|
42 import org.graalvm.compiler.debug.CounterKey; |
|
43 import org.graalvm.compiler.debug.DebugContext; |
|
44 import org.graalvm.compiler.debug.GraalError; |
|
45 import org.graalvm.compiler.debug.Indent; |
|
46 import org.graalvm.compiler.lir.LIRInsertionBuffer; |
|
47 import org.graalvm.compiler.lir.LIRInstruction; |
|
48 import org.graalvm.compiler.lir.VirtualStackSlot; |
|
49 import org.graalvm.compiler.lir.alloc.trace.lsra.TraceLinearScanPhase.TraceLinearScan; |
|
50 import org.graalvm.compiler.lir.framemap.FrameMap; |
|
51 import org.graalvm.compiler.lir.framemap.FrameMapBuilderTool; |
|
52 |
|
53 import jdk.vm.ci.code.StackSlot; |
|
54 import jdk.vm.ci.meta.AllocatableValue; |
|
55 import jdk.vm.ci.meta.Constant; |
|
56 import jdk.vm.ci.meta.JavaConstant; |
|
57 import jdk.vm.ci.meta.Value; |
|
58 |
|
59 final class TraceLocalMoveResolver { |
|
60 |
|
61 private static final CounterKey cycleBreakingSlotsAllocated = DebugContext.counter("TraceRA[cycleBreakingSlotsAllocated(local)]"); |
|
62 |
|
63 private static final int STACK_SLOT_IN_CALLER_FRAME_IDX = -1; |
|
64 private final TraceLinearScan allocator; |
|
65 |
|
66 private int insertIdx; |
|
67 private LIRInsertionBuffer insertionBuffer; // buffer where moves are inserted |
|
68 |
|
69 private final ArrayList<TraceInterval> mappingFrom; |
|
70 private final ArrayList<Constant> mappingFromOpr; |
|
71 private final ArrayList<TraceInterval> mappingTo; |
|
72 private final int[] registerBlocked; |
|
73 |
|
74 private int[] stackBlocked; |
|
75 private final int firstVirtualStackIndex; |
|
76 |
|
77 private final DebugContext debug; |
|
78 |
|
79 private int getStackArrayIndex(Value stackSlotValue) { |
|
80 if (isStackSlot(stackSlotValue)) { |
|
81 return getStackArrayIndex(asStackSlot(stackSlotValue)); |
|
82 } |
|
83 if (isVirtualStackSlot(stackSlotValue)) { |
|
84 return getStackArrayIndex(asVirtualStackSlot(stackSlotValue)); |
|
85 } |
|
86 throw GraalError.shouldNotReachHere("value is not a stack slot: " + stackSlotValue); |
|
87 } |
|
88 |
|
89 private int getStackArrayIndex(StackSlot stackSlot) { |
|
90 int stackIdx; |
|
91 if (stackSlot.isInCallerFrame()) { |
|
92 // incoming stack arguments can be ignored |
|
93 stackIdx = STACK_SLOT_IN_CALLER_FRAME_IDX; |
|
94 } else { |
|
95 assert stackSlot.getRawAddFrameSize() : "Unexpected stack slot: " + stackSlot; |
|
96 int offset = -stackSlot.getRawOffset(); |
|
97 assert 0 <= offset && offset < firstVirtualStackIndex : String.format("Wrong stack slot offset: %d (first virtual stack slot index: %d", offset, firstVirtualStackIndex); |
|
98 stackIdx = offset; |
|
99 } |
|
100 return stackIdx; |
|
101 } |
|
102 |
|
103 private int getStackArrayIndex(VirtualStackSlot virtualStackSlot) { |
|
104 return firstVirtualStackIndex + virtualStackSlot.getId(); |
|
105 } |
|
106 |
|
107 protected void setValueBlocked(Value location, int direction) { |
|
108 assert direction == 1 || direction == -1 : "out of bounds"; |
|
109 if (isStackSlotValue(location)) { |
|
110 int stackIdx = getStackArrayIndex(location); |
|
111 if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) { |
|
112 // incoming stack arguments can be ignored |
|
113 return; |
|
114 } |
|
115 if (stackIdx >= stackBlocked.length) { |
|
116 stackBlocked = Arrays.copyOf(stackBlocked, stackIdx + 1); |
|
117 } |
|
118 stackBlocked[stackIdx] += direction; |
|
119 } else { |
|
120 assert direction == 1 || direction == -1 : "out of bounds"; |
|
121 if (isRegister(location)) { |
|
122 registerBlocked[asRegister(location).number] += direction; |
|
123 } else { |
|
124 throw GraalError.shouldNotReachHere("unhandled value " + location); |
|
125 } |
|
126 } |
|
127 } |
|
128 |
|
129 protected TraceInterval getMappingFrom(int i) { |
|
130 return mappingFrom.get(i); |
|
131 } |
|
132 |
|
133 protected int mappingFromSize() { |
|
134 return mappingFrom.size(); |
|
135 } |
|
136 |
|
137 protected int valueBlocked(Value location) { |
|
138 if (isStackSlotValue(location)) { |
|
139 int stackIdx = getStackArrayIndex(location); |
|
140 if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) { |
|
141 // incoming stack arguments are always blocked (aka they can not be written) |
|
142 return 1; |
|
143 } |
|
144 if (stackIdx >= stackBlocked.length) { |
|
145 return 0; |
|
146 } |
|
147 return stackBlocked[stackIdx]; |
|
148 } |
|
149 if (isRegister(location)) { |
|
150 return registerBlocked[asRegister(location).number]; |
|
151 } |
|
152 throw GraalError.shouldNotReachHere("unhandled value " + location); |
|
153 } |
|
154 |
|
155 /* |
|
156 * TODO (je) remove? |
|
157 */ |
|
158 protected static boolean areMultipleReadsAllowed() { |
|
159 return true; |
|
160 } |
|
161 |
|
162 boolean hasMappings() { |
|
163 return mappingFrom.size() > 0; |
|
164 } |
|
165 |
|
166 protected TraceLinearScan getAllocator() { |
|
167 return allocator; |
|
168 } |
|
169 |
|
170 protected TraceLocalMoveResolver(TraceLinearScan allocator) { |
|
171 |
|
172 this.allocator = allocator; |
|
173 this.debug = allocator.getDebug(); |
|
174 this.mappingFrom = new ArrayList<>(8); |
|
175 this.mappingFromOpr = new ArrayList<>(8); |
|
176 this.mappingTo = new ArrayList<>(8); |
|
177 this.insertIdx = -1; |
|
178 this.insertionBuffer = new LIRInsertionBuffer(); |
|
179 this.registerBlocked = new int[allocator.getRegisters().size()]; |
|
180 FrameMapBuilderTool frameMapBuilderTool = (FrameMapBuilderTool) allocator.getFrameMapBuilder(); |
|
181 FrameMap frameMap = frameMapBuilderTool.getFrameMap(); |
|
182 this.stackBlocked = new int[frameMapBuilderTool.getNumberOfStackSlots()]; |
|
183 this.firstVirtualStackIndex = !frameMap.frameNeedsAllocating() ? 0 : frameMap.currentFrameSize() + 1; |
|
184 } |
|
185 |
|
186 protected boolean checkEmpty() { |
|
187 assert mappingFrom.size() == 0 && mappingFromOpr.size() == 0 && mappingTo.size() == 0 : "list must be empty before and after processing"; |
|
188 for (int i = 0; i < stackBlocked.length; i++) { |
|
189 assert stackBlocked[i] == 0 : "stack map must be empty before and after processing"; |
|
190 } |
|
191 for (int i = 0; i < getAllocator().getRegisters().size(); i++) { |
|
192 assert registerBlocked[i] == 0 : "register map must be empty before and after processing"; |
|
193 } |
|
194 checkMultipleReads(); |
|
195 return true; |
|
196 } |
|
197 |
|
198 protected void checkMultipleReads() { |
|
199 // multiple reads are allowed in SSA LSRA |
|
200 } |
|
201 |
|
202 private boolean verifyBeforeResolve() { |
|
203 assert mappingFrom.size() == mappingFromOpr.size() : "length must be equal"; |
|
204 assert mappingFrom.size() == mappingTo.size() : "length must be equal"; |
|
205 assert insertIdx != -1 : "insert position not set"; |
|
206 |
|
207 int i; |
|
208 int j; |
|
209 if (!areMultipleReadsAllowed()) { |
|
210 for (i = 0; i < mappingFrom.size(); i++) { |
|
211 for (j = i + 1; j < mappingFrom.size(); j++) { |
|
212 assert mappingFrom.get(i) == null || mappingFrom.get(i) != mappingFrom.get(j) : "cannot read from same interval twice"; |
|
213 } |
|
214 } |
|
215 } |
|
216 |
|
217 for (i = 0; i < mappingTo.size(); i++) { |
|
218 for (j = i + 1; j < mappingTo.size(); j++) { |
|
219 assert mappingTo.get(i) != mappingTo.get(j) : "cannot write to same interval twice"; |
|
220 } |
|
221 } |
|
222 |
|
223 HashSet<Value> usedRegs = new HashSet<>(); |
|
224 if (!areMultipleReadsAllowed()) { |
|
225 for (i = 0; i < mappingFrom.size(); i++) { |
|
226 TraceInterval interval = mappingFrom.get(i); |
|
227 if (interval != null && !isIllegal(interval.location())) { |
|
228 boolean unique = usedRegs.add(interval.location()); |
|
229 assert unique : "cannot read from same register twice"; |
|
230 } |
|
231 } |
|
232 } |
|
233 |
|
234 usedRegs.clear(); |
|
235 for (i = 0; i < mappingTo.size(); i++) { |
|
236 TraceInterval interval = mappingTo.get(i); |
|
237 if (isIllegal(interval.location())) { |
|
238 // After insertion the location may become illegal, so don't check it since multiple |
|
239 // intervals might be illegal. |
|
240 continue; |
|
241 } |
|
242 boolean unique = usedRegs.add(interval.location()); |
|
243 assert unique : "cannot write to same register twice"; |
|
244 } |
|
245 |
|
246 verifyStackSlotMapping(); |
|
247 |
|
248 return true; |
|
249 } |
|
250 |
|
251 protected void verifyStackSlotMapping() { |
|
252 // relax disjoint stack maps invariant |
|
253 } |
|
254 |
|
255 // mark assignedReg and assignedRegHi of the interval as blocked |
|
256 private void blockRegisters(TraceInterval interval) { |
|
257 Value location = interval.location(); |
|
258 if (mightBeBlocked(location)) { |
|
259 assert areMultipleReadsAllowed() || valueBlocked(location) == 0 : "location already marked as used: " + location; |
|
260 int direction = 1; |
|
261 setValueBlocked(location, direction); |
|
262 debug.log("block %s", location); |
|
263 } |
|
264 } |
|
265 |
|
266 // mark assignedReg and assignedRegHi of the interval as unblocked |
|
267 private void unblockRegisters(TraceInterval interval) { |
|
268 Value location = interval.location(); |
|
269 if (mightBeBlocked(location)) { |
|
270 assert valueBlocked(location) > 0 : "location already marked as unused: " + location; |
|
271 setValueBlocked(location, -1); |
|
272 debug.log("unblock %s", location); |
|
273 } |
|
274 } |
|
275 |
|
276 /** |
|
277 * Checks if the {@linkplain TraceInterval#location() location} of {@code to} is not blocked or |
|
278 * is only blocked by {@code from}. |
|
279 */ |
|
280 private boolean safeToProcessMove(TraceInterval from, TraceInterval to) { |
|
281 Value fromReg = from != null ? from.location() : null; |
|
282 |
|
283 Value location = to.location(); |
|
284 if (mightBeBlocked(location)) { |
|
285 if ((valueBlocked(location) > 1 || (valueBlocked(location) == 1 && !isMoveToSelf(fromReg, location)))) { |
|
286 return false; |
|
287 } |
|
288 } |
|
289 |
|
290 return true; |
|
291 } |
|
292 |
|
293 protected static boolean isMoveToSelf(Value from, Value to) { |
|
294 assert to != null; |
|
295 if (to.equals(from)) { |
|
296 return true; |
|
297 } |
|
298 if (from != null && isRegister(from) && isRegister(to) && asRegister(from).equals(asRegister(to))) { |
|
299 return true; |
|
300 } |
|
301 return false; |
|
302 } |
|
303 |
|
304 protected static boolean mightBeBlocked(Value location) { |
|
305 if (isRegister(location)) { |
|
306 return true; |
|
307 } |
|
308 if (isStackSlotValue(location)) { |
|
309 return true; |
|
310 } |
|
311 return false; |
|
312 } |
|
313 |
|
314 private void createInsertionBuffer(List<LIRInstruction> list) { |
|
315 assert !insertionBuffer.initialized() : "overwriting existing buffer"; |
|
316 insertionBuffer.init(list); |
|
317 } |
|
318 |
|
319 private void appendInsertionBuffer() { |
|
320 if (insertionBuffer.initialized()) { |
|
321 insertionBuffer.finish(); |
|
322 } |
|
323 assert !insertionBuffer.initialized() : "must be uninitialized now"; |
|
324 |
|
325 insertIdx = -1; |
|
326 } |
|
327 |
|
328 private void insertMove(TraceInterval fromInterval, TraceInterval toInterval) { |
|
329 assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval; |
|
330 assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : "move between different types"; |
|
331 assert insertIdx != -1 : "must setup insert position first"; |
|
332 |
|
333 insertionBuffer.append(insertIdx, createMove(allocator.getOperand(fromInterval), allocator.getOperand(toInterval), fromInterval.location(), toInterval.location())); |
|
334 |
|
335 if (debug.isLogEnabled()) { |
|
336 debug.log("insert move from %s to %s at %d", fromInterval, toInterval, insertIdx); |
|
337 } |
|
338 } |
|
339 |
|
340 /** |
|
341 * @param fromOpr {@link TraceInterval operand} of the {@code from} interval |
|
342 * @param toOpr {@link TraceInterval operand} of the {@code to} interval |
|
343 * @param fromLocation {@link TraceInterval#location() location} of the {@code to} interval |
|
344 * @param toLocation {@link TraceInterval#location() location} of the {@code to} interval |
|
345 */ |
|
346 protected LIRInstruction createMove(AllocatableValue fromOpr, AllocatableValue toOpr, AllocatableValue fromLocation, AllocatableValue toLocation) { |
|
347 if (isStackSlotValue(toLocation) && isStackSlotValue(fromLocation)) { |
|
348 return getAllocator().getSpillMoveFactory().createStackMove(toOpr, fromOpr); |
|
349 } |
|
350 return getAllocator().getSpillMoveFactory().createMove(toOpr, fromOpr); |
|
351 } |
|
352 |
|
353 private void insertMove(Constant fromOpr, TraceInterval toInterval) { |
|
354 assert insertIdx != -1 : "must setup insert position first"; |
|
355 |
|
356 AllocatableValue toOpr = allocator.getOperand(toInterval); |
|
357 LIRInstruction move = getAllocator().getSpillMoveFactory().createLoad(toOpr, fromOpr); |
|
358 insertionBuffer.append(insertIdx, move); |
|
359 |
|
360 if (debug.isLogEnabled()) { |
|
361 debug.log("insert move from value %s to %s at %d", fromOpr, toInterval, insertIdx); |
|
362 } |
|
363 } |
|
364 |
|
365 @SuppressWarnings("try") |
|
366 private void resolveMappings() { |
|
367 try (Indent indent = debug.logAndIndent("resolveMapping")) { |
|
368 assert verifyBeforeResolve(); |
|
369 if (debug.isLogEnabled()) { |
|
370 printMapping(); |
|
371 } |
|
372 |
|
373 // Block all registers that are used as input operands of a move. |
|
374 // When a register is blocked, no move to this register is emitted. |
|
375 // This is necessary for detecting cycles in moves. |
|
376 int i; |
|
377 for (i = mappingFrom.size() - 1; i >= 0; i--) { |
|
378 TraceInterval fromInterval = mappingFrom.get(i); |
|
379 if (fromInterval != null) { |
|
380 blockRegisters(fromInterval); |
|
381 } |
|
382 } |
|
383 |
|
384 ArrayList<AllocatableValue> busySpillSlots = null; |
|
385 while (mappingFrom.size() > 0) { |
|
386 boolean processedInterval = false; |
|
387 |
|
388 int spillCandidate = -1; |
|
389 for (i = mappingFrom.size() - 1; i >= 0; i--) { |
|
390 TraceInterval fromInterval = mappingFrom.get(i); |
|
391 TraceInterval toInterval = mappingTo.get(i); |
|
392 |
|
393 if (safeToProcessMove(fromInterval, toInterval)) { |
|
394 // this interval can be processed because target is free |
|
395 if (fromInterval != null) { |
|
396 insertMove(fromInterval, toInterval); |
|
397 unblockRegisters(fromInterval); |
|
398 } else { |
|
399 insertMove(mappingFromOpr.get(i), toInterval); |
|
400 } |
|
401 if (isStackSlotValue(toInterval.location())) { |
|
402 if (busySpillSlots == null) { |
|
403 busySpillSlots = new ArrayList<>(2); |
|
404 } |
|
405 busySpillSlots.add(toInterval.location()); |
|
406 } |
|
407 mappingFrom.remove(i); |
|
408 mappingFromOpr.remove(i); |
|
409 mappingTo.remove(i); |
|
410 |
|
411 processedInterval = true; |
|
412 } else if (fromInterval != null && isRegister(fromInterval.location()) && (busySpillSlots == null || !busySpillSlots.contains(fromInterval.spillSlot()))) { |
|
413 // this interval cannot be processed now because target is not free |
|
414 // it starts in a register, so it is a possible candidate for spilling |
|
415 spillCandidate = i; |
|
416 } |
|
417 } |
|
418 |
|
419 if (!processedInterval) { |
|
420 breakCycle(spillCandidate); |
|
421 } |
|
422 } |
|
423 } |
|
424 |
|
425 // check that all intervals have been processed |
|
426 assert checkEmpty(); |
|
427 } |
|
428 |
|
429 protected void breakCycle(int spillCandidate) { |
|
430 if (spillCandidate != -1) { |
|
431 // no move could be processed because there is a cycle in the move list |
|
432 // (e.g. r1 . r2, r2 . r1), so one interval must be spilled to memory |
|
433 assert spillCandidate != -1 : "no interval in register for spilling found"; |
|
434 |
|
435 // create a new spill interval and assign a stack slot to it |
|
436 TraceInterval fromInterval1 = mappingFrom.get(spillCandidate); |
|
437 // do not allocate a new spill slot for temporary interval, but |
|
438 // use spill slot assigned to fromInterval. Otherwise moves from |
|
439 // one stack slot to another can happen (not allowed by LIRAssembler |
|
440 AllocatableValue spillSlot1 = fromInterval1.spillSlot(); |
|
441 if (spillSlot1 == null) { |
|
442 spillSlot1 = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval1)); |
|
443 fromInterval1.setSpillSlot(spillSlot1); |
|
444 cycleBreakingSlotsAllocated.increment(debug); |
|
445 } |
|
446 spillInterval(spillCandidate, fromInterval1, spillSlot1); |
|
447 return; |
|
448 } |
|
449 assert mappingFromSize() > 1; |
|
450 // Arbitrarily select the first entry for spilling. |
|
451 int stackSpillCandidate = 0; |
|
452 TraceInterval fromInterval = getMappingFrom(stackSpillCandidate); |
|
453 // allocate new stack slot |
|
454 VirtualStackSlot spillSlot = getAllocator().getFrameMapBuilder().allocateSpillSlot(allocator.getKind(fromInterval)); |
|
455 spillInterval(stackSpillCandidate, fromInterval, spillSlot); |
|
456 } |
|
457 |
|
458 protected void spillInterval(int spillCandidate, TraceInterval fromInterval, AllocatableValue spillSlot) { |
|
459 assert mappingFrom.get(spillCandidate).equals(fromInterval); |
|
460 TraceInterval spillInterval = getAllocator().createDerivedInterval(fromInterval); |
|
461 |
|
462 // add a dummy range because real position is difficult to calculate |
|
463 // Note: this range is a special case when the integrity of the allocation is |
|
464 // checked |
|
465 spillInterval.addRange(1, 2); |
|
466 |
|
467 spillInterval.assignLocation(spillSlot); |
|
468 |
|
469 if (debug.isLogEnabled()) { |
|
470 debug.log("created new Interval for spilling: %s", spillInterval); |
|
471 } |
|
472 blockRegisters(spillInterval); |
|
473 |
|
474 // insert a move from register to stack and update the mapping |
|
475 insertMove(fromInterval, spillInterval); |
|
476 mappingFrom.set(spillCandidate, spillInterval); |
|
477 unblockRegisters(fromInterval); |
|
478 } |
|
479 |
|
480 @SuppressWarnings("try") |
|
481 private void printMapping() { |
|
482 try (Indent indent = debug.logAndIndent("Mapping")) { |
|
483 for (int i = mappingFrom.size() - 1; i >= 0; i--) { |
|
484 TraceInterval fromInterval = mappingFrom.get(i); |
|
485 TraceInterval toInterval = mappingTo.get(i); |
|
486 String from; |
|
487 Value to = toInterval.location(); |
|
488 if (fromInterval == null) { |
|
489 from = mappingFromOpr.get(i).toString(); |
|
490 } else { |
|
491 from = fromInterval.location().toString(); |
|
492 } |
|
493 debug.log("move %s <- %s", from, to); |
|
494 } |
|
495 } |
|
496 } |
|
497 |
|
498 void setInsertPosition(List<LIRInstruction> insertList, int insertIdx) { |
|
499 assert this.insertIdx == -1 : "use moveInsertPosition instead of setInsertPosition when data already set"; |
|
500 |
|
501 createInsertionBuffer(insertList); |
|
502 this.insertIdx = insertIdx; |
|
503 } |
|
504 |
|
505 void moveInsertPosition(List<LIRInstruction> newInsertList, int newInsertIdx) { |
|
506 if (insertionBuffer.lirList() != null && (insertionBuffer.lirList() != newInsertList || this.insertIdx != newInsertIdx)) { |
|
507 // insert position changed . resolve current mappings |
|
508 resolveMappings(); |
|
509 } |
|
510 |
|
511 assert insertionBuffer.lirList() != newInsertList || newInsertIdx >= insertIdx : String.format("Decreasing insert index: old=%d new=%d", insertIdx, newInsertIdx); |
|
512 |
|
513 if (insertionBuffer.lirList() != newInsertList) { |
|
514 // block changed . append insertionBuffer because it is |
|
515 // bound to a specific block and create a new insertionBuffer |
|
516 appendInsertionBuffer(); |
|
517 createInsertionBuffer(newInsertList); |
|
518 } |
|
519 |
|
520 this.insertIdx = newInsertIdx; |
|
521 } |
|
522 |
|
523 public void addMapping(TraceInterval fromInterval, TraceInterval toInterval) { |
|
524 |
|
525 if (isIllegal(toInterval.location()) && toInterval.canMaterialize()) { |
|
526 if (debug.isLogEnabled()) { |
|
527 debug.log("no store to rematerializable interval %s needed", toInterval); |
|
528 } |
|
529 return; |
|
530 } |
|
531 if (isIllegal(fromInterval.location()) && fromInterval.canMaterialize()) { |
|
532 // Instead of a reload, re-materialize the value |
|
533 JavaConstant rematValue = fromInterval.getMaterializedValue(); |
|
534 addMapping(rematValue, toInterval); |
|
535 return; |
|
536 } |
|
537 if (debug.isLogEnabled()) { |
|
538 debug.log("add move mapping from %s to %s", fromInterval, toInterval); |
|
539 } |
|
540 |
|
541 assert fromInterval.operandNumber != toInterval.operandNumber : "from and to interval equal: " + fromInterval; |
|
542 assert LIRKind.verifyMoveKinds(allocator.getKind(toInterval), allocator.getKind(fromInterval), allocator.getRegisterAllocationConfig()) : String.format( |
|
543 "Kind mismatch: %s vs. %s, from=%s, to=%s", allocator.getKind(fromInterval), allocator.getKind(toInterval), fromInterval, toInterval); |
|
544 mappingFrom.add(fromInterval); |
|
545 mappingFromOpr.add(null); |
|
546 mappingTo.add(toInterval); |
|
547 } |
|
548 |
|
549 public void addMapping(Constant fromOpr, TraceInterval toInterval) { |
|
550 if (debug.isLogEnabled()) { |
|
551 debug.log("add move mapping from %s to %s", fromOpr, toInterval); |
|
552 } |
|
553 |
|
554 mappingFrom.add(null); |
|
555 mappingFromOpr.add(fromOpr); |
|
556 mappingTo.add(toInterval); |
|
557 } |
|
558 |
|
559 void resolveAndAppendMoves() { |
|
560 if (hasMappings()) { |
|
561 resolveMappings(); |
|
562 } |
|
563 appendInsertionBuffer(); |
|
564 } |
|
565 } |
|