|
1 /* |
|
2 * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved. |
|
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
|
4 * |
|
5 * This code is free software; you can redistribute it and/or modify it |
|
6 * under the terms of the GNU General Public License version 2 only, as |
|
7 * published by the Free Software Foundation. |
|
8 * |
|
9 * This code is distributed in the hope that it will be useful, but WITHOUT |
|
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
|
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
|
12 * version 2 for more details (a copy is included in the LICENSE file that |
|
13 * accompanied this code). |
|
14 * |
|
15 * You should have received a copy of the GNU General Public License version |
|
16 * 2 along with this work; if not, write to the Free Software Foundation, |
|
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
|
18 * |
|
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
|
20 * or visit www.oracle.com if you need additional information or have any |
|
21 * questions. |
|
22 */ |
|
23 |
|
24 #include "precompiled.hpp" |
|
25 #include "asm/macroAssembler.inline.hpp" |
|
26 #include "code/codeBlob.hpp" |
|
27 #include "code/vmreg.inline.hpp" |
|
28 #include "gc/z/zBarrier.inline.hpp" |
|
29 #include "gc/z/zBarrierSet.hpp" |
|
30 #include "gc/z/zBarrierSetAssembler.hpp" |
|
31 #include "gc/z/zBarrierSetRuntime.hpp" |
|
32 #include "gc/z/zThreadLocalData.hpp" |
|
33 #include "memory/resourceArea.hpp" |
|
34 #include "runtime/sharedRuntime.hpp" |
|
35 #include "utilities/macros.hpp" |
|
36 #ifdef COMPILER1 |
|
37 #include "c1/c1_LIRAssembler.hpp" |
|
38 #include "c1/c1_MacroAssembler.hpp" |
|
39 #include "gc/z/c1/zBarrierSetC1.hpp" |
|
40 #endif // COMPILER1 |
|
41 #ifdef COMPILER2 |
|
42 #include "gc/z/c2/zBarrierSetC2.hpp" |
|
43 #endif // COMPILER2 |
|
44 |
|
45 #ifdef PRODUCT |
|
46 #define BLOCK_COMMENT(str) /* nothing */ |
|
47 #else |
|
48 #define BLOCK_COMMENT(str) __ block_comment(str) |
|
49 #endif |
|
50 |
|
51 #undef __ |
|
52 #define __ masm-> |
|
53 |
|
// Emits the ZGC load barrier for a reference load. The reference is loaded
// into dst and tested against the per-thread address bad mask; if any bad
// bits are set, caller-visible register state is saved and the runtime load
// barrier is called with the reference and the address it was loaded from.
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  assert_different_registers(rscratch1, rscratch2, src.base());
  assert_different_registers(rscratch1, rscratch2, dst);

  // All general purpose registers r0-r28 except dst and the two scratch
  // registers must survive the slow-path runtime call below.
  RegSet savedRegs = RegSet::range(r0, r28) - RegSet::of(dst, rscratch1, rscratch2);

  Label done;

  // Load bad mask into scratch register.
  __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
  // Materialize the field address before the load, since dst may alias a
  // register used by the src addressing mode.
  __ lea(rscratch2, src);
  __ ldr(dst, src);

  // Test reference against bad mask. If mask bad, then we need to fix it up.
  __ tst(dst, rscratch1);
  __ br(Assembler::EQ, done);

  __ enter();

  __ push(savedRegs, sp);

  // Argument 0: the (bad) reference that was loaded
  if (c_rarg0 != dst) {
    __ mov(c_rarg0, dst);
  }
  // Argument 1: the address the reference was loaded from (saved in rscratch2 above)
  __ mov(c_rarg1, rscratch2);

  // Save the low 64 bits (T1D) of all SIMD/FP registers v0-v31, four
  // registers per st1, growing the stack downwards via negative post-index.
  int step = 4 * wordSize;
  __ mov(rscratch2, -step);
  __ sub(sp, sp, step);

  for (int i = 28; i >= 4; i -= 4) {
    __ st1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
           as_FloatRegister(i+3), __ T1D, Address(__ post(sp, rscratch2)));
  }
  // v0-v3 are stored last, at the final (lowest) sp, with no post-adjust.
  __ st1(as_FloatRegister(0), as_FloatRegister(1), as_FloatRegister(2),
         as_FloatRegister(3), __ T1D, Address(sp));

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  // Restore the SIMD/FP registers in ascending order, unwinding the stack.
  for (int i = 0; i <= 28; i += 4) {
    __ ld1(as_FloatRegister(i), as_FloatRegister(i+1), as_FloatRegister(i+2),
           as_FloatRegister(i+3), __ T1D, Address(__ post(sp, step)));
  }

  // Make sure dst has the return value.
  if (dst != r0) {
    __ mov(dst, r0);
  }

  __ pop(savedRegs, sp);
  __ leave();

  __ bind(done);
}
|
120 |
|
121 #ifdef ASSERT |
|
122 |
|
// Debug-only (ASSERT builds) store barrier: before delegating the actual
// store, verify that a non-null reference value being stored has no address
// bad mask bits set, and stop the VM if it does.
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                    DecoratorSet decorators,
                                    BasicType type,
                                    Address dst,
                                    Register val,
                                    Register tmp1,
                                    Register tmp2) {
  // Verify value
  if (is_reference_type(type)) {
    // Note that src could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg, so borrow rscratch1 instead
      // and preserve its previous value around the check.
      RegSet savedRegs = RegSet::of(rscratch1);
      __ push(savedRegs, sp);

      __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
      __ tst(val, rscratch1);
      __ br(Assembler::EQ, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop(savedRegs, sp);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);
}
|
154 |
|
155 #endif // ASSERT |
|
156 |
|
// Before an oop arraycopy, calls the runtime to apply the load barrier to
// the source array's elements (src, count). Non-oop copies need no barrier.
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  assert_different_registers(src, count, rscratch1);

  // Save all registers; the argument shuffle and runtime call clobber them.
  __ pusha();

  // Move (src, count) into (c_rarg0, c_rarg1), taking care not to overwrite
  // a value before it has been read when the registers overlap.
  if (count == c_rarg0) {
    if (src == c_rarg1) {
      // exactly backwards!! swap via rscratch1
      __ mov(rscratch1, c_rarg0);
      __ mov(c_rarg0, c_rarg1);
      __ mov(c_rarg1, rscratch1);
    } else {
      // count occupies c_rarg0; move it out before c_rarg0 is overwritten
      __ mov(c_rarg1, count);
      __ mov(c_rarg0, src);
    }
  } else {
    // No conflict: src may safely go first.
    __ mov(c_rarg0, src);
    __ mov(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ popa();
  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
|
195 |
|
// Resolves a jobject from native/JNI code. After the generic resolve, the
// resolved reference is tested against the address bad mask (located at a
// fixed offset from the JNIEnv pointer); a bad reference branches to
// slowpath instead of being used directly.
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // The Address offset is too large to direct load - -784. Our range is +127, -128.
  // Materialize the (negative) offset from JNIEnv to the bad mask instead.
  __ mov(tmp, (long int)(in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
              in_bytes(JavaThread::jni_environment_offset())));

  // Load address bad mask
  __ add(tmp, jni_env, tmp);
  __ ldr(tmp, Address(tmp));

  // Check address bad mask; any bad bit set means we must take the slow path
  __ tst(robj, tmp);
  __ br(Assembler::NE, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
|
222 |
|
223 #ifdef COMPILER1 |
|
224 |
|
225 #undef __ |
|
226 #define __ ce->masm()-> |
|
227 |
|
// Emits the C1 load barrier fast-path test: ANDs the reference with the
// per-thread address bad mask, leaving the result in the condition flags
// (EQ means no bad bits, i.e. a good reference). rheapbase is borrowed as
// a scratch register to hold the mask.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  assert_different_registers(rheapbase, rthread, ref->as_register());

  __ ldr(rheapbase, address_bad_mask_from_thread(rthread));
  __ tst(ref->as_register(), rheapbase);
}
|
235 |
|
// Emits the per-site C1 load barrier slow path: spills the incoming state,
// stores (ref, ref_addr) as stub parameters, far-calls the shared runtime
// stub, and moves the healed reference from r0 into the ref register.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save r0 unless it is the result or tmp register
  // Set up SP to accommodate parameters and maybe r0
  // (the 16/32-byte amounts keep sp 16-byte aligned).
  if (ref != r0 && tmp != r0) {
    __ sub(sp, sp, 32);
    __ str(r0, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result
  __ verify_oop(r0, "Bad oop");

  // Move result into place
  if (ref != r0) {
    __ mov(ref, r0);
  }

  // Restore r0 unless it is the result or tmp register
  // (mirrors the conditional save above).
  if (ref != r0 && tmp != r0) {
    __ ldr(r0, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ b(*stub->continuation());
}
|
290 |
|
291 #undef __ |
|
292 #define __ sasm-> |
|
293 |
|
// Generates the shared C1 runtime stub that the per-site load barrier stubs
// far_call into. Saves the low 64 bits of v0-v31 (stp of D registers) and
// r1-r28, loads the two stub parameters, and calls the runtime load barrier;
// the healed reference is deliberately left in r0 for the caller.
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  __ prologue("zgc_load_barrier stub", false);

  // We don't use push/pop_clobbered_registers() - we need to pull out the result from r0.
  // Push FP registers in pairs with pre-decrement.
  for (int i = 0; i < 32; i += 2) {
    __ stpd(as_FloatRegister(i), as_FloatRegister(i + 1), Address(__ pre(sp, -16)));
  }

  // Save all general purpose registers except r0, which carries the result.
  const RegSet save_regs = RegSet::range(r1, r28);
  __ push(save_regs, sp);

  // Setup arguments: parameter 0 is the reference, parameter 1 its address
  // (as stored by generate_c1_load_barrier_stub).
  __ load_parameter(0, c_rarg0);
  __ load_parameter(1, c_rarg1);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  __ pop(save_regs, sp);

  // Pop FP registers in the reverse (LIFO) order of the pushes above.
  for (int i = 30; i >= 0; i -= 2) {
    __ ldpd(as_FloatRegister(i), as_FloatRegister(i + 1), Address(__ post(sp, 16)));
  }

  __ epilogue();
}
|
320 #endif // COMPILER1 |
|
321 |
|
322 #ifdef COMPILER2 |
|
323 |
|
324 OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) { |
|
325 if (!OptoReg::is_reg(opto_reg)) { |
|
326 return OptoReg::Bad; |
|
327 } |
|
328 |
|
329 const VMReg vm_reg = OptoReg::as_VMReg(opto_reg); |
|
330 if (vm_reg->is_FloatRegister()) { |
|
331 return opto_reg & ~1; |
|
332 } |
|
333 |
|
334 return opto_reg; |
|
335 } |
|
336 |
|
337 #undef __ |
|
338 #define __ _masm-> |
|
339 |
|
// RAII helper that saves (in the constructor) and restores (in the
// destructor) the registers recorded as live across a C2 load barrier stub.
// The live set comes from the stub's RegMask; C-ABI callee-saved registers
// (r19-r30), the scratch registers r8/r9, and the ref register itself
// (which is overwritten with the result) are excluded from the save set.
class ZSaveLiveRegisters {
private:
  MacroAssembler* const _masm;
  RegSet _gp_regs;   // live general purpose registers to save/restore
  RegSet _fp_regs;   // live floating point registers to save/restore

public:
  void initialize(ZLoadBarrierStubC2* stub) {
    // Create mask of live registers
    RegMask live = stub->live();

    // Record registers that needs to be saved/restored
    while (live.is_NotEmpty()) {
      const OptoReg::Name opto_reg = live.find_first_elem();
      live.Remove(opto_reg);
      if (OptoReg::is_reg(opto_reg)) {
        const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
        if (vm_reg->is_Register()) {
          _gp_regs += RegSet::of(vm_reg->as_Register());
        } else if (vm_reg->is_FloatRegister()) {
          // RegSet tracks registers by encoding, so an FP register can be
          // recorded via this cast; push_fp/pop_fp below re-interpret the
          // encodings as FP registers.
          _fp_regs += RegSet::of((Register)vm_reg->as_FloatRegister());
        } else {
          fatal("Unknown register type");
        }
      }
    }

    // Remove C-ABI SOE registers, scratch regs and _ref register that will be updated
    _gp_regs -= RegSet::range(r19, r30) + RegSet::of(r8, r9, stub->ref());
  }

  ZSaveLiveRegisters(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _gp_regs(),
      _fp_regs() {

    // Figure out what registers to save/restore
    initialize(stub);

    // Save registers
    __ push(_gp_regs, sp);
    __ push_fp(_fp_regs, sp);
  }

  ~ZSaveLiveRegisters() {
    // Restore registers in the reverse order of the saves
    __ pop_fp(_fp_regs, sp);
    __ pop(_gp_regs, sp);
  }
};
|
390 |
|
391 #undef __ |
|
392 #define __ _masm-> |
|
393 |
|
// RAII helper that marshals the stub's (ref, ref_addr) pair into the C
// calling convention registers (c_rarg0 = reference, c_rarg1 = address or
// null) without clobbering either value, and on destruction moves the
// runtime's result from r0 back into the ref register.
class ZSetupArguments {
private:
  MacroAssembler* const _masm;
  const Register _ref;        // register holding the (bad) reference
  const Address _ref_addr;    // address of the field; base == noreg if unknown

public:
  ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _ref(stub->ref()),
      _ref_addr(stub->ref_addr()) {

    // Setup arguments
    if (_ref_addr.base() == noreg) {
      // No self healing: pass a null address in c_rarg1
      if (_ref != c_rarg0) {
        __ mov(c_rarg0, _ref);
      }
      __ mov(c_rarg1, 0);
    } else {
      // Self healing: the ordering below ensures neither _ref nor the
      // registers used by _ref_addr are overwritten before being consumed.
      if (_ref == c_rarg0) {
        // _ref is already at correct place
        __ lea(c_rarg1, _ref_addr);
      } else if (_ref != c_rarg1) {
        // _ref is in wrong place, but not in c_rarg1, so fix it first
        __ lea(c_rarg1, _ref_addr);
        __ mov(c_rarg0, _ref);
      } else if (_ref_addr.base() != c_rarg0 && _ref_addr.index() != c_rarg0) {
        assert(_ref == c_rarg1, "Mov ref first, vacating c_rarg0");
        __ mov(c_rarg0, _ref);
        __ lea(c_rarg1, _ref_addr);
      } else {
        assert(_ref == c_rarg1, "Need to vacate c_rarg1 and _ref_addr is using c_rarg0");
        // NOTE(review): given the failed condition of the previous else-if,
        // this test is always true, so the ShouldNotReachHere() arm below is
        // dead code - kept as a defensive guard.
        if (_ref_addr.base() == c_rarg0 || _ref_addr.index() == c_rarg0) {
          // Park _ref in rscratch2 while c_rarg0/c_rarg1 are rewritten.
          __ mov(rscratch2, c_rarg1);
          __ lea(c_rarg1, _ref_addr);
          __ mov(c_rarg0, rscratch2);
        } else {
          ShouldNotReachHere();
        }
      }
    }
  }

  ~ZSetupArguments() {
    // Transfer result from r0 into the stub's ref register
    if (_ref != r0) {
      __ mov(_ref, r0);
    }
  }
};
|
446 |
|
447 #undef __ |
|
448 #define __ masm-> |
|
449 |
|
// Emits the out-of-line C2 load barrier slow path: saves the live registers,
// marshals (ref, ref_addr) into the C calling convention, and calls the
// stub's slow-path entry point through rscratch1.
void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
  BLOCK_COMMENT("ZLoadBarrierStubC2");

  // Stub entry
  __ bind(*stub->entry());

  {
    // Destructors run in reverse declaration order: the result is moved
    // from r0 into ref before the saved registers are restored.
    ZSaveLiveRegisters save_live_registers(masm, stub);
    ZSetupArguments setup_arguments(masm, stub);
    __ mov(rscratch1, stub->slow_path());
    __ blr(rscratch1);
  }

  // Stub exit
  __ b(*stub->continuation());
}
|
466 |
|
467 #undef __ |
|
468 |
|
469 #endif // COMPILER2 |