 */
#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeBlob.hpp"
#include "code/vmreg.inline.hpp"
#include "gc/z/zBarrier.inline.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zBarrierSetRuntime.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/macros.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#endif // COMPILER1
#ifdef COMPILER2
#include "gc/z/c2/zBarrierSetC2.hpp"
#endif // COMPILER2
40 ZBarrierSetAssembler::ZBarrierSetAssembler() : |
|
41 _load_barrier_slow_stub(), |
|
42 _load_barrier_weak_slow_stub() {} |
|
43 |
44 |
44 #ifdef PRODUCT |
45 #ifdef PRODUCT |
45 #define BLOCK_COMMENT(str) /* nothing */ |
46 #define BLOCK_COMMENT(str) /* nothing */ |
46 #else |
47 #else |
47 #define BLOCK_COMMENT(str) __ block_comment(str) |
48 #define BLOCK_COMMENT(str) __ block_comment(str) |
292 void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm, |
294 void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm, |
293 DecoratorSet decorators) const { |
295 DecoratorSet decorators) const { |
294 __ prologue("zgc_load_barrier stub", false); |
296 __ prologue("zgc_load_barrier stub", false); |
295 |
297 |
296 // We don't use push/pop_clobbered_registers() - we need to pull out the result from r0. |
298 // We don't use push/pop_clobbered_registers() - we need to pull out the result from r0. |
297 for (int i = 0; i < 32; i +=2) { |
299 for (int i = 0; i < 32; i += 2) { |
298 __ stpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ pre(sp,-16))); |
300 __ stpd(as_FloatRegister(i), as_FloatRegister(i + 1), Address(__ pre(sp,-16))); |
299 } |
301 } |
300 |
302 |
301 RegSet saveRegs = RegSet::range(r0,r28) - RegSet::of(r0); |
303 const RegSet save_regs = RegSet::range(r1, r28); |
302 __ push(saveRegs, sp); |
304 __ push(save_regs, sp); |
303 |
305 |
304 // Setup arguments |
306 // Setup arguments |
305 __ load_parameter(0, c_rarg0); |
307 __ load_parameter(0, c_rarg0); |
306 __ load_parameter(1, c_rarg1); |
308 __ load_parameter(1, c_rarg1); |
307 |
309 |
308 __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2); |
310 __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2); |
309 |
311 |
310 __ pop(saveRegs, sp); |
312 __ pop(save_regs, sp); |
311 |
313 |
312 for (int i = 30; i >0; i -=2) { |
314 for (int i = 30; i >= 0; i -= 2) { |
313 __ ldpd(as_FloatRegister(i), as_FloatRegister(i+1), Address(__ post(sp, 16))); |
315 __ ldpd(as_FloatRegister(i), as_FloatRegister(i + 1), Address(__ post(sp, 16))); |
|
316 } |
|
317 |
|
318 __ epilogue(); |
|
319 } |
|
320 #endif // COMPILER1 |
|
321 |
|
322 #ifdef COMPILER2 |
|
323 |
|
324 OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) { |
|
325 if (!OptoReg::is_reg(opto_reg)) { |
|
326 return OptoReg::Bad; |
|
327 } |
|
328 |
|
329 const VMReg vm_reg = OptoReg::as_VMReg(opto_reg); |
|
330 if (vm_reg->is_FloatRegister()) { |
|
331 return opto_reg & ~1; |
|
332 } |
|
333 |
|
334 return opto_reg; |
|
335 } |
|
336 |
|
337 #undef __ |
|
338 #define __ _masm-> |
|
339 |
|
340 class ZSaveLiveRegisters { |
|
341 private: |
|
342 MacroAssembler* const _masm; |
|
343 RegSet _gp_regs; |
|
344 RegSet _fp_regs; |
|
345 |
|
346 public: |
|
347 void initialize(ZLoadBarrierStubC2* stub) { |
|
348 // Create mask of live registers |
|
349 RegMask live = stub->live(); |
|
350 |
|
351 // Record registers that needs to be saved/restored |
|
352 while (live.is_NotEmpty()) { |
|
353 const OptoReg::Name opto_reg = live.find_first_elem(); |
|
354 live.Remove(opto_reg); |
|
355 if (OptoReg::is_reg(opto_reg)) { |
|
356 const VMReg vm_reg = OptoReg::as_VMReg(opto_reg); |
|
357 if (vm_reg->is_Register()) { |
|
358 _gp_regs += RegSet::of(vm_reg->as_Register()); |
|
359 } else if (vm_reg->is_FloatRegister()) { |
|
360 _fp_regs += RegSet::of((Register)vm_reg->as_FloatRegister()); |
|
361 } else { |
|
362 fatal("Unknown register type"); |
|
363 } |
|
364 } |
314 } |
365 } |
315 |
366 |
316 __ epilogue(); |
367 // Remove C-ABI SOE registers, scratch regs and _ref register that will be updated |
317 } |
368 _gp_regs -= RegSet::range(r19, r30) + RegSet::of(r8, r9, stub->ref()); |
318 #endif // COMPILER1 |
369 } |
319 |
370 |
320 #undef __ |
371 ZSaveLiveRegisters(MacroAssembler* masm, ZLoadBarrierStubC2* stub) : |
321 #define __ cgen->assembler()-> |
372 _masm(masm), |
322 |
373 _gp_regs(), |
323 // Generates a register specific stub for calling |
374 _fp_regs() { |
324 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or |
375 |
325 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded(). |
376 // Figure out what registers to save/restore |
326 // |
377 initialize(stub); |
327 // The raddr register serves as both input and output for this stub. When the stub is |
378 |
328 // called the raddr register contains the object field address (oop*) where the bad oop |
379 // Save registers |
329 // was loaded from, which caused the slow path to be taken. On return from the stub the |
380 __ push(_gp_regs, sp); |
330 // raddr register contains the good/healed oop returned from |
381 __ push_fp(_fp_regs, sp); |
331 // ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or |
382 } |
332 // ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded(). |
383 |
333 static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) { |
384 ~ZSaveLiveRegisters() { |
334 // Don't generate stub for invalid registers |
385 // Restore registers |
335 if (raddr == zr || raddr == r29 || raddr == r30) { |
386 __ pop_fp(_fp_regs, sp); |
336 return NULL; |
387 __ pop(_gp_regs, sp); |
337 } |
388 } |
338 |
389 }; |
339 // Create stub name |
390 |
340 char name[64]; |
391 #undef __ |
341 const bool weak = (decorators & ON_WEAK_OOP_REF) != 0; |
392 #define __ _masm-> |
342 os::snprintf(name, sizeof(name), "zgc_load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name()); |
393 |
343 |
394 class ZSetupArguments { |
344 __ align(CodeEntryAlignment); |
395 private: |
345 StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode)); |
396 MacroAssembler* const _masm; |
346 address start = __ pc(); |
397 const Register _ref; |
347 |
398 const Address _ref_addr; |
348 // Save live registers |
399 |
349 RegSet savedRegs = RegSet::range(r0,r18) - RegSet::of(raddr); |
400 public: |
350 |
401 ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) : |
351 __ enter(); |
402 _masm(masm), |
352 __ push(savedRegs, sp); |
403 _ref(stub->ref()), |
353 |
404 _ref_addr(stub->ref_addr()) { |
354 // Setup arguments |
405 |
355 if (raddr != c_rarg1) { |
406 // Setup arguments |
356 __ mov(c_rarg1, raddr); |
407 if (_ref_addr.base() == noreg) { |
357 } |
408 // No self healing |
358 |
409 if (_ref != c_rarg0) { |
359 __ ldr(c_rarg0, Address(raddr)); |
410 __ mov(c_rarg0, _ref); |
360 |
411 } |
361 // Call barrier function |
412 __ mov(c_rarg1, 0); |
362 __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1); |
413 } else { |
363 |
414 // Self healing |
364 // Move result returned in r0 to raddr, if needed |
415 if (_ref == c_rarg0) { |
365 if (raddr != r0) { |
416 // _ref is already at correct place |
366 __ mov(raddr, r0); |
417 __ lea(c_rarg1, _ref_addr); |
367 } |
418 } else if (_ref != c_rarg1) { |
368 |
419 // _ref is in wrong place, but not in c_rarg1, so fix it first |
369 __ pop(savedRegs, sp); |
420 __ lea(c_rarg1, _ref_addr); |
370 __ leave(); |
421 __ mov(c_rarg0, _ref); |
371 __ ret(lr); |
422 } else if (_ref_addr.base() != c_rarg0 && _ref_addr.index() != c_rarg0) { |
372 |
423 assert(_ref == c_rarg1, "Mov ref first, vacating c_rarg0"); |
373 return start; |
424 __ mov(c_rarg0, _ref); |
374 } |
425 __ lea(c_rarg1, _ref_addr); |
375 |
426 } else { |
376 #undef __ |
427 assert(_ref == c_rarg1, "Need to vacate c_rarg1 and _ref_addr is using c_rarg0"); |
377 |
428 if (_ref_addr.base() == c_rarg0 || _ref_addr.index() == c_rarg0) { |
378 static void barrier_stubs_init_inner(const char* label, const DecoratorSet decorators, address* stub) { |
429 __ mov(rscratch2, c_rarg1); |
379 const int nregs = 28; // Exclude FP, XZR, SP from calculation. |
430 __ lea(c_rarg1, _ref_addr); |
380 const int code_size = nregs * 254; // Rough estimate of code size |
431 __ mov(c_rarg0, rscratch2); |
381 |
432 } else { |
382 ResourceMark rm; |
433 ShouldNotReachHere(); |
383 |
434 } |
384 CodeBuffer buf(BufferBlob::create(label, code_size)); |
435 } |
385 StubCodeGenerator cgen(&buf); |
436 } |
386 |
437 } |
387 for (int i = 0; i < nregs; i++) { |
438 |
388 const Register reg = as_Register(i); |
439 ~ZSetupArguments() { |
389 stub[i] = generate_load_barrier_stub(&cgen, reg, decorators); |
440 // Transfer result |
390 } |
441 if (_ref != r0) { |
391 } |
442 __ mov(_ref, r0); |
392 |
443 } |
393 void ZBarrierSetAssembler::barrier_stubs_init() { |
444 } |
394 barrier_stubs_init_inner("zgc_load_barrier_stubs", ON_STRONG_OOP_REF, _load_barrier_slow_stub); |
445 }; |
395 barrier_stubs_init_inner("zgc_load_barrier_weak_stubs", ON_WEAK_OOP_REF, _load_barrier_weak_slow_stub); |
446 |
396 } |
447 #undef __ |
397 |
448 #define __ masm-> |
398 address ZBarrierSetAssembler::load_barrier_slow_stub(Register reg) { |
449 |
399 return _load_barrier_slow_stub[reg->encoding()]; |
450 void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const { |
400 } |
451 BLOCK_COMMENT("ZLoadBarrierStubC2"); |
401 |
452 |
402 address ZBarrierSetAssembler::load_barrier_weak_slow_stub(Register reg) { |
453 // Stub entry |
403 return _load_barrier_weak_slow_stub[reg->encoding()]; |
454 __ bind(*stub->entry()); |
404 } |
455 |
|
456 { |
|
457 ZSaveLiveRegisters save_live_registers(masm, stub); |
|
458 ZSetupArguments setup_arguments(masm, stub); |
|
459 __ mov(rscratch1, stub->slow_path()); |
|
460 __ blr(rscratch1); |
|
461 } |
|
462 |
|
463 // Stub exit |
|
464 __ b(*stub->continuation()); |
|
465 } |
|
466 |
|
467 #undef __ |
|
468 |
|
469 #endif // COMPILER2 |