  if (satb_log_enqueue_frameless == 0) {
    generate_satb_log_enqueue(false);
    assert(satb_log_enqueue_frameless != 0, "postcondition.");
  }
}

#ifdef COMPILER1

#undef __
#define __ ce->masm()->

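// C1 slow-path stub for the G1 SATB pre-write barrier. It is entered only
// while concurrent marking is active; it records the value that is about to
// be overwritten so the snapshot-at-the-beginning invariant is preserved.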
void G1BarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, G1PreBarrierStub* stub) {
  G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
  // At this point we know that marking is in progress.
  // If do_load() is true then we have to emit the
  // load of the previous value; otherwise it has already
  // been loaded into _pre_val.

  __ bind(*stub->entry());

  assert(stub->pre_val()->is_register(), "Precondition.");
  Register pre_val_reg = stub->pre_val()->as_register();

  if (stub->do_load()) {
    ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
  }

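  // A null previous value never needs to be recorded, so branch straight to
  // the continuation; pick the branch form that fits the displacement.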
  if (__ is_in_wdisp16_range(*stub->continuation())) {
    __ br_null(pre_val_reg, /*annul*/false, Assembler::pt, *stub->continuation());
  } else {
    __ cmp(pre_val_reg, G0);
    __ brx(Assembler::equal, false, Assembler::pn, *stub->continuation());
  }
  __ delayed()->nop();

  __ call(bs->pre_barrier_c1_runtime_code_blob()->code_begin());
  __ delayed()->mov(pre_val_reg, G4);
  __ br(Assembler::always, false, Assembler::pt, *stub->continuation());
  __ delayed()->nop();
}

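// C1 slow-path stub for the G1 post-write barrier: hands the store address to
// the shared runtime stub, which dirties the card and enqueues it for
// remembered set update.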
void G1BarrierSetAssembler::gen_post_barrier_stub(LIR_Assembler* ce, G1PostBarrierStub* stub) {
  G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
  __ bind(*stub->entry());

  assert(stub->addr()->is_register(), "Precondition.");
  assert(stub->new_val()->is_register(), "Precondition.");
  Register addr_reg = stub->addr()->as_pointer_register();
  Register new_val_reg = stub->new_val()->as_register();

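  // Storing a null never creates a cross-region reference, so there is no
  // card to mark; skip to the continuation in that case.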
  if (__ is_in_wdisp16_range(*stub->continuation())) {
    __ br_null(new_val_reg, /*annul*/false, Assembler::pt, *stub->continuation());
  } else {
    __ cmp(new_val_reg, G0);
    __ brx(Assembler::equal, false, Assembler::pn, *stub->continuation());
  }
  __ delayed()->nop();

  __ call(bs->post_barrier_c1_runtime_code_blob()->code_begin());
  __ delayed()->mov(addr_reg, G4);
  __ br(Assembler::always, false, Assembler::pt, *stub->continuation());
  __ delayed()->nop();
}

#undef __
#define __ sasm->

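// Runtime stub behind the C1 pre-barrier slow path. It receives the previous
// value in G4 and pushes it onto the calling thread's SATB mark queue,
// refilling the buffer through the runtime when it is full.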
void G1BarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
  __ prologue("g1_pre_barrier", false);

  // G4: previous value of memory

  Register pre_val = G4;
  Register tmp = G1_scratch;
  Register tmp2 = G3_scratch;

  Label refill, restart;
  int satb_q_active_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
  int satb_q_index_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_index_offset());
  int satb_q_buf_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset());

  // Is marking still active?
  if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
    __ ld(G2_thread, satb_q_active_byte_offset, tmp);
  } else {
    assert(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
    __ ldsb(G2_thread, satb_q_active_byte_offset, tmp);
  }
  __ cmp_and_br_short(tmp, G0, Assembler::notEqual, Assembler::pt, restart);
  __ retl();
  __ delayed()->nop();

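  // Marking is active: try to store pre_val into the thread-local SATB
  // buffer. The restart label is also the retry point after a refill.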
  __ bind(restart);
  // Load the index into the SATB buffer. SATBMarkQueue::_index is a
  // size_t so ld_ptr is appropriate
  __ ld_ptr(G2_thread, satb_q_index_byte_offset, tmp);

  // index == 0?
  __ cmp_and_brx_short(tmp, G0, Assembler::equal, Assembler::pn, refill);

  __ ld_ptr(G2_thread, satb_q_buf_byte_offset, tmp2);
  __ sub(tmp, oopSize, tmp);

  __ st_ptr(pre_val, tmp2, tmp); // [_buf + index] := pre_val
  // Use return-from-leaf
  __ retl();
  __ delayed()->st_ptr(tmp, G2_thread, satb_q_index_byte_offset);

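  // The SATB buffer is full: save live registers and let the runtime hand the
  // full buffer to the queue set and install a fresh one, then retry.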
  __ bind(refill);

  __ save_live_registers_no_oop_map(true);

  __ call_VM_leaf(L7_thread_cache,
                  CAST_FROM_FN_PTR(address,
                                   SATBMarkQueueSet::handle_zero_index_for_thread),
                  G2_thread);

  __ restore_live_registers(true);

  __ br(Assembler::always, /*annul*/false, Assembler::pt, restart);
  __ epilogue();
}

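// Runtime stub behind the C1 post-barrier slow path. It receives the store
// address in G4, filters young and already-dirty cards, dirties the card, and
// enqueues the card address on the thread's dirty card (update) buffer.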
void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler* sasm) {
  __ prologue("g1_post_barrier", false);

  G1BarrierSet* bs = barrier_set_cast<G1BarrierSet>(BarrierSet::barrier_set());

  Register addr = G4;
  Register cardtable = G5;
  Register tmp = G1_scratch;
  Register tmp2 = G3_scratch;
  jbyte* byte_map_base = bs->card_table()->byte_map_base();

  Label not_already_dirty, restart, refill, young_card;

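  // Convert the store address into a card index by shifting out the low
  // card_shift bits.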
#ifdef _LP64
  __ srlx(addr, CardTable::card_shift, addr);
#else
  __ srl(addr, CardTable::card_shift, addr);
#endif

  AddressLiteral rs((address)byte_map_base);
  __ set(rs, cardtable);         // cardtable := <card table base>
  __ ldub(addr, cardtable, tmp); // tmp := [addr + cardtable]

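  // Stores into young regions never need remembered set updates; return
  // immediately if this is a young card.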
  __ cmp_and_br_short(tmp, G1CardTable::g1_young_card_val(), Assembler::equal, Assembler::pt, young_card);

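  // Order the earlier reference store before re-reading the card (StoreLoad),
  // then re-check: another thread may have dirtied the card in the meantime.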
  __ membar(Assembler::Membar_mask_bits(Assembler::StoreLoad));
  __ ldub(addr, cardtable, tmp); // tmp := [addr + cardtable]

  assert(G1CardTable::dirty_card_val() == 0, "otherwise check this code");
  __ cmp_and_br_short(tmp, G0, Assembler::notEqual, Assembler::pt, not_already_dirty);

  __ bind(young_card);
  // We didn't take the branch, so we're already dirty: return.
  // Use return-from-leaf
  __ retl();
  __ delayed()->nop();

  // Not dirty.
  __ bind(not_already_dirty);

  // Get the card address (addr + cardtable) into a register by itself.
  __ add(addr, cardtable, tmp2);

  // First, dirty it.
  __ stb(G0, tmp2, 0); // [cardPtr] := 0 (i.e., dirty).

  Register tmp3 = cardtable;
  Register tmp4 = tmp;

  // these registers are now dead
  addr = cardtable = tmp = noreg;

  int dirty_card_q_index_byte_offset = in_bytes(G1ThreadLocalData::dirty_card_queue_index_offset());
  int dirty_card_q_buf_byte_offset = in_bytes(G1ThreadLocalData::dirty_card_queue_buffer_offset());

  __ bind(restart);

  // Get the index into the update buffer. DirtyCardQueue::_index is
  // a size_t so ld_ptr is appropriate here.
  __ ld_ptr(G2_thread, dirty_card_q_index_byte_offset, tmp3);

  // index == 0?
  __ cmp_and_brx_short(tmp3, G0, Assembler::equal, Assembler::pn, refill);

  __ ld_ptr(G2_thread, dirty_card_q_buf_byte_offset, tmp4);
  __ sub(tmp3, oopSize, tmp3);

  __ st_ptr(tmp2, tmp4, tmp3); // [_buf + index] := <address_of_card>
  // Use return-from-leaf
  __ retl();
  __ delayed()->st_ptr(tmp3, G2_thread, dirty_card_q_index_byte_offset);

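  // The dirty card buffer is full: save live registers and let the runtime
  // hand the full buffer to the queue set and install a fresh one, then retry.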
  __ bind(refill);

  __ save_live_registers_no_oop_map(true);

  __ call_VM_leaf(L7_thread_cache,
                  CAST_FROM_FN_PTR(address,
                                   DirtyCardQueueSet::handle_zero_index_for_thread),
                  G2_thread);

  __ restore_live_registers(true);

  __ br(Assembler::always, /*annul*/false, Assembler::pt, restart);
  __ epilogue();
}

#undef __

#endif // COMPILER1