188 RegisterSaver::live_reg_frame_size(reg_set) / VMRegImpl::stack_slot_size; |
183 RegisterSaver::live_reg_frame_size(reg_set) / VMRegImpl::stack_slot_size; |
189 sasm->set_frame_size(frame_size_in_slots / VMRegImpl::slots_per_word); |
184 sasm->set_frame_size(frame_size_in_slots / VMRegImpl::slots_per_word); |
190 return RegisterSaver::save_live_registers(sasm, reg_set); |
185 return RegisterSaver::save_live_registers(sasm, reg_set); |
191 } |
186 } |
192 |
187 |
193 static OopMap* save_volatile_registers(StubAssembler* sasm, Register return_pc = Z_R14) { |
|
194 __ block_comment("save_volatile_registers"); |
|
195 RegisterSaver::RegisterSet reg_set = RegisterSaver::all_volatile_registers; |
|
196 int frame_size_in_slots = |
|
197 RegisterSaver::live_reg_frame_size(reg_set) / VMRegImpl::stack_slot_size; |
|
198 sasm->set_frame_size(frame_size_in_slots / VMRegImpl::slots_per_word); |
|
199 return RegisterSaver::save_live_registers(sasm, reg_set, return_pc); |
|
200 } |
|
201 |
|
202 static void restore_live_registers(StubAssembler* sasm, bool restore_fpu_registers = true) { |
188 static void restore_live_registers(StubAssembler* sasm, bool restore_fpu_registers = true) { |
203 __ block_comment("restore_live_registers"); |
189 __ block_comment("restore_live_registers"); |
204 RegisterSaver::RegisterSet reg_set = |
190 RegisterSaver::RegisterSet reg_set = |
205 restore_fpu_registers ? RegisterSaver::all_registers : RegisterSaver::all_integer_registers; |
191 restore_fpu_registers ? RegisterSaver::all_registers : RegisterSaver::all_integer_registers; |
206 RegisterSaver::restore_live_registers(sasm, reg_set); |
192 RegisterSaver::restore_live_registers(sasm, reg_set); |
762 __ ret(0); |
742 __ ret(0); |
763 } |
743 } |
764 break; |
744 break; |
765 #endif // TODO |
745 #endif // TODO |
766 |
746 |
#if INCLUDE_ALL_GCS
    // G1 SATB pre-barrier slow path: enqueue the previous value of a field
    // (passed in Z_R1_scratch) into the thread-local SATB mark queue while
    // concurrent marking is active.
    case g1_pre_barrier_slow_id:
      { // Z_R1_scratch: previous value of memory

        BarrierSet* bs = BarrierSet::barrier_set();
        if (bs->kind() != BarrierSet::G1BarrierSet) {
          // Stub was requested for a non-G1 collector; nothing sensible to emit.
          __ should_not_reach_here(FILE_AND_LINE);
          break;
        }

        __ set_info("g1_pre_barrier_slow_id", dont_gc_arguments);

        Register pre_val = Z_R1_scratch;
        Register tmp = Z_R6; // Must be non-volatile because it is used to save pre_val.
        Register tmp2 = Z_R7;

        Label refill, restart, marking_not_active;
        int satb_q_active_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
        int satb_q_index_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_index_offset());
        int satb_q_buf_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset());

        // Save tmp registers (see assertion in G1PreBarrierStub::emit_code()).
        // The caller's frame reserves these two slots for the stub's use.
        __ z_stg(tmp, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
        __ z_stg(tmp2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);

        // Is marking still active? Width of the active flag differs by build,
        // so pick the matching load-and-test instruction.
        if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
          __ load_and_test_int(tmp, Address(Z_thread, satb_q_active_byte_offset));
        } else {
          assert(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
          __ load_and_test_byte(tmp, Address(Z_thread, satb_q_active_byte_offset));
        }
        __ z_bre(marking_not_active); // Activity indicator is zero, so there is no marking going on currently.

        __ bind(restart);
        // Load the index into the SATB buffer. SATBMarkQueue::_index is a
        // size_t so ld_ptr is appropriate.
        __ z_ltg(tmp, satb_q_index_byte_offset, Z_R0, Z_thread);

        // index == 0? Buffer is full (index counts down to zero) -> refill.
        __ z_brz(refill);

        __ z_lg(tmp2, satb_q_buf_byte_offset, Z_thread);
        __ add2reg(tmp, -oopSize);

        __ z_stg(pre_val, 0, tmp, tmp2); // [_buf + index] := pre_val (the previous oop value)
        __ z_stg(tmp, satb_q_index_byte_offset, Z_thread); // Publish the decremented index.

        __ bind(marking_not_active);
        // Restore tmp registers (see assertion in G1PreBarrierStub::emit_code()).
        __ z_lg(tmp, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
        __ z_lg(tmp2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
        __ z_br(Z_R14);

        // Slow-slow path: SATB buffer is full; call into the VM to hand it off
        // and install a fresh buffer, then retry the enqueue.
        __ bind(refill);
        save_volatile_registers(sasm);
        __ z_lgr(tmp, pre_val); // save pre_val (tmp is non-volatile, survives the leaf call)
        __ call_VM_leaf(CAST_FROM_FN_PTR(address, SATBMarkQueueSet::handle_zero_index_for_thread),
                        Z_thread);
        __ z_lgr(pre_val, tmp); // restore pre_val
        restore_volatile_registers(sasm);
        __ z_bru(restart);
      }
      break;
|
831 |
|
    // G1 post-barrier slow path: after a reference store, dirty the card for
    // the updated slot (address passed in Z_R1_scratch) and enqueue it into
    // the thread-local dirty-card queue, unless the card is young or already dirty.
    case g1_post_barrier_slow_id:
      { // Z_R1_scratch: oop address, address of updated memory slot
        BarrierSet* bs = BarrierSet::barrier_set();
        if (bs->kind() != BarrierSet::G1BarrierSet) {
          // Stub was requested for a non-G1 collector; nothing sensible to emit.
          __ should_not_reach_here(FILE_AND_LINE);
          break;
        }

        __ set_info("g1_post_barrier_slow_id", dont_gc_arguments);

        // addr_oop and addr_card alias the same register: once the card
        // address is computed, the oop address is dead.
        Register addr_oop = Z_R1_scratch;
        Register addr_card = Z_R1_scratch;
        Register r1 = Z_R6; // Must be saved/restored.
        Register r2 = Z_R7; // Must be saved/restored.
        Register cardtable = r1; // Must be non-volatile, because it is used to save addr_card.
        jbyte* byte_map_base = ci_card_table_address();

        // Save registers used below (see assertion in G1PreBarrierStub::emit_code()).
        __ z_stg(r1, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);

        Label not_already_dirty, restart, refill, young_card;

        // Calculate address of card corresponding to the updated oop slot.
        AddressLiteral rs(byte_map_base);
        __ z_srlg(addr_card, addr_oop, CardTable::card_shift);
        addr_oop = noreg; // dead now
        __ load_const_optimized(cardtable, rs); // cardtable := <card table base>
        __ z_agr(addr_card, cardtable); // addr_card := addr_oop>>card_shift + cardtable

        // Young cards never need to be enqueued.
        __ z_cli(0, addr_card, (int)G1CardTable::g1_young_card_val());
        __ z_bre(young_card);

        __ z_sync(); // Required to support concurrent cleaning.

        __ z_cli(0, addr_card, (int)CardTable::dirty_card_val());
        __ z_brne(not_already_dirty);

        __ bind(young_card);
        // Card is young or already dirty: nothing to do, restore
        // used registers and return.
        __ z_lg(r1, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
        __ z_br(Z_R14);

        // Not dirty.
        __ bind(not_already_dirty);

        // First, dirty it: [addr_card] := dirty_card_val
        __ z_mvi(0, addr_card, CardTable::dirty_card_val());

        // Re-purpose registers for the enqueue: cardtable's base value is no
        // longer needed, so its register becomes the queue index.
        Register idx = cardtable; // Must be non-volatile, because it is used to save addr_card.
        Register buf = r2;
        cardtable = noreg; // now dead

        // Save registers used below (see assertion in G1PreBarrierStub::emit_code()).
        __ z_stg(r2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);

        ByteSize dirty_card_q_index_byte_offset = G1ThreadLocalData::dirty_card_queue_index_offset();
        ByteSize dirty_card_q_buf_byte_offset = G1ThreadLocalData::dirty_card_queue_buffer_offset();

        __ bind(restart);

        // Get the index into the update buffer. DirtyCardQueue::_index is
        // a size_t so z_ltg is appropriate here.
        __ z_ltg(idx, Address(Z_thread, dirty_card_q_index_byte_offset));

        // index == 0? Buffer is full (index counts down to zero) -> refill.
        __ z_brz(refill);

        __ z_lg(buf, Address(Z_thread, dirty_card_q_buf_byte_offset));
        __ add2reg(idx, -oopSize);

        __ z_stg(addr_card, 0, idx, buf); // [_buf + index] := <address_of_card>
        __ z_stg(idx, Address(Z_thread, dirty_card_q_index_byte_offset)); // Publish the decremented index.
        // Restore killed registers and return.
        __ z_lg(r1, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
        __ z_lg(r2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
        __ z_br(Z_R14);

        // Slow-slow path: dirty-card buffer is full; call into the VM to hand
        // it off and install a fresh buffer, then retry the enqueue.
        __ bind(refill);
        save_volatile_registers(sasm);
        __ z_lgr(idx, addr_card); // Save addr_card; idx (Z_R6) is non-volatile and survives the leaf call.
        __ call_VM_leaf(CAST_FROM_FN_PTR(address, DirtyCardQueueSet::handle_zero_index_for_thread),
                        Z_thread);
        __ z_lgr(addr_card, idx);
        restore_volatile_registers(sasm); // Restore addr_card.
        __ z_bru(restart);
      }
      break;
#endif // INCLUDE_ALL_GCS
|
921 case predicate_failed_trap_id: |
747 case predicate_failed_trap_id: |
922 { |
748 { |
923 __ set_info("predicate_failed_trap", dont_gc_arguments); |
749 __ set_info("predicate_failed_trap", dont_gc_arguments); |
924 |
750 |
925 OopMap* map = save_live_registers(sasm); |
751 OopMap* map = save_live_registers(sasm); |