/*
 * Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2016, 2019, SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
79 int offset = __ offset(); |
79 int offset = __ offset(); |
80 __ inline_cache_check(receiver, Z_inline_cache); |
80 __ inline_cache_check(receiver, Z_inline_cache); |
81 return offset; |
81 return offset; |
82 } |
82 } |
83 |
83 |
|
// Emits a class-initialization barrier for 'method': if the holder class is
// still being initialized (and the initializing thread is not the current
// thread), control is redirected to the handle_wrong_method stub so the call
// is re-resolved; otherwise execution falls through via L_skip_barrier.
void LIR_Assembler::clinit_barrier(ciMethod* method) {
  // A barrier is only emitted while initialization is in progress; a class
  // that never started initializing should not reach this point.
  assert(!method->holder()->is_not_initialized(), "initialization should have been started");

  Label L_skip_barrier;
  // Z_R1_scratch is clobbered: first holds the holder klass, then the stub address.
  Register klass = Z_R1_scratch;

  // Load the holder's Klass* and take the fast path if the class is fully
  // initialized or being initialized by the current thread.
  metadata2reg(method->holder()->constant_encoding(), klass);
  __ clinit_barrier(klass, Z_thread, &L_skip_barrier /*L_fast_path*/);

  // Slow path: tail-branch into the shared handle_wrong_method stub so the
  // call site is re-resolved once initialization completes.
  __ load_const_optimized(klass, SharedRuntime::get_handle_wrong_method_stub());
  __ z_br(klass);

  __ bind(L_skip_barrier);
}
|
98 |
84 void LIR_Assembler::osr_entry() { |
99 void LIR_Assembler::osr_entry() { |
85 // On-stack-replacement entry sequence (interpreter frame layout described in interpreter_sparc.cpp): |
100 // On-stack-replacement entry sequence (interpreter frame layout described in interpreter_sparc.cpp): |
86 // |
101 // |
87 // 1. Create a new compiled activation. |
102 // 1. Create a new compiled activation. |
88 // 2. Initialize local variables in the compiled activation. The expression stack must be empty |
103 // 2. Initialize local variables in the compiled activation. The expression stack must be empty |
955 __ z_llgf(dest->as_register(), disp_value, disp_reg, src); |
970 __ z_llgf(dest->as_register(), disp_value, disp_reg, src); |
956 __ oop_decoder(dest->as_register(), dest->as_register(), true); |
971 __ oop_decoder(dest->as_register(), dest->as_register(), true); |
957 } else { |
972 } else { |
958 __ z_lg(dest->as_register(), disp_value, disp_reg, src); |
973 __ z_lg(dest->as_register(), disp_value, disp_reg, src); |
959 } |
974 } |
|
975 __ verify_oop(dest->as_register()); |
960 break; |
976 break; |
961 } |
977 } |
962 case T_FLOAT: |
978 case T_FLOAT: |
963 if (short_disp) { |
979 if (short_disp) { |
964 __ z_le(dest->as_float_reg(), disp_value, disp_reg, src); |
980 __ z_le(dest->as_float_reg(), disp_value, disp_reg, src); |
974 } |
990 } |
975 break; |
991 break; |
976 case T_LONG : __ z_lg(dest->as_register_lo(), disp_value, disp_reg, src); break; |
992 case T_LONG : __ z_lg(dest->as_register_lo(), disp_value, disp_reg, src); break; |
977 default : ShouldNotReachHere(); |
993 default : ShouldNotReachHere(); |
978 } |
994 } |
979 if (type == T_ARRAY || type == T_OBJECT) { |
|
980 __ verify_oop(dest->as_register()); |
|
981 } |
|
982 |
995 |
983 if (patch != NULL) { |
996 if (patch != NULL) { |
984 patching_epilog(patch, patch_code, src, info); |
997 patching_epilog(patch, patch_code, src, info); |
985 } |
998 } |
986 if (info != NULL) add_debug_info_for_null_check(offset, info); |
999 if (info != NULL) add_debug_info_for_null_check(offset, info); |
989 void LIR_Assembler::stack2reg(LIR_Opr src, LIR_Opr dest, BasicType type) { |
1002 void LIR_Assembler::stack2reg(LIR_Opr src, LIR_Opr dest, BasicType type) { |
990 assert(src->is_stack(), "should not call otherwise"); |
1003 assert(src->is_stack(), "should not call otherwise"); |
991 assert(dest->is_register(), "should not call otherwise"); |
1004 assert(dest->is_register(), "should not call otherwise"); |
992 |
1005 |
993 if (dest->is_single_cpu()) { |
1006 if (dest->is_single_cpu()) { |
994 if (type == T_ARRAY || type == T_OBJECT) { |
1007 if (is_reference_type(type)) { |
995 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true); |
1008 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true); |
996 __ verify_oop(dest->as_register()); |
1009 __ verify_oop(dest->as_register()); |
997 } else if (type == T_METADATA) { |
1010 } else if (type == T_METADATA) { |
998 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true); |
1011 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true); |
999 } else { |
1012 } else { |
1017 assert(src->is_register(), "should not call otherwise"); |
1030 assert(src->is_register(), "should not call otherwise"); |
1018 assert(dest->is_stack(), "should not call otherwise"); |
1031 assert(dest->is_stack(), "should not call otherwise"); |
1019 |
1032 |
1020 if (src->is_single_cpu()) { |
1033 if (src->is_single_cpu()) { |
1021 const Address dst = frame_map()->address_for_slot(dest->single_stack_ix()); |
1034 const Address dst = frame_map()->address_for_slot(dest->single_stack_ix()); |
1022 if (type == T_OBJECT || type == T_ARRAY) { |
1035 if (is_reference_type(type)) { |
1023 __ verify_oop(src->as_register()); |
1036 __ verify_oop(src->as_register()); |
1024 __ reg2mem_opt(src->as_register(), dst, true); |
1037 __ reg2mem_opt(src->as_register(), dst, true); |
1025 } else if (type == T_METADATA) { |
1038 } else if (type == T_METADATA) { |
1026 __ reg2mem_opt(src->as_register(), dst, true); |
1039 __ reg2mem_opt(src->as_register(), dst, true); |
1027 } else { |
1040 } else { |
1063 __ z_lgr(to_reg->as_register(), from_reg->as_register()); |
1076 __ z_lgr(to_reg->as_register(), from_reg->as_register()); |
1064 } |
1077 } |
1065 } else { |
1078 } else { |
1066 ShouldNotReachHere(); |
1079 ShouldNotReachHere(); |
1067 } |
1080 } |
1068 if (to_reg->type() == T_OBJECT || to_reg->type() == T_ARRAY) { |
1081 if (is_reference_type(to_reg->type())) { |
1069 __ verify_oop(to_reg->as_register()); |
1082 __ verify_oop(to_reg->as_register()); |
1070 } |
1083 } |
1071 } |
1084 } |
1072 |
1085 |
1073 void LIR_Assembler::reg2mem(LIR_Opr from, LIR_Opr dest_opr, BasicType type, |
1086 void LIR_Assembler::reg2mem(LIR_Opr from, LIR_Opr dest_opr, BasicType type, |
1114 disp_reg = addr->index()->as_pointer_register(); |
1127 disp_reg = addr->index()->as_pointer_register(); |
1115 } |
1128 } |
1116 |
1129 |
1117 assert(disp_reg != Z_R0 || Immediate::is_simm20(disp_value), "should have set this up"); |
1130 assert(disp_reg != Z_R0 || Immediate::is_simm20(disp_value), "should have set this up"); |
1118 |
1131 |
1119 if (type == T_ARRAY || type == T_OBJECT) { |
1132 if (is_reference_type(type)) { |
1120 __ verify_oop(from->as_register()); |
1133 __ verify_oop(from->as_register()); |
1121 } |
1134 } |
1122 |
1135 |
1123 bool short_disp = Immediate::is_uimm12(disp_value); |
1136 bool short_disp = Immediate::is_uimm12(disp_value); |
1124 |
1137 |
1277 bool unsigned_comp = condition == lir_cond_belowEqual || condition == lir_cond_aboveEqual; |
1290 bool unsigned_comp = condition == lir_cond_belowEqual || condition == lir_cond_aboveEqual; |
1278 if (opr1->is_single_cpu()) { |
1291 if (opr1->is_single_cpu()) { |
1279 Register reg1 = opr1->as_register(); |
1292 Register reg1 = opr1->as_register(); |
1280 if (opr2->is_single_cpu()) { |
1293 if (opr2->is_single_cpu()) { |
1281 // cpu register - cpu register |
1294 // cpu register - cpu register |
1282 if (opr1->type() == T_OBJECT || opr1->type() == T_ARRAY) { |
1295 if (is_reference_type(opr1->type())) { |
1283 __ z_clgr(reg1, opr2->as_register()); |
1296 __ z_clgr(reg1, opr2->as_register()); |
1284 } else { |
1297 } else { |
1285 assert(opr2->type() != T_OBJECT && opr2->type() != T_ARRAY, "cmp int, oop?"); |
1298 assert(!is_reference_type(opr2->type()), "cmp int, oop?"); |
1286 if (unsigned_comp) { |
1299 if (unsigned_comp) { |
1287 __ z_clr(reg1, opr2->as_register()); |
1300 __ z_clr(reg1, opr2->as_register()); |
1288 } else { |
1301 } else { |
1289 __ z_cr(reg1, opr2->as_register()); |
1302 __ z_cr(reg1, opr2->as_register()); |
1290 } |
1303 } |
1291 } |
1304 } |
1292 } else if (opr2->is_stack()) { |
1305 } else if (opr2->is_stack()) { |
1293 // cpu register - stack |
1306 // cpu register - stack |
1294 if (opr1->type() == T_OBJECT || opr1->type() == T_ARRAY) { |
1307 if (is_reference_type(opr1->type())) { |
1295 __ z_cg(reg1, frame_map()->address_for_slot(opr2->single_stack_ix())); |
1308 __ z_cg(reg1, frame_map()->address_for_slot(opr2->single_stack_ix())); |
1296 } else { |
1309 } else { |
1297 if (unsigned_comp) { |
1310 if (unsigned_comp) { |
1298 __ z_cly(reg1, frame_map()->address_for_slot(opr2->single_stack_ix())); |
1311 __ z_cly(reg1, frame_map()->address_for_slot(opr2->single_stack_ix())); |
1299 } else { |
1312 } else { |
1307 if (unsigned_comp) { |
1320 if (unsigned_comp) { |
1308 __ z_clfi(reg1, c->as_jint()); |
1321 __ z_clfi(reg1, c->as_jint()); |
1309 } else { |
1322 } else { |
1310 __ z_cfi(reg1, c->as_jint()); |
1323 __ z_cfi(reg1, c->as_jint()); |
1311 } |
1324 } |
1312 } else if (c->type() == T_OBJECT || c->type() == T_ARRAY) { |
1325 } else if (is_reference_type(c->type())) { |
1313 // In 64bit oops are single register. |
1326 // In 64bit oops are single register. |
1314 jobject o = c->as_jobject(); |
1327 jobject o = c->as_jobject(); |
1315 if (o == NULL) { |
1328 if (o == NULL) { |
1316 __ z_ltgr(reg1, reg1); |
1329 __ z_ltgr(reg1, reg1); |
1317 } else { |
1330 } else { |
1968 __ branch_optimized(Assembler::bcondAlways, *stub->entry()); |
1981 __ branch_optimized(Assembler::bcondAlways, *stub->entry()); |
1969 __ bind(*stub->continuation()); |
1982 __ bind(*stub->continuation()); |
1970 return; |
1983 return; |
1971 } |
1984 } |
1972 |
1985 |
1973 Label done; |
|
1974 // Save outgoing arguments in callee saved registers (C convention) in case |
1986 // Save outgoing arguments in callee saved registers (C convention) in case |
1975 // a call to System.arraycopy is needed. |
1987 // a call to System.arraycopy is needed. |
1976 Register callee_saved_src = Z_R10; |
1988 Register callee_saved_src = Z_R10; |
1977 Register callee_saved_src_pos = Z_R11; |
1989 Register callee_saved_src_pos = Z_R11; |
1978 Register callee_saved_dst = Z_R12; |
1990 Register callee_saved_dst = Z_R12; |
2140 __ check_klass_subtype_fast_path(src_klass, dst_klass, tmp, &cont, &slow, NULL); |
2152 __ check_klass_subtype_fast_path(src_klass, dst_klass, tmp, &cont, &slow, NULL); |
2141 |
2153 |
2142 store_parameter(src_klass, 0); // sub |
2154 store_parameter(src_klass, 0); // sub |
2143 store_parameter(dst_klass, 1); // super |
2155 store_parameter(dst_klass, 1); // super |
2144 emit_call_c(Runtime1::entry_for (Runtime1::slow_subtype_check_id)); |
2156 emit_call_c(Runtime1::entry_for (Runtime1::slow_subtype_check_id)); |
2145 CHECK_BAILOUT(); |
2157 CHECK_BAILOUT2(cont, slow); |
2146 // Sets condition code 0 for match (2 otherwise). |
2158 // Sets condition code 0 for match (2 otherwise). |
2147 __ branch_optimized(Assembler::bcondEqual, cont); |
2159 __ branch_optimized(Assembler::bcondEqual, cont); |
2148 |
2160 |
2149 __ bind(slow); |
2161 __ bind(slow); |
2150 |
2162 |
2199 |
2211 |
2200 __ load_klass(Z_ARG5, dst); |
2212 __ load_klass(Z_ARG5, dst); |
2201 __ z_lg(Z_ARG5, Address(Z_ARG5, ObjArrayKlass::element_klass_offset())); |
2213 __ z_lg(Z_ARG5, Address(Z_ARG5, ObjArrayKlass::element_klass_offset())); |
2202 __ z_lg(Z_ARG4, Address(Z_ARG5, Klass::super_check_offset_offset())); |
2214 __ z_lg(Z_ARG4, Address(Z_ARG5, Klass::super_check_offset_offset())); |
2203 emit_call_c(copyfunc_addr); |
2215 emit_call_c(copyfunc_addr); |
2204 CHECK_BAILOUT(); |
2216 CHECK_BAILOUT2(cont, slow); |
2205 |
2217 |
2206 #ifndef PRODUCT |
2218 #ifndef PRODUCT |
2207 if (PrintC1Statistics) { |
2219 if (PrintC1Statistics) { |
2208 NearLabel failed; |
2220 NearLabel failed; |
2209 __ compareU32_and_branch(Z_RET, (intptr_t)0, Assembler::bcondNotEqual, failed); |
2221 __ compareU32_and_branch(Z_RET, (intptr_t)0, Assembler::bcondNotEqual, failed); |
2397 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) { |
2409 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) { |
2398 Register len = op->len()->as_register(); |
2410 Register len = op->len()->as_register(); |
2399 __ move_reg_if_needed(len, T_LONG, len, T_INT); // sign extend |
2411 __ move_reg_if_needed(len, T_LONG, len, T_INT); // sign extend |
2400 |
2412 |
2401 if (UseSlowPath || |
2413 if (UseSlowPath || |
2402 (!UseFastNewObjectArray && (op->type() == T_OBJECT || op->type() == T_ARRAY)) || |
2414 (!UseFastNewObjectArray && (is_reference_type(op->type()))) || |
2403 (!UseFastNewTypeArray && (op->type() != T_OBJECT && op->type() != T_ARRAY))) { |
2415 (!UseFastNewTypeArray && (!is_reference_type(op->type())))) { |
2404 __ z_brul(*op->stub()->entry()); |
2416 __ z_brul(*op->stub()->entry()); |
2405 } else { |
2417 } else { |
2406 __ allocate_array(op->obj()->as_register(), |
2418 __ allocate_array(op->obj()->as_register(), |
2407 op->len()->as_register(), |
2419 op->len()->as_register(), |
2408 op->tmp1()->as_register(), |
2420 op->tmp1()->as_register(), |
2554 // Call out-of-line instance of __ check_klass_subtype_slow_path(...): |
2566 // Call out-of-line instance of __ check_klass_subtype_slow_path(...): |
2555 address a = Runtime1::entry_for (Runtime1::slow_subtype_check_id); |
2567 address a = Runtime1::entry_for (Runtime1::slow_subtype_check_id); |
2556 store_parameter(klass_RInfo, 0); // sub |
2568 store_parameter(klass_RInfo, 0); // sub |
2557 store_parameter(k_RInfo, 1); // super |
2569 store_parameter(k_RInfo, 1); // super |
2558 emit_call_c(a); // Sets condition code 0 for match (2 otherwise). |
2570 emit_call_c(a); // Sets condition code 0 for match (2 otherwise). |
2559 CHECK_BAILOUT(); |
2571 CHECK_BAILOUT2(profile_cast_failure, profile_cast_success); |
2560 __ branch_optimized(Assembler::bcondNotEqual, *failure_target); |
2572 __ branch_optimized(Assembler::bcondNotEqual, *failure_target); |
2561 // Fall through to success case. |
2573 // Fall through to success case. |
2562 } |
2574 } |
2563 } |
2575 } |
2564 |
2576 |
2637 // Call out-of-line instance of __ check_klass_subtype_slow_path(...): |
2649 // Call out-of-line instance of __ check_klass_subtype_slow_path(...): |
2638 address a = Runtime1::entry_for (Runtime1::slow_subtype_check_id); |
2650 address a = Runtime1::entry_for (Runtime1::slow_subtype_check_id); |
2639 store_parameter(klass_RInfo, 0); // sub |
2651 store_parameter(klass_RInfo, 0); // sub |
2640 store_parameter(k_RInfo, 1); // super |
2652 store_parameter(k_RInfo, 1); // super |
2641 emit_call_c(a); // Sets condition code 0 for match (2 otherwise). |
2653 emit_call_c(a); // Sets condition code 0 for match (2 otherwise). |
2642 CHECK_BAILOUT(); |
2654 CHECK_BAILOUT3(profile_cast_success, profile_cast_failure, done); |
2643 __ branch_optimized(Assembler::bcondNotEqual, *failure_target); |
2655 __ branch_optimized(Assembler::bcondNotEqual, *failure_target); |
2644 // Fall through to success case. |
2656 // Fall through to success case. |
2645 |
2657 |
2646 if (op->should_profile()) { |
2658 if (op->should_profile()) { |
2647 Register mdo = klass_RInfo, recv = k_RInfo; |
2659 Register mdo = klass_RInfo, recv = k_RInfo; |