src/hotspot/cpu/x86/sharedRuntime_x86_32.cpp
changeset 58641 55fe0d93bdd3
parent 58554 8c3c39710a08
child 58679 9c3209ff7550
child 59122 5d73255c2d52
@@ -1300,5 +1300,33 @@
         int offset_in_older_frame = in_regs[i].first()->reg2stack() + SharedRuntime::out_preserve_stack_slots();
         map->set_oop(VMRegImpl::stack2reg(offset_in_older_frame + stack_slots));
       }
     }
   }
+}
+
+// Registers need to be saved for runtime call
+static Register caller_saved_registers[] = {
+  rcx, rdx, rsi, rdi
+};
+
+// Save caller saved registers except r1 and r2
+static void save_registers_except(MacroAssembler* masm, Register r1, Register r2) {
+  int reg_len = (int)(sizeof(caller_saved_registers) / sizeof(Register));
+  for (int index = 0; index < reg_len; index ++) {
+    Register this_reg = caller_saved_registers[index];
+    if (this_reg != r1 && this_reg != r2) {
+      __ push(this_reg);
+    }
+  }
+}
+
+// Restore caller saved registers except r1 and r2
+static void restore_registers_except(MacroAssembler* masm, Register r1, Register r2) {
+  int reg_len = (int)(sizeof(caller_saved_registers) / sizeof(Register));
+  for (int index = reg_len - 1; index >= 0; index --) {
+    Register this_reg = caller_saved_registers[index];
+    if (this_reg != r1 && this_reg != r2) {
+      __ pop(this_reg);
+    }
+  }
+}
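The two helpers above must stay in lock-step: whatever save_registers_except pushes, restore_registers_except has to pop in the opposite order while skipping the same two exclusions, or the stack ends up unbalanced. A minimal standalone model of that invariant (plain C++, not HotSpot code; register identities are stand-in ints):

    #include <cassert>
    #include <vector>

    // Register identities modeled as ints; the values are arbitrary stand-ins.
    static const int caller_saved[] = { /* rcx */ 1, /* rdx */ 2, /* rsi */ 6, /* rdi */ 7 };
    static const int reg_len = (int)(sizeof(caller_saved) / sizeof(caller_saved[0]));

    static void save_except(std::vector<int>& stack, int r1, int r2) {
      for (int i = 0; i < reg_len; i++)              // forward order, like save_registers_except
        if (caller_saved[i] != r1 && caller_saved[i] != r2)
          stack.push_back(caller_saved[i]);
    }

    static void restore_except(std::vector<int>& stack, int r1, int r2) {
      for (int i = reg_len - 1; i >= 0; i--) {       // reverse order, like restore_registers_except
        if (caller_saved[i] != r1 && caller_saved[i] != r2) {
          assert(stack.back() == caller_saved[i]);   // each pop matches its push
          stack.pop_back();
        }
      }
    }

    int main() {
      std::vector<int> stack;
      save_except(stack, 1, 6);      // exclude the arg and thread registers
      restore_except(stack, 1, 6);   // must use the same exclusions
      assert(stack.empty());         // the stack is balanced again
      return 0;
    }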
@@ -1304,0 +1333,33 @@
+
+// Pin object, return pinned object or null in rax
+static void gen_pin_object(MacroAssembler* masm,
+                           Register thread, VMRegPair reg) {
+  __ block_comment("gen_pin_object {");
+
+  Label is_null;
+  Register tmp_reg = rax;
+  VMRegPair tmp(tmp_reg->as_VMReg());
+  if (reg.first()->is_stack()) {
+    // Load the arg up from the stack
+    simple_move32(masm, reg, tmp);
+    reg = tmp;
+  } else {
+    __ movl(tmp_reg, reg.first()->as_Register());
+  }
+  __ testptr(reg.first()->as_Register(), reg.first()->as_Register());
+  __ jccb(Assembler::equal, is_null);
+
+  // Save registers that may be used by runtime call
+  Register arg = reg.first()->is_Register() ? reg.first()->as_Register() : noreg;
+  save_registers_except(masm, arg, thread);
+
+  __ call_VM_leaf(
+    CAST_FROM_FN_PTR(address, SharedRuntime::pin_object),
+    thread, reg.first()->as_Register());
+
+  // Restore saved registers
+  restore_registers_except(masm, arg, thread);
+
+  __ bind(is_null);
+  __ block_comment("} gen_pin_object");
+}
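gen_pin_object leaves either the pinned reference or the original null in rax, so the caller always gets a single "pinned or null" value to work with. A compact model of that control flow, with a hypothetical runtime_pin_object standing in for the SharedRuntime::pin_object entry point:

    #include <cassert>
    #include <cstdint>

    typedef uintptr_t oopish;   // stand-in for an oop

    // Hypothetical stand-in for the SharedRuntime::pin_object runtime entry.
    static oopish runtime_pin_object(oopish obj) { return obj; }

    static oopish gen_pin_object_model(oopish arg) {
      oopish rax = arg;                 // the arg is copied into the temp/result register
      if (rax == 0) return rax;         // jccb(equal, is_null): null skips the call
      return runtime_pin_object(rax);   // call_VM_leaf leaves the pinned oop in rax
    }

    int main() {
      assert(gen_pin_object_model(0) == 0);        // null in, null out
      assert(gen_pin_object_model(0x1000) != 0);   // non-null comes back pinned
      return 0;
    }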
@@ -1305,1 +1366,31 @@
+
+// Unpin object
+static void gen_unpin_object(MacroAssembler* masm,
+                             Register thread, VMRegPair reg) {
+  __ block_comment("gen_unpin_object {");
+  Label is_null;
+
+  // temp register
+  __ push(rax);
+  Register tmp_reg = rax;
+  VMRegPair tmp(tmp_reg->as_VMReg());
+
+  simple_move32(masm, reg, tmp);
+
+  __ testptr(rax, rax);
+  __ jccb(Assembler::equal, is_null);
+
+  // Save registers that may be used by runtime call
+  Register arg = reg.first()->is_Register() ? reg.first()->as_Register() : noreg;
+  save_registers_except(masm, arg, thread);
+
+  __ call_VM_leaf(
+    CAST_FROM_FN_PTR(address, SharedRuntime::unpin_object),
+    thread, rax);
+
+  // Restore saved registers
+  restore_registers_except(masm, arg, thread);
+  __ bind(is_null);
+  __ pop(rax);
+  __ block_comment("} gen_unpin_object");
 }
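Together the two stubs bracket the native call with a pin/unpin pair: the heap promises not to move the object between the two runtime calls, and every pin must be matched by an unpin. A standalone sketch of that assumed contract (a model heap with per-object pin counts; the real collector may pin regions rather than individual objects):

    #include <cassert>
    #include <cstdint>
    #include <map>

    // A model heap with per-object pin counts; pinned objects must not move.
    struct ModelHeap {
      std::map<uintptr_t, int> pins;
      uintptr_t pin(uintptr_t obj) { pins[obj]++; return obj; }   // pinned in place here
      void unpin(uintptr_t obj) {
        assert(pins[obj] > 0);          // every unpin needs a matching pin
        if (--pins[obj] == 0) pins.erase(obj);
      }
      bool movable(uintptr_t obj) const { return pins.find(obj) == pins.end(); }
    };

    int main() {
      ModelHeap heap;
      uintptr_t arr = 0x1000;
      uintptr_t pinned = heap.pin(arr);   // gen_pin_object, before the native call
      assert(!heap.movable(pinned));      // GC may run, but must not relocate arr
      heap.unpin(pinned);                 // gen_unpin_object, after the call returns
      assert(heap.movable(arr));
      return 0;
    }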
@@ -1306,4 +1397,4 @@
 
 // Check GCLocker::needs_gc and enter the runtime if it's true.  This
 // keeps a new JNI critical region from starting until a GC has been
 // forced.  Save down any oops in registers and describe them in an
@@ -1835,11 +1926,11 @@
 
   const Register oop_handle_reg = rsi;
 
   __ get_thread(thread);
 
-  if (is_critical_native) {
+  if (is_critical_native && !Universe::heap()->supports_object_pinning()) {
     check_needs_gc_for_critical_native(masm, thread, stack_slots, total_c_args, total_in_args,
                                        oop_handle_offset, oop_maps, in_regs, in_sig_bt);
   }
 
   //
@@ -1873,10 +1964,15 @@
   // sure we can capture all the incoming oop args from the
   // caller.
   //
   OopMap* map = new OopMap(stack_slots * 2, 0 /* arg_slots*/);
 
+  // Inbound arguments that need to be pinned for critical natives
+  GrowableArray<int> pinned_args(total_in_args);
+  // Current stack slot for storing register based array argument
+  int pinned_slot = oop_handle_offset;
+
   // Mark location of rbp,
   // map->set_callee_saved(VMRegImpl::stack2reg( stack_slots - 2), stack_slots * 2, 0, rbp->as_VMReg());
 
   // We know that we only have args in at most two integer registers (rcx, rdx), so rax and rbx
   // are free to use as temporaries if we have to do stack-to-stack moves.
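pinned_slot walks the scratch area in VMReg stack-slot units, so byte offsets are recovered by scaling with VMRegImpl::stack_slot_size; on x86_32 a slot is word-sized, so slots_per_word is 1. A quick standalone check of that arithmetic (the starting slot value is made up):

    #include <cassert>

    static const int stack_slot_size = 4;   // bytes per VMReg stack slot on x86_32
    static const int word_size = 4;         // bytes per machine word on x86_32
    static const int slots_per_word = word_size / stack_slot_size;   // == 1

    int main() {
      int pinned_slot = 10;                        // example oop_handle_offset, made up
      int first = pinned_slot * stack_slot_size;   // byte offset of the first spilled pin
      pinned_slot += slots_per_word;               // advance by one word, as the loop does
      int second = pinned_slot * stack_slot_size;
      assert(second - first == word_size);         // consecutive pins are one word apart
      return 0;
    }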
@@ -1884,11 +1980,32 @@
 
   for (int i = 0; i < total_in_args ; i++, c_arg++ ) {
     switch (in_sig_bt[i]) {
       case T_ARRAY:
         if (is_critical_native) {
-          unpack_array_argument(masm, in_regs[i], in_elem_bt[i], out_regs[c_arg + 1], out_regs[c_arg]);
+          VMRegPair in_arg = in_regs[i];
+          if (Universe::heap()->supports_object_pinning()) {
+            // gen_pin_object handles save and restore
+            // of any clobbered registers
+            gen_pin_object(masm, thread, in_arg);
+            pinned_args.append(i);
+
+            // rax has pinned array
+            VMRegPair result_reg(rax->as_VMReg());
+            if (!in_arg.first()->is_stack()) {
+              assert(pinned_slot <= stack_slots, "overflow");
+              simple_move32(masm, result_reg, VMRegImpl::stack2reg(pinned_slot));
+              pinned_slot += VMRegImpl::slots_per_word;
+            } else {
+              // Write back pinned value, it will be used to unpin this argument
+              __ movptr(Address(rbp, reg2offset_in(in_arg.first())), result_reg.first()->as_Register());
+            }
+            // We have the array in register, use it
+            in_arg = result_reg;
+          }
+
+          unpack_array_argument(masm, in_arg, in_elem_bt[i], out_regs[c_arg + 1], out_regs[c_arg]);
           c_arg++;
           break;
         }
       case T_OBJECT:
         assert(!is_critical_native, "no oop arguments");
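The pinning loop above has to remember where each pinned pointer lives so the epilogue can unpin it later: register arguments get parked in a fresh scratch slot (they have no home in the caller frame), while stack arguments are simply overwritten in place. A simplified standalone model of that bookkeeping (names and slot values are illustrative, not HotSpot's):

    #include <cassert>
    #include <map>
    #include <vector>

    struct PinBookkeeping {
      std::vector<int> pinned_args;        // indices of arguments that were pinned
      std::map<int, long> scratch;         // scratch slot -> pinned pointer (register args)
      std::map<int, long> caller_frame;    // caller-frame slot -> value (stack args)
    };

    static void record_pin(PinBookkeeping& bk, int i, bool arg_in_register,
                           int& pinned_slot, int home_slot, long pinned_ptr) {
      bk.pinned_args.push_back(i);                   // pinned_args.append(i)
      if (arg_in_register) {
        bk.scratch[pinned_slot] = pinned_ptr;        // simple_move32 to stack2reg(pinned_slot)
        pinned_slot += 1;                            // VMRegImpl::slots_per_word on x86_32
      } else {
        bk.caller_frame[home_slot] = pinned_ptr;     // movptr back into the caller's slot
      }
    }

    int main() {
      PinBookkeeping bk;
      int pinned_slot = 10;                              // example oop_handle_offset
      record_pin(bk, 0, true, pinned_slot, -1, 0x1000);  // register arg: park in scratch slot
      record_pin(bk, 2, false, pinned_slot, 8, 0x2000);  // stack arg: overwrite in place
      assert(bk.pinned_args.size() == 2);
      assert(bk.scratch.size() == 1 && bk.caller_frame.size() == 1);
      return 0;
    }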
@@ -2075,10 +2192,30 @@
   case T_OBJECT:                // Really a handle
       break; // can't de-handlize until after safepoint check
   case T_VOID: break;
   case T_LONG: break;
   default       : ShouldNotReachHere();
+  }
+
+  // unpin pinned arguments
+  pinned_slot = oop_handle_offset;
+  if (pinned_args.length() > 0) {
+    // save return value that may be overwritten otherwise.
+    save_native_result(masm, ret_type, stack_slots);
+    for (int index = 0; index < pinned_args.length(); index ++) {
+      int i = pinned_args.at(index);
+      assert(pinned_slot <= stack_slots, "overflow");
+      if (!in_regs[i].first()->is_stack()) {
+        int offset = pinned_slot * VMRegImpl::stack_slot_size;
+        __ movl(in_regs[i].first()->as_Register(), Address(rsp, offset));
+        pinned_slot += VMRegImpl::slots_per_word;
+      }
+      // gen_unpin_object handles save and restore
+      // of any other clobbered registers
+      gen_unpin_object(masm, thread, in_regs[i]);
+    }
+    restore_native_result(masm, ret_type, stack_slots);
+  }
 
   // Switch thread to "native transition" state before reading the synchronization state.
   // This additional state is necessary because reading and testing the synchronization
   // state is not atomic w.r.t. GC, as this scenario demonstrates:
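The unpin epilogue inserted above runs after the native call returns but before the thread-state transition, and its runtime calls would clobber the return value held in rax/rdx; hence the save_native_result/restore_native_result bracket around the loop. A minimal model of that ordering:

    #include <cassert>
    #include <vector>

    struct NativeState { long result_reg; };   // models the return value in rax/rdx

    static void unpin_all(NativeState& s, std::vector<long>& pins) {
      long saved = s.result_reg;        // save_native_result(...)
      for (long p : pins) {
        (void)p;                        // gen_unpin_object(masm, thread, in_regs[i])
        s.result_reg = 0;               // model: the runtime call clobbers rax
      }
      pins.clear();
      s.result_reg = saved;             // restore_native_result(...)
    }

    int main() {
      NativeState s = { 42 };
      std::vector<long> pins;
      pins.push_back(0x1000);
      pins.push_back(0x2000);
      unpin_all(s, pins);
      assert(s.result_reg == 42);       // the native result survives unpinning
      assert(pins.empty());             // every recorded pin was released
      return 0;
    }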