hotspot/src/cpu/sparc/vm/sharedRuntime_sparc.cpp
changeset 13391 30245956af37
parent 12103 2ceb7aff05e3
child 13728 882756847a04
equal deleted inserted replaced
13309:50c604cb0d5f 13391:30245956af37
   398   for (int i = 0; i < total_args_passed; i++) {
   398   for (int i = 0; i < total_args_passed; i++) {
   399     switch (sig_bt[i]) {
   399     switch (sig_bt[i]) {
   400     case T_LONG:                // LP64, longs compete with int args
   400     case T_LONG:                // LP64, longs compete with int args
   401       assert(sig_bt[i+1] == T_VOID, "");
   401       assert(sig_bt[i+1] == T_VOID, "");
   402 #ifdef _LP64
   402 #ifdef _LP64
   403       if (int_reg_cnt < int_reg_max) int_reg_cnt++;
   403       if (int_reg_cnt < int_reg_max)  int_reg_cnt++;
   404 #endif
   404 #endif
   405       break;
   405       break;
   406     case T_OBJECT:
   406     case T_OBJECT:
   407     case T_ARRAY:
   407     case T_ARRAY:
   408     case T_ADDRESS: // Used, e.g., in slow-path locking for the lock's stack address
   408     case T_ADDRESS: // Used, e.g., in slow-path locking for the lock's stack address
   409       if (int_reg_cnt < int_reg_max) int_reg_cnt++;
   409       if (int_reg_cnt < int_reg_max)  int_reg_cnt++;
   410 #ifndef _LP64
   410 #ifndef _LP64
   411       else                            stk_reg_pairs++;
   411       else                            stk_reg_pairs++;
   412 #endif
   412 #endif
   413       break;
   413       break;
   414     case T_INT:
   414     case T_INT:
   415     case T_SHORT:
   415     case T_SHORT:
   416     case T_CHAR:
   416     case T_CHAR:
   417     case T_BYTE:
   417     case T_BYTE:
   418     case T_BOOLEAN:
   418     case T_BOOLEAN:
   419       if (int_reg_cnt < int_reg_max) int_reg_cnt++;
   419       if (int_reg_cnt < int_reg_max)  int_reg_cnt++;
   420       else                            stk_reg_pairs++;
   420       else                            stk_reg_pairs++;
   421       break;
   421       break;
   422     case T_FLOAT:
   422     case T_FLOAT:
   423       if (flt_reg_cnt < flt_reg_max) flt_reg_cnt++;
   423       if (flt_reg_cnt < flt_reg_max)  flt_reg_cnt++;
   424       else                            stk_reg_pairs++;
   424       else                            stk_reg_pairs++;
   425       break;
   425       break;
   426     case T_DOUBLE:
   426     case T_DOUBLE:
   427       assert(sig_bt[i+1] == T_VOID, "");
   427       assert(sig_bt[i+1] == T_VOID, "");
   428       break;
   428       break;
   434   }
   434   }
   435 
   435 
   436   // This is where the longs/doubles start on the stack.
   436   // This is where the longs/doubles start on the stack.
   437   stk_reg_pairs = (stk_reg_pairs+1) & ~1; // Round
   437   stk_reg_pairs = (stk_reg_pairs+1) & ~1; // Round
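  // ((x + 1) & ~1 rounds x up to an even value: 3 -> 4, 4 -> 4, 5 -> 6.  Starting
  // at an even 32-bit slot index keeps stack longs/doubles on an 8-byte-aligned pair.)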
   438 
   438 
   439   int int_reg_pairs = (int_reg_cnt+1) & ~1; // 32-bit 2-reg longs only
       
   440   int flt_reg_pairs = (flt_reg_cnt+1) & ~1;
   439   int flt_reg_pairs = (flt_reg_cnt+1) & ~1;
   441 
   440 
   442   // int stk_reg = frame::register_save_words*(wordSize>>2);
   441   // int stk_reg = frame::register_save_words*(wordSize>>2);
   443   // int stk_reg = SharedRuntime::out_preserve_stack_slots();
   442   // int stk_reg = SharedRuntime::out_preserve_stack_slots();
   444   int stk_reg = 0;
   443   int stk_reg = 0;
   515         } else {
   514         } else {
   516           regs[i].set2(VMRegImpl::stack2reg(stk_reg_pairs));
   515           regs[i].set2(VMRegImpl::stack2reg(stk_reg_pairs));
   517           stk_reg_pairs += 2;
   516           stk_reg_pairs += 2;
   518         }
   517         }
   519 #else // COMPILER2
   518 #else // COMPILER2
   520         if (int_reg_pairs + 1 < int_reg_max) {
       
   521           if (is_outgoing) {
       
   522             regs[i].set_pair(as_oRegister(int_reg_pairs + 1)->as_VMReg(), as_oRegister(int_reg_pairs)->as_VMReg());
       
   523           } else {
       
   524             regs[i].set_pair(as_iRegister(int_reg_pairs + 1)->as_VMReg(), as_iRegister(int_reg_pairs)->as_VMReg());
       
   525           }
       
   526           int_reg_pairs += 2;
       
   527         } else {
       
   528           regs[i].set2(VMRegImpl::stack2reg(stk_reg_pairs));
   519           regs[i].set2(VMRegImpl::stack2reg(stk_reg_pairs));
   529           stk_reg_pairs += 2;
   520           stk_reg_pairs += 2;
   530         }
       
   531 #endif // COMPILER2
   521 #endif // COMPILER2
   532 #endif // _LP64
   522 #endif // _LP64
   533       break;
   523       break;
   534 
   524 
   535     case T_FLOAT:
   525     case T_FLOAT:
   536       if (flt_reg < flt_reg_max) regs[i].set1(as_FloatRegister(flt_reg++)->as_VMReg());
   526       if (flt_reg < flt_reg_max) regs[i].set1(as_FloatRegister(flt_reg++)->as_VMReg());
   537       else                       regs[i].set1(    VMRegImpl::stack2reg(stk_reg++));
   527       else                       regs[i].set1(VMRegImpl::stack2reg(stk_reg++));
   538       break;
   528       break;
   539     case T_DOUBLE:
   529     case T_DOUBLE:
   540       assert(sig_bt[i+1] == T_VOID, "expecting half");
   530       assert(sig_bt[i+1] == T_VOID, "expecting half");
   541       if (flt_reg_pairs + 1 < flt_reg_max) {
   531       if (flt_reg_pairs + 1 < flt_reg_max) {
   542         regs[i].set2(as_FloatRegister(flt_reg_pairs)->as_VMReg());
   532         regs[i].set2(as_FloatRegister(flt_reg_pairs)->as_VMReg());
   884   // the interpreter does not know where its args are without some kind of
   874   // the interpreter does not know where its args are without some kind of
   885   // arg pointer being passed in.  Pass it in Gargs.
   875   // arg pointer being passed in.  Pass it in Gargs.
   886   __ delayed()->add(SP, G1, Gargs);
   876   __ delayed()->add(SP, G1, Gargs);
   887 }
   877 }
   888 
   878 
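// Helper added by this change (used below under VerifyAdapterCalls): branches to
// L_ok when pc_reg falls strictly between code_start and code_end, otherwise
// falls through (L_fail is bound at the end).  Clobbers temp_reg and temp2_reg.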
       
   879 static void range_check(MacroAssembler* masm, Register pc_reg, Register temp_reg, Register temp2_reg,
       
   880                         address code_start, address code_end,
       
   881                         Label& L_ok) {
       
   882   Label L_fail;
       
   883   __ set(ExternalAddress(code_start), temp_reg);
       
   884   __ set(pointer_delta(code_end, code_start, 1), temp2_reg);
       
   885   __ cmp(pc_reg, temp_reg);
       
   886   __ brx(Assembler::lessEqualUnsigned, false, Assembler::pn, L_fail);
       
   887   __ delayed()->add(temp_reg, temp2_reg, temp_reg);
       
   888   __ cmp(pc_reg, temp_reg);
       
   889   __ cmp_and_brx_short(pc_reg, temp_reg, Assembler::lessUnsigned, Assembler::pt, L_ok);
       
   890   __ bind(L_fail);
       
   891 }
       
   892 
   889 void AdapterGenerator::gen_i2c_adapter(
   893 void AdapterGenerator::gen_i2c_adapter(
   890                             int total_args_passed,
   894                             int total_args_passed,
   891                             // VMReg max_arg,
   895                             // VMReg max_arg,
   892                             int comp_args_on_stack, // VMRegStackSlots
   896                             int comp_args_on_stack, // VMRegStackSlots
   893                             const BasicType *sig_bt,
   897                             const BasicType *sig_bt,
   904   // We will only enter here from an interpreted frame and never from after
   908   // We will only enter here from an interpreted frame and never from after
   905   // passing thru a c2i. Azul allowed this but we do not. If we lose the
   909   // passing thru a c2i. Azul allowed this but we do not. If we lose the
   906   // race and use a c2i we will remain interpreted for the race loser(s).
   910   // race and use a c2i we will remain interpreted for the race loser(s).
   907   // This removes all sorts of headaches on the x86 side and also eliminates
   911   // This removes all sorts of headaches on the x86 side and also eliminates
   908   // the possibility of having c2i -> i2c -> c2i -> ... endless transitions.
   912   // the possibility of having c2i -> i2c -> c2i -> ... endless transitions.
       
   913 
       
   914   // More detail:
       
   915   // Adapters can be frameless because they do not require the caller
       
   916   // to perform additional cleanup work, such as correcting the stack pointer.
       
   917   // An i2c adapter is frameless because the *caller* frame, which is interpreted,
       
   918   // routinely repairs its own stack pointer (from interpreter_frame_last_sp),
       
   919   // even if a callee has modified the stack pointer.
       
   920   // A c2i adapter is frameless because the *callee* frame, which is interpreted,
       
   921   // routinely repairs its caller's stack pointer (from sender_sp, which is set
       
   922   // up via the senderSP register).
       
   923   // In other words, if *either* the caller or callee is interpreted, we can
       
   924   // get the stack pointer repaired after a call.
       
   925   // This is why c2i and i2c adapters cannot be indefinitely composed.
       
   926   // In particular, if a c2i adapter were to somehow call an i2c adapter,
       
   927   // both caller and callee would be compiled methods, and neither would
       
   928   // clean up the stack pointer changes performed by the two adapters.
       
   929   // If this happens, control eventually transfers back to the compiled
       
   930   // caller, but with an uncorrected stack, causing delayed havoc.
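// (Summarizing the cases above:
//    compiled --c2i--> interpreted           : interpreted callee repairs its caller's SP
//    interpreted --i2c--> compiled           : interpreted caller repairs its own SP
//    compiled --c2i--> ... --i2c--> compiled : no interpreted frame repairs SP -- forbidden.)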
       
   931 
       
   932   if (VerifyAdapterCalls &&
       
   933       (Interpreter::code() != NULL || StubRoutines::code1() != NULL)) {
       
   934     // So, let's test for cascading c2i/i2c adapters right now.
       
   935     //  assert(Interpreter::contains($return_addr) ||
       
   936     //         StubRoutines::contains($return_addr),
       
   937     //         "i2c adapter must return to an interpreter frame");
       
   938     __ block_comment("verify_i2c { ");
       
   939     Label L_ok;
       
   940     if (Interpreter::code() != NULL)
       
   941       range_check(masm, O7, O0, O1,
       
   942                   Interpreter::code()->code_start(), Interpreter::code()->code_end(),
       
   943                   L_ok);
       
   944     if (StubRoutines::code1() != NULL)
       
   945       range_check(masm, O7, O0, O1,
       
   946                   StubRoutines::code1()->code_begin(), StubRoutines::code1()->code_end(),
       
   947                   L_ok);
       
   948     if (StubRoutines::code2() != NULL)
       
   949       range_check(masm, O7, O0, O1,
       
   950                   StubRoutines::code2()->code_begin(), StubRoutines::code2()->code_end(),
       
   951                   L_ok);
       
   952     const char* msg = "i2c adapter must return to an interpreter frame";
       
   953     __ block_comment(msg);
       
   954     __ stop(msg);
       
   955     __ bind(L_ok);
       
    956     __ block_comment("} verify_i2c ");
       
   957   }
   909 
   958 
   910   // As you can see from the list of inputs & outputs there are not a lot
   959   // As you can see from the list of inputs & outputs there are not a lot
   911   // of temp registers to work with: mostly G1, G3 & G4.
   960   // of temp registers to work with: mostly G1, G3 & G4.
   912 
   961 
   913   // Inputs:
   962   // Inputs:
  1935   move_ptr(masm, reg64_to_VMRegPair(G0), body_arg);
  1984   move_ptr(masm, reg64_to_VMRegPair(G0), body_arg);
  1936   move32_64(masm, reg64_to_VMRegPair(G0), length_arg);
  1985   move32_64(masm, reg64_to_VMRegPair(G0), length_arg);
  1937   __ bind(done);
  1986   __ bind(done);
  1938 }
  1987 }
  1939 
  1988 
       
  1989 static void verify_oop_args(MacroAssembler* masm,
       
  1990                             int total_args_passed,
       
  1991                             const BasicType* sig_bt,
       
  1992                             const VMRegPair* regs) {
       
  1993   Register temp_reg = G5_method;  // not part of any compiled calling seq
       
  1994   if (VerifyOops) {
       
  1995     for (int i = 0; i < total_args_passed; i++) {
       
  1996       if (sig_bt[i] == T_OBJECT ||
       
  1997           sig_bt[i] == T_ARRAY) {
       
  1998         VMReg r = regs[i].first();
       
  1999         assert(r->is_valid(), "bad oop arg");
       
  2000         if (r->is_stack()) {
       
  2001           RegisterOrConstant ld_off = reg2offset(r) + STACK_BIAS;
       
  2002           ld_off = __ ensure_simm13_or_reg(ld_off, temp_reg);
       
  2003           __ ld_ptr(SP, ld_off, temp_reg);
       
  2004           __ verify_oop(temp_reg);
       
  2005         } else {
       
  2006           __ verify_oop(r->as_Register());
       
  2007         }
       
  2008       }
       
  2009     }
       
  2010   }
       
  2011 }
       
  2012 
       
  2013 static void gen_special_dispatch(MacroAssembler* masm,
       
  2014                                  int total_args_passed,
       
  2015                                  int comp_args_on_stack,
       
  2016                                  vmIntrinsics::ID special_dispatch,
       
  2017                                  const BasicType* sig_bt,
       
  2018                                  const VMRegPair* regs) {
       
  2019   verify_oop_args(masm, total_args_passed, sig_bt, regs);
       
  2020 
       
  2021   // Now write the args into the outgoing interpreter space
       
  2022   bool     has_receiver   = false;
       
  2023   Register receiver_reg   = noreg;
       
  2024   int      member_arg_pos = -1;
       
  2025   Register member_reg     = noreg;
       
  2026   int      ref_kind       = MethodHandles::signature_polymorphic_intrinsic_ref_kind(special_dispatch);
       
  2027   if (ref_kind != 0) {
       
  2028     member_arg_pos = total_args_passed - 1;  // trailing MemberName argument
       
  2029     member_reg = G5_method;  // known to be free at this point
       
  2030     has_receiver = MethodHandles::ref_kind_has_receiver(ref_kind);
       
  2031   } else if (special_dispatch == vmIntrinsics::_invokeBasic) {
       
  2032     has_receiver = true;
       
  2033   } else {
       
  2034     fatal(err_msg("special_dispatch=%d", special_dispatch));
       
  2035   }
       
  2036 
       
  2037   if (member_reg != noreg) {
       
  2038     // Load the member_arg into register, if necessary.
       
  2039     assert(member_arg_pos >= 0 && member_arg_pos < total_args_passed, "oob");
       
  2040     assert(sig_bt[member_arg_pos] == T_OBJECT, "dispatch argument must be an object");
       
  2041     VMReg r = regs[member_arg_pos].first();
       
  2042     assert(r->is_valid(), "bad member arg");
       
  2043     if (r->is_stack()) {
       
  2044       RegisterOrConstant ld_off = reg2offset(r) + STACK_BIAS;
       
  2045       ld_off = __ ensure_simm13_or_reg(ld_off, member_reg);
       
  2046       __ ld_ptr(SP, ld_off, member_reg);
       
  2047     } else {
       
  2048       // no data motion is needed
       
  2049       member_reg = r->as_Register();
       
  2050     }
       
  2051   }
       
  2052 
       
  2053   if (has_receiver) {
       
  2054     // Make sure the receiver is loaded into a register.
       
  2055     assert(total_args_passed > 0, "oob");
       
  2056     assert(sig_bt[0] == T_OBJECT, "receiver argument must be an object");
       
  2057     VMReg r = regs[0].first();
       
  2058     assert(r->is_valid(), "bad receiver arg");
       
  2059     if (r->is_stack()) {
       
  2060       // Porting note:  This assumes that compiled calling conventions always
       
  2061       // pass the receiver oop in a register.  If this is not true on some
       
  2062       // platform, pick a temp and load the receiver from stack.
       
  2063       assert(false, "receiver always in a register");
       
  2064       receiver_reg = G3_scratch;  // known to be free at this point
       
  2065       RegisterOrConstant ld_off = reg2offset(r) + STACK_BIAS;
       
  2066       ld_off = __ ensure_simm13_or_reg(ld_off, member_reg);
       
  2067       __ ld_ptr(SP, ld_off, receiver_reg);
       
  2068     } else {
       
  2069       // no data motion is needed
       
  2070       receiver_reg = r->as_Register();
       
  2071     }
       
  2072   }
       
  2073 
       
  2074   // Figure out which address we are really jumping to:
       
  2075   MethodHandles::generate_method_handle_dispatch(masm, special_dispatch,
       
  2076                                                  receiver_reg, member_reg, /*for_compiler_entry:*/ true);
       
  2077 }
       
  2078 
  1940 // ---------------------------------------------------------------------------
  2079 // ---------------------------------------------------------------------------
  1941 // Generate a native wrapper for a given method.  The method takes arguments
  2080 // Generate a native wrapper for a given method.  The method takes arguments
  1942 // in the Java compiled code convention, marshals them to the native
  2081 // in the Java compiled code convention, marshals them to the native
  1943 // convention (handlizes oops, etc), transitions to native, makes the call,
  2082 // convention (handlizes oops, etc), transitions to native, makes the call,
  1944 // returns to java state (possibly blocking), unhandlizes any result and
  2083 // returns to java state (possibly blocking), unhandlizes any result and
  1945 // returns.
  2084 // returns.
       
  2085 //
       
  2086 // Critical native functions are a shorthand for the use of
       
   2087 // GetPrimitiveArrayCritical and disallow the use of any other JNI
       
  2088 // functions.  The wrapper is expected to unpack the arguments before
       
  2089 // passing them to the callee and perform checks before and after the
       
   2090 // native call to ensure that the GC_locker
       
  2091 // lock_critical/unlock_critical semantics are followed.  Some other
       
  2092 // parts of JNI setup are skipped like the tear down of the JNI handle
       
   2093 // block and the check for pending exceptions because it's impossible for them
       
  2094 // to be thrown.
       
  2095 //
       
  2096 // They are roughly structured like this:
       
  2097 //    if (GC_locker::needs_gc())
       
  2098 //      SharedRuntime::block_for_jni_critical();
       
   2099 //    transition to thread_in_native
       
   2100 //    unpack array arguments and call native entry point
       
  2101 //    check for safepoint in progress
       
  2102 //    check if any thread suspend flags are set
       
   2103 //      call into the JVM and possibly unlock the JNI critical lock
       
  2104 //      if a GC was suppressed while in the critical native.
       
  2105 //    transition back to thread_in_Java
       
  2106 //    return to caller
       
  2107 //
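As a hedged illustration of the convention described in the comment above (not
part of this changeset; the class, method and parameter names are hypothetical),
the native (C/C++) side of such a critical native uses the JavaCritical_ prefix,
takes no JNIEnv*/jclass arguments, and receives each array argument unpacked
into a (length, pointer) pair -- the "unpack array arguments" step above:

    #include <jni.h>

    // Critical counterpart of a hypothetical Java method
    //   static native int sum(byte[] data);   // in class com.example.Sums
    // It must not call back into any JNI function while it runs, since the
    // array body is exposed directly, as with GetPrimitiveArrayCritical.
    extern "C" JNIEXPORT jint JNICALL
    JavaCritical_com_example_Sums_sum(jint data_length, jbyte* data) {
      jint total = 0;
      for (jint i = 0; i < data_length; i++) {
        total += data[i];
      }
      return total;
    }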
  1946 nmethod *SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
  2108 nmethod *SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
  1947                                                 methodHandle method,
  2109                                                 methodHandle method,
  1948                                                 int compile_id,
  2110                                                 int compile_id,
  1949                                                 int total_in_args,
  2111                                                 int total_in_args,
  1950                                                 int comp_args_on_stack, // in VMRegStackSlots
  2112                                                 int comp_args_on_stack, // in VMRegStackSlots
  1951                                                 BasicType *in_sig_bt,
  2113                                                 BasicType* in_sig_bt,
  1952                                                 VMRegPair *in_regs,
  2114                                                 VMRegPair* in_regs,
  1953                                                 BasicType ret_type) {
  2115                                                 BasicType ret_type) {
       
  2116   if (method->is_method_handle_intrinsic()) {
       
  2117     vmIntrinsics::ID iid = method->intrinsic_id();
       
  2118     intptr_t start = (intptr_t)__ pc();
       
  2119     int vep_offset = ((intptr_t)__ pc()) - start;
       
  2120     gen_special_dispatch(masm,
       
  2121                          total_in_args,
       
  2122                          comp_args_on_stack,
       
  2123                          method->intrinsic_id(),
       
  2124                          in_sig_bt,
       
  2125                          in_regs);
       
  2126     int frame_complete = ((intptr_t)__ pc()) - start;  // not complete, period
       
  2127     __ flush();
       
  2128     int stack_slots = SharedRuntime::out_preserve_stack_slots();  // no out slots at all, actually
       
  2129     return nmethod::new_native_nmethod(method,
       
  2130                                        compile_id,
       
  2131                                        masm->code(),
       
  2132                                        vep_offset,
       
  2133                                        frame_complete,
       
  2134                                        stack_slots / VMRegImpl::slots_per_word,
       
  2135                                        in_ByteSize(-1),
       
  2136                                        in_ByteSize(-1),
       
  2137                                        (OopMapSet*)NULL);
       
  2138   }
  1954   bool is_critical_native = true;
  2139   bool is_critical_native = true;
  1955   address native_func = method->critical_native_function();
  2140   address native_func = method->critical_native_function();
  1956   if (native_func == NULL) {
  2141   if (native_func == NULL) {
  1957     native_func = method->native_function();
  2142     native_func = method->native_function();
  1958     is_critical_native = false;
  2143     is_critical_native = false;