hotspot/src/cpu/sparc/vm/templateTable_sparc.cpp
changeset 13391 30245956af37
parent 11439 7af64224c70b
child 13728 882756847a04
comparing 13309:50c604cb0d5f with 13391:30245956af37
@@ -376,11 +376,11 @@
   }
 
   Register Rcache = G3_scratch;
   Register Rscratch = G4_scratch;
 
-  resolve_cache_and_index(f1_oop, Otos_i, Rcache, Rscratch, wide ? sizeof(u2) : sizeof(u1));
+  resolve_cache_and_index(f12_oop, Otos_i, Rcache, Rscratch, wide ? sizeof(u2) : sizeof(u1));
 
   __ verify_oop(Otos_i);
 
   Label L_done;
   const Register Rcon_klass = G3_scratch;  // same as Rcache
@@ -2091,14 +2091,16 @@
                                             Register index,
                                             size_t index_size) {
   // Depends on cpCacheOop layout!
   Label resolved;
 
-  if (byte_no == f1_oop) {
-    // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
-    // This kind of CP cache entry does not need to match the flags byte, because
+  if (byte_no == f12_oop) {
+    // We are resolved if the f1 field contains a non-null object (CallSite, MethodType, etc.)
+    // This kind of CP cache entry does not need to match bytecode_1 or bytecode_2, because
     // there is a 1-1 relation between bytecode type and CP entry type.
+    // The caller will also load a methodOop from f2.
+    assert(result != noreg, "");
     assert_different_registers(result, Rcache);
     __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
     __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
               ConstantPoolCacheEntry::f1_offset(), result);
     __ tst(result);
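
For orientation, the fast path generated above amounts to the following check, sketched in plain C++. The struct and names here are simplified stand-ins, not HotSpot's real cpCacheOop layout:

    #include <cstddef>

    // Stand-in for a constant pool cache entry (illustrative only).
    struct CPCacheEntry {
      void* f1;  // resolved oop: CallSite, MethodType, etc. (f12_oop entries)
      void* f2;  // methodOop invoker, loaded separately by the caller
    };

    // An f12_oop entry is resolved iff f1 is non-null; no bytecode_1/bytecode_2
    // check is needed because bytecode type and CP entry type correspond 1-1.
    inline bool is_resolved_f12_oop(const CPCacheEntry* e) {
      return e->f1 != NULL;  // mirrors the ld_ptr of f1_offset followed by tst
    }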
@@ -2121,14 +2123,17 @@
     case Bytecodes::_putfield       : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
     case Bytecodes::_invokevirtual  : // fall through
     case Bytecodes::_invokespecial  : // fall through
     case Bytecodes::_invokestatic   : // fall through
     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);  break;
+    case Bytecodes::_invokehandle   : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle);  break;
     case Bytecodes::_invokedynamic  : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);  break;
     case Bytecodes::_fast_aldc      : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);     break;
     case Bytecodes::_fast_aldc_w    : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);     break;
-    default                         : ShouldNotReachHere();                                 break;
+    default:
+      fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
+      break;
   }
   // first time invocation - must resolve first
   __ call_VM(noreg, entry, O1);
   // Update registers with resolved info
   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
@@ -2137,52 +2142,58 @@
               ConstantPoolCacheEntry::f1_offset(), result);
   __ bind(resolved);
 }
 
 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
-                                               Register Rmethod,
-                                               Register Ritable_index,
-                                               Register Rflags,
+                                               Register method,
+                                               Register itable_index,
+                                               Register flags,
                                                bool is_invokevirtual,
                                                bool is_invokevfinal,
                                                bool is_invokedynamic) {
   // Uses both G3_scratch and G4_scratch
-  Register Rcache = G3_scratch;
-  Register Rscratch = G4_scratch;
-  assert_different_registers(Rcache, Rmethod, Ritable_index);
-
-  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
+  Register cache = G3_scratch;
+  Register index = G4_scratch;
+  assert_different_registers(cache, method, itable_index);
 
   // determine constant pool cache field offsets
+  assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
   const int method_offset = in_bytes(
-    cp_base_offset +
-      (is_invokevirtual
+      constantPoolCacheOopDesc::base_offset() +
+      ((byte_no == f2_byte)
        ? ConstantPoolCacheEntry::f2_offset()
        : ConstantPoolCacheEntry::f1_offset()
       )
     );
-  const int flags_offset = in_bytes(cp_base_offset +
+  const int flags_offset = in_bytes(constantPoolCacheOopDesc::base_offset() +
                                     ConstantPoolCacheEntry::flags_offset());
   // access constant pool cache fields
-  const int index_offset = in_bytes(cp_base_offset +
+  const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() +
                                     ConstantPoolCacheEntry::f2_offset());
 
   if (is_invokevfinal) {
-    __ get_cache_and_index_at_bcp(Rcache, Rscratch, 1);
-    __ ld_ptr(Rcache, method_offset, Rmethod);
-  } else if (byte_no == f1_oop) {
-    // Resolved f1_oop goes directly into 'method' register.
-    resolve_cache_and_index(byte_no, Rmethod, Rcache, Rscratch, sizeof(u4));
+    __ get_cache_and_index_at_bcp(cache, index, 1);
+    __ ld_ptr(Address(cache, method_offset), method);
+  } else if (byte_no == f12_oop) {
+    // Resolved f1_oop (CallSite, MethodType, etc.) goes into 'itable_index'.
+    // Resolved f2_oop (methodOop invoker) will go into 'method' (at index_offset).
+    // See ConstantPoolCacheEntry::set_dynamic_call and set_method_handle.
+    size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2));
+    resolve_cache_and_index(byte_no, itable_index, cache, index, index_size);
+    __ ld_ptr(Address(cache, index_offset), method);
+    itable_index = noreg;  // hack to disable load below
   } else {
-    resolve_cache_and_index(byte_no, noreg, Rcache, Rscratch, sizeof(u2));
-    __ ld_ptr(Rcache, method_offset, Rmethod);
+    resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2));
+    __ ld_ptr(Address(cache, method_offset), method);
   }
 
-  if (Ritable_index != noreg) {
-    __ ld_ptr(Rcache, index_offset, Ritable_index);
-  }
-  __ ld_ptr(Rcache, flags_offset, Rflags);
+  if (itable_index != noreg) {
+    // pick up itable index from f2 also:
+    assert(byte_no == f1_byte, "already picked up f1");
+    __ ld_ptr(Address(cache, index_offset), itable_index);
+  }
+  __ ld_ptr(Address(cache, flags_offset), flags);
 }
 
 // The Rcache register must be set before call
 void TemplateTable::load_field_cp_cache_entry(Register Robj,
                                               Register Rcache,
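
The offset selection above reduces to a small rule, shown here as a C++ sketch (the struct is an illustrative stand-in for ConstantPoolCacheEntry; only the choice of word matters):

    #include <cstddef>
    #include <stdint.h>

    struct CPCacheEntry {  // stand-in layout
      void* f1;            // methodOop for most invokes; CallSite/MethodType for f12_oop
      void* f2;            // methodOop for invokevirtual and for the f12_oop invoker
      intptr_t flags;      // tos state, parameter size, volatile bit, ...
    };

    // byte_no == f2_byte (i.e. invokevirtual) reads the method from f2,
    // everything else reads it from f1.
    inline size_t method_word_offset(bool uses_f2) {
      return uses_f2 ? offsetof(CPCacheEntry, f2)
                     : offsetof(CPCacheEntry, f1);
    }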
@@ -2270,21 +2281,21 @@
   Assembler::Membar_mask_bits membar_bits =
     Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
 
   if (__ membar_has_effect(membar_bits)) {
     // Get volatile flag
-    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
+    __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
     __ and3(Rflags, Lscratch, Lscratch);
   }
 
   Label checkVolatile;
 
   // compute field type
   Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
-  __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
-  // Make sure we don't need to mask Rflags for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
+  __ srl(Rflags, ConstantPoolCacheEntry::tos_state_shift, Rflags);
+  // Make sure we don't need to mask Rflags after the above shift
+  ConstantPoolCacheEntry::verify_tos_state_shift();
 
   // Check atos before itos for getstatic, more likely (in Queens at least)
   __ cmp(Rflags, atos);
   __ br(Assembler::notEqual, false, Assembler::pt, notObj);
   __ delayed()->cmp(Rflags, itos);
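
Both the volatile test and the type dispatch decode fields packed into the flags word. A minimal C++ sketch of that decoding; the bit positions here are assumptions for illustration, the real values live in ConstantPoolCacheEntry:

    #include <stdint.h>

    const int tos_state_shift   = 28;  // assumed: tos state occupies the topmost bits
    const int is_volatile_shift = 21;  // assumed bit position of the volatile flag

    inline bool is_volatile_field(uint32_t flags) {
      return (flags & (1u << is_volatile_shift)) != 0;  // the set/and3 pair above
    }

    inline uint32_t tos_state(uint32_t flags) {
      // A bare right shift suffices only if nothing sits above the tos state;
      // that is exactly what verify_tos_state_shift() asserts.
      return flags >> tos_state_shift;
    }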
@@ -2443,11 +2454,11 @@
   Assembler::Membar_mask_bits membar_bits =
     Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
   if (__ membar_has_effect(membar_bits)) {
     // Get volatile flag
     __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Rflags);
-    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
+    __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
   }
 
   switch (bytecode()) {
     case Bytecodes::_fast_bgetfield:
       __ ldsb(Otos_i, Roffset, Otos_i);
@@ -2567,13 +2578,13 @@
       // the type to determine where the object is.
 
       Label two_word, valsizeknown;
       __ ld_ptr(G1_scratch, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
       __ mov(Lesp, G4_scratch);
-      __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
-      // Make sure we don't need to mask Rflags for tosBits after the above shift
-      ConstantPoolCacheEntry::verify_tosBits();
+      __ srl(Rflags, ConstantPoolCacheEntry::tos_state_shift, Rflags);
+      // Make sure we don't need to mask Rflags after the above shift
+      ConstantPoolCacheEntry::verify_tos_state_shift();
       __ cmp(Rflags, ltos);
       __ br(Assembler::equal, false, Assembler::pt, two_word);
       __ delayed()->cmp(Rflags, dtos);
       __ br(Assembler::equal, false, Assembler::pt, two_word);
       __ delayed()->nop();
@@ -2623,23 +2634,23 @@
     Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
   Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
 
   Label notVolatile, checkVolatile, exit;
   if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
-    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
+    __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
     __ and3(Rflags, Lscratch, Lscratch);
 
     if (__ membar_has_effect(read_bits)) {
       __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, notVolatile);
       volatile_barrier(read_bits);
       __ bind(notVolatile);
     }
   }
 
-  __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
-  // Make sure we don't need to mask Rflags for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
+  __ srl(Rflags, ConstantPoolCacheEntry::tos_state_shift, Rflags);
+  // Make sure we don't need to mask Rflags after the above shift
+  ConstantPoolCacheEntry::verify_tos_state_shift();
 
   // compute field type
   Label notInt, notShort, notChar, notObj, notByte, notLong, notFloat;
 
   if (is_static) {
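
For a volatile store, the masks above spell out the policy: membar(LoadStore|StoreStore) before the store and membar(StoreLoad) after it. On SPARC TSO the leading barriers are no-ops (membar_has_effect filters them out); the trailing StoreLoad is the one that costs. In portable C++ terms the combination is what a sequentially consistent atomic store provides, e.g.:

    #include <atomic>

    // Sketch of the ordering the interpreter enforces for a volatile putfield.
    void volatile_store(std::atomic<int>& field, int value) {
      field.store(value, std::memory_order_seq_cst);  // release + trailing full fence
    }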
@@ -2831,11 +2842,11 @@
   Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
 
   Label notVolatile, checkVolatile, exit;
   if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
     __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
-    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
+    __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
     __ and3(Rflags, Lscratch, Lscratch);
     if (__ membar_has_effect(read_bits)) {
       __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, notVolatile);
       volatile_barrier(read_bits);
       __ bind(notVolatile);
@@ -2914,11 +2925,11 @@
     // Get is_volatile value in Rflags and check if membar is needed
     __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::flags_offset(), Rflags);
 
     // Test volatile
     Label notVolatile;
-    __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
+    __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
     __ btst(Rflags, Lscratch);
     __ br(Assembler::zero, false, Assembler::pt, notVolatile);
     __ delayed()->nop();
     volatile_barrier(membar_bits);
     __ bind(notVolatile);
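
The load side is symmetric: after a volatile read the code emits membar(LoadLoad|LoadStore), which is an acquire fence. A C++ equivalent:

    #include <atomic>

    // Sketch: volatile getfield ordering; the LoadLoad|LoadStore membar after
    // the load corresponds to a load-acquire.
    int volatile_load(const std::atomic<int>& field) {
      return field.load(std::memory_order_acquire);
    }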
@@ -2934,49 +2945,104 @@
 void TemplateTable::count_calls(Register method, Register temp) {
   // implemented elsewhere
   ShouldNotReachHere();
 }
 
+
+void TemplateTable::prepare_invoke(int byte_no,
+                                   Register method,  // linked method (or i-klass)
+                                   Register ra,      // return address
+                                   Register index,   // itable index, MethodType, etc.
+                                   Register recv,    // if caller wants to see it
+                                   Register flags    // if caller wants to test it
+                                   ) {
+  // determine flags
+  const Bytecodes::Code code = bytecode();
+  const bool is_invokeinterface  = code == Bytecodes::_invokeinterface;
+  const bool is_invokedynamic    = code == Bytecodes::_invokedynamic;
+  const bool is_invokehandle     = code == Bytecodes::_invokehandle;
+  const bool is_invokevirtual    = code == Bytecodes::_invokevirtual;
+  const bool is_invokespecial    = code == Bytecodes::_invokespecial;
+  const bool load_receiver       = (recv != noreg);
+  assert(load_receiver == (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic), "");
+  assert(recv  == noreg || recv  == O0, "");
+  assert(flags == noreg || flags == O1, "");
+
+  // setup registers & access constant pool cache
+  if (recv  == noreg)  recv  = O0;
+  if (flags == noreg)  flags = O1;
+  const Register temp = O2;
+  assert_different_registers(method, ra, index, recv, flags, temp);
+
+  load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic);
+
+  __ mov(SP, O5_savedSP);  // record SP that we wanted the callee to restore
+
+  // maybe push appendix to arguments
+  if (is_invokedynamic || is_invokehandle) {
+    Label L_no_push;
+    __ verify_oop(index);
+    __ set((1 << ConstantPoolCacheEntry::has_appendix_shift), temp);
+    __ btst(flags, temp);
+    __ br(Assembler::zero, false, Assembler::pt, L_no_push);
+    __ delayed()->nop();
+    // Push the appendix as a trailing parameter.
+    // This must be done before we get the receiver,
+    // since the parameter_size includes it.
+    __ push_ptr(index);  // push appendix (MethodType, CallSite, etc.)
+    __ bind(L_no_push);
+  }
+
+  // load receiver if needed (after appendix is pushed so parameter size is correct)
+  if (load_receiver) {
+    __ and3(flags, ConstantPoolCacheEntry::parameter_size_mask, temp);  // get parameter size
+    __ load_receiver(temp, recv);  //  __ argument_address uses Gargs but we need Lesp
+    __ verify_oop(recv);
+  }
+
+  // compute return type
+  __ srl(flags, ConstantPoolCacheEntry::tos_state_shift, ra);
+  // Make sure we don't need to mask flags after the above shift
+  ConstantPoolCacheEntry::verify_tos_state_shift();
+  // load return address
+  {
+    const address table_addr = (is_invokeinterface || is_invokedynamic) ?
+        (address)Interpreter::return_5_addrs_by_index_table() :
+        (address)Interpreter::return_3_addrs_by_index_table();
+    AddressLiteral table(table_addr);
+    __ set(table, temp);
+    __ sll(ra, LogBytesPerWord, ra);
+    __ ld_ptr(Address(temp, ra), ra);
+  }
+}
+
+
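
Note the ordering inside prepare_invoke: the appendix is pushed before the receiver is located, because parameter_size (the low bits of flags) already counts the appendix. As a register-free sketch of that stack arithmetic (names and indexing are illustrative, not the exact Lesp convention):

    typedef void* oop;

    // The receiver sits parameter_size slots away from the top of the
    // expression stack, and parameter_size includes the appendix, so the
    // appendix must already be on the stack when this distance is measured.
    inline oop receiver_at(oop* stack_top, int parameter_size) {
      return stack_top[parameter_size];
    }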
 void TemplateTable::generate_vtable_call(Register Rrecv, Register Rindex, Register Rret) {
   Register Rtemp = G4_scratch;
   Register Rcall = Rindex;
   assert_different_registers(Rcall, G5_method, Gargs, Rret);
 
   // get target methodOop & entry point
-  const int base = instanceKlass::vtable_start_offset() * wordSize;
-  if (vtableEntry::size() % 3 == 0) {
-    // scale the vtable index by 12:
-    int one_third = vtableEntry::size() / 3;
-    __ sll(Rindex, exact_log2(one_third * 1 * wordSize), Rtemp);
-    __ sll(Rindex, exact_log2(one_third * 2 * wordSize), Rindex);
-    __ add(Rindex, Rtemp, Rindex);
-  } else {
-    // scale the vtable index by 8:
-    __ sll(Rindex, exact_log2(vtableEntry::size() * wordSize), Rindex);
-  }
-
-  __ add(Rrecv, Rindex, Rrecv);
-  __ ld_ptr(Rrecv, base + vtableEntry::method_offset_in_bytes(), G5_method);
-
+  __ lookup_virtual_method(Rrecv, Rindex, G5_method);
   __ call_from_interpreter(Rcall, Gargs, Rret);
 }
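
lookup_virtual_method replaces the open-coded index scaling deleted above. What both versions compute, in C++ terms (layout simplified: vtable entries sit inline in the klass starting at vtable_start_offset, one methodOop per entry; the deleted code scaled the index by vtableEntry::size() * wordSize by hand, with the %3 dance avoiding a multiply for 12-byte entries):

    typedef void* methodOop;

    struct VtableEntry { methodOop method; };  // stand-in for vtableEntry

    inline methodOop vtable_lookup(char* klass, int vtable_start_offset_in_bytes,
                                   int index) {
      VtableEntry* vtable = (VtableEntry*)(klass + vtable_start_offset_in_bytes);
      return vtable[index].method;
    }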
 
 void TemplateTable::invokevirtual(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f2_byte, "use this argument");
 
   Register Rscratch = G3_scratch;
-  Register Rtemp = G4_scratch;
-  Register Rret = Lscratch;
-  Register Rrecv = G5_method;
+  Register Rtemp    = G4_scratch;
+  Register Rret     = Lscratch;
+  Register O0_recv  = O0;
   Label notFinal;
 
   load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, true, false, false);
   __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
 
   // Check for vfinal
-  __ set((1 << ConstantPoolCacheEntry::vfinalMethod), G4_scratch);
+  __ set((1 << ConstantPoolCacheEntry::is_vfinal_shift), G4_scratch);
   __ btst(Rret, G4_scratch);
   __ br(Assembler::zero, false, Assembler::pt, notFinal);
   __ delayed()->and3(Rret, 0xFF, G4_scratch);      // gets number of parameters
 
   patch_bytecode(Bytecodes::_fast_invokevfinal, Rscratch, Rtemp);
@@ -2984,31 +3050,31 @@
   invokevfinal_helper(Rscratch, Rret);
 
   __ bind(notFinal);
 
   __ mov(G5_method, Rscratch);  // better scratch register
-  __ load_receiver(G4_scratch, O0);  // gets receiverOop
-  // receiver is in O0
-  __ verify_oop(O0);
+  __ load_receiver(G4_scratch, O0_recv);  // gets receiverOop
+  // receiver is in O0_recv
+  __ verify_oop(O0_recv);
 
   // get return address
   AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
   __ set(table, Rtemp);
-  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);          // get return type
-  // Make sure we don't need to mask Rret for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
+  __ srl(Rret, ConstantPoolCacheEntry::tos_state_shift, Rret);          // get return type
+  // Make sure we don't need to mask Rret after the above shift
+  ConstantPoolCacheEntry::verify_tos_state_shift();
   __ sll(Rret,  LogBytesPerWord, Rret);
   __ ld_ptr(Rtemp, Rret, Rret);         // get return address
 
   // get receiver klass
-  __ null_check(O0, oopDesc::klass_offset_in_bytes());
-  __ load_klass(O0, Rrecv);
-  __ verify_oop(Rrecv);
+  __ null_check(O0_recv, oopDesc::klass_offset_in_bytes());
+  __ load_klass(O0_recv, O0_recv);
+  __ verify_oop(O0_recv);
 
-  __ profile_virtual_call(Rrecv, O4);
+  __ profile_virtual_call(O0_recv, O4);
 
-  generate_vtable_call(Rrecv, Rscratch, Rret);
+  generate_vtable_call(O0_recv, Rscratch, Rret);
 }
 
 void TemplateTable::fast_invokevfinal(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f2_byte, "use this argument");
@@ -3034,80 +3100,52 @@
   __ profile_final_call(O4);
 
   // get return address
   AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
   __ set(table, Rtemp);
-  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);          // get return type
-  // Make sure we don't need to mask Rret for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
+  __ srl(Rret, ConstantPoolCacheEntry::tos_state_shift, Rret);          // get return type
+  // Make sure we don't need to mask Rret after the above shift
+  ConstantPoolCacheEntry::verify_tos_state_shift();
   __ sll(Rret,  LogBytesPerWord, Rret);
   __ ld_ptr(Rtemp, Rret, Rret);         // get return address
 
 
   // do the call
   __ call_from_interpreter(Rscratch, Gargs, Rret);
 }
+
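
The srl/sll/ld_ptr triple recurring in these templates indexes the interpreter's return-address table by the call's result type. Sketched in C++ (shift value assumed for illustration):

    #include <stdint.h>

    typedef void* address;
    const int tos_state_shift = 28;  // assumed position of the tos state bits

    // table[tos] is the interpreter re-entry point for a given result tos state;
    // no mask is needed after the shift because the tos state occupies the top
    // bits (verify_tos_state_shift() asserts this).
    inline address return_entry(address* table, uint32_t flags) {
      return table[flags >> tos_state_shift];
    }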
 
 void TemplateTable::invokespecial(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
 
-  Register Rscratch = G3_scratch;
-  Register Rtemp = G4_scratch;
-  Register Rret = Lscratch;
+  const Register Rret     = Lscratch;
+  const Register O0_recv  = O0;
+  const Register Rscratch = G3_scratch;
 
-  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, /*virtual*/ false, false, false);
-  __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
+  prepare_invoke(byte_no, G5_method, Rret, noreg, O0_recv);  // get receiver also for null check
+  __ null_check(O0_recv);
 
+  // do the call
   __ verify_oop(G5_method);
-
-  __ lduh(G5_method, in_bytes(methodOopDesc::size_of_parameters_offset()), G4_scratch);
-  __ load_receiver(G4_scratch, O0);
-
-  // receiver NULL check
-  __ null_check(O0);
-
   __ profile_call(O4);
-
-  // get return address
-  AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
-  __ set(table, Rtemp);
-  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);          // get return type
-  // Make sure we don't need to mask Rret for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
-  __ sll(Rret,  LogBytesPerWord, Rret);
-  __ ld_ptr(Rtemp, Rret, Rret);         // get return address
-
-  // do the call
   __ call_from_interpreter(Rscratch, Gargs, Rret);
 }
+
 
 void TemplateTable::invokestatic(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
 
-  Register Rscratch = G3_scratch;
-  Register Rtemp = G4_scratch;
-  Register Rret = Lscratch;
+  const Register Rret     = Lscratch;
+  const Register Rscratch = G3_scratch;
 
-  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, /*virtual*/ false, false, false);
-  __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
+  prepare_invoke(byte_no, G5_method, Rret);  // get f1 methodOop
 
+  // do the call
   __ verify_oop(G5_method);
-
   __ profile_call(O4);
-
-  // get return address
-  AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
-  __ set(table, Rtemp);
-  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);          // get return type
-  // Make sure we don't need to mask Rret for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
-  __ sll(Rret,  LogBytesPerWord, Rret);
-  __ ld_ptr(Rtemp, Rret, Rret);         // get return address
-
-  // do the call
   __ call_from_interpreter(Rscratch, Gargs, Rret);
 }
 
 
 void TemplateTable::invokeinterface_object_method(Register RklassOop,
@@ -3120,11 +3158,11 @@
   assert_different_registers(Rscratch, Rindex, Rret);
 
   Label notFinal;
 
   // Check for vfinal
-  __ set((1 << ConstantPoolCacheEntry::vfinalMethod), Rscratch);
+  __ set((1 << ConstantPoolCacheEntry::is_vfinal_shift), Rscratch);
   __ btst(Rflags, Rscratch);
   __ br(Assembler::zero, false, Assembler::pt, notFinal);
   __ delayed()->nop();
 
   __ profile_final_call(O4);
@@ -3142,79 +3180,63 @@
 
 void TemplateTable::invokeinterface(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
 
-  Register Rscratch = G4_scratch;
-  Register Rret = G3_scratch;
-  Register Rindex = Lscratch;
-  Register Rinterface = G1_scratch;
-  Register RklassOop = G5_method;
-  Register Rflags = O1;
+  const Register Rinterface  = G1_scratch;
+  const Register Rret        = G3_scratch;
+  const Register Rindex      = Lscratch;
+  const Register O0_recv     = O0;
+  const Register O1_flags    = O1;
+  const Register O2_klassOop = O2;
+  const Register Rscratch    = G4_scratch;
   assert_different_registers(Rscratch, G5_method);
 
-  load_invoke_cp_cache_entry(byte_no, Rinterface, Rindex, Rflags, /*virtual*/ false, false, false);
-  __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
-
-  // get receiver
-  __ and3(Rflags, 0xFF, Rscratch);       // gets number of parameters
-  __ load_receiver(Rscratch, O0);
-  __ verify_oop(O0);
-
-  __ mov(Rflags, Rret);
-
-  // get return address
-  AddressLiteral table(Interpreter::return_5_addrs_by_index_table());
-  __ set(table, Rscratch);
-  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);          // get return type
-  // Make sure we don't need to mask Rret for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
-  __ sll(Rret,  LogBytesPerWord, Rret);
-  __ ld_ptr(Rscratch, Rret, Rret);      // get return address
+  prepare_invoke(byte_no, Rinterface, Rret, Rindex, O0_recv, O1_flags);
 
   // get receiver klass
-  __ null_check(O0, oopDesc::klass_offset_in_bytes());
-  __ load_klass(O0, RklassOop);
-  __ verify_oop(RklassOop);
+  __ null_check(O0_recv, oopDesc::klass_offset_in_bytes());
+  __ load_klass(O0_recv, O2_klassOop);
+  __ verify_oop(O2_klassOop);
 
   // Special case of invokeinterface called for virtual method of
   // java.lang.Object.  See cpCacheOop.cpp for details.
   // This code isn't produced by javac, but could be produced by
   // another compliant java compiler.
   Label notMethod;
-  __ set((1 << ConstantPoolCacheEntry::methodInterface), Rscratch);
-  __ btst(Rflags, Rscratch);
+  __ set((1 << ConstantPoolCacheEntry::is_forced_virtual_shift), Rscratch);
+  __ btst(O1_flags, Rscratch);
   __ br(Assembler::zero, false, Assembler::pt, notMethod);
   __ delayed()->nop();
 
-  invokeinterface_object_method(RklassOop, Rinterface, Rret, Rflags);
+  invokeinterface_object_method(O2_klassOop, Rinterface, Rret, O1_flags);
 
   __ bind(notMethod);
 
-  __ profile_virtual_call(RklassOop, O4);
+  __ profile_virtual_call(O2_klassOop, O4);
 
   //
   // find entry point to call
   //
 
   // compute start of first itableOffsetEntry (which is at end of vtable)
   const int base = instanceKlass::vtable_start_offset() * wordSize;
   Label search;
-  Register Rtemp = Rflags;
+  Register Rtemp = O1_flags;
 
-  __ ld(RklassOop, instanceKlass::vtable_length_offset() * wordSize, Rtemp);
+  __ ld(O2_klassOop, instanceKlass::vtable_length_offset() * wordSize, Rtemp);
   if (align_object_offset(1) > 1) {
     __ round_to(Rtemp, align_object_offset(1));
   }
   __ sll(Rtemp, LogBytesPerWord, Rtemp);   // Rtemp *= wordSize
   if (Assembler::is_simm13(base)) {
     __ add(Rtemp, base, Rtemp);
   } else {
     __ set(base, Rscratch);
     __ add(Rscratch, Rtemp, Rtemp);
   }
-  __ add(RklassOop, Rtemp, Rscratch);
+  __ add(O2_klassOop, Rtemp, Rscratch);
 
   __ bind(search);
 
   __ ld_ptr(Rscratch, itableOffsetEntry::interface_offset_in_bytes(), Rtemp);
   {
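
In C++ terms the itable walk performed here looks roughly like this (types and field names are stand-ins for itableOffsetEntry/itableMethodEntry; error paths elided):

    typedef void* methodOop;
    typedef void* klassOop;

    struct ItableOffsetEntry { klassOop interface_klass; int offset; };  // stand-in
    struct ItableMethodEntry { methodOop method; };                      // stand-in

    // Scan the itableOffsetEntry records (they start right after the vtable,
    // rounded up to an aligned boundary) for the target interface, then index
    // the itableMethodEntry array found at the recorded offset from the klass.
    methodOop itable_lookup(char* klass, ItableOffsetEntry* ioe,
                            klassOop interface_klass, int itable_index) {
      while (ioe->interface_klass != interface_klass) {
        ioe++;  // the real code also checks for the end of the table
      }
      ItableMethodEntry* ime = (ItableMethodEntry*)(klass + ioe->offset);
      return ime[itable_index].method;  // NULL here means AbstractMethodError
    }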
@@ -3242,11 +3264,11 @@
   __ ld(Rscratch, itableOffsetEntry::offset_offset_in_bytes(), Rscratch);
 
   assert(itableMethodEntry::method_offset_in_bytes() == 0, "adjust instruction below");
   __ sll(Rindex, exact_log2(itableMethodEntry::size() * wordSize), Rindex);       // Rindex *= 8;
   __ add(Rscratch, Rindex, Rscratch);
-  __ ld_ptr(RklassOop, Rscratch, G5_method);
+  __ ld_ptr(O2_klassOop, Rscratch, G5_method);
 
   // Check for abstract method error.
   {
     Label ok;
     __ br_notnull_short(G5_method, Assembler::pt, ok);
@@ -3258,17 +3280,46 @@
   Register Rcall = Rinterface;
   assert_different_registers(Rcall, G5_method, Gargs, Rret);
 
   __ verify_oop(G5_method);
   __ call_from_interpreter(Rcall, Gargs, Rret);
-
+}
+
+
+void TemplateTable::invokehandle(int byte_no) {
+  transition(vtos, vtos);
+  assert(byte_no == f12_oop, "use this argument");
+
+  if (!EnableInvokeDynamic) {
+    // rewriter does not generate this bytecode
+    __ should_not_reach_here();
+    return;
+  }
+
+  const Register Rret       = Lscratch;
+  const Register G4_mtype   = G4_scratch;  // f1
+  const Register O0_recv    = O0;
+  const Register Rscratch   = G3_scratch;
+
+  prepare_invoke(byte_no, G5_method, Rret, G4_mtype, O0_recv);
+  __ null_check(O0_recv);
+
+  // G4: MethodType object (from f1)
+  // G5: MH.linkToCallSite method (from f2)
+
+  // Note:  G4_mtype is already pushed (if necessary) by prepare_invoke
+
+  // do the call
+  __ verify_oop(G5_method);
+  __ profile_final_call(O4);  // FIXME: profile the LambdaForm also
+  __ call_from_interpreter(Rscratch, Gargs, Rret);
 }
 
 
 void TemplateTable::invokedynamic(int byte_no) {
   transition(vtos, vtos);
-  assert(byte_no == f1_oop, "use this argument");
+  assert(byte_no == f12_oop, "use this argument");
 
   if (!EnableInvokeDynamic) {
     // We should not encounter this bytecode if !EnableInvokeDynamic.
     // The verifier will stop it.  However, if we get past the verifier,
     // this will stop the thread in a reasonable way, without crashing the JVM.
@@ -3277,46 +3328,28 @@
     // the call_VM checks for exception, so we should never return here.
     __ should_not_reach_here();
     return;
   }
 
-  // G5: CallSite object (f1)
-  // XX: unused (f2)
-  // XX: flags (unused)
-
-  Register G5_callsite = G5_method;
-  Register Rscratch    = G3_scratch;
-  Register Rtemp       = G1_scratch;
-  Register Rret        = Lscratch;
-
-  load_invoke_cp_cache_entry(byte_no, G5_callsite, noreg, Rret,
-                             /*virtual*/ false, /*vfinal*/ false, /*indy*/ true);
-  __ mov(SP, O5_savedSP);  // record SP that we wanted the callee to restore
+  const Register Rret        = Lscratch;
+  const Register G4_callsite = G4_scratch;
+  const Register Rscratch    = G3_scratch;
+
+  prepare_invoke(byte_no, G5_method, Rret, G4_callsite);
+
+  // G4: CallSite object (from f1)
+  // G5: MH.linkToCallSite method (from f2)
+
+  // Note:  G4_callsite is already pushed by prepare_invoke
+
+  // %%% should make a type profile for any invokedynamic that takes a ref argument
 
   // profile this call
   __ profile_call(O4);
 
-  // get return address
-  AddressLiteral table(Interpreter::return_5_addrs_by_index_table());
-  __ set(table, Rtemp);
-  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);  // get return type
-  // Make sure we don't need to mask Rret for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
-  __ sll(Rret, LogBytesPerWord, Rret);
-  __ ld_ptr(Rtemp, Rret, Rret);  // get return address
-
-  __ verify_oop(G5_callsite);
-  __ load_heap_oop(G5_callsite, __ delayed_value(java_lang_invoke_CallSite::target_offset_in_bytes, Rscratch), G3_method_handle);
-  __ null_check(G3_method_handle);
-  __ verify_oop(G3_method_handle);
-
-  // Adjust Rret first so Llast_SP can be same as Rret
-  __ add(Rret, -frame::pc_return_offset, O7);
-  __ add(Lesp, BytesPerWord, Gargs);  // setup parameter pointer
-  __ jump_to_method_handle_entry(G3_method_handle, Rtemp, /* emit_delayed_nop */ false);
-  // Record SP so we can remove any stack space allocated by adapter transition
-  __ delayed()->mov(SP, Llast_SP);
+  // do the call
+  __ verify_oop(G5_method);
+  __ call_from_interpreter(Rscratch, Gargs, Rret);
 }
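
The rewrite above is the heart of this change: instead of loading CallSite.target and jumping through the method handle entry, the resolved cache entry now supplies an adapter methodOop in f2 (MH.linkToCallSite) that is called like any other method, with the CallSite itself passed as a trailing appendix argument. A stand-in sketch of the new linkage:

    typedef void* oop;
    typedef void* methodOop;

    struct CPCacheEntry { oop f1; methodOop f2; };  // stand-in, as before

    // New scheme: the CallSite appendix in f1 goes on the stack as the last
    // argument (prepare_invoke already pushed it, and parameter_size counts
    // it), and f2 is invoked like a normal method. The old scheme's manual
    // target load, null check and tail-jump are gone.
    inline methodOop invokedynamic_callee(const CPCacheEntry* e, oop* appendix_out) {
      *appendix_out = e->f1;  // CallSite object
      return e->f2;           // MH.linkToCallSite adapter
    }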
 
 
 //----------------------------------------------------------------------------------------------------
 // Allocation