@@ -201,22 +201,19 @@
     __ movzbl(scratch, at_bcp(0));
     __ cmpl(scratch, Bytecodes::_breakpoint);
     __ jcc(Assembler::notEqual, fast_patch);
     __ get_method(scratch);
     // Let breakpoint table handling rewrite to quicker bytecode
-    __ call_VM(noreg,
-               CAST_FROM_FN_PTR(address,
-                                InterpreterRuntime::set_original_bytecode_at),
-               scratch, r13, bc);
+    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, r13, bc);
 #ifndef ASSERT
     __ jmpb(patch_done);
-    __ bind(fast_patch);
-  }
 #else
     __ jmp(patch_done);
+#endif
     __ bind(fast_patch);
   }
+#ifdef ASSERT
   Label okay;
   __ load_unsigned_byte(scratch, at_bcp(0));
   __ cmpl(scratch, (int) Bytecodes::java_code(bytecode));
   __ jcc(Assembler::equal, okay);
   __ cmpl(scratch, bc);
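For readers less familiar with bytecode quickening, the control flow this hunk generates can be paraphrased in plain C++. The sketch below is illustrative only (the function and constant names are mine, not HotSpot's); both the tail of the assertion and the actual store of the quickened bytecode sit just below the excerpted lines.

#include <cassert>
#include <cstdint>

// Illustrative stand-ins for HotSpot internals.
static const uint8_t BREAKPOINT = 0xca;  // models Bytecodes::_breakpoint
static void notify_breakpoint_table(uint8_t* /*bcp*/, uint8_t /*bc*/) {
  // models the call_VM to InterpreterRuntime::set_original_bytecode_at
}

// Paraphrase of the generated stub: quicken the bytecode at *bcp to 'bc',
// unless a debugger breakpoint currently shadows the real bytecode.
void patch_bytecode_sketch(uint8_t* bcp, uint8_t bc, uint8_t java_code) {
  if (*bcp == BREAKPOINT) {
    // The breakpoint opcode hides the original bytecode, so record the
    // quickened form in the runtime's breakpoint table instead.
    notify_breakpoint_table(bcp, bc);
    return;
  }
  // Debug (ASSERT) builds: we must be looking at either the original
  // bytecode or an already-quickened copy of it.
  assert((*bcp == java_code || *bcp == bc) && "patching the wrong bytecode");
  *bcp = bc;  // rewrite the stream in place to the faster form
}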
@@ -2052,30 +2049,32 @@
   if (os::is_MP()) { // Not needed on single CPU
     __ membar(order_constraint);
   }
 }

-void TemplateTable::resolve_cache_and_index(int byte_no,
-                                            Register Rcache,
-                                            Register index) {
+void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
   assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
+  bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic);

   const Register temp = rbx;
   assert_different_registers(Rcache, index, temp);

   const int shift_count = (1 + byte_no) * BitsPerByte;
   Label resolved;
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
-  __ movl(temp, Address(Rcache,
-                        index, Address::times_8,
-                        constantPoolCacheOopDesc::base_offset() +
-                        ConstantPoolCacheEntry::indices_offset()));
-  __ shrl(temp, shift_count);
-  // have we resolved this bytecode?
-  __ andl(temp, 0xFF);
-  __ cmpl(temp, (int) bytecode());
-  __ jcc(Assembler::equal, resolved);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
+  if (is_invokedynamic) {
+    // we are resolved if the f1 field contains a non-null CallSite object
+    __ cmpptr(Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()), (int32_t) NULL_WORD);
+    __ jcc(Assembler::notEqual, resolved);
+  } else {
+    __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
+    __ shrl(temp, shift_count);
+    // have we resolved this bytecode?
+    __ andl(temp, 0xFF);
+    __ cmpl(temp, (int) bytecode());
+    __ jcc(Assembler::equal, resolved);
+  }

   // resolve first time through
   address entry;
   switch (bytecode()) {
   case Bytecodes::_getstatic:
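The new fast-path check relies on how a constant pool cache entry records its resolution state: ordinary field/invoke bytecodes stamp the resolved bytecode into the entry's indices word (byte 1 or 2, selected by byte_no), while an invokedynamic entry counts as resolved once f1 holds a non-null CallSite, as the added comment says. A rough data-layout model follows; field meanings beyond what the diff shows are marked as assumptions.

#include <cstdint>

// Rough model of a ConstantPoolCacheEntry, for this check only.
struct CacheEntrySketch {
  intptr_t indices;  // bytes 1 and 2 cache the bytecode(s) this entry was resolved for (assumed layout)
  void*    f1;       // invokedynamic: the resolved java.dyn.CallSite object
  void*    f2;       // other entry-specific data, unused here
  intptr_t flags;
};

// byte_no is 1 or 2, matching the assert in resolve_cache_and_index.
bool is_resolved_sketch(const CacheEntrySketch* e, int bytecode, int byte_no,
                        bool is_invokedynamic) {
  if (is_invokedynamic) {
    return e->f1 != nullptr;          // non-null CallSite => resolved
  }
  int shift = (1 + byte_no) * 8;      // (1 + byte_no) * BitsPerByte
  return ((e->indices >> shift) & 0xFF) == bytecode;
}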
@@ -2088,19 +2087,22 @@
   case Bytecodes::_invokespecial:
   case Bytecodes::_invokestatic:
   case Bytecodes::_invokeinterface:
     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);
     break;
+  case Bytecodes::_invokedynamic:
+    entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
+    break;
   default:
     ShouldNotReachHere();
     break;
   }
   __ movl(temp, (int) bytecode());
   __ call_VM(noreg, entry, temp);

   // Update registers with resolved info
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
   __ bind(resolved);
 }

 // The Rcache and index registers must be set before call
 void TemplateTable::load_field_cp_cache_entry(Register obj,
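Taken together with the previous hunk, the generated code follows the usual resolve-once pattern: fast-path out if the entry is already resolved, otherwise call the runtime resolver picked by the switch (resolve_invokedynamic being the new case) and re-read the cache entry afterwards. A minimal standalone paraphrase, all names illustrative:

// Standalone paraphrase of the control flow around the 'resolved' label.
static bool entry_is_resolved()  { return false; }  // the fast-path check shown above
static void call_resolver()      {}                 // call_VM -> resolve_invoke / resolve_invokedynamic
static void reload_cache_entry() {}                 // get_cache_and_index_at_bcp after the VM call

void resolve_once_sketch() {
  if (entry_is_resolved())
    return;              // branch to the 'resolved' label
  call_resolver();       // "resolve first time through"
  reload_cache_entry();  // "Update registers with resolved info"
}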
@@ -2830,19 +2832,18 @@
 void TemplateTable::count_calls(Register method, Register temp) {
   // implemented elsewhere
   ShouldNotReachHere();
 }

-void TemplateTable::prepare_invoke(Register method,
-                                   Register index,
-                                   int byte_no,
-                                   Bytecodes::Code code) {
+void TemplateTable::prepare_invoke(Register method, Register index, int byte_no) {
   // determine flags
+  Bytecodes::Code code = bytecode();
   const bool is_invokeinterface = code == Bytecodes::_invokeinterface;
+  const bool is_invokedynamic = code == Bytecodes::_invokedynamic;
   const bool is_invokevirtual = code == Bytecodes::_invokevirtual;
   const bool is_invokespecial = code == Bytecodes::_invokespecial;
-  const bool load_receiver = code != Bytecodes::_invokestatic;
+  const bool load_receiver = (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic);
   const bool receiver_null_check = is_invokespecial;
   const bool save_flags = is_invokeinterface || is_invokevirtual;
   // setup registers & access constant pool cache
   const Register recv = rcx;
   const Register flags = rdx;
@@ -2856,13 +2857,17 @@
   // load receiver if needed (note: no return address pushed yet)
   if (load_receiver) {
     __ movl(recv, flags);
     __ andl(recv, 0xFF);
     if (TaggedStackInterpreter) __ shll(recv, 1); // index*2
-    __ movptr(recv, Address(rsp, recv, Address::times_8,
-                            -Interpreter::expr_offset_in_bytes(1)));
-    __ verify_oop(recv);
+    Address recv_addr(rsp, recv, Address::times_8, -Interpreter::expr_offset_in_bytes(1));
+    if (is_invokedynamic) {
+      __ lea(recv, recv_addr);
+    } else {
+      __ movptr(recv, recv_addr);
+      __ verify_oop(recv);
+    }
   }

   // do null check if needed
   if (receiver_null_check) {
     __ null_check(recv);
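The receiver load above works off the low byte of the cache entry's flags word, which (to my reading, not stated in the diff) holds the call's parameter-slot count; the receiver is the deepest of those slots on the expression stack. The new code first forms the slot address, then either loads the oop from it or, on the invokedynamic branch, keeps just the address via lea. A paraphrase with illustrative names, ignoring TaggedStackInterpreter:

#include <cstdint>

typedef intptr_t* OopSlot;  // stand-in for a pointer to an oop on the expression stack

// 'flags' is the cache entry's flags word, 'sp' the expression stack pointer
// (one word per value, top of stack at sp).
OopSlot receiver_slot_sketch(intptr_t flags, intptr_t* sp) {
  intptr_t param_size = flags & 0xFF;  // low byte: parameter slots, receiver included (assumed)
  return sp + (param_size - 1);        // receiver is the deepest argument slot
}

// Ordinary invokes: load the receiver oop (movptr + verify_oop).
intptr_t load_receiver_sketch(intptr_t flags, intptr_t* sp) {
  return *receiver_slot_sketch(flags, sp);
}

// invokedynamic branch: keep the slot address itself (lea).
OopSlot receiver_address_sketch(intptr_t flags, intptr_t* sp) {
  return receiver_slot_sketch(flags, sp);
}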
@@ -2876,14 +2881,18 @@
   __ shrl(flags, ConstantPoolCacheEntry::tosBits);
   // Make sure we don't need to mask flags for tosBits after the above shift
   ConstantPoolCacheEntry::verify_tosBits();
   // load return address
   {
-    ExternalAddress return_5((address)Interpreter::return_5_addrs_by_index_table());
-    ExternalAddress return_3((address)Interpreter::return_3_addrs_by_index_table());
-    __ lea(rscratch1, (is_invokeinterface ? return_5 : return_3));
-    __ movptr(flags, Address(rscratch1, flags, Address::times_8));
+    address table_addr;
+    if (is_invokeinterface || is_invokedynamic)
+      table_addr = (address)Interpreter::return_5_addrs_by_index_table();
+    else
+      table_addr = (address)Interpreter::return_3_addrs_by_index_table();
+    ExternalAddress table(table_addr);
+    __ lea(rscratch1, table);
+    __ movptr(flags, Address(rscratch1, flags, Address::times_ptr));
   }

   // push return address
   __ push(flags);

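The return-address table switch changes because invokedynamic, like invokeinterface, is a 5-byte bytecode while the other invokes are 3 bytes, so the interpreter must resume at bcp + 5 rather than bcp + 3 when the callee returns; that is my reading of the return_5/return_3 names, which the diff itself does not spell out. The chosen table is then indexed by the callee result's TOS state taken from the high bits of flags. Conceptually:

// Illustrative sketch of the table selection; the table contents and the
// state count stand in for the real TosState-indexed tables built by the
// template interpreter.
typedef void (*ReturnEntry)();
enum { kNumTosStates = 9 };  // assumed number of TOS states

extern ReturnEntry return_3_table[kNumTosStates];  // resume at bcp + 3
extern ReturnEntry return_5_table[kNumTosStates];  // resume at bcp + 5

ReturnEntry select_return_entry(bool is_invokeinterface, bool is_invokedynamic,
                                int result_tos_state) {
  ReturnEntry* table = (is_invokeinterface || is_invokedynamic)
                           ? return_5_table    // 5-byte invoke bytecodes
                           : return_3_table;   // 3-byte invoke bytecodes
  return table[result_tos_state];              // index taken from flags >> tosBits
}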
@@ -2957,21 +2966,21 @@
 }


 void TemplateTable::invokespecial(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }


 void TemplateTable::invokestatic(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }
@@ -3070,11 +3079,28 @@
     // the call_VM checks for exception, so we should never return here.
     __ should_not_reach_here();
     return;
   }

-  __ stop("invokedynamic NYI");//6815692//
+  prepare_invoke(rax, rbx, byte_no);
+
+  // rax: CallSite object (f1)
+  // rbx: unused (f2)
+  // rcx: receiver address
+  // rdx: flags (unused)
+
+  if (ProfileInterpreter) {
+    Label L;
+    // %%% should make a type profile for any invokedynamic that takes a ref argument
+    // profile this call
+    __ profile_call(r13);
+  }
+
+  __ movptr(rcx, Address(rax, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
+  __ null_check(rcx);
+  __ prepare_to_jump_from_interpreted();
+  __ jump_to_method_handle_entry(rcx, rdx);
 }


 //-----------------------------------------------------------------------------
 // Allocation
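Stepping back from the last hunk: the NYI stop in TemplateTable::invokedynamic is replaced by real dispatch code. After resolve_cache_and_index, rax holds the CallSite object from f1; the stub loads its target method handle (java_dyn_CallSite::target), null-checks it, and tail-jumps to the method handle entry, leaving the pushed arguments where they are. A plain C++ paraphrase; the struct names are illustrative, and only the CallSite-to-target relationship comes from the diff.

// Illustrative stand-ins for the Java objects involved.
struct MethodHandleSketch { void (*interpreted_entry)(); };
struct CallSiteSketch     { MethodHandleSketch* target; };  // models java.dyn.CallSite.target

// Paraphrase of the generated stub: no receiver is popped, and the call
// site's current target is fetched on every dispatch before being entered.
void invokedynamic_dispatch_sketch(CallSiteSketch* site /* from f1, in rax */) {
  MethodHandleSketch* mh = site->target;  // movptr(rcx, Address(rax, target_offset))
  if (mh == nullptr) {
    return;  // null_check(rcx): the real code raises NullPointerException here
  }
  mh->interpreted_entry();                // jump_to_method_handle_entry(rcx, rdx)
}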