/*
 * Copyright (c) 2008, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef CPU_ARM_VM_INTERP_MASM_ARM_HPP
#define CPU_ARM_VM_INTERP_MASM_ARM_HPP

#include "asm/macroAssembler.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "interpreter/invocationCounter.hpp"
#include "runtime/frame.hpp"
#include "prims/jvmtiExport.hpp"

// This file specializes the assembler with interpreter-specific macros

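// InterpreterMacroAssembler extends MacroAssembler with the helpers used by
// the template interpreter: expression stack and TOS-state management,
// bytecode dispatch, constant pool (cache) access, GC write barriers, object
// locking, profiling (MethodData) updates and JVMTI notifications.
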
class InterpreterMacroAssembler: public MacroAssembler {

 public:

  // allow JvmtiExport checks to be extended
  bool can_force_early_return()       { return JvmtiExport::can_force_early_return(); }
  bool can_post_interpreter_events()  { return JvmtiExport::can_post_interpreter_events(); }
  bool can_pop_frame()                { return JvmtiExport::can_pop_frame(); }
  bool can_post_breakpoint()          { return JvmtiExport::can_post_breakpoint(); }
  bool can_post_field_access()        { return JvmtiExport::can_post_field_access(); }
  bool can_post_field_modification()  { return JvmtiExport::can_post_field_modification(); }
  // flags controlled by JVMTI settings
  bool rewrite_frequent_pairs()       { return RewriteFrequentPairs; }

 protected:

  // Template interpreter specific version of call_VM_helper
  virtual void call_VM_helper(Register oop_result, address entry_point, int number_of_arguments, bool check_exceptions);

  virtual void check_and_handle_popframe();
  virtual void check_and_handle_earlyret();

  // base routine for all dispatches
  typedef enum { DispatchDefault, DispatchNormal } DispatchTableMode;
  void dispatch_base(TosState state, DispatchTableMode table_mode, bool verifyoop = true);

 public:
  InterpreterMacroAssembler(CodeBuffer* code);

  // Interpreter-specific registers
#if defined(AARCH64) && defined(ASSERT)

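  // Debug-only consistency checks of the interpreter stack state: Rstack_top
  // against SP, the extended SP slot, and the cached stack top slot of the
  // current frame. In other configurations these checks are no-ops (see the
  // #else branch below).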
#define check_stack_top() _check_stack_top("invalid Rstack_top at " __FILE__ ":" XSTR(__LINE__))
#define check_stack_top_on_expansion() _check_stack_top("invalid Rstack_top at " __FILE__ ":" XSTR(__LINE__), VerifyInterpreterStackTop)
#define check_extended_sp(tmp) _check_extended_sp(tmp, "SP does not match extended SP in frame at " __FILE__ ":" XSTR(__LINE__))
#define check_no_cached_stack_top(tmp) _check_no_cached_stack_top(tmp, "stack_top is already cached in frame at " __FILE__ ":" XSTR(__LINE__))

  void _check_stack_top(const char* msg, bool enabled = true) {
    if (enabled) {
      Label L;
      cmp(SP, Rstack_top);
      b(L, ls);
      stop(msg);
      bind(L);
    }
  }

  void _check_extended_sp(Register tmp, const char* msg) {
    Label L;
    ldr(tmp, Address(FP, frame::interpreter_frame_extended_sp_offset * wordSize));
    cmp(SP, tmp);
    b(L, eq);
    stop(msg);
    bind(L);
  }

  void _check_no_cached_stack_top(Register tmp, const char* msg) {
    Label L;
    ldr(tmp, Address(FP, frame::interpreter_frame_stack_top_offset * wordSize));
    cbz(tmp, L);
    stop(msg);
    bind(L);
  }

#else

  inline void check_stack_top() {}
  inline void check_stack_top_on_expansion() {}
  inline void check_extended_sp(Register tmp) {}
  inline void check_no_cached_stack_top(Register tmp) {}

#endif // AARCH64 && ASSERT

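  // Save/restore of interpreter state registers to/from their fixed slots in
  // the interpreter frame (addressed as offsets from FP).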
  void save_bcp()       { str(Rbcp, Address(FP, frame::interpreter_frame_bcp_offset * wordSize)); }
  void restore_bcp()    { ldr(Rbcp, Address(FP, frame::interpreter_frame_bcp_offset * wordSize)); }
  void restore_locals() { ldr(Rlocals, Address(FP, frame::interpreter_frame_locals_offset * wordSize)); }
  void restore_method() { ldr(Rmethod, Address(FP, frame::interpreter_frame_method_offset * wordSize)); }
  void restore_dispatch();

#ifdef AARCH64
  void save_stack_top()           { check_stack_top(); str(Rstack_top, Address(FP, frame::interpreter_frame_stack_top_offset * wordSize)); }
  void clear_cached_stack_top()   { str(ZR, Address(FP, frame::interpreter_frame_stack_top_offset * wordSize)); }
  void restore_stack_top()        { ldr(Rstack_top, Address(FP, frame::interpreter_frame_stack_top_offset * wordSize)); clear_cached_stack_top(); check_stack_top(); }
  void cut_sp_before_call()       { align_reg(SP, Rstack_top, StackAlignmentInBytes); }
  void restore_sp_after_call(Register tmp) { ldr(tmp, Address(FP, frame::interpreter_frame_extended_sp_offset * wordSize)); mov(SP, tmp); }
#endif

  // Helpers for runtime call arguments/results
  void get_const(Register reg)                           { ldr(reg, Address(Rmethod, Method::const_offset())); }
  void get_constant_pool(Register reg)                   { get_const(reg); ldr(reg, Address(reg, ConstMethod::constants_offset())); }
  void get_constant_pool_cache(Register reg)             { get_constant_pool(reg); ldr(reg, Address(reg, ConstantPool::cache_offset_in_bytes())); }
  void get_cpool_and_tags(Register cpool, Register tags) { get_constant_pool(cpool); ldr(tags, Address(cpool, ConstantPool::tags_offset_in_bytes())); }

  // Sets reg. Blows Rtemp.
  void get_unsigned_2_byte_index_at_bcp(Register reg, int bcp_offset);

  // Sets index. Blows reg_tmp.
  void get_index_at_bcp(Register index, int bcp_offset, Register reg_tmp, size_t index_size = sizeof(u2));
  // Sets cache, index.
  void get_cache_and_index_at_bcp(Register cache, Register index, int bcp_offset, size_t index_size = sizeof(u2));
  void get_cache_and_index_and_bytecode_at_bcp(Register cache, Register index, Register bytecode, int byte_no, int bcp_offset, size_t index_size = sizeof(u2));
  // Sets cache. Blows reg_tmp.
  void get_cache_entry_pointer_at_bcp(Register cache, Register reg_tmp, int bcp_offset, size_t index_size = sizeof(u2));

  // Load object from cpool->resolved_references(*bcp+1)
  void load_resolved_reference_at_index(Register result, Register tmp);

  void store_check_part1(Register card_table_base);                // Sets card_table_base register.
  void store_check_part2(Register obj, Register card_table_base, Register tmp);

  void set_card(Register card_table_base, Address card_table_addr, Register tmp);

#if INCLUDE_ALL_GCS
  // G1 pre-barrier.
  // Blows all volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64, Rtemp, LR).
  // If store_addr != noreg, then previous value is loaded from [store_addr];
  // in such case store_addr and new_val registers are preserved;
  // otherwise pre_val register is preserved.
  void g1_write_barrier_pre(Register store_addr,
                            Register new_val,
                            Register pre_val,
                            Register tmp1,
                            Register tmp2);

  // G1 post-barrier.
  // Blows all volatile registers (R0-R3 on 32-bit ARM, R0-R18 on AArch64, Rtemp, LR).
  void g1_write_barrier_post(Register store_addr,
                             Register new_val,
                             Register tmp1,
                             Register tmp2,
                             Register tmp3);
#endif // INCLUDE_ALL_GCS

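  // Expression stack pushes/pops for the individual tos states. Note that on
  // 32-bit ARM a long occupies a register pair (lo/hi), while on AArch64 it
  // fits in a single register.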
  void pop_ptr(Register r);
  void pop_i(Register r = R0_tos);
#ifdef AARCH64
  void pop_l(Register r = R0_tos);
#else
  void pop_l(Register lo = R0_tos_lo, Register hi = R1_tos_hi);
#endif
  void pop_f(FloatRegister fd);
  void pop_d(FloatRegister fd);

  void push_ptr(Register r);
  void push_i(Register r = R0_tos);
#ifdef AARCH64
  void push_l(Register r = R0_tos);
#else
  void push_l(Register lo = R0_tos_lo, Register hi = R1_tos_hi);
#endif
  void push_f();
  void push_d();

  // Transition vtos -> state. Blows R0, R1. Sets TOS cached value.
  void pop(TosState state);
  // Transition state -> vtos. Blows Rtemp.
  void push(TosState state);

#ifndef AARCH64
  // The following methods are overridden to allow overloaded calls to
  //   MacroAssembler::push/pop(Register)
  //   MacroAssembler::push/pop(RegisterSet)
  //   InterpreterMacroAssembler::push/pop(TosState)
  void push(Register rd, AsmCondition cond = al)         { MacroAssembler::push(rd, cond); }
  void pop(Register rd, AsmCondition cond = al)          { MacroAssembler::pop(rd, cond); }

  void push(RegisterSet reg_set, AsmCondition cond = al) { MacroAssembler::push(reg_set, cond); }
  void pop(RegisterSet reg_set, AsmCondition cond = al)  { MacroAssembler::pop(reg_set, cond); }

  // Converts return value in R0/R1 (interpreter calling conventions) to TOS cached value.
  void convert_retval_to_tos(TosState state);
  // Converts TOS cached value to return value in R0/R1 (according to interpreter calling conventions).
  void convert_tos_to_retval(TosState state);
#endif

  // JVMTI ForceEarlyReturn support
  void load_earlyret_value(TosState state);

  void jump_to_entry(address entry);

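  // Resets Rstack_top to the bottom of the expression stack (the current end
  // of the monitor block), discarding any values left on the expression stack.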
  // Blows Rtemp.
  void empty_expression_stack() {
    ldr(Rstack_top, Address(FP, frame::interpreter_frame_monitor_block_top_offset * wordSize));
    check_stack_top();
#ifdef AARCH64
    clear_cached_stack_top();
#else
    // NULL last_sp until next java call
    str(zero_register(Rtemp), Address(FP, frame::interpreter_frame_last_sp_offset * wordSize));
#endif // AARCH64
  }

  // Helpers for swap and dup
  void load_ptr(int n, Register val);
  void store_ptr(int n, Register val);

  // Generate a subtype check: branch to not_subtype if sub_klass is
  // not a subtype of super_klass.
  // Profiling code for the subtype check failure (profile_typecheck_failed)
  // should be explicitly generated by the caller in the not_subtype case.
  // Blows Rtemp, tmp1, tmp2.
  void gen_subtype_check(Register Rsub_klass, Register Rsuper_klass,
                         Label &not_subtype, Register tmp1, Register tmp2);

  // Dispatching
  void dispatch_prolog(TosState state, int step = 0);
  void dispatch_epilog(TosState state, int step = 0);
  void dispatch_only(TosState state);                 // dispatch by R3_bytecode
  void dispatch_only_normal(TosState state);          // dispatch normal table by R3_bytecode
  void dispatch_only_noverify(TosState state);
  void dispatch_next(TosState state, int step = 0);   // load R3_bytecode from [Rbcp + step] and dispatch by R3_bytecode

  // jump to an invoked target
  void prepare_to_jump_from_interpreted();
  void jump_from_interpreted(Register method);

  void narrow(Register result);

  // Returning from interpreted functions
  //
  // Removes the current activation (incl. unlocking of monitors)
  // and sets up the return address. This code is also used for
  // exception unwinding. In that case, we do not want to throw
  // IllegalMonitorStateExceptions, since that might get us into an
  // infinite rethrow exception loop.
  // Additionally this code is used for popFrame and earlyReturn.
  // In the popFrame case we want to skip throwing an exception,
  // installing an exception, and notifying jvmdi.
  // In the earlyReturn case we only want to skip throwing an exception
  // and installing an exception.
  void remove_activation(TosState state, Register ret_addr,
                         bool throw_monitor_exception = true,
                         bool install_monitor_exception = true,
                         bool notify_jvmdi = true);

  // At certain points in the method invocation the monitor of
  // synchronized methods hasn't been entered yet.
  // To correctly handle exceptions at these points, we set the thread-local
  // variable _do_not_unlock_if_synchronized to true. remove_activation() will
  // check this flag.
  void set_do_not_unlock_if_synchronized(bool flag, Register tmp);

  // Debugging
  void interp_verify_oop(Register reg, TosState state, const char* file, int line);    // only if +VerifyOops && state == atos

  void verify_FPU(int stack_depth, TosState state = ftos) {
    // No VFP state verification is required for ARM
  }

  // Object locking
  void lock_object  (Register lock_reg);
  void unlock_object(Register lock_reg);

  // Interpreter profiling operations
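  // mdp = method data pointer: the current position within the MethodData
  // (profiling data) of the method being interpreted.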
  void set_method_data_pointer_for_bcp();               // Blows R0-R3/R0-R18, Rtemp, LR
  void test_method_data_pointer(Register mdp, Label& zero_continue);
  void verify_method_data_pointer();

  void set_mdp_data_at(Register mdp_in, int offset, Register value);

  // Increments mdp data. Sets bumped_count register to adjusted counter.
  void increment_mdp_data_at(Address data, Register bumped_count, bool decrement = false);
  // Increments mdp data. Sets bumped_count register to adjusted counter.
  void increment_mdp_data_at(Register mdp_in, int offset, Register bumped_count, bool decrement = false);
  void increment_mask_and_jump(Address counter_addr,
                               int increment, Address mask_addr,
                               Register scratch, Register scratch2,
                               AsmCondition cond, Label* where);
  void set_mdp_flag_at(Register mdp_in, int flag_constant);

  void test_mdp_data_at(Register mdp_in, int offset, Register value,
                        Register test_value_out,
                        Label& not_equal_continue);

  void record_klass_in_profile(Register receiver, Register mdp,
                               Register reg_tmp, bool is_virtual_call);
  void record_klass_in_profile_helper(Register receiver, Register mdp,
                                      Register reg_tmp,
                                      int start_row, Label& done, bool is_virtual_call);

  void update_mdp_by_offset(Register mdp_in, int offset_of_offset, Register reg_tmp);
  void update_mdp_by_offset(Register mdp_in, Register reg_offset, Register reg_tmp);
  void update_mdp_by_constant(Register mdp_in, int constant);
  void update_mdp_for_ret(Register return_bci);         // Blows R0-R3/R0-R18, Rtemp, LR

  void profile_taken_branch(Register mdp, Register bumped_count); // Sets mdp, bumped_count registers, blows Rtemp.
  void profile_not_taken_branch(Register mdp);                    // Sets mdp, blows Rtemp.

  void profile_call(Register mdp);                       // Sets mdp, blows Rtemp.
  void profile_final_call(Register mdp);                 // Sets mdp, blows Rtemp.
  void profile_virtual_call(Register mdp, Register receiver,      // Sets mdp, blows Rtemp.
                            bool receiver_can_be_null = false);
  void profile_ret(Register mdp, Register return_bci);   // Sets mdp, blows R0-R3/R0-R18, Rtemp, LR
  void profile_null_seen(Register mdp);                  // Sets mdp.
  void profile_typecheck(Register mdp, Register klass);  // Sets mdp, blows Rtemp.

  void profile_typecheck_failed(Register mdp);           // Sets mdp, blows Rtemp.
  void profile_switch_default(Register mdp);             // Sets mdp, blows Rtemp.

  // Sets mdp. Blows reg_tmp1, reg_tmp2. Index could be the same as reg_tmp2.
  void profile_switch_case(Register mdp, Register index, Register reg_tmp1, Register reg_tmp2);

  void byteswap_u32(Register r, Register rtmp1, Register rtmp2);

  void inc_global_counter(address address_of_counter, int offset_in_bytes, Register tmp1, Register tmp2, bool avoid_overflow);

  typedef enum { NotifyJVMTI, SkipNotifyJVMTI } NotifyMethodExitMode;

  // support for jvmti
  void notify_method_entry();
  void notify_method_exit(TosState state, NotifyMethodExitMode mode,
                          bool native = false, Register result_lo = noreg, Register result_hi = noreg, FloatRegister result_fp = fnoreg);

  void trace_state(const char* msg) PRODUCT_RETURN;

  void get_method_counters(Register method, Register Rcounters, Label& skip);
};

#endif // CPU_ARM_VM_INTERP_MASM_ARM_HPP