200 assert(_desc->uses_bcp(), "inconsistent uses_bcp information"); |
200 assert(_desc->uses_bcp(), "inconsistent uses_bcp information"); |
201 return Address(rsi, offset); |
201 return Address(rsi, offset); |
202 } |
202 } |
203 |
203 |
204 |
204 |
205 void TemplateTable::patch_bytecode(Bytecodes::Code bytecode, Register bc, |
205 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg, |
206 Register scratch, |
206 Register temp_reg, bool load_bc_into_bc_reg/*=true*/, |
207 bool load_bc_into_scratch/*=true*/) { |
207 int byte_no) { |
208 |
208 if (!RewriteBytecodes) return; |
209 if (!RewriteBytecodes) return; |
209 Label L_patch_done; |
210 // the pair bytecodes have already done the load. |
210 |
211 if (load_bc_into_scratch) { |
211 switch (bc) { |
212 __ movl(bc, bytecode); |
212 case Bytecodes::_fast_aputfield: |
213 } |
213 case Bytecodes::_fast_bputfield: |
214 Label patch_done; |
214 case Bytecodes::_fast_cputfield: |
|
215 case Bytecodes::_fast_dputfield: |
|
216 case Bytecodes::_fast_fputfield: |
|
217 case Bytecodes::_fast_iputfield: |
|
218 case Bytecodes::_fast_lputfield: |
|
219 case Bytecodes::_fast_sputfield: |
|
220 { |
|
221 // We skip bytecode quickening for putfield instructions when |
|
222 // the put_code written to the constant pool cache is zero. |
|
223 // This is required so that every execution of this instruction |
|
224 // calls out to InterpreterRuntime::resolve_get_put to do |
|
225 // additional, required work. |
|
226 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); |
|
227 assert(load_bc_into_bc_reg, "we use bc_reg as temp"); |
|
228 __ get_cache_and_index_and_bytecode_at_bcp(bc_reg, temp_reg, temp_reg, byte_no, 1); |
|
229 __ movl(bc_reg, bc); |
|
230 __ cmpl(temp_reg, (int) 0); |
|
231 __ jcc(Assembler::zero, L_patch_done); // don't patch |
|
232 } |
|
233 break; |
|
234 default: |
|
235 assert(byte_no == -1, "sanity"); |
|
236 // the pair bytecodes have already done the load. |
|
237 if (load_bc_into_bc_reg) { |
|
238 __ movl(bc_reg, bc); |
|
239 } |
|
240 } |
|
241 |
215 if (JvmtiExport::can_post_breakpoint()) { |
242 if (JvmtiExport::can_post_breakpoint()) { |
216 Label fast_patch; |
243 Label L_fast_patch; |
217 // if a breakpoint is present we can't rewrite the stream directly |
244 // if a breakpoint is present we can't rewrite the stream directly |
218 __ movzbl(scratch, at_bcp(0)); |
245 __ movzbl(temp_reg, at_bcp(0)); |
219 __ cmpl(scratch, Bytecodes::_breakpoint); |
246 __ cmpl(temp_reg, Bytecodes::_breakpoint); |
220 __ jcc(Assembler::notEqual, fast_patch); |
247 __ jcc(Assembler::notEqual, L_fast_patch); |
221 __ get_method(scratch); |
248 __ get_method(temp_reg); |
222 // Let breakpoint table handling rewrite to quicker bytecode |
249 // Let breakpoint table handling rewrite to quicker bytecode |
223 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, rsi, bc); |
250 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), temp_reg, rsi, bc_reg); |
224 #ifndef ASSERT |
251 #ifndef ASSERT |
225 __ jmpb(patch_done); |
252 __ jmpb(L_patch_done); |
226 #else |
253 #else |
227 __ jmp(patch_done); |
254 __ jmp(L_patch_done); |
228 #endif |
255 #endif |
229 __ bind(fast_patch); |
256 __ bind(L_fast_patch); |
230 } |
257 } |
|
258 |
231 #ifdef ASSERT |
259 #ifdef ASSERT |
232 Label okay; |
260 Label L_okay; |
233 __ load_unsigned_byte(scratch, at_bcp(0)); |
261 __ load_unsigned_byte(temp_reg, at_bcp(0)); |
234 __ cmpl(scratch, (int)Bytecodes::java_code(bytecode)); |
262 __ cmpl(temp_reg, (int)Bytecodes::java_code(bc)); |
235 __ jccb(Assembler::equal, okay); |
263 __ jccb(Assembler::equal, L_okay); |
236 __ cmpl(scratch, bc); |
264 __ cmpl(temp_reg, bc_reg); |
237 __ jcc(Assembler::equal, okay); |
265 __ jcc(Assembler::equal, L_okay); |
238 __ stop("patching the wrong bytecode"); |
266 __ stop("patching the wrong bytecode"); |
239 __ bind(okay); |
267 __ bind(L_okay); |
240 #endif |
268 #endif |
|
269 |
241 // patch bytecode |
270 // patch bytecode |
242 __ movb(at_bcp(0), bc); |
271 __ movb(at_bcp(0), bc_reg); |
243 __ bind(patch_done); |
272 __ bind(L_patch_done); |
244 } |
273 } |
245 |
274 |
//----------------------------------------------------------------------------------------------------
// Individual instructions

2058 Register temp = rbx; |
2087 Register temp = rbx; |
2059 |
2088 |
2060 assert_different_registers(result, Rcache, index, temp); |
2089 assert_different_registers(result, Rcache, index, temp); |
2061 |
2090 |
2062 Label resolved; |
2091 Label resolved; |
2063 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); |
|
2064 if (byte_no == f1_oop) { |
2092 if (byte_no == f1_oop) { |
2065 // We are resolved if the f1 field contains a non-null object (CallSite, etc.) |
2093 // We are resolved if the f1 field contains a non-null object (CallSite, etc.) |
2066 // This kind of CP cache entry does not need to match the flags byte, because |
2094 // This kind of CP cache entry does not need to match the flags byte, because |
2067 // there is a 1-1 relation between bytecode type and CP entry type. |
2095 // there is a 1-1 relation between bytecode type and CP entry type. |
2068 assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD) |
2096 assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD) |
|
2097 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); |
2069 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset())); |
2098 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset())); |
2070 __ testptr(result, result); |
2099 __ testptr(result, result); |
2071 __ jcc(Assembler::notEqual, resolved); |
2100 __ jcc(Assembler::notEqual, resolved); |
2072 } else { |
2101 } else { |
2073 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); |
2102 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); |
2074 assert(result == noreg, ""); //else change code for setting result |
2103 assert(result == noreg, ""); //else change code for setting result |
2075 const int shift_count = (1 + byte_no)*BitsPerByte; |
2104 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size); |
2076 __ movl(temp, Address(Rcache, index, Address::times_4, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset())); |
2105 __ cmpl(temp, (int) bytecode()); // have we resolved this bytecode? |
2077 __ shrl(temp, shift_count); |
|
2078 // have we resolved this bytecode? |
|
2079 __ andl(temp, 0xFF); |
|
2080 __ cmpl(temp, (int)bytecode()); |
|
2081 __ jcc(Assembler::equal, resolved); |
2106 __ jcc(Assembler::equal, resolved); |
2082 } |
2107 } |
2083 |
2108 |
2084 // resolve first time through |
2109 // resolve first time through |
2085 address entry; |
2110 address entry; |
2451 |
2476 |
2452 Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble; |
2477 Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble; |
2453 |
2478 |
2454 __ shrl(flags, ConstantPoolCacheEntry::tosBits); |
2479 __ shrl(flags, ConstantPoolCacheEntry::tosBits); |
2455 assert(btos == 0, "change code, btos != 0"); |
2480 assert(btos == 0, "change code, btos != 0"); |
2456 // btos |
|
2457 __ andl(flags, 0x0f); |
2481 __ andl(flags, 0x0f); |
2458 __ jcc(Assembler::notZero, notByte); |
2482 __ jcc(Assembler::notZero, notByte); |
2459 |
2483 |
2460 __ pop(btos); |
2484 // btos |
2461 if (!is_static) pop_and_check_object(obj); |
2485 { |
2462 __ movb(lo, rax ); |
2486 __ pop(btos); |
2463 if (!is_static) { |
2487 if (!is_static) pop_and_check_object(obj); |
2464 patch_bytecode(Bytecodes::_fast_bputfield, rcx, rbx); |
2488 __ movb(lo, rax); |
2465 } |
2489 if (!is_static) { |
2466 __ jmp(Done); |
2490 patch_bytecode(Bytecodes::_fast_bputfield, rcx, rbx, true, byte_no); |
|
2491 } |
|
2492 __ jmp(Done); |
|
2493 } |
2467 |
2494 |
2468 __ bind(notByte); |
2495 __ bind(notByte); |
|
2496 __ cmpl(flags, itos); |
|
2497 __ jcc(Assembler::notEqual, notInt); |
|
2498 |
2469 // itos |
2499 // itos |
2470 __ cmpl(flags, itos ); |
2500 { |
2471 __ jcc(Assembler::notEqual, notInt); |
2501 __ pop(itos); |
2472 |
2502 if (!is_static) pop_and_check_object(obj); |
2473 __ pop(itos); |
2503 __ movl(lo, rax); |
2474 if (!is_static) pop_and_check_object(obj); |
2504 if (!is_static) { |
2475 |
2505 patch_bytecode(Bytecodes::_fast_iputfield, rcx, rbx, true, byte_no); |
2476 __ movl(lo, rax ); |
2506 } |
2477 if (!is_static) { |
2507 __ jmp(Done); |
2478 patch_bytecode(Bytecodes::_fast_iputfield, rcx, rbx); |
2508 } |
2479 } |
|
2480 __ jmp(Done); |
|
2481 |
2509 |
2482 __ bind(notInt); |
2510 __ bind(notInt); |
|
2511 __ cmpl(flags, atos); |
|
2512 __ jcc(Assembler::notEqual, notObj); |
|
2513 |
2483 // atos |
2514 // atos |
2484 __ cmpl(flags, atos ); |
2515 { |
2485 __ jcc(Assembler::notEqual, notObj); |
2516 __ pop(atos); |
2486 |
2517 if (!is_static) pop_and_check_object(obj); |
2487 __ pop(atos); |
2518 do_oop_store(_masm, lo, rax, _bs->kind(), false); |
2488 if (!is_static) pop_and_check_object(obj); |
2519 if (!is_static) { |
2489 |
2520 patch_bytecode(Bytecodes::_fast_aputfield, rcx, rbx, true, byte_no); |
2490 do_oop_store(_masm, lo, rax, _bs->kind(), false); |
2521 } |
2491 |
2522 __ jmp(Done); |
2492 if (!is_static) { |
2523 } |
2493 patch_bytecode(Bytecodes::_fast_aputfield, rcx, rbx); |
|
2494 } |
|
2495 |
|
2496 __ jmp(Done); |
|
2497 |
2524 |
2498 __ bind(notObj); |
2525 __ bind(notObj); |
|
2526 __ cmpl(flags, ctos); |
|
2527 __ jcc(Assembler::notEqual, notChar); |
|
2528 |
2499 // ctos |
2529 // ctos |
2500 __ cmpl(flags, ctos ); |
2530 { |
2501 __ jcc(Assembler::notEqual, notChar); |
2531 __ pop(ctos); |
2502 |
2532 if (!is_static) pop_and_check_object(obj); |
2503 __ pop(ctos); |
2533 __ movw(lo, rax); |
2504 if (!is_static) pop_and_check_object(obj); |
2534 if (!is_static) { |
2505 __ movw(lo, rax ); |
2535 patch_bytecode(Bytecodes::_fast_cputfield, rcx, rbx, true, byte_no); |
2506 if (!is_static) { |
2536 } |
2507 patch_bytecode(Bytecodes::_fast_cputfield, rcx, rbx); |
2537 __ jmp(Done); |
2508 } |
2538 } |
2509 __ jmp(Done); |
|
2510 |
2539 |
2511 __ bind(notChar); |
2540 __ bind(notChar); |
|
2541 __ cmpl(flags, stos); |
|
2542 __ jcc(Assembler::notEqual, notShort); |
|
2543 |
2512 // stos |
2544 // stos |
2513 __ cmpl(flags, stos ); |
2545 { |
2514 __ jcc(Assembler::notEqual, notShort); |
2546 __ pop(stos); |
2515 |
2547 if (!is_static) pop_and_check_object(obj); |
2516 __ pop(stos); |
2548 __ movw(lo, rax); |
2517 if (!is_static) pop_and_check_object(obj); |
2549 if (!is_static) { |
2518 __ movw(lo, rax ); |
2550 patch_bytecode(Bytecodes::_fast_sputfield, rcx, rbx, true, byte_no); |
2519 if (!is_static) { |
2551 } |
2520 patch_bytecode(Bytecodes::_fast_sputfield, rcx, rbx); |
2552 __ jmp(Done); |
2521 } |
2553 } |
2522 __ jmp(Done); |
|
2523 |
2554 |
2524 __ bind(notShort); |
2555 __ bind(notShort); |
|
2556 __ cmpl(flags, ltos); |
|
2557 __ jcc(Assembler::notEqual, notLong); |
|
2558 |
2525 // ltos |
2559 // ltos |
2526 __ cmpl(flags, ltos ); |
2560 { |
2527 __ jcc(Assembler::notEqual, notLong); |
2561 Label notVolatileLong; |
2528 |
2562 __ testl(rdx, rdx); |
2529 Label notVolatileLong; |
2563 __ jcc(Assembler::zero, notVolatileLong); |
2530 __ testl(rdx, rdx); |
2564 |
2531 __ jcc(Assembler::zero, notVolatileLong); |
2565 __ pop(ltos); // overwrites rdx, do this after testing volatile. |
2532 |
2566 if (!is_static) pop_and_check_object(obj); |
2533 __ pop(ltos); // overwrites rdx, do this after testing volatile. |
2567 |
2534 if (!is_static) pop_and_check_object(obj); |
2568 // Replace with real volatile test |
2535 |
2569 __ push(rdx); |
2536 // Replace with real volatile test |
2570 __ push(rax); // Must update atomically with FIST |
2537 __ push(rdx); |
2571 __ fild_d(Address(rsp,0)); // So load into FPU register |
2538 __ push(rax); // Must update atomically with FIST |
2572 __ fistp_d(lo); // and put into memory atomically |
2539 __ fild_d(Address(rsp,0)); // So load into FPU register |
2573 __ addptr(rsp, 2*wordSize); |
2540 __ fistp_d(lo); // and put into memory atomically |
2574 // volatile_barrier(); |
2541 __ addptr(rsp, 2*wordSize); |
2575 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad | |
2542 // volatile_barrier(); |
2576 Assembler::StoreStore)); |
2543 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad | |
2577 // Don't rewrite volatile version |
2544 Assembler::StoreStore)); |
2578 __ jmp(notVolatile); |
2545 // Don't rewrite volatile version |
2579 |
2546 __ jmp(notVolatile); |
2580 __ bind(notVolatileLong); |
2547 |
2581 |
2548 __ bind(notVolatileLong); |
2582 __ pop(ltos); // overwrites rdx |
2549 |
2583 if (!is_static) pop_and_check_object(obj); |
2550 __ pop(ltos); // overwrites rdx |
2584 NOT_LP64(__ movptr(hi, rdx)); |
2551 if (!is_static) pop_and_check_object(obj); |
2585 __ movptr(lo, rax); |
2552 NOT_LP64(__ movptr(hi, rdx)); |
2586 if (!is_static) { |
2553 __ movptr(lo, rax); |
2587 patch_bytecode(Bytecodes::_fast_lputfield, rcx, rbx, true, byte_no); |
2554 if (!is_static) { |
2588 } |
2555 patch_bytecode(Bytecodes::_fast_lputfield, rcx, rbx); |
2589 __ jmp(notVolatile); |
2556 } |
2590 } |
2557 __ jmp(notVolatile); |
|
2558 |
2591 |
2559 __ bind(notLong); |
2592 __ bind(notLong); |
|
2593 __ cmpl(flags, ftos); |
|
2594 __ jcc(Assembler::notEqual, notFloat); |
|
2595 |
2560 // ftos |
2596 // ftos |
2561 __ cmpl(flags, ftos ); |
2597 { |
2562 __ jcc(Assembler::notEqual, notFloat); |
2598 __ pop(ftos); |
2563 |
2599 if (!is_static) pop_and_check_object(obj); |
2564 __ pop(ftos); |
2600 __ fstp_s(lo); |
2565 if (!is_static) pop_and_check_object(obj); |
2601 if (!is_static) { |
2566 __ fstp_s(lo); |
2602 patch_bytecode(Bytecodes::_fast_fputfield, rcx, rbx, true, byte_no); |
2567 if (!is_static) { |
2603 } |
2568 patch_bytecode(Bytecodes::_fast_fputfield, rcx, rbx); |
2604 __ jmp(Done); |
2569 } |
2605 } |
2570 __ jmp(Done); |
|
2571 |
2606 |
2572 __ bind(notFloat); |
2607 __ bind(notFloat); |
|
2608 #ifdef ASSERT |
|
2609 __ cmpl(flags, dtos); |
|
2610 __ jcc(Assembler::notEqual, notDouble); |
|
2611 #endif |
|
2612 |
2573 // dtos |
2613 // dtos |
2574 __ cmpl(flags, dtos ); |
2614 { |
2575 __ jcc(Assembler::notEqual, notDouble); |
2615 __ pop(dtos); |
2576 |
2616 if (!is_static) pop_and_check_object(obj); |
2577 __ pop(dtos); |
2617 __ fstp_d(lo); |
2578 if (!is_static) pop_and_check_object(obj); |
2618 if (!is_static) { |
2579 __ fstp_d(lo); |
2619 patch_bytecode(Bytecodes::_fast_dputfield, rcx, rbx, true, byte_no); |
2580 if (!is_static) { |
2620 } |
2581 patch_bytecode(Bytecodes::_fast_dputfield, rcx, rbx); |
2621 __ jmp(Done); |
2582 } |
2622 } |
2583 __ jmp(Done); |
2623 |
2584 |
2624 #ifdef ASSERT |
2585 __ bind(notDouble); |
2625 __ bind(notDouble); |
2586 |
|
2587 __ stop("Bad state"); |
2626 __ stop("Bad state"); |
|
2627 #endif |
2588 |
2628 |
2589 __ bind(Done); |
2629 __ bind(Done); |
2590 |
2630 |
2591 // Check for volatile store |
2631 // Check for volatile store |
2592 __ testl(rdx, rdx); |
2632 __ testl(rdx, rdx); |