1 /* |
1 /* |
2 * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. |
2 * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved. |
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * |
4 * |
5 * This code is free software; you can redistribute it and/or modify it |
5 * This code is free software; you can redistribute it and/or modify it |
6 * under the terms of the GNU General Public License version 2 only, as |
6 * under the terms of the GNU General Public License version 2 only, as |
7 * published by the Free Software Foundation. |
7 * published by the Free Software Foundation. |
33 #include "utilities/align.hpp" |
33 #include "utilities/align.hpp" |
34 #include "utilities/debug.hpp" |
34 #include "utilities/debug.hpp" |
35 |
35 |
36 class NativeNMethodCmpBarrier: public NativeInstruction { |
36 class NativeNMethodCmpBarrier: public NativeInstruction { |
37 public: |
37 public: |
|
38 #ifdef _LP64 |
38 enum Intel_specific_constants { |
39 enum Intel_specific_constants { |
39 instruction_code = 0x81, |
40 instruction_code = 0x81, |
40 instruction_size = 8, |
41 instruction_size = 8, |
41 imm_offset = 4, |
42 imm_offset = 4, |
42 instruction_rex_prefix = Assembler::REX | Assembler::REX_B, |
43 instruction_rex_prefix = Assembler::REX | Assembler::REX_B, |
43 instruction_modrm = 0x7f // [r15 + offset] |
44 instruction_modrm = 0x7f // [r15 + offset] |
44 }; |
45 }; |
|
46 #else |
|
47 enum Intel_specific_constants { |
|
48 instruction_code = 0x81, |
|
49 instruction_size = 7, |
|
50 imm_offset = 2, |
|
51 instruction_modrm = 0x3f // [rdi] |
|
52 }; |
|
53 #endif |
45 |
54 |
46 address instruction_address() const { return addr_at(0); } |
55 address instruction_address() const { return addr_at(0); } |
47 address immediate_address() const { return addr_at(imm_offset); } |
56 address immediate_address() const { return addr_at(imm_offset); } |
48 |
57 |
49 jint get_immedate() const { return int_at(imm_offset); } |
58 jint get_immedate() const { return int_at(imm_offset); } |
50 void set_immediate(jint imm) { set_int_at(imm_offset, imm); } |
59 void set_immediate(jint imm) { set_int_at(imm_offset, imm); } |
51 void verify() const; |
60 void verify() const; |
52 }; |
61 }; |
53 |
62 |
|
// NOTE(review): this span is a side-by-side diff dump (two revisions
// interleaved, each line carrying its own revision line number and a
// trailing '|'). Only the head (alignment check) and the tail (mod/rm
// mismatch report) of the LP64 NativeNMethodCmpBarrier::verify() are
// visible; the checks in the middle (inner lines 69-84) are elided from
// this view — restore them from the upstream file before compiling.
63 #ifdef _LP64 |
54 void NativeNMethodCmpBarrier::verify() const { |
64 void NativeNMethodCmpBarrier::verify() const { |
55 if (((uintptr_t) instruction_address()) & 0x7) { |
65 if (((uintptr_t) instruction_address()) & 0x7) { |
56 fatal("Not properly aligned"); |
66 fatal("Not properly aligned"); |
57 } |
67 } |
58 |
68 |
75 tty->print_cr("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()), |
85 tty->print_cr("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()), |
76 modrm); |
86 modrm); |
77 fatal("not a cmp barrier"); |
87 fatal("not a cmp barrier"); |
78 } |
88 } |
79 } |
89 } |
|
90 #else |
|
91 void NativeNMethodCmpBarrier::verify() const { |
|
92 if (((uintptr_t) instruction_address()) & 0x3) { |
|
93 fatal("Not properly aligned"); |
|
94 } |
|
95 |
|
96 int inst = ubyte_at(0); |
|
97 if (inst != instruction_code) { |
|
98 tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", p2i(instruction_address()), |
|
99 inst); |
|
100 fatal("not a cmp barrier"); |
|
101 } |
|
102 |
|
103 int modrm = ubyte_at(1); |
|
104 if (modrm != instruction_modrm) { |
|
105 tty->print_cr("Addr: " INTPTR_FORMAT " mod/rm: 0x%x", p2i(instruction_address()), |
|
106 modrm); |
|
107 fatal("not a cmp barrier"); |
|
108 } |
|
109 } |
|
110 #endif // _LP64 |
80 |
111 |
// NOTE(review): side-by-side diff dump — only the signature of
// BarrierSetNMethod::deoptimize() and the first lines of its stack-layout
// comment are visible here; the rest of the comment and the entire function
// body are elided from this view (inner lines jump 115 -> 125/156). Restore
// the full body from the upstream file before compiling.
81 void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) { |
112 void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) { |
82 /* |
113 /* |
83 * [ callers frame ] |
114 * [ callers frame ] |
84 * [ callers return address ] <- callers rsp |
115 * [ callers return address ] <- callers rsp |
125 // If any code changes between the end of the verified entry where the entry |
156 // If any code changes between the end of the verified entry where the entry |
126 // barrier resides, and the completion of the frame, then |
157 // barrier resides, and the completion of the frame, then |
127 // NativeNMethodCmpBarrier::verify() will immediately complain when it does |
158 // NativeNMethodCmpBarrier::verify() will immediately complain when it does |
128 // not find the expected native instruction at this offset, which needs updating. |
159 // not find the expected native instruction at this offset, which needs updating. |
129 // Note that this offset is invariant of PreserveFramePointer. |
160 // Note that this offset is invariant of PreserveFramePointer. |
130 static const int entry_barrier_offset = -19; |
161 static const int entry_barrier_offset = LP64_ONLY(-19) NOT_LP64(-18); |
131 |
162 |
132 static NativeNMethodCmpBarrier* native_nmethod_barrier(nmethod* nm) { |
163 static NativeNMethodCmpBarrier* native_nmethod_barrier(nmethod* nm) { |
133 address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset; |
164 address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset; |
134 NativeNMethodCmpBarrier* barrier = reinterpret_cast<NativeNMethodCmpBarrier*>(barrier_address); |
165 NativeNMethodCmpBarrier* barrier = reinterpret_cast<NativeNMethodCmpBarrier*>(barrier_address); |
135 debug_only(barrier->verify()); |
166 debug_only(barrier->verify()); |