43 |
43 |
44 %} |
44 %} |
45 |
45 |
// For XMM and YMM enabled processors
// ZGC load-barrier slow path for strong (non-weak) oop loads when only
// XMM/YMM registers are available (UseAVX <= 2).  The oop in dst is passed
// to the slow-path stub, which heals it in place; dst therefore appears as
// the second input of the match rule and needs no separate DEF.
instruct zLoadBarrierSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
                                      rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
                                      rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
                                      rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
                                      rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{

  match(Set dst (LoadBarrierSlowReg src dst));
  predicate(UseAVX <= 2 && !n->as_LoadBarrierSlowReg()->is_weak());

  // The slow-path call clobbers the flags and all vector registers
  // available at this AVX level (xmm0-xmm15).
  effect(KILL cr,
         KILL x0, KILL x1, KILL x2, KILL x3,
         KILL x4, KILL x5, KILL x6, KILL x7,
         KILL x8, KILL x9, KILL x10, KILL x11,
         KILL x12, KILL x13, KILL x14, KILL x15);

  format %{ "lea $dst, $src\n\t"
            "call #ZLoadBarrierSlowPath" %}

  ins_encode %{
    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, false /* weak */);
  %}

  ins_pipe(pipe_slow);
%}
70 |
69 |
// For ZMM enabled processors
// ZGC load-barrier slow path for strong (non-weak) oop loads when ZMM
// registers are available (UseAVX == 3).  Identical to the XMM/YMM variant
// except that the extended register set xmm16-xmm31 must also be killed.
instruct zLoadBarrierSlowRegZmm(rRegP dst, memory src, rFlagsReg cr,
                                rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
                                rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
                                rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
                                rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15,
                                rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
                                rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
                                rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
                                rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{

  match(Set dst (LoadBarrierSlowReg src dst));
  predicate(UseAVX == 3 && !n->as_LoadBarrierSlowReg()->is_weak());

  // The slow-path call clobbers the flags and the full AVX-512 vector
  // register file (xmm0-xmm31).
  effect(KILL cr,
         KILL x0, KILL x1, KILL x2, KILL x3,
         KILL x4, KILL x5, KILL x6, KILL x7,
         KILL x8, KILL x9, KILL x10, KILL x11,
         KILL x12, KILL x13, KILL x14, KILL x15,
         KILL x16, KILL x17, KILL x18, KILL x19,
         KILL x20, KILL x21, KILL x22, KILL x23,
         KILL x24, KILL x25, KILL x26, KILL x27,
         KILL x28, KILL x29, KILL x30, KILL x31);

  format %{ "lea $dst, $src\n\t"
            "call #ZLoadBarrierSlowPath" %}

  ins_encode %{
    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, false /* weak */);
  %}

  ins_pipe(pipe_slow);
%}
103 |
102 |
// For XMM and YMM enabled processors
// ZGC load-barrier slow path for weak oop loads when only XMM/YMM
// registers are available (UseAVX <= 2).  Same shape as the strong
// variant, but the predicate selects is_weak() nodes and the stub is
// called with weak == true.
instruct zLoadBarrierWeakSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
                                          rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
                                          rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
                                          rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
                                          rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{

  match(Set dst (LoadBarrierSlowReg src dst));
  predicate(UseAVX <= 2 && n->as_LoadBarrierSlowReg()->is_weak());

  // The slow-path call clobbers the flags and all vector registers
  // available at this AVX level (xmm0-xmm15).
  effect(KILL cr,
         KILL x0, KILL x1, KILL x2, KILL x3,
         KILL x4, KILL x5, KILL x6, KILL x7,
         KILL x8, KILL x9, KILL x10, KILL x11,
         KILL x12, KILL x13, KILL x14, KILL x15);

  format %{ "lea $dst, $src\n\t"
            "call #ZLoadBarrierSlowPath" %}

  ins_encode %{
    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, true /* weak */);
  %}

  ins_pipe(pipe_slow);
%}
128 |
126 |
// For ZMM enabled processors
// ZGC load-barrier slow path for weak oop loads when ZMM registers are
// available (UseAVX == 3).  Same shape as the strong ZMM variant, but the
// predicate selects is_weak() nodes and the stub is called with weak == true.
instruct zLoadBarrierWeakSlowRegZmm(rRegP dst, memory src, rFlagsReg cr,
                                    rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
                                    rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
                                    rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
                                    rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15,
                                    rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
                                    rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
                                    rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
                                    rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{

  match(Set dst (LoadBarrierSlowReg src dst));
  predicate(UseAVX == 3 && n->as_LoadBarrierSlowReg()->is_weak());

  // The slow-path call clobbers the flags and the full AVX-512 vector
  // register file (xmm0-xmm31).
  effect(KILL cr,
         KILL x0, KILL x1, KILL x2, KILL x3,
         KILL x4, KILL x5, KILL x6, KILL x7,
         KILL x8, KILL x9, KILL x10, KILL x11,
         KILL x12, KILL x13, KILL x14, KILL x15,
         KILL x16, KILL x17, KILL x18, KILL x19,
         KILL x20, KILL x21, KILL x22, KILL x23,
         KILL x24, KILL x25, KILL x26, KILL x27,
         KILL x28, KILL x29, KILL x30, KILL x31);

  format %{ "lea $dst, $src\n\t"
            "call #ZLoadBarrierSlowPath" %}

  ins_encode %{
    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, true /* weak */);
  %}

  ins_pipe(pipe_slow);
%}
161 |
159 |
// Specialized versions of compareAndExchangeP that adds a keepalive that is consumed
// but doesn't affect output.