@@ -1756,10 +1756,19 @@
   }
 
   return ret_value; // Per default match rules are supported.
 }
 
+const bool Matcher::has_predicated_vectors(void) {
+  bool ret_value = false;
+  if (UseAVX > 2) {
+    ret_value = VM_Version::supports_avx512vl();
+  }
+
+  return ret_value;
+}
+
 const int Matcher::float_pressure(int default_pressure_threshold) {
   int float_pressure_threshold = default_pressure_threshold;
 #ifdef _LP64
   if (UseAVX > 2) {
     // Increase pressure threshold on machines with AVX3 which have
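The new Matcher::has_predicated_vectors() query reports predicated (masked) vector support only when running with EVEX and the AVX-512VL extension. As a purely illustrative restatement of that gating logic in one expression (the helper name below is invented; UseAVX and VM_Version::supports_avx512vl() are the real flags used above):

    // Sketch: predicated vectors are only reported on AVX-512 machines
    // that also support the VL (vector-length) extension.
    static bool has_predicated_vectors_sketch() {
      return (UseAVX > 2) && VM_Version::supports_avx512vl();
    }

The setMask instruct added later in this change is predicated on exactly this query.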
@@ -1873,11 +1882,11 @@
       break;
     case Op_VecY:
       __ vmovdqu(as_XMMRegister(Matcher::_regEncode[dst_lo]), as_XMMRegister(Matcher::_regEncode[src_lo]));
       break;
     case Op_VecZ:
-      __ evmovdqul(as_XMMRegister(Matcher::_regEncode[dst_lo]), as_XMMRegister(Matcher::_regEncode[src_lo]), 2);
+      __ evmovdquq(as_XMMRegister(Matcher::_regEncode[dst_lo]), as_XMMRegister(Matcher::_regEncode[src_lo]), 2);
       break;
     default:
       ShouldNotReachHere();
     }
     int size = __ offset() - offset;
@@ -1928,11 +1937,11 @@
         break;
       case Op_VecY:
         __ vmovdqu(as_XMMRegister(Matcher::_regEncode[reg]), Address(rsp, stack_offset));
         break;
       case Op_VecZ:
-        __ evmovdqul(as_XMMRegister(Matcher::_regEncode[reg]), Address(rsp, stack_offset), 2);
+        __ evmovdquq(as_XMMRegister(Matcher::_regEncode[reg]), Address(rsp, stack_offset), 2);
         break;
       default:
         ShouldNotReachHere();
       }
     } else { // store
@@ -1948,11 +1957,11 @@
         break;
       case Op_VecY:
         __ vmovdqu(Address(rsp, stack_offset), as_XMMRegister(Matcher::_regEncode[reg]));
         break;
       case Op_VecZ:
-        __ evmovdqul(Address(rsp, stack_offset), as_XMMRegister(Matcher::_regEncode[reg]), 2);
+        __ evmovdquq(Address(rsp, stack_offset), as_XMMRegister(Matcher::_regEncode[reg]), 2);
         break;
       default:
         ShouldNotReachHere();
       }
     }
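The three hunks above switch the full-ZMM register copy and spill paths from evmovdqul to evmovdquq. For an unmasked move of a whole 64-byte register either element width transfers the same bits, so this reads as a consistency change with the quadword-element handling introduced elsewhere in the patch. A hedged sketch of the store side, using only calls that already appear in the diff (the helper name and signature are invented for illustration; the literal 2 is the 512-bit vector-length selector used throughout these hunks):

    // Sketch: spill one vecZ (64-byte) register to a stack slot using the
    // quadword form, mirroring the store branch of the hunk above.
    static void spill_vecZ_sketch(MacroAssembler* masm, XMMRegister src, int stack_offset) {
      const int vector_len = 2;  // 512-bit operation, as in the hunks above
      masm->evmovdquq(Address(rsp, stack_offset), src, vector_len);
    }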
@@ -2174,10 +2183,23 @@
     __ int3();
   %}
   ins_pipe(pipe_slow);
 %}
 
+// =================================EVEX special===============================
+
+instruct setMask(rRegI dst, rRegI src) %{
+  predicate(Matcher::has_predicated_vectors());
+  match(Set dst (SetVectMaskI src));
+  effect(TEMP dst);
+  format %{ "setvectmask $dst, $src" %}
+  ins_encode %{
+    __ setvectmask($dst$$Register, $src$$Register);
+  %}
+  ins_pipe(pipe_slow);
+%}
+
 // ============================================================================
 
 instruct addF_reg(regF dst, regF src) %{
   predicate((UseSSE>=1) && (UseAVX == 0));
   match(Set dst (AddF dst src));
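The new setMask instruct lowers SetVectMaskI to the MacroAssembler's setvectmask() helper, which builds a vector mask from the scalar in $src, presumably for the masked post-loop support this patch enables. The exact emitted sequence lives in the macro assembler; conceptually the mask it sets up enables only the remaining lanes, roughly as in this hypothetical sketch (the function and its name are illustrative, not the real implementation):

    // Conceptual value of a post-loop vector mask with 'remaining' scalar
    // iterations left: the low 'remaining' lanes enabled, the rest disabled.
    static uint64_t post_loop_mask_sketch(unsigned remaining) {
      return (remaining >= 64) ? ~0ULL : ((uint64_t(1) << remaining) - 1);
    }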
@@ -3067,18 +3089,31 @@
   %}
   ins_pipe( pipe_slow );
 %}
 
 // Load vectors (64 bytes long)
-instruct loadV64(vecZ dst, memory mem) %{
-  predicate(n->as_LoadVector()->memory_size() == 64);
+instruct loadV64_dword(vecZ dst, memory mem) %{
+  predicate(n->as_LoadVector()->memory_size() == 64 && n->as_LoadVector()->element_size() <= 4);
   match(Set dst (LoadVector mem));
   ins_cost(125);
-  format %{ "vmovdqu $dst k0,$mem\t! load vector (64 bytes)" %}
+  format %{ "vmovdqul $dst k0,$mem\t! load vector (64 bytes)" %}
   ins_encode %{
     int vector_len = 2;
     __ evmovdqul($dst$$XMMRegister, $mem$$Address, vector_len);
+  %}
+  ins_pipe( pipe_slow );
+%}
+
+// Load vectors (64 bytes long)
+instruct loadV64_qword(vecZ dst, memory mem) %{
+  predicate(n->as_LoadVector()->memory_size() == 64 && n->as_LoadVector()->element_size() > 4);
+  match(Set dst (LoadVector mem));
+  ins_cost(125);
+  format %{ "vmovdquq $dst k0,$mem\t! load vector (64 bytes)" %}
+  ins_encode %{
+    int vector_len = 2;
+    __ evmovdquq($dst$$XMMRegister, $mem$$Address, vector_len);
   %}
   ins_pipe( pipe_slow );
 %}
 
 // Store vectors
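The 64-byte load is now split into two instructs keyed on element size: elements of at most 4 bytes go through the dword form (evmovdqul), larger elements through the qword form (evmovdquq). Expressed as one hypothetical helper (the function is illustrative; the two assembler calls and the vector-length literal are the ones the instructs use):

    // Sketch: pick the EVEX move form by element size, as the
    // loadV64_dword / loadV64_qword predicates do.
    static void load_vecZ_sketch(MacroAssembler* masm, XMMRegister dst, Address mem, int elem_size) {
      const int vector_len = 2;  // 512-bit load
      if (elem_size <= 4) {
        masm->evmovdqul(dst, mem, vector_len);   // byte/short/int/float elements
      } else {
        masm->evmovdquq(dst, mem, vector_len);   // long/double elements
      }
    }

The storeV64 split in the next hunk applies the same rule on the store side.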
@@ -3124,18 +3159,30 @@
     __ vmovdqu($mem$$Address, $src$$XMMRegister);
   %}
   ins_pipe( pipe_slow );
 %}
 
-instruct storeV64(memory mem, vecZ src) %{
-  predicate(n->as_StoreVector()->memory_size() == 64);
+instruct storeV64_dword(memory mem, vecZ src) %{
+  predicate(n->as_StoreVector()->memory_size() == 64 && n->as_StoreVector()->element_size() <= 4);
   match(Set mem (StoreVector mem src));
   ins_cost(145);
-  format %{ "vmovdqu $mem k0,$src\t! store vector (64 bytes)" %}
+  format %{ "vmovdqul $mem k0,$src\t! store vector (64 bytes)" %}
   ins_encode %{
     int vector_len = 2;
     __ evmovdqul($mem$$Address, $src$$XMMRegister, vector_len);
+  %}
+  ins_pipe( pipe_slow );
+%}
+
+instruct storeV64_qword(memory mem, vecZ src) %{
+  predicate(n->as_StoreVector()->memory_size() == 64 && n->as_StoreVector()->element_size() > 4);
+  match(Set mem (StoreVector mem src));
+  ins_cost(145);
+  format %{ "vmovdquq $mem k0,$src\t! store vector (64 bytes)" %}
+  ins_encode %{
+    int vector_len = 2;
+    __ evmovdquq($mem$$Address, $src$$XMMRegister, vector_len);
   %}
   ins_pipe( pipe_slow );
 %}
 
 // ====================LEGACY REPLICATE=======================================