src/hotspot/cpu/x86/gc/z/z_x86_64.ad
changeset 58516 d376d86b0a01
parent 55563 d56b192c73e9
child 58679 9c3209ff7550
comparing 58515:8f849d3ec1e5 with 58516:d376d86b0a01
 //
 
 source_hpp %{
 
 #include "gc/z/c2/zBarrierSetC2.hpp"
+#include "gc/z/zThreadLocalData.hpp"
 
 %}
 
 source %{
 
-#include "gc/z/zBarrierSetAssembler.hpp"
+static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
+  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, weak);
+  __ testptr(ref, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
+  __ jcc(Assembler::notZero, *stub->entry());
+  __ bind(*stub->continuation());
+}
 
-static void z_load_barrier_slow_reg(MacroAssembler& _masm, Register dst, Address src, bool weak) {
-  assert(dst != rsp, "Invalid register");
-  assert(dst != r15, "Invalid register");
-
-  const address stub = weak ? ZBarrierSet::assembler()->load_barrier_weak_slow_stub(dst)
-                            : ZBarrierSet::assembler()->load_barrier_slow_stub(dst);
-  __ lea(dst, src);
-  __ call(RuntimeAddress(stub));
-}
+static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
+  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, false /* weak */);
+  __ jmp(*stub->entry());
+  __ bind(*stub->continuation());
+}
 
 %}
 
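Note: the fast path emitted by z_load_barrier is just an and-test of the loaded oop against the per-thread bad mask, with the taken branch entering the out-of-line ZLoadBarrierStubC2; the stub returns to the continuation label. A minimal C++ sketch of the check itself, assuming a flat uintptr_t view of the colored pointer (names illustrative, not HotSpot API):

    #include <cstdint>

    // Mirrors: __ testptr(ref, Address(r15_thread, address_bad_mask_offset()));
    //          __ jcc(Assembler::notZero, *stub->entry());
    static bool needs_slow_path(uintptr_t ref, uintptr_t address_bad_mask) {
      return (ref & address_bad_mask) != 0;  // any bad color bit set => slow path
    }
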
-// For XMM and YMM enabled processors
-instruct zLoadBarrierSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
-                                      rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
-                                      rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
-                                      rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
-                                      rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
-  match(Set dst (LoadBarrierSlowReg src dst));
-  predicate(UseAVX <= 2 && !n->as_LoadBarrierSlowReg()->is_weak());
-
-  effect(KILL cr,
-         KILL x0, KILL x1, KILL x2, KILL x3,
-         KILL x4, KILL x5, KILL x6, KILL x7,
-         KILL x8, KILL x9, KILL x10, KILL x11,
-         KILL x12, KILL x13, KILL x14, KILL x15);
-
-  format %{ "lea $dst, $src\n\t"
-            "call #ZLoadBarrierSlowPath" %}
-
-  ins_encode %{
-    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, false /* weak */);
-  %}
-  ins_pipe(pipe_slow);
-%}
+// Load Pointer
+instruct zLoadP(rRegP dst, memory mem, rFlagsReg cr)
+%{
+  predicate(UseZGC && n->as_Load()->barrier_data() == ZLoadBarrierStrong);
+  match(Set dst (LoadP mem));
+  effect(KILL cr, TEMP dst);
+
+  ins_cost(125);
+
+  format %{ "movq     $dst, $mem" %}
+
+  ins_encode %{
+    __ movptr($dst$$Register, $mem$$Address);
+    if (barrier_data() != ZLoadBarrierElided) {
+      z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, false /* weak */);
+    }
+  %}
+
+  ins_pipe(ialu_reg_mem);
+%}
 
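Note: barrier_data() is metadata attached to the load node by the ZGC C2 barrier-set code (see the zBarrierSetC2.hpp include above); when it is ZLoadBarrierElided, the compiler has proven the barrier unnecessary and zLoadP degenerates to a plain movq.
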
-// For ZMM enabled processors
-instruct zLoadBarrierSlowRegZmm(rRegP dst, memory src, rFlagsReg cr,
-                                rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
-                                rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
-                                rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
-                                rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15,
-                                rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
-                                rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
-                                rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
-                                rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
-
-  match(Set dst (LoadBarrierSlowReg src dst));
-  predicate(UseAVX == 3 && !n->as_LoadBarrierSlowReg()->is_weak());
-
-  effect(KILL cr,
-         KILL x0, KILL x1, KILL x2, KILL x3,
-         KILL x4, KILL x5, KILL x6, KILL x7,
-         KILL x8, KILL x9, KILL x10, KILL x11,
-         KILL x12, KILL x13, KILL x14, KILL x15,
-         KILL x16, KILL x17, KILL x18, KILL x19,
-         KILL x20, KILL x21, KILL x22, KILL x23,
-         KILL x24, KILL x25, KILL x26, KILL x27,
-         KILL x28, KILL x29, KILL x30, KILL x31);
-
-  format %{ "lea $dst, $src\n\t"
-            "call #ZLoadBarrierSlowPath" %}
-
-  ins_encode %{
-    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, false /* weak */);
-  %}
-  ins_pipe(pipe_slow);
-%}
+// Load Weak Pointer
+instruct zLoadWeakP(rRegP dst, memory mem, rFlagsReg cr)
+%{
+  predicate(UseZGC && n->as_Load()->barrier_data() == ZLoadBarrierWeak);
+  match(Set dst (LoadP mem));
+  effect(KILL cr, TEMP dst);
+
+  ins_cost(125);
+
+  format %{ "movq     $dst, $mem" %}
+
+  ins_encode %{
+    __ movptr($dst$$Register, $mem$$Address);
+    z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, true /* weak */);
+  %}
+
+  ins_pipe(ialu_reg_mem);
+%}
 
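Note: the weak variant differs from zLoadP only in the barrier call: it passes true /* weak */ and carries no ZLoadBarrierElided fast-out, so the barrier test is always emitted.
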
-// For XMM and YMM enabled processors
-instruct zLoadBarrierWeakSlowRegXmmAndYmm(rRegP dst, memory src, rFlagsReg cr,
-                                          rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
-                                          rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
-                                          rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
-                                          rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15) %{
-  match(Set dst (LoadBarrierSlowReg src dst));
-  predicate(UseAVX <= 2 && n->as_LoadBarrierSlowReg()->is_weak());
-
-  effect(KILL cr,
-         KILL x0, KILL x1, KILL x2, KILL x3,
-         KILL x4, KILL x5, KILL x6, KILL x7,
-         KILL x8, KILL x9, KILL x10, KILL x11,
-         KILL x12, KILL x13, KILL x14, KILL x15);
-
-  format %{ "lea $dst, $src\n\t"
-            "call #ZLoadBarrierSlowPath" %}
-
-  ins_encode %{
-    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, true /* weak */);
-  %}
-  ins_pipe(pipe_slow);
-%}
+instruct zCompareAndExchangeP(memory mem, rax_RegP oldval, rRegP newval, rRegP tmp, rFlagsReg cr) %{
+  match(Set oldval (CompareAndExchangeP mem (Binary oldval newval)));
+  predicate(UseZGC && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
+  effect(KILL cr, TEMP tmp);
+
+  format %{ "lock\n\t"
+            "cmpxchgq $newval, $mem" %}
+
+  ins_encode %{
+    if (barrier_data() != ZLoadBarrierElided) {
+      __ movptr($tmp$$Register, $oldval$$Register);
+    }
+    __ lock();
+    __ cmpxchgptr($newval$$Register, $mem$$Address);
+    if (barrier_data() != ZLoadBarrierElided) {
+      Label good;
+      __ testptr($oldval$$Register, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
+      __ jcc(Assembler::zero, good);
+      z_load_barrier_slow_path(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register);
+      __ movptr($oldval$$Register, $tmp$$Register);
+      __ lock();
+      __ cmpxchgptr($newval$$Register, $mem$$Address);
+      __ bind(good);
+    }
+  %}
+
+  ins_pipe(pipe_cmpxchg);
+%}
 
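Note: when a strong CAS fails, the value witnessed in rax may merely be a stale (bad-colored) pointer to the right object. The encoding above heals the field through z_load_barrier_slow_path and retries the cmpxchg once. A minimal C++ sketch of that shape, with heal_ref as a hypothetical stand-in for the out-of-line stub (not HotSpot API):

    #include <atomic>
    #include <cstdint>

    extern void heal_ref(std::atomic<uintptr_t>* field);  // assumed: heals a stale field

    uintptr_t zgc_cmpxchg(std::atomic<uintptr_t>* field, uintptr_t expected,
                          uintptr_t desired, uintptr_t bad_mask) {
      uintptr_t saved = expected;                           // movptr(tmp, oldval)
      field->compare_exchange_strong(expected, desired);    // lock cmpxchg; on failure,
                                                            // 'expected' holds the witnessed value
      if ((expected & bad_mask) != 0) {                     // testptr/jcc: stale color?
        heal_ref(field);                                    // z_load_barrier_slow_path
        expected = saved;                                   // movptr(oldval, tmp)
        field->compare_exchange_strong(expected, desired);  // second and final attempt
      }
      return expected;                                      // CompareAndExchangeP result
    }
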
-// For ZMM enabled processors
-instruct zLoadBarrierWeakSlowRegZmm(rRegP dst, memory src, rFlagsReg cr,
-                                    rxmm0 x0, rxmm1 x1, rxmm2 x2, rxmm3 x3,
-                                    rxmm4 x4, rxmm5 x5, rxmm6 x6, rxmm7 x7,
-                                    rxmm8 x8, rxmm9 x9, rxmm10 x10, rxmm11 x11,
-                                    rxmm12 x12, rxmm13 x13, rxmm14 x14, rxmm15 x15,
-                                    rxmm16 x16, rxmm17 x17, rxmm18 x18, rxmm19 x19,
-                                    rxmm20 x20, rxmm21 x21, rxmm22 x22, rxmm23 x23,
-                                    rxmm24 x24, rxmm25 x25, rxmm26 x26, rxmm27 x27,
-                                    rxmm28 x28, rxmm29 x29, rxmm30 x30, rxmm31 x31) %{
-
-  match(Set dst (LoadBarrierSlowReg src dst));
-  predicate(UseAVX == 3 && n->as_LoadBarrierSlowReg()->is_weak());
-
-  effect(KILL cr,
-         KILL x0, KILL x1, KILL x2, KILL x3,
-         KILL x4, KILL x5, KILL x6, KILL x7,
-         KILL x8, KILL x9, KILL x10, KILL x11,
-         KILL x12, KILL x13, KILL x14, KILL x15,
-         KILL x16, KILL x17, KILL x18, KILL x19,
-         KILL x20, KILL x21, KILL x22, KILL x23,
-         KILL x24, KILL x25, KILL x26, KILL x27,
-         KILL x28, KILL x29, KILL x30, KILL x31);
-
-  format %{ "lea $dst, $src\n\t"
-            "call #ZLoadBarrierSlowPath" %}
-
-  ins_encode %{
-    z_load_barrier_slow_reg(_masm, $dst$$Register, $src$$Address, true /* weak */);
-  %}
-  ins_pipe(pipe_slow);
-%}
+instruct zCompareAndSwapP(rRegI res, memory mem, rRegP newval, rRegP tmp, rFlagsReg cr, rax_RegP oldval) %{
+  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
+  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
+  predicate(UseZGC && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
+  effect(KILL cr, KILL oldval, TEMP tmp);
+
+  format %{ "lock\n\t"
+            "cmpxchgq $newval, $mem\n\t"
+            "sete     $res\n\t"
+            "movzbl   $res, $res" %}
+
+  ins_encode %{
+    if (barrier_data() != ZLoadBarrierElided) {
+      __ movptr($tmp$$Register, $oldval$$Register);
+    }
+    __ lock();
+    __ cmpxchgptr($newval$$Register, $mem$$Address);
+    if (barrier_data() != ZLoadBarrierElided) {
+      Label good;
+      __ testptr($oldval$$Register, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
+      __ jcc(Assembler::zero, good);
+      z_load_barrier_slow_path(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register);
+      __ movptr($oldval$$Register, $tmp$$Register);
+      __ lock();
+      __ cmpxchgptr($newval$$Register, $mem$$Address);
+      __ bind(good);
+      __ cmpptr($tmp$$Register, $oldval$$Register);
+    }
+    __ setb(Assembler::equal, $res$$Register);
+    __ movzbl($res$$Register, $res$$Register);
+  %}
+
+  ins_pipe(pipe_cmpxchg);
+%}
 
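Note: setb(Assembler::equal) needs ZF to mean "swap succeeded" on every path. With the barrier present, the flags reaching setb come from cmpptr($tmp, $oldval) at the good label, comparing the saved expected value against what cmpxchg witnessed in rax; they are equal exactly when the first attempt, or the single retry, installed $newval. When the barrier is elided, the flags come straight from lock cmpxchg.
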
-// Specialized versions of compareAndExchangeP that adds a keepalive that is consumed
-// but doesn't affect output.
-
-instruct z_compareAndExchangeP(
-        memory mem_ptr,
-        rax_RegP oldval, rRegP newval, rRegP keepalive,
-        rFlagsReg cr) %{
-    predicate(VM_Version::supports_cx8());
-    match(Set oldval (ZCompareAndExchangeP (Binary mem_ptr keepalive) (Binary oldval newval)));
-    effect(KILL cr);
-
-    format %{ "cmpxchgq $mem_ptr,$newval\t# "
-              "If rax == $mem_ptr then store $newval into $mem_ptr\n\t" %}
-    opcode(0x0F, 0xB1);
-    ins_encode(lock_prefix,
-            REX_reg_mem_wide(newval, mem_ptr),
-            OpcP, OpcS,
-            reg_mem(newval, mem_ptr)  // lock cmpxchg
-    );
-    ins_pipe( pipe_cmpxchg );
-%}
-
-instruct z_compareAndSwapP(rRegI res,
-                         memory mem_ptr,
-                         rax_RegP oldval, rRegP newval, rRegP keepalive,
-                         rFlagsReg cr) %{
-  predicate(VM_Version::supports_cx8());
-  match(Set res (ZCompareAndSwapP (Binary mem_ptr keepalive) (Binary oldval newval)));
-  match(Set res (ZWeakCompareAndSwapP (Binary mem_ptr keepalive) (Binary oldval newval)));
-  effect(KILL cr, KILL oldval);
-
-  format %{ "cmpxchgq $mem_ptr,$newval\t# "
-            "If rax == $mem_ptr then store $newval into $mem_ptr\n\t"
-            "sete    $res\n\t"
-            "movzbl  $res, $res" %}
-  opcode(0x0F, 0xB1);
-  ins_encode(lock_prefix,
-          REX_reg_mem_wide(newval, mem_ptr),
-          OpcP, OpcS,
-          reg_mem(newval, mem_ptr),
-          REX_breg(res), Opcode(0x0F), Opcode(0x94), reg(res), // sete
-          REX_reg_breg(res, res), // movzbl
-          Opcode(0xF), Opcode(0xB6), reg_reg(res, res));
-  ins_pipe( pipe_cmpxchg );
-%}
-
-instruct z_xchgP( memory mem, rRegP newval, rRegP keepalive) %{
-  match(Set newval (ZGetAndSetP mem (Binary newval keepalive)));
-  format %{ "XCHGQ  $newval,[$mem]" %}
-  ins_encode %{
-    __ xchgq($newval$$Register, $mem$$Address);
-  %}
-  ins_pipe( pipe_cmpxchg );
-%}
+instruct zXChgP(memory mem, rRegP newval, rFlagsReg cr) %{
+  match(Set newval (GetAndSetP mem newval));
+  predicate(UseZGC && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
+  effect(KILL cr);
+
+  format %{ "xchgq    $newval, $mem" %}
+
+  ins_encode %{
+    __ xchgptr($newval$$Register, $mem$$Address);
+    if (barrier_data() != ZLoadBarrierElided) {
+      z_load_barrier(_masm, this, Address(noreg, 0), $newval$$Register, noreg /* tmp */, false /* weak */);
+    }
+  %}
+
+  ins_pipe(pipe_cmpxchg);
+%}
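
Note: after xchgptr, $newval holds the previous contents of the field, which is the reference that needs the load barrier. The ref_addr passed is a dummy Address(noreg, 0): the old value no longer lives in memory, so, presumably, there is nothing for the barrier to self-heal.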