src/hotspot/os_cpu/linux_x86/atomic_linux_x86.hpp
changeset 59249 29b0d0b61615
parent 59248 e92153ed8bdc
child 59251 4cbfa5077d68
@@ -29,17 +29,17 @@
 
 template<size_t byte_size>
 struct Atomic::PlatformAdd
   : Atomic::FetchAndAdd<Atomic::PlatformAdd<byte_size> >
 {
-  template<typename I, typename D>
-  D fetch_and_add(I add_value, D volatile* dest, atomic_memory_order order) const;
+  template<typename D, typename I>
+  D fetch_and_add(D volatile* dest, I add_value, atomic_memory_order order) const;
 };
 
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<4>::fetch_and_add(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<4>::fetch_and_add(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
   STATIC_ASSERT(4 == sizeof(I));
   STATIC_ASSERT(4 == sizeof(D));
   D old_value;
   __asm__ volatile (  "lock xaddl %0,(%2)"
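
For readers following the reordered signature, here is a minimal standalone sketch of the same pattern: a destination-first fetch-and-add built on "lock xaddl", as in the hunk above. It is an illustrative helper of my own (the name fetch_and_add_32 is hypothetical, not HotSpot's API) and assumes a GCC or Clang toolchain targeting x86 or x86-64.

#include <cstdint>
#include <cstdio>

// Hypothetical helper, not HotSpot code: atomically adds add_value to *dest
// and returns the value *dest held before the addition. The "0" constraint
// loads add_value into the same register as old_value; xadd then exchanges
// that register with memory while performing the add.
static inline int32_t fetch_and_add_32(volatile int32_t* dest, int32_t add_value) {
  int32_t old_value;
  __asm__ volatile ("lock xaddl %0,(%2)"
                    : "=r" (old_value)
                    : "0" (add_value), "r" (dest)
                    : "cc", "memory");
  return old_value;
}

int main() {
  volatile int32_t counter = 40;
  int32_t previous = fetch_and_add_32(&counter, 2);  // destination first, then the addend
  std::printf("previous=%d now=%d\n", (int) previous, (int) counter);  // previous=40 now=42
  return 0;
}

Putting the destination pointer first reads naturally at call sites and matches the order used elsewhere in the Atomic API, which is presumably the motivation for the swap in this changeset.
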
@@ -91,12 +91,12 @@
 }
 
 #ifdef AMD64
 
 template<>
-template<typename I, typename D>
-inline D Atomic::PlatformAdd<8>::fetch_and_add(I add_value, D volatile* dest,
+template<typename D, typename I>
+inline D Atomic::PlatformAdd<8>::fetch_and_add(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
   STATIC_ASSERT(8 == sizeof(I));
   STATIC_ASSERT(8 == sizeof(D));
   D old_value;
   __asm__ __volatile__ ("lock xaddq %0,(%2)"
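
The AMD64-only hunk follows the same shape with a 64-bit "lock xaddq". Below is a comparable standalone sketch (again a hypothetical helper, not the HotSpot implementation); it assumes an x86-64 GCC or Clang build and falls back to the compiler's __atomic_fetch_add builtin elsewhere.

#include <cstdint>
#include <cstdio>

// Hypothetical helper, not HotSpot code: 64-bit fetch-and-add with the
// destination pointer first, mirroring the reordered 8-byte specialization.
static inline int64_t fetch_and_add_64(volatile int64_t* dest, int64_t add_value) {
#if defined(__x86_64__)
  int64_t old_value;
  __asm__ __volatile__ ("lock xaddq %0,(%2)"
                        : "=r" (old_value)
                        : "0" (add_value), "r" (dest)
                        : "cc", "memory");
  return old_value;  // *dest before the addition
#else
  // Non-x86-64 fallback: let the compiler emit the atomic read-modify-write.
  return __atomic_fetch_add(dest, add_value, __ATOMIC_SEQ_CST);
#endif
}

int main() {
  volatile int64_t counter = 100;
  int64_t previous = fetch_and_add_64(&counter, 5);  // dest first, then add_value
  std::printf("previous=%lld now=%lld\n",
              (long long) previous, (long long) counter);  // previous=100 now=105
  return 0;
}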