--- a/src/hotspot/os_cpu/bsd_x86/atomic_bsd_x86.hpp Mon Nov 25 14:06:13 2019 +0100
+++ b/src/hotspot/os_cpu/bsd_x86/atomic_bsd_x86.hpp Mon Nov 25 12:22:13 2019 +0100
@@ -169,4 +169,58 @@
#endif // AMD64
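+// All four specializations below store with xchg.  An xchg that has a memory
+// operand is implicitly locked on x86 and acts as a full memory barrier, so
+// the single instruction provides both the store and the trailing fence
+// required by RELEASE_X_FENCE; no separate mfence is needed.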
+template<>
+struct Atomic::PlatformOrderedStore<1, RELEASE_X_FENCE>
+{
+  template <typename T>
+  void operator()(T v, volatile T* p) const {
+    __asm__ volatile (  "xchgb (%2),%0"
+                      : "=q" (v)
+                      : "0" (v), "r" (p)
+                      : "memory");
+  }
+};
+
+template<>
+struct Atomic::PlatformOrderedStore<2, RELEASE_X_FENCE>
+{
+  template <typename T>
+  void operator()(T v, volatile T* p) const {
+    __asm__ volatile (  "xchgw (%2),%0"
+                      : "=r" (v)
+                      : "0" (v), "r" (p)
+                      : "memory");
+  }
+};
+
+template<>
+struct Atomic::PlatformOrderedStore<4, RELEASE_X_FENCE>
+{
+  template <typename T>
+  void operator()(T v, volatile T* p) const {
+    __asm__ volatile (  "xchgl (%2),%0"
+                      : "=r" (v)
+                      : "0" (v), "r" (p)
+                      : "memory");
+  }
+};
+
+#ifdef AMD64
+template<>
+struct Atomic::PlatformOrderedStore<8, RELEASE_X_FENCE>
+{
+  template <typename T>
+  void operator()(T v, volatile T* p) const {
+    __asm__ volatile (  "xchgq (%2), %0"
+                      : "=r" (v)
+                      : "0" (v), "r" (p)
+                      : "memory");
+  }
+};
+#endif // AMD64
+
#endif // OS_CPU_BSD_X86_ATOMIC_BSD_X86_HPP
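
For reference, a minimal standalone sketch (not part of the patch, and not HotSpot's dispatch code) of the same xchg-based store-plus-fence technique for the 4-byte case; the function name release_store_fence_u32 and the main() driver are illustrative only, and the parameter order mirrors the operator() above:

#include <cstdint>

// Store v to *p and act as a full memory barrier in one instruction:
// xchg with a memory operand is implicitly locked on x86.
inline void release_store_fence_u32(uint32_t v, volatile uint32_t* p) {
  __asm__ volatile ("xchgl (%2),%0"
                    : "=r" (v)
                    : "0" (v), "r" (p)
                    : "memory");
}

int main() {
  volatile uint32_t flag = 0;
  release_store_fence_u32(1, &flag);   // publish a value with fence semantics
  return flag == 1 ? 0 : 1;
}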