// Note: Currently only used in 64 bit Windows implementations.
// On this platform there is no code area to register with the runtime,
// so this is a deliberate no-op that unconditionally reports success.
static bool register_code_area(char *low, char *high) {
  // Both bounds are intentionally unused here; cast to void to avoid
  // -Wunused-parameter warnings while keeping the shared signature.
  (void)low;
  (void)high;
  return true;
}
36 |
36 |
// Atomically copy 64 bits of data
38 static void atomic_copy64(const volatile void *src, volatile void *dst) { |
38 static void atomic_copy64(const volatile void *src, volatile void *dst) { |
39 #if defined(PPC32) |
39 #if defined(PPC32) && !defined(__SPE__) |
40 double tmp; |
40 double tmp; |
41 asm volatile ("lfd %0, %2\n" |
41 asm volatile ("lfd %0, %2\n" |
42 "stfd %0, %1\n" |
42 "stfd %0, %1\n" |
43 : "=&f"(tmp), "=Q"(*(volatile double*)dst) |
43 : "=&f"(tmp), "=Q"(*(volatile double*)dst) |
44 : "Q"(*(volatile double*)src)); |
44 : "Q"(*(volatile double*)src)); |
|
45 #elif defined(PPC32) && defined(__SPE__) |
|
46 long tmp; |
|
47 asm volatile ("evldd %0, %2\n" |
|
48 "evstdd %0, %1\n" |
|
49 : "=&r"(tmp), "=Q"(*(volatile long*)dst) |
|
50 : "Q"(*(volatile long*)src)); |
45 #elif defined(S390) && !defined(_LP64) |
51 #elif defined(S390) && !defined(_LP64) |
46 double tmp; |
52 double tmp; |
47 asm volatile ("ld %0, 0(%1)\n" |
53 asm volatile ("ld %0, 0(%1)\n" |
48 "std %0, 0(%2)\n" |
54 "std %0, 0(%2)\n" |
49 : "=r"(tmp) |
55 : "=r"(tmp) |