1 /* |
|
2 * Copyright © 2007 Chris Wilson |
|
3 * Copyright © 2009,2010 Red Hat, Inc. |
|
4 * Copyright © 2011,2012 Google, Inc. |
|
5 * |
|
6 * This is part of HarfBuzz, a text shaping library. |
|
7 * |
|
8 * Permission is hereby granted, without written agreement and without |
|
9 * license or royalty fees, to use, copy, modify, and distribute this |
|
10 * software and its documentation for any purpose, provided that the |
|
11 * above copyright notice and the following two paragraphs appear in |
|
12 * all copies of this software. |
|
13 * |
|
14 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
|
15 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
|
16 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
|
17 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
|
18 * DAMAGE. |
|
19 * |
|
20 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
|
21 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
|
22 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
|
23 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
|
24 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
|
25 * |
|
26 * Contributor(s): |
|
27 * Chris Wilson <chris@chris-wilson.co.uk> |
|
28 * Red Hat Author(s): Behdad Esfahbod |
|
29 * Google Author(s): Behdad Esfahbod |
|
30 */ |
|
31 |
|
32 #ifndef HB_ATOMIC_PRIVATE_HH |
|
33 #define HB_ATOMIC_PRIVATE_HH |
|
34 |
|
35 #include "hb-private.hh" |
|
36 |
|
37 |
|
38 /* atomic_int */ |
|
39 |
|
40 /* We need external help for these */ |
|
41 |
|
/* Backend selection: each branch below must provide
 *   typedef ... hb_atomic_int_impl_t;
 *   hb_atomic_int_impl_add(AI,V)      -- atomic fetch-and-add, returns OLD value
 *   hb_atomic_ptr_impl_get(P)         -- barriered pointer load
 *   hb_atomic_ptr_impl_cmpexch(P,O,N) -- CAS, true iff *P was O and is now N
 */
#if defined(hb_atomic_int_impl_add) \
 && defined(hb_atomic_ptr_impl_get) \
 && defined(hb_atomic_ptr_impl_cmpexch)

/* Defined externally, i.e. in config.h; must have typedef'ed hb_atomic_int_impl_t as well. */


#elif !defined(HB_NO_MT) && (defined(_WIN32) || defined(__CYGWIN__))

#include <windows.h>

/* MinGW has a convoluted history of supporting MemoryBarrier
 * properly. As such, define a function to wrap the whole
 * thing. */
|
/* Full memory barrier.  When the MemoryBarrier macro is unavailable
 * (older MinGW headers), a dummy InterlockedExchange serves as the
 * fence, since interlocked operations imply a full barrier. */
static inline void _HBMemoryBarrier (void) {
#if !defined(MemoryBarrier)
  long dummy = 0;
  InterlockedExchange (&dummy, 1);
#else
  MemoryBarrier ();
#endif
}
|
64 |
|
/* LONG is the type InterlockedExchangeAdd operates on. */
typedef LONG hb_atomic_int_impl_t;
/* InterlockedExchangeAdd returns the value *before* the addition
 * (fetch-and-add semantics, matching the other backends). */
#define hb_atomic_int_impl_add(AI, V)		InterlockedExchangeAdd (&(AI), (V))

/* Full barrier, then plain load: acquire-ordered pointer read. */
#define hb_atomic_ptr_impl_get(P)		(_HBMemoryBarrier (), (void *) *(P))
/* InterlockedCompareExchangePointer returns the previous value of *P;
 * comparing it against O yields the success/failure boolean. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	(InterlockedCompareExchangePointer ((void **) (P), (void *) (N), (void *) (O)) == (void *) (O))
|
70 |
|
71 |
|
#elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)

/* GCC-style __sync builtins (GCC/Clang/ICC). */
typedef int hb_atomic_int_impl_t;
/* __sync_fetch_and_add returns the old value and is a full barrier. */
#define hb_atomic_int_impl_add(AI, V)		__sync_fetch_and_add (&(AI), (V))

/* Full barrier (__sync_synchronize), then plain load. */
#define hb_atomic_ptr_impl_get(P)		(void *) (__sync_synchronize (), *(P))
/* true iff the swap was performed. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	__sync_bool_compare_and_swap ((P), (O), (N))
|
80 |
|
#elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)

#include <atomic.h>
#include <mbarrier.h>

typedef unsigned int hb_atomic_int_impl_t;
/* atomic_add_int_nv returns the *new* value; subtracting V restores the
 * fetch-and-add (old-value) semantics the callers expect.  The explicit
 * __machine_rw_barrier provides full ordering around the op. */
#define hb_atomic_int_impl_add(AI, V)		( ({__machine_rw_barrier ();}), atomic_add_int_nv (&(AI), (V)) - (V))

#define hb_atomic_ptr_impl_get(P)		( ({__machine_rw_barrier ();}), (void *) *(P))
/* atomic_cas_ptr returns the previous value of *P; equal to O means the
 * swap succeeded. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	( ({__machine_rw_barrier ();}), atomic_cas_ptr ((void **) (P), (void *) (O), (void *) (N)) == (void *) (O) ? true : false)
|
91 |
|
92 |
|
#elif !defined(HB_NO_MT) && defined(__APPLE__)

#include <libkern/OSAtomic.h>
/* NOTE(review): these #ifdef spellings (__MAC_OS_X_MIN_REQUIRED,
 * __IPHONE_OS_MIN_REQUIRED) do not match the usual SDK macro names
 * (__MAC_OS_X_VERSION_MIN_REQUIRED / __IPHONE_OS_VERSION_MIN_REQUIRED) —
 * confirm against the Availability headers of supported SDKs. */
#ifdef __MAC_OS_X_MIN_REQUIRED
#include <AvailabilityMacros.h>
#elif defined(__IPHONE_OS_MIN_REQUIRED)
#include <Availability.h>
#endif


typedef int32_t hb_atomic_int_impl_t;
/* OSAtomicAdd32Barrier returns the *new* value; subtract V to get
 * old-value (fetch-and-add) semantics. */
#define hb_atomic_int_impl_add(AI, V)		(OSAtomicAdd32Barrier ((V), &(AI)) - (V))

#define hb_atomic_ptr_impl_get(P)		(OSMemoryBarrier (), (void *) *(P))
/* OSAtomicCompareAndSwapPtrBarrier needs OS X 10.5+ / iOS 2.1+.
 * NOTE(review): if __IPHONE_VERSION_MIN_REQUIRED is a misspelling of
 * __IPHONE_OS_VERSION_MIN_REQUIRED, it expands to 0 here and iOS silently
 * takes the width-specific fallback below (still correct, just not the
 * pointer-typed call) — verify. */
#if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
#else
/* Pre-10.5 fallback: pick the CAS width matching the pointer size. */
#if __ppc64__ || __x86_64__ || __aarch64__
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	OSAtomicCompareAndSwap64Barrier ((int64_t) (void *) (O), (int64_t) (void *) (N), (int64_t*) (P))
#else
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	OSAtomicCompareAndSwap32Barrier ((int32_t) (void *) (O), (int32_t) (void *) (N), (int32_t*) (P))
#endif
#endif
|
116 |
|
117 |
|
/* IBM XL C/C++ on AIX: use the compiler's atomic builtins. */
#elif !defined(HB_NO_MT) && defined(_AIX) && (defined(__IBMCPP__) || defined(__ibmxl__))

#include <builtins.h>

|
/* Atomically add V to *AI and return the *old* value.
 * __lwsync before and __isync after bracket __fetch_and_add so the
 * operation gets full-barrier semantics on POWER. */
static inline int _hb_fetch_and_add(volatile int* AI, unsigned int V) {
  __lwsync();
  int result = __fetch_and_add(AI, V);
  __isync();
  return result;
}
|
/* Atomically set *P to N iff *P == O; returns non-zero on success.
 * __compare_and_swaplp takes the expected value by address (and may
 * overwrite it with the observed value); O is a by-value copy, so the
 * caller's argument is never touched.  __sync on both sides makes the
 * CAS a full barrier. */
static inline int _hb_compare_and_swaplp(volatile long* P, long O, long N) {
  __sync();
  int result = __compare_and_swaplp (P, &O, N);
  __sync();
  return result;
}
|
135 |
|
typedef int hb_atomic_int_impl_t;
#define hb_atomic_int_impl_add(AI, V)		_hb_fetch_and_add (&(AI), (V))

/* __sync gives a full barrier before the plain load. */
#define hb_atomic_ptr_impl_get(P)		(__sync(), (void *) *(P))
/* Pointer CAS via the long-width builtin wrapper above. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	_hb_compare_and_swaplp ((long*)(P), (long)(O), (long)(N))
|
141 |
|
#elif !defined(HB_NO_MT)

/* Multi-threaded build but no atomic primitives available: plain
 * volatile accesses only.  These are NOT atomic or thread-safe;
 * HB_ATOMIC_INT_NIL lets the build surface a warning about it. */
#define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */

typedef volatile int hb_atomic_int_impl_t;
/* Non-atomic read-modify-write; returns the old value like the real impls. */
#define hb_atomic_int_impl_add(AI, V)		(((AI) += (V)) - (V))

#define hb_atomic_ptr_impl_get(P)		((void *) *(P))
/* Non-atomic compare-then-store emulation of CAS. */
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	(* (void * volatile *) (P) == (void *) (O) ? (* (void * volatile *) (P) = (void *) (N), true) : false)
|
152 |
|
#else /* HB_NO_MT */

/* Single-threaded build: plain, non-atomic operations suffice. */
typedef int hb_atomic_int_impl_t;
/* Returns the old value, matching the atomic backends' semantics. */
#define hb_atomic_int_impl_add(AI, V)		(((AI) += (V)) - (V))

#define hb_atomic_ptr_impl_get(P)		((void *) *(P))
#define hb_atomic_ptr_impl_cmpexch(P,O,N)	(* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)


#endif
|
163 |
|
164 |
|
/* Static/aggregate initializer for hb_atomic_int_t. */
#define HB_ATOMIC_INT_INIT(V)          {V}
|
166 |
|
/* Integer with atomic increment/decrement.  set_unsafe/get_unsafe are
 * deliberately unsynchronized (hence the name); use them only when no
 * other thread can access the value concurrently. */
struct hb_atomic_int_t
{
  hb_atomic_int_impl_t v;

  /* Plain, non-atomic store/load. */
  inline void set_unsafe (int v_) { v = v_; }
  inline int get_unsafe (void) const { return v; }
  /* Atomically add +1/-1; returns the value *before* the change
   * (fetch-and-add semantics of hb_atomic_int_impl_add).  The const_cast
   * presumably normalizes cv-qualification of the impl type (volatile in
   * the nil backend) for the macro's use of the lvalue — confirm. */
  inline int inc (void) { return hb_atomic_int_impl_add (const_cast<hb_atomic_int_impl_t &> (v), 1); }
  inline int dec (void) { return hb_atomic_int_impl_add (const_cast<hb_atomic_int_impl_t &> (v), -1); }
};
|
176 |
|
177 |
|
/* Public entry points used by the rest of HarfBuzz; thin aliases over
 * whichever backend impl was selected above. */
#define hb_atomic_ptr_get(P)		hb_atomic_ptr_impl_get(P)
#define hb_atomic_ptr_cmpexch(P,O,N)	hb_atomic_ptr_impl_cmpexch((P),(O),(N))
|
180 |
|
181 |
|
182 #endif /* HB_ATOMIC_PRIVATE_HH */ |
|