This source file includes the following definitions:
- pmix_atomic_mb
- pmix_atomic_rmb
- pmix_atomic_wmb
- pmix_atomic_isync
- pmix_atomic_compare_exchange_strong_32
- pmix_atomic_swap_32
- pmix_atomic_compare_exchange_strong_acq_32
- pmix_atomic_compare_exchange_strong_rel_32
- pmix_atomic_compare_exchange_strong_64
- pmix_atomic_swap_64
- pmix_atomic_compare_exchange_strong_acq_64
- pmix_atomic_compare_exchange_strong_rel_64
#if !defined(PMIX_SYS_ARCH_ATOMIC_H)

#define PMIX_SYS_ARCH_ATOMIC_H 1

#if PMIX_GCC_INLINE_ASSEMBLY
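
/* Advertise which atomic primitives this AArch64 inline-assembly port provides. */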
#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1
#define PMIX_HAVE_ATOMIC_LLSC_32 1
#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1
#define PMIX_HAVE_ATOMIC_SWAP_32 1
#define PMIX_HAVE_ATOMIC_MATH_32 1
#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1
#define PMIX_HAVE_ATOMIC_SWAP_64 1
#define PMIX_HAVE_ATOMIC_LLSC_64 1
#define PMIX_HAVE_ATOMIC_ADD_32 1
#define PMIX_HAVE_ATOMIC_AND_32 1
#define PMIX_HAVE_ATOMIC_OR_32 1
#define PMIX_HAVE_ATOMIC_XOR_32 1
#define PMIX_HAVE_ATOMIC_SUB_32 1
#define PMIX_HAVE_ATOMIC_ADD_64 1
#define PMIX_HAVE_ATOMIC_AND_64 1
#define PMIX_HAVE_ATOMIC_OR_64 1
#define PMIX_HAVE_ATOMIC_XOR_64 1
#define PMIX_HAVE_ATOMIC_SUB_64 1

#define PMIXMB() __asm__ __volatile__ ("dmb sy" : : : "memory")
#define PMIXRMB() __asm__ __volatile__ ("dmb ld" : : : "memory")
#define PMIXWMB() __asm__ __volatile__ ("dmb st" : : : "memory")
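
/*
 * "dmb sy" is a full-system barrier ordering all loads and stores.  "dmb ld"
 * orders prior loads against subsequent loads and stores, while "dmb st"
 * orders prior stores against subsequent stores, which is why they back the
 * read and write barriers respectively.
 */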

static inline void pmix_atomic_mb (void)
{
    PMIXMB();
}

static inline void pmix_atomic_rmb (void)
{
    PMIXRMB();
}

static inline void pmix_atomic_wmb (void)
{
    PMIXWMB();
}

static inline void pmix_atomic_isync (void)
{
    __asm__ __volatile__ ("isb");
}
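
/*
 * The "isb" above flushes the instruction pipeline, so that instructions
 * after the barrier are fetched only once all preceding context-changing
 * operations are visible.
 */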

static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    int32_t prev, tmp;
    bool ret;

    __asm__ __volatile__ ("1:  ldaxr   %w0, [%2]       \n"
                          "    cmp     %w0, %w3        \n"
                          "    bne     2f              \n"
                          "    stxr    %w1, %w4, [%2]  \n"
                          "    cbnz    %w1, 1b         \n"
                          "2:                          \n"
                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}
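
/*
 * Minimal usage sketch (illustrative only; `counter` and `expected` are
 * hypothetical locals).  On failure the function stores the value actually
 * found into `expected`, so a loop can retry against the fresh value:
 *
 *     pmix_atomic_int32_t counter = 0;
 *     int32_t expected = 0;
 *     while (!pmix_atomic_compare_exchange_strong_32 (&counter, &expected,
 *                                                     expected + 1)) {
 *         ;  // expected now holds the current value of counter
 *     }
 */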

static inline int32_t pmix_atomic_swap_32 (pmix_atomic_int32_t *addr, int32_t newval)
{
    int32_t ret, tmp;

    __asm__ __volatile__ ("1:  ldaxr   %w0, [%2]       \n"
                          "    stlxr   %w1, %w3, [%2]  \n"
                          "    cbnz    %w1, 1b         \n"
                          : "=&r" (ret), "=&r" (tmp)
                          : "r" (addr), "r" (newval)
                          : "cc", "memory");

    return ret;
}
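
/*
 * The swap pairs ldaxr (load-acquire exclusive) with stlxr (store-release
 * exclusive), so it carries both acquire and release semantics; cbnz retries
 * the loop whenever the exclusive reservation is lost before the store.
 */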

static inline bool pmix_atomic_compare_exchange_strong_acq_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    int32_t prev, tmp;
    bool ret;

    __asm__ __volatile__ ("1:  ldaxr   %w0, [%2]       \n"
                          "    cmp     %w0, %w3        \n"
                          "    bne     2f              \n"
                          "    stxr    %w1, %w4, [%2]  \n"
                          "    cbnz    %w1, 1b         \n"
                          "2:                          \n"
                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

static inline bool pmix_atomic_compare_exchange_strong_rel_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    int32_t prev, tmp;
    bool ret;

    __asm__ __volatile__ ("1:  ldxr    %w0, [%2]       \n"
                          "    cmp     %w0, %w3        \n"
                          "    bne     2f              \n"
                          "    stlxr   %w1, %w4, [%2]  \n"
                          "    cbnz    %w1, 1b         \n"
                          "2:                          \n"
                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}
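
/*
 * The acquire variant pairs ldaxr with a plain stxr, while the release
 * variant pairs a plain ldxr with stlxr; the compare-and-branch structure is
 * otherwise identical to pmix_atomic_compare_exchange_strong_32 above.
 */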

#define pmix_atomic_ll_32(addr, ret)                            \
    do {                                                        \
        pmix_atomic_int32_t *_addr = (addr);                    \
        int32_t _ret;                                           \
                                                                \
        __asm__ __volatile__ ("ldaxr   %w0, [%1] \n"            \
                              : "=&r" (_ret)                    \
                              : "r" (_addr));                   \
                                                                \
        ret = (typeof(ret)) _ret;                               \
    } while (0)

#define pmix_atomic_sc_32(addr, newval, ret)                    \
    do {                                                        \
        pmix_atomic_int32_t *_addr = (addr);                    \
        int32_t _newval = (int32_t) newval;                     \
        int _ret;                                               \
                                                                \
        __asm__ __volatile__ ("stlxr   %w0, %w2, [%1] \n"       \
                              : "=&r" (_ret)                    \
                              : "r" (_addr), "r" (_newval)      \
                              : "cc", "memory");                \
                                                                \
        ret = (_ret == 0);                                      \
    } while (0)
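
/*
 * Usage sketch for the LL/SC pair (illustrative; `flag`, `val`, and `stored`
 * are hypothetical locals).  No other memory access should sit between the
 * two steps, or the exclusive reservation may be lost:
 *
 *     pmix_atomic_int32_t flag;
 *     int32_t val;
 *     bool stored;
 *
 *     do {
 *         pmix_atomic_ll_32 (&flag, val);              // val = flag
 *         pmix_atomic_sc_32 (&flag, val | 1, stored);  // conditional store
 *     } while (!stored);
 */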

static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    int64_t prev;
    int tmp;
    bool ret;

    __asm__ __volatile__ ("1:  ldaxr   %0, [%2]        \n"
                          "    cmp     %0, %3          \n"
                          "    bne     2f              \n"
                          "    stxr    %w1, %4, [%2]   \n"
                          "    cbnz    %w1, 1b         \n"
                          "2:                          \n"
                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}
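
/*
 * The 64-bit variants mirror the 32-bit ones, operating on full X registers
 * (%0 rather than %w0); only the store-exclusive status flag (%w1) is always
 * a 32-bit W register.
 */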

static inline int64_t pmix_atomic_swap_64 (pmix_atomic_int64_t *addr, int64_t newval)
{
    int64_t ret;
    int tmp;

    __asm__ __volatile__ ("1:  ldaxr   %0, [%2]        \n"
                          "    stlxr   %w1, %3, [%2]   \n"
                          "    cbnz    %w1, 1b         \n"
                          : "=&r" (ret), "=&r" (tmp)
                          : "r" (addr), "r" (newval)
                          : "cc", "memory");

    return ret;
}

static inline bool pmix_atomic_compare_exchange_strong_acq_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    int64_t prev;
    int tmp;
    bool ret;

    __asm__ __volatile__ ("1:  ldaxr   %0, [%2]        \n"
                          "    cmp     %0, %3          \n"
                          "    bne     2f              \n"
                          "    stxr    %w1, %4, [%2]   \n"
                          "    cbnz    %w1, 1b         \n"
                          "2:                          \n"
                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

static inline bool pmix_atomic_compare_exchange_strong_rel_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    int64_t prev;
    int tmp;
    bool ret;

    __asm__ __volatile__ ("1:  ldxr    %0, [%2]        \n"
                          "    cmp     %0, %3          \n"
                          "    bne     2f              \n"
                          "    stlxr   %w1, %4, [%2]   \n"
                          "    cbnz    %w1, 1b         \n"
                          "2:                          \n"
                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

#define pmix_atomic_ll_64(addr, ret)                            \
    do {                                                        \
        pmix_atomic_int64_t *_addr = (addr);                    \
        int64_t _ret;                                           \
                                                                \
        __asm__ __volatile__ ("ldaxr   %0, [%1] \n"             \
                              : "=&r" (_ret)                    \
                              : "r" (_addr));                   \
                                                                \
        ret = (typeof(ret)) _ret;                               \
    } while (0)

#define pmix_atomic_sc_64(addr, newval, ret)                    \
    do {                                                        \
        pmix_atomic_int64_t *_addr = (addr);                    \
        int64_t _newval = (int64_t) newval;                     \
        int _ret;                                               \
                                                                \
        __asm__ __volatile__ ("stlxr   %w0, %2, [%1] \n"        \
                              : "=&r" (_ret)                    \
                              : "r" (_addr), "r" (_newval)      \
                              : "cc", "memory");                \
                                                                \
        ret = (_ret == 0);                                      \
    } while (0)

#define PMIX_ASM_MAKE_ATOMIC(type, bits, name, inst, reg)                                               \
    static inline type pmix_atomic_fetch_ ## name ## _ ## bits (pmix_atomic_ ## type *addr, type value) \
    {                                                                                                   \
        type newval, old;                                                                               \
        int32_t tmp;                                                                                    \
                                                                                                        \
        __asm__ __volatile__("1: ldxr  %" reg "1, [%3]               \n"                                \
                             "   " inst "  %" reg "0, %" reg "1, %" reg "4 \n"                          \
                             "   stxr  %w2, %" reg "0, [%3]          \n"                                \
                             "   cbnz  %w2, 1b                       \n"                                \
                             : "=&r" (newval), "=&r" (old), "=&r" (tmp)                                 \
                             : "r" (addr), "r" (value)                                                  \
                             : "cc", "memory");                                                         \
                                                                                                        \
        return old;                                                                                     \
    }
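
/*
 * For reference, PMIX_ASM_MAKE_ATOMIC(int32_t, 32, add, "add", "w") expands
 * to pmix_atomic_fetch_add_32(): an LL/SC loop that loads the old value,
 * applies the instruction, and retries the store-exclusive until it succeeds,
 * returning the value seen before the update.  The reg argument selects
 * 32-bit W registers ("w") or 64-bit X registers ("").
 */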

PMIX_ASM_MAKE_ATOMIC(int32_t, 32, add, "add", "w")
PMIX_ASM_MAKE_ATOMIC(int32_t, 32, and, "and", "w")
PMIX_ASM_MAKE_ATOMIC(int32_t, 32, or, "orr", "w")
PMIX_ASM_MAKE_ATOMIC(int32_t, 32, xor, "eor", "w")
PMIX_ASM_MAKE_ATOMIC(int32_t, 32, sub, "sub", "w")
PMIX_ASM_MAKE_ATOMIC(int64_t, 64, add, "add", "")
PMIX_ASM_MAKE_ATOMIC(int64_t, 64, and, "and", "")
PMIX_ASM_MAKE_ATOMIC(int64_t, 64, or, "orr", "")
PMIX_ASM_MAKE_ATOMIC(int64_t, 64, xor, "eor", "")
PMIX_ASM_MAKE_ATOMIC(int64_t, 64, sub, "sub", "")

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#endif /* !defined(PMIX_SYS_ARCH_ATOMIC_H) */