/*
 * This source file includes the following definitions:
 *
 *  - pmix_atomic_mb
 *  - pmix_atomic_rmb
 *  - pmix_atomic_wmb
 *  - pmix_atomic_isync
 *  - pmix_atomic_compare_exchange_strong_32
 *  - pmix_atomic_compare_exchange_strong_acq_32
 *  - pmix_atomic_compare_exchange_strong_rel_32
 *  - pmix_atomic_swap_32
 *  - PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_64
 *  - pmix_atomic_swap_64
 *  - pmix_atomic_compare_exchange_strong_64
 *  - pmix_atomic_compare_exchange_strong_acq_64
 *  - pmix_atomic_compare_exchange_strong_rel_64
 */
#ifndef PMIX_SYS_ARCH_ATOMIC_H
#define PMIX_SYS_ARCH_ATOMIC_H 1

/*
 * Memory barrier macros: "sync" is a full barrier, "lwsync" orders
 * loads (read barrier) and stores (write barrier), and "isync"
 * discards any speculatively executed instructions.
 */
#define PMIXMB()  __asm__ __volatile__ ("sync" : : : "memory")
#define PMIXRMB() __asm__ __volatile__ ("lwsync" : : : "memory")
#define PMIXWMB() __asm__ __volatile__ ("lwsync" : : : "memory")
#define ISYNC()   __asm__ __volatile__ ("isync" : : : "memory")

/*
 * Flags describing which operations this file implements.
 */

#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1
#define PMIX_HAVE_ATOMIC_SWAP_32 1
#define PMIX_HAVE_ATOMIC_LLSC_32 1

#define PMIX_HAVE_ATOMIC_MATH_32 1
#define PMIX_HAVE_ATOMIC_ADD_32 1
#define PMIX_HAVE_ATOMIC_AND_32 1
#define PMIX_HAVE_ATOMIC_OR_32 1
#define PMIX_HAVE_ATOMIC_XOR_32 1
#define PMIX_HAVE_ATOMIC_SUB_32 1

/* The 64-bit operations are available on 64-bit PowerPC, or on 32-bit
 * PowerPC when the assembler supports 64-bit instructions. */
#if (PMIX_ASSEMBLY_ARCH == PMIX_POWERPC64) || PMIX_ASM_SUPPORT_64BIT
#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1
#define PMIX_HAVE_ATOMIC_SWAP_64 1
#define PMIX_HAVE_ATOMIC_LLSC_64 1
#define PMIX_HAVE_ATOMIC_MATH_64 1
#define PMIX_HAVE_ATOMIC_ADD_64 1
#define PMIX_HAVE_ATOMIC_AND_64 1
#define PMIX_HAVE_ATOMIC_OR_64 1
#define PMIX_HAVE_ATOMIC_XOR_64 1
#define PMIX_HAVE_ATOMIC_SUB_64 1
#endif

/*
 * Memory Barriers
 */

#if PMIX_GCC_INLINE_ASSEMBLY

static inline
void pmix_atomic_mb(void)
{
    PMIXMB();
}

static inline
void pmix_atomic_rmb(void)
{
    PMIXRMB();
}

static inline
void pmix_atomic_wmb(void)
{
    PMIXWMB();
}

static inline
void pmix_atomic_isync(void)
{
    ISYNC();
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */
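
/*
 * Usage sketch (illustrative, not part of the original header): a
 * producer/consumer handshake built on the barriers above. All names
 * below are hypothetical.
 *
 *   static int payload;
 *   static pmix_atomic_int32_t flag = 0;
 *
 *   static void producer (void)
 *   {
 *       payload = 42;
 *       pmix_atomic_wmb();   // lwsync: publish payload before the flag
 *       flag = 1;
 *   }
 *
 *   static void consumer (void)
 *   {
 *       while (0 == flag) { }
 *       pmix_atomic_rmb();   // lwsync: do not read payload before the flag
 *       (void) payload;      // guaranteed to observe 42
 *   }
 */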

/*
 * Atomic operations
 */

#if PMIX_GCC_INLINE_ASSEMBLY

#ifdef __xlC__
/* work-around for an xlc problem with pointer operands to inline
   assembly: pass the address as a uintptr_t */
#define PMIX_ASM_ADDR(a) ((uintptr_t)a)
#else
#define PMIX_ASM_ADDR(a) (a)
#endif

#if defined(__PGI)
/* work-around for a PGI problem with 64-bit operands to inline
   assembly: forcing the value through a pointer-sized cast makes the
   compiler emit a proper 64-bit load */
#define PMIX_ASM_VALUE64(x) (void *)(intptr_t) (x)
#else
#define PMIX_ASM_VALUE64(x) x
#endif
static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    int32_t prev;
    bool ret;

    __asm__ __volatile__ (
                          "1: lwarx   %0, 0, %2  \n\t"
                          "   cmpw    0, %0, %3  \n\t"
                          "   bne-    2f         \n\t"
                          "   stwcx.  %4, 0, %2  \n\t"
                          "   bne-    1b         \n\t"
                          "2:"
                          : "=&r" (prev), "=m" (*addr)
                          : "r" PMIX_ASM_ADDR(addr), "r" (*oldval), "r" (newval), "m" (*addr)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}
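
/*
 * Usage sketch (illustrative, not part of the original header): a
 * typical compare-and-swap retry loop built on the routine above. On
 * failure, the routine stores the currently observed value in *oldval,
 * so the loop does not need to reload it. The function name
 * pmix_fetch_max_32_example is hypothetical.
 *
 *   static inline int32_t pmix_fetch_max_32_example (pmix_atomic_int32_t *addr, int32_t value)
 *   {
 *       int32_t old = *addr;
 *       // retry until we either observe a value >= ours or win the CAS
 *       while (old < value &&
 *              !pmix_atomic_compare_exchange_strong_32 (addr, &old, value)) {
 *       }
 *       return old;
 *   }
 */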

/* these two macros have to be used together: pmix_atomic_ll_32
   performs the load-and-reserve (lwarx) and pmix_atomic_sc_32 performs
   the matching store-conditional (stwcx.) */
#define pmix_atomic_ll_32(addr, ret)                    \
    do {                                                \
        pmix_atomic_int32_t *_addr = (addr);            \
        int32_t _ret;                                   \
        __asm__ __volatile__ ("lwarx   %0, 0, %1  \n\t" \
                              : "=&r" (_ret)            \
                              : "r" (_addr)             \
                              );                        \
        ret = (typeof(ret)) _ret;                       \
    } while (0)

#define pmix_atomic_sc_32(addr, value, ret)                             \
    do {                                                                \
        pmix_atomic_int32_t *_addr = (addr);                            \
        int32_t _ret, _foo, _newval = (int32_t) value;                  \
                                                                        \
        __asm__ __volatile__ ("   stwcx.  %4, 0, %3  \n\t"              \
                              "   li      %0,0       \n\t"              \
                              "   bne-    1f         \n\t"              \
                              "   ori     %0,%0,1    \n\t"              \
                              "1:"                                      \
                              : "=r" (_ret), "=m" (*_addr), "=r" (_foo) \
                              : "r" (_addr), "r" (_newval)              \
                              : "cc", "memory");                        \
        ret = _ret;                                                     \
    } while (0)
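
/*
 * Usage sketch (illustrative, not part of the original header): an
 * atomic increment built from the LL/SC pair above. The function name
 * pmix_llsc_incr_32_example is hypothetical.
 *
 *   static inline int32_t pmix_llsc_incr_32_example (pmix_atomic_int32_t *addr)
 *   {
 *       int32_t oldval;
 *       int ret;
 *
 *       do {
 *           pmix_atomic_ll_32 (addr, oldval);          // lwarx: load and reserve
 *           pmix_atomic_sc_32 (addr, oldval + 1, ret); // stwcx.: store if reservation held
 *       } while (0 == ret);                            // retry if the reservation was lost
 *       return oldval;
 *   }
 */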

/* acquire/release variants: layer the appropriate memory barrier
   around the plain compare-and-exchange */
static inline bool pmix_atomic_compare_exchange_strong_acq_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}

static inline bool pmix_atomic_compare_exchange_strong_rel_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
}
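
/*
 * Usage sketch (illustrative, not part of the original header): a
 * minimal test-and-set lock built on the acquire variant. The function
 * names are hypothetical.
 *
 *   static inline void example_lock (pmix_atomic_int32_t *lock)
 *   {
 *       int32_t unlocked = 0;
 *       // acquire semantics: no later access moves before a successful CAS
 *       while (!pmix_atomic_compare_exchange_strong_acq_32 (lock, &unlocked, 1)) {
 *           unlocked = 0;   // the failed CAS wrote the observed value back
 *       }
 *   }
 *
 *   static inline void example_unlock (pmix_atomic_int32_t *lock)
 *   {
 *       pmix_atomic_wmb();  // release: publish prior writes before dropping the lock
 *       *lock = 0;
 *   }
 */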

static inline int32_t pmix_atomic_swap_32(pmix_atomic_int32_t *addr, int32_t newval)
{
    int32_t ret;

    __asm__ __volatile__ ("1: lwarx   %0, 0, %2  \n\t"
                          "   stwcx.  %3, 0, %2  \n\t"
                          "   bne-    1b         \n\t"
                          : "=&r" (ret), "=m" (*addr)
                          : "r" (addr), "r" (newval)
                          : "cc", "memory");

    return ret;
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#if (PMIX_ASSEMBLY_ARCH == PMIX_POWERPC64)

#if PMIX_GCC_INLINE_ASSEMBLY

/* generate the 64-bit fetch-and-op helpers (fetch_add/and/or/xor/sub)
   from a single ldarx/stdcx. retry loop */
#define PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_64(type, instr)               \
static inline int64_t pmix_atomic_fetch_ ## type ## _64(pmix_atomic_int64_t* v, int64_t val) \
{                                                                       \
    int64_t t, old;                                                     \
                                                                        \
    __asm__ __volatile__(                                               \
                         "1:  ldarx   %1, 0, %4    \n\t"                \
                         "    " #instr "   %0, %3, %1    \n\t"          \
                         "    stdcx.  %0, 0, %4    \n\t"                \
                         "    bne-    1b           \n\t"                \
                         : "=&r" (t), "=&r" (old), "=m" (*v)            \
                         : "r" (PMIX_ASM_VALUE64(val)), "r" PMIX_ASM_ADDR(v), "m" (*v) \
                         : "cc");                                       \
                                                                        \
    return old;                                                         \
}

PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_64(add, add)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_64(and, and)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_64(or, or)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_64(xor, xor)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_64(sub, subf)
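
/*
 * Usage sketch (illustrative, not part of the original header): the
 * expansions above yield, e.g., pmix_atomic_fetch_add_64, which returns
 * the value observed before the update. Note that subf computes
 * "%1 - %3", so the sub variant subtracts val from the old value as
 * expected.
 *
 *   pmix_atomic_int64_t counter = 0;
 *   int64_t before = pmix_atomic_fetch_add_64 (&counter, 5);
 *   // before == 0, counter == 5
 */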

static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    int64_t prev;
    bool ret;

    __asm__ __volatile__ (
                          "1: ldarx   %0, 0, %2  \n\t"
                          "   cmpd    0, %0, %3  \n\t"
                          "   bne-    2f         \n\t"
                          "   stdcx.  %4, 0, %2  \n\t"
                          "   bne-    1b         \n\t"
                          "2:"
                          : "=&r" (prev), "=m" (*addr)
                          : "r" (addr), "r" (PMIX_ASM_VALUE64(*oldval)), "r" (PMIX_ASM_VALUE64(newval)), "m" (*addr)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

#define pmix_atomic_ll_64(addr, ret)                    \
    do {                                                \
        pmix_atomic_int64_t *_addr = (addr);            \
        int64_t _ret;                                   \
        __asm__ __volatile__ ("ldarx   %0, 0, %1  \n\t" \
                              : "=&r" (_ret)            \
                              : "r" (_addr)             \
                              );                        \
        ret = (typeof(ret)) _ret;                       \
    } while (0)

#define pmix_atomic_sc_64(addr, value, ret)                             \
    do {                                                                \
        pmix_atomic_int64_t *_addr = (addr);                            \
        int64_t _newval = (int64_t) value;                              \
        int32_t _ret;                                                   \
                                                                        \
        __asm__ __volatile__ ("   stdcx.  %2, 0, %1  \n\t"              \
                              "   li      %0,0       \n\t"              \
                              "   bne-    1f         \n\t"              \
                              "   ori     %0,%0,1    \n\t"              \
                              "1:"                                      \
                              : "=r" (_ret)                             \
                              : "r" (_addr), "r" (PMIX_ASM_VALUE64(_newval)) \
                              : "cc", "memory");                        \
        ret = _ret;                                                     \
    } while (0)

static inline int64_t pmix_atomic_swap_64(pmix_atomic_int64_t *addr, int64_t newval)
{
    int64_t ret;

    __asm__ __volatile__ ("1: ldarx   %0, 0, %2  \n\t"
                          "   stdcx.  %3, 0, %2  \n\t"
                          "   bne-    1b         \n\t"
                          : "=&r" (ret), "=m" (*addr)
                          : "r" (addr), "r" (PMIX_ASM_VALUE64(newval))
                          : "cc", "memory");

    return ret;
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#elif (PMIX_ASSEMBLY_ARCH == PMIX_POWERPC32) && PMIX_ASM_SUPPORT_64BIT

#ifndef ll_low /* may already be provided elsewhere (e.g. glibc), so protect */
#define ll_low(x) *(((unsigned int *) &(x)) + 0)
#define ll_high(x) *(((unsigned int *) &(x)) + 1)
#endif

#if PMIX_GCC_INLINE_ASSEMBLY

static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    int64_t prev;
    int ret;

    /*
     * 64-bit compare-exchange on 32-bit PowerPC with 64-bit assembly
     * support. The expected and new values are passed through memory
     * ("m" constraints) and loaded into the fixed registers r4/r5,
     * since the compiler cannot allocate 64-bit operands in registers
     * for 32-bit code. After the ldarx/stdcx. loop, the
     * xor/subfic/adde sequence sets ret to 1 exactly when prev equals
     * the expected value (the carry is set only when the xor result
     * is zero).
     */
    __asm__ __volatile__ (
                          "ld r4,%3              \n\t"
                          "ld r5,%4              \n\t"
                          "1: ldarx   %1, 0, %2  \n\t"
                          "   cmpd    0, %1, r4  \n\t"
                          "   bne-    2f         \n\t"
                          "   stdcx.  r5, 0, %2  \n\t"
                          "   bne-    1b         \n\t"
                          "2:                    \n\t"
                          "xor r5,r4,%1          \n\t"
                          "subfic r9,r5,0        \n\t"
                          "adde %0,r9,r5         \n\t"
                          : "=&r" (ret), "+r" (prev)
                          : "r" PMIX_ASM_ADDR(addr),
                            "m" (*oldval), "m" (newval)
                          : "r4", "r5", "r9", "cc", "memory");

    *oldval = prev;
    return (bool) ret;
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#endif /* PMIX_ASSEMBLY_ARCH */

#if PMIX_GCC_INLINE_ASSEMBLY

/* acquire/release variants of the 64-bit compare-and-exchange, built
   the same way as their 32-bit counterparts */
static inline bool pmix_atomic_compare_exchange_strong_acq_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}

static inline bool pmix_atomic_compare_exchange_strong_rel_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
}

/* generate the 32-bit fetch-and-op helpers (fetch_add/and/or/xor/sub)
   from a single lwarx/stwcx. retry loop */
#define PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_32(type, instr)               \
static inline int32_t pmix_atomic_fetch_ ## type ## _32(pmix_atomic_int32_t* v, int32_t val) \
{                                                                       \
    int32_t t, old;                                                     \
                                                                        \
    __asm__ __volatile__(                                               \
                         "1:  lwarx   %1, 0, %4    \n\t"                \
                         "    " #instr "   %0, %3, %1    \n\t"          \
                         "    stwcx.  %0, 0, %4    \n\t"                \
                         "    bne-    1b           \n\t"                \
                         : "=&r" (t), "=&r" (old), "=m" (*v)            \
                         : "r" (val), "r" PMIX_ASM_ADDR(v), "m" (*v)    \
                         : "cc");                                       \
                                                                        \
    return old;                                                         \
}

PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_32(add, add)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_32(and, and)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_32(or, or)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_32(xor, xor)
PMIX_ATOMIC_POWERPC_DEFINE_ATOMIC_32(sub, subf)
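
/*
 * Usage sketch (illustrative, not part of the original header): a
 * reference-count release built on the generated
 * pmix_atomic_fetch_sub_32. The structure and function names are
 * hypothetical.
 *
 *   typedef struct {
 *       pmix_atomic_int32_t refcount;
 *   } example_obj_t;
 *
 *   static inline bool example_obj_release (example_obj_t *obj)
 *   {
 *       // fetch_sub returns the value before the decrement, so the
 *       // last owner sees 1 and is responsible for freeing the object
 *       return (1 == pmix_atomic_fetch_sub_32 (&obj->refcount, 1));
 *   }
 */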

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#endif /* ! PMIX_SYS_ARCH_ATOMIC_H */