This source file includes following definitions.
- pmix_atomic_fetch_min_32
- pmix_atomic_fetch_max_32
- pmix_atomic_swap_32
- PMIX_ATOMIC_DEFINE_CMPXCG_OP
- pmix_atomic_fetch_max_64
- pmix_atomic_swap_64
- pmix_atomic_add_xx
- pmix_atomic_sub_xx
- PMIX_ATOMIC_DEFINE_OP_FETCH
- pmix_atomic_max_fetch_32
- PMIX_ATOMIC_DEFINE_OP_FETCH
- pmix_atomic_max_fetch_64
- pmix_atomic_fetch_add_ptr
- pmix_atomic_add_fetch_ptr
- pmix_atomic_fetch_sub_ptr
- pmix_atomic_sub_fetch_ptr
- pmix_atomic_lock_init
- pmix_atomic_trylock
- pmix_atomic_lock
- pmix_atomic_unlock
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26 #include <stdlib.h>
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41 #if PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32
42
43 #if !defined(PMIX_HAVE_ATOMIC_MIN_32)
/* Atomically store min(*addr, value) at addr and return the value that
 * was previously stored there.  If the current value is already <= the
 * candidate, nothing is written. */
static inline int32_t pmix_atomic_fetch_min_32 (pmix_atomic_int32_t *addr, int32_t value)
{
    int32_t old = *addr;
    do {
        if (old <= value) {
            /* current value is already the minimum -- no store needed */
            break;
        }
        /* on CAS failure `old` is refreshed with the current contents
         * of addr, so the loop re-tests against an up-to-date value */
    } while (!pmix_atomic_compare_exchange_strong_32 (addr, &old, value));

    return old;
}
55
56 #define PMIX_HAVE_ATOMIC_MIN_32 1
57
58 #endif
59
60 #if !defined(PMIX_HAVE_ATOMIC_MAX_32)
/* Atomically store max(*addr, value) at addr and return the value that
 * was previously stored there.  If the current value is already >= the
 * candidate, nothing is written. */
static inline int32_t pmix_atomic_fetch_max_32 (pmix_atomic_int32_t *addr, int32_t value)
{
    int32_t old = *addr;
    do {
        if (old >= value) {
            /* current value is already the maximum -- no store needed */
            break;
        }
        /* on CAS failure `old` is refreshed with the current contents
         * of addr, so the loop re-tests against an up-to-date value */
    } while (!pmix_atomic_compare_exchange_strong_32 (addr, &old, value));

    return old;
}
72
73 #define PMIX_HAVE_ATOMIC_MAX_32 1
74 #endif
75
/* Generate pmix_atomic_fetch_<name>_<bits>(): atomically perform
 * `*addr = *addr <operation> value` and return the previous value.
 *
 * The load of *addr is hoisted out of the loop: on failure the strong
 * compare-exchange already refreshes `oldval` with the current contents
 * of addr (the same contract the swap implementations in this file rely
 * on), so re-reading *addr on every iteration is redundant. */
#define PMIX_ATOMIC_DEFINE_CMPXCG_OP(type, bits, operation, name)       \
    static inline type pmix_atomic_fetch_ ## name ## _ ## bits (pmix_atomic_ ## type *addr, type value) \
    {                                                                   \
        type oldval = *addr;                                            \
        do {                                                            \
        } while (!pmix_atomic_compare_exchange_strong_ ## bits (addr, &oldval, oldval operation value)); \
                                                                        \
        return oldval;                                                  \
    }
86
87 #if !defined(PMIX_HAVE_ATOMIC_SWAP_32)
88 #define PMIX_HAVE_ATOMIC_SWAP_32 1
/* Atomically replace *addr with newval, returning the previous value.
 * The loop body is empty because a failed strong compare-exchange
 * refreshes `old` with the current contents of addr; the loop simply
 * retries until the exchange succeeds. */
static inline int32_t pmix_atomic_swap_32(pmix_atomic_int32_t *addr,
                                          int32_t newval)
{
    int32_t old = *addr;
    do {
    } while (!pmix_atomic_compare_exchange_strong_32 (addr, &old, newval));

    return old;
}
98 #endif
99
100 #if !defined(PMIX_HAVE_ATOMIC_ADD_32)
101 #define PMIX_HAVE_ATOMIC_ADD_32 1
102
103 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int32_t, 32, +, add)
104
105 #endif
106
107 #if !defined(PMIX_HAVE_ATOMIC_AND_32)
108 #define PMIX_HAVE_ATOMIC_AND_32 1
109
110 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int32_t, 32, &, and)
111
112 #endif
113
114 #if !defined(PMIX_HAVE_ATOMIC_OR_32)
115 #define PMIX_HAVE_ATOMIC_OR_32 1
116
117 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int32_t, 32, |, or)
118
119 #endif
120
121 #if !defined(PMIX_HAVE_ATOMIC_XOR_32)
122 #define PMIX_HAVE_ATOMIC_XOR_32 1
123
124 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int32_t, 32, ^, xor)
125
126 #endif
127
128
129 #if !defined(PMIX_HAVE_ATOMIC_SUB_32)
130 #define PMIX_HAVE_ATOMIC_SUB_32 1
131
132 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int32_t, 32, -, sub)
133
134 #endif
135
136 #endif
137
138
139 #if PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64
140
141 #if !defined(PMIX_HAVE_ATOMIC_MIN_64)
/* 64-bit variant of pmix_atomic_fetch_min_32: atomically store
 * min(*addr, value) at addr and return the previously stored value. */
static inline int64_t pmix_atomic_fetch_min_64 (pmix_atomic_int64_t *addr, int64_t value)
{
    int64_t old = *addr;
    do {
        if (old <= value) {
            /* current value is already the minimum -- no store needed */
            break;
        }
        /* on CAS failure `old` is refreshed with the current contents of addr */
    } while (!pmix_atomic_compare_exchange_strong_64 (addr, &old, value));

    return old;
}
153
154 #define PMIX_HAVE_ATOMIC_MIN_64 1
155
156 #endif
157
158 #if !defined(PMIX_HAVE_ATOMIC_MAX_64)
/* 64-bit variant of pmix_atomic_fetch_max_32: atomically store
 * max(*addr, value) at addr and return the previously stored value. */
static inline int64_t pmix_atomic_fetch_max_64 (pmix_atomic_int64_t *addr, int64_t value)
{
    int64_t old = *addr;
    do {
        if (old >= value) {
            /* current value is already the maximum -- no store needed */
            break;
        }
        /* on CAS failure `old` is refreshed with the current contents of addr */
    } while (!pmix_atomic_compare_exchange_strong_64 (addr, &old, value));

    return old;
}
170
171 #define PMIX_HAVE_ATOMIC_MAX_64 1
172 #endif
173
174 #if !defined(PMIX_HAVE_ATOMIC_SWAP_64)
175 #define PMIX_HAVE_ATOMIC_SWAP_64 1
/* Atomically replace *addr with newval, returning the previous value.
 * As with the 32-bit variant, the empty loop body relies on the failed
 * compare-exchange refreshing `old` with the current contents of addr. */
static inline int64_t pmix_atomic_swap_64(pmix_atomic_int64_t *addr,
                                          int64_t newval)
{
    int64_t old = *addr;
    do {
    } while (!pmix_atomic_compare_exchange_strong_64 (addr, &old, newval));

    return old;
}
185 #endif
186
187 #if !defined(PMIX_HAVE_ATOMIC_ADD_64)
188 #define PMIX_HAVE_ATOMIC_ADD_64 1
189
190 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int64_t, 64, +, add)
191
192 #endif
193
194 #if !defined(PMIX_HAVE_ATOMIC_AND_64)
195 #define PMIX_HAVE_ATOMIC_AND_64 1
196
197 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int64_t, 64, &, and)
198
199 #endif
200
201 #if !defined(PMIX_HAVE_ATOMIC_OR_64)
202 #define PMIX_HAVE_ATOMIC_OR_64 1
203
204 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int64_t, 64, |, or)
205
206 #endif
207
208 #if !defined(PMIX_HAVE_ATOMIC_XOR_64)
209 #define PMIX_HAVE_ATOMIC_XOR_64 1
210
211 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int64_t, 64, ^, xor)
212
213 #endif
214
215 #if !defined(PMIX_HAVE_ATOMIC_SUB_64)
216 #define PMIX_HAVE_ATOMIC_SUB_64 1
217
218 PMIX_ATOMIC_DEFINE_CMPXCG_OP(int64_t, 64, -, sub)
219
220 #endif
221
222 #else
223
224 #if !defined(PMIX_HAVE_ATOMIC_ADD_64)
225 #define PMIX_HAVE_ATOMIC_ADD_64 0
226 #endif
227
228 #if !defined(PMIX_HAVE_ATOMIC_SUB_64)
229 #define PMIX_HAVE_ATOMIC_SUB_64 0
230 #endif
231
232 #endif
233
234 #if (PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 || PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64)
235
/* Define a size-dispatching compare-exchange helper:
 * pmix_atomic_compare_exchange_strong<semantics>xx() selects the 32- or
 * 64-bit primitive from the runtime `length` argument (operand size in
 * bytes) and aborts on any unsupported size.  When only 32-bit CAS is
 * available, the 8-byte case is absent and length==8 aborts. */
#if PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 && PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64
#define PMIX_ATOMIC_DEFINE_CMPXCG_XX(semantics) \
    static inline bool \
    pmix_atomic_compare_exchange_strong ## semantics ## xx (pmix_atomic_intptr_t* addr, intptr_t *oldval, \
                                                            int64_t newval, const size_t length) \
    { \
        switch (length) { \
        case 4: \
            return pmix_atomic_compare_exchange_strong_32 ((pmix_atomic_int32_t *) addr, \
                                                           (int32_t *) oldval, (int32_t) newval); \
        case 8: \
            return pmix_atomic_compare_exchange_strong_64 ((pmix_atomic_int64_t *) addr, \
                                                           (int64_t *) oldval, (int64_t) newval); \
        } \
        abort(); \
    }
#elif PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32
#define PMIX_ATOMIC_DEFINE_CMPXCG_XX(semantics) \
    static inline bool \
    pmix_atomic_compare_exchange_strong ## semantics ## xx (pmix_atomic_intptr_t* addr, intptr_t *oldval, \
                                                            int64_t newval, const size_t length) \
    { \
        switch (length) { \
        case 4: \
            return pmix_atomic_compare_exchange_strong_32 ((pmix_atomic_int32_t *) addr, \
                                                           (int32_t *) oldval, (int32_t) newval); \
        } \
        abort(); \
    }
#else
#error "Platform does not have required atomic compare-and-swap functionality"
#endif
268
269 PMIX_ATOMIC_DEFINE_CMPXCG_XX(_)
270 PMIX_ATOMIC_DEFINE_CMPXCG_XX(_acq_)
271 PMIX_ATOMIC_DEFINE_CMPXCG_XX(_rel_)
272
/* Define pointer-sized compare-exchange wrappers by delegating to the
 * integer CAS primitive that matches the platform's pointer width. */
#if SIZEOF_VOID_P == 4 && PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32
#define PMIX_ATOMIC_DEFINE_CMPXCG_PTR_XX(semantics) \
    static inline bool \
    pmix_atomic_compare_exchange_strong ## semantics ## ptr (pmix_atomic_intptr_t* addr, intptr_t *oldval, intptr_t newval) \
    { \
        return pmix_atomic_compare_exchange_strong_32 ((pmix_atomic_int32_t *) addr, (int32_t *) oldval, (int32_t) newval); \
    }
#elif SIZEOF_VOID_P == 8 && PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64
#define PMIX_ATOMIC_DEFINE_CMPXCG_PTR_XX(semantics) \
    static inline bool \
    pmix_atomic_compare_exchange_strong ## semantics ## ptr (pmix_atomic_intptr_t* addr, intptr_t *oldval, intptr_t newval) \
    { \
        return pmix_atomic_compare_exchange_strong_64 ((pmix_atomic_int64_t *) addr, (int64_t *) oldval, (int64_t) newval); \
    }
#else
#error "Can not define pmix_atomic_compare_exchange_strong_ptr with existing atomics"
#endif
290
291 PMIX_ATOMIC_DEFINE_CMPXCG_PTR_XX(_)
292 PMIX_ATOMIC_DEFINE_CMPXCG_PTR_XX(_acq_)
293 PMIX_ATOMIC_DEFINE_CMPXCG_PTR_XX(_rel_)
294
295 #endif
296
297
298 #if (PMIX_HAVE_ATOMIC_SWAP_32 || PMIX_HAVE_ATOMIC_SWAP_64)
299
300 #if SIZEOF_VOID_P == 4 && PMIX_HAVE_ATOMIC_SWAP_32
301 #define pmix_atomic_swap_ptr(addr, value) (intptr_t) pmix_atomic_swap_32((pmix_atomic_int32_t *) addr, (int32_t) value)
302 #elif SIZEOF_VOID_P == 8 && PMIX_HAVE_ATOMIC_SWAP_64
303 #define pmix_atomic_swap_ptr(addr, value) (intptr_t) pmix_atomic_swap_64((pmix_atomic_int64_t *) addr, (int64_t) value)
304 #endif
305
306 #endif
307
308 #if (PMIX_HAVE_ATOMIC_LLSC_32 || PMIX_HAVE_ATOMIC_LLSC_64)
309
310 #if SIZEOF_VOID_P == 4 && PMIX_HAVE_ATOMIC_LLSC_32
311
312 #define pmix_atomic_ll_ptr(addr, ret) pmix_atomic_ll_32((pmix_atomic_int32_t *) (addr), ret)
313 #define pmix_atomic_sc_ptr(addr, value, ret) pmix_atomic_sc_32((pmix_atomic_int32_t *) (addr), (intptr_t) (value), ret)
314
315 #define PMIX_HAVE_ATOMIC_LLSC_PTR 1
316
317 #elif SIZEOF_VOID_P == 8 && PMIX_HAVE_ATOMIC_LLSC_64
318
319 #define pmix_atomic_ll_ptr(addr, ret) pmix_atomic_ll_64((pmix_atomic_int64_t *) (addr), ret)
320 #define pmix_atomic_sc_ptr(addr, value, ret) pmix_atomic_sc_64((pmix_atomic_int64_t *) (addr), (intptr_t) (value), ret)
321
322 #define PMIX_HAVE_ATOMIC_LLSC_PTR 1
323
324 #endif
325
326 #endif
327
328 #if !defined(PMIX_HAVE_ATOMIC_LLSC_PTR)
329 #define PMIX_HAVE_ATOMIC_LLSC_PTR 0
330 #endif
331
332 #if PMIX_HAVE_ATOMIC_MATH_32 || PMIX_HAVE_ATOMIC_MATH_64
333
/* Atomically add `value` to the 4- or 8-byte integer at `addr`.
 * `length` is the operand size in bytes; the fetched result is
 * discarded.  Unsupported sizes abort the process. */
static inline void
pmix_atomic_add_xx(pmix_atomic_intptr_t* addr, int32_t value, size_t length)
{
   switch( length ) {
#if PMIX_HAVE_ATOMIC_ADD_32
   case 4:
      (void) pmix_atomic_fetch_add_32( (pmix_atomic_int32_t*)addr, (int32_t)value );
      break;
#endif

#if PMIX_HAVE_ATOMIC_ADD_64
   case 8:
      (void) pmix_atomic_fetch_add_64( (pmix_atomic_int64_t*)addr, (int64_t)value );
      break;
#endif
   default:
      /* operand size not supported by this platform's atomics */

      abort();
   }
}
355
356
/* Atomically subtract `value` from the 4- or 8-byte integer at `addr`.
 * `length` is the operand size in bytes; the fetched result is
 * discarded.  Unsupported sizes abort the process. */
static inline void
pmix_atomic_sub_xx(pmix_atomic_intptr_t* addr, int32_t value, size_t length)
{
   switch( length ) {
#if PMIX_HAVE_ATOMIC_SUB_32
   case 4:
      (void) pmix_atomic_fetch_sub_32( (pmix_atomic_int32_t*)addr, (int32_t)value );
      break;
#endif

#if PMIX_HAVE_ATOMIC_SUB_64
   case 8:
      (void) pmix_atomic_fetch_sub_64( (pmix_atomic_int64_t*)addr, (int64_t)value );
      break;
#endif
   default:
      /* operand size not supported by this platform's atomics */

      abort();
   }
}
378
/* Generate pmix_atomic_<op>_fetch_<suffix>(): like the fetch_<op>
 * variant but returns the NEW value, computed by re-applying the
 * operation to the fetched (previous) value. */
#define PMIX_ATOMIC_DEFINE_OP_FETCH(op, operation, type, ptr_type, suffix) \
    static inline type pmix_atomic_ ## op ## _fetch_ ## suffix (pmix_atomic_ ## ptr_type *addr, type value) \
    {                                                                   \
        return pmix_atomic_fetch_ ## op ## _ ## suffix (addr, value) operation value; \
    }
384
385 PMIX_ATOMIC_DEFINE_OP_FETCH(add, +, int32_t, int32_t, 32)
386 PMIX_ATOMIC_DEFINE_OP_FETCH(and, &, int32_t, int32_t, 32)
387 PMIX_ATOMIC_DEFINE_OP_FETCH(or, |, int32_t, int32_t, 32)
388 PMIX_ATOMIC_DEFINE_OP_FETCH(xor, ^, int32_t, int32_t, 32)
389 PMIX_ATOMIC_DEFINE_OP_FETCH(sub, -, int32_t, int32_t, 32)
390
391 static inline int32_t pmix_atomic_min_fetch_32 (pmix_atomic_int32_t *addr, int32_t value)
392 {
393 int32_t old = pmix_atomic_fetch_min_32 (addr, value);
394 return old <= value ? old : value;
395 }
396
397 static inline int32_t pmix_atomic_max_fetch_32 (pmix_atomic_int32_t *addr, int32_t value)
398 {
399 int32_t old = pmix_atomic_fetch_max_32 (addr, value);
400 return old >= value ? old : value;
401 }
402
403 #if PMIX_HAVE_ATOMIC_MATH_64
404 PMIX_ATOMIC_DEFINE_OP_FETCH(add, +, int64_t, int64_t, 64)
405 PMIX_ATOMIC_DEFINE_OP_FETCH(and, &, int64_t, int64_t, 64)
406 PMIX_ATOMIC_DEFINE_OP_FETCH(or, |, int64_t, int64_t, 64)
407 PMIX_ATOMIC_DEFINE_OP_FETCH(xor, ^, int64_t, int64_t, 64)
408 PMIX_ATOMIC_DEFINE_OP_FETCH(sub, -, int64_t, int64_t, 64)
409
410 static inline int64_t pmix_atomic_min_fetch_64 (pmix_atomic_int64_t *addr, int64_t value)
411 {
412 int64_t old = pmix_atomic_fetch_min_64 (addr, value);
413 return old <= value ? old : value;
414 }
415
416 static inline int64_t pmix_atomic_max_fetch_64 (pmix_atomic_int64_t *addr, int64_t value)
417 {
418 int64_t old = pmix_atomic_fetch_max_64 (addr, value);
419 return old >= value ? old : value;
420 }
421
422 #endif
423
424 static inline intptr_t pmix_atomic_fetch_add_ptr( pmix_atomic_intptr_t* addr,
425 void* delta )
426 {
427 #if SIZEOF_VOID_P == 4 && PMIX_HAVE_ATOMIC_ADD_32
428 return pmix_atomic_fetch_add_32((pmix_atomic_int32_t*) addr, (unsigned long) delta);
429 #elif SIZEOF_VOID_P == 8 && PMIX_HAVE_ATOMIC_ADD_64
430 return pmix_atomic_fetch_add_64((pmix_atomic_int64_t*) addr, (unsigned long) delta);
431 #else
432 abort ();
433 return 0;
434 #endif
435 }
436
437 static inline intptr_t pmix_atomic_add_fetch_ptr( pmix_atomic_intptr_t* addr,
438 void* delta )
439 {
440 #if SIZEOF_VOID_P == 4 && PMIX_HAVE_ATOMIC_ADD_32
441 return pmix_atomic_add_fetch_32((pmix_atomic_int32_t*) addr, (unsigned long) delta);
442 #elif SIZEOF_VOID_P == 8 && PMIX_HAVE_ATOMIC_ADD_64
443 return pmix_atomic_add_fetch_64((pmix_atomic_int64_t*) addr, (unsigned long) delta);
444 #else
445 abort ();
446 return 0;
447 #endif
448 }
449
450 static inline intptr_t pmix_atomic_fetch_sub_ptr( pmix_atomic_intptr_t* addr,
451 void* delta )
452 {
453 #if SIZEOF_VOID_P == 4 && PMIX_HAVE_ATOMIC_SUB_32
454 return pmix_atomic_fetch_sub_32((pmix_atomic_int32_t*) addr, (unsigned long) delta);
455 #elif SIZEOF_VOID_P == 8 && PMIX_HAVE_ATOMIC_SUB_32
456 return pmix_atomic_fetch_sub_64((pmix_atomic_int64_t*) addr, (unsigned long) delta);
457 #else
458 abort();
459 return 0;
460 #endif
461 }
462
463 static inline intptr_t pmix_atomic_sub_fetch_ptr( pmix_atomic_intptr_t* addr,
464 void* delta )
465 {
466 #if SIZEOF_VOID_P == 4 && PMIX_HAVE_ATOMIC_SUB_32
467 return pmix_atomic_sub_fetch_32((pmix_atomic_int32_t*) addr, (unsigned long) delta);
468 #elif SIZEOF_VOID_P == 8 && PMIX_HAVE_ATOMIC_SUB_32
469 return pmix_atomic_sub_fetch_64((pmix_atomic_int64_t*) addr, (unsigned long) delta);
470 #else
471 abort();
472 return 0;
473 #endif
474 }
475
476 #endif
477
478
479
480
481
482
483 #ifdef PMIX_NEED_INLINE_ATOMIC_SPINLOCKS
484
485
486
487
/* Initialize a spinlock to the given state (typically
 * PMIX_ATOMIC_LOCK_UNLOCKED).  Plain store -- not atomic; must be
 * called before the lock is visible to other threads. */
static inline void
pmix_atomic_lock_init( pmix_atomic_lock_t* lock, int32_t value )
{
   lock->u.lock = value;
}
493
494
495 static inline int
496 pmix_atomic_trylock(pmix_atomic_lock_t *lock)
497 {
498 int32_t unlocked = PMIX_ATOMIC_LOCK_UNLOCKED;
499 bool ret = pmix_atomic_compare_exchange_strong_acq_32 (&lock->u.lock, &unlocked, PMIX_ATOMIC_LOCK_LOCKED);
500 return (ret == false) ? 1 : 0;
501 }
502
503
/* Acquire the lock, spinning until it becomes available.  This is the
 * test-and-test-and-set pattern: after a failed trylock we spin on a
 * plain read of the lock word until it appears free, avoiding repeated
 * atomic read-modify-write traffic while the lock is held. */
static inline void
pmix_atomic_lock(pmix_atomic_lock_t *lock)
{
   while (pmix_atomic_trylock (lock)) {
      while (lock->u.lock == PMIX_ATOMIC_LOCK_LOCKED) {
         ;
      }
   }
}
513
514
/* Release the lock.  The write barrier makes all stores performed
 * inside the critical section visible before the lock word is cleared
 * by the plain store that follows. */
static inline void
pmix_atomic_unlock(pmix_atomic_lock_t *lock)
{
   pmix_atomic_wmb();
   lock->u.lock=PMIX_ATOMIC_LOCK_UNLOCKED;
}
521
522 #endif