root/opal/mca/pmix/pmix4x/pmix/src/atomics/sys/gcc_builtin/atomic.h

/* [<][>][^][v][top][bottom][index][help] */

INCLUDED FROM


DEFINITIONS

This source file includes following definitions.
  1. pmix_atomic_mb
  2. pmix_atomic_rmb
  3. pmix_atomic_wmb
  4. pmix_atomic_compare_exchange_strong_acq_32
  5. pmix_atomic_compare_exchange_strong_rel_32
  6. pmix_atomic_compare_exchange_strong_32
  7. pmix_atomic_swap_32
  8. pmix_atomic_fetch_add_32
  9. pmix_atomic_fetch_and_32
  10. pmix_atomic_fetch_or_32
  11. pmix_atomic_fetch_xor_32
  12. pmix_atomic_fetch_sub_32
  13. pmix_atomic_compare_exchange_strong_acq_64
  14. pmix_atomic_compare_exchange_strong_rel_64
  15. pmix_atomic_compare_exchange_strong_64
  16. pmix_atomic_swap_64
  17. pmix_atomic_fetch_add_64
  18. pmix_atomic_fetch_and_64
  19. pmix_atomic_fetch_or_64
  20. pmix_atomic_fetch_xor_64
  21. pmix_atomic_fetch_sub_64
  22. pmix_atomic_compare_exchange_strong_128
  23. pmix_atomic_compare_exchange_strong_128
  24. pmix_atomic_lock_init
  25. pmix_atomic_trylock
  26. pmix_atomic_lock
  27. pmix_atomic_unlock

   1 /* -*- Mode: C; c-basic-offset:4 ; indent-tabs-mode:nil -*- */
   2 /*
   3  * Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
   4  *                         University Research and Technology
   5  *                         Corporation.  All rights reserved.
   6  * Copyright (c) 2004-2013 The University of Tennessee and The University
   7  *                         of Tennessee Research Foundation.  All rights
   8  *                         reserved.
   9  * Copyright (c) 2004-2005 High Performance Computing Center Stuttgart,
  10  *                         University of Stuttgart.  All rights reserved.
  11  * Copyright (c) 2004-2005 The Regents of the University of California.
  12  *                         All rights reserved.
  13  * Copyright (c) 2011      Sandia National Laboratories. All rights reserved.
  14  * Copyright (c) 2014-2018 Los Alamos National Security, LLC. All rights
  15  *                         reserved.
  16  * Copyright (c) 2016-2017 Research Organization for Information Science
  17  *                         and Technology (RIST). All rights reserved.
  18  * Copyright (c) 2018      Triad National Security, LLC. All rights
  19  *                         reserved.
  20  * Copyright (c) 2018      Intel, Inc.  All rights reserved.
  21  * $COPYRIGHT$
  22  *
  23  * Additional copyrights may follow
  24  *
  25  * $HEADER$
  26  */
  27 
  28 #ifndef PMIX_SYS_ARCH_ATOMIC_H
  29 #define PMIX_SYS_ARCH_ATOMIC_H 1
  30 
  31 /**********************************************************************
  32  *
  33  * Memory Barriers
  34  *
  35  *********************************************************************/
  36 #define PMIX_HAVE_ATOMIC_MEM_BARRIER 1
  37 
  38 #define PMIX_HAVE_ATOMIC_MATH_32 1
  39 #define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1
  40 #define PMIX_HAVE_ATOMIC_ADD_32 1
  41 #define PMIX_HAVE_ATOMIC_AND_32 1
  42 #define PMIX_HAVE_ATOMIC_OR_32 1
  43 #define PMIX_HAVE_ATOMIC_XOR_32 1
  44 #define PMIX_HAVE_ATOMIC_SUB_32 1
  45 #define PMIX_HAVE_ATOMIC_SWAP_32 1
  46 #define PMIX_HAVE_ATOMIC_MATH_64 1
  47 #define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1
  48 #define PMIX_HAVE_ATOMIC_ADD_64 1
  49 #define PMIX_HAVE_ATOMIC_AND_64 1
  50 #define PMIX_HAVE_ATOMIC_OR_64 1
  51 #define PMIX_HAVE_ATOMIC_XOR_64 1
  52 #define PMIX_HAVE_ATOMIC_SUB_64 1
  53 #define PMIX_HAVE_ATOMIC_SWAP_64 1
  54 
  55 
/*
 * Full memory barrier: issues a sequentially-consistent fence so no load
 * or store may be reordered across this point in either direction.
 */
static inline void pmix_atomic_mb(void)
{
    __atomic_thread_fence (__ATOMIC_SEQ_CST);
}
  60 
/*
 * Read memory barrier.  On most architectures an acquire fence suffices;
 * on x86-64 a full fence is issued instead (see workaround note below).
 */
static inline void pmix_atomic_rmb(void)
{
#if PMIX_ASSEMBLY_ARCH == PMIX_X86_64
    /* work around a bug in older gcc versions where ACQUIRE seems to get
     * treated as a no-op instead of being equivalent to
     * __asm__ __volatile__("": : :"memory") */
    __atomic_thread_fence (__ATOMIC_SEQ_CST);
#else
    __atomic_thread_fence (__ATOMIC_ACQUIRE);
#endif
}
  72 
/*
 * Write memory barrier: issues a release fence so prior stores are not
 * reordered after this point.
 */
static inline void pmix_atomic_wmb(void)
{
    __atomic_thread_fence (__ATOMIC_RELEASE);
}
  77 
  78 #define PMIXMB() pmix_atomic_mb()
  79 
  80 /**********************************************************************
  81  *
  82  * Atomic math operations
  83  *
  84  *********************************************************************/
  85 
  86 /*
  87  * Suppress numerous (spurious ?) warnings from Oracle Studio compilers
  88  * see https://community.oracle.com/thread/3968347
  89  */ 
  90 #if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
  91 #pragma error_messages(off, E_ARG_INCOMPATIBLE_WITH_ARG_L)
  92 #endif
  93 
  94 static inline bool pmix_atomic_compare_exchange_strong_acq_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
  95 {
  96     return __atomic_compare_exchange_n (addr, oldval, newval, false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
  97 }
  98 
  99 
 100 static inline bool pmix_atomic_compare_exchange_strong_rel_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
 101 {
 102     return __atomic_compare_exchange_n (addr, oldval, newval, false, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
 103 }
 104 
 105 static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
 106 {
 107     return __atomic_compare_exchange_n (addr, oldval, newval, false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
 108 }
 109 
 110 static inline int32_t pmix_atomic_swap_32 (pmix_atomic_int32_t *addr, int32_t newval)
 111 {
 112     int32_t oldval;
 113     __atomic_exchange (addr, &newval, &oldval, __ATOMIC_RELAXED);
 114     return oldval;
 115 }
 116 
 117 static inline int32_t pmix_atomic_fetch_add_32(pmix_atomic_int32_t *addr, int32_t delta)
 118 {
 119     return __atomic_fetch_add (addr, delta, __ATOMIC_RELAXED);
 120 }
 121 
 122 static inline int32_t pmix_atomic_fetch_and_32(pmix_atomic_int32_t *addr, int32_t value)
 123 {
 124     return __atomic_fetch_and (addr, value, __ATOMIC_RELAXED);
 125 }
 126 
 127 static inline int32_t pmix_atomic_fetch_or_32(pmix_atomic_int32_t *addr, int32_t value)
 128 {
 129     return __atomic_fetch_or (addr, value, __ATOMIC_RELAXED);
 130 }
 131 
 132 static inline int32_t pmix_atomic_fetch_xor_32(pmix_atomic_int32_t *addr, int32_t value)
 133 {
 134     return __atomic_fetch_xor (addr, value, __ATOMIC_RELAXED);
 135 }
 136 
 137 static inline int32_t pmix_atomic_fetch_sub_32(pmix_atomic_int32_t *addr, int32_t delta)
 138 {
 139     return __atomic_fetch_sub (addr, delta, __ATOMIC_RELAXED);
 140 }
 141 
 142 static inline bool pmix_atomic_compare_exchange_strong_acq_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
 143 {
 144     return __atomic_compare_exchange_n (addr, oldval, newval, false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
 145 }
 146 
 147 static inline bool pmix_atomic_compare_exchange_strong_rel_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
 148 {
 149     return __atomic_compare_exchange_n (addr, oldval, newval, false, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
 150 }
 151 
 152 
 153 static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
 154 {
 155     return __atomic_compare_exchange_n (addr, oldval, newval, false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
 156 }
 157 
 158 static inline int64_t pmix_atomic_swap_64 (pmix_atomic_int64_t *addr, int64_t newval)
 159 {
 160     int64_t oldval;
 161     __atomic_exchange (addr, &newval, &oldval, __ATOMIC_RELAXED);
 162     return oldval;
 163 }
 164 
 165 static inline int64_t pmix_atomic_fetch_add_64(pmix_atomic_int64_t *addr, int64_t delta)
 166 {
 167     return __atomic_fetch_add (addr, delta, __ATOMIC_RELAXED);
 168 }
 169 
 170 static inline int64_t pmix_atomic_fetch_and_64(pmix_atomic_int64_t *addr, int64_t value)
 171 {
 172     return __atomic_fetch_and (addr, value, __ATOMIC_RELAXED);
 173 }
 174 
 175 static inline int64_t pmix_atomic_fetch_or_64(pmix_atomic_int64_t *addr, int64_t value)
 176 {
 177     return __atomic_fetch_or (addr, value, __ATOMIC_RELAXED);
 178 }
 179 
 180 static inline int64_t pmix_atomic_fetch_xor_64(pmix_atomic_int64_t *addr, int64_t value)
 181 {
 182     return __atomic_fetch_xor (addr, value, __ATOMIC_RELAXED);
 183 }
 184 
 185 static inline int64_t pmix_atomic_fetch_sub_64(pmix_atomic_int64_t *addr, int64_t delta)
 186 {
 187     return __atomic_fetch_sub (addr, delta, __ATOMIC_RELAXED);
 188 }
 189 
 190 #if PMIX_HAVE_GCC_BUILTIN_CSWAP_INT128
 191 
 192 #define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_128 1
 193 
 194 static inline bool pmix_atomic_compare_exchange_strong_128 (pmix_atomic_int128_t *addr,
 195                                                             pmix_int128_t *oldval, pmix_int128_t newval)
 196 {
 197     return __atomic_compare_exchange_n (addr, oldval, newval, false,
 198                                         __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
 199 }
 200 
 201 #elif defined(PMIX_HAVE_SYNC_BUILTIN_CSWAP_INT128) && PMIX_HAVE_SYNC_BUILTIN_CSWAP_INT128
 202 
 203 #define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_128 1
 204 
 205 /* __atomic version is not lock-free so use legacy __sync version */
 206 
 207 static inline bool pmix_atomic_compare_exchange_strong_128 (pmix_atomic_pmix_int128_t *addr,
 208                                                             pmix_int128_t *oldval, pmix_int128_t newval)
 209 {
 210     pmix_int128_t prev = __sync_val_compare_and_swap (addr, *oldval, newval);
 211     bool ret = prev == *oldval;
 212     *oldval = prev;
 213     return ret;
 214 }
 215 
 216 #endif
 217 
 218 #if defined(__HLE__)
 219 
 220 #include <immintrin.h>
 221 
 222 #define PMIX_HAVE_ATOMIC_SPINLOCKS 1
 223 
/*
 * Initialize the spinlock word to the given value
 * (PMIX_ATOMIC_LOCK_LOCKED / PMIX_ATOMIC_LOCK_UNLOCKED).
 * Plain (non-atomic) store: callers must not race with this.
 */
static inline void pmix_atomic_lock_init (pmix_atomic_lock_t* lock, int32_t value)
{
   lock->u.lock = value;
}
 228 
 229 static inline int pmix_atomic_trylock(pmix_atomic_lock_t *lock)
 230 {
 231     int ret = __atomic_exchange_n (&lock->u.lock, PMIX_ATOMIC_LOCK_LOCKED,
 232                                    __ATOMIC_ACQUIRE | __ATOMIC_HLE_ACQUIRE);
 233     if (PMIX_ATOMIC_LOCK_LOCKED == ret) {
 234         /* abort the transaction */
 235         _mm_pause ();
 236         return 1;
 237     }
 238 
 239     return 0;
 240 }
 241 
 242 static inline void pmix_atomic_lock (pmix_atomic_lock_t *lock)
 243 {
 244     while (PMIX_ATOMIC_LOCK_LOCKED == __atomic_exchange_n (&lock->u.lock, PMIX_ATOMIC_LOCK_LOCKED,
 245                                                       __ATOMIC_ACQUIRE | __ATOMIC_HLE_ACQUIRE)) {
 246         /* abort the transaction */
 247         _mm_pause ();
 248     }
 249 }
 250 
/*
 * Release the lock: HLE-hinted release store of PMIX_ATOMIC_LOCK_UNLOCKED,
 * making prior critical-section writes visible to the next acquirer.
 */
static inline void pmix_atomic_unlock (pmix_atomic_lock_t *lock)
{
    __atomic_store_n (&lock->u.lock, PMIX_ATOMIC_LOCK_UNLOCKED,
                       __ATOMIC_RELEASE | __ATOMIC_HLE_RELEASE);
}
 256 
 257 #endif
 258 
 259 #if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
 260 #pragma error_messages(default, E_ARG_INCOMPATIBLE_WITH_ARG_L)
 261 #endif
 262 
 263 #endif /* ! PMIX_SYS_ARCH_ATOMIC_H */

/* [<][>][^][v][top][bottom][index][help] */