This source file includes the following definitions:
- pmix_atomic_mb
- pmix_atomic_rmb
- pmix_atomic_wmb
- pmix_atomic_isync
- pmix_atomic_compare_exchange_strong_32
- pmix_atomic_compare_exchange_strong_64
- pmix_atomic_compare_exchange_strong_128
- pmix_atomic_swap_32
- pmix_atomic_swap_64
- pmix_atomic_fetch_add_32
- pmix_atomic_fetch_add_64
- pmix_atomic_fetch_sub_32
- pmix_atomic_fetch_sub_64
#ifndef PMIX_SYS_ARCH_ATOMIC_H
#define PMIX_SYS_ARCH_ATOMIC_H 1

/*
 * On x86_64 the atomics are built from "lock"-prefixed read-modify-write
 * instructions; SMPLOCK supplies that prefix. PMIXMB() is a compiler-only
 * barrier: it keeps the compiler from reordering memory accesses across it
 * but emits no fence instruction.
 */
#define SMPLOCK "lock; "
#define PMIXMB() __asm__ __volatile__("": : :"memory")

/* Capabilities provided by this architecture-specific implementation */
#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1

#if PMIX_GCC_INLINE_ASSEMBLY

/* Full memory barrier. Only a compiler barrier is emitted here; no fence
 * instruction is generated. */
static inline void pmix_atomic_mb(void)
{
    PMIXMB();
}

/* Read memory barrier. x86_64 does not reorder loads with other loads,
 * so a compiler barrier suffices. */
static inline void pmix_atomic_rmb(void)
{
    PMIXMB();
}

/* Write memory barrier. x86_64 does not reorder stores with other
 * stores, so a compiler barrier suffices. */
static inline void pmix_atomic_wmb(void)
{
    PMIXMB();
}

/* Instruction synchronization barrier: nothing to do on x86_64. */
static inline void pmix_atomic_isync(void)
{
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */
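
/*
 * Usage sketch (illustrative only, not part of this header): publishing
 * data through a flag with the write/read barrier pair. The names
 * `payload` and `ready` are hypothetical.
 *
 *     static int32_t payload;
 *     static volatile int32_t ready;
 *
 *     // producer
 *     payload = 42;
 *     pmix_atomic_wmb();   // keep the payload store ahead of the flag store
 *     ready = 1;
 *
 *     // consumer
 *     while (0 == ready) { ; }
 *     pmix_atomic_rmb();   // keep the flag load ahead of the payload load
 *     assert(42 == payload);
 */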

#if PMIX_GCC_INLINE_ASSEMBLY

/*
 * Atomically compares *addr with *oldval: on a match, *addr is set to
 * newval and true is returned; otherwise *oldval is updated with the
 * value observed at *addr and false is returned. cmpxchg compares
 * against %eax, hence the "a" constraint on *oldval.
 */
static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    unsigned char ret;
    __asm__ __volatile__ (
                        SMPLOCK "cmpxchgl %3,%2   \n\t"
                                "sete     %0      \n\t"
                        : "=qm" (ret), "+a" (*oldval), "+m" (*addr)
                        : "q" (newval)
                        : "memory", "cc");

    return (bool) ret;
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

/* Lock-prefixed instructions are full barriers on x86_64, so the acquire
 * and release variants map directly onto the strong exchange. */
#define pmix_atomic_compare_exchange_strong_acq_32 pmix_atomic_compare_exchange_strong_32
#define pmix_atomic_compare_exchange_strong_rel_32 pmix_atomic_compare_exchange_strong_32
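
/*
 * Usage sketch (illustrative only): the canonical compare-exchange retry
 * loop, here raising a 32-bit value to a candidate maximum. The helper
 * name `atomic_max_32` is hypothetical.
 *
 *     static inline void atomic_max_32(pmix_atomic_int32_t *addr, int32_t val)
 *     {
 *         int32_t old = *addr;
 *         while (old < val &&
 *                !pmix_atomic_compare_exchange_strong_32(addr, &old, val)) {
 *             // on failure, old holds the freshly observed value
 *         }
 *     }
 */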

#if PMIX_GCC_INLINE_ASSEMBLY

/*
 * 64-bit variant of the compare-exchange above, using cmpxchgq and the
 * full %rax for the comparison value.
 */
static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    unsigned char ret;
    __asm__ __volatile__ (
                        SMPLOCK "cmpxchgq %3,%2   \n\t"
                                "sete     %0      \n\t"
                        : "=qm" (ret), "+a" (*oldval), "+m" (*((pmix_atomic_long_t *)addr))
                        : "q" (newval)
                        : "memory", "cc");

    return (bool) ret;
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#define pmix_atomic_compare_exchange_strong_acq_64 pmix_atomic_compare_exchange_strong_64
#define pmix_atomic_compare_exchange_strong_rel_64 pmix_atomic_compare_exchange_strong_64
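
/*
 * Usage sketch (illustrative only): packing a 32-bit index with a 32-bit
 * generation tag into one 64-bit word, so both change in a single
 * compare-exchange (a common ABA-avoidance idiom). All names are
 * hypothetical.
 *
 *     typedef union {
 *         int64_t raw;
 *         struct { int32_t index; int32_t tag; } s;
 *     } slot_t;
 *
 *     static inline bool slot_replace(pmix_atomic_int64_t *addr,
 *                                     slot_t expected, int32_t new_index)
 *     {
 *         slot_t next;
 *         next.s.index = new_index;
 *         next.s.tag   = expected.s.tag + 1;   // bump the generation
 *         return pmix_atomic_compare_exchange_strong_64(addr, &expected.raw,
 *                                                       next.raw);
 *     }
 */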

#if PMIX_GCC_INLINE_ASSEMBLY && PMIX_HAVE_CMPXCHG16B && HAVE_PMIX_INT128_T

static inline bool pmix_atomic_compare_exchange_strong_128 (pmix_atomic_int128_t *addr, pmix_int128_t *oldval, pmix_int128_t newval)
{
    unsigned char ret;

    /* cmpxchg16b compares rdx:rax with the 16 bytes at (%rsi); on a match
     * it stores rcx:rbx there, otherwise it loads the observed value into
     * rdx:rax. The constraints below pin the two halves of oldval and
     * newval to exactly those registers. addr must be 16-byte aligned. */
    __asm__ __volatile__ (SMPLOCK "cmpxchg16b (%%rsi)   \n\t"
                                  "sete     %0      \n\t"
                          : "=qm" (ret), "+a" (((int64_t *)oldval)[0]), "+d" (((int64_t *)oldval)[1])
                          : "S" (addr), "b" (((int64_t *)&newval)[0]), "c" (((int64_t *)&newval)[1])
                          : "memory", "cc");

    return (bool) ret;
}

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_128 1

#endif /* PMIX_GCC_INLINE_ASSEMBLY && PMIX_HAVE_CMPXCHG16B && HAVE_PMIX_INT128_T */
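
/*
 * Usage sketch (illustrative only): a 128-bit compare-exchange lets a
 * pointer travel with a modification counter, the textbook ABA defense
 * for lock-free lists. Names are hypothetical; the word must be 16-byte
 * aligned for cmpxchg16b.
 *
 *     typedef struct { void *ptr; uint64_t count; }
 *         __attribute__ ((aligned (16))) counted_ptr_t;
 *
 *     counted_ptr_t expected = head;                       // snapshot
 *     counted_ptr_t desired  = { new_node, expected.count + 1 };
 *     pmix_atomic_compare_exchange_strong_128((pmix_atomic_int128_t *) &head,
 *                                             (pmix_int128_t *) &expected,
 *                                             *(pmix_int128_t *) &desired);
 */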

/* Atomic exchange (swap) */
#if PMIX_GCC_INLINE_ASSEMBLY

#define PMIX_HAVE_ATOMIC_SWAP_32 1

#define PMIX_HAVE_ATOMIC_SWAP_64 1

/* Atomically stores newval in *addr and returns the previous value. xchg
 * with a memory operand carries an implicit lock, so no SMPLOCK prefix is
 * needed. */
static inline int32_t pmix_atomic_swap_32(pmix_atomic_int32_t *addr,
                                          int32_t newval)
{
    int32_t oldval;

    __asm__ __volatile__("xchg %1, %0" :
                         "=r" (oldval), "+m" (*addr) :
                         "0" (newval) :
                         "memory");
    return oldval;
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#if PMIX_GCC_INLINE_ASSEMBLY

/* 64-bit variant of the swap above, using the quadword form of xchg. */
static inline int64_t pmix_atomic_swap_64(pmix_atomic_int64_t *addr,
                                          int64_t newval)
{
    int64_t oldval;

    __asm__ __volatile__("xchgq %1, %0" :
                         "=r" (oldval), "+m" (*addr) :
                         "0" (newval) :
                         "memory");
    return oldval;
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */
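
/*
 * Usage sketch (illustrative only): swap gives "act exactly once"
 * semantics, because only one thread can observe the old value.
 * `shutdown_flag` and `do_shutdown` are hypothetical.
 *
 *     static pmix_atomic_int32_t shutdown_flag = 0;
 *
 *     if (0 == pmix_atomic_swap_32(&shutdown_flag, 1)) {
 *         do_shutdown();   // runs in exactly one thread
 *     }
 */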

/* Atomic math: fetch-and-add / fetch-and-subtract via "lock xadd" */
#if PMIX_GCC_INLINE_ASSEMBLY

#define PMIX_HAVE_ATOMIC_MATH_32 1
#define PMIX_HAVE_ATOMIC_MATH_64 1

#define PMIX_HAVE_ATOMIC_ADD_32 1

/**
 * Atomically adds i to *v and returns the value *v held immediately
 * before the addition.
 */
static inline int32_t pmix_atomic_fetch_add_32(pmix_atomic_int32_t *v, int i)
{
    int ret = i;
    __asm__ __volatile__(
                        SMPLOCK "xaddl %1,%0"
                        : "+m" (*v), "+r" (ret)
                        :
                        : "memory", "cc");
    return ret;
}
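
/*
 * Usage sketch (illustrative only): fetch-add as a reference-count
 * increment; the return value is the count before the increment.
 * `refcount` is hypothetical.
 *
 *     static pmix_atomic_int32_t refcount = 1;
 *
 *     (void) pmix_atomic_fetch_add_32(&refcount, 1);   // take a reference
 */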

#define PMIX_HAVE_ATOMIC_ADD_64 1

/**
 * Atomically adds i to *v and returns the value *v held immediately
 * before the addition (64-bit xadd).
 */
static inline int64_t pmix_atomic_fetch_add_64(pmix_atomic_int64_t *v, int64_t i)
{
    int64_t ret = i;
    __asm__ __volatile__(
                        SMPLOCK "xaddq %1,%0"
                        : "+m" (*v), "+r" (ret)
                        :
                        : "memory", "cc");
    return ret;
}
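
/*
 * Usage sketch (illustrative only): a 64-bit fetch-add makes a simple
 * unique-ID generator; 64 bits will not wrap at any realistic rate.
 * `next_id` is hypothetical.
 *
 *     static pmix_atomic_int64_t next_id = 0;
 *
 *     int64_t my_id = pmix_atomic_fetch_add_64(&next_id, 1);
 */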

#define PMIX_HAVE_ATOMIC_SUB_32 1

/**
 * Atomically subtracts i from *v and returns the value *v held before
 * the subtraction, implemented as an xadd of -i.
 */
static inline int32_t pmix_atomic_fetch_sub_32(pmix_atomic_int32_t *v, int i)
{
    int ret = -i;
    __asm__ __volatile__(
                        SMPLOCK "xaddl %1,%0"
                        : "+m" (*v), "+r" (ret)
                        :
                        : "memory", "cc");
    return ret;
}

#define PMIX_HAVE_ATOMIC_SUB_64 1

/**
 * Atomically subtracts i from *v and returns the value *v held before
 * the subtraction (64-bit xadd of -i).
 */
static inline int64_t pmix_atomic_fetch_sub_64(pmix_atomic_int64_t *v, int64_t i)
{
    int64_t ret = -i;
    __asm__ __volatile__(
                        SMPLOCK "xaddq %1,%0"
                        : "+m" (*v), "+r" (ret)
                        :
                        : "memory", "cc");
    return ret;
}
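
/*
 * Usage sketch (illustrative only): dropping a reference and detecting the
 * final release. Because fetch-sub returns the pre-subtraction value, the
 * count reached zero exactly when 1 comes back. Names are hypothetical.
 *
 *     if (1 == pmix_atomic_fetch_sub_32(&refcount, 1)) {
 *         destroy_object();   // last reference released
 *     }
 */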

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

#endif /* PMIX_SYS_ARCH_ATOMIC_H */