root/opal/mca/pmix/pmix4x/pmix/src/atomics/sys/x86_64/atomic.h

/* [<][>][^][v][top][bottom][index][help] */

INCLUDED FROM


DEFINITIONS

This source file includes the following definitions.
  1. pmix_atomic_mb
  2. pmix_atomic_rmb
  3. pmix_atomic_wmb
  4. pmix_atomic_isync
  5. pmix_atomic_compare_exchange_strong_32
  6. pmix_atomic_compare_exchange_strong_64
  7. pmix_atomic_compare_exchange_strong_128
  8. pmix_atomic_swap_32
  9. pmix_atomic_swap_64
  10. pmix_atomic_fetch_add_32
  11. pmix_atomic_fetch_add_64
  12. pmix_atomic_fetch_sub_32
  13. pmix_atomic_fetch_sub_64

   1 /* -*- Mode: C; c-basic-offset:4 ; indent-tabs-mode:nil -*- */
   2 /*
   3  * Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
   4  *                         University Research and Technology
   5  *                         Corporation.  All rights reserved.
   6  * Copyright (c) 2004-2010 The University of Tennessee and The University
   7  *                         of Tennessee Research Foundation.  All rights
   8  *                         reserved.
   9  * Copyright (c) 2004-2005 High Performance Computing Center Stuttgart,
  10  *                         University of Stuttgart.  All rights reserved.
  11  * Copyright (c) 2004-2005 The Regents of the University of California.
  12  *                         All rights reserved.
  13  * Copyright (c) 2007      Sun Microsystems, Inc.  All rights reserverd.
  14  * Copyright (c) 2012-2018 Los Alamos National Security, LLC. All rights
  15  *                         reserved.
  16  * Copyright (c) 2016-2017 Research Organization for Information Science
  17  *                         and Technology (RIST). All rights reserved.
  18  * Copyright (c) 2018      Intel, Inc.  All rights reserved.
  19  * $COPYRIGHT$
  20  *
  21  * Additional copyrights may follow
  22  *
  23  * $HEADER$
  24  */
  25 #ifndef PMIX_SYS_ARCH_ATOMIC_H
  26 #define PMIX_SYS_ARCH_ATOMIC_H 1
  27 
  28 /*
  29  * On x86_64, we use cmpxchg.
  30  */
  31 
  32 
  33 #define SMPLOCK "lock; "
  34 #define PMIXMB() __asm__ __volatile__("": : :"memory")
  35 
  36 
  37 /**********************************************************************
  38  *
  39  * Define constants for AMD64 / x86_64 / EM64T / ...
  40  *
  41  *********************************************************************/
  42 #define PMIX_HAVE_ATOMIC_MEM_BARRIER 1
  43 
  44 #define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1
  45 
  46 #define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1
  47 
  48 /**********************************************************************
  49  *
  50  * Memory Barriers
  51  *
  52  *********************************************************************/
  53 #if PMIX_GCC_INLINE_ASSEMBLY
  54 
  55 static inline void pmix_atomic_mb(void)
  56 {
  57     PMIXMB();
  58 }
  59 
  60 
  61 static inline void pmix_atomic_rmb(void)
  62 {
  63     PMIXMB();
  64 }
  65 
  66 
  67 static inline void pmix_atomic_wmb(void)
  68 {
  69     PMIXMB();
  70 }
  71 
/* Instruction synchronization barrier.  Intentionally a no-op on
 * x86_64; architectures that need one (e.g. PowerPC's isync) provide a
 * real implementation in their own sys headers. */
static inline void pmix_atomic_isync(void)
{
}
  75 
  76 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
  77 
  78 
  79 /**********************************************************************
  80  *
  81  * Atomic math operations
  82  *
  83  *********************************************************************/
  84 #if PMIX_GCC_INLINE_ASSEMBLY
  85 
  86 static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
  87 {
  88    unsigned char ret;
  89    __asm__ __volatile__ (
  90                        SMPLOCK "cmpxchgl %3,%2   \n\t"
  91                                "sete     %0      \n\t"
  92                        : "=qm" (ret), "+a" (*oldval), "+m" (*addr)
  93                        : "q"(newval)
  94                        : "memory", "cc");
  95 
  96    return (bool) ret;
  97 }
  98 
  99 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
 100 
 101 #define pmix_atomic_compare_exchange_strong_acq_32 pmix_atomic_compare_exchange_strong_32
 102 #define pmix_atomic_compare_exchange_strong_rel_32 pmix_atomic_compare_exchange_strong_32
 103 
 104 #if PMIX_GCC_INLINE_ASSEMBLY
 105 
 106 static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
 107 {
 108    unsigned char ret;
 109    __asm__ __volatile__ (
 110                        SMPLOCK "cmpxchgq %3,%2   \n\t"
 111                                "sete     %0      \n\t"
 112                        : "=qm" (ret), "+a" (*oldval), "+m" (*((pmix_atomic_long_t *)addr))
 113                        : "q"(newval)
 114                        : "memory", "cc"
 115                        );
 116 
 117    return (bool) ret;
 118 }
 119 
 120 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
 121 
 122 #define pmix_atomic_compare_exchange_strong_acq_64 pmix_atomic_compare_exchange_strong_64
 123 #define pmix_atomic_compare_exchange_strong_rel_64 pmix_atomic_compare_exchange_strong_64
 124 
 125 #if PMIX_GCC_INLINE_ASSEMBLY && PMIX_HAVE_CMPXCHG16B && HAVE_PMIX_INT128_T
 126 
/**
 * Atomically compare-and-swap a 128-bit value using cmpxchg16b.
 *
 * @param addr   [IN/OUT] target location; the ISA requires it to be
 *                        16-byte aligned
 * @param oldval [IN/OUT] expected value; receives the observed value
 *                        when the exchange fails
 * @param newval [IN]     value to store on success
 * @return true if the exchange took place, false otherwise
 *
 * oldval/newval are accessed as two int64_t halves to feed the
 * register pairs the instruction requires.
 */
static inline bool pmix_atomic_compare_exchange_strong_128 (pmix_atomic_int128_t *addr, pmix_int128_t *oldval, pmix_int128_t newval)
{
    unsigned char ret;

    /* cmpxchg16b compares the value at the address with rdx:rax (high:low). if the values are
     * the same the contents of rcx:rbx are stored at the address. in all cases the value
     * observed at the address is returned in rdx:rax, updating *oldval on failure.
     * the address is passed through rsi ("S") and dereferenced explicitly in the template. */
    __asm__ __volatile__ (SMPLOCK "cmpxchg16b (%%rsi)   \n\t"
                                  "sete     %0      \n\t"
                          : "=qm" (ret), "+a" (((int64_t *)oldval)[0]), "+d" (((int64_t *)oldval)[1])
                          : "S" (addr), "b" (((int64_t *)&newval)[0]), "c" (((int64_t *)&newval)[1])
                          : "memory", "cc");

    return (bool) ret;
}
 142 
 143 #define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_128 1
 144 
 145 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
 146 
 147 
 148 #if PMIX_GCC_INLINE_ASSEMBLY
 149 
 150 #define PMIX_HAVE_ATOMIC_SWAP_32 1
 151 
 152 #define PMIX_HAVE_ATOMIC_SWAP_64 1
 153 
 154 static inline int32_t pmix_atomic_swap_32( pmix_atomic_int32_t *addr,
 155                                            int32_t newval)
 156 {
 157     int32_t oldval;
 158 
 159     __asm__ __volatile__("xchg %1, %0" :
 160                          "=r" (oldval), "+m" (*addr) :
 161                          "0" (newval) :
 162                          "memory");
 163     return oldval;
 164 }
 165 
 166 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
 167 
 168 #if PMIX_GCC_INLINE_ASSEMBLY
 169 
 170 static inline int64_t pmix_atomic_swap_64( pmix_atomic_int64_t *addr,
 171                                            int64_t newval)
 172 {
 173     int64_t oldval;
 174 
 175     __asm__ __volatile__("xchgq %1, %0" :
 176                          "=r" (oldval), "+m" (*addr) :
 177                          "0" (newval) :
 178                          "memory");
 179     return oldval;
 180 }
 181 
 182 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
 183 
 184 
 185 
 186 #if PMIX_GCC_INLINE_ASSEMBLY
 187 
 188 #define PMIX_HAVE_ATOMIC_MATH_32 1
 189 #define PMIX_HAVE_ATOMIC_MATH_64 1
 190 
 191 #define PMIX_HAVE_ATOMIC_ADD_32 1
 192 
 193 /**
 194  * atomic_add - add integer to atomic variable
 195  * @i: integer value to add
 196  * @v: pointer of type int
 197  *
 198  * Atomically adds @i to @v.
 199  */
 200 static inline int32_t pmix_atomic_fetch_add_32(pmix_atomic_int32_t* v, int i)
 201 {
 202     int ret = i;
 203    __asm__ __volatile__(
 204                         SMPLOCK "xaddl %1,%0"
 205                         :"+m" (*v), "+r" (ret)
 206                         :
 207                         :"memory", "cc"
 208                         );
 209    return ret;
 210 }
 211 
 212 #define PMIX_HAVE_ATOMIC_ADD_64 1
 213 
 214 /**
 215  * atomic_add - add integer to atomic variable
 216  * @i: integer value to add
 217  * @v: pointer of type int
 218  *
 219  * Atomically adds @i to @v.
 220  */
 221 static inline int64_t pmix_atomic_fetch_add_64(pmix_atomic_int64_t* v, int64_t i)
 222 {
 223     int64_t ret = i;
 224    __asm__ __volatile__(
 225                         SMPLOCK "xaddq %1,%0"
 226                         :"+m" (*v), "+r" (ret)
 227                         :
 228                         :"memory", "cc"
 229                         );
 230    return ret;
 231 }
 232 
 233 #define PMIX_HAVE_ATOMIC_SUB_32 1
 234 
 235 /**
 236  * atomic_sub - subtract the atomic variable
 237  * @i: integer value to subtract
 238  * @v: pointer of type int
 239  *
 240  * Atomically subtracts @i from @v.
 241  */
 242 static inline int32_t pmix_atomic_fetch_sub_32(pmix_atomic_int32_t* v, int i)
 243 {
 244     int ret = -i;
 245    __asm__ __volatile__(
 246                         SMPLOCK "xaddl %1,%0"
 247                         :"+m" (*v), "+r" (ret)
 248                         :
 249                         :"memory", "cc"
 250                         );
 251    return ret;
 252 }
 253 
 254 #define PMIX_HAVE_ATOMIC_SUB_64 1
 255 
 256 /**
 257  * atomic_sub - subtract the atomic variable
 258  * @i: integer value to subtract
 259  * @v: pointer of type int
 260  *
 261  * Atomically subtracts @i from @v.
 262  */
 263 static inline int64_t pmix_atomic_fetch_sub_64(pmix_atomic_int64_t* v, int64_t i)
 264 {
 265     int64_t ret = -i;
 266    __asm__ __volatile__(
 267                         SMPLOCK "xaddq %1,%0"
 268                         :"+m" (*v), "+r" (ret)
 269                         :
 270                         :"memory", "cc"
 271                         );
 272    return ret;
 273 }
 274 
 275 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
 276 
 277 #endif /* ! PMIX_SYS_ARCH_ATOMIC_H */

/* [<][>][^][v][top][bottom][index][help] */