1 /* -*- Mode: C; c-basic-offset:4 ; indent-tabs-mode:nil -*- */
2 /*
3 * Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
4 * University Research and Technology
5 * Corporation. All rights reserved.
6 * Copyright (c) 2004-2010 The University of Tennessee and The University
7 * of Tennessee Research Foundation. All rights
8 * reserved.
9 * Copyright (c) 2004-2005 High Performance Computing Center Stuttgart,
10 * University of Stuttgart. All rights reserved.
11 * Copyright (c) 2004-2005 The Regents of the University of California.
12 * All rights reserved.
13 * Copyright (c) 2007-2010 Oracle and/or its affiliates. All rights reserved.
14 * Copyright (c) 2015 Research Organization for Information Science
15 * and Technology (RIST). All rights reserved.
16 * Copyright (c) 2015-2018 Los Alamos National Security, LLC. All rights
17 * reserved.
18 * Copyright (c) 2018 Intel, Inc. All rights reserved.
19 * $COPYRIGHT$
20 *
21 * Additional copyrights may follow
22 *
23 * $HEADER$
24 */
25
26 #ifndef PMIX_SYS_ARCH_ATOMIC_H
27 #define PMIX_SYS_ARCH_ATOMIC_H 1
28
29 /*
30 * On ia32, we use cmpxchg.
31 */
32
/* String prefix spliced in front of RMW instructions to make them
 * atomic on SMP systems (the x86 "lock" prefix). */
#define SMPLOCK "lock; "
/* Compiler-only barrier: forbids the compiler from moving memory
 * accesses across this point.  Emits no machine instruction. */
#define PMIXMB() __asm__ __volatile__("": : :"memory")
35
36
37 /**********************************************************************
38 *
39 * Define constants for IA32
40 *
41 *********************************************************************/
/* Memory barriers are provided below (mb/rmb/wmb/isync). */
#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

/* 32-bit compare-and-swap via lock-prefixed cmpxchgl. */
#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1

/* 32-bit fetch-add / fetch-sub via lock-prefixed xaddl. */
#define PMIX_HAVE_ATOMIC_MATH_32 1
#define PMIX_HAVE_ATOMIC_ADD_32 1
#define PMIX_HAVE_ATOMIC_SUB_32 1
49
50 /**********************************************************************
51 *
52 * Memory Barriers
53 *
54 *********************************************************************/
55 #if PMIX_GCC_INLINE_ASSEMBLY
56
57 static inline void pmix_atomic_mb(void)
58 {
59 PMIXMB();
60 }
61
62
/*
 * Read (load-load) barrier.  IA32 does not reorder loads with other
 * loads, so it is enough to stop the compiler from moving loads across
 * this point; no fence instruction is emitted.
 */
static inline void pmix_atomic_rmb(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
67
68
/*
 * Write (store-store) barrier.  IA32 does not reorder ordinary stores
 * with other stores, so a compiler-only barrier is sufficient; no
 * fence instruction is emitted.
 */
static inline void pmix_atomic_wmb(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
73
/*
 * Instruction-stream synchronization.  Intentionally a no-op on IA32;
 * this stub exists only to satisfy the common atomics interface that
 * other architectures (e.g. PowerPC isync) implement for real.
 */
static inline void pmix_atomic_isync(void)
{
}
77
78 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
79
80
81 /**********************************************************************
82 *
83 * Atomic math operations
84 *
85 *********************************************************************/
86 #if PMIX_GCC_INLINE_ASSEMBLY
87
/**
 * Atomic 32-bit compare-and-exchange (strong).
 *
 * If *addr equals *oldval, store newval into *addr and return true.
 * Otherwise return false and write the value actually observed in
 * *addr back into *oldval: cmpxchgl leaves the observed value in EAX,
 * which the "+a" constraint ties to *oldval.
 *
 * @param addr    location to update
 * @param oldval  in: expected value; out: value found, on failure
 * @param newval  value to store on success
 * @return true iff the exchange took place
 *
 * "sete %0" captures ZF (set by cmpxchg on success) into ret; the
 * "=qm" constraint allows either a byte register or memory for it.
 */
static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    unsigned char ret;
    __asm__ __volatile__ (
                          SMPLOCK "cmpxchgl %3,%2 \n\t"
                          "sete %0 \n\t"
                          : "=qm" (ret), "+a" (*oldval), "+m" (*addr)
                          : "q"(newval)
                          : "memory", "cc");

    return (bool) ret;
}
100
101 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
102
/* A lock-prefixed cmpxchg acts as a full barrier on IA32, so the
 * acquire- and release-ordered variants can simply reuse the strong
 * (sequentially consistent) implementation. */
#define pmix_atomic_compare_exchange_strong_acq_32 pmix_atomic_compare_exchange_strong_32
#define pmix_atomic_compare_exchange_strong_rel_32 pmix_atomic_compare_exchange_strong_32
105
106 #if PMIX_GCC_INLINE_ASSEMBLY
107
#define PMIX_HAVE_ATOMIC_SWAP_32 1

/**
 * Atomically exchange the 32-bit value at *addr with newval.
 *
 * No SMPLOCK prefix is needed: xchg with a memory operand asserts the
 * bus lock implicitly on IA32.
 *
 * @param addr    location to swap
 * @param newval  value to store into *addr
 * @return the value previously held in *addr
 *
 * NOTE(review): *addr appears both as the "=m" output and a matching
 * "m" input; deliberate legacy constraint style — a "+m" read-write
 * operand would express the same thing, but leave it as-is.
 */
static inline int32_t pmix_atomic_swap_32( pmix_atomic_int32_t *addr,
                                           int32_t newval)
{
    int32_t oldval;

    __asm__ __volatile__("xchg %1, %0" :
                         "=r" (oldval), "=m" (*addr) :
                         "0" (newval), "m" (*addr) :
                         "memory");
    return oldval;
}
121
122 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
123
124
125 #if PMIX_GCC_INLINE_ASSEMBLY
126
127 /**
128 * atomic_add - add integer to atomic variable
129 * @i: integer value to add
130 * @v: pointer of type int
131 *
132 * Atomically adds @i to @v.
133 */
134 static inline int32_t pmix_atomic_fetch_add_32(pmix_atomic_int32_t* v, int i)
135 {
136 int ret = i;
137 __asm__ __volatile__(
138 SMPLOCK "xaddl %1,%0"
139 :"+m" (*v), "+r" (ret)
140 :
141 :"memory", "cc"
142 );
143 return ret;
144 }
145
146
147 /**
148 * atomic_sub - subtract the atomic variable
149 * @i: integer value to subtract
150 * @v: pointer of type int
151 *
152 * Atomically subtracts @i from @v.
153 */
154 static inline int32_t pmix_atomic_fetch_sub_32(pmix_atomic_int32_t* v, int i)
155 {
156 int ret = -i;
157 __asm__ __volatile__(
158 SMPLOCK "xaddl %1,%0"
159 :"+m" (*v), "+r" (ret)
160 :
161 :"memory", "cc"
162 );
163 return ret;
164 }
165
166 #endif /* PMIX_GCC_INLINE_ASSEMBLY */
167
168 #endif /* ! PMIX_SYS_ARCH_ATOMIC_H */