/*
 * Atomic operations for ARM: memory barriers, 32- and 64-bit
 * compare-and-exchange, and 32-bit fetch-add/fetch-sub.
 *
 * This source file includes the following definitions:
 * - pmix_atomic_mb
 * - pmix_atomic_rmb
 * - pmix_atomic_wmb
 * - pmix_atomic_isync
 * - pmix_atomic_compare_exchange_strong_32
 * - pmix_atomic_compare_exchange_strong_acq_32
 * - pmix_atomic_compare_exchange_strong_rel_32
 * - pmix_atomic_compare_exchange_strong_64
 * - pmix_atomic_compare_exchange_strong_acq_64
 * - pmix_atomic_compare_exchange_strong_rel_64
 * - pmix_atomic_fetch_add_32
 * - pmix_atomic_fetch_sub_32
 */
#ifndef PMIX_SYS_ARCH_ATOMIC_H
#define PMIX_SYS_ARCH_ATOMIC_H 1

#if (PMIX_ASM_ARM_VERSION >= 7)

#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

/* ARMv7 provides the dmb (data memory barrier) instruction */
#define PMIXMB() __asm__ __volatile__ ("dmb" : : : "memory")
#define PMIXRMB() __asm__ __volatile__ ("dmb" : : : "memory")
#define PMIXWMB() __asm__ __volatile__ ("dmb" : : : "memory")

#elif (PMIX_ASM_ARM_VERSION == 6)

#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

/* ARMv6 has no dmb instruction; issue the equivalent CP15 data
 * memory barrier operation instead */
#define PMIXMB() __asm__ __volatile__ ("mcr p15, 0, r0, c7, c10, 5" : : : "memory")
#define PMIXRMB() PMIXMB()
#define PMIXWMB() PMIXMB()

#else

#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

/* pre-ARMv6: call the Linux kernel user helper at the fixed address
 * 0xffff0fa0 (__kuser_memory_barrier) */
#define PMIXMB() (*((void (*)(void))(0xffff0fa0)))()
#define PMIXRMB() PMIXMB()
#define PMIXWMB() PMIXMB()

#endif

#if (PMIX_HAVE_ATOMIC_MEM_BARRIER == 1)

static inline
void pmix_atomic_mb(void)
{
    PMIXMB();
}

static inline
void pmix_atomic_rmb(void)
{
    PMIXRMB();
}

static inline
void pmix_atomic_wmb(void)
{
    PMIXWMB();
}

/* intentionally empty: this port does not implement an instruction
 * synchronization barrier */
static inline
void pmix_atomic_isync(void)
{
}

#endif
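
/* Usage sketch (illustrative only, not part of the PMIx API): a typical
 * producer/consumer pairing of the write and read barriers defined
 * above. The shared_data/shared_flag variables are hypothetical.
 *
 *     // producer
 *     shared_data = compute();      // plain stores
 *     pmix_atomic_wmb();            // order the data before the flag
 *     shared_flag = 1;
 *
 *     // consumer
 *     while (0 == shared_flag);     // spin until published
 *     pmix_atomic_rmb();            // order the flag before the data
 *     use(shared_data);
 */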


#if (PMIX_GCC_INLINE_ASSEMBLY && (PMIX_ASM_ARM_VERSION >= 6))

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1
#define PMIX_HAVE_ATOMIC_MATH_32 1
static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    int32_t prev, tmp;
    bool ret;

    /* ldrex/strex loop: retry until the store-exclusive succeeds
     * (tmp == 0) or the loaded value no longer matches *oldval */
    __asm__ __volatile__ (
                          "1:  ldrex   %0, [%2]        \n"
                          "    cmp     %0, %3          \n"
                          "    bne     2f              \n"
                          "    strex   %1, %4, [%2]    \n"
                          "    cmp     %1, #0          \n"
                          "    bne     1b              \n"
                          "2:                          \n"

                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}
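
/* Usage sketch (illustrative only): on failure the compare-exchange
 * returns false and refreshes *oldval with the observed value, so a
 * read-modify-write retry loop needs no explicit reload.
 * pmix_fetch_max_32 is a hypothetical helper, not part of this header.
 *
 *     static inline int32_t pmix_fetch_max_32 (pmix_atomic_int32_t *addr, int32_t value)
 *     {
 *         int32_t old = *addr;
 *         do {
 *             if (old >= value) break;   // already at least value
 *         } while (!pmix_atomic_compare_exchange_strong_32 (addr, &old, value));
 *         return old;                    // last observed previous value
 *     }
 */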

/* acquire semantics: perform the exchange, then fence subsequent reads */
static inline bool pmix_atomic_compare_exchange_strong_acq_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}

/* release semantics: fence prior writes, then perform the exchange */
static inline bool pmix_atomic_compare_exchange_strong_rel_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
}
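
/* Usage sketch (illustrative only): the acquire variant is the natural
 * building block for a test-and-set lock; the unlock path mirrors the
 * rel pattern above (write barrier, then store). example_lock and
 * example_unlock are hypothetical names, not part of this header.
 *
 *     static inline void example_lock (pmix_atomic_int32_t *lock)
 *     {
 *         int32_t unlocked = 0;
 *         // acquire fence keeps the critical section after the CAS
 *         while (!pmix_atomic_compare_exchange_strong_acq_32 (lock, &unlocked, 1)) {
 *             unlocked = 0;   // CAS failure overwrote the expected value
 *         }
 *     }
 *
 *     static inline void example_unlock (pmix_atomic_int32_t *lock)
 *     {
 *         pmix_atomic_wmb();  // release fence: drain prior writes first
 *         *lock = 0;
 *     }
 */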

#if (PMIX_ASM_SUPPORT_64BIT == 1)

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1
static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    int64_t prev;
    int tmp;
    bool ret;

    /* ldrexd/strexd operate on a 64-bit register pair; %H selects the
     * high word of the pair, so both halves are compared before the
     * store-exclusive is attempted */
    __asm__ __volatile__ (
                          "1:  ldrexd  %0, %H0, [%2]       \n"
                          "    cmp     %0, %3              \n"
                          "    it      eq                  \n"
                          "    cmpeq   %H0, %H3            \n"
                          "    bne     2f                  \n"
                          "    strexd  %1, %4, %H4, [%2]   \n"
                          "    cmp     %1, #0              \n"
                          "    bne     1b                  \n"
                          "2:                              \n"

                          : "=&r" (prev), "=&r" (tmp)
                          : "r" (addr), "r" (*oldval), "r" (newval)
                          : "cc", "memory");

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

/* acquire semantics: perform the exchange, then fence subsequent reads */
static inline bool pmix_atomic_compare_exchange_strong_acq_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}

/* release semantics: fence prior writes, then perform the exchange */
static inline bool pmix_atomic_compare_exchange_strong_rel_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
}

#endif  /* PMIX_ASM_SUPPORT_64BIT */


#define PMIX_HAVE_ATOMIC_ADD_32 1
static inline int32_t pmix_atomic_fetch_add_32(pmix_atomic_int32_t* v, int inc)
{
    int32_t t, old;
    int tmp;

    /* ldrex/strex loop: old receives the value before the add, t the
     * value after; retry until the store-exclusive succeeds */
    __asm__ __volatile__(
                         "1:  ldrex   %1, [%3]        \n"
                         "    add     %0, %1, %4      \n"
                         "    strex   %2, %0, [%3]    \n"
                         "    cmp     %2, #0          \n"
                         "    bne     1b              \n"

                         : "=&r" (t), "=&r" (old), "=&r" (tmp)
                         : "r" (v), "r" (inc)
                         : "cc", "memory");

    /* fetch semantics: return the value prior to the addition */
    return old;
}

#define PMIX_HAVE_ATOMIC_SUB_32 1
static inline int32_t pmix_atomic_fetch_sub_32(pmix_atomic_int32_t* v, int dec)
{
    int32_t t, old;
    int tmp;

    __asm__ __volatile__(
                         "1:  ldrex   %1, [%3]        \n"
                         "    sub     %0, %1, %4      \n"
                         "    strex   %2, %0, [%3]    \n"
                         "    cmp     %2, #0          \n"
                         "    bne     1b              \n"

                         : "=&r" (t), "=&r" (old), "=&r" (tmp)
                         : "r" (v), "r" (dec)
                         : "cc", "memory");

    /* fetch semantics: return the value prior to the subtraction,
     * mirroring pmix_atomic_fetch_add_32 (returning t here would
     * incorrectly yield the post-subtraction value) */
    return old;
}
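
/* Usage sketch (illustrative only): with fetch semantics both functions
 * return the value *before* the update, so the last releaser of a
 * hypothetical reference count observes a prior value of 1. The
 * obj_refcount and free_object names are placeholders.
 *
 *     pmix_atomic_fetch_add_32 (&obj_refcount, 1);           // take a reference
 *
 *     if (1 == pmix_atomic_fetch_sub_32 (&obj_refcount, 1))  // drop a reference
 *         free_object ();                                    // we were the last holder
 */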

#endif  /* PMIX_GCC_INLINE_ASSEMBLY */

#endif  /* ! PMIX_SYS_ARCH_ATOMIC_H */