This source file includes the following definitions:
- pmix_atomic_mb
- pmix_atomic_rmb
- pmix_atomic_wmb
- pmix_atomic_isync
- pmix_atomic_compare_exchange_strong_32
- pmix_atomic_compare_exchange_strong_acq_32
- pmix_atomic_compare_exchange_strong_rel_32
- pmix_atomic_compare_exchange_strong_64
- pmix_atomic_compare_exchange_strong_64
- pmix_atomic_compare_exchange_strong_acq_64
- pmix_atomic_compare_exchange_strong_rel_64

#ifndef PMIX_SYS_ARCH_ATOMIC_H
#define PMIX_SYS_ARCH_ATOMIC_H 1

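/*
 * ASI_P is the SPARC V9 primary address space identifier (0x80) used by
 * the casa/casxa alternate-space compare-and-swap instructions below.
 * MEPMIXMBAR() emits a "membar" instruction with the requested ordering
 * constraints and prevents the compiler from reordering memory accesses
 * across it.
 */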
#define ASI_P "0x80"

#define MEPMIXMBAR(type) __asm__ __volatile__ ("membar " type : : : "memory")

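/*
 * This architecture provides a memory barrier instruction and both 32-bit
 * and 64-bit compare-and-swap, so all three capabilities are advertised.
 */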
#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1

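/**********************************************************************
 *
 * Memory Barriers
 *
 *********************************************************************/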
#if PMIX_GCC_INLINE_ASSEMBLY

static inline void pmix_atomic_mb(void)
{
    MEPMIXMBAR("#LoadLoad | #LoadStore | #StoreStore | #StoreLoad");
}


static inline void pmix_atomic_rmb(void)
{
    MEPMIXMBAR("#LoadLoad");
}


static inline void pmix_atomic_wmb(void)
{
    MEPMIXMBAR("#StoreStore");
}

static inline void pmix_atomic_isync(void)
{
    /* instruction synchronization is a no-op on this architecture */
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */

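/**********************************************************************
 *
 * Atomic compare-and-swap operations
 *
 *********************************************************************/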
#if PMIX_GCC_INLINE_ASSEMBLY

static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    /* casa [reg(rs1)] imm_asi, reg(rs2), reg(rd)
     *
     * if ( *(reg(rs1)) == reg(rs2) )
     *     swap reg(rd), *(reg(rs1))
     * else
     *     reg(rd) = *(reg(rs1))
     *
     * i.e. prev ends up holding the value that was in *addr, and the store
     * of newval only happens when *addr matched *oldval.
     */
    int32_t prev = newval;
    bool ret;

    __asm__ __volatile__("casa [%1] " ASI_P ", %2, %0"
                         : "+r" (prev)
                         : "r" (addr), "r" (*oldval));
    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}


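/*
 * The acquire/release variants wrap the plain strong CAS with the
 * appropriate barrier: acquire issues a read barrier after the exchange,
 * release issues a write barrier before it.
 */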
static inline bool pmix_atomic_compare_exchange_strong_acq_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}


static inline bool pmix_atomic_compare_exchange_strong_rel_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
}


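/*
 * On a 64-bit SPARC ABI a 64-bit value fits in a single integer register,
 * so casxa can be used exactly like casa above.  On 32-bit ABIs the 64-bit
 * operands have to be loaded from memory into registers first, hence the
 * two implementations below.
 */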
#if PMIX_ASSEMBLY_ARCH == PMIX_SPARCV9_64

static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    /* casxa [reg(rs1)] imm_asi, reg(rs2), reg(rd)
     *
     * 64-bit variant of casa: if the doubleword at *(reg(rs1)) equals
     * reg(rs2), it is swapped with reg(rd); otherwise reg(rd) receives the
     * current contents of *(reg(rs1)).
     */
    int64_t prev = newval;
    bool ret;

    __asm__ __volatile__("casxa [%1] " ASI_P ", %2, %0"
                         : "+r" (prev)
                         : "r" (addr), "r" (*oldval));
    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

#else

static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    /* On a 32-bit ABI the 64-bit operands cannot be passed to the asm in a
     * single integer register, so they stay in memory: load newval (prev)
     * into %g1 and the expected value (*oldval) into %g2 with ldx, perform
     * the casxa, then store the observed memory value back into prev.
     */
    int64_t prev = newval;
    bool ret;

    __asm__ __volatile__(
                         "ldx %0, %%g1 \n\t"
                         "ldx %2, %%g2 \n\t"
                         "casxa [%1] " ASI_P ", %%g2, %%g1 \n\t"
                         "stx %%g1, %0 \n"
                         : "+m"(prev)
                         : "r"(addr), "m"(*oldval)
                         : "%g1", "%g2"
                         );

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

#endif

static inline bool pmix_atomic_compare_exchange_strong_acq_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}


static inline bool pmix_atomic_compare_exchange_strong_rel_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */
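/*
 * Example usage (illustrative sketch only, not part of this header): a
 * lock-free increment built on the strong 32-bit CAS.  The counter
 * variable and surrounding code are hypothetical.
 *
 *     pmix_atomic_int32_t counter;
 *     int32_t expected = counter;
 *     while (!pmix_atomic_compare_exchange_strong_32(&counter, &expected,
 *                                                    expected + 1)) {
 *         // on failure, 'expected' now holds the current value of
 *         // 'counter'; retry with it
 *     }
 */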

#endif /* PMIX_SYS_ARCH_ATOMIC_H */