/* Atomic operations.  PowerPC Common version.
   Copyright (C) 2003-2022 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */

/*
 * Never include sysdeps/powerpc/atomic-machine.h directly.
 * Always use include/atomic.h, which will include either
 * sysdeps/powerpc/powerpc32/atomic-machine.h
 * or
 * sysdeps/powerpc/powerpc64/atomic-machine.h
 * as appropriate, and which in turn includes this file.
 */

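/* In practice that means consumers simply write

     #include <atomic.h>

   and the powerpc32 or powerpc64 wrapper selected there is expected to
   supply the 64-bit __arch_* variants and any MUTEX_HINT_* overrides
   referenced by the generic macros below.  */
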
/*
 * PowerPC does not have byte and halfword forms of load-and-reserve and
 * store-conditional, so for powerpc we stub out the 8- and 16-bit forms.
 */
#define __arch_compare_and_exchange_bool_8_acq(mem, newval, oldval) \
  (abort (), 0)

#define __arch_compare_and_exchange_bool_16_acq(mem, newval, oldval) \
  (abort (), 0)

#define __ARCH_ACQ_INSTR "isync"
#ifndef __ARCH_REL_INSTR
# define __ARCH_REL_INSTR "sync"
#endif

#ifndef MUTEX_HINT_ACQ
# define MUTEX_HINT_ACQ
#endif
#ifndef MUTEX_HINT_REL
# define MUTEX_HINT_REL
#endif

#define atomic_full_barrier() __asm ("sync" ::: "memory")

#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
  ({ \
      __typeof (*(mem)) __tmp; \
      __typeof (mem) __memp = (mem); \
      __asm __volatile ( \
                        "1:     lwarx   %0,0,%1" MUTEX_HINT_ACQ "\n" \
                        "       cmpw    %0,%2\n" \
                        "       bne     2f\n" \
                        "       stwcx.  %3,0,%1\n" \
                        "       bne-    1b\n" \
                        "2:     " __ARCH_ACQ_INSTR \
                        : "=&r" (__tmp) \
                        : "b" (__memp), "r" (oldval), "r" (newval) \
                        : "cr0", "memory"); \
      __tmp; \
  })

#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval) \
  ({ \
      __typeof (*(mem)) __tmp; \
      __typeof (mem) __memp = (mem); \
      __asm __volatile (__ARCH_REL_INSTR "\n" \
                        "1:     lwarx   %0,0,%1" MUTEX_HINT_REL "\n" \
                        "       cmpw    %0,%2\n" \
                        "       bne     2f\n" \
                        "       stwcx.  %3,0,%1\n" \
                        "       bne-    1b\n" \
                        "2:     " \
                        : "=&r" (__tmp) \
                        : "b" (__memp), "r" (oldval), "r" (newval) \
                        : "cr0", "memory"); \
      __tmp; \
  })

#define __arch_atomic_exchange_32_acq(mem, value) \
  ({ \
    __typeof (*mem) __val; \
    __asm __volatile ( \
                      "1:       lwarx   %0,0,%2" MUTEX_HINT_ACQ "\n" \
                      "         stwcx.  %3,0,%2\n" \
                      "         bne-    1b\n" \
                      "   " __ARCH_ACQ_INSTR \
                      : "=&r" (__val), "=m" (*mem) \
                      : "b" (mem), "r" (value), "m" (*mem) \
                      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_exchange_32_rel(mem, value) \
  ({ \
    __typeof (*mem) __val; \
    __asm __volatile (__ARCH_REL_INSTR "\n" \
                      "1:       lwarx   %0,0,%2" MUTEX_HINT_REL "\n" \
                      "         stwcx.  %3,0,%2\n" \
                      "         bne-    1b" \
                      : "=&r" (__val), "=m" (*mem) \
                      : "b" (mem), "r" (value), "m" (*mem) \
                      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_exchange_and_add_32(mem, value) \
  ({ \
    __typeof (*mem) __val, __tmp; \
    __asm __volatile ("1:       lwarx   %0,0,%3\n" \
                      "         add     %1,%0,%4\n" \
                      "         stwcx.  %1,0,%3\n" \
                      "         bne-    1b" \
                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
                      : "b" (mem), "r" (value), "m" (*mem) \
                      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_exchange_and_add_32_acq(mem, value) \
  ({ \
    __typeof (*mem) __val, __tmp; \
    __asm __volatile ("1:       lwarx   %0,0,%3" MUTEX_HINT_ACQ "\n" \
                      "         add     %1,%0,%4\n" \
                      "         stwcx.  %1,0,%3\n" \
                      "         bne-    1b\n" \
                      __ARCH_ACQ_INSTR \
                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
                      : "b" (mem), "r" (value), "m" (*mem) \
                      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_exchange_and_add_32_rel(mem, value) \
  ({ \
    __typeof (*mem) __val, __tmp; \
    __asm __volatile (__ARCH_REL_INSTR "\n" \
                      "1:       lwarx   %0,0,%3" MUTEX_HINT_REL "\n" \
                      "         add     %1,%0,%4\n" \
                      "         stwcx.  %1,0,%3\n" \
                      "         bne-    1b" \
                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
                      : "b" (mem), "r" (value), "m" (*mem) \
                      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_increment_val_32(mem) \
  ({ \
    __typeof (*(mem)) __val; \
    __asm __volatile ("1:       lwarx   %0,0,%2\n" \
                      "         addi    %0,%0,1\n" \
                      "         stwcx.  %0,0,%2\n" \
                      "         bne-    1b" \
                      : "=&b" (__val), "=m" (*mem) \
                      : "b" (mem), "m" (*mem) \
                      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_decrement_val_32(mem) \
  ({ \
    __typeof (*(mem)) __val; \
    __asm __volatile ("1:       lwarx   %0,0,%2\n" \
                      "         subi    %0,%0,1\n" \
                      "         stwcx.  %0,0,%2\n" \
                      "         bne-    1b" \
                      : "=&b" (__val), "=m" (*mem) \
                      : "b" (mem), "m" (*mem) \
                      : "cr0", "memory"); \
    __val; \
  })

#define __arch_atomic_decrement_if_positive_32(mem) \
  ({ int __val, __tmp; \
     __asm __volatile ("1:      lwarx   %0,0,%3\n" \
                       "        cmpwi   0,%0,0\n" \
                       "        addi    %1,%0,-1\n" \
                       "        ble     2f\n" \
                       "        stwcx.  %1,0,%3\n" \
                       "        bne-    1b\n" \
                       "2:      " __ARCH_ACQ_INSTR \
                       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
                       : "b" (mem), "m" (*mem) \
                       : "cr0", "memory"); \
     __val; \
  })

#define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_compare_and_exchange_val_32_acq(mem, newval, oldval); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_compare_and_exchange_val_64_acq(mem, newval, oldval); \
    else \
      abort (); \
    __result; \
  })

#define atomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_compare_and_exchange_val_32_rel(mem, newval, oldval); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_compare_and_exchange_val_64_rel(mem, newval, oldval); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_acq(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_32_acq (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_64_acq (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_rel(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_32_rel (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_64_rel (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_and_add(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_and_add_32 (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_and_add_64 (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_and_add_acq(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_and_add_32_acq (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_and_add_64_acq (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_exchange_and_add_rel(mem, value) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_exchange_and_add_32_rel (mem, value); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_exchange_and_add_64_rel (mem, value); \
    else \
      abort (); \
    __result; \
  })

#define atomic_increment_val(mem) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*(mem)) == 4) \
      __result = __arch_atomic_increment_val_32 (mem); \
    else if (sizeof (*(mem)) == 8) \
      __result = __arch_atomic_increment_val_64 (mem); \
    else \
      abort (); \
    __result; \
  })

#define atomic_increment(mem) ({ atomic_increment_val (mem); (void) 0; })

#define atomic_decrement_val(mem) \
  ({ \
    __typeof (*(mem)) __result; \
    if (sizeof (*(mem)) == 4) \
      __result = __arch_atomic_decrement_val_32 (mem); \
    else if (sizeof (*(mem)) == 8) \
      __result = __arch_atomic_decrement_val_64 (mem); \
    else \
      abort (); \
    __result; \
  })

#define atomic_decrement(mem) ({ atomic_decrement_val (mem); (void) 0; })

/* Decrement *MEM if it is > 0, and return the old value.  */
#define atomic_decrement_if_positive(mem) \
  ({ __typeof (*(mem)) __result; \
    if (sizeof (*mem) == 4) \
      __result = __arch_atomic_decrement_if_positive_32 (mem); \
    else if (sizeof (*mem) == 8) \
      __result = __arch_atomic_decrement_if_positive_64 (mem); \
    else \
      abort (); \
    __result; \
  })
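
/* Usage sketch (illustrative only; the struct, field, and function names
   below are hypothetical and not part of glibc):

     struct ref { int count; };

     static int
     ref_release (struct ref *r)
     {
       // atomic_decrement_if_positive returns the pre-decrement value and
       // only decrements when that value was > 0.
       int old = atomic_decrement_if_positive (&r->count);
       return old == 1;   // true when this call dropped the last reference
     }
   */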