1 /* Atomic operations.  PowerPC Common version.
2    Copyright (C) 2003-2022 Free Software Foundation, Inc.
3    This file is part of the GNU C Library.
4 
5    The GNU C Library is free software; you can redistribute it and/or
6    modify it under the terms of the GNU Lesser General Public
7    License as published by the Free Software Foundation; either
8    version 2.1 of the License, or (at your option) any later version.
9 
10    The GNU C Library is distributed in the hope that it will be useful,
11    but WITHOUT ANY WARRANTY; without even the implied warranty of
12    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13    Lesser General Public License for more details.
14 
15    You should have received a copy of the GNU Lesser General Public
16    License along with the GNU C Library; if not, see
17    <https://www.gnu.org/licenses/>.  */
18 
19 /*
20  * Never include sysdeps/powerpc/atomic-machine.h directly.
21  * Alway use include/atomic.h which will include either
22  * sysdeps/powerpc/powerpc32/atomic-machine.h
23  * or
24  * sysdeps/powerpc/powerpc64/atomic-machine.h
25  * as appropriate and which in turn include this file.
26  */
27 
28 /*
29  * Powerpc does not have byte and halfword forms of load and reserve and
30  * store conditional. So for powerpc we stub out the 8- and 16-bit forms.
31  */
32 #define __arch_compare_and_exchange_bool_8_acq(mem, newval, oldval) \
33   (abort (), 0)
34 
35 #define __arch_compare_and_exchange_bool_16_acq(mem, newval, oldval) \
36   (abort (), 0)
37 
/* Instruction placed after a lwarx/stwcx. loop to give it acquire
   semantics.  */
#define __ARCH_ACQ_INSTR	"isync"
/* Instruction placed before a lwarx/stwcx. loop to give it release
   semantics.  A platform header may pre-define this (hence the #ifndef);
   the default is the full "sync" barrier.  */
#ifndef __ARCH_REL_INSTR
# define __ARCH_REL_INSTR	"sync"
#endif

/* MUTEX_HINT_ACQ/REL are string-pasted after the lwarx operands in the
   macros below.  A platform header may define them to append a lock-hint
   operand to lwarx; by default they are empty and no hint is emitted.  */
#ifndef MUTEX_HINT_ACQ
# define MUTEX_HINT_ACQ
#endif
#ifndef MUTEX_HINT_REL
# define MUTEX_HINT_REL
#endif
49 
/* Full memory barrier: the heavyweight "sync" instruction, plus a
   "memory" clobber so the compiler does not reorder memory accesses
   across it.  */
#define atomic_full_barrier()	__asm ("sync" ::: "memory")
51 
/* 32-bit compare-and-exchange with acquire semantics.  Loads *MEM with
   lwarx; if the value differs from OLDVAL, branches past the store;
   otherwise attempts the store with stwcx. and retries from the lwarx
   if the reservation was lost (bne- 1b).  Returns the value the lwarx
   observed (equal to OLDVAL iff the exchange happened).  The isync at
   label 2 runs on both the success and the failure path, providing the
   acquire barrier in either case.  */
#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval)	      \
  ({									      \
      __typeof (*(mem)) __tmp;						      \
      __typeof (mem)  __memp = (mem);					      \
      __asm __volatile (						      \
		        "1:	lwarx	%0,0,%1" MUTEX_HINT_ACQ "\n"	      \
		        "	cmpw	%0,%2\n"			      \
		        "	bne	2f\n"				      \
		        "	stwcx.	%3,0,%1\n"			      \
		        "	bne-	1b\n"				      \
		        "2:	" __ARCH_ACQ_INSTR			      \
		        : "=&r" (__tmp)					      \
		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
		        : "cr0", "memory");				      \
      __tmp;								      \
  })
68 
/* 32-bit compare-and-exchange with release semantics: the same
   lwarx/stwcx. loop as the _acq variant, but the barrier
   (__ARCH_REL_INSTR) is issued before the loop and nothing follows it.
   Returns the value the lwarx observed.  */
#define __arch_compare_and_exchange_val_32_rel(mem, newval, oldval)	      \
  ({									      \
      __typeof (*(mem)) __tmp;						      \
      __typeof (mem)  __memp = (mem);					      \
      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		        "1:	lwarx	%0,0,%1" MUTEX_HINT_REL "\n"	      \
		        "	cmpw	%0,%2\n"			      \
		        "	bne	2f\n"				      \
		        "	stwcx.	%3,0,%1\n"			      \
		        "	bne-	1b\n"				      \
		        "2:	"					      \
		        : "=&r" (__tmp)					      \
		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
		        : "cr0", "memory");				      \
      __tmp;								      \
  })
85 
/* Unconditionally store VALUE into the 32-bit *MEM and return the
   previous contents.  The stwcx. fails only if the reservation was
   lost, in which case the sequence retries from the lwarx (bne- 1b).
   The trailing __ARCH_ACQ_INSTR (isync) gives acquire semantics.  */
#define __arch_atomic_exchange_32_acq(mem, value)			      \
  ({									      \
    __typeof (*mem) __val;						      \
    __asm __volatile (							      \
		      "1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	      \
		      "		stwcx.	%3,0,%2\n"			      \
		      "		bne-	1b\n"				      \
		      "   " __ARCH_ACQ_INSTR				      \
		      : "=&r" (__val), "=m" (*mem)			      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
99 
/* Unconditionally store VALUE into the 32-bit *MEM and return the
   previous contents, with release semantics: __ARCH_REL_INSTR is
   emitted before the lwarx/stwcx. loop and no barrier follows it.  */
#define __arch_atomic_exchange_32_rel(mem, value) \
  ({									      \
    __typeof (*mem) __val;						      \
    __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		      "1:	lwarx	%0,0,%2" MUTEX_HINT_REL "\n"	      \
		      "		stwcx.	%3,0,%2\n"			      \
		      "		bne-	1b"				      \
		      : "=&r" (__val), "=m" (*mem)			      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
112 
/* 32-bit fetch-and-add with no ordering guarantees (no barrier on
   either side).  __val receives the value loaded by lwarx — the return
   value — while __tmp holds the sum that stwcx. attempts to store.  */
#define __arch_atomic_exchange_and_add_32(mem, value) \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
		      "		add	%1,%0,%4\n"			      \
		      "		stwcx.	%1,0,%3\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
125 
/* 32-bit fetch-and-add with acquire semantics: as the plain variant,
   plus __ARCH_ACQ_INSTR (isync) after a successful store.  Returns the
   value *MEM held before the addition.  */
#define __arch_atomic_exchange_and_add_32_acq(mem, value) \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile ("1:	lwarx	%0,0,%3" MUTEX_HINT_ACQ "\n"	      \
		      "		add	%1,%0,%4\n"			      \
		      "		stwcx.	%1,0,%3\n"			      \
		      "		bne-	1b\n"				      \
		      __ARCH_ACQ_INSTR					      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
139 
/* 32-bit fetch-and-add with release semantics: __ARCH_REL_INSTR is
   emitted before the lwarx/stwcx. loop.  Returns the value *MEM held
   before the addition.  */
#define __arch_atomic_exchange_and_add_32_rel(mem, value) \
  ({									      \
    __typeof (*mem) __val, __tmp;					      \
    __asm __volatile (__ARCH_REL_INSTR "\n"				      \
		      "1:	lwarx	%0,0,%3" MUTEX_HINT_REL "\n"	      \
		      "		add	%1,%0,%4\n"			      \
		      "		stwcx.	%1,0,%3\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		      : "b" (mem), "r" (value), "m" (*mem)		      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
153 
/* Atomically add 1 to the 32-bit *MEM and return the *new* (incremented)
   value — the addi updates %0 before the store, and %0 is what the
   expression yields.  No memory-ordering barrier is emitted.  */
#define __arch_atomic_increment_val_32(mem) \
  ({									      \
    __typeof (*(mem)) __val;						      \
    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
		      "		addi	%0,%0,1\n"			      \
		      "		stwcx.	%0,0,%2\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=m" (*mem)			      \
		      : "b" (mem), "m" (*mem)				      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
166 
/* Atomically subtract 1 from the 32-bit *MEM and return the *new*
   (decremented) value.  (subi is the extended mnemonic for addi with a
   negated immediate.)  No memory-ordering barrier is emitted.  */
#define __arch_atomic_decrement_val_32(mem) \
  ({									      \
    __typeof (*(mem)) __val;						      \
    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
		      "		subi	%0,%0,1\n"			      \
		      "		stwcx.	%0,0,%2\n"			      \
		      "		bne-	1b"				      \
		      : "=&b" (__val), "=m" (*mem)			      \
		      : "b" (mem), "m" (*mem)				      \
		      : "cr0", "memory");				      \
    __val;								      \
  })
179 
/* If the 32-bit *MEM is > 0, atomically decrement it.  Returns the value
   the lwarx observed (the pre-decrement value) whether or not the store
   happened: cmpwi tests the loaded value and ble 2f skips the stwcx.
   when it is <= 0.  The isync at label 2 runs on both paths, giving
   acquire semantics.  Note: __val and __tmp are plain int, so this form
   really is 32-bit only.  */
#define __arch_atomic_decrement_if_positive_32(mem) \
  ({ int __val, __tmp;							      \
     __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
		       "	cmpwi	0,%0,0\n"			      \
		       "	addi	%1,%0,-1\n"			      \
		       "	ble	2f\n"				      \
		       "	stwcx.	%1,0,%3\n"			      \
		       "	bne-	1b\n"				      \
		       "2:	" __ARCH_ACQ_INSTR			      \
		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
		       : "b" (mem), "m" (*mem)				      \
		       : "cr0", "memory");				      \
     __val;								      \
  })
194 
/* If *MEM equals OLDVAL, store NEWVAL into *MEM; return the previous
   contents of *MEM, with acquire semantics.  Dispatches on operand size;
   powerpc has only word (and, on powerpc64, doubleword) load-and-reserve,
   so any other size aborts at run time.  MEM is parenthesized inside
   sizeof so that expression arguments expand correctly.  */
#define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_compare_and_exchange_val_32_acq (mem, newval, oldval); \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_compare_and_exchange_val_64_acq (mem, newval, oldval); \
    else								      \
      abort ();								      \
    __result;								      \
  })
206 
/* If *MEM equals OLDVAL, store NEWVAL into *MEM; return the previous
   contents of *MEM, with release semantics.  Size dispatch as in the
   _acq variant; unsupported sizes abort.  MEM is parenthesized inside
   sizeof so that expression arguments expand correctly.  */
#define atomic_compare_and_exchange_val_rel(mem, newval, oldval) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_compare_and_exchange_val_32_rel (mem, newval, oldval); \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_compare_and_exchange_val_64_rel (mem, newval, oldval); \
    else								      \
      abort ();								      \
    __result;								      \
  })
218 
/* Store VALUE into *MEM and return the previous contents, with acquire
   semantics.  Dispatches on operand size; sizes other than 4 and 8
   abort.  MEM is parenthesized inside sizeof so that expression
   arguments expand correctly.  */
#define atomic_exchange_acq(mem, value) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_32_acq (mem, value);		      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_64_acq (mem, value);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })
230 
/* Store VALUE into *MEM and return the previous contents, with release
   semantics.  Dispatches on operand size; sizes other than 4 and 8
   abort.  MEM is parenthesized inside sizeof so that expression
   arguments expand correctly.  */
#define atomic_exchange_rel(mem, value) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_32_rel (mem, value);		      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_64_rel (mem, value);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })
242 
/* Atomically add VALUE to *MEM and return the previous contents of *MEM
   (fetch-and-add), with no ordering guarantees.  Dispatches on operand
   size; unsupported sizes abort.  MEM is parenthesized inside sizeof so
   that expression arguments expand correctly.  */
#define atomic_exchange_and_add(mem, value) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32 (mem, value);	      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64 (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Fetch-and-add with acquire semantics: add VALUE to *MEM and return the
   previous contents.  Dispatches on operand size; unsupported sizes
   abort.  MEM is parenthesized inside sizeof so that expression
   arguments expand correctly.  */
#define atomic_exchange_and_add_acq(mem, value) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32_acq (mem, value);	      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64_acq (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
/* Fetch-and-add with release semantics: add VALUE to *MEM and return the
   previous contents.  Dispatches on operand size; unsupported sizes
   abort.  MEM is parenthesized inside sizeof so that expression
   arguments expand correctly.  */
#define atomic_exchange_and_add_rel(mem, value) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_exchange_and_add_32_rel (mem, value);	      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_exchange_and_add_64_rel (mem, value);	      \
    else								      \
      abort ();								      \
    __result;								      \
  })
276 
/* Atomically add one to *MEM and return the new (incremented) value.
   Only 4- and 8-byte operands are supported; anything else aborts.  */
#define atomic_increment_val(mem) \
  ({									      \
    __typeof (*(mem)) __res;						      \
    if (sizeof (*(mem)) == 8)						      \
      __res = __arch_atomic_increment_val_64 (mem);			      \
    else if (sizeof (*(mem)) == 4)					      \
      __res = __arch_atomic_increment_val_32 (mem);			      \
    else								      \
      abort ();								      \
    __res;								      \
  })
288 
/* Atomically bump *MEM by one, discarding the resulting value.  */
#define atomic_increment(mem) ({ (void) atomic_increment_val (mem); })
290 
/* Atomically subtract one from *MEM and return the new (decremented)
   value.  Only 4- and 8-byte operands are supported; anything else
   aborts.  */
#define atomic_decrement_val(mem) \
  ({									      \
    __typeof (*(mem)) __res;						      \
    if (sizeof (*(mem)) == 8)						      \
      __res = __arch_atomic_decrement_val_64 (mem);			      \
    else if (sizeof (*(mem)) == 4)					      \
      __res = __arch_atomic_decrement_val_32 (mem);			      \
    else								      \
      abort ();								      \
    __res;								      \
  })
302 
/* Atomically drop *MEM by one, discarding the resulting value.  */
#define atomic_decrement(mem) ({ (void) atomic_decrement_val (mem); })
304 
305 
/* Decrement *MEM if it is > 0, and return the old value (whether or not
   the decrement happened).  Dispatches on operand size; unsupported
   sizes abort.  MEM is parenthesized inside sizeof so that expression
   arguments expand correctly.  */
#define atomic_decrement_if_positive(mem) \
  ({									      \
    __typeof (*(mem)) __result;						      \
    if (sizeof (*(mem)) == 4)						      \
      __result = __arch_atomic_decrement_if_positive_32 (mem);		      \
    else if (sizeof (*(mem)) == 8)					      \
      __result = __arch_atomic_decrement_if_positive_64 (mem);		      \
    else								      \
      abort ();								      \
    __result;								      \
  })
317