1 // SPDX-License-Identifier: GPL-2.0
2 
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5 
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
8 
9 #include <linux/compiler.h>
10 
11 #ifndef arch_xchg_relaxed
12 #define arch_xchg_acquire arch_xchg
13 #define arch_xchg_release arch_xchg
14 #define arch_xchg_relaxed arch_xchg
15 #else /* arch_xchg_relaxed */
16 
17 #ifndef arch_xchg_acquire
18 #define arch_xchg_acquire(...) \
19 	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
20 #endif
21 
22 #ifndef arch_xchg_release
23 #define arch_xchg_release(...) \
24 	__atomic_op_release(arch_xchg, __VA_ARGS__)
25 #endif
26 
27 #ifndef arch_xchg
28 #define arch_xchg(...) \
29 	__atomic_op_fence(arch_xchg, __VA_ARGS__)
30 #endif
31 
32 #endif /* arch_xchg_relaxed */
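
/*
 * Note on the fallback pattern used throughout this file: when an
 * architecture only provides the _relaxed form of an operation, the
 * acquire/release/fully-ordered variants are built from it with the
 * __atomic_op_*() helpers from <linux/atomic.h>.  As a rough sketch
 * (not the authoritative definition), __atomic_op_acquire() expands to:
 *
 *	({
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);
 *		__atomic_acquire_fence();
 *		__ret;
 *	})
 *
 * __atomic_op_release() issues __atomic_release_fence() before the
 * relaxed op, and __atomic_op_fence() wraps it in
 * __atomic_pre_full_fence()/__atomic_post_full_fence().
 */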
33 
34 #ifndef arch_cmpxchg_relaxed
35 #define arch_cmpxchg_acquire arch_cmpxchg
36 #define arch_cmpxchg_release arch_cmpxchg
37 #define arch_cmpxchg_relaxed arch_cmpxchg
38 #else /* arch_cmpxchg_relaxed */
39 
40 #ifndef arch_cmpxchg_acquire
41 #define arch_cmpxchg_acquire(...) \
42 	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
43 #endif
44 
45 #ifndef arch_cmpxchg_release
46 #define arch_cmpxchg_release(...) \
47 	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
48 #endif
49 
50 #ifndef arch_cmpxchg
51 #define arch_cmpxchg(...) \
52 	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
53 #endif
54 
55 #endif /* arch_cmpxchg_relaxed */
56 
57 #ifndef arch_cmpxchg64_relaxed
58 #define arch_cmpxchg64_acquire arch_cmpxchg64
59 #define arch_cmpxchg64_release arch_cmpxchg64
60 #define arch_cmpxchg64_relaxed arch_cmpxchg64
61 #else /* arch_cmpxchg64_relaxed */
62 
63 #ifndef arch_cmpxchg64_acquire
64 #define arch_cmpxchg64_acquire(...) \
65 	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
66 #endif
67 
68 #ifndef arch_cmpxchg64_release
69 #define arch_cmpxchg64_release(...) \
70 	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
71 #endif
72 
73 #ifndef arch_cmpxchg64
74 #define arch_cmpxchg64(...) \
75 	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
76 #endif
77 
78 #endif /* arch_cmpxchg64_relaxed */
79 
80 #ifndef arch_try_cmpxchg_relaxed
81 #ifdef arch_try_cmpxchg
82 #define arch_try_cmpxchg_acquire arch_try_cmpxchg
83 #define arch_try_cmpxchg_release arch_try_cmpxchg
84 #define arch_try_cmpxchg_relaxed arch_try_cmpxchg
85 #endif /* arch_try_cmpxchg */
86 
87 #ifndef arch_try_cmpxchg
88 #define arch_try_cmpxchg(_ptr, _oldp, _new) \
89 ({ \
90 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
91 	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
92 	if (unlikely(___r != ___o)) \
93 		*___op = ___r; \
94 	likely(___r == ___o); \
95 })
96 #endif /* arch_try_cmpxchg */
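
/*
 * Illustrative sketch of how the try_cmpxchg() family is typically
 * used ('counter' and compute() are hypothetical): on failure the
 * current value is written back through the _oldp argument, so retry
 * loops do not need to re-read it:
 *
 *	int old = READ_ONCE(counter), new;
 *	do {
 *		new = compute(old);
 *	} while (!arch_try_cmpxchg(&counter, &old, new));
 */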
97 
98 #ifndef arch_try_cmpxchg_acquire
99 #define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
100 ({ \
101 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
102 	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
103 	if (unlikely(___r != ___o)) \
104 		*___op = ___r; \
105 	likely(___r == ___o); \
106 })
107 #endif /* arch_try_cmpxchg_acquire */
108 
109 #ifndef arch_try_cmpxchg_release
110 #define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
111 ({ \
112 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
113 	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
114 	if (unlikely(___r != ___o)) \
115 		*___op = ___r; \
116 	likely(___r == ___o); \
117 })
118 #endif /* arch_try_cmpxchg_release */
119 
120 #ifndef arch_try_cmpxchg_relaxed
121 #define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
122 ({ \
123 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
124 	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
125 	if (unlikely(___r != ___o)) \
126 		*___op = ___r; \
127 	likely(___r == ___o); \
128 })
129 #endif /* arch_try_cmpxchg_relaxed */
130 
131 #else /* arch_try_cmpxchg_relaxed */
132 
133 #ifndef arch_try_cmpxchg_acquire
134 #define arch_try_cmpxchg_acquire(...) \
135 	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
136 #endif
137 
138 #ifndef arch_try_cmpxchg_release
139 #define arch_try_cmpxchg_release(...) \
140 	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
141 #endif
142 
143 #ifndef arch_try_cmpxchg
144 #define arch_try_cmpxchg(...) \
145 	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
146 #endif
147 
148 #endif /* arch_try_cmpxchg_relaxed */
149 
150 #ifndef arch_try_cmpxchg64_relaxed
151 #ifdef arch_try_cmpxchg64
152 #define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
153 #define arch_try_cmpxchg64_release arch_try_cmpxchg64
154 #define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
155 #endif /* arch_try_cmpxchg64 */
156 
157 #ifndef arch_try_cmpxchg64
158 #define arch_try_cmpxchg64(_ptr, _oldp, _new) \
159 ({ \
160 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
161 	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
162 	if (unlikely(___r != ___o)) \
163 		*___op = ___r; \
164 	likely(___r == ___o); \
165 })
166 #endif /* arch_try_cmpxchg64 */
167 
168 #ifndef arch_try_cmpxchg64_acquire
169 #define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
170 ({ \
171 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
172 	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
173 	if (unlikely(___r != ___o)) \
174 		*___op = ___r; \
175 	likely(___r == ___o); \
176 })
177 #endif /* arch_try_cmpxchg64_acquire */
178 
179 #ifndef arch_try_cmpxchg64_release
180 #define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
181 ({ \
182 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
183 	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
184 	if (unlikely(___r != ___o)) \
185 		*___op = ___r; \
186 	likely(___r == ___o); \
187 })
188 #endif /* arch_try_cmpxchg64_release */
189 
190 #ifndef arch_try_cmpxchg64_relaxed
191 #define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
192 ({ \
193 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
194 	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
195 	if (unlikely(___r != ___o)) \
196 		*___op = ___r; \
197 	likely(___r == ___o); \
198 })
199 #endif /* arch_try_cmpxchg64_relaxed */
200 
201 #else /* arch_try_cmpxchg64_relaxed */
202 
203 #ifndef arch_try_cmpxchg64_acquire
204 #define arch_try_cmpxchg64_acquire(...) \
205 	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
206 #endif
207 
208 #ifndef arch_try_cmpxchg64_release
209 #define arch_try_cmpxchg64_release(...) \
210 	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
211 #endif
212 
213 #ifndef arch_try_cmpxchg64
214 #define arch_try_cmpxchg64(...) \
215 	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
216 #endif
217 
218 #endif /* arch_try_cmpxchg64_relaxed */
219 
220 #ifndef arch_atomic_read_acquire
221 static __always_inline int
222 arch_atomic_read_acquire(const atomic_t *v)
223 {
224 	int ret;
225 
226 	if (__native_word(atomic_t)) {
227 		ret = smp_load_acquire(&(v)->counter);
228 	} else {
229 		ret = arch_atomic_read(v);
230 		__atomic_acquire_fence();
231 	}
232 
233 	return ret;
234 }
235 #define arch_atomic_read_acquire arch_atomic_read_acquire
236 #endif
237 
238 #ifndef arch_atomic_set_release
239 static __always_inline void
240 arch_atomic_set_release(atomic_t *v, int i)
241 {
242 	if (__native_word(atomic_t)) {
243 		smp_store_release(&(v)->counter, i);
244 	} else {
245 		__atomic_release_fence();
246 		arch_atomic_set(v, i);
247 	}
248 }
249 #define arch_atomic_set_release arch_atomic_set_release
250 #endif
251 
252 #ifndef arch_atomic_add_return_relaxed
253 #define arch_atomic_add_return_acquire arch_atomic_add_return
254 #define arch_atomic_add_return_release arch_atomic_add_return
255 #define arch_atomic_add_return_relaxed arch_atomic_add_return
256 #else /* arch_atomic_add_return_relaxed */
257 
258 #ifndef arch_atomic_add_return_acquire
259 static __always_inline int
260 arch_atomic_add_return_acquire(int i, atomic_t *v)
261 {
262 	int ret = arch_atomic_add_return_relaxed(i, v);
263 	__atomic_acquire_fence();
264 	return ret;
265 }
266 #define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
267 #endif
268 
269 #ifndef arch_atomic_add_return_release
270 static __always_inline int
271 arch_atomic_add_return_release(int i, atomic_t *v)
272 {
273 	__atomic_release_fence();
274 	return arch_atomic_add_return_relaxed(i, v);
275 }
276 #define arch_atomic_add_return_release arch_atomic_add_return_release
277 #endif
278 
279 #ifndef arch_atomic_add_return
280 static __always_inline int
281 arch_atomic_add_return(int i, atomic_t *v)
282 {
283 	int ret;
284 	__atomic_pre_full_fence();
285 	ret = arch_atomic_add_return_relaxed(i, v);
286 	__atomic_post_full_fence();
287 	return ret;
288 }
289 #define arch_atomic_add_return arch_atomic_add_return
290 #endif
291 
292 #endif /* arch_atomic_add_return_relaxed */
293 
294 #ifndef arch_atomic_fetch_add_relaxed
295 #define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
296 #define arch_atomic_fetch_add_release arch_atomic_fetch_add
297 #define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
298 #else /* arch_atomic_fetch_add_relaxed */
299 
300 #ifndef arch_atomic_fetch_add_acquire
301 static __always_inline int
302 arch_atomic_fetch_add_acquire(int i, atomic_t *v)
303 {
304 	int ret = arch_atomic_fetch_add_relaxed(i, v);
305 	__atomic_acquire_fence();
306 	return ret;
307 }
308 #define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
309 #endif
310 
311 #ifndef arch_atomic_fetch_add_release
312 static __always_inline int
313 arch_atomic_fetch_add_release(int i, atomic_t *v)
314 {
315 	__atomic_release_fence();
316 	return arch_atomic_fetch_add_relaxed(i, v);
317 }
318 #define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
319 #endif
320 
321 #ifndef arch_atomic_fetch_add
322 static __always_inline int
323 arch_atomic_fetch_add(int i, atomic_t *v)
324 {
325 	int ret;
326 	__atomic_pre_full_fence();
327 	ret = arch_atomic_fetch_add_relaxed(i, v);
328 	__atomic_post_full_fence();
329 	return ret;
330 }
331 #define arch_atomic_fetch_add arch_atomic_fetch_add
332 #endif
333 
334 #endif /* arch_atomic_fetch_add_relaxed */
335 
336 #ifndef arch_atomic_sub_return_relaxed
337 #define arch_atomic_sub_return_acquire arch_atomic_sub_return
338 #define arch_atomic_sub_return_release arch_atomic_sub_return
339 #define arch_atomic_sub_return_relaxed arch_atomic_sub_return
340 #else /* arch_atomic_sub_return_relaxed */
341 
342 #ifndef arch_atomic_sub_return_acquire
343 static __always_inline int
344 arch_atomic_sub_return_acquire(int i, atomic_t *v)
345 {
346 	int ret = arch_atomic_sub_return_relaxed(i, v);
347 	__atomic_acquire_fence();
348 	return ret;
349 }
350 #define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
351 #endif
352 
353 #ifndef arch_atomic_sub_return_release
354 static __always_inline int
355 arch_atomic_sub_return_release(int i, atomic_t *v)
356 {
357 	__atomic_release_fence();
358 	return arch_atomic_sub_return_relaxed(i, v);
359 }
360 #define arch_atomic_sub_return_release arch_atomic_sub_return_release
361 #endif
362 
363 #ifndef arch_atomic_sub_return
364 static __always_inline int
365 arch_atomic_sub_return(int i, atomic_t *v)
366 {
367 	int ret;
368 	__atomic_pre_full_fence();
369 	ret = arch_atomic_sub_return_relaxed(i, v);
370 	__atomic_post_full_fence();
371 	return ret;
372 }
373 #define arch_atomic_sub_return arch_atomic_sub_return
374 #endif
375 
376 #endif /* arch_atomic_sub_return_relaxed */
377 
378 #ifndef arch_atomic_fetch_sub_relaxed
379 #define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
380 #define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
381 #define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
382 #else /* arch_atomic_fetch_sub_relaxed */
383 
384 #ifndef arch_atomic_fetch_sub_acquire
385 static __always_inline int
386 arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
387 {
388 	int ret = arch_atomic_fetch_sub_relaxed(i, v);
389 	__atomic_acquire_fence();
390 	return ret;
391 }
392 #define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
393 #endif
394 
395 #ifndef arch_atomic_fetch_sub_release
396 static __always_inline int
397 arch_atomic_fetch_sub_release(int i, atomic_t *v)
398 {
399 	__atomic_release_fence();
400 	return arch_atomic_fetch_sub_relaxed(i, v);
401 }
402 #define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
403 #endif
404 
405 #ifndef arch_atomic_fetch_sub
406 static __always_inline int
407 arch_atomic_fetch_sub(int i, atomic_t *v)
408 {
409 	int ret;
410 	__atomic_pre_full_fence();
411 	ret = arch_atomic_fetch_sub_relaxed(i, v);
412 	__atomic_post_full_fence();
413 	return ret;
414 }
415 #define arch_atomic_fetch_sub arch_atomic_fetch_sub
416 #endif
417 
418 #endif /* arch_atomic_fetch_sub_relaxed */
419 
420 #ifndef arch_atomic_inc
421 static __always_inline void
422 arch_atomic_inc(atomic_t *v)
423 {
424 	arch_atomic_add(1, v);
425 }
426 #define arch_atomic_inc arch_atomic_inc
427 #endif
428 
429 #ifndef arch_atomic_inc_return_relaxed
430 #ifdef arch_atomic_inc_return
431 #define arch_atomic_inc_return_acquire arch_atomic_inc_return
432 #define arch_atomic_inc_return_release arch_atomic_inc_return
433 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return
434 #endif /* arch_atomic_inc_return */
435 
436 #ifndef arch_atomic_inc_return
437 static __always_inline int
438 arch_atomic_inc_return(atomic_t *v)
439 {
440 	return arch_atomic_add_return(1, v);
441 }
442 #define arch_atomic_inc_return arch_atomic_inc_return
443 #endif
444 
445 #ifndef arch_atomic_inc_return_acquire
446 static __always_inline int
447 arch_atomic_inc_return_acquire(atomic_t *v)
448 {
449 	return arch_atomic_add_return_acquire(1, v);
450 }
451 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
452 #endif
453 
454 #ifndef arch_atomic_inc_return_release
455 static __always_inline int
456 arch_atomic_inc_return_release(atomic_t *v)
457 {
458 	return arch_atomic_add_return_release(1, v);
459 }
460 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
461 #endif
462 
463 #ifndef arch_atomic_inc_return_relaxed
464 static __always_inline int
465 arch_atomic_inc_return_relaxed(atomic_t *v)
466 {
467 	return arch_atomic_add_return_relaxed(1, v);
468 }
469 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
470 #endif
471 
472 #else /* arch_atomic_inc_return_relaxed */
473 
474 #ifndef arch_atomic_inc_return_acquire
475 static __always_inline int
476 arch_atomic_inc_return_acquire(atomic_t *v)
477 {
478 	int ret = arch_atomic_inc_return_relaxed(v);
479 	__atomic_acquire_fence();
480 	return ret;
481 }
482 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
483 #endif
484 
485 #ifndef arch_atomic_inc_return_release
486 static __always_inline int
487 arch_atomic_inc_return_release(atomic_t *v)
488 {
489 	__atomic_release_fence();
490 	return arch_atomic_inc_return_relaxed(v);
491 }
492 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
493 #endif
494 
495 #ifndef arch_atomic_inc_return
496 static __always_inline int
497 arch_atomic_inc_return(atomic_t *v)
498 {
499 	int ret;
500 	__atomic_pre_full_fence();
501 	ret = arch_atomic_inc_return_relaxed(v);
502 	__atomic_post_full_fence();
503 	return ret;
504 }
505 #define arch_atomic_inc_return arch_atomic_inc_return
506 #endif
507 
508 #endif /* arch_atomic_inc_return_relaxed */
509 
510 #ifndef arch_atomic_fetch_inc_relaxed
511 #ifdef arch_atomic_fetch_inc
512 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
513 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
514 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
515 #endif /* arch_atomic_fetch_inc */
516 
517 #ifndef arch_atomic_fetch_inc
518 static __always_inline int
519 arch_atomic_fetch_inc(atomic_t *v)
520 {
521 	return arch_atomic_fetch_add(1, v);
522 }
523 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
524 #endif
525 
526 #ifndef arch_atomic_fetch_inc_acquire
527 static __always_inline int
528 arch_atomic_fetch_inc_acquire(atomic_t *v)
529 {
530 	return arch_atomic_fetch_add_acquire(1, v);
531 }
532 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
533 #endif
534 
535 #ifndef arch_atomic_fetch_inc_release
536 static __always_inline int
537 arch_atomic_fetch_inc_release(atomic_t *v)
538 {
539 	return arch_atomic_fetch_add_release(1, v);
540 }
541 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
542 #endif
543 
544 #ifndef arch_atomic_fetch_inc_relaxed
545 static __always_inline int
546 arch_atomic_fetch_inc_relaxed(atomic_t *v)
547 {
548 	return arch_atomic_fetch_add_relaxed(1, v);
549 }
550 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
551 #endif
552 
553 #else /* arch_atomic_fetch_inc_relaxed */
554 
555 #ifndef arch_atomic_fetch_inc_acquire
556 static __always_inline int
557 arch_atomic_fetch_inc_acquire(atomic_t *v)
558 {
559 	int ret = arch_atomic_fetch_inc_relaxed(v);
560 	__atomic_acquire_fence();
561 	return ret;
562 }
563 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
564 #endif
565 
566 #ifndef arch_atomic_fetch_inc_release
567 static __always_inline int
568 arch_atomic_fetch_inc_release(atomic_t *v)
569 {
570 	__atomic_release_fence();
571 	return arch_atomic_fetch_inc_relaxed(v);
572 }
573 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
574 #endif
575 
576 #ifndef arch_atomic_fetch_inc
577 static __always_inline int
578 arch_atomic_fetch_inc(atomic_t *v)
579 {
580 	int ret;
581 	__atomic_pre_full_fence();
582 	ret = arch_atomic_fetch_inc_relaxed(v);
583 	__atomic_post_full_fence();
584 	return ret;
585 }
586 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
587 #endif
588 
589 #endif /* arch_atomic_fetch_inc_relaxed */
590 
591 #ifndef arch_atomic_dec
592 static __always_inline void
593 arch_atomic_dec(atomic_t *v)
594 {
595 	arch_atomic_sub(1, v);
596 }
597 #define arch_atomic_dec arch_atomic_dec
598 #endif
599 
600 #ifndef arch_atomic_dec_return_relaxed
601 #ifdef arch_atomic_dec_return
602 #define arch_atomic_dec_return_acquire arch_atomic_dec_return
603 #define arch_atomic_dec_return_release arch_atomic_dec_return
604 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return
605 #endif /* arch_atomic_dec_return */
606 
607 #ifndef arch_atomic_dec_return
608 static __always_inline int
609 arch_atomic_dec_return(atomic_t *v)
610 {
611 	return arch_atomic_sub_return(1, v);
612 }
613 #define arch_atomic_dec_return arch_atomic_dec_return
614 #endif
615 
616 #ifndef arch_atomic_dec_return_acquire
617 static __always_inline int
618 arch_atomic_dec_return_acquire(atomic_t *v)
619 {
620 	return arch_atomic_sub_return_acquire(1, v);
621 }
622 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
623 #endif
624 
625 #ifndef arch_atomic_dec_return_release
626 static __always_inline int
627 arch_atomic_dec_return_release(atomic_t *v)
628 {
629 	return arch_atomic_sub_return_release(1, v);
630 }
631 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
632 #endif
633 
634 #ifndef arch_atomic_dec_return_relaxed
635 static __always_inline int
636 arch_atomic_dec_return_relaxed(atomic_t *v)
637 {
638 	return arch_atomic_sub_return_relaxed(1, v);
639 }
640 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
641 #endif
642 
643 #else /* arch_atomic_dec_return_relaxed */
644 
645 #ifndef arch_atomic_dec_return_acquire
646 static __always_inline int
647 arch_atomic_dec_return_acquire(atomic_t *v)
648 {
649 	int ret = arch_atomic_dec_return_relaxed(v);
650 	__atomic_acquire_fence();
651 	return ret;
652 }
653 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
654 #endif
655 
656 #ifndef arch_atomic_dec_return_release
657 static __always_inline int
658 arch_atomic_dec_return_release(atomic_t *v)
659 {
660 	__atomic_release_fence();
661 	return arch_atomic_dec_return_relaxed(v);
662 }
663 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
664 #endif
665 
666 #ifndef arch_atomic_dec_return
667 static __always_inline int
668 arch_atomic_dec_return(atomic_t *v)
669 {
670 	int ret;
671 	__atomic_pre_full_fence();
672 	ret = arch_atomic_dec_return_relaxed(v);
673 	__atomic_post_full_fence();
674 	return ret;
675 }
676 #define arch_atomic_dec_return arch_atomic_dec_return
677 #endif
678 
679 #endif /* arch_atomic_dec_return_relaxed */
680 
681 #ifndef arch_atomic_fetch_dec_relaxed
682 #ifdef arch_atomic_fetch_dec
683 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
684 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
685 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
686 #endif /* arch_atomic_fetch_dec */
687 
688 #ifndef arch_atomic_fetch_dec
689 static __always_inline int
690 arch_atomic_fetch_dec(atomic_t *v)
691 {
692 	return arch_atomic_fetch_sub(1, v);
693 }
694 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
695 #endif
696 
697 #ifndef arch_atomic_fetch_dec_acquire
698 static __always_inline int
699 arch_atomic_fetch_dec_acquire(atomic_t *v)
700 {
701 	return arch_atomic_fetch_sub_acquire(1, v);
702 }
703 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
704 #endif
705 
706 #ifndef arch_atomic_fetch_dec_release
707 static __always_inline int
708 arch_atomic_fetch_dec_release(atomic_t *v)
709 {
710 	return arch_atomic_fetch_sub_release(1, v);
711 }
712 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
713 #endif
714 
715 #ifndef arch_atomic_fetch_dec_relaxed
716 static __always_inline int
717 arch_atomic_fetch_dec_relaxed(atomic_t *v)
718 {
719 	return arch_atomic_fetch_sub_relaxed(1, v);
720 }
721 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
722 #endif
723 
724 #else /* arch_atomic_fetch_dec_relaxed */
725 
726 #ifndef arch_atomic_fetch_dec_acquire
727 static __always_inline int
728 arch_atomic_fetch_dec_acquire(atomic_t *v)
729 {
730 	int ret = arch_atomic_fetch_dec_relaxed(v);
731 	__atomic_acquire_fence();
732 	return ret;
733 }
734 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
735 #endif
736 
737 #ifndef arch_atomic_fetch_dec_release
738 static __always_inline int
739 arch_atomic_fetch_dec_release(atomic_t *v)
740 {
741 	__atomic_release_fence();
742 	return arch_atomic_fetch_dec_relaxed(v);
743 }
744 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
745 #endif
746 
747 #ifndef arch_atomic_fetch_dec
748 static __always_inline int
749 arch_atomic_fetch_dec(atomic_t *v)
750 {
751 	int ret;
752 	__atomic_pre_full_fence();
753 	ret = arch_atomic_fetch_dec_relaxed(v);
754 	__atomic_post_full_fence();
755 	return ret;
756 }
757 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
758 #endif
759 
760 #endif /* arch_atomic_fetch_dec_relaxed */
761 
762 #ifndef arch_atomic_fetch_and_relaxed
763 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
764 #define arch_atomic_fetch_and_release arch_atomic_fetch_and
765 #define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
766 #else /* arch_atomic_fetch_and_relaxed */
767 
768 #ifndef arch_atomic_fetch_and_acquire
769 static __always_inline int
770 arch_atomic_fetch_and_acquire(int i, atomic_t *v)
771 {
772 	int ret = arch_atomic_fetch_and_relaxed(i, v);
773 	__atomic_acquire_fence();
774 	return ret;
775 }
776 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
777 #endif
778 
779 #ifndef arch_atomic_fetch_and_release
780 static __always_inline int
781 arch_atomic_fetch_and_release(int i, atomic_t *v)
782 {
783 	__atomic_release_fence();
784 	return arch_atomic_fetch_and_relaxed(i, v);
785 }
786 #define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
787 #endif
788 
789 #ifndef arch_atomic_fetch_and
790 static __always_inline int
791 arch_atomic_fetch_and(int i, atomic_t *v)
792 {
793 	int ret;
794 	__atomic_pre_full_fence();
795 	ret = arch_atomic_fetch_and_relaxed(i, v);
796 	__atomic_post_full_fence();
797 	return ret;
798 }
799 #define arch_atomic_fetch_and arch_atomic_fetch_and
800 #endif
801 
802 #endif /* arch_atomic_fetch_and_relaxed */
803 
804 #ifndef arch_atomic_andnot
805 static __always_inline void
806 arch_atomic_andnot(int i, atomic_t *v)
807 {
808 	arch_atomic_and(~i, v);
809 }
810 #define arch_atomic_andnot arch_atomic_andnot
811 #endif
812 
813 #ifndef arch_atomic_fetch_andnot_relaxed
814 #ifdef arch_atomic_fetch_andnot
815 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
816 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
817 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
818 #endif /* arch_atomic_fetch_andnot */
819 
820 #ifndef arch_atomic_fetch_andnot
821 static __always_inline int
822 arch_atomic_fetch_andnot(int i, atomic_t *v)
823 {
824 	return arch_atomic_fetch_and(~i, v);
825 }
826 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
827 #endif
828 
829 #ifndef arch_atomic_fetch_andnot_acquire
830 static __always_inline int
831 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
832 {
833 	return arch_atomic_fetch_and_acquire(~i, v);
834 }
835 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
836 #endif
837 
838 #ifndef arch_atomic_fetch_andnot_release
839 static __always_inline int
840 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
841 {
842 	return arch_atomic_fetch_and_release(~i, v);
843 }
844 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
845 #endif
846 
847 #ifndef arch_atomic_fetch_andnot_relaxed
848 static __always_inline int
849 arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
850 {
851 	return arch_atomic_fetch_and_relaxed(~i, v);
852 }
853 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
854 #endif
855 
856 #else /* arch_atomic_fetch_andnot_relaxed */
857 
858 #ifndef arch_atomic_fetch_andnot_acquire
859 static __always_inline int
860 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
861 {
862 	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
863 	__atomic_acquire_fence();
864 	return ret;
865 }
866 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
867 #endif
868 
869 #ifndef arch_atomic_fetch_andnot_release
870 static __always_inline int
871 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
872 {
873 	__atomic_release_fence();
874 	return arch_atomic_fetch_andnot_relaxed(i, v);
875 }
876 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
877 #endif
878 
879 #ifndef arch_atomic_fetch_andnot
880 static __always_inline int
881 arch_atomic_fetch_andnot(int i, atomic_t *v)
882 {
883 	int ret;
884 	__atomic_pre_full_fence();
885 	ret = arch_atomic_fetch_andnot_relaxed(i, v);
886 	__atomic_post_full_fence();
887 	return ret;
888 }
889 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
890 #endif
891 
892 #endif /* arch_atomic_fetch_andnot_relaxed */
893 
894 #ifndef arch_atomic_fetch_or_relaxed
895 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
896 #define arch_atomic_fetch_or_release arch_atomic_fetch_or
897 #define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
898 #else /* arch_atomic_fetch_or_relaxed */
899 
900 #ifndef arch_atomic_fetch_or_acquire
901 static __always_inline int
902 arch_atomic_fetch_or_acquire(int i, atomic_t *v)
903 {
904 	int ret = arch_atomic_fetch_or_relaxed(i, v);
905 	__atomic_acquire_fence();
906 	return ret;
907 }
908 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
909 #endif
910 
911 #ifndef arch_atomic_fetch_or_release
912 static __always_inline int
913 arch_atomic_fetch_or_release(int i, atomic_t *v)
914 {
915 	__atomic_release_fence();
916 	return arch_atomic_fetch_or_relaxed(i, v);
917 }
918 #define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
919 #endif
920 
921 #ifndef arch_atomic_fetch_or
922 static __always_inline int
923 arch_atomic_fetch_or(int i, atomic_t *v)
924 {
925 	int ret;
926 	__atomic_pre_full_fence();
927 	ret = arch_atomic_fetch_or_relaxed(i, v);
928 	__atomic_post_full_fence();
929 	return ret;
930 }
931 #define arch_atomic_fetch_or arch_atomic_fetch_or
932 #endif
933 
934 #endif /* arch_atomic_fetch_or_relaxed */
935 
936 #ifndef arch_atomic_fetch_xor_relaxed
937 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
938 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
939 #define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
940 #else /* arch_atomic_fetch_xor_relaxed */
941 
942 #ifndef arch_atomic_fetch_xor_acquire
943 static __always_inline int
944 arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
945 {
946 	int ret = arch_atomic_fetch_xor_relaxed(i, v);
947 	__atomic_acquire_fence();
948 	return ret;
949 }
950 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
951 #endif
952 
953 #ifndef arch_atomic_fetch_xor_release
954 static __always_inline int
955 arch_atomic_fetch_xor_release(int i, atomic_t *v)
956 {
957 	__atomic_release_fence();
958 	return arch_atomic_fetch_xor_relaxed(i, v);
959 }
960 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
961 #endif
962 
963 #ifndef arch_atomic_fetch_xor
964 static __always_inline int
965 arch_atomic_fetch_xor(int i, atomic_t *v)
966 {
967 	int ret;
968 	__atomic_pre_full_fence();
969 	ret = arch_atomic_fetch_xor_relaxed(i, v);
970 	__atomic_post_full_fence();
971 	return ret;
972 }
973 #define arch_atomic_fetch_xor arch_atomic_fetch_xor
974 #endif
975 
976 #endif /* arch_atomic_fetch_xor_relaxed */
977 
978 #ifndef arch_atomic_xchg_relaxed
979 #define arch_atomic_xchg_acquire arch_atomic_xchg
980 #define arch_atomic_xchg_release arch_atomic_xchg
981 #define arch_atomic_xchg_relaxed arch_atomic_xchg
982 #else /* arch_atomic_xchg_relaxed */
983 
984 #ifndef arch_atomic_xchg_acquire
985 static __always_inline int
986 arch_atomic_xchg_acquire(atomic_t *v, int i)
987 {
988 	int ret = arch_atomic_xchg_relaxed(v, i);
989 	__atomic_acquire_fence();
990 	return ret;
991 }
992 #define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
993 #endif
994 
995 #ifndef arch_atomic_xchg_release
996 static __always_inline int
997 arch_atomic_xchg_release(atomic_t *v, int i)
998 {
999 	__atomic_release_fence();
1000 	return arch_atomic_xchg_relaxed(v, i);
1001 }
1002 #define arch_atomic_xchg_release arch_atomic_xchg_release
1003 #endif
1004 
1005 #ifndef arch_atomic_xchg
1006 static __always_inline int
1007 arch_atomic_xchg(atomic_t *v, int i)
1008 {
1009 	int ret;
1010 	__atomic_pre_full_fence();
1011 	ret = arch_atomic_xchg_relaxed(v, i);
1012 	__atomic_post_full_fence();
1013 	return ret;
1014 }
1015 #define arch_atomic_xchg arch_atomic_xchg
1016 #endif
1017 
1018 #endif /* arch_atomic_xchg_relaxed */
1019 
1020 #ifndef arch_atomic_cmpxchg_relaxed
1021 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
1022 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
1023 #define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
1024 #else /* arch_atomic_cmpxchg_relaxed */
1025 
1026 #ifndef arch_atomic_cmpxchg_acquire
1027 static __always_inline int
1028 arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
1029 {
1030 	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
1031 	__atomic_acquire_fence();
1032 	return ret;
1033 }
1034 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
1035 #endif
1036 
1037 #ifndef arch_atomic_cmpxchg_release
1038 static __always_inline int
1039 arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
1040 {
1041 	__atomic_release_fence();
1042 	return arch_atomic_cmpxchg_relaxed(v, old, new);
1043 }
1044 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
1045 #endif
1046 
1047 #ifndef arch_atomic_cmpxchg
1048 static __always_inline int
1049 arch_atomic_cmpxchg(atomic_t *v, int old, int new)
1050 {
1051 	int ret;
1052 	__atomic_pre_full_fence();
1053 	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
1054 	__atomic_post_full_fence();
1055 	return ret;
1056 }
1057 #define arch_atomic_cmpxchg arch_atomic_cmpxchg
1058 #endif
1059 
1060 #endif /* arch_atomic_cmpxchg_relaxed */
1061 
1062 #ifndef arch_atomic_try_cmpxchg_relaxed
1063 #ifdef arch_atomic_try_cmpxchg
1064 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
1065 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
1066 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
1067 #endif /* arch_atomic_try_cmpxchg */
1068 
1069 #ifndef arch_atomic_try_cmpxchg
1070 static __always_inline bool
1071 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1072 {
1073 	int r, o = *old;
1074 	r = arch_atomic_cmpxchg(v, o, new);
1075 	if (unlikely(r != o))
1076 		*old = r;
1077 	return likely(r == o);
1078 }
1079 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1080 #endif
1081 
1082 #ifndef arch_atomic_try_cmpxchg_acquire
1083 static __always_inline bool
1084 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1085 {
1086 	int r, o = *old;
1087 	r = arch_atomic_cmpxchg_acquire(v, o, new);
1088 	if (unlikely(r != o))
1089 		*old = r;
1090 	return likely(r == o);
1091 }
1092 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1093 #endif
1094 
1095 #ifndef arch_atomic_try_cmpxchg_release
1096 static __always_inline bool
1097 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1098 {
1099 	int r, o = *old;
1100 	r = arch_atomic_cmpxchg_release(v, o, new);
1101 	if (unlikely(r != o))
1102 		*old = r;
1103 	return likely(r == o);
1104 }
1105 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1106 #endif
1107 
1108 #ifndef arch_atomic_try_cmpxchg_relaxed
1109 static __always_inline bool
1110 arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1111 {
1112 	int r, o = *old;
1113 	r = arch_atomic_cmpxchg_relaxed(v, o, new);
1114 	if (unlikely(r != o))
1115 		*old = r;
1116 	return likely(r == o);
1117 }
1118 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
1119 #endif
1120 
1121 #else /* arch_atomic_try_cmpxchg_relaxed */
1122 
1123 #ifndef arch_atomic_try_cmpxchg_acquire
1124 static __always_inline bool
1125 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1126 {
1127 	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1128 	__atomic_acquire_fence();
1129 	return ret;
1130 }
1131 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1132 #endif
1133 
1134 #ifndef arch_atomic_try_cmpxchg_release
1135 static __always_inline bool
1136 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1137 {
1138 	__atomic_release_fence();
1139 	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
1140 }
1141 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1142 #endif
1143 
1144 #ifndef arch_atomic_try_cmpxchg
1145 static __always_inline bool
1146 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1147 {
1148 	bool ret;
1149 	__atomic_pre_full_fence();
1150 	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1151 	__atomic_post_full_fence();
1152 	return ret;
1153 }
1154 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1155 #endif
1156 
1157 #endif /* arch_atomic_try_cmpxchg_relaxed */
1158 
1159 #ifndef arch_atomic_sub_and_test
1160 /**
1161  * arch_atomic_sub_and_test - subtract value from variable and test result
1162  * @i: integer value to subtract
1163  * @v: pointer of type atomic_t
1164  *
1165  * Atomically subtracts @i from @v and returns
1166  * true if the result is zero, or false for all
1167  * other cases.
1168  */
1169 static __always_inline bool
1170 arch_atomic_sub_and_test(int i, atomic_t *v)
1171 {
1172 	return arch_atomic_sub_return(i, v) == 0;
1173 }
1174 #define arch_atomic_sub_and_test arch_atomic_sub_and_test
1175 #endif
1176 
1177 #ifndef arch_atomic_dec_and_test
1178 /**
1179  * arch_atomic_dec_and_test - decrement and test
1180  * @v: pointer of type atomic_t
1181  *
1182  * Atomically decrements @v by 1 and
1183  * returns true if the result is 0, or false for all other
1184  * cases.
1185  */
1186 static __always_inline bool
1187 arch_atomic_dec_and_test(atomic_t *v)
1188 {
1189 	return arch_atomic_dec_return(v) == 0;
1190 }
1191 #define arch_atomic_dec_and_test arch_atomic_dec_and_test
1192 #endif
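
/*
 * Illustrative use of arch_atomic_dec_and_test() (sketch only; 'obj',
 * its 'refs' field and release() are hypothetical): the caller that
 * drops the last reference sees the count hit zero and tears the
 * object down:
 *
 *	if (arch_atomic_dec_and_test(&obj->refs))
 *		release(obj);
 */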
1193 
1194 #ifndef arch_atomic_inc_and_test
1195 /**
1196  * arch_atomic_inc_and_test - increment and test
1197  * @v: pointer of type atomic_t
1198  *
1199  * Atomically increments @v by 1
1200  * and returns true if the result is zero, or false for all
1201  * other cases.
1202  */
1203 static __always_inline bool
1204 arch_atomic_inc_and_test(atomic_t *v)
1205 {
1206 	return arch_atomic_inc_return(v) == 0;
1207 }
1208 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
1209 #endif
1210 
1211 #ifndef arch_atomic_add_negative
1212 /**
1213  * arch_atomic_add_negative - add and test if negative
1214  * @i: integer value to add
1215  * @v: pointer of type atomic_t
1216  *
1217  * Atomically adds @i to @v and returns true
1218  * if the result is negative, or false when
1219  * the result is greater than or equal to zero.
1220  */
1221 static __always_inline bool
1222 arch_atomic_add_negative(int i, atomic_t *v)
1223 {
1224 	return arch_atomic_add_return(i, v) < 0;
1225 }
1226 #define arch_atomic_add_negative arch_atomic_add_negative
1227 #endif
1228 
1229 #ifndef arch_atomic_fetch_add_unless
1230 /**
1231  * arch_atomic_fetch_add_unless - add unless the number is already a given value
1232  * @v: pointer of type atomic_t
1233  * @a: the amount to add to v...
1234  * @u: ...unless v is equal to u.
1235  *
1236  * Atomically adds @a to @v, so long as @v was not already @u.
1237  * Returns the original value of @v.
1238  */
1239 static __always_inline int
1240 arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
1241 {
1242 	int c = arch_atomic_read(v);
1243 
1244 	do {
1245 		if (unlikely(c == u))
1246 			break;
1247 	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));
1248 
1249 	return c;
1250 }
1251 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
1252 #endif
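
/*
 * Illustrative use of arch_atomic_fetch_add_unless() (sketch only;
 * 'obj' and its 'refs' field are hypothetical): take a reference only
 * if the count has not already dropped to zero:
 *
 *	if (arch_atomic_fetch_add_unless(&obj->refs, 1, 0) == 0)
 *		return NULL;		// object already being freed
 */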
1253 
1254 #ifndef arch_atomic_add_unless
1255 /**
1256  * arch_atomic_add_unless - add unless the number is already a given value
1257  * @v: pointer of type atomic_t
1258  * @a: the amount to add to v...
1259  * @u: ...unless v is equal to u.
1260  *
1261  * Atomically adds @a to @v, if @v was not already @u.
1262  * Returns true if the addition was done.
1263  */
1264 static __always_inline bool
1265 arch_atomic_add_unless(atomic_t *v, int a, int u)
1266 {
1267 	return arch_atomic_fetch_add_unless(v, a, u) != u;
1268 }
1269 #define arch_atomic_add_unless arch_atomic_add_unless
1270 #endif
1271 
1272 #ifndef arch_atomic_inc_not_zero
1273 /**
1274  * arch_atomic_inc_not_zero - increment unless the number is zero
1275  * @v: pointer of type atomic_t
1276  *
1277  * Atomically increments @v by 1, if @v is non-zero.
1278  * Returns true if the increment was done.
1279  */
1280 static __always_inline bool
1281 arch_atomic_inc_not_zero(atomic_t *v)
1282 {
1283 	return arch_atomic_add_unless(v, 1, 0);
1284 }
1285 #define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
1286 #endif
1287 
1288 #ifndef arch_atomic_inc_unless_negative
1289 static __always_inline bool
1290 arch_atomic_inc_unless_negative(atomic_t *v)
1291 {
1292 	int c = arch_atomic_read(v);
1293 
1294 	do {
1295 		if (unlikely(c < 0))
1296 			return false;
1297 	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));
1298 
1299 	return true;
1300 }
1301 #define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
1302 #endif
1303 
1304 #ifndef arch_atomic_dec_unless_positive
1305 static __always_inline bool
1306 arch_atomic_dec_unless_positive(atomic_t *v)
1307 {
1308 	int c = arch_atomic_read(v);
1309 
1310 	do {
1311 		if (unlikely(c > 0))
1312 			return false;
1313 	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));
1314 
1315 	return true;
1316 }
1317 #define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
1318 #endif
1319 
1320 #ifndef arch_atomic_dec_if_positive
1321 static __always_inline int
1322 arch_atomic_dec_if_positive(atomic_t *v)
1323 {
1324 	int dec, c = arch_atomic_read(v);
1325 
1326 	do {
1327 		dec = c - 1;
1328 		if (unlikely(dec < 0))
1329 			break;
1330 	} while (!arch_atomic_try_cmpxchg(v, &c, dec));
1331 
1332 	return dec;
1333 }
1334 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
1335 #endif
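
/*
 * Illustrative use of arch_atomic_dec_if_positive() (sketch only;
 * 'avail' is a hypothetical count of free slots): the return value is
 * the decremented count, so a negative result means the counter was
 * already <= 0 and has been left unchanged:
 *
 *	if (arch_atomic_dec_if_positive(&avail) < 0)
 *		return -EBUSY;		// nothing available
 */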
1336 
1337 #ifdef CONFIG_GENERIC_ATOMIC64
1338 #include <asm-generic/atomic64.h>
1339 #endif
1340 
1341 #ifndef arch_atomic64_read_acquire
1342 static __always_inline s64
1343 arch_atomic64_read_acquire(const atomic64_t *v)
1344 {
1345 	s64 ret;
1346 
1347 	if (__native_word(atomic64_t)) {
1348 		ret = smp_load_acquire(&(v)->counter);
1349 	} else {
1350 		ret = arch_atomic64_read(v);
1351 		__atomic_acquire_fence();
1352 	}
1353 
1354 	return ret;
1355 }
1356 #define arch_atomic64_read_acquire arch_atomic64_read_acquire
1357 #endif
1358 
1359 #ifndef arch_atomic64_set_release
1360 static __always_inline void
1361 arch_atomic64_set_release(atomic64_t *v, s64 i)
1362 {
1363 	if (__native_word(atomic64_t)) {
1364 		smp_store_release(&(v)->counter, i);
1365 	} else {
1366 		__atomic_release_fence();
1367 		arch_atomic64_set(v, i);
1368 	}
1369 }
1370 #define arch_atomic64_set_release arch_atomic64_set_release
1371 #endif
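
/*
 * For atomic64_t the __native_word() check above is what matters: on
 * 32-bit architectures sizeof(atomic64_t) exceeds the native word
 * size, so smp_load_acquire()/smp_store_release() cannot be used and
 * the fallbacks instead pair a plain arch_atomic64_read()/
 * arch_atomic64_set() with an explicit acquire/release fence.
 */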
1372 
1373 #ifndef arch_atomic64_add_return_relaxed
1374 #define arch_atomic64_add_return_acquire arch_atomic64_add_return
1375 #define arch_atomic64_add_return_release arch_atomic64_add_return
1376 #define arch_atomic64_add_return_relaxed arch_atomic64_add_return
1377 #else /* arch_atomic64_add_return_relaxed */
1378 
1379 #ifndef arch_atomic64_add_return_acquire
1380 static __always_inline s64
1381 arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
1382 {
1383 	s64 ret = arch_atomic64_add_return_relaxed(i, v);
1384 	__atomic_acquire_fence();
1385 	return ret;
1386 }
1387 #define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
1388 #endif
1389 
1390 #ifndef arch_atomic64_add_return_release
1391 static __always_inline s64
1392 arch_atomic64_add_return_release(s64 i, atomic64_t *v)
1393 {
1394 	__atomic_release_fence();
1395 	return arch_atomic64_add_return_relaxed(i, v);
1396 }
1397 #define arch_atomic64_add_return_release arch_atomic64_add_return_release
1398 #endif
1399 
1400 #ifndef arch_atomic64_add_return
1401 static __always_inline s64
1402 arch_atomic64_add_return(s64 i, atomic64_t *v)
1403 {
1404 	s64 ret;
1405 	__atomic_pre_full_fence();
1406 	ret = arch_atomic64_add_return_relaxed(i, v);
1407 	__atomic_post_full_fence();
1408 	return ret;
1409 }
1410 #define arch_atomic64_add_return arch_atomic64_add_return
1411 #endif
1412 
1413 #endif /* arch_atomic64_add_return_relaxed */
1414 
1415 #ifndef arch_atomic64_fetch_add_relaxed
1416 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
1417 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
1418 #define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
1419 #else /* arch_atomic64_fetch_add_relaxed */
1420 
1421 #ifndef arch_atomic64_fetch_add_acquire
1422 static __always_inline s64
1423 arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1424 {
1425 	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
1426 	__atomic_acquire_fence();
1427 	return ret;
1428 }
1429 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
1430 #endif
1431 
1432 #ifndef arch_atomic64_fetch_add_release
1433 static __always_inline s64
1434 arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
1435 {
1436 	__atomic_release_fence();
1437 	return arch_atomic64_fetch_add_relaxed(i, v);
1438 }
1439 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
1440 #endif
1441 
1442 #ifndef arch_atomic64_fetch_add
1443 static __always_inline s64
1444 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
1445 {
1446 	s64 ret;
1447 	__atomic_pre_full_fence();
1448 	ret = arch_atomic64_fetch_add_relaxed(i, v);
1449 	__atomic_post_full_fence();
1450 	return ret;
1451 }
1452 #define arch_atomic64_fetch_add arch_atomic64_fetch_add
1453 #endif
1454 
1455 #endif /* arch_atomic64_fetch_add_relaxed */
1456 
1457 #ifndef arch_atomic64_sub_return_relaxed
1458 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
1459 #define arch_atomic64_sub_return_release arch_atomic64_sub_return
1460 #define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
1461 #else /* arch_atomic64_sub_return_relaxed */
1462 
1463 #ifndef arch_atomic64_sub_return_acquire
1464 static __always_inline s64
1465 arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1466 {
1467 	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
1468 	__atomic_acquire_fence();
1469 	return ret;
1470 }
1471 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
1472 #endif
1473 
1474 #ifndef arch_atomic64_sub_return_release
1475 static __always_inline s64
1476 arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
1477 {
1478 	__atomic_release_fence();
1479 	return arch_atomic64_sub_return_relaxed(i, v);
1480 }
1481 #define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
1482 #endif
1483 
1484 #ifndef arch_atomic64_sub_return
1485 static __always_inline s64
1486 arch_atomic64_sub_return(s64 i, atomic64_t *v)
1487 {
1488 	s64 ret;
1489 	__atomic_pre_full_fence();
1490 	ret = arch_atomic64_sub_return_relaxed(i, v);
1491 	__atomic_post_full_fence();
1492 	return ret;
1493 }
1494 #define arch_atomic64_sub_return arch_atomic64_sub_return
1495 #endif
1496 
1497 #endif /* arch_atomic64_sub_return_relaxed */
1498 
1499 #ifndef arch_atomic64_fetch_sub_relaxed
1500 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
1501 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
1502 #define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
1503 #else /* arch_atomic64_fetch_sub_relaxed */
1504 
1505 #ifndef arch_atomic64_fetch_sub_acquire
1506 static __always_inline s64
1507 arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1508 {
1509 	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
1510 	__atomic_acquire_fence();
1511 	return ret;
1512 }
1513 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
1514 #endif
1515 
1516 #ifndef arch_atomic64_fetch_sub_release
1517 static __always_inline s64
1518 arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1519 {
1520 	__atomic_release_fence();
1521 	return arch_atomic64_fetch_sub_relaxed(i, v);
1522 }
1523 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
1524 #endif
1525 
1526 #ifndef arch_atomic64_fetch_sub
1527 static __always_inline s64
1528 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
1529 {
1530 	s64 ret;
1531 	__atomic_pre_full_fence();
1532 	ret = arch_atomic64_fetch_sub_relaxed(i, v);
1533 	__atomic_post_full_fence();
1534 	return ret;
1535 }
1536 #define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
1537 #endif
1538 
1539 #endif /* arch_atomic64_fetch_sub_relaxed */
1540 
1541 #ifndef arch_atomic64_inc
1542 static __always_inline void
1543 arch_atomic64_inc(atomic64_t *v)
1544 {
1545 	arch_atomic64_add(1, v);
1546 }
1547 #define arch_atomic64_inc arch_atomic64_inc
1548 #endif
1549 
1550 #ifndef arch_atomic64_inc_return_relaxed
1551 #ifdef arch_atomic64_inc_return
1552 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
1553 #define arch_atomic64_inc_return_release arch_atomic64_inc_return
1554 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
1555 #endif /* arch_atomic64_inc_return */
1556 
1557 #ifndef arch_atomic64_inc_return
1558 static __always_inline s64
1559 arch_atomic64_inc_return(atomic64_t *v)
1560 {
1561 	return arch_atomic64_add_return(1, v);
1562 }
1563 #define arch_atomic64_inc_return arch_atomic64_inc_return
1564 #endif
1565 
1566 #ifndef arch_atomic64_inc_return_acquire
1567 static __always_inline s64
1568 arch_atomic64_inc_return_acquire(atomic64_t *v)
1569 {
1570 	return arch_atomic64_add_return_acquire(1, v);
1571 }
1572 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1573 #endif
1574 
1575 #ifndef arch_atomic64_inc_return_release
1576 static __always_inline s64
1577 arch_atomic64_inc_return_release(atomic64_t *v)
1578 {
1579 	return arch_atomic64_add_return_release(1, v);
1580 }
1581 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1582 #endif
1583 
1584 #ifndef arch_atomic64_inc_return_relaxed
1585 static __always_inline s64
1586 arch_atomic64_inc_return_relaxed(atomic64_t *v)
1587 {
1588 	return arch_atomic64_add_return_relaxed(1, v);
1589 }
1590 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
1591 #endif
1592 
1593 #else /* arch_atomic64_inc_return_relaxed */
1594 
1595 #ifndef arch_atomic64_inc_return_acquire
1596 static __always_inline s64
1597 arch_atomic64_inc_return_acquire(atomic64_t *v)
1598 {
1599 	s64 ret = arch_atomic64_inc_return_relaxed(v);
1600 	__atomic_acquire_fence();
1601 	return ret;
1602 }
1603 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1604 #endif
1605 
1606 #ifndef arch_atomic64_inc_return_release
1607 static __always_inline s64
1608 arch_atomic64_inc_return_release(atomic64_t *v)
1609 {
1610 	__atomic_release_fence();
1611 	return arch_atomic64_inc_return_relaxed(v);
1612 }
1613 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1614 #endif
1615 
1616 #ifndef arch_atomic64_inc_return
1617 static __always_inline s64
1618 arch_atomic64_inc_return(atomic64_t *v)
1619 {
1620 	s64 ret;
1621 	__atomic_pre_full_fence();
1622 	ret = arch_atomic64_inc_return_relaxed(v);
1623 	__atomic_post_full_fence();
1624 	return ret;
1625 }
1626 #define arch_atomic64_inc_return arch_atomic64_inc_return
1627 #endif
1628 
1629 #endif /* arch_atomic64_inc_return_relaxed */
1630 
1631 #ifndef arch_atomic64_fetch_inc_relaxed
1632 #ifdef arch_atomic64_fetch_inc
1633 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
1634 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
1635 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
1636 #endif /* arch_atomic64_fetch_inc */
1637 
1638 #ifndef arch_atomic64_fetch_inc
1639 static __always_inline s64
1640 arch_atomic64_fetch_inc(atomic64_t *v)
1641 {
1642 	return arch_atomic64_fetch_add(1, v);
1643 }
1644 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1645 #endif
1646 
1647 #ifndef arch_atomic64_fetch_inc_acquire
1648 static __always_inline s64
1649 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1650 {
1651 	return arch_atomic64_fetch_add_acquire(1, v);
1652 }
1653 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1654 #endif
1655 
1656 #ifndef arch_atomic64_fetch_inc_release
1657 static __always_inline s64
1658 arch_atomic64_fetch_inc_release(atomic64_t *v)
1659 {
1660 	return arch_atomic64_fetch_add_release(1, v);
1661 }
1662 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1663 #endif
1664 
1665 #ifndef arch_atomic64_fetch_inc_relaxed
1666 static __always_inline s64
1667 arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
1668 {
1669 	return arch_atomic64_fetch_add_relaxed(1, v);
1670 }
1671 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
1672 #endif
1673 
1674 #else /* arch_atomic64_fetch_inc_relaxed */
1675 
1676 #ifndef arch_atomic64_fetch_inc_acquire
1677 static __always_inline s64
1678 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1679 {
1680 	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
1681 	__atomic_acquire_fence();
1682 	return ret;
1683 }
1684 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1685 #endif
1686 
1687 #ifndef arch_atomic64_fetch_inc_release
1688 static __always_inline s64
1689 arch_atomic64_fetch_inc_release(atomic64_t *v)
1690 {
1691 	__atomic_release_fence();
1692 	return arch_atomic64_fetch_inc_relaxed(v);
1693 }
1694 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1695 #endif
1696 
1697 #ifndef arch_atomic64_fetch_inc
1698 static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t * v)1699 arch_atomic64_fetch_inc(atomic64_t *v)
1700 {
1701 	s64 ret;
1702 	__atomic_pre_full_fence();
1703 	ret = arch_atomic64_fetch_inc_relaxed(v);
1704 	__atomic_post_full_fence();
1705 	return ret;
1706 }
1707 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1708 #endif
1709 
1710 #endif /* arch_atomic64_fetch_inc_relaxed */
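
/*
 * Increment and decrement fallbacks are plain add/sub specialisations: where
 * the architecture does not provide a dedicated inc/dec operation, the
 * fallback simply calls the corresponding add/sub with a constant 1, as in
 * arch_atomic64_fetch_inc() above and arch_atomic64_dec() below.
 */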

#ifndef arch_atomic64_dec
static __always_inline void
arch_atomic64_dec(atomic64_t *v)
{
	arch_atomic64_sub(1, v);
}
#define arch_atomic64_dec arch_atomic64_dec
#endif

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	return arch_atomic64_sub_return_acquire(1, v);
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	return arch_atomic64_sub_return_release(1, v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return_relaxed
static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_sub_return_relaxed(1, v);
}
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
#endif

#else /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_dec_return_relaxed(v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#endif /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_fetch_dec_relaxed
#ifdef arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
#endif /* arch_atomic64_fetch_dec */

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	return arch_atomic64_fetch_sub(1, v);
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_acquire(1, v);
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_release(1, v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec_relaxed
static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(1, v);
}
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
#endif

#else /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#endif /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_andnot
static __always_inline void
arch_atomic64_andnot(s64 i, atomic64_t *v)
{
	arch_atomic64_and(~i, v);
}
#define arch_atomic64_andnot arch_atomic64_andnot
#endif
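
/*
 * arch_atomic64_andnot(i, v) atomically clears in *v the bits that are set
 * in i, i.e. it is and(~i, v). Illustrative use, with FLAG_PENDING standing
 * in for whatever bit mask a caller defines:
 *
 *	arch_atomic64_andnot(FLAG_PENDING, &flags);
 *
 * which clears FLAG_PENDING while leaving all other bits untouched.
 */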

#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif /* arch_atomic64_fetch_andnot */

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */
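
/*
 * arch_atomic64_xchg(v, i) unconditionally stores i into *v and returns the
 * value that was there before, as a single atomic operation. The cmpxchg and
 * try_cmpxchg operations below only perform the store when the current value
 * matches the expected one.
 */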

#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif /* arch_atomic64_try_cmpxchg */

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif /* arch_atomic64_try_cmpxchg_relaxed */
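
/*
 * arch_atomic64_try_cmpxchg(v, &old, new) returns true and installs new when
 * *v == old; on failure it returns false and updates old to the value that
 * was actually observed, so a retry loop needs no extra read. This is the
 * idiom used by the conditional operations below, illustratively:
 *
 *	s64 old = arch_atomic64_read(v);
 *	do {
 *		new = f(old);
 *	} while (!arch_atomic64_try_cmpxchg(v, &old, new));
 *
 * where f() stands for whatever new value the caller wants to compute.
 */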

#ifndef arch_atomic64_sub_and_test
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif

#ifndef arch_atomic64_dec_and_test
/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif

#ifndef arch_atomic64_inc_and_test
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif

#ifndef arch_atomic64_add_negative
/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif
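
/*
 * The *_and_test and add_negative helpers fold the comparison into the
 * atomic update, so callers never need a separate read of the counter.
 * A typical, purely illustrative reference-count style use:
 *
 *	if (arch_atomic64_dec_and_test(&obj->refs))
 *		free_obj(obj);
 *
 * obj and free_obj() are hypothetical names, not anything defined here.
 */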

#ifndef arch_atomic64_fetch_add_unless
/**
 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif
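
/*
 * Because arch_atomic64_fetch_add_unless() returns the value of *v observed
 * before any update, a caller can tell whether the add actually happened by
 * comparing the return value against @u; that is exactly what
 * arch_atomic64_add_unless() below does.
 */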

#ifndef arch_atomic64_add_unless
/**
 * arch_atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif

#ifndef arch_atomic64_inc_not_zero
/**
 * arch_atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif
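
/*
 * arch_atomic64_inc_not_zero() is the usual building block for "take a
 * reference only if the object is still live" patterns, where a count of
 * zero means the object is on its way out and must not be revived. An
 * illustrative shape, with obj a hypothetical structure:
 *
 *	if (!arch_atomic64_inc_not_zero(&obj->refs))
 *		return NULL;
 *
 * i.e. the lookup fails rather than resurrecting a dying object.
 */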

#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif

#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif

#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif
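
/*
 * arch_atomic64_dec_if_positive() only performs the decrement when the
 * result would remain non-negative, and returns the decremented value; a
 * negative return therefore means the counter was left unchanged. Callers
 * typically test the return value for "< 0" to detect that case.
 */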

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// b5e87bdd5ede61470c29f7a7e4de781af3770f09