// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

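/*
 * atomic_long_t is the machine-word-sized atomic type: it aliases
 * atomic64_t on 64-bit kernels and atomic_t on 32-bit kernels, so the
 * wrappers below simply forward to the matching fixed-width operations.
 */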
#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
#endif

#ifdef CONFIG_64BIT

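/* 64-bit: sizeof(long) == sizeof(s64), so forward to arch_atomic64_*(). */
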
static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
	return arch_atomic64_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
	return arch_atomic64_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
	arch_atomic64_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
	arch_atomic64_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
	arch_atomic64_add(i, v);
}

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
	arch_atomic64_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
	arch_atomic64_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
	return arch_atomic64_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return arch_atomic64_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
	return arch_atomic64_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return arch_atomic64_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
	arch_atomic64_dec(v);
}

static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
	return arch_atomic64_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return arch_atomic64_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
	return arch_atomic64_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return arch_atomic64_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_relaxed(v);
}

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
	arch_atomic64_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
	arch_atomic64_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
	arch_atomic64_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
	arch_atomic64_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}

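/*
 * try_cmpxchg() updates *old on failure; long and s64 have identical size
 * and representation on 64-bit kernels, so the (s64 *) casts are safe.
 */
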
static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
	return arch_atomic64_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic64_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return arch_atomic64_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return arch_atomic64_dec_unless_positive(v);
}

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return arch_atomic64_dec_if_positive(v);
}

#else /* CONFIG_64BIT */

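/* 32-bit: sizeof(long) == sizeof(int), so forward to arch_atomic_*(). */
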
static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
	return arch_atomic_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
	return arch_atomic_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
	arch_atomic_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
	arch_atomic_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
	arch_atomic_add(i, v);
}

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
	return arch_atomic_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
	arch_atomic_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
	arch_atomic_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
	return arch_atomic_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return arch_atomic_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
	return arch_atomic_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return arch_atomic_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
	return arch_atomic_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
	arch_atomic_dec(v);
}

static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
	return arch_atomic_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return arch_atomic_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
	return arch_atomic_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return arch_atomic_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
	return arch_atomic_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_relaxed(v);
}

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
	arch_atomic_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
	arch_atomic_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
	arch_atomic_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
	arch_atomic_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
	return arch_atomic_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}

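/*
 * As in the 64-bit case: long and int have identical size and
 * representation on 32-bit kernels, so the (int *) casts are safe.
 */
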
static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_acquire(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_release(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return arch_atomic_inc_not_zero(v);
}

static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return arch_atomic_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return arch_atomic_dec_unless_positive(v);
}

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return arch_atomic_dec_if_positive(v);
}

#endif /* CONFIG_64BIT */
#endif /* _LINUX_ATOMIC_LONG_H */
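
/*
 * Illustrative sketch (editorial addition, not produced by the generator):
 * a minimal reference counter built on the operations above. Ordinary
 * kernel code would normally use the instrumented atomic_long_*() wrappers
 * rather than calling the arch_*() variants directly.
 *
 *	static atomic_long_t nr_refs = ATOMIC_LONG_INIT(1);
 *
 *	static inline void ref_get(void)
 *	{
 *		arch_atomic_long_inc(&nr_refs);
 *	}
 *
 *	static inline bool ref_put(void)
 *	{
 *		// true when the count hits zero and the object may be freed
 *		return arch_atomic_long_dec_and_test(&nr_refs);
 *	}
 */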
// e8f0e08ff072b74d180eabe2ad001282b38c2c88