/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are slower than
 * regular operations.
 *
 * Copyright (C) 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_AVR32_CMPXCHG_H
#define __ASM_AVR32_CMPXCHG_H

#define xchg(ptr,x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

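/*
 * Usage sketch (illustrative only; pending_work and process() are
 * hypothetical names, not part of this header): xchg() atomically
 * stores a new value and returns the previous one, e.g. to claim a
 * pointer exactly once:
 *
 *	struct work *prev = xchg(&pending_work, NULL);
 *	if (prev)
 *		process(prev);
 */
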
extern void __xchg_called_with_bad_pointer(void);

/*
 * The AVR32 xchg instruction atomically swaps a register with a word
 * in memory, so a 32-bit exchange needs no retry loop.
 */
static inline unsigned long xchg_u32(u32 val, volatile u32 *m)
{
	u32 ret;

	asm volatile("xchg %[ret], %[m], %[val]"
			: [ret] "=&r"(ret), "=m"(*m)
			: "m"(*m), [m] "r"(m), [val] "r"(val)
			: "memory");
	return ret;
}

static inline unsigned long __xchg(unsigned long x,
				   volatile void *ptr,
				   int size)
{
	switch (size) {
	case 4:
		return xchg_u32(x, ptr);
	default:
		__xchg_called_with_bad_pointer();
		return x;
	}
}

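/*
 * Only 32-bit operands are supported. __xchg_called_with_bad_pointer()
 * is deliberately never defined, so an xchg() on any other size
 * compiles but fails at link time. For example, this (hypothetical)
 * use would not link:
 *
 *	u16 half;
 *	xchg(&half, 1);		// no 2-byte case, so linking fails
 */
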
static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	__u32 ret;

	/*
	 * ssrf 5 sets the L (lock) flag in the status register; any
	 * interrupt or exception clears it again. stcond performs the
	 * store only if L is still set, so the loop retries whenever
	 * the sequence was interrupted.
	 */
	asm volatile(
		"1:	ssrf	5\n"			/* set the lock flag */
		"	ld.w	%[ret], %[m]\n"		/* load current value */
		"	cp.w	%[ret], %[old]\n"	/* compare with 'old' */
		"	brne	2f\n"			/* mismatch: don't store */
		"	stcond	%[m], %[new]\n"		/* store iff still locked */
		"	brne	1b\n"			/* store failed: retry */
		"2:\n"
		: [ret] "=&r"(ret), [m] "=m"(*m)
		: "m"(m), [old] "ir"(old), [new] "r"(new)
		: "memory", "cc");
	return ret;
}

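/*
 * Caller-visible semantics, as a (non-atomic) C sketch: the old value
 * is always returned, and the store happens only when it matched 'old'.
 *
 *	unsigned long ret = *m;
 *	if (ret == old)
 *		*m = new;
 *	return ret;
 */
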
extern unsigned long __cmpxchg_u64_unsupported_on_32bit_kernels(
	volatile int *m, unsigned long old, unsigned long new);
#define __cmpxchg_u64 __cmpxchg_u64_unsupported_on_32bit_kernels

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	case 8:
		return __cmpxchg_u64(ptr, old, new);
	}

	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, old, new)						\
	((typeof(*(ptr)))__cmpxchg((ptr), (unsigned long)(old),	\
				   (unsigned long)(new),		\
				   sizeof(*(ptr))))

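/*
 * Usage sketch (illustrative only; 'refcount' is a hypothetical
 * variable): the classic lock-free update loop behind the "resource
 * counting etc." mentioned at the top of this file - reread and retry
 * until the compare-and-exchange succeeds:
 *
 *	int o, n;
 *	do {
 *		o = refcount;
 *		n = o + 1;
 *	} while (cmpxchg(&refcount, o, n) != o);
 */
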
#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

#define cmpxchg_local(ptr, old, new)					\
	((typeof(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(old),	\
					 (unsigned long)(new),		\
					 sizeof(*(ptr))))

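/*
 * Note: cmpxchg_local() only has to be atomic with respect to the
 * current CPU, which is why sizes other than 4 can fall back to the
 * generic __cmpxchg_local_generic() helper (which briefly disables
 * interrupts) instead of failing at link time. Typical use is per-CPU
 * data, e.g. (hypothetical name):
 *
 *	old = cmpxchg_local(&__get_cpu_var(counter), o, n);
 */
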
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#endif /* __ASM_AVR32_CMPXCHG_H */