#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H

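/*
 * Atomically exchange the 32-bit word at *m with val and return the old
 * value.  movli.l/movco.l form an LL/SC pair: movli.l loads the word and
 * opens a reservation, movco.l stores only if the reservation still holds
 * (T=1), and "bf 1b" retries the whole sequence if another store
 * intervened.  The trailing synco orders the exchange against subsequent
 * memory accesses.
 */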
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
        unsigned long retval;
        unsigned long tmp;

        __asm__ __volatile__ (
                "1:                                     \n\t"
                "movli.l        @%2, %0 ! xchg_u32      \n\t"
                "mov            %0, %1                  \n\t"
                "mov            %3, %0                  \n\t"
                "movco.l        %0, @%2                 \n\t"
                "bf             1b                      \n\t"
                "synco                                  \n\t"
                : "=&z" (tmp), "=&r" (retval)
                : "r" (m), "r" (val)
                : "t", "memory"
        );

        return retval;
}

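/*
 * Byte-sized exchange.  movli.l/movco.l only operate on aligned 32-bit
 * words, so as written this sequence loads a full word from *m, returns
 * that whole word, and stores (val & 0xff) back as a whole word, clearing
 * the three neighbouring bytes.  Later kernels replaced this with a
 * cmpxchg-based read-modify-write of the containing word.
 */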
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
        unsigned long retval;
        unsigned long tmp;

        __asm__ __volatile__ (
                "1:                                     \n\t"
                "movli.l        @%2, %0 ! xchg_u8       \n\t"
                "mov            %0, %1                  \n\t"
                "mov            %3, %0                  \n\t"
                "movco.l        %0, @%2                 \n\t"
                "bf             1b                      \n\t"
                "synco                                  \n\t"
                : "=&z" (tmp), "=&r" (retval)
                : "r" (m), "r" (val & 0xff)
                : "t", "memory"
        );

        return retval;
}

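/*
 * Atomic compare-and-exchange: if the word at *m equals old, replace it
 * with new; in either case return the value that was read.  On a mismatch
 * the movco.l stores the unmodified word back, which completes the LL/SC
 * pair while leaving *m logically unchanged.
 */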
static inline unsigned long
__cmpxchg_u32(volatile int *m, unsigned long old, unsigned long new)
{
        unsigned long retval;
        unsigned long tmp;

        __asm__ __volatile__ (
                "1:                                     \n\t"
                "movli.l        @%2, %0 ! __cmpxchg_u32 \n\t"
                "mov            %0, %1                  \n\t"
                "cmp/eq         %1, %3                  \n\t"
                "bf             2f                      \n\t"
                "mov            %4, %0                  \n\t"
                "2:                                     \n\t"
                "movco.l        %0, @%2                 \n\t"
                "bf             1b                      \n\t"
                "synco                                  \n\t"
                : "=&z" (tmp), "=&r" (retval)
                : "r" (m), "r" (old), "r" (new)
                : "t", "memory"
        );

        return retval;
}

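/*
 * Usage sketch (hypothetical caller, not part of this header): the classic
 * cmpxchg retry loop, atomically adding to a counter by re-reading until
 * the compare-and-exchange observes an unchanged value.  Callers normally
 * reach these primitives through the generic xchg()/cmpxchg() wrappers
 * rather than directly.
 *
 *	static inline void atomic_add_u32(volatile int *p, int inc)
 *	{
 *		unsigned long old;
 *
 *		do {
 *			old = *p;
 *		} while (__cmpxchg_u32(p, old, old + inc) != old);
 *	}
 */
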
#endif /* __ASM_SH_CMPXCHG_LLSC_H */