/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H

/*
 * Note: if you use set_64bit(), __cmpxchg64(), or their variants,
 *       you need to test for the feature in boot_cpu_data.
 */

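/*
 * For illustration only (hypothetical caller, not part of this header):
 * testing the feature bit before relying on the native instruction
 * looks like
 *
 *	if (boot_cpu_has(X86_FEATURE_CX8))
 *		old = arch_cmpxchg64(&v, old, new);
 *
 * system_has_cmpxchg64() at the end of this header wraps exactly this
 * test.
 */
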
#ifdef CONFIG_X86_CMPXCHG64
#define arch_cmpxchg64(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
					 (unsigned long long)(n)))
#define arch_cmpxchg64_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o), \
					       (unsigned long long)(n)))
#define arch_try_cmpxchg64(ptr, po, n)					\
	__try_cmpxchg64((ptr), (unsigned long long *)(po), \
			(unsigned long long)(n))
#endif

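/*
 * cmpxchg8b compares the 64-bit value in %edx:%eax against *ptr; on a
 * match it stores %ecx:%ebx into *ptr and sets ZF, otherwise it loads
 * *ptr into %edx:%eax and clears ZF.  In the helpers below, the "A"
 * constraint names the %edx:%eax register pair, and "b"/"c" carry the
 * low and high halves of the new value.
 */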
static inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	u64 prev;
	asm volatile(LOCK_PREFIX "cmpxchg8b %1"
		     : "=A" (prev),
		       "+m" (*ptr)
		     : "b" ((u32)new),
		       "c" ((u32)(new >> 32)),
		       "0" (old)
		     : "memory");
	return prev;
}

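/*
 * Non-LOCKed variant: only safe when the location cannot be updated
 * concurrently by another CPU.  Being a single instruction, it is
 * still atomic on this CPU, e.g. with respect to interrupts.
 */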
static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	u64 prev;
	asm volatile("cmpxchg8b %1"
		     : "=A" (prev),
		       "+m" (*ptr)
		     : "b" ((u32)new),
		       "c" ((u32)(new >> 32)),
		       "0" (old)
		     : "memory");
	return prev;
}

static inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *pold, u64 new)
{
	bool success;
	u64 old = *pold;
	asm volatile(LOCK_PREFIX "cmpxchg8b %[ptr]"
		     CC_SET(z)
		     : CC_OUT(z) (success),
		       [ptr] "+m" (*ptr),
		       "+A" (old)
		     : "b" ((u32)new),
		       "c" ((u32)(new >> 32))
		     : "memory");

	if (unlikely(!success))
		*pold = old;
	return success;
}
65 
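/*
 * Usage sketch (illustrative only; "counter" is a hypothetical u64
 * variable): the try_cmpxchg form is meant for CAS loops, where a
 * failed attempt has already refreshed the expected value:
 *
 *	u64 new, old = READ_ONCE(counter);
 *
 *	do {
 *		new = old + 1;
 *	} while (!arch_try_cmpxchg64(&counter, &old, new));
 *
 * On failure, arch_try_cmpxchg64() writes the current contents of
 * *counter back into old, so the loop does not need to re-read it.
 */
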
#ifndef CONFIG_X86_CMPXCHG64
/*
 * When building a kernel capable of running on the 80386 and 80486,
 * which lack the cmpxchg8b instruction, it may be necessary to
 * emulate it.
 */

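/*
 * The macros below use alternative_io() to patch in the native
 * cmpxchg8b at boot when the CPU advertises X86_FEATURE_CX8, and
 * otherwise fall back to a call to cmpxchg8b_emu
 * (arch/x86/lib/cmpxchg8b_emu.S).  The emulation follows the
 * instruction's own register usage: pointer in %esi, old value in
 * %edx:%eax, new value in %ecx:%ebx, previous memory contents
 * returned in %edx:%eax -- hence the "S", "0"/"=A", "b" and "c"
 * constraints.
 */
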
#define arch_cmpxchg64(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (o);				\
	__typeof__(*(ptr)) __new = (n);				\
	alternative_io(LOCK_PREFIX_HERE				\
			"call cmpxchg8b_emu",			\
			"lock; cmpxchg8b (%%esi)",		\
		       X86_FEATURE_CX8,				\
		       "=A" (__ret),				\
		       "S" ((ptr)), "0" (__old),		\
		       "b" ((unsigned int)__new),		\
		       "c" ((unsigned int)(__new>>32))		\
		       : "memory");				\
	__ret; })

#define arch_cmpxchg64_local(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __ret;				\
	__typeof__(*(ptr)) __old = (o);				\
	__typeof__(*(ptr)) __new = (n);				\
	alternative_io("call cmpxchg8b_emu",			\
		       "cmpxchg8b (%%esi)",			\
		       X86_FEATURE_CX8,				\
		       "=A" (__ret),				\
		       "S" ((ptr)), "0" (__old),		\
		       "b" ((unsigned int)__new),		\
		       "c" ((unsigned int)(__new>>32))		\
		       : "memory");				\
	__ret; })

#endif

#define system_has_cmpxchg64()		boot_cpu_has(X86_FEATURE_CX8)

#endif /* _ASM_X86_CMPXCHG_32_H */