#ifndef _ASM_X86_HWEIGHT_H
#define _ASM_X86_HWEIGHT_H

/*
 * POPCNT is emitted as raw opcode bytes (rather than a mnemonic) and the
 * asm input/output register letters are abstracted behind REG_IN/REG_OUT,
 * since the hard-coded encodings below fix which registers are used.
 */
#ifdef CONFIG_64BIT
/* popcnt %edi, %eax -- redundant REX prefix for alignment */
#define POPCNT32 ".byte 0xf3,0x40,0x0f,0xb8,0xc7"
/* popcnt %rdi, %rax */
#define POPCNT64 ".byte 0xf3,0x48,0x0f,0xb8,0xc7"
/* 64-bit ABI: operand arrives in %rdi ("D"), result lands in %rax ("a") */
#define REG_IN "D"
#define REG_OUT "a"
#else
/* popcnt %eax, %eax */
#define POPCNT32 ".byte 0xf3,0x0f,0xb8,0xc0"
/* 32-bit: operand and result both use %eax ("a") */
#define REG_IN "a"
#define REG_OUT "a"
#endif

/*
 * __sw_hweightXX are called from within the alternatives below
 * and callee-clobbered registers need to be taken care of. See
 * ARCH_HWEIGHT_CFLAGS in <arch/x86/Kconfig> for the respective
 * compiler switches.
 */
/*
 * __arch_hweight32 - count the set bits in a 32-bit word
 * @w: the word to weigh
 *
 * Defaults to a call to the out-of-line __sw_hweight32() fallback; on
 * CPUs with X86_FEATURE_POPCNT the alternatives mechanism patches that
 * call site into the inline POPCNT32 byte sequence.  The REG_IN/REG_OUT
 * constraints must match the registers hard-coded in POPCNT32.
 */
static inline unsigned int __arch_hweight32(unsigned int w)
{
	unsigned int res = 0;

	asm (ALTERNATIVE("call __sw_hweight32", POPCNT32, X86_FEATURE_POPCNT)
		     : "="REG_OUT (res)
		     : REG_IN (w));

	return res;
}
34 
/*
 * __arch_hweight16 - count the set bits in a 16-bit quantity
 * @w: value whose low 16 bits are weighed
 */
static inline unsigned int __arch_hweight16(unsigned int w)
{
	/* Keep only the low half-word, then reuse the 32-bit weigher. */
	unsigned int lo16 = w & 0xffff;

	return __arch_hweight32(lo16);
}
39 
/*
 * __arch_hweight8 - count the set bits in an 8-bit quantity
 * @w: value whose low byte is weighed
 */
static inline unsigned int __arch_hweight8(unsigned int w)
{
	/* Keep only the low byte, then reuse the 32-bit weigher. */
	unsigned int lo8 = w & 0xff;

	return __arch_hweight32(lo8);
}
44 
/*
 * __arch_hweight64 - count the set bits in a 64-bit word
 * @w: the word to weigh
 *
 * On 32-bit kernels there is no 64-bit POPCNT, so the weight is the sum
 * of the weights of the two 32-bit halves.  On 64-bit kernels this
 * mirrors __arch_hweight32(): a call to __sw_hweight64() that the
 * alternatives mechanism patches into POPCNT64 when the CPU has
 * X86_FEATURE_POPCNT.
 */
static inline unsigned long __arch_hweight64(__u64 w)
{
	/* NOTE(review): unused on the CONFIG_X86_32 early-return path. */
	unsigned long res = 0;

#ifdef CONFIG_X86_32
	return  __arch_hweight32((u32)w) +
		__arch_hweight32((u32)(w >> 32));
#else
	asm (ALTERNATIVE("call __sw_hweight64", POPCNT64, X86_FEATURE_POPCNT)
		     : "="REG_OUT (res)
		     : REG_IN (w));
#endif /* CONFIG_X86_32 */

	return res;
}
60 
61 #endif
62