#ifndef _LINUX_BYTEORDER_SWAB_H
#define _LINUX_BYTEORDER_SWAB_H

/*
 * linux/byteorder/swab.h
 * Byte-swapping, independently of CPU endianness
 *	swabXX[ps]?(foo)
 *
 * Francois-Rene Rideau <fare@tunes.org> 19971205
 *    separated swab functions from cpu_to_XX,
 *    to clean up support for bizarre-endian architectures.
 *
 * See asm-i386/byteorder.h and similar headers for examples of how to
 * provide architecture-dependent optimized versions.
 *
 */
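/*
 * The three call forms, e.g. for 16-bit values with x = 0x1234:
 *	swab16(x)	returns the swapped value 0x3412
 *	swab16p(&x)	returns 0x3412, reading x through a pointer
 *	swab16s(&x)	swaps x in place, so x becomes 0x3412
 */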

#include <linux/compiler.h>

/* casts are necessary for constants, because we never know for sure
 * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
 */
#define ___swab16(x) \
({ \
	__u16 __x = (x); \
	((__u16)( \
		(((__u16)(__x) & (__u16)0x00ffU) << 8) | \
		(((__u16)(__x) & (__u16)0xff00U) >> 8) )); \
})
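/* A worked example: ___swab16(0x1234) == 0x3412; the two bytes trade places. */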

#define ___swab24(x) \
({ \
	__u32 __x = (x); \
	((__u32)( \
		((__x & (__u32)0x000000ffUL) << 16) | \
		 (__x & (__u32)0x0000ff00UL)        | \
		((__x & (__u32)0x00ff0000UL) >> 16) )); \
})
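/*
 * A worked example: ___swab24(0x123456) == 0x563412; the outer bytes
 * trade places while the middle byte stays where it is.
 */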

#define ___swab32(x) \
({ \
	__u32 __x = (x); \
	((__u32)( \
		(((__u32)(__x) & (__u32)0x000000ffUL) << 24) | \
		(((__u32)(__x) & (__u32)0x0000ff00UL) <<  8) | \
		(((__u32)(__x) & (__u32)0x00ff0000UL) >>  8) | \
		(((__u32)(__x) & (__u32)0xff000000UL) >> 24) )); \
})
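/* A worked example: ___swab32(0x12345678) == 0x78563412. */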

#define ___swab64(x) \
({ \
	__u64 __x = (x); \
	((__u64)( \
		(__u64)(((__u64)(__x) & (__u64)0x00000000000000ffULL) << 56) | \
		(__u64)(((__u64)(__x) & (__u64)0x000000000000ff00ULL) << 40) | \
		(__u64)(((__u64)(__x) & (__u64)0x0000000000ff0000ULL) << 24) | \
		(__u64)(((__u64)(__x) & (__u64)0x00000000ff000000ULL) <<  8) | \
		(__u64)(((__u64)(__x) & (__u64)0x000000ff00000000ULL) >>  8) | \
		(__u64)(((__u64)(__x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
		(__u64)(((__u64)(__x) & (__u64)0x00ff000000000000ULL) >> 40) | \
		(__u64)(((__u64)(__x) & (__u64)0xff00000000000000ULL) >> 56) )); \
})
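/* A worked example: ___swab64(0x0123456789abcdefULL) == 0xefcdab8967452301ULL. */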

#define ___constant_swab16(x) \
	((__u16)( \
		(((__u16)(x) & (__u16)0x00ffU) << 8) | \
		(((__u16)(x) & (__u16)0xff00U) >> 8) ))
#define ___constant_swab24(x) \
	((__u32)( \
		(((__u32)(x) & (__u32)0x000000ffU) << 16) | \
		(((__u32)(x) & (__u32)0x0000ff00U))	  | \
		(((__u32)(x) & (__u32)0x00ff0000U) >> 16) ))
#define ___constant_swab32(x) \
	((__u32)( \
		(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
		(((__u32)(x) & (__u32)0x0000ff00UL) <<  8) | \
		(((__u32)(x) & (__u32)0x00ff0000UL) >>  8) | \
		(((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
#define ___constant_swab64(x) \
	((__u64)( \
		(__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
		(__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
		(__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) <<  8) | \
		(__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >>  8) | \
		(__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
		(__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))
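/*
 * The ___constant_* variants avoid GCC statement expressions, so they
 * remain valid where a constant expression is required, e.g.:
 *	static const __u16 magic_le = ___constant_swab16(0x1234);
 */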

/*
 * provide defaults when no architecture-specific optimization is detected
 */
#ifndef __arch__swab16
#  define __arch__swab16(x) ({ __u16 __tmp = (x) ; ___swab16(__tmp); })
#endif
#ifndef __arch__swab24
#  define __arch__swab24(x) ({ __u32 __tmp = (x) ; ___swab24(__tmp); })
#endif
#ifndef __arch__swab32
#  define __arch__swab32(x) ({ __u32 __tmp = (x) ; ___swab32(__tmp); })
#endif
#ifndef __arch__swab64
#  define __arch__swab64(x) ({ __u64 __tmp = (x) ; ___swab64(__tmp); })
#endif
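/*
 * An architecture header defines its override before including this file,
 * e.g. (a sketch modelled on the i386 bswap instruction; see
 * asm-i386/byteorder.h for the real version):
 *
 *	#define __arch__swab32(x) \
 *	({ __u32 __v = (x); __asm__("bswap %0" : "=r" (__v) : "0" (__v)); __v; })
 */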

#ifndef __arch__swab16p
#  define __arch__swab16p(x) __arch__swab16(*(x))
#endif
#ifndef __arch__swab24p
#  define __arch__swab24p(x) __arch__swab24(*(x))
#endif
#ifndef __arch__swab32p
#  define __arch__swab32p(x) __arch__swab32(*(x))
#endif
#ifndef __arch__swab64p
#  define __arch__swab64p(x) __arch__swab64(*(x))
#endif

#ifndef __arch__swab16s
#  define __arch__swab16s(x) do { *(x) = __arch__swab16p((x)); } while (0)
#endif
#ifndef __arch__swab24s
#  define __arch__swab24s(x) do { *(x) = __arch__swab24p((x)); } while (0)
#endif
#ifndef __arch__swab32s
#  define __arch__swab32s(x) do { *(x) = __arch__swab32p((x)); } while (0)
#endif
#ifndef __arch__swab64s
#  define __arch__swab64s(x) do { *(x) = __arch__swab64p((x)); } while (0)
#endif


/*
 * Allow constant folding
 */
#if defined(__GNUC__) && (__GNUC__ >= 2) && defined(__OPTIMIZE__)
#  define __swab16(x) \
(__builtin_constant_p((__u16)(x)) ? \
 ___swab16((x)) : \
 __fswab16((x)))
#  define __swab24(x) \
(__builtin_constant_p((__u32)(x)) ? \
 ___swab24((x)) : \
 __fswab24((x)))
#  define __swab32(x) \
(__builtin_constant_p((__u32)(x)) ? \
 ___swab32((x)) : \
 __fswab32((x)))
#  define __swab64(x) \
(__builtin_constant_p((__u64)(x)) ? \
 ___swab64((x)) : \
 __fswab64((x)))
#else
#  define __swab16(x) __fswab16(x)
#  define __swab24(x) __fswab24(x)
#  define __swab32(x) __fswab32(x)
#  define __swab64(x) __fswab64(x)
#endif /* OPTIMIZE */
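/*
 * e.g. __swab32(0x12345678) is folded to the constant 0x78563412 at
 * compile time, while __swab32(some_variable) goes through __fswab32().
 */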


static __inline__ __attribute_const__ __u16 __fswab16(__u16 x)
{
	return __arch__swab16(x);
}
static __inline__ __u16 __swab16p(__u16 *x)
{
	return __arch__swab16p(x);
}
static __inline__ void __swab16s(__u16 *addr)
{
	__arch__swab16s(addr);
}

static __inline__ __attribute_const__ __u32 __fswab24(__u32 x)
{
	return __arch__swab24(x);
}
static __inline__ __u32 __swab24p(__u32 *x)
{
	return __arch__swab24p(x);
}
static __inline__ void __swab24s(__u32 *addr)
{
	__arch__swab24s(addr);
}

static __inline__ __attribute_const__ __u32 __fswab32(__u32 x)
{
	return __arch__swab32(x);
}
static __inline__ __u32 __swab32p(__u32 *x)
{
	return __arch__swab32p(x);
}
static __inline__ void __swab32s(__u32 *addr)
{
	__arch__swab32s(addr);
}

#ifdef __BYTEORDER_HAS_U64__
static __inline__ __attribute_const__ __u64 __fswab64(__u64 x)
{
#  ifdef __SWAB_64_THRU_32__
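	/*
	 * Split into two 32-bit halves and swap each: e.g.
	 * x = 0x0123456789abcdefULL gives h = 0x01234567 and
	 * l = 0x89abcdef, which recombine to 0xefcdab8967452301ULL.
	 */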
	__u32 h = x >> 32;
	__u32 l = x & ((1ULL<<32)-1);
	return (((__u64)__swab32(l)) << 32) | ((__u64)(__swab32(h)));
#  else
	return __arch__swab64(x);
#  endif
}
static __inline__ __u64 __swab64p(__u64 *x)
{
	return __arch__swab64p(x);
}
static __inline__ void __swab64s(__u64 *addr)
{
	__arch__swab64s(addr);
}
#endif /* __BYTEORDER_HAS_U64__ */

#if defined(__KERNEL__)
#define swab16 __swab16
#define swab24 __swab24
#define swab32 __swab32
#define swab64 __swab64
#define swab16p __swab16p
#define swab24p __swab24p
#define swab32p __swab32p
#define swab64p __swab64p
#define swab16s __swab16s
#define swab24s __swab24s
#define swab32s __swab32s
#define swab64s __swab64s
#endif

#endif /* _LINUX_BYTEORDER_SWAB_H */