1 #ifndef _PPC_BYTEORDER_H
2 #define _PPC_BYTEORDER_H
3 
4 #include <asm/types.h>
5 #include <linux/compiler.h>
6 
7 #ifdef __GNUC__
8 #ifdef __KERNEL__
9 
/*
 * Load a 16-bit little-endian value from *addr using the PowerPC
 * byte-reversed halfword load (lhbrx), so no explicit swab is needed.
 * The "m" (*addr) input tells the compiler the memory is read, keeping
 * the asm correctly ordered against stores to *addr.
 */
extern __inline__ unsigned ld_le16(const volatile unsigned short *addr)
{
	unsigned val;

	__asm__ __volatile__ ("lhbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
	return val;
}

/*
 * Store a 16-bit value to *addr in little-endian order using the
 * byte-reversed halfword store (sthbrx).  The "=m" (*addr) output
 * marks the memory as written so the compiler cannot reorder or
 * elide surrounding accesses to it.
 */
extern __inline__ void st_le16(volatile unsigned short *addr, const unsigned val)
{
	__asm__ __volatile__ ("sthbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
}

/*
 * Load a 32-bit little-endian value from *addr using the byte-reversed
 * word load (lwbrx); the swap happens in the load itself.
 */
extern __inline__ unsigned ld_le32(const volatile unsigned *addr)
{
	unsigned val;

	__asm__ __volatile__ ("lwbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
	return val;
}

/*
 * Store a 32-bit value to *addr in little-endian order using the
 * byte-reversed word store (stwbrx).
 */
extern __inline__ void st_le32(volatile unsigned *addr, const unsigned val)
{
	__asm__ __volatile__ ("stwbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
}

/*
 * Load a 64-bit little-endian value as two byte-reversed 32-bit loads
 * (32-bit PPC has no 64-bit lwbrx equivalent).  %L0 names the other
 * register of the register pair holding the 64-bit val, so the LE low
 * word at taddr fills one half and the LE high word at taddr+4 fills
 * the other.  The "0" (val) input on the second asm ties it to the
 * same register pair so the first half is not clobbered.
 */
extern __inline__ unsigned long long ld_le64(const volatile unsigned long long *addr)
{
	unsigned char *taddr = (unsigned char *) addr;
	unsigned long long val;

	__asm__ __volatile__ ("lwbrx %L0,0,%1" : "=r" (val) : "r" (taddr),   "m" (*addr));
	__asm__ __volatile__ ("lwbrx  %0,0,%1" : "=r" (val) : "r" (taddr+4), "m" (*addr), "0" (val));
	return val;
}

/*
 * Store a 64-bit value in little-endian order as two byte-reversed
 * 32-bit stores: %L1 (the other register of the pair holding val)
 * goes to taddr, %1 to taddr+4 — the mirror of ld_le64().
 */
extern __inline__ void st_le64(volatile unsigned long long *addr, const unsigned long long val)
{
	unsigned char *taddr = (unsigned char *) addr;

	__asm__ __volatile__ ("stwbrx %L1,0,%2" : "=m" (*addr) : "r" (val), "r" (taddr));
	__asm__ __volatile__ ("stwbrx  %1,0,%2" : "=m" (*addr) : "r" (val), "r" (taddr+4));
}

___arch__swab16(__u16 value)54 static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
55 {
56 	__u16 result;
57 
58 	__asm__("rlwimi %0,%2,8,16,23" : "=&r" (result) : "0" (value >> 8), "r" (value));
59 	return result;
60 }
61 
___arch__swab32(__u32 value)62 static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
63 {
64 	__u32 result;
65 
66 	__asm__("rlwimi %0,%2,24,16,23" : "=&r" (result) : "0" (value>>24), "r" (value));
67 	__asm__("rlwimi %0,%2,8,8,15"   : "=&r" (result) : "0" (result),    "r" (value));
68 	__asm__("rlwimi %0,%2,24,0,7"   : "=&r" (result) : "0" (result),    "r" (value));
69 
70 	return result;
71 }
#define __arch__swab32(x) ___arch__swab32(x)
#define __arch__swab16(x) ___arch__swab16(x)

/* The same, but returns converted value from the location pointed to by addr. */
#define __arch__swab16p(addr) ld_le16(addr)
#define __arch__swab32p(addr) ld_le32(addr)
#define __arch__swab64p(addr) ld_le64(addr)

/* The same, but do the conversion in situ, ie. put the value back to addr. */
#define __arch__swab16s(addr) st_le16(addr,*addr)
#define __arch__swab32s(addr) st_le32(addr,*addr)
#define __arch__swab64s(addr) st_le64(addr,*addr)

85 #endif /* __KERNEL__ */
86 
87 #if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
88 #  define __BYTEORDER_HAS_U64__
89 #  define __SWAB_64_THRU_32__
90 #endif
91 
92 #endif /* __GNUC__ */
93 
94 #include <linux/byteorder/big_endian.h>
95 
96 #endif /* _PPC_BYTEORDER_H */
97