/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 1999, 2000, 2001, 2003 by Ralf Baechle
 * Copyright (C) 1999, 2000, 2001 Silicon Graphics, Inc.
 */
#ifndef _ASM_UNALIGNED_H
#define _ASM_UNALIGNED_H

#include <linux/types.h>

/*
 * get_unaligned - get value from possibly mis-aligned location
 * @ptr: pointer to value
 *
 * This macro should be used for accessing values larger in size than
 * single bytes at locations that are expected to be improperly aligned,
 * e.g. retrieving a u16 value from a location not u16-aligned.
 *
 * Note that unaligned accesses can be very expensive on some architectures.
 */
#define get_unaligned(ptr) \
	((__typeof__(*(ptr)))__get_unaligned((ptr), sizeof(*(ptr))))

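/*
 * Illustrative use only (the buffer and offset below are hypothetical):
 * pull a 32-bit length field that starts at an odd offset in a byte
 * buffer without risking an unaligned trap:
 *
 *	const __u8 *buf = packet_header;
 *	__u32 len = get_unaligned((const __u32 *)(buf + 3));
 */
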
/*
 * put_unaligned - put value to a possibly mis-aligned location
 * @val: value to place
 * @ptr: pointer to location
 *
 * This macro should be used for placing values larger in size than
 * single bytes at locations that are expected to be improperly aligned,
 * e.g. writing a u16 value to a location not u16-aligned.
 *
 * Note that unaligned accesses can be very expensive on some architectures.
 */
#define put_unaligned(x,ptr) \
	__put_unaligned((__u64)(x), (ptr), sizeof(*(ptr)))

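/*
 * Illustrative use only (buffer and offset are hypothetical): store a
 * 16-bit checksum at an odd offset in a byte buffer:
 *
 *	__u8 *buf = frame;
 *	__u16 csum = 0xabcd;
 *
 *	put_unaligned(csum, (__u16 *)(buf + 5));
 */
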
/*
 * This is a silly but good way to make sure that
 * the get/put functions are indeed always optimized,
 * and that we use the correct sizes.
 */
extern void bad_unaligned_access_length(void);

/*
 * EGCS 1.1 knows about arbitrary unaligned loads.  Define some
 * packed structures to talk about such things with.
 */

struct __una_u64 { __u64 x __attribute__((packed)); };
struct __una_u32 { __u32 x __attribute__((packed)); };
struct __una_u16 { __u16 x __attribute__((packed)); };

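/*
 * Sketch of what the packed wrappers buy us (illustrative, not part of
 * the interface): a load through the packed member is one the compiler
 * knows may be misaligned, so
 *
 *	val = ((const struct __una_u32 *)p)->x;
 *
 * is compiled into a safe sequence (byte loads and shifts, or the CPU's
 * unaligned-load instructions) instead of a plain word load that could trap.
 */
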
/*
 * Elemental unaligned loads
 */

static inline __u64 __uldq(const __u64 * r11)
{
	const struct __una_u64 *ptr = (const struct __una_u64 *) r11;
	return ptr->x;
}

static inline __u32 __uldl(const __u32 * r11)
{
	const struct __una_u32 *ptr = (const struct __una_u32 *) r11;
	return ptr->x;
}

static inline __u16 __uldw(const __u16 * r11)
{
	const struct __una_u16 *ptr = (const struct __una_u16 *) r11;
	return ptr->x;
}

/*
 * Elemental unaligned stores
 */

static inline void __ustq(__u64 r5, __u64 * r11)
{
	struct __una_u64 *ptr = (struct __una_u64 *) r11;
	ptr->x = r5;
}

static inline void __ustl(__u32 r5, __u32 * r11)
{
	struct __una_u32 *ptr = (struct __una_u32 *) r11;
	ptr->x = r5;
}

static inline void __ustw(__u16 r5, __u16 * r11)
{
	struct __una_u16 *ptr = (struct __una_u16 *) r11;
	ptr->x = r5;
}

static inline __u64 __get_unaligned(const void *ptr, size_t size)
{
	__u64 val;

	switch (size) {
	case 1:
		val = *(const __u8 *)ptr;
		break;
	case 2:
		val = __uldw((const __u16 *)ptr);
		break;
	case 4:
		val = __uldl((const __u32 *)ptr);
		break;
	case 8:
		val = __uldq((const __u64 *)ptr);
		break;
	default:
		bad_unaligned_access_length();
	}
	return val;
}

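/*
 * Sketch (illustrative): the size argument is always a sizeof() constant,
 * so each call collapses to the single matching case at compile time.
 * For example,
 *
 *	get_unaligned((__u16 *)p)
 *
 * reduces to the __uldw() case; the default: branch, and with it the
 * reference to the undefined bad_unaligned_access_length(), is optimized
 * away for every legal size.
 */
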
static inline void __put_unaligned(__u64 val, void *ptr, size_t size)
{
	switch (size) {
	case 1:
		*(__u8 *)ptr = val;
		break;
	case 2:
		__ustw(val, (__u16 *)ptr);
		break;
	case 4:
		__ustl(val, (__u32 *)ptr);
		break;
	case 8:
		__ustq(val, (__u64 *)ptr);
		break;
	default:
		bad_unaligned_access_length();
	}
}

#endif /* _ASM_UNALIGNED_H */