/*
 * Import 2.3.18pre1
 * [davej-history.git] / include / asm-alpha / unaligned.h
 * blob 8017f6bfa3889b13ab91ceee35dd83b54062db80
 */
1 #ifndef __ALPHA_UNALIGNED_H
2 #define __ALPHA_UNALIGNED_H
4 /*
5 * The main single-value unaligned transfer routines.
6 */
7 #define get_unaligned(ptr) \
8 ((__typeof__(*(ptr)))__get_unaligned((ptr), sizeof(*(ptr))))
9 #define put_unaligned(x,ptr) \
10 __put_unaligned((unsigned long)(x), (ptr), sizeof(*(ptr)))
/*
 * This is a silly but good way to make sure that
 * the get/put functions are indeed always optimized,
 * and that we use the correct sizes.
 *
 * NOTE(review): deliberately left without a definition — any call
 * that survives into the object file becomes a link error, which is
 * how an unsupported transfer size is caught.
 */
extern void bad_unaligned_access_length(void);
20 * EGCS 1.1 knows about arbitrary unaligned loads. Define some
21 * packed structures to talk about such things with.
24 struct __una_u64 { __u64 x __attribute__((packed)); };
25 struct __una_u32 { __u32 x __attribute__((packed)); };
26 struct __una_u16 { __u16 x __attribute__((packed)); };
29 * Elemental unaligned loads
32 extern inline unsigned long __uldq(const unsigned long * r11)
34 #if __GNUC__ > 2 || __GNUC_MINOR__ >= 91
35 const struct __una_u64 *ptr = (const struct __una_u64 *) r11;
36 return ptr->x;
37 #else
38 unsigned long r1,r2;
39 __asm__("ldq_u %0,%3\n\t"
40 "ldq_u %1,%4\n\t"
41 "extql %0,%2,%0\n\t"
42 "extqh %1,%2,%1"
43 :"=&r" (r1), "=&r" (r2)
44 :"r" (r11),
45 "m" (*r11),
46 "m" (*(const unsigned long *)(7+(char *) r11)));
47 return r1 | r2;
48 #endif
51 extern inline unsigned long __uldl(const unsigned int * r11)
53 #if __GNUC__ > 2 || __GNUC_MINOR__ >= 91
54 const struct __una_u32 *ptr = (const struct __una_u32 *) r11;
55 return ptr->x;
56 #else
57 unsigned long r1,r2;
58 __asm__("ldq_u %0,%3\n\t"
59 "ldq_u %1,%4\n\t"
60 "extll %0,%2,%0\n\t"
61 "extlh %1,%2,%1"
62 :"=&r" (r1), "=&r" (r2)
63 :"r" (r11),
64 "m" (*r11),
65 "m" (*(const unsigned long *)(3+(char *) r11)));
66 return r1 | r2;
67 #endif
70 extern inline unsigned long __uldw(const unsigned short * r11)
72 #if __GNUC__ > 2 || __GNUC_MINOR__ >= 91
73 const struct __una_u16 *ptr = (const struct __una_u16 *) r11;
74 return ptr->x;
75 #else
76 unsigned long r1,r2;
77 __asm__("ldq_u %0,%3\n\t"
78 "ldq_u %1,%4\n\t"
79 "extwl %0,%2,%0\n\t"
80 "extwh %1,%2,%1"
81 :"=&r" (r1), "=&r" (r2)
82 :"r" (r11),
83 "m" (*r11),
84 "m" (*(const unsigned long *)(1+(char *) r11)));
85 return r1 | r2;
86 #endif
90 * Elemental unaligned stores
93 extern inline void __ustq(unsigned long r5, unsigned long * r11)
95 #if __GNUC__ > 2 || __GNUC_MINOR__ >= 91
96 struct __una_u64 *ptr = (struct __una_u64 *) r11;
97 ptr->x = r5;
98 #else
99 unsigned long r1,r2,r3,r4;
101 __asm__("ldq_u %3,%1\n\t"
102 "ldq_u %2,%0\n\t"
103 "insqh %6,%7,%5\n\t"
104 "insql %6,%7,%4\n\t"
105 "mskqh %3,%7,%3\n\t"
106 "mskql %2,%7,%2\n\t"
107 "bis %3,%5,%3\n\t"
108 "bis %2,%4,%2\n\t"
109 "stq_u %3,%1\n\t"
110 "stq_u %2,%0"
111 :"=m" (*r11),
112 "=m" (*(unsigned long *)(7+(char *) r11)),
113 "=&r" (r1), "=&r" (r2), "=&r" (r3), "=&r" (r4)
114 :"r" (r5), "r" (r11));
115 #endif
118 extern inline void __ustl(unsigned long r5, unsigned int * r11)
120 #if __GNUC__ > 2 || __GNUC_MINOR__ >= 91
121 struct __una_u32 *ptr = (struct __una_u32 *) r11;
122 ptr->x = r5;
123 #else
124 unsigned long r1,r2,r3,r4;
126 __asm__("ldq_u %3,%1\n\t"
127 "ldq_u %2,%0\n\t"
128 "inslh %6,%7,%5\n\t"
129 "insll %6,%7,%4\n\t"
130 "msklh %3,%7,%3\n\t"
131 "mskll %2,%7,%2\n\t"
132 "bis %3,%5,%3\n\t"
133 "bis %2,%4,%2\n\t"
134 "stq_u %3,%1\n\t"
135 "stq_u %2,%0"
136 :"=m" (*r11),
137 "=m" (*(unsigned long *)(3+(char *) r11)),
138 "=&r" (r1), "=&r" (r2), "=&r" (r3), "=&r" (r4)
139 :"r" (r5), "r" (r11));
140 #endif
143 extern inline void __ustw(unsigned long r5, unsigned short * r11)
145 #if __GNUC__ > 2 || __GNUC_MINOR__ >= 91
146 struct __una_u16 *ptr = (struct __una_u16 *) r11;
147 ptr->x = r5;
148 #else
149 unsigned long r1,r2,r3,r4;
151 __asm__("ldq_u %3,%1\n\t"
152 "ldq_u %2,%0\n\t"
153 "inswh %6,%7,%5\n\t"
154 "inswl %6,%7,%4\n\t"
155 "mskwh %3,%7,%3\n\t"
156 "mskwl %2,%7,%2\n\t"
157 "bis %3,%5,%3\n\t"
158 "bis %2,%4,%2\n\t"
159 "stq_u %3,%1\n\t"
160 "stq_u %2,%0"
161 :"=m" (*r11),
162 "=m" (*(unsigned long *)(1+(char *) r11)),
163 "=&r" (r1), "=&r" (r2), "=&r" (r3), "=&r" (r4)
164 :"r" (r5), "r" (r11));
165 #endif
168 extern inline unsigned long __get_unaligned(const void *ptr, size_t size)
170 unsigned long val;
171 switch (size) {
172 case 1:
173 val = *(const unsigned char *)ptr;
174 break;
175 case 2:
176 val = __uldw((const unsigned short *)ptr);
177 break;
178 case 4:
179 val = __uldl((const unsigned int *)ptr);
180 break;
181 case 8:
182 val = __uldq((const unsigned long *)ptr);
183 break;
184 default:
185 bad_unaligned_access_length();
187 return val;
190 extern inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
192 switch (size) {
193 case 1:
194 *(unsigned char *)ptr = (val);
195 break;
196 case 2:
197 __ustw(val, (unsigned short *)ptr);
198 break;
199 case 4:
200 __ustl(val, (unsigned int *)ptr);
201 break;
202 case 8:
203 __ustq(val, (unsigned long *)ptr);
204 break;
205 default:
206 bad_unaligned_access_length();
210 #endif