/*
 * User space memory access functions
 *
 * Copyright (C) 1999, 2002  Niibe Yutaka
 * Copyright (C) 2003 - 2008  Paul Mundt
 *
 * Based on:
 *     MIPS implementation version 1.15 by
 *              Copyright (C) 1996, 1997, 1998 by Ralf Baechle
 *     and i386 version.
 */
#ifndef __ASM_SH_UACCESS_32_H
#define __ASM_SH_UACCESS_32_H
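
/*
 * __get_user_size() dispatches on the access width at compile time:
 * the size argument is a constant, so the switch below collapses to a
 * single mov.b/mov.w/mov.l access.  Unsupported sizes fall through to
 * __get_user_unknown(), which is deliberately never defined, so they
 * fail at link time instead of silently misbehaving.
 *
 * A minimal usage sketch (the generic get_user() path expands to
 * roughly this; 'val', 'uptr' and 'err' are illustrative names):
 *
 *	long val;
 *	int err;
 *	__get_user_size(val, uptr, sizeof(*uptr), err);
 *	if (err)
 *		return -EFAULT;
 */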
#define __get_user_size(x,ptr,size,retval)		\
do {							\
	retval = 0;					\
	switch (size) {					\
	case 1:						\
		__get_user_asm(x, ptr, retval, "b");	\
		break;					\
	case 2:						\
		__get_user_asm(x, ptr, retval, "w");	\
		break;					\
	case 4:						\
		__get_user_asm(x, ptr, retval, "l");	\
		break;					\
	default:					\
		__get_user_unknown();			\
		break;					\
	}						\
} while (0)
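
/*
 * With an MMU the load at label 1 can fault.  The __ex_table entry
 * "1b, 3b" makes the page-fault handler resume at the fixup code at
 * label 3, which zeroes the destination, loads the resume address
 * (label 2, kept in the literal at 4:) and jumps back through it,
 * setting err to -EFAULT in the branch delay slot.  The indirect jump
 * is needed because SH branch displacements cannot reach an arbitrary
 * section from .fixup directly.
 */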
#ifdef CONFIG_MMU
#define __get_user_asm(x, addr, err, insn) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov." insn "	%2, %1\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov	#0, %1\n\t" \
	"mov.l	4f, %0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3, %0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	:"=&r" (err), "=&r" (x) \
	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
#else
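/*
 * Without an MMU the access cannot fault, so no fixup or exception
 * table entry is needed; err is left as the zero that
 * __get_user_size() already stored.
 */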
#define __get_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%1, %0\n\t"		\
		: "=&r" (x)				\
		: "m" (__m(addr))			\
	);						\
} while (0)
#endif /* CONFIG_MMU */
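
/*
 * Never defined anywhere: referencing it from the default case above
 * turns an unsupported access size into a link-time error.
 */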
extern void __get_user_unknown(void);

#define __put_user_size(x,ptr,size,retval)		\
do {							\
	retval = 0;					\
	switch (size) {					\
	case 1:						\
		__put_user_asm(x, ptr, retval, "b");	\
		break;					\
	case 2:						\
		__put_user_asm(x, ptr, retval, "w");	\
		break;					\
	case 4:						\
		__put_user_asm(x, ptr, retval, "l");	\
		break;					\
	case 8:						\
		__put_user_u64(x, ptr, retval);		\
		break;					\
	default:					\
		__put_user_unknown();			\
	}						\
} while (0)
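
/*
 * The put side mirrors the get side, with two differences: stores
 * also support an 8-byte case via __put_user_u64(), and the asm takes
 * a "memory" clobber because it writes user memory behind the
 * compiler's back.  The fixup only has to set err; there is no
 * destination register to zero.
 */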
#ifdef CONFIG_MMU
#define __put_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"1:\n\t"				\
		"mov." insn "	%1, %2\n\t"		\
		"2:\n"					\
		".section	.fixup,\"ax\"\n"	\
		"3:\n\t"				\
		"mov.l	4f, %0\n\t"			\
		"jmp	@%0\n\t"			\
		" mov	%3, %0\n\t"			\
		".balign	4\n"			\
		"4:	.long	2b\n\t"			\
		".previous\n"				\
		".section	__ex_table,\"a\"\n\t"	\
		".long	1b, 3b\n\t"			\
		".previous"				\
		: "=&r" (err)				\
		: "r" (x), "m" (__m(addr)), "i" (-EFAULT), \
		  "0" (err)				\
		: "memory"				\
	);						\
} while (0)
#else
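/*
 * Without an MMU the store cannot fault; only the "memory" clobber
 * remains so the compiler does not reorder or elide the store.
 */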
#define __put_user_asm(x, addr, err, insn)		\
do {							\
	__asm__ __volatile__ (				\
		"mov." insn "	%0, %1\n\t"		\
		: /* no outputs */			\
		: "r" (x), "m" (__m(addr))		\
		: "memory"				\
	);						\
} while (0)
#endif /* CONFIG_MMU */
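
/*
 * 64-bit stores use GCC's SH operand modifiers: for a 64-bit value
 * held in a register pair, %R1 selects the least significant word and
 * %S1 the most significant word, while %T2 names the memory word
 * following %2.  The two variants below differ only in which half
 * goes to the lower address, matching the configured CPU endianness.
 */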
#if defined(CONFIG_CPU_LITTLE_ENDIAN)
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%R1,%2\n\t" \
	"mov.l	%S1,%T2\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l	4f,%0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3,%0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#else
#define __put_user_u64(val,addr,retval) \
({ \
__asm__ __volatile__( \
	"1:\n\t" \
	"mov.l	%S1,%2\n\t" \
	"mov.l	%R1,%T2\n\t" \
	"2:\n" \
	".section	.fixup,\"ax\"\n" \
	"3:\n\t" \
	"mov.l	4f,%0\n\t" \
	"jmp	@%0\n\t" \
	" mov	%3,%0\n\t" \
	".balign	4\n" \
	"4:	.long	2b\n\t" \
	".previous\n" \
	".section	__ex_table,\"a\"\n\t" \
	".long	1b, 3b\n\t" \
	".previous" \
	: "=r" (retval) \
	: "r" (val), "m" (__m(addr)), "i" (-EFAULT), "0" (retval) \
	: "memory"); })
#endif
extern void __put_user_unknown(void);
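
/*
 * Copy a NUL-terminated string from user space.  The loop copies one
 * byte per iteration (the store sits in the bt/s delay slot, so the
 * terminating NUL is copied too) and counts down %4 with dt.  Returns
 * the length of the copied string excluding the NUL, __count if the
 * buffer was exhausted first, or -EFAULT (via the fixup at label 4)
 * if reading the source faults.
 */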
static inline int
__strncpy_from_user(unsigned long __dest, unsigned long __user __src, int __count)
{
	__kernel_size_t res;
	unsigned long __dummy, _d, _s, _c;

	__asm__ __volatile__(
		"9:\n"
		"mov.b	@%2+, %1\n\t"
		"cmp/eq	#0, %1\n\t"
		"bt/s	2f\n"
		"1:\n"
		"mov.b	%1, @%3\n\t"
		"dt	%4\n\t"
		"bf/s	9b\n\t"
		" add	#1, %3\n\t"
		"2:\n\t"
		"sub	%4, %0\n"
		"3:\n"
		".section .fixup,\"ax\"\n"
		"4:\n\t"
		"mov.l	5f, %1\n\t"
		"jmp	@%1\n\t"
		" mov	%9, %0\n\t"
		".balign 4\n"
		"5:	.long 3b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		"	.balign 4\n"
		"	.long 9b,4b\n"
		".previous"
		: "=r" (res), "=&z" (__dummy), "=r" (_s), "=r" (_d), "=r" (_c)
		: "0" (__count), "2" (__src), "3" (__dest), "4" (__count),
		  "i" (-EFAULT)
		: "memory", "t");

	return res;
}

/*
 * Return the size of a string (including the ending 0 even when we
 * have exceeded the maximum string length).
 */
static inline long __strnlen_user(const char __user *__s, long __n)
{
	unsigned long res;
	unsigned long __dummy;

	__asm__ __volatile__(
		"1:\t"
		"mov.b	@(%0,%3), %1\n\t"
		"cmp/eq	%4, %0\n\t"
		"bt/s	2f\n\t"
		" add	#1, %0\n\t"
		"tst	%1, %1\n\t"
		"bf	1b\n\t"
		"2:\n"
		".section .fixup,\"ax\"\n"
		"3:\n\t"
		"mov.l	4f, %1\n\t"
		"jmp	@%1\n\t"
		" mov	#0, %0\n"
		".balign 4\n"
		"4:	.long 2b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		"	.balign 4\n"
		"	.long 1b,3b\n"
		".previous"
		: "=z" (res), "=&r" (__dummy)
		: "0" (0), "r" (__s), "r" (__n)
		: "t");

	return res;
}
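
/*
 * Note on __strnlen_user() above: a faulting access takes the fixup
 * at label 3 and returns 0, so callers treat 0 as "inaccessible
 * string".  An illustrative (hypothetical) caller:
 *
 *	long len = __strnlen_user(name, PATH_MAX);
 *	if (!len)
 *		return -EFAULT;
 */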
#endif /* __ASM_SH_UACCESS_32_H */