! NOTE(review): this SOURCE is a non-contiguous excerpt of a SuperH (SH)
! kernel copy routine file (copy_page / __copy_user); each line begins with
! its original line number, and many interior lines are missing. All
! comments below are hedged accordingly — confirm against the full file.
2 * copy_page, __copy_user_page, __copy_user implementation of SuperH
4 * Copyright (C) 2001 Niibe Yutaka & Kaz Kojima
5 * Copyright (C) 2002 Toshinobu Sugioka
6 * Copyright (C) 2006 Paul Mundt
8 #include <linux/linkage.h>
! copy_page(to, from): register roles documented below; r8 presumably marks
! the end of the source page (from + PAGE_SIZE) — loop body not visible here.
16 * void copy_page(void *to, void *from)
20 * r0, r1, r2, r3, r4, r5, r6, r7 --- scratch
21 * r8 --- from + PAGE_SIZE
44 #if defined(CONFIG_CPU_SH4)
58 #if defined(CONFIG_CPU_SH4)
! PAGE_SIZE constant pool entry, PC-relative loadable (SH has no long immediates).
72 .Lpsz: .long PAGE_SIZE
! __copy_user: EX()/EX_NO_POP() wrap potentially-faulting user accesses and
! record (insn addr 9999b, fixup addr) pairs in __ex_table; on a fault the
! exception handler jumps to the 6000f / 6005f fixup labels respectively.
75 * __kernel_size_t __copy_user(void *to, const void *from, __kernel_size_t n);
76 * Return the number of bytes NOT copied
80 .section __ex_table, "a"; \
81 .long 9999b, 6000f ; \
83 #define EX_NO_POP(...) \
85 .section __ex_table, "a"; \
86 .long 9999b, 6005f ; \
89 ! Check if small number of bytes
! Fast path: lengths of 11 bytes or fewer skip alignment work and go
! straight to the byte-at-a-time cleanup loop (no registers pushed yet,
! hence the _no_pop variant).
92 cmp/gt r0,r6 ! r6 (len) > r0 (11)
93 bf/s .L_cleanup_loop_no_pop
94 add r6,r3 ! last destination address (delay slot)
96 ! Calculate bytes needed to align to src
108 ! Copy bytes to long word align src
116 ! Jump to appropriate routine depending on dest
135 * Come here if there are less than 12 bytes to copy
137 * Keep the branch target close, so the bf/s callee doesn't overflow
138 * and result in a more expensive branch being inserted. This is the
139 * fast-path for small copies, the jump via the jump table will hit the
140 * default slow-path cleanup. -PFM.
! Byte-copy loop: r5 = src (post-increment), r4 = dest; r3 appears to hold
! the end-of-destination bound computed above — TODO confirm loop condition
! against the missing lines.
142 .L_cleanup_loop_no_pop:
143 tst r6,r6 ! Check explicitly for zero
147 EX_NO_POP( mov.b @r5+,r0 )
149 EX_NO_POP( mov.b r0,@r4 )
! Success: 0 bytes NOT copied.
153 1: mov #0,r0 ! normal return
! Fault-recovery code targeted by the EX_NO_POP __ex_table entries above.
157 .section .fixup, "ax"
173 ! Skip the large copy for small transfers
! Below ~60 bytes the 32-byte block path is presumably not worth the setup.
175 cmp/gt r6, r0 ! r0 (60) > r6 (len)
178 ! Align dest to a 32 byte boundary
! SH-4 aligned path: 32 bytes (8 longwords, r0..r2,r7..r11) stored per
! iteration; callee-saved r8-r11 were presumably pushed earlier (EX fixup
! is the popping variant, 6000f).
203 #ifdef CONFIG_CPU_SH4
209 EX( mov.l r1,@(4,r4) )
211 EX( mov.l r2,@(8,r4) )
212 cmp/gt r6, r0 ! r0 (32) > r6 (len)
213 EX( mov.l r7,@(12,r4) )
214 EX( mov.l r8,@(16,r4) )
215 EX( mov.l r9,@(20,r4) )
216 EX( mov.l r10,@(24,r4) )
217 EX( mov.l r11,@(28,r4) )
! Misaligned-destination path; word (mov.w) stores at offsets 28/30 suggest
! a 2-byte-aligned destination case, with endian-specific ordering below —
! TODO confirm against the missing dispatch code.
247 #ifdef CONFIG_CPU_LITTLE_ENDIAN
261 EX( mov.l r1,@(4,r4) )
262 EX( mov.l r8,@(8,r4) )
263 EX( mov.l r9,@(12,r4) )
272 EX( mov.l r10,@(16,r4) )
273 EX( mov.l r1,@(20,r4) )
274 EX( mov.l r8,@(24,r4) )
275 EX( mov.w r0,@(28,r4) )
! Big-endian variant: loads high-to-low offsets, then stores back down.
279 EX( mov.l @(28,r5),r0 )
280 EX( mov.l @(24,r5),r8 )
281 EX( mov.l @(20,r5),r9 )
282 EX( mov.l @(16,r5),r10 )
283 EX( mov.w r0,@(30,r4) )
288 EX( mov.l r0,@(28,r4) )
289 EX( mov.l r8,@(24,r4) )
290 EX( mov.l r9,@(20,r4) )
292 EX( mov.l @(12,r5),r0 )
293 EX( mov.l @(8,r5),r8 )
295 EX( mov.l @(4,r5),r9 )
301 EX( mov.l r0,@(12,r4) )
302 EX( mov.l r8,@(8,r4) )
304 EX( mov.l r9,@(4,r4) )
305 EX( mov.w r0,@(2,r4) )
! Destination half-word aligned: read a longword, emit two word stores.
314 1: ! Read longword, write two words per iteration
317 #ifdef CONFIG_CPU_LITTLE_ENDIAN
320 EX( mov.w r0,@(2,r4) )
322 EX( mov.w r0,@(2,r4) )
332 ! Destination = 01 or 11
! Odd destination alignment: longword reads split into byte/word/byte stores.
336 ! Read longword, write byte, word, byte per iteration
339 #ifdef CONFIG_CPU_LITTLE_ENDIAN
345 EX( mov.b r0,@(2,r4) )
349 EX( mov.b r0,@(3,r4) )
359 ! Cleanup last few bytes
! Success on the main path: return 0 bytes NOT copied.
375 mov #0,r0 ! normal return
! Fault-recovery code for the EX (popping) __ex_table entries above.
380 .section .fixup, "ax"