2 * linux/arch/arm/lib/csumpartialcopy.S
4 * Copyright (C) 1995-1998 Russell King
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
10 #include <linux/linkage.h>
11 #include <asm/assembler.h>
15 /* Function: __u32 csum_partial_copy_nocheck(const char *src, char *dst, int len, __u32 sum)
16 * Params : r0 = src, r1 = dst, r2 = len, r3 = checksum
17 * Returns : r0 = new checksum
@ NOTE(review): this excerpt is garbled -- every line begins with a fused
@ line number from the original file, and most intervening lines are
@ missing (the numbers are non-contiguous).  Code tokens are kept
@ byte-identical below; only comments are added.
@
@ Register-save fragment: pushes the callee-saved regs plus fp/ip/lr/pc
@ (classic APCS full frame).  The enclosing .macro line is not visible
@ here -- presumably a save_regs-style macro; confirm against the
@ original file.
21 stmfd sp!, {r4 - r8, fp, ip, lr, pc}
@ load_regs: restore the frame saved above and return, with \flags
@ selecting the condition/flag-restoring LOADREGS variant.  Body is
@ truncated; the matching .endm is not visible.
24 .macro load_regs,flags
25 LOADREGS(\flags,fp,{r4 - r8, fp, sp, pc})
@ load2b: presumably loads two bytes from [r0] into \reg1/\reg2 --
@ body not visible; TODO confirm against the original file.
32 .macro load2b, reg1, reg2
@ load2l: presumably loads two words from [r0] -- body not visible.
41 .macro load2l, reg1, reg2
@ load4l: load four consecutive words from the source pointer (r0),
@ post-incrementing r0 by 16.  Matching .endm is not visible.
46 .macro load4l, reg1, reg2, reg3, reg4
47 ldmia r0!, {\reg1, \reg2, \reg3, \reg4}
@ __u32 csum_partial_copy_nocheck(const char *src, char *dst, int len, __u32 sum)
@   In:  r0 = src, r1 = dst, r2 = len, r3 = running 32-bit checksum
@   Out: r0 = updated checksum (ones-complement partial sum, per the
@        header comment above)
@
@ NOTE(review): sparse fragment -- the fused leading numbers are the
@ original file's line numbers and show large gaps; do not treat this
@ as a complete routine.  Comments below only describe what the
@ visible instructions establish; everything else is hedged.
50 ENTRY(csum_partial_copy_nocheck)
@ Align the destination first: if dst is 2-byte misaligned, copy the
@ leading bytes individually (fragment below) before the word loops.
56 tst r1, #2 @ Test destination alignment
59 subs r2, r2, #2 @ We do not know if SRC is aligned...
@ ip accumulates a halfword from byte loads (byte-load code not visible).
60 orr ip, ip, r8, lsl #8
65 strb ip, [r1], #1 @ Destination now aligned
@ Dispatch on source alignment (low two bits of r0); the branch
@ instructions to the .srcN_aligned cases are not visible here.
66 .dst_aligned: tst r0, #3
@ ip = len rounded down to a 16-byte multiple: iteration count for the
@ 16-bytes-per-pass aligned copy loop below.
69 bics ip, r2, #15 @ Routine for src & dst aligned
@ Main aligned loop body: load 4 words, store 4 words.  The adcs
@ instructions folding r4-r7 into r3 sit in the missing lines --
@ carry-chain order matters; do not reorder.
71 1: load4l r4, r5, r6, r7
72 stmia r1!, {r4, r5, r6, r7}
@ Tail: fold a final halfword (left-shifted so carry propagates
@ correctly) into the sum.
99 adcs r3, r3, r4, lsl #16
@ Short-length path: nothing left to do when len == 0.
111 .too_small: teq r2, #0
116 orr ip, ip, r8, lsl #8
@ Common exit: fold the final carry into the sum and return it in r0.
126 .csum_exit: adc r0, r3, #0
@ --- src 1-byte misaligned case (label not visible) ---------------
@ Each pass loads 4 words and recombines them across the misalignment:
@ r4 holds 3 stale bytes, each new word contributes its low bytes via
@ "lsl #24" splicing (the matching "lsr #8" halves are in missing lines).
142 1: load4l r5, r6, r7, r8
143 orr r4, r4, r5, lsl #24
145 orr r5, r5, r6, lsl #24
147 orr r6, r6, r7, lsl #24
149 orr r7, r7, r8, lsl #24
150 stmia r1!, {r4, r5, r6, r7}
@ Smaller tails of the same splice for 8- and 4-byte remainders.
164 orr r4, r4, r5, lsl #24
166 orr r5, r5, r6, lsl #24
174 orr r4, r4, r5, lsl #24
183 adcs r3, r3, r4, lsl #16
@ --- src 2-byte misaligned case -----------------------------------
@ Same structure with a halfword (16-bit) splice.
190 .src2_aligned: mov r4, r4, lsr #16
194 1: load4l r5, r6, r7, r8
195 orr r4, r4, r5, lsl #16
197 orr r5, r5, r6, lsl #16
199 orr r6, r6, r7, lsl #16
201 orr r7, r7, r8, lsl #16
202 stmia r1!, {r4, r5, r6, r7}
216 orr r4, r4, r5, lsl #16
218 orr r5, r5, r6, lsl #16
226 orr r4, r4, r5, lsl #16
235 adcs r3, r3, r4, lsl #16
@ --- src 3-byte misaligned case -----------------------------------
@ Same structure with a single-byte (8-bit) splice.
245 .src3_aligned: mov r4, r4, lsr #24
249 1: load4l r5, r6, r7, r8
250 orr r4, r4, r5, lsl #8
252 orr r5, r5, r6, lsl #8
254 orr r6, r6, r7, lsl #8
256 orr r7, r7, r8, lsl #8
257 stmia r1!, {r4, r5, r6, r7}
271 orr r4, r4, r5, lsl #8
273 orr r5, r5, r6, lsl #8
281 orr r4, r4, r5, lsl #8
@ Final folds: add the remaining halfword/byte shifted into position so
@ the carry out of bit 31 is captured by the following adc at .csum_exit.
290 adcs r3, r3, r4, lsl #16
294 adcs r3, r3, r4, lsl #24