/* checksum.S: Sparc V9 optimized checksum code.
 *
 *  Copyright(C) 1995 Linus Torvalds
 *  Copyright(C) 1995 Miguel de Icaza
 *  Copyright(C) 1996, 2000 David S. Miller
 *  Copyright(C) 1997 Jakub Jelinek
 *
 * derived from:
 *	Linux/Alpha checksum c-code
 *	Linux/ix86 inline checksum assembly
 *	RFC1071 Computing the Internet Checksum (esp. Jacobson's m68k code)
 *	David Mosberger-Tang for optimized reference c-code
 *	BSD4.4 portable checksum routine
 */
csum_partial_fix_alignment:
	/* We checked for zero length already, so there must be
	 * at least one byte.
	 */
	ldub		[%o0 + 0x00], %o4
1:	andcc		%o0, 0x2, %g0
	be,pn		%icc, csum_partial_post_align
	blu,pn		%icc, csum_partial_end_cruft
	lduh		[%o0 + 0x00], %o5
	ba,pt		%xcc, csum_partial_post_align
csum_partial:		/* %o0=buff, %o1=len, %o2=sum */
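	/* C-level view (types are approximate; the exact prototype lives in
	 * the checksum headers): roughly
	 *
	 *	unsigned int csum_partial(const void *buff, int len,
	 *				  unsigned int sum);
	 *
	 * i.e. compute the 32-bit ones'-complement partial sum of the LEN
	 * bytes at BUFF, accumulated on top of SUM.
	 */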
	prefetch	[%o0 + 0x000], #n_reads
	prefetch	[%o0 + 0x040], #n_reads
	brz,pn		%o1, csum_partial_finish
	/* We "remember" whether the lowest bit in the address
	 * was set in %g7.  Because if it is, we have to swap
	 * upper and lower 8 bit fields of the sum we calculate.
	 */
	bne,pn		%icc, csum_partial_fix_alignment
	 andcc		%o0, 0x1, %g7
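	/* The Internet checksum is byte-order symmetric (RFC 1071): summing
	 * the same data starting one byte off yields the byte-swapped 16-bit
	 * result.  So an odd starting address only costs one swap of the
	 * folded sum at the very end, roughly (illustrative C, "sum16" and
	 * "odd" are not names used by this code):
	 *
	 *	if (odd)
	 *		sum16 = (uint16_t)((sum16 << 8) | (sum16 >> 8));
	 */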
csum_partial_post_align:
	prefetch	[%o0 + 0x080], #n_reads
	prefetch	[%o0 + 0x0c0], #n_reads
	prefetch	[%o0 + 0x100], #n_reads
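	/* The prefetches above and below keep the read stream several cache
	 * lines (up to 0x180 bytes) ahead of the loads in the unrolled loop.
	 */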
	/* So that we don't need to use the non-pairing
	 * add-with-carry instructions we accumulate 32-bit
	 * values into a 64-bit register.  At the end of the
	 * loop we fold it down to 32-bits and so on.
	 */
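	/* A minimal C sketch of that accumulate-then-fold idea (illustrative
	 * only, not the kernel API; "words"/"nwords" stand in for the aligned
	 * buffer and its length in 32-bit words):
	 *
	 *	#include <stdint.h>
	 *
	 *	static uint16_t sum_words(const uint32_t *words, unsigned long nwords)
	 *	{
	 *		uint64_t sum = 0;
	 *		unsigned long i;
	 *
	 *		for (i = 0; i < nwords; i++)
	 *			sum += words[i];	// no add-with-carry needed
	 *		while (sum >> 32)		// fold 64 --> 32
	 *			sum = (sum & 0xffffffffu) + (sum >> 32);
	 *		while (sum >> 16)		// fold 32 --> 16
	 *			sum = (sum & 0xffffu) + (sum >> 16);
	 *		return (uint16_t)sum;
	 *	}
	 */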
	prefetch	[%o0 + 0x140], #n_reads
1:	lduw		[%o0 + 0x00], %o5
	lduw		[%o0 + 0x04], %g1
	lduw		[%o0 + 0x08], %g2
	add		%o4, %o5, %o4
	lduw		[%o0 + 0x0c], %g3
	add		%o4, %g1, %o4
	lduw		[%o0 + 0x10], %o5
	add		%o4, %g2, %o4
	lduw		[%o0 + 0x14], %g1
	add		%o4, %g3, %o4
	lduw		[%o0 + 0x18], %g2
	add		%o4, %o5, %o4
	lduw		[%o0 + 0x1c], %g3
	add		%o4, %g1, %o4
	lduw		[%o0 + 0x20], %o5
	add		%o4, %g2, %o4
	lduw		[%o0 + 0x24], %g1
	add		%o4, %g3, %o4
	lduw		[%o0 + 0x28], %g2
	add		%o4, %o5, %o4
	lduw		[%o0 + 0x2c], %g3
	add		%o4, %g1, %o4
	lduw		[%o0 + 0x30], %o5
	add		%o4, %g2, %o4
	lduw		[%o0 + 0x34], %g1
	add		%o4, %g3, %o4
	lduw		[%o0 + 0x38], %g2
	add		%o4, %o5, %o4
	lduw		[%o0 + 0x3c], %g3
	add		%o4, %g1, %o4
	prefetch	[%o0 + 0x180], #n_reads
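	/* Checksum whatever length remains one 32-bit word at a time; %o3
	 * below is the residual length rounded down to a multiple of four.
	 */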
2:	and		%o1, 0x3c, %o3
1:	lduw		[%o0 + 0x00], %o5
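	/* Fold the 64-bit accumulator back down: add the upper and lower
	 * 32-bit halves, then (with the 0xffff0000 mask below) the upper and
	 * lower 16-bit halves, until the sum fits in 16 bits.
	 */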
	sethi		%hi(0xffff0000), %g1
csum_partial_end_cruft:
	/* %o4 has the 16-bit sum we have calculated so-far.  */
	lduh		[%o0 + 0x00], %o5
	ldub		[%o0 + 0x00], %o5
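	/* At most three bytes can be left over here: an aligned halfword
	 * and/or one final lone byte.  Roughly, in C (illustrative names,
	 * "p" and "sum" standing in for %o0 and %o4):
	 *
	 *	if (len >= 2) {				// trailing halfword
	 *		sum += ((unsigned int)p[0] << 8) | p[1];
	 *		p += 2;
	 *		len -= 2;
	 *	}
	 *	if (len)				// last lone byte is the high
	 *		sum += (unsigned int)p[0] << 8;	// half of a zero-padded word
	 *
	 * after which the sum is folded back down to 16 bits as above.
	 */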
	sethi		%hi(0xffff0000), %g1
	/* We started with an odd byte, byte-swap the result.  */
1:	addcc		%o2, %o4, %o2
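	/* The running sum is folded into the caller-supplied %o2 with an
	 * end-around carry; csum_partial_finish then hands the 32-bit result
	 * back to the caller.
	 */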