1 #ifndef _S390_CHECKSUM_H
2 #define _S390_CHECKSUM_H
/*
 *  include/asm-s390/checksum.h
 *    S390 fast network checksum routines
 *    see also arch/S390/lib/checksum.c
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Ulrich Hild        (first version)
 *               Martin Schwidefsky (heavily optimized CKSM version)
 *               D.J. Barrow        (third attempt)
 */
16 #include <asm/uaccess.h>
19 * computes the checksum of a memory block at buff, length len,
20 * and adds in "sum" (32-bit)
22 * returns a 32-bit number suitable for feeding into itself
23 * or csum_tcpudp_magic
25 * this function must be called with even lengths, except
26 * for the last fragment, which may be odd
28 * it's best to have buff aligned on a 32-bit boundary
30 static inline unsigned int
31 csum_partial(const unsigned char * buff
, int len
, unsigned int sum
)
34 * Experiments with ethernet and slip connections show that buf
35 * is aligned on either a 2-byte or 4-byte boundary.
40 rp
.subreg
.even
= (unsigned long) buff
;
41 rp
.subreg
.odd
= (unsigned long) len
;
42 __asm__
__volatile__ (
43 "0: cksm %0,%1\n" /* do checksum on longs */
45 : "+&d" (sum
), "+&a" (rp
) : : "cc", "memory" );
47 __asm__
__volatile__ (
48 " lgr 2,%1\n" /* address in gpr 2 */
49 " lgfr 3,%2\n" /* length in gpr 3 */
50 "0: cksm %0,2\n" /* do checksum on longs */
53 : "d" (buff
), "d" (len
)
54 : "cc", "memory", "2", "3" );
55 #endif /* __s390x__ */
60 * csum_partial as an inline function
62 static inline unsigned int
63 csum_partial_inline(const unsigned char * buff
, int len
, unsigned int sum
)
68 rp
.subreg
.even
= (unsigned long) buff
;
69 rp
.subreg
.odd
= (unsigned long) len
;
70 __asm__
__volatile__ (
71 "0: cksm %0,%1\n" /* do checksum on longs */
73 : "+&d" (sum
), "+&a" (rp
) : : "cc", "memory" );
75 __asm__
__volatile__ (
76 " lgr 2,%1\n" /* address in gpr 2 */
77 " lgfr 3,%2\n" /* length in gpr 3 */
78 "0: cksm %0,2\n" /* do checksum on longs */
81 : "d" (buff
), "d" (len
)
82 : "cc", "memory", "2", "3" );
83 #endif /* __s390x__ */
88 * the same as csum_partial_copy, but copies from user space.
90 * here even more important to align src and dst on a 32-bit (or even
91 * better 64-bit) boundary
93 * Copy from userspace and compute checksum. If we catch an exception
94 * then zero the rest of the buffer.
96 static inline unsigned int
97 csum_partial_copy_from_user(const char __user
*src
, char *dst
,
98 int len
, unsigned int sum
,
103 missing
= copy_from_user(dst
, src
, len
);
105 memset(dst
+ len
- missing
, 0, missing
);
109 return csum_partial(dst
, len
, sum
);
/*
 * Copy len bytes from src to dst (no user-space access checks),
 * then checksum the destination buffer.
 */
static inline unsigned int
csum_partial_copy_nocheck (const char *src, char *dst, int len, unsigned int sum)
{
	memcpy(dst, src, len);
	return csum_partial_inline(dst, len, sum);
}
121 * Fold a partial checksum without adding pseudo headers
123 static inline unsigned short
124 csum_fold(unsigned int sum
)
129 __asm__
__volatile__ (
130 " slr %N1,%N1\n" /* %0 = H L */
131 " lr %1,%0\n" /* %0 = H L, %1 = H L 0 0 */
132 " srdl %1,16\n" /* %0 = H L, %1 = 0 H L 0 */
133 " alr %1,%N1\n" /* %0 = H L, %1 = L H L 0 */
134 " alr %0,%1\n" /* %0 = H+L+C L+H */
135 " srl %0,16\n" /* %0 = H+L+C */
136 : "+&d" (sum
), "=d" (rp
) : : "cc" );
137 #else /* __s390x__ */
138 __asm__
__volatile__ (
139 " sr 3,3\n" /* %0 = H*65536 + L */
140 " lr 2,%0\n" /* %0 = H L, R2/R3 = H L / 0 0 */
141 " srdl 2,16\n" /* %0 = H L, R2/R3 = 0 H / L 0 */
142 " alr 2,3\n" /* %0 = H L, R2/R3 = L H / L 0 */
143 " alr %0,2\n" /* %0 = H+L+C L+H */
144 " srl %0,16\n" /* %0 = H+L+C */
145 : "+&d" (sum
) : : "cc", "2", "3");
146 #endif /* __s390x__ */
147 return ((unsigned short) ~sum
);
151 * This is a version of ip_compute_csum() optimized for IP headers,
152 * which always checksum on 4 octet boundaries.
155 static inline unsigned short
156 ip_fast_csum(unsigned char *iph
, unsigned int ihl
)
162 rp
.subreg
.even
= (unsigned long) iph
;
163 rp
.subreg
.odd
= (unsigned long) ihl
*4;
164 __asm__
__volatile__ (
165 " sr %0,%0\n" /* set sum to zero */
166 "0: cksm %0,%1\n" /* do checksum on longs */
168 : "=&d" (sum
), "+&a" (rp
) : : "cc", "memory" );
169 #else /* __s390x__ */
170 __asm__
__volatile__ (
171 " slgr %0,%0\n" /* set sum to zero */
172 " lgr 2,%1\n" /* address in gpr 2 */
173 " lgfr 3,%2\n" /* length in gpr 3 */
174 "0: cksm %0,2\n" /* do checksum on ints */
177 : "d" (iph
), "d" (ihl
*4)
178 : "cc", "memory", "2", "3" );
179 #endif /* __s390x__ */
180 return csum_fold(sum
);
/*
 * computes the checksum of the TCP/UDP pseudo-header
 * returns a 32-bit checksum
 */
static inline unsigned int
csum_tcpudp_nofold(unsigned long saddr, unsigned long daddr,
                   unsigned short len, unsigned short proto,
                   unsigned int sum)
{
#ifndef __s390x__
	__asm__ __volatile__ (
		"    alr   %0,%1\n"  /* sum += saddr */
		"    brc   12,0f\n"  /* skip if no carry (CC 0 or 2) */
		"    ahi   %0,1\n"   /* add carry */
		"0:"
		: "+&d" (sum) : "d" (saddr) : "cc" );
	__asm__ __volatile__ (
		"    alr   %0,%1\n"  /* sum += daddr */
		"    brc   12,1f\n"
		"    ahi   %0,1\n"   /* add carry */
		"1:"
		: "+&d" (sum) : "d" (daddr) : "cc" );
	__asm__ __volatile__ (
		"    alr   %0,%1\n"  /* sum += (len<<16) + proto */
		"    brc   12,2f\n"
		"    ahi   %0,1\n"   /* add carry */
		"2:"
		: "+&d" (sum)
		: "d" (((unsigned int) len<<16) + (unsigned int) proto)
		: "cc" );
#else /* __s390x__ */
	__asm__ __volatile__ (
		"    lgfr  %0,%0\n"  /* widen sum to 64 bits */
		"    algr  %0,%1\n"  /* sum += saddr */
		"    brc   12,0f\n"  /* skip if no carry */
		"    aghi  %0,1\n"   /* add carry */
		"0:  algr  %0,%2\n"  /* sum += daddr */
		"    brc   12,1f\n"
		"    aghi  %0,1\n"   /* add carry */
		"1:  algfr %0,%3\n"  /* sum += (len<<16) + proto */
		"    brc   12,2f\n"
		"    aghi  %0,1\n"   /* add carry */
		"2:  srlg  0,%0,32\n"
		"    alr   %0,0\n"   /* fold to 32 bits */
		"    brc   12,3f\n"
		"    ahi   %0,1\n"   /* add carry */
		"3:  llgfr %0,%0"    /* clear upper half again */
		: "+&d" (sum)
		: "d" (saddr), "d" (daddr),
		  "d" (((unsigned int) len<<16) + (unsigned int) proto)
		: "cc", "0" );
#endif /* __s390x__ */
	return sum;
}
/*
 * computes the checksum of the TCP/UDP pseudo-header
 * returns a 16-bit checksum, already complemented
 */
static inline unsigned short int
csum_tcpudp_magic(unsigned long saddr, unsigned long daddr,
                  unsigned short len, unsigned short proto,
                  unsigned int sum)
{
	return csum_fold(csum_tcpudp_nofold(saddr,daddr,len,proto,sum));
}
/*
 * this routine is used for miscellaneous IP-like checksums, mainly
 * in icmp.c
 */
static inline unsigned short
ip_compute_csum(unsigned char * buff, int len)
{
	return csum_fold(csum_partial(buff, len, 0));
}
262 #endif /* _S390_CHECKSUM_H */