6 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
7 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
15 #include <linux/compiler.h>
16 #include <linux/kernel.h>
17 #include <linux/crypto.h>
18 #include <linux/errno.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <asm/scatterlist.h>
24 #include "scatterwalk.h"
27 struct crypto_tfm
*tfm
;
28 void (*crfn
)(void *ctx
, u8
*dst
, const u8
*src
);
29 unsigned int (*prfn
)(const struct cipher_desc
*desc
, u8
*dst
,
30 const u8
*src
, unsigned int nbytes
);
34 static inline void xor_64(u8
*a
, const u8
*b
)
36 ((u32
*)a
)[0] ^= ((u32
*)b
)[0];
37 ((u32
*)a
)[1] ^= ((u32
*)b
)[1];
40 static inline void xor_128(u8
*a
, const u8
*b
)
42 ((u32
*)a
)[0] ^= ((u32
*)b
)[0];
43 ((u32
*)a
)[1] ^= ((u32
*)b
)[1];
44 ((u32
*)a
)[2] ^= ((u32
*)b
)[2];
45 ((u32
*)a
)[3] ^= ((u32
*)b
)[3];
48 static unsigned int crypt_slow(const struct cipher_desc
*desc
,
49 struct scatter_walk
*in
,
50 struct scatter_walk
*out
, unsigned int bsize
)
56 n
= scatterwalk_copychunks(src
, in
, bsize
, 0);
57 scatterwalk_advance(in
, n
);
59 desc
->prfn(desc
, dst
, src
, bsize
);
61 n
= scatterwalk_copychunks(dst
, out
, bsize
, 1);
62 scatterwalk_advance(out
, n
);
67 static inline unsigned int crypt_fast(const struct cipher_desc
*desc
,
68 struct scatter_walk
*in
,
69 struct scatter_walk
*out
,
75 dst
= scatterwalk_samebuf(in
, out
) ? src
: out
->data
;
77 nbytes
= desc
->prfn(desc
, dst
, src
, nbytes
);
79 scatterwalk_advance(in
, nbytes
);
80 scatterwalk_advance(out
, nbytes
);
86 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
87 * multiple page boundaries by using temporary blocks. In user context,
88 * the kernel is given a chance to schedule us once per page.
90 static int crypt(const struct cipher_desc
*desc
,
91 struct scatterlist
*dst
,
92 struct scatterlist
*src
,
95 struct scatter_walk walk_in
, walk_out
;
96 struct crypto_tfm
*tfm
= desc
->tfm
;
97 const unsigned int bsize
= crypto_tfm_alg_blocksize(tfm
);
102 if (nbytes
% bsize
) {
103 tfm
->crt_flags
|= CRYPTO_TFM_RES_BAD_BLOCK_LEN
;
107 scatterwalk_start(&walk_in
, src
);
108 scatterwalk_start(&walk_out
, dst
);
113 scatterwalk_map(&walk_in
, 0);
114 scatterwalk_map(&walk_out
, 1);
116 n
= scatterwalk_clamp(&walk_in
, nbytes
);
117 n
= scatterwalk_clamp(&walk_out
, n
);
119 if (likely(n
>= bsize
))
120 n
= crypt_fast(desc
, &walk_in
, &walk_out
, n
);
122 n
= crypt_slow(desc
, &walk_in
, &walk_out
, bsize
);
126 scatterwalk_done(&walk_in
, 0, nbytes
);
127 scatterwalk_done(&walk_out
, 1, nbytes
);
136 static unsigned int cbc_process_encrypt(const struct cipher_desc
*desc
,
137 u8
*dst
, const u8
*src
,
140 struct crypto_tfm
*tfm
= desc
->tfm
;
141 void (*xor)(u8
*, const u8
*) = tfm
->crt_u
.cipher
.cit_xor_block
;
142 int bsize
= crypto_tfm_alg_blocksize(tfm
);
144 void (*fn
)(void *, u8
*, const u8
*) = desc
->crfn
;
146 unsigned int done
= 0;
150 fn(crypto_tfm_ctx(tfm
), dst
, iv
);
151 memcpy(iv
, dst
, bsize
);
155 } while ((done
+= bsize
) < nbytes
);
160 static unsigned int cbc_process_decrypt(const struct cipher_desc
*desc
,
161 u8
*dst
, const u8
*src
,
164 struct crypto_tfm
*tfm
= desc
->tfm
;
165 void (*xor)(u8
*, const u8
*) = tfm
->crt_u
.cipher
.cit_xor_block
;
166 int bsize
= crypto_tfm_alg_blocksize(tfm
);
168 u8 stack
[src
== dst
? bsize
: 0];
170 u8
**dst_p
= src
== dst
? &buf
: &dst
;
172 void (*fn
)(void *, u8
*, const u8
*) = desc
->crfn
;
174 unsigned int done
= 0;
177 u8
*tmp_dst
= *dst_p
;
179 fn(crypto_tfm_ctx(tfm
), tmp_dst
, src
);
181 memcpy(iv
, src
, bsize
);
183 memcpy(dst
, tmp_dst
, bsize
);
187 } while ((done
+= bsize
) < nbytes
);
192 static unsigned int ecb_process(const struct cipher_desc
*desc
, u8
*dst
,
193 const u8
*src
, unsigned int nbytes
)
195 struct crypto_tfm
*tfm
= desc
->tfm
;
196 int bsize
= crypto_tfm_alg_blocksize(tfm
);
197 void (*fn
)(void *, u8
*, const u8
*) = desc
->crfn
;
198 unsigned int done
= 0;
201 fn(crypto_tfm_ctx(tfm
), dst
, src
);
205 } while ((done
+= bsize
) < nbytes
);
210 static int setkey(struct crypto_tfm
*tfm
, const u8
*key
, unsigned int keylen
)
212 struct cipher_alg
*cia
= &tfm
->__crt_alg
->cra_cipher
;
214 if (keylen
< cia
->cia_min_keysize
|| keylen
> cia
->cia_max_keysize
) {
215 tfm
->crt_flags
|= CRYPTO_TFM_RES_BAD_KEY_LEN
;
218 return cia
->cia_setkey(crypto_tfm_ctx(tfm
), key
, keylen
,
222 static int ecb_encrypt(struct crypto_tfm
*tfm
,
223 struct scatterlist
*dst
,
224 struct scatterlist
*src
, unsigned int nbytes
)
226 struct cipher_desc desc
;
229 desc
.crfn
= tfm
->__crt_alg
->cra_cipher
.cia_encrypt
;
230 desc
.prfn
= ecb_process
;
232 return crypt(&desc
, dst
, src
, nbytes
);
235 static int ecb_decrypt(struct crypto_tfm
*tfm
,
236 struct scatterlist
*dst
,
237 struct scatterlist
*src
,
240 struct cipher_desc desc
;
243 desc
.crfn
= tfm
->__crt_alg
->cra_cipher
.cia_decrypt
;
244 desc
.prfn
= ecb_process
;
246 return crypt(&desc
, dst
, src
, nbytes
);
249 static int cbc_encrypt(struct crypto_tfm
*tfm
,
250 struct scatterlist
*dst
,
251 struct scatterlist
*src
,
254 struct cipher_desc desc
;
257 desc
.crfn
= tfm
->__crt_alg
->cra_cipher
.cia_encrypt
;
258 desc
.prfn
= cbc_process_encrypt
;
259 desc
.info
= tfm
->crt_cipher
.cit_iv
;
261 return crypt(&desc
, dst
, src
, nbytes
);
264 static int cbc_encrypt_iv(struct crypto_tfm
*tfm
,
265 struct scatterlist
*dst
,
266 struct scatterlist
*src
,
267 unsigned int nbytes
, u8
*iv
)
269 struct cipher_desc desc
;
272 desc
.crfn
= tfm
->__crt_alg
->cra_cipher
.cia_encrypt
;
273 desc
.prfn
= cbc_process_encrypt
;
276 return crypt(&desc
, dst
, src
, nbytes
);
279 static int cbc_decrypt(struct crypto_tfm
*tfm
,
280 struct scatterlist
*dst
,
281 struct scatterlist
*src
,
284 struct cipher_desc desc
;
287 desc
.crfn
= tfm
->__crt_alg
->cra_cipher
.cia_decrypt
;
288 desc
.prfn
= cbc_process_decrypt
;
289 desc
.info
= tfm
->crt_cipher
.cit_iv
;
291 return crypt(&desc
, dst
, src
, nbytes
);
294 static int cbc_decrypt_iv(struct crypto_tfm
*tfm
,
295 struct scatterlist
*dst
,
296 struct scatterlist
*src
,
297 unsigned int nbytes
, u8
*iv
)
299 struct cipher_desc desc
;
302 desc
.crfn
= tfm
->__crt_alg
->cra_cipher
.cia_decrypt
;
303 desc
.prfn
= cbc_process_decrypt
;
306 return crypt(&desc
, dst
, src
, nbytes
);
309 static int nocrypt(struct crypto_tfm
*tfm
,
310 struct scatterlist
*dst
,
311 struct scatterlist
*src
,
317 static int nocrypt_iv(struct crypto_tfm
*tfm
,
318 struct scatterlist
*dst
,
319 struct scatterlist
*src
,
320 unsigned int nbytes
, u8
*iv
)
325 int crypto_init_cipher_flags(struct crypto_tfm
*tfm
, u32 flags
)
327 u32 mode
= flags
& CRYPTO_TFM_MODE_MASK
;
329 tfm
->crt_cipher
.cit_mode
= mode
? mode
: CRYPTO_TFM_MODE_ECB
;
330 if (flags
& CRYPTO_TFM_REQ_WEAK_KEY
)
331 tfm
->crt_flags
= CRYPTO_TFM_REQ_WEAK_KEY
;
336 int crypto_init_cipher_ops(struct crypto_tfm
*tfm
)
339 struct cipher_tfm
*ops
= &tfm
->crt_cipher
;
341 ops
->cit_setkey
= setkey
;
343 switch (tfm
->crt_cipher
.cit_mode
) {
344 case CRYPTO_TFM_MODE_ECB
:
345 ops
->cit_encrypt
= ecb_encrypt
;
346 ops
->cit_decrypt
= ecb_decrypt
;
349 case CRYPTO_TFM_MODE_CBC
:
350 ops
->cit_encrypt
= cbc_encrypt
;
351 ops
->cit_decrypt
= cbc_decrypt
;
352 ops
->cit_encrypt_iv
= cbc_encrypt_iv
;
353 ops
->cit_decrypt_iv
= cbc_decrypt_iv
;
356 case CRYPTO_TFM_MODE_CFB
:
357 ops
->cit_encrypt
= nocrypt
;
358 ops
->cit_decrypt
= nocrypt
;
359 ops
->cit_encrypt_iv
= nocrypt_iv
;
360 ops
->cit_decrypt_iv
= nocrypt_iv
;
363 case CRYPTO_TFM_MODE_CTR
:
364 ops
->cit_encrypt
= nocrypt
;
365 ops
->cit_decrypt
= nocrypt
;
366 ops
->cit_encrypt_iv
= nocrypt_iv
;
367 ops
->cit_decrypt_iv
= nocrypt_iv
;
374 if (ops
->cit_mode
== CRYPTO_TFM_MODE_CBC
) {
376 switch (crypto_tfm_alg_blocksize(tfm
)) {
378 ops
->cit_xor_block
= xor_64
;
382 ops
->cit_xor_block
= xor_128
;
386 printk(KERN_WARNING
"%s: block size %u not supported\n",
387 crypto_tfm_alg_name(tfm
),
388 crypto_tfm_alg_blocksize(tfm
));
393 ops
->cit_ivsize
= crypto_tfm_alg_blocksize(tfm
);
394 ops
->cit_iv
= kmalloc(ops
->cit_ivsize
, GFP_KERNEL
);
395 if (ops
->cit_iv
== NULL
)
403 void crypto_exit_cipher_ops(struct crypto_tfm
*tfm
)
405 kfree(tfm
->crt_cipher
.cit_iv
);