/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"
struct cipher_alg_compat {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);

	unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
	unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
	unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
	unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes);
};
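
/*
 * Compat view of struct cipher_alg: the optional cia_*_ecb/cia_*_cbc hooks
 * let an algorithm process a whole run of blocks in one call.  When an
 * algorithm does not provide them, the mode helpers below fall back (via
 * the ?: operator) to the generic per-block loops ecb_process(),
 * cbc_process_encrypt() and cbc_process_decrypt().
 */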
static inline void xor_64(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}
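
/*
 * Block XOR helpers installed as cit_xor_block for CBC below: xor_64
 * handles 8-byte blocks, xor_128 handles 16-byte blocks, 32 bits at a
 * time.  The u32 casts assume the buffers can be accessed as words, which
 * the alignmask handling elsewhere in this file is expected to provide.
 */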
static unsigned int crypt_slow(const struct cipher_desc *desc,
			       struct scatter_walk *in,
			       struct scatter_walk *out, unsigned int bsize)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
	u8 buffer[bsize * 2 + alignmask];
	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	u8 *dst = src + bsize;

	scatterwalk_copychunks(src, in, bsize, 0);
	desc->prfn(desc, dst, src, bsize);
	scatterwalk_copychunks(dst, out, bsize, 1);

	return bsize;
}
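
/*
 * Slow path: used when less than a full block is contiguous in either
 * scatterlist.  The block is gathered into an aligned stack buffer,
 * processed there, and scattered back out.
 */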
static inline unsigned int crypt_fast(const struct cipher_desc *desc,
				      struct scatter_walk *in,
				      struct scatter_walk *out,
				      unsigned int nbytes, u8 *tmp)
{
	u8 *src, *dst;
	u8 *real_src, *real_dst;

	real_src = scatterwalk_map(in, 0);
	real_dst = scatterwalk_map(out, 1);

	src = real_src;
	dst = scatterwalk_samebuf(in, out) ? src : real_dst;

	if (tmp) {
		memcpy(tmp, src, nbytes);
		src = tmp;
		dst = tmp;
	}

	nbytes = desc->prfn(desc, dst, src, nbytes);

	if (tmp)
		memcpy(real_dst, tmp, nbytes);

	scatterwalk_unmap(real_src, 0);
	scatterwalk_unmap(real_dst, 1);

	scatterwalk_advance(in, nbytes);
	scatterwalk_advance(out, nbytes);

	return nbytes;
}
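
/*
 * Fast path: operates directly on the mapped scatterlist pages.  When the
 * caller passes a bounce page in tmp (set up by crypt() for misaligned
 * walks), the data is copied through it so the processing function always
 * sees suitably aligned buffers.
 */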
/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
		 struct scatterlist *dst,
		 struct scatterlist *src,
		 unsigned int nbytes)
{
	struct scatter_walk walk_in, walk_out;
	struct crypto_tfm *tfm = desc->tfm;
	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
	unsigned long buffer = 0;

	if (!nbytes)
		return 0;

	if (nbytes % bsize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return -EINVAL;
	}

	scatterwalk_start(&walk_in, src);
	scatterwalk_start(&walk_out, dst);

	for (;;) {
		unsigned int n = nbytes;
		u8 *tmp = NULL;

		if (!scatterwalk_aligned(&walk_in, alignmask) ||
		    !scatterwalk_aligned(&walk_out, alignmask)) {
			if (!buffer) {
				buffer = __get_free_page(GFP_ATOMIC);
				if (!buffer)
					n = 0;
			}
			tmp = (u8 *)buffer;
		}

		n = scatterwalk_clamp(&walk_in, n);
		n = scatterwalk_clamp(&walk_out, n);

		if (likely(n >= bsize))
			n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
		else
			n = crypt_slow(desc, &walk_in, &walk_out, bsize);

		nbytes -= n;

		scatterwalk_done(&walk_in, 0, nbytes);
		scatterwalk_done(&walk_out, 1, nbytes);

		if (!nbytes)
			break;

		crypto_yield(tfm->crt_flags);
	}

	if (buffer)
		free_page(buffer);

	return 0;
}
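
/*
 * The mode helpers below (ecb_encrypt() and friends) drive crypt() by
 * filling in a struct cipher_desc: crfn is the single-block cipher
 * function, prfn the multi-block processor, and info the IV for chained
 * modes.
 */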
static int crypt_iv_unaligned(struct cipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	u8 *iv = desc->info;

	if (unlikely(((unsigned long)iv & alignmask))) {
		unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
		u8 buffer[ivsize + alignmask];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
		int err;

		desc->info = memcpy(tmp, iv, ivsize);
		err = crypt(desc, dst, src, nbytes);
		memcpy(iv, tmp, ivsize);

		return err;
	}

	return crypt(desc, dst, src, nbytes);
}
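
/*
 * A caller-supplied IV that violates the algorithm's alignmask is bounced
 * through an aligned stack copy and written back afterwards, so the caller
 * still sees the updated chaining value.
 */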
static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);

	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	nbytes -= bsize;

	do {
		xor(iv, src);
		fn(tfm, dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) <= nbytes);

	return done;
}
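
/*
 * The loop above is plain CBC encryption: C_i = E_K(P_i xor C_{i-1}), with
 * iv holding C_{i-1} between blocks (seeded from desc->info).
 */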
static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);
	unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);

	u8 stack[src == dst ? bsize + alignmask : 0];
	u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
	u8 **dst_p = src == dst ? &buf : &dst;

	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	nbytes -= bsize;

	do {
		u8 *tmp_dst = *dst_p;

		fn(tfm, tmp_dst, src);
		xor(tmp_dst, iv);
		memcpy(iv, src, bsize);
		if (tmp_dst != dst)
			memcpy(dst, tmp_dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) <= nbytes);

	return done;
}
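
/*
 * CBC decryption computes P_i = D_K(C_i) xor C_{i-1}.  Because C_i is
 * still needed as the next chaining value, an in-place operation
 * (src == dst) is routed through the aligned stack block buf before being
 * copied over the ciphertext.
 */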
static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
				const u8 *src, unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	int bsize = crypto_tfm_alg_blocksize(tfm);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
	unsigned int done = 0;

	nbytes -= bsize;

	do {
		fn(tfm, dst, src);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) <= nbytes);

	return done;
}
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	} else
		return cia->cia_setkey(tfm, key, keylen);
}
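
/*
 * setkey() clears the result flags, rejects keys outside the algorithm's
 * advertised [cia_min_keysize, cia_max_keysize] range with
 * CRYPTO_TFM_RES_BAD_KEY_LEN, and otherwise hands the key straight to the
 * algorithm's own cia_setkey().
 */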
static int ecb_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}
static int ecb_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}
static int cbc_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}
static int cbc_encrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = iv;

	return crypt_iv_unaligned(&desc, dst, src, nbytes);
}
static int cbc_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}
static int cbc_decrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = iv;

	return crypt_iv_unaligned(&desc, dst, src, nbytes);
}
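
/*
 * cbc_encrypt()/cbc_decrypt() chain through the per-tfm IV at
 * tfm->crt_cipher.cit_iv, while the *_iv variants use the IV supplied by
 * the caller and go through crypt_iv_unaligned() in case that pointer is
 * not suitably aligned.
 */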
static int nocrypt(struct crypto_tfm *tfm,
		   struct scatterlist *dst,
		   struct scatterlist *src,
		   unsigned int nbytes)
{
	return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
		      struct scatterlist *dst,
		      struct scatterlist *src,
		      unsigned int nbytes, u8 *iv)
{
	return -ENOSYS;
}
int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
	u32 mode = flags & CRYPTO_TFM_MODE_MASK;
	tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
	return 0;
}
static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
					      const u8 *),
				   struct crypto_tfm *tfm,
				   u8 *dst, const u8 *src)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	unsigned int size = crypto_tfm_alg_blocksize(tfm);
	u8 buffer[size + alignmask];
	u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

	memcpy(tmp, src, size);
	fn(tfm, tmp, tmp);
	memcpy(dst, tmp, size);
}
static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
				     u8 *dst, const u8 *src)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
		return;
	}

	cipher->cia_encrypt(tfm, dst, src);
}
static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
				     u8 *dst, const u8 *src)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
		return;
	}

	cipher->cia_decrypt(tfm, dst, src);
}
int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
	int ret = 0;
	struct cipher_tfm *ops = &tfm->crt_cipher;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	ops->cit_setkey = setkey;
	ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
		cipher_encrypt_unaligned : cipher->cia_encrypt;
	ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
		cipher_decrypt_unaligned : cipher->cia_decrypt;

	switch (tfm->crt_cipher.cit_mode) {
	case CRYPTO_TFM_MODE_ECB:
		ops->cit_encrypt = ecb_encrypt;
		ops->cit_decrypt = ecb_decrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CBC:
		ops->cit_encrypt = cbc_encrypt;
		ops->cit_decrypt = cbc_decrypt;
		ops->cit_encrypt_iv = cbc_encrypt_iv;
		ops->cit_decrypt_iv = cbc_decrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CFB:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CTR:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	default:
		BUG();
	}
	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
		unsigned long align;
		unsigned long addr;

		switch (crypto_tfm_alg_blocksize(tfm)) {
		case 8:
			ops->cit_xor_block = xor_64;
			break;

		case 16:
			ops->cit_xor_block = xor_128;
			break;

		default:
			printk(KERN_WARNING "%s: block size %u not supported\n",
			       crypto_tfm_alg_name(tfm),
			       crypto_tfm_alg_blocksize(tfm));
			ret = -EINVAL;
			goto out;
		}

		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
		align = crypto_tfm_alg_alignmask(tfm) + 1;
		addr = (unsigned long)crypto_tfm_ctx(tfm);
		addr = ALIGN(addr, align);
		addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
		ops->cit_iv = (void *)addr;
	}

out:
	return ret;
}
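
/*
 * Rough usage sketch of the legacy cipher interface this file backs (not
 * part of this file; names as in the old crypto API):
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	crypto_cipher_setkey(tfm, key, keylen);
 *	crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
 *	crypto_cipher_encrypt(tfm, &sg_dst, &sg_src, nbytes);
 *	crypto_free_tfm(tfm);
 *
 * Those wrappers end up in the cit_setkey/cit_encrypt/cit_encrypt_iv
 * handlers installed above.
 */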
void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}