/*
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"

static inline void xor_64(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
        ((u32 *)a)[2] ^= ((u32 *)b)[2];
        ((u32 *)a)[3] ^= ((u32 *)b)[3];
}

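/*
 * The word-wise XOR helpers above handle 64-bit and 128-bit cipher blocks;
 * crypto_init_cipher_ops() installs one of them as cit_xor_block for CBC
 * chaining.  crypt_slow() below is the slow path of the scatterlist walk:
 * it bounces a single block through an alignmask-aligned stack buffer, so
 * blocks that straddle a page boundary or sit at an unaligned address can
 * still be processed.
 */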
static unsigned int crypt_slow(const struct cipher_desc *desc,
                               struct scatter_walk *in,
                               struct scatter_walk *out, unsigned int bsize)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
        u8 buffer[bsize * 2 + alignmask];
        u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        u8 *dst = src + bsize;
        unsigned int n;

        n = scatterwalk_copychunks(src, in, bsize, 0);
        scatterwalk_advance(in, n);

        desc->prfn(desc, dst, src, bsize);

        n = scatterwalk_copychunks(dst, out, bsize, 1);
        scatterwalk_advance(out, n);

        return bsize;
}

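/*
 * crypt_fast() handles the common case: the current input and output
 * segments are mapped, aligned and hold at least one full block, so the
 * multi-block processor (prfn) can run on them directly.  When the caller
 * supplies a bounce page (tmp), the data is copied through it instead to
 * satisfy the algorithm's alignment mask.
 */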
static inline unsigned int crypt_fast(const struct cipher_desc *desc,
                                      struct scatter_walk *in,
                                      struct scatter_walk *out,
                                      unsigned int nbytes, u8 *tmp)
{
        u8 *src, *dst;

        src = in->data;
        dst = scatterwalk_samebuf(in, out) ? src : out->data;

        if (tmp) {
                memcpy(tmp, in->data, nbytes);
                src = tmp;
                dst = tmp;
        }

        nbytes = desc->prfn(desc, dst, src, nbytes);

        if (tmp)
                memcpy(out->data, tmp, nbytes);

        scatterwalk_advance(in, nbytes);
        scatterwalk_advance(out, nbytes);

        return nbytes;
}

/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
                 struct scatterlist *dst,
                 struct scatterlist *src,
                 unsigned int nbytes)
{
        struct scatter_walk walk_in, walk_out;
        struct crypto_tfm *tfm = desc->tfm;
        const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
        unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
        unsigned long buffer = 0;

        if (!nbytes)
                return 0;

        if (nbytes % bsize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                return -EINVAL;
        }

        scatterwalk_start(&walk_in, src);
        scatterwalk_start(&walk_out, dst);

        for (;;) {
                unsigned int n = nbytes;
                u8 *tmp = NULL;

                if (!scatterwalk_aligned(&walk_in, alignmask) ||
                    !scatterwalk_aligned(&walk_out, alignmask)) {
                        if (!buffer) {
                                buffer = __get_free_page(GFP_ATOMIC);
                                if (!buffer)
                                        n = 0;
                        }
                        tmp = (u8 *)buffer;
                }

                scatterwalk_map(&walk_in, 0);
                scatterwalk_map(&walk_out, 1);

                n = scatterwalk_clamp(&walk_in, n);
                n = scatterwalk_clamp(&walk_out, n);

                if (likely(n >= bsize))
                        n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
                else
                        n = crypt_slow(desc, &walk_in, &walk_out, bsize);

                nbytes -= n;

                scatterwalk_done(&walk_in, 0, nbytes);
                scatterwalk_done(&walk_out, 1, nbytes);

                if (!nbytes)
                        break;

                crypto_yield(tfm);
        }

        if (buffer)
                free_page(buffer);

        return 0;
}

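/*
 * Illustrative sketch only (it mirrors the mode wrappers further down and
 * is not an additional API): a caller drives crypt() by filling in a
 * cipher_desc, e.g. for ECB encryption
 *
 *      struct cipher_desc desc;
 *
 *      desc.tfm  = tfm;
 *      desc.crfn = cipher->cia_encrypt;
 *      desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;
 *      err = crypt(&desc, dst_sg, src_sg, nbytes);
 *
 * where dst_sg/src_sg are the destination and source scatterlists and
 * nbytes must be a multiple of the cipher block size.
 */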
static int crypt_iv_unaligned(struct cipher_desc *desc,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        u8 *iv = desc->info;

        if (unlikely(((unsigned long)iv & alignmask))) {
                unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
                u8 buffer[ivsize + alignmask];
                u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
                int err;

                desc->info = memcpy(tmp, iv, ivsize);
                err = crypt(desc, dst, src, nbytes);
                memcpy(iv, tmp, ivsize);

                return err;
        }

        return crypt(desc, dst, src, nbytes);
}

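/*
 * Generic CBC encryption: C[i] = E_K(P[i] xor C[i-1]) with C[0] = IV.
 * Each plaintext block is XORed into the IV buffer, encrypted from there,
 * and the resulting ciphertext block becomes the chaining value for the
 * next iteration.
 */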
static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);

        void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        do {
                xor(iv, src);
                fn(crypto_tfm_ctx(tfm), dst, iv);
                memcpy(iv, dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes - bsize);

        return done;
}

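/*
 * Generic CBC decryption: P[i] = D_K(C[i]) xor C[i-1] with C[0] = IV.
 * For in-place operation (src == dst) the ciphertext block still needed
 * as the next chaining value would be overwritten by the plaintext, so
 * the block is decrypted into a stack buffer first and only copied out
 * once the chaining value has been saved.
 */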
static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);

        u8 stack[src == dst ? bsize : 0];
        u8 *buf = stack;
        u8 **dst_p = src == dst ? &buf : &dst;

        void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        do {
                u8 *tmp_dst = *dst_p;

                fn(crypto_tfm_ctx(tfm), tmp_dst, src);
                xor(tmp_dst, iv);
                memcpy(iv, src, bsize);
                if (tmp_dst != dst)
                        memcpy(dst, tmp_dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes - bsize);

        return done;
}

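/*
 * Generic ECB processing: every block is fed to the block cipher
 * independently, with no chaining value; encryption and decryption share
 * this helper and differ only in the crfn the caller supplies.
 */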
static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
                                const u8 *src, unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        int bsize = crypto_tfm_alg_blocksize(tfm);
        void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
        unsigned int done = 0;

        do {
                fn(crypto_tfm_ctx(tfm), dst, src);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes - bsize);

        return done;
}

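/*
 * setkey() checks the key length against the algorithm's declared
 * cia_min_keysize/cia_max_keysize before passing the key to the
 * algorithm's own cia_setkey(); a pointer to crt_flags is handed along so
 * the algorithm can report result flags (for example a weak key).
 */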
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
        struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        } else
                return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
                                       &tfm->crt_flags);
}

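/*
 * The mode wrappers below all follow the same pattern: fill in a
 * cipher_desc with the single-block routine (crfn) and a multi-block
 * processor (prfn), preferring an algorithm-provided helper such as
 * cia_encrypt_ecb when present and otherwise falling back to the generic
 * processors above (the "?:" idiom), then hand the scatterlists to crypt()
 * or, for the explicit-IV variants, to crypt_iv_unaligned().
 */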
static int ecb_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int ecb_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = iv;

        return crypt_iv_unaligned(&desc, dst, src, nbytes);
}

static int cbc_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = iv;

        return crypt_iv_unaligned(&desc, dst, src, nbytes);
}

static int nocrypt(struct crypto_tfm *tfm,
                   struct scatterlist *dst,
                   struct scatterlist *src,
                   unsigned int nbytes)
{
        return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
                      struct scatterlist *dst,
                      struct scatterlist *src,
                      unsigned int nbytes, u8 *iv)
{
        return -ENOSYS;
}

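/*
 * nocrypt()/nocrypt_iv() above are stubs installed for modes that are
 * selectable but not implemented here (CFB and CTR), so using them fails
 * with an error instead of silently doing nothing.
 */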
int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
        u32 mode = flags & CRYPTO_TFM_MODE_MASK;

        tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;

        return 0;
}

int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
        int ret = 0;
        struct cipher_tfm *ops = &tfm->crt_cipher;

        ops->cit_setkey = setkey;

        switch (tfm->crt_cipher.cit_mode) {
        case CRYPTO_TFM_MODE_ECB:
                ops->cit_encrypt = ecb_encrypt;
                ops->cit_decrypt = ecb_decrypt;
                break;

        case CRYPTO_TFM_MODE_CBC:
                ops->cit_encrypt = cbc_encrypt;
                ops->cit_decrypt = cbc_decrypt;
                ops->cit_encrypt_iv = cbc_encrypt_iv;
                ops->cit_decrypt_iv = cbc_decrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CFB:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CTR:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        default:
                BUG();
        }

        if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
                unsigned long align;
                unsigned long addr;

                switch (crypto_tfm_alg_blocksize(tfm)) {
                case 8:
                        ops->cit_xor_block = xor_64;
                        break;

                case 16:
                        ops->cit_xor_block = xor_128;
                        break;

                default:
                        printk(KERN_WARNING "%s: block size %u not supported\n",
                               crypto_tfm_alg_name(tfm),
                               crypto_tfm_alg_blocksize(tfm));
                        ret = -EINVAL;
                        goto out;
                }

                ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
                align = crypto_tfm_alg_alignmask(tfm) + 1;
                addr = (unsigned long)crypto_tfm_ctx(tfm);
                addr = ALIGN(addr, align);
                addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
                ops->cit_iv = (void *)addr;
        }

out:
        return ret;
}

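/*
 * For CBC transforms the IV is not a separate allocation: cit_iv points
 * just past the alignmask-aligned algorithm context inside the tfm's own
 * memory, so crypto_exit_cipher_ops() below has no per-mode state of its
 * own to free.
 */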
void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}