/*
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"
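
/*
 * Block-sized XOR helpers used by the generic CBC path below: xor_64
 * handles 64-bit (8-byte) blocks and xor_128 handles 128-bit (16-byte)
 * blocks, treating the buffers as arrays of u32 words.
 */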
static inline void xor_64(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
}
static inline void xor_128(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}
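
/*
 * Slow path: the current block straddles a scatterlist/page boundary or is
 * misaligned, so it is gathered into an aligned on-stack bounce buffer,
 * processed there, and scattered back out, one block at a time.
 */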
static unsigned int crypt_slow(const struct cipher_desc *desc,
			       struct scatter_walk *in,
			       struct scatter_walk *out, unsigned int bsize)
{
	unsigned int alignmask = crypto_tfm_alg_alignmask(desc->tfm);
	u8 buffer[bsize * 2 + alignmask];
	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	u8 *dst = src + bsize;
	unsigned int n;

	/* Gather one block from the source walk into the aligned buffer. */
	n = scatterwalk_copychunks(src, in, bsize, 0);
	scatterwalk_advance(in, n);

	desc->prfn(desc, dst, src, bsize);

	/* Scatter the processed block back out to the destination walk. */
	n = scatterwalk_copychunks(dst, out, bsize, 1);
	scatterwalk_advance(out, n);

	return bsize;
}
static inline unsigned int crypt_fast(const struct cipher_desc *desc,
				      struct scatter_walk *in,
				      struct scatter_walk *out,
				      unsigned int nbytes, u8 *tmp)
{
	u8 *src, *dst;

	src = in->data;
	dst = scatterwalk_samebuf(in, out) ? src : out->data;

	if (tmp) {
		memcpy(tmp, in->data, nbytes);
		src = tmp;
		dst = tmp;
	}

	nbytes = desc->prfn(desc, dst, src, nbytes);

	if (tmp)
		memcpy(out->data, tmp, nbytes);

	scatterwalk_advance(in, nbytes);
	scatterwalk_advance(out, nbytes);

	return nbytes;
}
/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
		 struct scatterlist *dst,
		 struct scatterlist *src,
		 unsigned int nbytes)
{
	struct scatter_walk walk_in, walk_out;
	struct crypto_tfm *tfm = desc->tfm;
	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
	unsigned long buffer = 0;

	if (!nbytes)
		return 0;

	if (nbytes % bsize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return -EINVAL;
	}

	scatterwalk_start(&walk_in, src);
	scatterwalk_start(&walk_out, dst);

	for (;;) {
		unsigned int n = nbytes;
		u8 *tmp = NULL;

		if (!scatterwalk_aligned(&walk_in, alignmask) ||
		    !scatterwalk_aligned(&walk_out, alignmask)) {
			if (!buffer) {
				buffer = __get_free_page(GFP_ATOMIC);
				if (!buffer)
					n = 0;
			}
			tmp = (u8 *)buffer;
		}

		scatterwalk_map(&walk_in, 0);
		scatterwalk_map(&walk_out, 1);

		n = scatterwalk_clamp(&walk_in, n);
		n = scatterwalk_clamp(&walk_out, n);

		/* Process as many whole blocks as fit in the mapped region,
		 * otherwise fall back to the block-at-a-time slow path. */
		if (likely(n >= bsize))
			n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
		else
			n = crypt_slow(desc, &walk_in, &walk_out, bsize);

		nbytes -= n;

		scatterwalk_done(&walk_in, 0, nbytes);
		scatterwalk_done(&walk_out, 1, nbytes);

		if (!nbytes)
			break;

		crypto_yield(tfm);
	}

	if (buffer)
		free_page(buffer);

	return 0;
}
static int crypt_iv_unaligned(struct cipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
	u8 *iv = desc->info;

	if (unlikely(((unsigned long)iv & alignmask))) {
		unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
		u8 buffer[ivsize + alignmask];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
		int err;

		/* Bounce the caller's IV through an aligned stack copy. */
		desc->info = memcpy(tmp, iv, ivsize);
		err = crypt(desc, dst, src, nbytes);
		memcpy(iv, tmp, ivsize);

		return err;
	}

	return crypt(desc, dst, src, nbytes);
}
static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);

	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	do {
		/* IV ^= plaintext, encrypt, then chain the ciphertext. */
		xor(iv, src);
		fn(crypto_tfm_ctx(tfm), dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) < nbytes);

	return done;
}
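
/*
 * Generic software CBC decryption.  When source and destination overlap,
 * each block is decrypted into a stack buffer first, so the original
 * ciphertext is still available to serve as the next block's IV.
 */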
static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);

	/* Only reserve the bounce block when decrypting in place. */
	u8 stack[src == dst ? bsize : 0];
	u8 *buf = stack;
	u8 **dst_p = src == dst ? &buf : &dst;

	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	do {
		u8 *tmp_dst = *dst_p;

		fn(crypto_tfm_ctx(tfm), tmp_dst, src);
		xor(tmp_dst, iv);
		memcpy(iv, src, bsize);
		if (tmp_dst != dst)
			memcpy(dst, tmp_dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) < nbytes);

	return done;
}
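
/*
 * Generic software ECB processing: every block is handed to the cipher
 * independently, with no chaining state.
 */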
static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
				const u8 *src, unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	int bsize = crypto_tfm_alg_blocksize(tfm);
	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	unsigned int done = 0;

	do {
		fn(crypto_tfm_ctx(tfm), dst, src);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) < nbytes);

	return done;
}
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
			       &tfm->crt_flags);
}
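
/*
 * The mode entry points below fill in a cipher_desc and hand it to crypt()
 * or crypt_iv_unaligned().  desc.prfn selects the per-chunk processor: the
 * algorithm's own multi-block hook (cia_encrypt_ecb, cia_encrypt_cbc, ...)
 * when one is provided, otherwise the generic ecb_process/cbc_process_*
 * fallback, chosen via the ?: operator.
 */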
static int ecb_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}
static int ecb_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}
static int cbc_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}
static int cbc_encrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = iv;

	return crypt_iv_unaligned(&desc, dst, src, nbytes);
}
static int cbc_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}
static int cbc_decrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = iv;

	return crypt_iv_unaligned(&desc, dst, src, nbytes);
}
/* Stub for modes (CFB, CTR) that are not implemented by this layer. */
static int nocrypt(struct crypto_tfm *tfm,
		   struct scatterlist *dst,
		   struct scatterlist *src,
		   unsigned int nbytes)
{
	return -ENOSYS;
}
static int nocrypt_iv(struct crypto_tfm *tfm,
		      struct scatterlist *dst,
		      struct scatterlist *src,
		      unsigned int nbytes, u8 *iv)
{
	return -ENOSYS;
}
int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
	u32 mode = flags & CRYPTO_TFM_MODE_MASK;

	tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
	if (flags & CRYPTO_TFM_REQ_WEAK_KEY)
		tfm->crt_flags = CRYPTO_TFM_REQ_WEAK_KEY;

	return 0;
}
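
/*
 * Wire up the mode-specific handlers for a new cipher tfm.  CFB and CTR are
 * not implemented here, so those modes get the -ENOSYS stubs.  For CBC an IV
 * buffer is also reserved directly after the (aligned) algorithm context and
 * a block-size-matched XOR helper is chosen.
 */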
int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
	int ret = 0;
	struct cipher_tfm *ops = &tfm->crt_cipher;

	ops->cit_setkey = setkey;

	switch (tfm->crt_cipher.cit_mode) {
	case CRYPTO_TFM_MODE_ECB:
		ops->cit_encrypt = ecb_encrypt;
		ops->cit_decrypt = ecb_decrypt;
		break;

	case CRYPTO_TFM_MODE_CBC:
		ops->cit_encrypt = cbc_encrypt;
		ops->cit_decrypt = cbc_decrypt;
		ops->cit_encrypt_iv = cbc_encrypt_iv;
		ops->cit_decrypt_iv = cbc_decrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CFB:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CTR:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	default:
		BUG();
	}

	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
		unsigned long align;
		unsigned long addr;

		switch (crypto_tfm_alg_blocksize(tfm)) {
		case 8:
			ops->cit_xor_block = xor_64;
			break;

		case 16:
			ops->cit_xor_block = xor_128;
			break;

		default:
			printk(KERN_WARNING "%s: block size %u not supported\n",
			       crypto_tfm_alg_name(tfm),
			       crypto_tfm_alg_blocksize(tfm));
			ret = -EINVAL;
			goto out;
		}

		/* The IV lives directly after the aligned algorithm context. */
		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
		align = crypto_tfm_alg_alignmask(tfm) + 1;
		addr = (unsigned long)crypto_tfm_ctx(tfm);
		addr = ALIGN(addr, align);
		addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
		ops->cit_iv = (void *)addr;
	}

out:
	return ret;
}
void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}
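
/*
 * A minimal usage sketch of how a caller would exercise the CBC path above,
 * assuming the crypto_alloc_tfm()/crypto_cipher_*() inline wrappers from
 * <linux/crypto.h> of the same era; key, iv, buf and nbytes are placeholder
 * names and error handling is elided.
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	struct scatterlist sg[1];
 *
 *	crypto_cipher_setkey(tfm, key, 16);
 *	crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
 *
 *	sg[0].page = virt_to_page(buf);
 *	sg[0].offset = offset_in_page(buf);
 *	sg[0].length = nbytes;
 *
 *	crypto_cipher_encrypt(tfm, sg, sg, nbytes);	(ends up in cbc_encrypt)
 *	crypto_free_tfm(tfm);
 */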