 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"
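
/*
 * Block XOR helpers: fold one block of b into a in place.  xor_64 handles
 * 64-bit (8-byte) blocks, xor_128 handles 128-bit (16-byte) blocks; they are
 * installed as cit_xor_block for CBC chaining in crypto_init_cipher_ops().
 */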
static inline void xor_64(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}
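
/*
 * Slow path: the current block is misaligned or straddles a page boundary,
 * so it is bounced through an aligned on-stack buffer one block at a time.
 */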
static unsigned int crypt_slow(const struct cipher_desc *desc,
			       struct scatter_walk *in,
			       struct scatter_walk *out, unsigned int bsize)
{
	unsigned int alignmask = desc->tfm->__crt_alg->cra_alignmask;
	u8 buffer[bsize * 2 + alignmask];
	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	u8 *dst = src + bsize;
	unsigned int n;

	n = scatterwalk_copychunks(src, in, bsize, 0);
	scatterwalk_advance(in, n);

	desc->prfn(desc, dst, src, bsize);

	n = scatterwalk_copychunks(dst, out, bsize, 1);
	scatterwalk_advance(out, n);

	return bsize;
}
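
/*
 * Fast path: source and destination are mapped and large enough to process
 * in place.  When tmp is non-NULL the data is bounced through that page
 * instead, to satisfy the algorithm's alignment mask.
 */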
static inline unsigned int crypt_fast(const struct cipher_desc *desc,
				      struct scatter_walk *in,
				      struct scatter_walk *out,
				      unsigned int nbytes, u8 *tmp)
{
	u8 *src, *dst;

	src = in->data;
	dst = scatterwalk_samebuf(in, out) ? src : out->data;

	if (tmp) {
		memcpy(tmp, in->data, nbytes);
		src = tmp;
		dst = tmp;
	}

	nbytes = desc->prfn(desc, dst, src, nbytes);

	if (tmp)
		memcpy(out->data, tmp, nbytes);

	scatterwalk_advance(in, nbytes);
	scatterwalk_advance(out, nbytes);

	return nbytes;
}

/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
		 struct scatterlist *dst,
		 struct scatterlist *src,
		 unsigned int nbytes)
{
	struct scatter_walk walk_in, walk_out;
	struct crypto_tfm *tfm = desc->tfm;
	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
	unsigned int alignmask = tfm->__crt_alg->cra_alignmask;
	unsigned long buffer = 0;

	if (!nbytes)
		return 0;

	if (nbytes % bsize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return -EINVAL;
	}

	scatterwalk_start(&walk_in, src);
	scatterwalk_start(&walk_out, dst);

	for (;;) {
		unsigned int n = nbytes;
		u8 *tmp = NULL;

		if (!scatterwalk_aligned(&walk_in, alignmask) ||
		    !scatterwalk_aligned(&walk_out, alignmask)) {
			if (!buffer) {
				buffer = __get_free_page(GFP_ATOMIC);
				if (!buffer)
					n = 0;
			}
			tmp = (u8 *)buffer;
		}

		scatterwalk_map(&walk_in, 0);
		scatterwalk_map(&walk_out, 1);

		n = scatterwalk_clamp(&walk_in, n);
		n = scatterwalk_clamp(&walk_out, n);

		if (likely(n >= bsize))
			n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
		else
			n = crypt_slow(desc, &walk_in, &walk_out, bsize);

		nbytes -= n;

		scatterwalk_done(&walk_in, 0, nbytes);
		scatterwalk_done(&walk_out, 1, nbytes);

		if (!nbytes)
			break;

		crypto_yield(tfm);
	}

	if (buffer)
		free_page(buffer);

	return 0;
}
static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);
	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	do {
		xor(iv, src);
		fn(crypto_tfm_ctx(tfm), dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) < nbytes);

	return done;
}
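
/*
 * Software CBC decryption: each block is decrypted and then XORed with the
 * previous ciphertext block (the IV for the first block).  When src == dst
 * the result is staged in an on-stack block so that the ciphertext still
 * needed for the next IV is not overwritten prematurely.
 */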
static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
					u8 *dst, const u8 *src,
					unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
	int bsize = crypto_tfm_alg_blocksize(tfm);

	u8 stack[src == dst ? bsize : 0];
	u8 *buf = stack;
	u8 **dst_p = src == dst ? &buf : &dst;

	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	u8 *iv = desc->info;
	unsigned int done = 0;

	do {
		u8 *tmp_dst = *dst_p;

		fn(crypto_tfm_ctx(tfm), tmp_dst, src);
		xor(tmp_dst, iv);
		memcpy(iv, src, bsize);
		if (tmp_dst != dst)
			memcpy(dst, tmp_dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) < nbytes);

	return done;
}
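
/*
 * Software ECB fallback, used when the algorithm provides no cia_encrypt_ecb
 * or cia_decrypt_ecb hook: every block is processed independently.
 */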
static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
				const u8 *src, unsigned int nbytes)
{
	struct crypto_tfm *tfm = desc->tfm;
	int bsize = crypto_tfm_alg_blocksize(tfm);
	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
	unsigned int done = 0;

	do {
		fn(crypto_tfm_ctx(tfm), dst, src);

		src += bsize;
		dst += bsize;
	} while ((done += bsize) < nbytes);

	return done;
}
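
/*
 * Key-length validation wrapper around the algorithm's cia_setkey;
 * out-of-range keys are rejected with CRYPTO_TFM_RES_BAD_KEY_LEN.
 */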
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
			       &tfm->crt_flags);
}
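
/*
 * Mode entry points.  Each one fills in a cipher_desc with the algorithm's
 * block function and the per-block processing routine (the algorithm's own
 * ECB/CBC helper when available, otherwise the software fallback above),
 * then hands the scatterlists to crypt().
 */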
static int ecb_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}

static int ecb_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

	return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_encrypt;
	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
	desc.info = iv;

	return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = tfm->crt_cipher.cit_iv;

	return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	struct cipher_desc desc;
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

	desc.tfm = tfm;
	desc.crfn = cipher->cia_decrypt;
	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
	desc.info = iv;

	return crypt(&desc, dst, src, nbytes);
}
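
/*
 * CFB and CTR are recognised modes but have no implementation in this file;
 * the stubs below simply report the operation as unsupported.
 */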
static int nocrypt(struct crypto_tfm *tfm,
		   struct scatterlist *dst,
		   struct scatterlist *src,
		   unsigned int nbytes)
{
	return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
		      struct scatterlist *dst,
		      struct scatterlist *src,
		      unsigned int nbytes, u8 *iv)
{
	return -ENOSYS;
}
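
/*
 * Record the requested block cipher mode (defaulting to ECB) and the
 * weak-key checking request on the transform.
 */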
int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
	u32 mode = flags & CRYPTO_TFM_MODE_MASK;

	tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
	if (flags & CRYPTO_TFM_REQ_WEAK_KEY)
		tfm->crt_flags = CRYPTO_TFM_REQ_WEAK_KEY;

	return 0;
}
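
/*
 * Wire up the per-mode handlers for a new cipher transform and, for CBC,
 * pick the XOR helper and allocate the IV buffer.  These handlers are
 * normally reached through the crypto_cipher_*() wrappers in
 * <linux/crypto.h>; a rough caller-side sketch (illustrative only — key, iv
 * and the scatterlists are placeholders, error handling omitted):
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *
 *	crypto_cipher_setkey(tfm, key, keylen);
 *	crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
 *	crypto_cipher_encrypt(tfm, &sg_dst, &sg_src, nbytes);
 *	crypto_free_tfm(tfm);
 */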
int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
	int ret = 0;
	struct cipher_tfm *ops = &tfm->crt_cipher;

	ops->cit_setkey = setkey;

	switch (tfm->crt_cipher.cit_mode) {
	case CRYPTO_TFM_MODE_ECB:
		ops->cit_encrypt = ecb_encrypt;
		ops->cit_decrypt = ecb_decrypt;
		break;

	case CRYPTO_TFM_MODE_CBC:
		ops->cit_encrypt = cbc_encrypt;
		ops->cit_decrypt = cbc_decrypt;
		ops->cit_encrypt_iv = cbc_encrypt_iv;
		ops->cit_decrypt_iv = cbc_decrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CFB:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CTR:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	default:
		BUG();
	}

	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
		switch (crypto_tfm_alg_blocksize(tfm)) {
		case 8:
			ops->cit_xor_block = xor_64;
			break;

		case 16:
			ops->cit_xor_block = xor_128;
			break;

		default:
			printk(KERN_WARNING "%s: block size %u not supported\n",
			       crypto_tfm_alg_name(tfm),
			       crypto_tfm_alg_blocksize(tfm));
			ret = -EINVAL;
			goto out;
		}

		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
		ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
		if (ops->cit_iv == NULL)
			ret = -ENOMEM;
	}

out:
	return ret;
}

void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
	kfree(tfm->crt_cipher.cit_iv);
}