/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"
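
/*
 * cryptfn_t is an algorithm's raw single-block transform (ctx, dst, src);
 * procfn_t is the chaining-mode wrapper (ECB/CBC below) that invokes it
 * once per block on behalf of crypt().
 */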
typedef void (cryptfn_t)(void *, u8 *, const u8 *);
typedef void (procfn_t)(struct crypto_tfm *, u8 *,
			u8 *, cryptfn_t, void *);
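
/*
 * Block XOR helpers, installed as cit_xor_block for CBC chaining.
 * Both assume the byte pointers are safe to access as u32 words.
 */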
static inline void xor_64(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}
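
/*
 * prepare_*() return a pointer the block transform can use directly when
 * the block lies within one page; otherwise (or, for the destination,
 * when operating in place) they redirect it to a stack temporary, and
 * the matching complete_*() helpers copy the result back and advance
 * the walk.
 */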
static inline void *prepare_src(struct scatter_walk *walk, int bsize,
				void *tmp, int in_place)
{
	void *src = walk->data;
	int n = bsize;

	if (unlikely(scatterwalk_across_pages(walk, bsize))) {
		src = tmp;
		n = scatterwalk_copychunks(src, walk, bsize, 0);
	}
	scatterwalk_advance(walk, n);
	return src;
}

static inline void *prepare_dst(struct scatter_walk *walk, int bsize,
				void *tmp, int in_place)
{
	void *dst = walk->data;

	if (unlikely(scatterwalk_across_pages(walk, bsize)) || in_place)
		dst = tmp;
	return dst;
}

static inline void complete_src(struct scatter_walk *walk, int bsize,
				void *src, int in_place)
{
}

static inline void complete_dst(struct scatter_walk *walk, int bsize,
				void *dst, int in_place)
{
	int n = bsize;

	if (unlikely(scatterwalk_across_pages(walk, bsize)))
		n = scatterwalk_copychunks(dst, walk, bsize, 1);
	else if (in_place)
		memcpy(walk->data, dst, bsize);
	scatterwalk_advance(walk, n);
}

/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per block.
 */
static int crypt(struct crypto_tfm *tfm,
		 struct scatterlist *dst,
		 struct scatterlist *src,
		 unsigned int nbytes, cryptfn_t crfn,
		 procfn_t prfn, void *info)
{
	struct scatter_walk walk_in, walk_out;
	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
	u8 tmp_src[bsize];
	u8 tmp_dst[bsize];

	if (!nbytes)
		return 0;

	if (nbytes % bsize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return -EINVAL;
	}

	scatterwalk_start(&walk_in, src);
	scatterwalk_start(&walk_out, dst);

	for (;;) {
		u8 *src_p, *dst_p;
		int in_place;

		scatterwalk_map(&walk_in, 0);
		scatterwalk_map(&walk_out, 1);

		in_place = scatterwalk_samebuf(&walk_in, &walk_out);

		do {
			src_p = prepare_src(&walk_in, bsize, tmp_src,
					    in_place);
			dst_p = prepare_dst(&walk_out, bsize, tmp_dst,
					    in_place);

			prfn(tfm, dst_p, src_p, crfn, info);

			complete_src(&walk_in, bsize, src_p, in_place);
			complete_dst(&walk_out, bsize, dst_p, in_place);

			nbytes -= bsize;
		} while (nbytes &&
			 !scatterwalk_across_pages(&walk_in, bsize) &&
			 !scatterwalk_across_pages(&walk_out, bsize));

		scatterwalk_done(&walk_in, 0, nbytes);
		scatterwalk_done(&walk_out, 1, nbytes);

		if (!nbytes)
			return 0;

		crypto_yield(tfm);
	}
}
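
/*
 * Typical use of this interface (a sketch assuming the crypto API of
 * this kernel generation, not code from this file):
 *
 *	struct scatterlist sg[1];
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *
 *	crypto_cipher_setkey(tfm, key, keylen);
 *	crypto_cipher_set_iv(tfm, iv, crypto_tfm_alg_ivsize(tfm));
 *	sg[0].page = virt_to_page(buf);
 *	sg[0].offset = offset_in_page(buf);
 *	sg[0].length = len;
 *	crypto_cipher_encrypt(tfm, sg, sg, len);
 *	crypto_free_tfm(tfm);
 */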

static void cbc_process_encrypt(struct crypto_tfm *tfm, u8 *dst, u8 *src,
				cryptfn_t fn, void *info)
{
	u8 *iv = info;

	/* CBC encrypt: dst = E(src ^ iv), then iv = dst for chaining */
	tfm->crt_u.cipher.cit_xor_block(iv, src);
	fn(crypto_tfm_ctx(tfm), dst, iv);
	memcpy(iv, dst, crypto_tfm_alg_blocksize(tfm));
}

static void cbc_process_decrypt(struct crypto_tfm *tfm, u8 *dst, u8 *src,
				cryptfn_t fn, void *info)
{
	u8 *iv = info;

	/*
	 * CBC decrypt: dst = D(src) ^ iv, then iv = src.  src is still
	 * intact here even in place, since crypt() decrypts into a
	 * temporary block in that case.
	 */
	fn(crypto_tfm_ctx(tfm), dst, src);
	tfm->crt_u.cipher.cit_xor_block(dst, iv);
	memcpy(iv, src, crypto_tfm_alg_blocksize(tfm));
}

static void ecb_process(struct crypto_tfm *tfm, u8 *dst, u8 *src,
			cryptfn_t fn, void *info)
{
	fn(crypto_tfm_ctx(tfm), dst, src);
}
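
/*
 * Key length is validated against the algorithm's declared range before
 * the call is handed to cia_setkey(), which gets &tfm->crt_flags so it
 * can report conditions such as weak keys.
 */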
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
			       &tfm->crt_flags);
}

static int ecb_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return crypt(tfm, dst, src, nbytes,
		     tfm->__crt_alg->cra_cipher.cia_encrypt,
		     ecb_process, NULL);
}

static int ecb_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	return crypt(tfm, dst, src, nbytes,
		     tfm->__crt_alg->cra_cipher.cia_decrypt,
		     ecb_process, NULL);
}

static int cbc_encrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	return crypt(tfm, dst, src, nbytes,
		     tfm->__crt_alg->cra_cipher.cia_encrypt,
		     cbc_process_encrypt, tfm->crt_cipher.cit_iv);
}

static int cbc_encrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	return crypt(tfm, dst, src, nbytes,
		     tfm->__crt_alg->cra_cipher.cia_encrypt,
		     cbc_process_encrypt, iv);
}

static int cbc_decrypt(struct crypto_tfm *tfm,
		       struct scatterlist *dst,
		       struct scatterlist *src,
		       unsigned int nbytes)
{
	return crypt(tfm, dst, src, nbytes,
		     tfm->__crt_alg->cra_cipher.cia_decrypt,
		     cbc_process_decrypt, tfm->crt_cipher.cit_iv);
}

static int cbc_decrypt_iv(struct crypto_tfm *tfm,
			  struct scatterlist *dst,
			  struct scatterlist *src,
			  unsigned int nbytes, u8 *iv)
{
	return crypt(tfm, dst, src, nbytes,
		     tfm->__crt_alg->cra_cipher.cia_decrypt,
		     cbc_process_decrypt, iv);
}

/* Stubs for chaining modes (CFB, CTR) that are not yet implemented. */
static int nocrypt(struct crypto_tfm *tfm,
		   struct scatterlist *dst,
		   struct scatterlist *src,
		   unsigned int nbytes)
{
	return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
		      struct scatterlist *dst,
		      struct scatterlist *src,
		      unsigned int nbytes, u8 *iv)
{
	return -ENOSYS;
}

int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
	u32 mode = flags & CRYPTO_TFM_MODE_MASK;

	/* Default to ECB if no chaining mode was requested. */
	tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
	if (flags & CRYPTO_TFM_REQ_WEAK_KEY)
		tfm->crt_flags = CRYPTO_TFM_REQ_WEAK_KEY;

	return 0;
}

int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
	int ret = 0;
	struct cipher_tfm *ops = &tfm->crt_cipher;

	ops->cit_setkey = setkey;

	switch (tfm->crt_cipher.cit_mode) {
	case CRYPTO_TFM_MODE_ECB:
		ops->cit_encrypt = ecb_encrypt;
		ops->cit_decrypt = ecb_decrypt;
		break;

	case CRYPTO_TFM_MODE_CBC:
		ops->cit_encrypt = cbc_encrypt;
		ops->cit_decrypt = cbc_decrypt;
		ops->cit_encrypt_iv = cbc_encrypt_iv;
		ops->cit_decrypt_iv = cbc_decrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CFB:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	case CRYPTO_TFM_MODE_CTR:
		ops->cit_encrypt = nocrypt;
		ops->cit_decrypt = nocrypt;
		ops->cit_encrypt_iv = nocrypt_iv;
		ops->cit_decrypt_iv = nocrypt_iv;
		break;

	default:
		BUG();
	}

	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {

		switch (crypto_tfm_alg_blocksize(tfm)) {
		case 8:
			ops->cit_xor_block = xor_64;
			break;

		case 16:
			ops->cit_xor_block = xor_128;
			break;

		default:
			printk(KERN_WARNING "%s: block size %u not supported\n",
			       crypto_tfm_alg_name(tfm),
			       crypto_tfm_alg_blocksize(tfm));
			ret = -EINVAL;
			goto out;
		}

		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
		ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
		if (ops->cit_iv == NULL)
			ret = -ENOMEM;
	}

out:
	return ret;
}

void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
	if (tfm->crt_cipher.cit_iv)
		kfree(tfm->crt_cipher.cit_iv);
}