crypto/cbc.c

/*
 * CBC: Cipher Block Chaining mode
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
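
/*
 * CBC chains blocks through the block cipher E under key K:
 *
 *      C[0] = IV
 *      C[i] = E_K(P[i] XOR C[i-1])     (encryption)
 *      P[i] = D_K(C[i]) XOR C[i-1]     (decryption)
 *
 * so encryption is inherently serial, while decryption of block i only
 * needs ciphertext blocks i and i-1 and can therefore run back to front
 * when working in place (see crypto_cbc_decrypt_inplace below).
 */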

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypto_cbc_ctx {
        struct crypto_cipher *child;
};

static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(parent);
        struct crypto_cipher *child = ctx->child;
        int err;

        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key, keylen);
        crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
                                     CRYPTO_TFM_RES_MASK);
        return err;
}

static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
                                      struct crypto_cipher *tfm)
{
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_encrypt;
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 *iv = walk->iv;

        do {
                crypto_xor(iv, src, bsize);
                fn(crypto_cipher_tfm(tfm), dst, iv);
                memcpy(iv, dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}
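
/*
 * When source and destination buffers are distinct, the IV buffer can be
 * used as scratch space (crypto_cbc_encrypt_segment above).  In the
 * in-place case below the just-written ciphertext block itself serves as
 * the chaining value for the next block, and the final block is copied
 * back into walk->iv so chaining continues across walk steps.
 */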

static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
                                      struct crypto_cipher *tfm)
{
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_encrypt;
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *iv = walk->iv;

        do {
                crypto_xor(src, iv, bsize);
                fn(crypto_cipher_tfm(tfm), src, src);
                iv = src;

                src += bsize;
        } while ((nbytes -= bsize) >= bsize);

        memcpy(walk->iv, iv, bsize);

        return nbytes;
}

static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
                              struct scatterlist *dst, struct scatterlist *src,
                              unsigned int nbytes)
{
        struct blkcipher_walk walk;
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child);
                else
                        nbytes = crypto_cbc_encrypt_segment(desc, &walk, child);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}

static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
                                      struct crypto_cipher *tfm)
{
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_decrypt;
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 *iv = walk->iv;

        do {
                fn(crypto_cipher_tfm(tfm), dst, src);
                crypto_xor(dst, iv, bsize);
                iv = src;

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        memcpy(walk->iv, iv, bsize);

        return nbytes;
}
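
/*
 * In-place decryption runs from the last full block back to the first:
 * each block is decrypted in place and then XORed with the (still intact)
 * preceding ciphertext block.  The last ciphertext block is saved up front
 * so it can become the next IV once it has been overwritten.
 */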

static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc,
                                      struct blkcipher_walk *walk,
                                      struct crypto_cipher *tfm)
{
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                crypto_cipher_alg(tfm)->cia_decrypt;
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 last_iv[bsize];

        /* Start of the last block. */
        src += nbytes - (nbytes & (bsize - 1)) - bsize;
        memcpy(last_iv, src, bsize);

        for (;;) {
                fn(crypto_cipher_tfm(tfm), src, src);
                if ((nbytes -= bsize) < bsize)
                        break;
                crypto_xor(src, src - bsize, bsize);
                src -= bsize;
        }

        crypto_xor(src, walk->iv, bsize);
        memcpy(walk->iv, last_iv, bsize);

        return nbytes;
}

static int crypto_cbc_decrypt(struct blkcipher_desc *desc,
                              struct scatterlist *dst, struct scatterlist *src,
                              unsigned int nbytes)
{
        struct blkcipher_walk walk;
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child);
                else
                        nbytes = crypto_cbc_decrypt_segment(desc, &walk, child);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}

static int crypto_cbc_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_cipher *cipher;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        return 0;
}

static void crypto_cbc_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
        crypto_free_cipher(ctx->child);
}
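
/*
 * Template construction: "cbc" wraps any single-block cipher (e.g. "aes"),
 * producing an instance such as "cbc(aes)".  The instance inherits block
 * size, priority and alignment from the underlying cipher, uses one block
 * as its IV, and forces u32 alignment because crypto_xor() operates on the
 * data as u32s.
 */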

static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb)
{
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
        if (err)
                return ERR_PTR(err);

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        inst = ERR_PTR(-EINVAL);
        if (!is_power_of_2(alg->cra_blocksize))
                goto out_put_alg;

        inst = crypto_alloc_instance("cbc", alg);
        if (IS_ERR(inst))
                goto out_put_alg;

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
        inst->alg.cra_priority = alg->cra_priority;
        inst->alg.cra_blocksize = alg->cra_blocksize;
        inst->alg.cra_alignmask = alg->cra_alignmask;
        inst->alg.cra_type = &crypto_blkcipher_type;

        /* We access the data as u32s when xoring. */
        inst->alg.cra_alignmask |= __alignof__(u32) - 1;

        inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
        inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
        inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

        inst->alg.cra_ctxsize = sizeof(struct crypto_cbc_ctx);

        inst->alg.cra_init = crypto_cbc_init_tfm;
        inst->alg.cra_exit = crypto_cbc_exit_tfm;

        inst->alg.cra_blkcipher.setkey = crypto_cbc_setkey;
        inst->alg.cra_blkcipher.encrypt = crypto_cbc_encrypt;
        inst->alg.cra_blkcipher.decrypt = crypto_cbc_decrypt;

out_put_alg:
        crypto_mod_put(alg);
        return inst;
}

static void crypto_cbc_free(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(inst);
}

static struct crypto_template crypto_cbc_tmpl = {
        .name = "cbc",
        .alloc = crypto_cbc_alloc,
        .free = crypto_cbc_free,
        .module = THIS_MODULE,
};

static int __init crypto_cbc_module_init(void)
{
        return crypto_register_template(&crypto_cbc_tmpl);
}

static void __exit crypto_cbc_module_exit(void)
{
        crypto_unregister_template(&crypto_cbc_tmpl);
}

module_init(crypto_cbc_module_init);
module_exit(crypto_cbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CBC block cipher algorithm");