crypto/pcbc.c
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/compiler.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};
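
/*
 * Key the underlying single-block cipher.  The request flags set on the pcbc
 * skcipher are copied down to the child cipher before setkey, and any result
 * flags the child raises are reflected back on the parent transform.
 */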
static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}
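
/*
 * Encrypt a contiguous run of blocks with source and destination in separate
 * buffers.  PCBC chains as
 *
 *	C[i] = E_K(P[i] ^ P[i-1] ^ C[i-1]),  with P[0] ^ C[0] taken as the IV,
 *
 * so walk->iv always holds the XOR of the previous plaintext and ciphertext
 * blocks.  Returns the number of trailing bytes (less than one block) for the
 * caller to pass to skcipher_walk_done().
 */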
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
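
/*
 * As above, but encrypting in place (src == dst).  Each plaintext block is
 * saved in tmpbuf before it is overwritten by the ciphertext, since both
 * values are needed to form the next chaining value.
 */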
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		crypto_xor_cpy(iv, tmpbuf, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
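
/*
 * Top-level encrypt: walk the request's scatterlists in virtually mapped
 * chunks and dispatch each chunk to the in-place or out-of-place helper,
 * handing the leftover byte count back to skcipher_walk_done().
 */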
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
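
/*
 * Decrypt a contiguous run of blocks with separate source and destination.
 * Inverting the encryption recurrence gives
 *
 *	P[i] = D_K(C[i]) ^ P[i-1] ^ C[i-1],  with P[0] ^ C[0] taken as the IV,
 *
 * and walk->iv again carries the running P[i] ^ C[i] chaining value.
 */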
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
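
/*
 * In-place decryption: each ciphertext block is saved in tmpbuf before being
 * overwritten with plaintext, so the next chaining value can still be
 * computed from both.
 */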
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[bsize] __aligned(__alignof__(u32));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		crypto_xor_cpy(iv, src, tmpbuf, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
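
/*
 * Allocate and release the underlying single-block cipher when a pcbc
 * transform is created or destroyed; crypto_pcbc_free() tears down an
 * unregistered or released template instance.
 */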
static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
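
/*
 * Template constructor: "pcbc(alg)" (for example "pcbc(aes)") wraps a plain
 * CRYPTO_ALG_TYPE_CIPHER algorithm in an skcipher instance.  The IV size is
 * the cipher block size, and the key sizes are inherited from the wrapped
 * cipher.
 */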
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
	    ~CRYPTO_ALG_INTERNAL)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
				      (algt->type & CRYPTO_ALG_INTERNAL),
				  CRYPTO_ALG_TYPE_MASK |
				  (algt->mask & CRYPTO_ALG_INTERNAL));
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	crypto_mod_put(alg);
	if (err)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.init = crypto_pcbc_init_tfm;
	inst->alg.exit = crypto_pcbc_exit_tfm;

	inst->alg.setkey = crypto_pcbc_setkey;
	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	inst->free = crypto_pcbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");