/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <crypto/pcrypt.h>

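/*
 * pcrypt runs the encrypt/decrypt operations of an existing AEAD
 * algorithm in parallel over the active CPUs via padata; padata's
 * serialization step guarantees that requests still complete in the
 * order they were submitted.
 *
 * Illustrative sketch (not part of this file): a caller would
 * instantiate the wrapper through the usual template syntax, e.g.
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("pcrypt(authenc(hmac(sha1),cbc(aes)))",
 *				0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */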
static struct padata_instance *pcrypt_enc_padata;
static struct padata_instance *pcrypt_dec_padata;
static struct workqueue_struct *encwq;
static struct workqueue_struct *decwq;

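/*
 * Per-instance context: the spawn pins the wrapped algorithm, and
 * tfm_count spreads callback CPUs round-robin over the transforms
 * allocated from this instance (see pcrypt_aead_init_tfm()).
 * Per-transform context: the child AEAD and the CPU on which this
 * transform's completions are serialized.
 */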
struct pcrypt_instance_ctx {
	struct crypto_spawn spawn;
	unsigned int tfm_count;
};

struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};

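/*
 * Hand a request to padata. If the transform's cached callback CPU is
 * no longer active, deterministically derive a replacement from
 * cpu_active_mask and cache it back in *cb_cpu.
 */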
static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
			      struct padata_instance *pinst)
{
	unsigned int cpu_index, cpu, i;

	cpu = *cb_cpu;

	if (cpumask_test_cpu(cpu, cpu_active_mask))
		goto out;

	cpu_index = cpu % cpumask_weight(cpu_active_mask);

	cpu = cpumask_first(cpu_active_mask);
	for (i = 0; i < cpu_index; i++)
		cpu = cpumask_next(cpu, cpu_active_mask);

	*cb_cpu = cpu;

out:
	return padata_do_parallel(pinst, padata, cpu);
}

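/* setkey/setauthsize simply delegate to the child transform. */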
static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

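/*
 * Serialization callbacks: padata invokes these in submission order;
 * they complete the original request with the status stashed in
 * padata->info.
 */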
static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

static void pcrypt_aead_giv_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->areq.base.data, padata->info);
}

static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	padata_do_serial(padata);
}

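/*
 * Parallel worker: runs the child encryption on a padata worker CPU.
 * If the child itself completes asynchronously (-EINPROGRESS),
 * serialization is deferred to pcrypt_aead_done().
 */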
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_encrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

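/*
 * Encrypt entry point: mirror the caller's request onto the child
 * transform and dispatch it to the pencrypt padata instance. A zero
 * return from pcrypt_do_parallel() (padata not started) falls back to
 * encrypting synchronously on the child; -EINPROGRESS signals that the
 * request was queued.
 */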
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_assoc(creq, req->assoc, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_enc_padata);
	if (err)
		return err;
	else
		err = crypto_aead_encrypt(creq);

	return err;
}

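/* The decryption path mirrors encryption, on the pdecrypt instance. */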
static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_decrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_assoc(creq, req->assoc, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_dec_padata);
	if (err)
		return err;
	else
		err = crypto_aead_decrypt(creq);

	return err;
}

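/*
 * IV-generating encrypt path: same dispatch scheme, but completion
 * runs through pcrypt_aead_giv_serial() to reach the enclosing
 * aead_givcrypt_request.
 */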
static void pcrypt_aead_givenc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_givencrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

static int pcrypt_aead_givencrypt(struct aead_givcrypt_request *req)
{
	int err;
	struct aead_request *areq = &req->areq;
	struct pcrypt_request *preq = aead_request_ctx(areq);
	struct aead_givcrypt_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = aead_givcrypt_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(areq);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_givenc;
	padata->serial = pcrypt_aead_giv_serial;

	aead_givcrypt_set_tfm(creq, ctx->child);
	aead_givcrypt_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				   pcrypt_aead_done, areq);
	aead_givcrypt_set_crypt(creq, areq->src, areq->dst,
				areq->cryptlen, areq->iv);
	aead_givcrypt_set_assoc(creq, areq->assoc, areq->assoclen);
	aead_givcrypt_set_giv(creq, req->giv, req->seq);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_enc_padata);
	if (err)
		return err;
	else
		err = crypto_aead_givencrypt(creq);

	return err;
}

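/*
 * Transform init: pick this tfm's callback CPU round-robin over
 * cpu_active_mask using the instance-wide tfm_count, grab the child
 * AEAD, and size the request context to hold both request flavours
 * plus the child's own request.
 */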
static int pcrypt_aead_init_tfm(struct crypto_tfm *tfm)
{
	int cpu, cpu_index;
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *cipher;

	ictx->tfm_count++;

	cpu_index = ictx->tfm_count % cpumask_weight(cpu_active_mask);

	ctx->cb_cpu = cpumask_first(cpu_active_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_active_mask);

	cipher = crypto_spawn_aead(crypto_instance_ctx(inst));

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_aead.reqsize = sizeof(struct pcrypt_request)
		+ sizeof(struct aead_givcrypt_request)
		+ crypto_aead_reqsize(cipher);

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_tfm *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->child);
}

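/*
 * Build the "pcrypt(alg)" instance: inherit the wrapped algorithm's
 * identity and parameters, with cra_priority raised by 100 so the
 * parallel wrapper wins algorithm lookup over the plain version.
 */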
static struct crypto_instance *pcrypt_alloc_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst;
	struct pcrypt_instance_ctx *ctx;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst) {
		inst = ERR_PTR(-ENOMEM);
		goto out;
	}

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_inst;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	ctx = crypto_instance_ctx(inst);
	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

out:
	return inst;

out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *pcrypt_alloc_aead(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);

	alg = crypto_get_attr_alg(tb, algt->type,
				  (algt->mask & CRYPTO_ALG_TYPE_MASK));
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = pcrypt_alloc_instance(alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_aead_type;

	inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
	inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;

	inst->alg.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.cra_init = pcrypt_aead_init_tfm;
	inst->alg.cra_exit = pcrypt_aead_exit_tfm;

	inst->alg.cra_aead.setkey = pcrypt_aead_setkey;
	inst->alg.cra_aead.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.cra_aead.encrypt = pcrypt_aead_encrypt;
	inst->alg.cra_aead.decrypt = pcrypt_aead_decrypt;
	inst->alg.cra_aead.givencrypt = pcrypt_aead_givencrypt;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

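/* Template entry points: only AEAD algorithms can be wrapped. */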
static struct crypto_instance *pcrypt_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_alloc_aead(tb);
	}

	return ERR_PTR(-EINVAL);
}

static void pcrypt_free(struct crypto_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->spawn);
	kfree(inst);
}

static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.alloc = pcrypt_alloc,
	.free = pcrypt_free,
	.module = THIS_MODULE,
};

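/*
 * Module init: one workqueue plus one padata instance per direction
 * ("pencrypt"/"pdecrypt"); start both and register the template,
 * unwinding in reverse order on failure.
 */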
static int __init pcrypt_init(void)
{
	encwq = create_workqueue("pencrypt");
	if (!encwq)
		goto err;

	decwq = create_workqueue("pdecrypt");
	if (!decwq)
		goto err_destroy_encwq;

	pcrypt_enc_padata = padata_alloc(cpu_possible_mask, encwq);
	if (!pcrypt_enc_padata)
		goto err_destroy_decwq;

	pcrypt_dec_padata = padata_alloc(cpu_possible_mask, decwq);
	if (!pcrypt_dec_padata)
		goto err_free_padata;

	padata_start(pcrypt_enc_padata);
	padata_start(pcrypt_dec_padata);

	return crypto_register_template(&pcrypt_tmpl);

err_free_padata:
	padata_free(pcrypt_enc_padata);

err_destroy_decwq:
	destroy_workqueue(decwq);

err_destroy_encwq:
	destroy_workqueue(encwq);

err:
	return -ENOMEM;
}

static void __exit pcrypt_exit(void)
{
	padata_stop(pcrypt_enc_padata);
	padata_stop(pcrypt_dec_padata);

	destroy_workqueue(encwq);
	destroy_workqueue(decwq);

	padata_free(pcrypt_enc_padata);
	padata_free(pcrypt_dec_padata);

	crypto_unregister_template(&pcrypt_tmpl);
}

module_init(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");