/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
 */

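/*
 * Usage: "pcrypt" is a crypto template that wraps an existing AEAD
 * algorithm and runs its requests in parallel across CPUs via padata,
 * completing them to the caller in submission order. Instance names
 * have the form "pcrypt(<aead>)". A minimal caller-side sketch,
 * assuming an underlying "gcm(aes)" implementation is available:
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("pcrypt(gcm(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 * All requests on such a tfm then complete asynchronously.
 */
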
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>

struct padata_pcrypt {
	struct padata_instance *pinst;
	struct workqueue_struct *wq;

	/*
	 * Cpumask for callback CPUs. It should be
	 * equal to serial cpumask of corresponding padata instance,
	 * so it is updated when padata notifies us about serial
	 * cpumask change.
	 *
	 * cb_cpumask is protected by RCU. This fact prevents us from
	 * using cpumask_var_t directly because the actual type of
	 * cpumask_var_t depends on kernel configuration (particularly on
	 * CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration
	 * cpumask_var_t may be either a pointer to the struct cpumask
	 * or a variable allocated on the stack. Thus we can not safely use
	 * cpumask_var_t with RCU operations such as rcu_assign_pointer or
	 * rcu_dereference. So cpumask_var_t is wrapped with struct
	 * pcrypt_cpumask which makes it possible to use it with RCU.
	 */
	struct pcrypt_cpumask {
		cpumask_var_t mask;
	} *cb_cpumask;
	struct notifier_block nblock;
};

static struct padata_pcrypt pencrypt;
static struct padata_pcrypt pdecrypt;
static struct kset *pcrypt_kset;

struct pcrypt_instance_ctx {
	struct crypto_aead_spawn spawn;
	atomic_t tfm_count;
};

struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};

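/*
 * Map the callback CPU requested in *cb_cpu onto the current callback
 * cpumask: if it is not a member of the mask, pick a member
 * deterministically (by index modulo the mask weight) and write it
 * back, then hand the request to padata for parallel processing.
 */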
static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
			      struct padata_pcrypt *pcrypt)
{
	unsigned int cpu_index, cpu, i;
	struct pcrypt_cpumask *cpumask;

	cpu = *cb_cpu;

	rcu_read_lock_bh();
	cpumask = rcu_dereference_bh(pcrypt->cb_cpumask);
	if (cpumask_test_cpu(cpu, cpumask->mask))
		goto out;

	if (!cpumask_weight(cpumask->mask))
		goto out;

	cpu_index = cpu % cpumask_weight(cpumask->mask);

	cpu = cpumask_first(cpumask->mask);
	for (i = 0; i < cpu_index; i++)
		cpu = cpumask_next(cpu, cpumask->mask);

	*cb_cpu = cpu;

out:
	rcu_read_unlock_bh();
	return padata_do_parallel(pcrypt->pinst, padata, cpu);
}

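/*
 * Key and authsize handling is simply delegated to the wrapped (child)
 * AEAD transform.
 */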
static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

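/*
 * Completion callback of the child request. Record the result and feed
 * the request back to padata, which completes requests to the caller in
 * their original submission order via pcrypt_aead_serial().
 */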
static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	padata_do_serial(padata);
}

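/*
 * Parallel worker: runs on the CPU padata selected and performs the
 * actual encryption. If the child completes synchronously, the request
 * is pushed into the serialization path right away; on -EINPROGRESS the
 * completion callback will do that instead.
 */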
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_encrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

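/*
 * Entry point for encryption: build a child request that mirrors the
 * caller's request, attach the parallel/serial callbacks to the padata
 * private data and submit it to the "pencrypt" padata instance.
 */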
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}

static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_decrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}

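/*
 * Transform setup: spread callback CPUs over the online CPUs in a
 * round-robin fashion (tfm_count serves as the round-robin counter) and
 * instantiate the child AEAD. The request size accounts for both the
 * pcrypt wrapper and the child's own request context.
 */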
static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
{
	int cpu, cpu_index;
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
		    cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(&ictx->spawn);

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
				     sizeof(struct aead_request) +
				     crypto_aead_reqsize(cipher));

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void pcrypt_free(struct aead_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->spawn);
	kfree(inst);
}

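/*
 * Fill in the generic algorithm fields of a new instance: the driver
 * name becomes "pcrypt(<child driver name>)" and the priority is raised
 * by 100 so the parallel wrapper wins algorithm lookups over the plain
 * child implementation.
 */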
static int pcrypt_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

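/*
 * Instantiate the template around a named AEAD: grab a spawn for the
 * child algorithm, mark the instance CRYPTO_ALG_ASYNC (completion is
 * always deferred through padata) and wire up the pcrypt operations.
 */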
static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
			      u32 type, u32 mask)
{
	struct pcrypt_instance_ctx *ctx;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));

	err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->spawn);
	err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.init = pcrypt_aead_init_tfm;
	inst->alg.exit = pcrypt_aead_exit_tfm;

	inst->alg.setkey = pcrypt_aead_setkey;
	inst->alg.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.encrypt = pcrypt_aead_encrypt;
	inst->alg.decrypt = pcrypt_aead_decrypt;

	inst->free = pcrypt_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_aead;

out:
	return err;

out_drop_aead:
	crypto_drop_aead(&ctx->spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask);
	}

	return -EINVAL;
}

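/*
 * padata notifier: when the serial cpumask of the padata instance
 * changes, publish a fresh copy of it as the callback cpumask. The old
 * mask is swapped out with rcu_assign_pointer() and freed only after
 * synchronize_rcu_bh(), so readers in pcrypt_do_parallel() never see a
 * freed mask.
 */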
static int pcrypt_cpumask_change_notify(struct notifier_block *self,
					unsigned long val, void *data)
{
	struct padata_pcrypt *pcrypt;
	struct pcrypt_cpumask *new_mask, *old_mask;
	struct padata_cpumask *cpumask = (struct padata_cpumask *)data;

	if (!(val & PADATA_CPU_SERIAL))
		return 0;

	pcrypt = container_of(self, struct padata_pcrypt, nblock);
	new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
	if (!new_mask)
		return -ENOMEM;
	if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
		kfree(new_mask);
		return -ENOMEM;
	}

	old_mask = pcrypt->cb_cpumask;

	cpumask_copy(new_mask->mask, cpumask->cbcpu);
	rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
	synchronize_rcu_bh();

	free_cpumask_var(old_mask->mask);
	kfree(old_mask);
	return 0;
}

static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}

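/*
 * Bring up one padata instance ("pencrypt" or "pdecrypt"): allocate its
 * workqueue and padata instance, seed the callback cpumask with the
 * currently online CPUs, register the cpumask notifier and expose the
 * instance under /sys/kernel/pcrypt/. get_online_cpus() keeps the
 * online mask stable while the initial cpumask is computed.
 */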
static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
			      const char *name)
{
	int ret = -ENOMEM;
	struct pcrypt_cpumask *mask;

	get_online_cpus();

	pcrypt->wq = alloc_workqueue("%s", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				     1, name);
	if (!pcrypt->wq)
		goto err;

	pcrypt->pinst = padata_alloc_possible(pcrypt->wq);
	if (!pcrypt->pinst)
		goto err_destroy_workqueue;

	mask = kmalloc(sizeof(*mask), GFP_KERNEL);
	if (!mask)
		goto err_free_padata;
	if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
		kfree(mask);
		goto err_free_padata;
	}

	cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
	rcu_assign_pointer(pcrypt->cb_cpumask, mask);

	pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
	ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	if (ret)
		goto err_free_cpumask;

	ret = pcrypt_sysfs_add(pcrypt->pinst, name);
	if (ret)
		goto err_unregister_notifier;

	put_online_cpus();

	return ret;

err_unregister_notifier:
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
err_free_cpumask:
	free_cpumask_var(mask->mask);
	kfree(mask);
err_free_padata:
	padata_free(pcrypt->pinst);
err_destroy_workqueue:
	destroy_workqueue(pcrypt->wq);
err:
	put_online_cpus();

	return ret;
}

static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
{
	free_cpumask_var(pcrypt->cb_cpumask->mask);
	kfree(pcrypt->cb_cpumask);

	padata_stop(pcrypt->pinst);
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	destroy_workqueue(pcrypt->wq);
	padata_free(pcrypt->pinst);
}

static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.create = pcrypt_create,
	.module = THIS_MODULE,
};

static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	padata_start(pencrypt.pinst);
	padata_start(pdecrypt.pinst);

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	pcrypt_fini_padata(&pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}

static void __exit pcrypt_exit(void)
{
	pcrypt_fini_padata(&pencrypt);
	pcrypt_fini_padata(&pdecrypt);

	kset_unregister(pcrypt_kset);
	crypto_unregister_template(&pcrypt_tmpl);
}

module_init(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");