/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#define CRYPTD_MAX_CPU_QLEN 100
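/*
 * cryptd turns a synchronous algorithm into an asynchronous one by
 * deferring the real crypto work to a workqueue.  Each CPU gets its own
 * request queue (bounded by CRYPTD_MAX_CPU_QLEN) and its own work item;
 * cryptd_queue_worker() drains that queue one request at a time on
 * kcrypto_wq.
 */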
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue *cpu_queue;
};
struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};
struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};
struct cryptd_blkcipher_ctx {
	struct crypto_blkcipher *child;
};
struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};
struct cryptd_hash_ctx {
	struct crypto_shash *child;
};
struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};
static void cryptd_queue_worker(struct work_struct *work);
static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}
static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;

	cpu = get_cpu();
	cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
	err = crypto_enqueue_request(&cpu_queue->queue, request);
	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
	put_cpu();

	return err;
}
/* Called in workqueue context, do one real cryption work (via
 * req->complete) and reschedule itself if there is more work to
 * be done. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/* Only handle one request at a time to avoid hogging the crypto
	 * workqueue.  preempt_disable/enable is used to prevent
	 * being preempted by cryptd_enqueue_request(). */
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}
static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}
static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct blkcipher_desc desc;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}
static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}
static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}
static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t complete)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = complete;

	return cryptd_enqueue_request(queue, &req->base);
}
static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}
static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}
static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}
static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}
static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_inst;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}
static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_put_alg(alg);
	return err;
}
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}
static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}
static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}
static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t complete)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = complete;

	return cryptd_enqueue_request(queue, &req->base);
}
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}
static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}
static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}
static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}
static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}
static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}
static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}
static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}
static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}
static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_import(&rctx->desc, in);
}
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	int err;

	salg = shash_attr_alg(tb[1], 0, 0);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_put_alg(alg);
	return err;
}
static struct cryptd_queue queue;
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_blkcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	}

	return -EINVAL;
}
static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	}

	crypto_drop_spawn(&ctx->spawn);
	kfree(inst);
}
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};
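/*
 * Instances of this template are created on demand: asking the crypto
 * API for "cryptd(<algorithm>)" ends up in cryptd_create(), which wraps
 * the named blkcipher or shash in an asynchronous ablkcipher or ahash
 * backed by the per-CPU queue above.
 */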
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
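/*
 * Illustrative usage sketch (not part of the original source); the
 * algorithm name "aes" is only an example:
 *
 *	struct cryptd_ablkcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_ablkcipher("aes", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...	submit ablkcipher_requests against &ctfm->base, or use
 *		cryptd_ablkcipher_child(ctfm) for synchronous access ...
 *	cryptd_free_ablkcipher(ctfm);
 */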
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);
void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
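/*
 * Illustrative usage sketch (not part of the original source); "sha1"
 * is only an example:
 *
 *	struct cryptd_ahash *chash;
 *
 *	chash = cryptd_alloc_ahash("sha1", 0, 0);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *	...	queue ahash_requests against &chash->base; use
 *		cryptd_ahash_child()/cryptd_shash_desc() for the
 *		synchronous shash paths ...
 *	cryptd_free_ahash(chash);
 */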
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}
static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}
module_init(cryptd_init);
module_exit(cryptd_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");