/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

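/*
 * Illustrative usage (a sketch, not part of this file): a caller obtains
 * an asynchronous handle by wrapping an existing algorithm name in the
 * "cryptd" template, e.g.
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("cryptd(cbc(aes))", 0, 0);
 *
 * Requests issued through such a tfm are queued below and executed later
 * in the context of the cryptd kernel thread.
 */
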
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

#define CRYPTD_MAX_QLEN 100

struct cryptd_state {
	spinlock_t lock;
	struct mutex mutex;
	struct crypto_queue queue;
	struct task_struct *task;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_state *state;
};

struct cryptd_blkcipher_ctx {
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	struct crypto_hash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
};

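/*
 * All cryptd instances share one daemon.  Each instance context records a
 * pointer to that daemon's state so it can be recovered from any tfm that
 * was created from the instance.
 */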
static inline struct cryptd_state *cryptd_get_state(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->state;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

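/*
 * Runs in cryptd thread context.  An err of -EINPROGRESS means this call
 * is only the backlog notification, so the actual cipher work is skipped.
 * Otherwise the request is processed synchronously via the child
 * blkcipher, and the caller's original completion function (stashed at
 * enqueue time) is restored before being invoked with bottom halves
 * disabled, the context completion handlers conventionally expect.
 */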
static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct blkcipher_desc desc;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

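/*
 * Queueing path: stash the caller's completion function in the request
 * context, substitute the cryptd handler, and kick the daemon thread.
 * The enqueue call returns -EINPROGRESS on success and -EBUSY when the
 * queue is full (backlogged if the request allows it); that value is
 * passed straight back to the caller.
 */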
static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t complete)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_state *state =
		cryptd_get_state(crypto_ablkcipher_tfm(tfm));
	int err;

	rctx->complete = req->base.complete;
	req->base.complete = complete;

	spin_lock_bh(&state->lock);
	err = ablkcipher_enqueue_request(&state->queue, req);
	spin_unlock_bh(&state->lock);

	wake_up_process(state->task);

	return err;
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct cryptd_state *state = cryptd_get_state(tfm);
	int active;

	mutex_lock(&state->mutex);
	active = ablkcipher_tfm_in_queue(&state->queue,
					 __crypto_ablkcipher_cast(tfm));
	mutex_unlock(&state->mutex);

	BUG_ON(active);

	crypto_free_blkcipher(ctx->child);
}

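/*
 * Builds the common part of a cryptd instance: the driver name becomes
 * "cryptd(<child driver name>)" and the priority is raised by 50 so the
 * wrapped algorithm outranks its synchronous parent during lookup.
 */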
static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
						     struct cryptd_state *state)
{
	struct crypto_instance *inst;
	struct cryptd_instance_ctx *ctx;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst) {
		inst = ERR_PTR(-ENOMEM);
		goto out;
	}

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_inst;

	ctx = crypto_instance_ctx(inst);
	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	ctx->state = state;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

out:
	return inst;

out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *cryptd_alloc_blkcipher(
	struct rtattr **tb, struct cryptd_state *state)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = cryptd_alloc_instance(alg, state);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

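/*
 * The hash side mirrors the blkcipher side: each instance spawns a
 * synchronous crypto_hash child and exposes an asynchronous ahash
 * interface on top of the shared request queue.
 */
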
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_hash *cipher;

	cipher = crypto_spawn_hash(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ahash.reqsize =
		sizeof(struct cryptd_hash_request_ctx);
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct cryptd_state *state = cryptd_get_state(tfm);
	int active;

	mutex_lock(&state->mutex);
	active = ahash_tfm_in_queue(&state->queue,
				    __crypto_ahash_cast(tfm));
	mutex_unlock(&state->mutex);

	BUG_ON(active);

	crypto_free_hash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_hash *child = ctx->child;
	int err;

	crypto_hash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_hash_set_flags(child, crypto_ahash_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_hash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_hash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t complete)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_state *state =
		cryptd_get_state(crypto_ahash_tfm(tfm));
	int err;

	rctx->complete = req->base.complete;
	req->base.complete = complete;

	spin_lock_bh(&state->lock);
	err = ahash_enqueue_request(&state->queue, req);
	spin_unlock_bh(&state->lock);

	wake_up_process(state->task);

	return err;
}

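/*
 * Each of the four handlers below follows the same pattern as
 * cryptd_blkcipher_crypt(): skip the work on -EINPROGRESS, run the
 * synchronous child operation, then restore and invoke the caller's
 * completion function with bottom halves disabled.
 */
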
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->init(&desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->update(&desc,
					     req->src,
					     req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->final(&desc, req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->digest(&desc,
					     req->src,
					     req->nbytes,
					     req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static struct crypto_instance *cryptd_alloc_hash(
	struct rtattr **tb, struct cryptd_state *state)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_HASH_MASK);
	if (IS_ERR(alg))
		return ERR_PTR(PTR_ERR(alg));

	inst = cryptd_alloc_instance(alg, state);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_ahash_type;

	inst->alg.cra_ahash.digestsize = alg->cra_hash.digestsize;
	inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.cra_init = cryptd_hash_init_tfm;
	inst->alg.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.cra_ahash.init   = cryptd_hash_init_enqueue;
	inst->alg.cra_ahash.update = cryptd_hash_update_enqueue;
	inst->alg.cra_ahash.final  = cryptd_hash_final_enqueue;
	inst->alg.cra_ahash.setkey = cryptd_hash_setkey;
	inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

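/* A single global daemon: one queue and one thread shared by all instances. */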
static struct cryptd_state state;

static struct crypto_instance *cryptd_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_alloc_blkcipher(tb, &state);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_alloc_hash(tb, &state);
	}

	return ERR_PTR(-EINVAL);
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->spawn);
	kfree(inst);
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.alloc = cryptd_alloc,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

static inline int cryptd_create_thread(struct cryptd_state *state,
				       int (*fn)(void *data), const char *name)
{
	spin_lock_init(&state->lock);
	mutex_init(&state->mutex);
	crypto_init_queue(&state->queue, CRYPTD_MAX_QLEN);

	state->task = kthread_run(fn, state, name);
	if (IS_ERR(state->task))
		return PTR_ERR(state->task);

	return 0;
}

static inline void cryptd_stop_thread(struct cryptd_state *state)
{
	BUG_ON(state->queue.qlen);
	kthread_stop(state->task);
}

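/*
 * Daemon main loop: take one request per iteration under state->mutex,
 * notify any backlogged request that it is now in progress, then run the
 * dequeued request by calling its (substituted) completion function.
 * The thread sleeps via schedule() whenever the queue is empty and exits
 * once kthread_stop() is called; cryptd_stop_thread() guarantees the
 * queue has already drained by then.
 */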
static int cryptd_thread(void *data)
{
	struct cryptd_state *state = data;
	int stop;

	current->flags |= PF_NOFREEZE;

	do {
		struct crypto_async_request *req, *backlog;

		mutex_lock(&state->mutex);
		__set_current_state(TASK_INTERRUPTIBLE);

		spin_lock_bh(&state->lock);
		backlog = crypto_get_backlog(&state->queue);
		req = crypto_dequeue_request(&state->queue);
		spin_unlock_bh(&state->lock);

		stop = kthread_should_stop();

		if (stop || req) {
			__set_current_state(TASK_RUNNING);
			if (req) {
				if (backlog)
					backlog->complete(backlog,
							  -EINPROGRESS);
				req->complete(req, 0);
			}
		}

		mutex_unlock(&state->mutex);

		schedule();
	} while (!stop);

	return 0;
}

static int __init cryptd_init(void)
{
	int err;

	err = cryptd_create_thread(&state, cryptd_thread, "cryptd");
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		kthread_stop(state.task);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_stop_thread(&state);
	crypto_unregister_template(&cryptd_tmpl);
}

module_init(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");