/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

#define CRYPTD_MAX_CPU_QLEN 100
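/*
 * Requests are queued per CPU: each possible CPU gets its own
 * crypto_queue plus a work item, and a request is serviced on the same
 * CPU that enqueued it, so the queues themselves never need a lock.
 */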
struct cryptd_cpu_queue {
        struct crypto_queue queue;
        struct work_struct work;
};

struct cryptd_queue {
        struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
        struct crypto_spawn spawn;
        struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
        struct crypto_shash_spawn spawn;
        struct cryptd_queue *queue;
};

struct aead_instance_ctx {
        struct crypto_aead_spawn aead_spawn;
        struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
        struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
        crypto_completion_t complete;
};

struct cryptd_hash_ctx {
        struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
        crypto_completion_t complete;
        struct shash_desc desc;
};

struct cryptd_aead_ctx {
        struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
        crypto_completion_t complete;
};
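/*
 * Each per-request context saves the caller's completion callback: the
 * queued callback performs the real operation via the child tfm and
 * then invokes the original handler with the result.
 */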
static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
                             unsigned int max_cpu_qlen)
{
        int cpu;
        struct cryptd_cpu_queue *cpu_queue;

        queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
        if (!queue->cpu_queue)
                return -ENOMEM;
        for_each_possible_cpu(cpu) {
                cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
                crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
                INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
        }
        return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
        int cpu;
        struct cryptd_cpu_queue *cpu_queue;

        for_each_possible_cpu(cpu) {
                cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
                BUG_ON(cpu_queue->queue.qlen);
        }
        free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
                                  struct crypto_async_request *request)
{
        int cpu, err;
        struct cryptd_cpu_queue *cpu_queue;

        cpu = get_cpu();
        cpu_queue = this_cpu_ptr(queue->cpu_queue);
        err = crypto_enqueue_request(&cpu_queue->queue, request);
        queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
        put_cpu();

        return err;
}
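/*
 * Note: a request lands on the queue of whichever CPU the caller is
 * running on, and queue_work_on() schedules the worker on that same
 * CPU, so queue access stays CPU-local end to end.
 */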
/*
 * Called in workqueue context; do one unit of real crypto work (via
 * req->complete) and reschedule the worker if there is more to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
        struct cryptd_cpu_queue *cpu_queue;
        struct crypto_async_request *req, *backlog;

        cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
        /*
         * Only handle one request at a time to avoid hogging the crypto
         * workqueue. preempt_disable/enable is used to prevent being
         * preempted by cryptd_enqueue_request(). local_bh_disable/enable
         * is used to prevent cryptd_enqueue_request() being accessed
         * from software interrupts.
         */
        local_bh_disable();
        preempt_disable();
        backlog = crypto_get_backlog(&cpu_queue->queue);
        req = crypto_dequeue_request(&cpu_queue->queue);
        preempt_enable();
        local_bh_enable();

        if (!req)
                return;

        if (backlog)
                backlog->complete(backlog, -EINPROGRESS);
        req->complete(req, 0);

        if (cpu_queue->queue.qlen)
                queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
        return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
                                         u32 *mask)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return;
        if ((algt->type & CRYPTO_ALG_INTERNAL))
                *type |= CRYPTO_ALG_INTERNAL;
        if ((algt->mask & CRYPTO_ALG_INTERNAL))
                *mask |= CRYPTO_ALG_INTERNAL;
}
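/*
 * CRYPTO_ALG_INTERNAL marks implementations that must not be selected
 * for general use (e.g. SIMD-only helpers). When the caller explicitly
 * asked for such an algorithm, the cryptd instance inherits the flag
 * in both type and mask so lookups keep behaving the same.
 */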
static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
                                   const u8 *key, unsigned int keylen)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
        struct crypto_blkcipher *child = ctx->child;
        int err;

        crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
                                          CRYPTO_TFM_REQ_MASK);
        err = crypto_blkcipher_setkey(child, key, keylen);
        crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
                                            CRYPTO_TFM_RES_MASK);
        return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
                                   struct crypto_blkcipher *child,
                                   int err,
                                   int (*crypt)(struct blkcipher_desc *desc,
                                                struct scatterlist *dst,
                                                struct scatterlist *src,
                                                unsigned int len))
{
        struct cryptd_blkcipher_request_ctx *rctx;
        struct blkcipher_desc desc;

        rctx = ablkcipher_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        desc.tfm = child;
        desc.info = req->info;
        desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        err = crypt(&desc, req->dst, req->src, req->nbytes);

        req->base.complete = rctx->complete;

out:
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
}
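/*
 * The completion handlers run under local_bh_disable() because async
 * crypto callers expect their callback in softirq-like context. An err
 * of -EINPROGRESS is forwarded untouched: it is the backlog
 * notification generated by the worker above, telling the caller that
 * a previously backlogged request has now been accepted.
 */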
static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
        struct crypto_blkcipher *child = ctx->child;

        cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
                               crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
        struct crypto_blkcipher *child = ctx->child;

        cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
                               crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
                                    crypto_completion_t compl)
{
        struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct cryptd_queue *queue;

        queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
        rctx->complete = req->base.complete;
        req->base.complete = compl;

        return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
        return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
        return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
        struct crypto_spawn *spawn = &ictx->spawn;
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_blkcipher *cipher;

        cipher = crypto_spawn_blkcipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        tfm->crt_ablkcipher.reqsize =
                sizeof(struct cryptd_blkcipher_request_ctx);
        return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(ctx->child);
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
                                   unsigned int tail)
{
        char *p;
        struct crypto_instance *inst;
        int err;

        p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
        if (!p)
                return ERR_PTR(-ENOMEM);

        inst = (void *)(p + head);

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto out_free_inst;

        memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

        inst->alg.cra_priority = alg->cra_priority + 50;
        inst->alg.cra_blocksize = alg->cra_blocksize;
        inst->alg.cra_alignmask = alg->cra_alignmask;

out:
        return p;

out_free_inst:
        kfree(p);
        p = ERR_PTR(err);
        goto out;
}
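/*
 * Instances register under "cryptd(<driver-name>)" but keep the
 * underlying algorithm's cra_name, and advertise the base priority
 * plus 50 so that, once instantiated, the async wrapper wins lookups
 * by the generic name.
 */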
static int cryptd_create_blkcipher(struct crypto_template *tmpl,
                                   struct rtattr **tb,
                                   struct cryptd_queue *queue)
{
        struct cryptd_instance_ctx *ctx;
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;
        int err;

        cryptd_check_internal(tb, &type, &mask);

        alg = crypto_get_attr_alg(tb, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        ctx = crypto_instance_ctx(inst);
        ctx->queue = queue;

        err = crypto_init_spawn(&ctx->spawn, alg, inst,
                                CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
        if (err)
                goto out_free_inst;

        type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
        if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
                type |= CRYPTO_ALG_INTERNAL;
        inst->alg.cra_flags = type;
        inst->alg.cra_type = &crypto_ablkcipher_type;

        inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
        inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
        inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

        inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

        inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

        inst->alg.cra_init = cryptd_blkcipher_init_tfm;
        inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

        inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
        inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
        inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

        err = crypto_register_instance(tmpl, inst);
        if (err) {
                crypto_drop_spawn(&ctx->spawn);
out_free_inst:
                kfree(inst);
        }

out_put_alg:
        crypto_mod_put(alg);
        return err;
}
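/*
 * The hash wrapper exposes an ahash built on a synchronous shash; the
 * shash_desc holding the partial digest state lives in the request
 * context, sized below via crypto_shash_descsize().
 */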
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
        struct crypto_shash_spawn *spawn = &ictx->spawn;
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *hash;

        hash = crypto_spawn_shash(spawn);
        if (IS_ERR(hash))
                return PTR_ERR(hash);

        ctx->child = hash;
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct cryptd_hash_request_ctx) +
                                 crypto_shash_descsize(hash));
        return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
                              const u8 *key, unsigned int keylen)
{
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
        struct crypto_shash *child = ctx->child;
        int err;

        crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
                                      CRYPTO_TFM_REQ_MASK);
        err = crypto_shash_setkey(child, key, keylen);
        crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
                                       CRYPTO_TFM_RES_MASK);
        return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
                               crypto_completion_t compl)
{
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct cryptd_queue *queue =
                cryptd_get_queue(crypto_ahash_tfm(tfm));

        rctx->complete = req->base.complete;
        req->base.complete = compl;

        return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
        struct crypto_shash *child = ctx->child;
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        struct shash_desc *desc = &rctx->desc;

        if (unlikely(err == -EINPROGRESS))
                goto out;

        desc->tfm = child;
        desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        err = crypto_shash_init(desc);

        req->base.complete = rctx->complete;

out:
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx;

        rctx = ahash_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        err = shash_ahash_update(req, &rctx->desc);

        req->base.complete = rctx->complete;

out:
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        err = crypto_shash_final(&rctx->desc, req->result);

        req->base.complete = rctx->complete;

out:
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;

        err = shash_ahash_finup(req, &rctx->desc);

        req->base.complete = rctx->complete;

out:
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
        struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
        struct crypto_shash *child = ctx->child;
        struct ahash_request *req = ahash_request_cast(req_async);
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        struct shash_desc *desc = &rctx->desc;

        if (unlikely(err == -EINPROGRESS))
                goto out;

        desc->tfm = child;
        desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        err = shash_ahash_digest(req, desc);

        req->base.complete = rctx->complete;

out:
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
        return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
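/*
 * export/import proxy the underlying shash state directly, so a hash
 * started through cryptd can be resumed by any implementation that
 * shares the same state layout.
 */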
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

        return crypto_shash_import(&rctx->desc, in);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
                              struct cryptd_queue *queue)
{
        struct hashd_instance_ctx *ctx;
        struct ahash_instance *inst;
        struct shash_alg *salg;
        struct crypto_alg *alg;
        u32 type = 0;
        u32 mask = 0;
        int err;

        cryptd_check_internal(tb, &type, &mask);

        salg = shash_attr_alg(tb[1], type, mask);
        if (IS_ERR(salg))
                return PTR_ERR(salg);

        alg = &salg->base;
        inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
                                     sizeof(*ctx));
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        ctx = ahash_instance_ctx(inst);
        ctx->queue = queue;

        err = crypto_init_shash_spawn(&ctx->spawn, salg,
                                      ahash_crypto_instance(inst));
        if (err)
                goto out_free_inst;

        type = CRYPTO_ALG_ASYNC;
        if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
                type |= CRYPTO_ALG_INTERNAL;
        inst->alg.halg.base.cra_flags = type;

        inst->alg.halg.digestsize = salg->digestsize;
        inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

        inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
        inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

        inst->alg.init = cryptd_hash_init_enqueue;
        inst->alg.update = cryptd_hash_update_enqueue;
        inst->alg.final = cryptd_hash_final_enqueue;
        inst->alg.finup = cryptd_hash_finup_enqueue;
        inst->alg.export = cryptd_hash_export;
        inst->alg.import = cryptd_hash_import;
        inst->alg.setkey = cryptd_hash_setkey;
        inst->alg.digest = cryptd_hash_digest_enqueue;

        err = ahash_register_instance(tmpl, inst);
        if (err) {
                crypto_drop_shash(&ctx->spawn);
out_free_inst:
                kfree(inst);
        }

out_put_alg:
        crypto_mod_put(alg);
        return err;
}
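/*
 * The AEAD path is the thinnest of the three: the request context only
 * stores the original completion, and the worker retargets the request
 * at the child tfm before invoking the child's encrypt/decrypt.
 */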
static void cryptd_aead_crypt(struct aead_request *req,
                              struct crypto_aead *child,
                              int err,
                              int (*crypt)(struct aead_request *req))
{
        struct cryptd_aead_request_ctx *rctx;
        rctx = aead_request_ctx(req);

        if (unlikely(err == -EINPROGRESS))
                goto out;
        aead_request_set_tfm(req, child);
        err = crypt(req);
        req->base.complete = rctx->complete;
out:
        local_bh_disable();
        rctx->complete(&req->base, err);
        local_bh_enable();
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
        struct crypto_aead *child = ctx->child;
        struct aead_request *req;

        req = container_of(areq, struct aead_request, base);
        cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
        struct crypto_aead *child = ctx->child;
        struct aead_request *req;

        req = container_of(areq, struct aead_request, base);
        cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
                               crypto_completion_t compl)
{
        struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

        rctx->complete = req->base.complete;
        req->base.complete = compl;
        return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
        return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
        return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct aead_instance_ctx *ictx = crypto_instance_ctx(inst);
        struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_aead *cipher;

        cipher = crypto_spawn_aead(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP);
        ctx->child = cipher;
        tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx);
        return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
        crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
                              struct rtattr **tb,
                              struct cryptd_queue *queue)
{
        struct aead_instance_ctx *ctx;
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        u32 type = CRYPTO_ALG_TYPE_AEAD;
        u32 mask = CRYPTO_ALG_TYPE_MASK;
        int err;

        cryptd_check_internal(tb, &type, &mask);

        alg = crypto_get_attr_alg(tb, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        ctx = crypto_instance_ctx(inst);
        ctx->queue = queue;

        err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst,
                                CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
        if (err)
                goto out_free_inst;

        type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
        if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
                type |= CRYPTO_ALG_INTERNAL;
        inst->alg.cra_flags = type;
        inst->alg.cra_type = alg->cra_type;
        inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
        inst->alg.cra_init = cryptd_aead_init_tfm;
        inst->alg.cra_exit = cryptd_aead_exit_tfm;
        inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
        inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
        inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
        inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
        inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
        inst->alg.cra_aead.encrypt = cryptd_aead_encrypt_enqueue;
        inst->alg.cra_aead.decrypt = cryptd_aead_decrypt_enqueue;
        inst->alg.cra_aead.givencrypt = alg->cra_aead.givencrypt;
        inst->alg.cra_aead.givdecrypt = alg->cra_aead.givdecrypt;

        err = crypto_register_instance(tmpl, inst);
        if (err) {
                crypto_drop_spawn(&ctx->aead_spawn.base);
out_free_inst:
                kfree(inst);
        }

out_put_alg:
        crypto_mod_put(alg);
        return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_BLKCIPHER:
                return cryptd_create_blkcipher(tmpl, tb, &queue);
        case CRYPTO_ALG_TYPE_DIGEST:
                return cryptd_create_hash(tmpl, tb, &queue);
        case CRYPTO_ALG_TYPE_AEAD:
                return cryptd_create_aead(tmpl, tb, &queue);
        }

        return -EINVAL;
}
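/*
 * The three instance context layouts each start with a different spawn
 * type, so teardown must dispatch on the registered algorithm type to
 * drop the right spawn and free the right container.
 */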
static void cryptd_free(struct crypto_instance *inst)
{
        struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
        struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
        struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

        switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AHASH:
                crypto_drop_shash(&hctx->spawn);
                kfree(ahash_instance(inst));
                return;
        case CRYPTO_ALG_TYPE_AEAD:
                crypto_drop_spawn(&aead_ctx->aead_spawn.base);
                kfree(inst);
                return;
        default:
                crypto_drop_spawn(&ctx->spawn);
                kfree(inst);
        }
}

static struct crypto_template cryptd_tmpl = {
        .name = "cryptd",
        .create = cryptd_create,
        .free = cryptd_free,
        .module = THIS_MODULE,
};

struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
                                                  u32 type, u32 mask)
{
        char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
        struct crypto_tfm *tfm;

        if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
                return ERR_PTR(-EINVAL);
        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask &= ~CRYPTO_ALG_TYPE_MASK;
        mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
        tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
        if (IS_ERR(tfm))
                return ERR_CAST(tfm);
        if (tfm->__crt_alg->cra_module != THIS_MODULE) {
                crypto_free_tfm(tfm);
                return ERR_PTR(-EINVAL);
        }

        return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
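/*
 * A minimal usage sketch (the driver name below is illustrative, not
 * something this file defines): a SIMD cipher driver typically
 * allocates the async wrapper once at init and keeps the synchronous
 * child around for use when the FPU is available.
 *
 *	struct cryptd_ablkcipher *cryptd_tfm;
 *
 *	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni",
 *					     CRYPTO_ALG_INTERNAL,
 *					     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(cryptd_tfm))
 *		return PTR_ERR(cryptd_tfm);
 *	...
 *	cryptd_free_ablkcipher(cryptd_tfm);
 */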
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
        return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
        crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
                                        u32 type, u32 mask)
{
        char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
        struct crypto_ahash *tfm;

        if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
                return ERR_PTR(-EINVAL);
        tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
        if (IS_ERR(tfm))
                return ERR_CAST(tfm);
        if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
                crypto_free_ahash(tfm);
                return ERR_PTR(-EINVAL);
        }

        return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

        return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
        return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
        crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
                                      u32 type, u32 mask)
{
        char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
        struct crypto_aead *tfm;

        if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
                     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
                return ERR_PTR(-EINVAL);
        tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
        if (IS_ERR(tfm))
                return ERR_CAST(tfm);
        if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
                crypto_free_aead(tfm);
                return ERR_PTR(-EINVAL);
        }

        return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
        struct cryptd_aead_ctx *ctx;
        ctx = crypto_aead_ctx(&tfm->base);
        return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
        crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
        int err;

        err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
        if (err)
                return err;

        err = crypto_register_template(&cryptd_tmpl);
        if (err)
                cryptd_fini_queue(&queue);

        return err;
}

static void __exit cryptd_exit(void)
{
        cryptd_fini_queue(&queue);
        crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");