/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"
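
/*
 * The "ccm" template below composes CCM out of two other transforms: a
 * "ctr(cipher)" skcipher for the keystream and a "cbcmac(cipher)" ahash for
 * the authentication tag.  "ccm_base" lets callers name those two parts
 * explicitly, and "rfc4309" wraps the result for IPsec ESP.
 *
 * Illustrative caller sketch only (key, buffer layout and error handling
 * here are assumptions, not part of this file):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 8);
 *	sg_init_one(&sg, buf, assoclen + ptlen + 8);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *	err = crypto_aead_encrypt(req);
 *
 * where iv is the 16-byte CCM IV with iv[0] = L' (length-field size minus
 * one) followed by the nonce.
 */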

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 flags;
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct skcipher_request skreq;
};

struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
	unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
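
/*
 * Encode the message length into the last csize bytes of the B_0 block:
 * CCM stores the payload length big-endian in an L-byte field, so lengths
 * that do not fit into L bytes are rejected with -EOVERFLOW.
 */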

static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err = 0;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
				    CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(mac, key, keylen);
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
				    CRYPTO_TFM_RES_MASK);

out:
	return err;
}

static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
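
/*
 * Build the B_0 block from the request IV.  Per RFC 3610 the flags octet is
 * laid out as: bit 6 = Adata (set when associated data is present),
 * bits 5..3 = (M - 2) / 2 encoding the tag length M, and bits 2..0 = L - 1
 * (already supplied by the caller in iv[0]).  The remaining bytes hold the
 * nonce followed by the L-byte message length written by set_msg_len().
 */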

static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}
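
/*
 * Encode the associated-data length as RFC 3610 requires: lengths below
 * 0xff00 use a plain 2-byte big-endian value, larger ones use the marker
 * 0xfffe followed by a 4-byte length.  (The 0xffff marker for lengths of
 * 2^32 and up is not needed here since assoclen is an unsigned int.)
 */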

static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}
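
/*
 * Compute the CBC-MAC over B_0, the encoded associated-data length, the
 * associated data itself (zero-padded to a 16-byte boundary) and finally
 * the plaintext; the result is left in pctx->odata.
 */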

static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	AHASH_REQUEST_ON_STACK(ahreq, ctx->mac);
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		if (plain)
			sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst,
					 req->assoclen + req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
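
/*
 * Encrypt: the CBC-MAC of B_0 || adata || plaintext is written into odata,
 * which occupies the first 16 bytes of the source scatterlist set up by
 * crypto_ccm_init_crypt().  A single CTR pass then encrypts tag || plaintext
 * (counter block zero encrypts the tag, as RFC 3610 requires), and the
 * possibly truncated tag is copied to the end of dst.
 */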

static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}
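
/*
 * Decrypt: the received tag is copied out of the source, tag || ciphertext
 * is run through the same CTR pass to recover the plaintext and the sender's
 * raw CBC-MAC, the CBC-MAC is then recomputed over the plaintext, and the
 * two values are compared with crypto_memneq() to avoid leaking where they
 * differ.  The IV is copied into pctx->idata and that copy is handed to the
 * CTR transform so req->iv, which format_input() still needs, is preserved.
 */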

static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		crypto_skcipher_reqsize(ctr));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->mac);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_ahash(&ctx->mac);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *mac_alg;
	struct hash_alg_common *mac;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
				  CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_AHASH_MASK |
				  CRYPTO_ALG_ASYNC);
	if (IS_ERR(mac_alg))
		return PTR_ERR(mac_alg);

	mac = __crypto_hash_alg_common(mac_alg);
	err = -EINVAL;
	if (mac->digestsize != 16)
		goto out_put_mac;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_mac;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ictx->mac, mac,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_mac;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing! */
	if (crypto_skcipher_alg_ivsize(ctr) != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_mac:
	crypto_mod_put(mac_alg);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
	crypto_drop_ahash(&ictx->mac);
err_free_inst:
	kfree(inst);
	goto out_put_mac;
}

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char mac_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					mac_name);
}

static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.create = crypto_ccm_create,
	.module = THIS_MODULE,
};

static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *cipher_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	cipher_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
		     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.create = crypto_ccm_base_create,
	.module = THIS_MODULE,
};
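
/*
 * rfc4309 keys carry a trailing 3-byte salt: the last three bytes of the
 * key become the implicit part of the nonce, and the remainder is passed
 * down to the inner CCM aead.
 */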

static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}
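
/*
 * Build the inner CCM request: the 16-byte IV is assembled as L' = 3,
 * followed by the 3-byte salt from setkey and the 8-byte explicit IV taken
 * from the packet, per RFC 4309.  The trailing 8 bytes of the associated
 * data (the explicit IV) are stripped before the request is handed to the
 * child transform, which is why only assoclen values of 16 and 20 are
 * accepted by the encrypt/decrypt entry points below.
 */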

static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.create = crypto_rfc4309_create,
	.module = THIS_MODULE,
};

static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
				       const u8 *inkey, unsigned int keylen)
{
	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

	return crypto_cipher_setkey(ctx->child, inkey, keylen);
}
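
/*
 * The cbcmac descriptor keeps its running CBC state in the last
 * 'digestsize' bytes of the descriptor context (see the descsize
 * computation in cbcmac_create()); ctx->len counts how many bytes of the
 * current block have been folded in so far.
 */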

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_digestsize(pdesc->tfm);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

	ctx->len = 0;
	memset(dg, 0, bs);

	return 0;
}
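
/*
 * Classic CBC-MAC: XOR the input into the pending block and encrypt the
 * block in place each time it fills up.  A final partial block is encrypted
 * as-is by crypto_cbcmac_digest_final(), which for CCM is equivalent to
 * zero-padding the message.
 */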

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	while (len > 0) {
		unsigned int l = min(len, bs - ctx->len);

		crypto_xor(dg + ctx->len, p, l);
		ctx->len += l;
		len -= l;
		p += l;

		if (ctx->len == bs) {
			crypto_cipher_encrypt_one(tfm, dg, dg);
			ctx->len = 0;
		}
	}

	return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	if (ctx->len)
		crypto_cipher_encrypt_one(tfm, dg, dg);

	memcpy(out, dg, bs);
	return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("cbcmac", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = 1;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
				   alg->cra_alignmask + 1) +
			     alg->cra_blocksize;

	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.final = crypto_cbcmac_digest_final;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	err = shash_register_instance(tmpl, inst);

out_free_inst:
	if (err)
		shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_cbcmac_tmpl = {
	.name = "cbcmac",
	.create = cbcmac_create,
	.free = shash_free_instance,
	.module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_cbcmac_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_ccm_base_tmpl);
	if (err)
		goto out_undo_cbcmac;

	err = crypto_register_template(&crypto_ccm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4309_tmpl);
	if (err)
		goto out_undo_ccm;

out:
	return err;

out_undo_ccm:
	crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_ccm_base_tmpl);
out_undo_cbcmac:
	crypto_unregister_template(&crypto_cbcmac_tmpl);
	goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
	crypto_unregister_template(&crypto_cbcmac_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");