/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <crypto/hash.h>
#include "internal.h"
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

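/*
 * Per-instance context: the spawns for the two sub-algorithms a
 * gcm(...) instance is built from, a CTR-mode skcipher and a GHASH
 * ahash.
 */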
struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn ghash;
};

struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;
	struct crypto_ahash *ghash;
};

struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};

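/*
 * State carried across the deferred GHASH walk: how many ciphertext
 * bytes to hash, where they live, and the completion to invoke once
 * the hash has been folded into the auth tag.
 */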
struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;
	struct scatterlist *src;
	void (*complete)(struct aead_request *req, int err);
};

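/*
 * Per-request context, carved out of the aead_request private area by
 * crypto_gcm_reqctx().  The CTR and GHASH sub-requests never run at
 * the same time, so they share storage through the union.
 */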
struct crypto_gcm_req_priv_ctx {
	u8 auth_tag[16];
	u8 iauth_tag[16];
	struct scatterlist src[2];
	struct scatterlist dst[2];
	struct crypto_gcm_ghash_ctx ghash_ctx;
	union {
		struct ahash_request ahreq;
		struct ablkcipher_request abreq;
	} u;
};

struct crypto_gcm_setkey_result {
	int err;
	struct completion completion;
};

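/*
 * gcm_zeroes points at a 16-byte all-zero buffer (allocated at module
 * init) that gcm_hash_remain() feeds to GHASH to pad partial blocks
 * out to the 16-byte boundary.
 */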
static void *gcm_zeroes;

static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
{
	struct crypto_gcm_setkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;

	result->err = err;
	complete(&result->completion);
}

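/*
 * Derive the GHASH subkey H = E_K(0^128) by encrypting a zeroed block
 * with the CTR cipher under a zeroed counter, then key the ghash tfm
 * with the result.  The encryption may complete asynchronously, so
 * setkey waits on a completion.
 */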
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ahash *ghash = ctx->ghash;
	struct crypto_ablkcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];	/* zero IV; sized to cover a full counter block */

		struct crypto_gcm_setkey_result result;

		struct scatterlist sg[1];
		struct ablkcipher_request req;
	} *data;
	int err;

	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);

	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);

	data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	init_completion(&data->result.completion);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	ablkcipher_request_set_tfm(&data->req, ctr);
	ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						    CRYPTO_TFM_REQ_MAY_BACKLOG,
					crypto_gcm_setkey_done,
					&data->result);
	ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
				     sizeof(data->hash), data->iv);

	err = crypto_ablkcipher_encrypt(&data->req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		err = wait_for_completion_interruptible(
			&data->result.completion);
		if (!err)
			err = data->result.err;
	}

	if (err)
		goto out;

	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
			      CRYPTO_TFM_RES_MASK);

out:
	kfree(data);
	return err;
}

static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

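/*
 * Set up the CTR request so that the 16-byte auth_tag slot is chained
 * in front of the actual data.  With the counter field initialised to
 * 1, the first keystream block is E_K(Y0): encrypting the zeroed
 * auth_tag leaves exactly the pad that is later XORed with the GHASH
 * output to form (or verify) the tag.
 */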
static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
				  struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct scatterlist *dst;
	__be32 counter = cpu_to_be32(1);

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(req->iv + 12, &counter, 4);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
				     cryptlen + sizeof(pctx->auth_tag),
				     req->iv);
}

static inline unsigned int gcm_remain(unsigned int len)
{
	len &= 0xfU;

	return len ? 16 - len : 0;
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
static void gcm_hash_final_done(struct crypto_async_request *areq, int err);

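/*
 * The GHASH pass is driven as a chain of steps, each resuming from a
 * completion callback when the hash tfm is asynchronous:
 *
 *   init -> assoc -> (zero pad) -> ciphertext -> (zero pad)
 *        -> lengths block -> final
 *
 * The __gcm_hash_*_done helpers run the next step inline whenever the
 * previous one completed synchronously.
 */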
static int gcm_hash_update(struct aead_request *req,
			   struct crypto_gcm_req_priv_ctx *pctx,
			   crypto_completion_t complete,
			   struct scatterlist *src,
			   unsigned int len)
{
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   complete, req);
	ahash_request_set_crypt(ahreq, src, NULL, len);

	return crypto_ahash_update(ahreq);
}

static int gcm_hash_remain(struct aead_request *req,
			   struct crypto_gcm_req_priv_ctx *pctx,
			   unsigned int remain,
			   crypto_completion_t complete)
{
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   complete, req);
	sg_init_one(pctx->src, gcm_zeroes, remain);
	ahash_request_set_crypt(ahreq, pctx->src, NULL, remain);

	return crypto_ahash_update(ahreq);
}

static int gcm_hash_len(struct aead_request *req,
			struct crypto_gcm_req_priv_ctx *pctx)
{
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	u128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(pctx->src, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, pctx->src,
				NULL, sizeof(lengths));

	return crypto_ahash_update(ahreq);
}

static int gcm_hash_final(struct aead_request *req,
			  struct crypto_gcm_req_priv_ctx *pctx)
{
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   gcm_hash_final_done, req);
	ahash_request_set_crypt(ahreq, NULL, pctx->iauth_tag, 0);

	return crypto_ahash_final(ahreq);
}

static void __gcm_hash_final_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (!err)
		crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);

	gctx->complete(req, err);
}

static void gcm_hash_final_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	__gcm_hash_final_done(req, err);
}

static void __gcm_hash_len_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);

	if (!err) {
		err = gcm_hash_final(req, pctx);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;
	}

	__gcm_hash_final_done(req, err);
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	__gcm_hash_len_done(req, err);
}

static void __gcm_hash_crypt_remain_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);

	if (!err) {
		err = gcm_hash_len(req, pctx);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;
	}

	__gcm_hash_len_done(req, err);
}

static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	__gcm_hash_crypt_remain_done(req, err);
}

static void __gcm_hash_crypt_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int remain;

	if (!err) {
		remain = gcm_remain(gctx->cryptlen);
		BUG_ON(!remain);
		err = gcm_hash_remain(req, pctx, remain,
				      gcm_hash_crypt_remain_done);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;
	}

	__gcm_hash_crypt_remain_done(req, err);
}

static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	__gcm_hash_crypt_done(req, err);
}

static void __gcm_hash_assoc_remain_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	crypto_completion_t complete;
	unsigned int remain = 0;

	if (!err && gctx->cryptlen) {
		remain = gcm_remain(gctx->cryptlen);
		complete = remain ? gcm_hash_crypt_done :
			gcm_hash_crypt_remain_done;
		err = gcm_hash_update(req, pctx, complete,
				      gctx->src, gctx->cryptlen);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;
	}

	if (remain)
		__gcm_hash_crypt_done(req, err);
	else
		__gcm_hash_crypt_remain_done(req, err);
}

static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	__gcm_hash_assoc_remain_done(req, err);
}

static void __gcm_hash_assoc_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	unsigned int remain;

	if (!err) {
		remain = gcm_remain(req->assoclen);
		BUG_ON(!remain);
		err = gcm_hash_remain(req, pctx, remain,
				      gcm_hash_assoc_remain_done);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;
	}

	__gcm_hash_assoc_remain_done(req, err);
}

static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	__gcm_hash_assoc_done(req, err);
}

static void __gcm_hash_init_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	crypto_completion_t complete;
	unsigned int remain = 0;

	if (!err && req->assoclen) {
		remain = gcm_remain(req->assoclen);
		complete = remain ? gcm_hash_assoc_done :
			gcm_hash_assoc_remain_done;
		err = gcm_hash_update(req, pctx, complete,
				      req->assoc, req->assoclen);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;
	}

	if (remain)
		__gcm_hash_assoc_done(req, err);
	else
		__gcm_hash_assoc_remain_done(req, err);
}

static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	__gcm_hash_init_done(req, err);
}

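/*
 * Synchronous driver for the chain above: walk all steps inline and
 * let the *_done callbacks take over as soon as a step returns
 * -EINPROGRESS or -EBUSY.
 */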
static int gcm_hash(struct aead_request *req,
		    struct crypto_gcm_req_priv_ctx *pctx)
{
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	unsigned int remain;
	crypto_completion_t complete;
	int err;

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   gcm_hash_init_done, req);
	err = crypto_ahash_init(ahreq);
	if (err)
		return err;
	remain = gcm_remain(req->assoclen);
	complete = remain ? gcm_hash_assoc_done : gcm_hash_assoc_remain_done;
	err = gcm_hash_update(req, pctx, complete, req->assoc, req->assoclen);
	if (err)
		return err;
	if (remain) {
		err = gcm_hash_remain(req, pctx, remain,
				      gcm_hash_assoc_remain_done);
		if (err)
			return err;
	}
	remain = gcm_remain(gctx->cryptlen);
	complete = remain ? gcm_hash_crypt_done : gcm_hash_crypt_remain_done;
	err = gcm_hash_update(req, pctx, complete, gctx->src, gctx->cryptlen);
	if (err)
		return err;
	if (remain) {
		err = gcm_hash_remain(req, pctx, remain,
				      gcm_hash_crypt_remain_done);
		if (err)
			return err;
	}
	err = gcm_hash_len(req, pctx);
	if (err)
		return err;
	err = gcm_hash_final(req, pctx);
	if (err)
		return err;

	return 0;
}

static void gcm_enc_copy_hash(struct aead_request *req,
			      struct crypto_gcm_req_priv_ctx *pctx)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);
}

static void gcm_enc_hash_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);

	if (!err)
		gcm_enc_copy_hash(req, pctx);

	aead_request_complete(req, err);
}

static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);

	if (!err) {
		err = gcm_hash(req, pctx);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;
		else if (!err) {
			crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
			gcm_enc_copy_hash(req, pctx);
		}
	}

	aead_request_complete(req, err);
}

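/*
 * Encrypt: run CTR over the (tag || plaintext) chain first, then GHASH
 * the resulting ciphertext (gctx->src = req->dst), and finally fold the
 * hash into the keystream pad and copy the tag out behind the
 * ciphertext.
 */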
static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->u.abreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	int err;

	crypto_gcm_init_crypt(abreq, req, req->cryptlen);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					gcm_encrypt_done, req);

	gctx->src = req->dst;
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_hash_done;

	err = crypto_ablkcipher_encrypt(abreq);
	if (err)
		return err;

	err = gcm_hash(req, pctx);
	if (err)
		return err;

	crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
	gcm_enc_copy_hash(req, pctx);

	return 0;
}

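/*
 * Compare the computed tag against the one behind the ciphertext.
 * Note that memcmp() leaks how many leading tag bytes matched through
 * timing; a constant-time comparison would arguably be more robust.
 */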
static int crypto_gcm_verify(struct aead_request *req,
			     struct crypto_gcm_req_priv_ctx *pctx)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_xor(auth_tag, iauth_tag, 16);
	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
	return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);

	if (!err)
		err = crypto_gcm_verify(req, pctx);

	aead_request_complete(req, err);
}

static void gcm_dec_hash_done(struct aead_request *req, int err)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->u.abreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (!err) {
		ablkcipher_request_set_callback(abreq, aead_request_flags(req),
						gcm_decrypt_done, req);
		crypto_gcm_init_crypt(abreq, req, gctx->cryptlen);
		err = crypto_ablkcipher_decrypt(abreq);
		if (err == -EINPROGRESS || err == -EBUSY)
			return;

		err = crypto_gcm_verify(req, pctx);
	}

	aead_request_complete(req, err);
}

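/*
 * Decrypt: GHASH runs over the ciphertext (gctx->src = req->src)
 * before the CTR pass, and the tag is checked only after both passes
 * have finished, either inline or via gcm_dec_hash_done().
 */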
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->u.abreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	int err;

	if (cryptlen < authsize)
		return -EINVAL;
	cryptlen -= authsize;

	gctx->src = req->src;
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_done;

	err = gcm_hash(req, pctx);
	if (err)
		return err;

	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					gcm_decrypt_done, req);
	crypto_gcm_init_crypt(abreq, req, cryptlen);
	err = crypto_ablkcipher_decrypt(abreq);
	if (err)
		return err;

	return crypto_gcm_verify(req, pctx);
}

static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
		offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct ablkcipher_request) +
		    crypto_ablkcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}

static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->ghash);
	crypto_free_ablkcipher(ctx->ctr);
}

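/*
 * Template construction shared by "gcm" and "gcm_base": build an AEAD
 * instance around a CTR skcipher spawn and a GHASH ahash spawn,
 * rejecting combinations that do not look like a stream cipher with a
 * 16-byte counter block.
 */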
static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
						       const char *full_name,
						       const char *ctr_name,
						       const char *ghash_name)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *ctr;
	struct crypto_alg *ghash_alg;
	struct ahash_alg *ghash_ahash_alg;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
				    CRYPTO_ALG_TYPE_HASH,
				    CRYPTO_ALG_TYPE_AHASH_MASK);
	err = PTR_ERR(ghash_alg);
	if (IS_ERR(ghash_alg))
		return ERR_PTR(err);

	err = -ENOMEM;
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		goto out_put_ghash;

	ctx = crypto_instance_ctx(inst);
	ghash_ahash_alg = container_of(ghash_alg, struct ahash_alg, halg.base);
	err = crypto_init_ahash_spawn(&ctx->ghash, &ghash_ahash_alg->halg,
				      inst);
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ctx->ctr, inst);
	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_ghash;

	ctr = crypto_skcipher_spawn_alg(&ctx->ctr);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (ctr->cra_ablkcipher.ivsize != 16)
		goto out_put_ctr;

	/* Not a stream cipher? */
	if (ctr->cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->cra_driver_name,
		     ghash_alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = ctr->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
	inst->alg.cra_type = &crypto_aead_type;
	inst->alg.cra_aead.ivsize = 16;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.cra_init = crypto_gcm_init_tfm;
	inst->alg.cra_exit = crypto_gcm_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_gcm_setkey;
	inst->alg.cra_aead.setauthsize = crypto_gcm_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
	inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;

out:
	crypto_mod_put(ghash_alg);
	return inst;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_drop_ghash:
	crypto_drop_ahash(&ctx->ghash);
err_free_inst:
	kfree(inst);
out_put_ghash:
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
{
	int err;
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(cipher_name);
	if (IS_ERR(cipher_name))
		return ERR_PTR(err);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_gcm_alloc_common(tb, full_name, ctr_name, "ghash");
}

static void crypto_gcm_free(struct crypto_instance *inst)
{
	struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	kfree(inst);
}

static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.alloc = crypto_gcm_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};

static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
{
	int err;
	const char *ctr_name;
	const char *ghash_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(ctr_name);
	if (IS_ERR(ctr_name))
		return ERR_PTR(err);

	ghash_name = crypto_attr_alg_name(tb[2]);
	err = PTR_ERR(ghash_name);
	if (IS_ERR(ghash_name))
		return ERR_PTR(err);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
		     ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_gcm_alloc_common(tb, full_name, ctr_name, ghash_name);
}

static struct crypto_template crypto_gcm_base_tmpl = {
	.name = "gcm_base",
	.alloc = crypto_gcm_base_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};

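/*
 * rfc4106(gcm(...)): the IPsec ESP variant.  The last four key bytes
 * become an implicit nonce salt and the wire IV carries only the
 * 8-byte explicit part; together they form the child GCM's 12-byte
 * nonce.
 */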
static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
			      CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
			      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}

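/*
 * Build the child request: the 16-byte IV buffer sits behind the
 * subrequest, aligned for the child tfm.  Bytes 0-3 carry the salt
 * saved by setkey, bytes 4-11 the per-request explicit IV; the counter
 * word is filled in later by crypto_gcm_init_crypt().
 */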
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
	aead_request_set_assoc(subreq, req->assoc, req->assoclen);

	return subreq;
}

static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4106_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = sizeof(struct aead_request) +
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +
				align + 16;

	return 0;
}

static void crypto_rfc4106_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct crypto_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	ccm_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(ccm_name);
	if (IS_ERR(ccm_name))
		return ERR_PTR(err);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, inst);
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_aead_spawn_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (alg->cra_aead.ivsize != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_nivaead_type;

	inst->alg.cra_aead.ivsize = 8;
	inst->alg.cra_aead.maxauthsize = 16;

	inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.cra_init = crypto_rfc4106_init_tfm;
	inst->alg.cra_exit = crypto_rfc4106_exit_tfm;

	inst->alg.cra_aead.setkey = crypto_rfc4106_setkey;
	inst->alg.cra_aead.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_rfc4106_encrypt;
	inst->alg.cra_aead.decrypt = crypto_rfc4106_decrypt;

	inst->alg.cra_aead.geniv = "seqiv";

out:
	return inst;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static void crypto_rfc4106_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_rfc4106_tmpl = {
	.name = "rfc4106",
	.alloc = crypto_rfc4106_alloc,
	.free = crypto_rfc4106_free,
	.module = THIS_MODULE,
};

static int __init crypto_gcm_module_init(void)
{
	int err;

	gcm_zeroes = kzalloc(16, GFP_KERNEL);
	if (!gcm_zeroes)
		return -ENOMEM;

	err = crypto_register_template(&crypto_gcm_base_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_gcm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4106_tmpl);
	if (err)
		goto out_undo_gcm;

	return 0;

out_undo_gcm:
	crypto_unregister_template(&crypto_gcm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_gcm_base_tmpl);
out:
	kfree(gcm_zeroes);
	return err;
}

static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_template(&crypto_rfc4106_tmpl);
	crypto_unregister_template(&crypto_gcm_tmpl);
	crypto_unregister_template(&crypto_gcm_base_tmpl);
}

module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS("gcm_base");
MODULE_ALIAS("rfc4106");