/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <asm/i387.h>
#include <asm/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif
struct async_aes_ctx {
        struct cryptd_ablkcipher *cryptd_tfm;
};
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
        u8 hash_subkey[16];
        struct crypto_aes_ctx aes_key_expanded;
        u8 nonce[4];
        struct cryptd_aead *cryptd_tfm;
};
struct aesni_gcm_set_hash_subkey_result {
        int err;
        struct completion completion;
};
struct aesni_hash_subkey_req_data {
        u8 iv[16];
        struct aesni_gcm_set_hash_subkey_result result;
        struct scatterlist sg;
};
#define AESNI_ALIGN     (16)
#define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE-1))
#define RFC4106_HASH_SUBKEY_SIZE 16
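
/*
 * AES_BLOCK_MASK rounds a byte count down to a whole number of 16-byte
 * AES blocks. For example, with nbytes = 100:
 *
 *      nbytes & AES_BLOCK_MASK       == 96  (six full blocks)
 *      nbytes & (AES_BLOCK_SIZE - 1) ==  4  (trailing partial block)
 *
 * The ECB/CBC/CTR walk loops below rely on exactly this split.
 */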
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

int crypto_fpu_init(void);
void crypto_fpu_exit(void);

#ifdef CONFIG_X86_64
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this
 *         is going to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len, Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
                        const u8 *in, unsigned long plaintext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
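
/*
 * A minimal sketch (hypothetical buffer names) of how the parameters
 * above fit together; the real callers are __driver_rfc4106_encrypt()
 * and __driver_rfc4106_decrypt() further down in this file.
 */
#if 0
        u8 iv[16];                      /* must be 16-byte aligned */
        memcpy(iv, ctx->nonce, 4);      /* salt from the SA */
        memcpy(iv + 4, req->iv, 8);     /* per-packet IV from the ESP payload */
        *(__be32 *)(iv + 12) = cpu_to_be32(1);  /* initial counter block j0 */
        aesni_gcm_enc(aes_ctx, dst, src, plaintext_len, iv,
                      ctx->hash_subkey, aad, aad_len, auth_tag, auth_tag_len);
#endif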
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len, Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
                        const u8 *in, unsigned long ciphertext_len, u8 *iv,
                        u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
                        u8 *auth_tag, unsigned long auth_tag_len);
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
        return (struct aesni_rfc4106_gcm_ctx *)
                ALIGN((unsigned long)crypto_tfm_ctx(crypto_aead_tfm(tfm)),
                      AESNI_ALIGN);
}
#endif
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
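
/*
 * Worked example of the fix-up above: if the raw context starts at
 * 0x...1008 and AESNI_ALIGN is 16, ALIGN() moves the pointer up to
 * 0x...1010. The extra AESNI_ALIGN-1 bytes in cra_ctxsize below
 * guarantee the aligned pointer still lies inside the allocation; when
 * the crypto API already provides sufficient alignment, no fix-up is
 * done (align becomes 1).
 */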
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (!irq_fpu_usable())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}
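
/*
 * Both single-block paths above share the same pattern: AES-NI uses XMM
 * registers, so the SSE state must be saved with kernel_fpu_begin()
 * before the instructions can run. When the FPU cannot be touched
 * (e.g. in an interrupt that arrived while user space owned the FPU),
 * the code falls back to the plain x86 assembler implementation from
 * asm/aes.h instead.
 */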
static struct crypto_alg aesni_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-aesni",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(aesni_alg.cra_list),
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt
                }
        }
};
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}
static struct crypto_alg __aesni_alg = {
        .cra_name               = "__aes-aesni",
        .cra_driver_name        = "__driver-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(__aesni_alg.cra_list),
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = __aes_encrypt,
                        .cia_decrypt            = __aes_decrypt
                }
        }
};
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static struct crypto_alg blk_ecb_alg = {
        .cra_name               = "__ecb-aes-aesni",
        .cra_driver_name        = "__driver-ecb-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
};
static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static struct crypto_alg blk_cbc_alg = {
        .cra_name               = "__cbc-aes-aesni",
        .cra_driver_name        = "__driver-cbc-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
};
#ifdef CONFIG_X86_64
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
                            struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        aesni_enc(ctx, keystream, ctrblk);
        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
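
/*
 * Example: a 20-byte request leaves walk->nbytes == 4 after one full
 * block has gone through aesni_ctr_enc() in ctr_crypt() below.
 * ctr_crypt_final() then encrypts the counter block once more, XORs
 * only those 4 keystream bytes into the tail, and bumps the counter so
 * the IV stays consistent for chained requests.
 */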
static int ctr_crypt(struct blkcipher_desc *desc,
                     struct scatterlist *dst, struct scatterlist *src,
                     unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        kernel_fpu_end();

        return err;
}
static struct crypto_alg blk_ctr_alg = {
        .cra_name               = "__ctr-aes-aesni",
        .cra_driver_name        = "__driver-ctr-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                },
        },
};
#endif
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
                        unsigned int key_len)
{
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
                                    & CRYPTO_TFM_REQ_MASK);
        err = crypto_ablkcipher_setkey(child, key, key_len);
        crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
                                    & CRYPTO_TFM_RES_MASK);
        return err;
}
static int ablk_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_encrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->encrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}
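
/*
 * When the FPU is unavailable the request cannot be handled inline, so
 * it is re-targeted at the workqueue-backed cryptd tfm and processed
 * later in process context, where kernel_fpu_begin() is safe again.
 * ablk_decrypt() below is the mirror image of this function.
 */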
static int ablk_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_decrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->decrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}
static void ablk_exit(struct crypto_tfm *tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ablkcipher(ctx->cryptd_tfm);
}
static void ablk_init_common(struct crypto_tfm *tfm,
                             struct cryptd_ablkcipher *cryptd_tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->cryptd_tfm = cryptd_tfm;
        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
                crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}
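
/*
 * The request size is grown by the size of a nested ablkcipher_request
 * so that ablk_encrypt()/ablk_decrypt() can carve the cryptd request
 * out of the caller's request context instead of allocating memory in
 * the I/O path.
 */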
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_ecb_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
        .cra_init               = ablk_ecb_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
static int ablk_cbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_cbc_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
        .cra_init               = ablk_cbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#ifdef CONFIG_X86_64
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_ctr_alg = {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
        .cra_init               = ablk_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        /* CTR decryption is the same operation as encryption */
                        .decrypt        = ablk_encrypt,
                        .geniv          = "chainiv",
                },
        },
};
#ifdef HAS_CTR
static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher(
                "rfc3686(__driver-ctr-aes-aesni)", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_rfc3686_ctr_alg = {
        .cra_name               = "rfc3686(ctr(aes))",
        .cra_driver_name        = "rfc3686-ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
        .cra_init               = ablk_rfc3686_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .ivsize      = CTR_RFC3686_IV_SIZE,
                        .setkey      = ablk_set_key,
                        .encrypt     = ablk_encrypt,
                        .decrypt     = ablk_decrypt,
                        .geniv       = "seqiv",
                },
        },
};
#endif
#endif
#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_lrw_alg = {
        .cra_name               = "lrw(aes)",
        .cra_driver_name        = "lrw-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
        .cra_init               = ablk_lrw_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif
#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_pcbc_alg = {
        .cra_name               = "pcbc(aes)",
        .cra_driver_name        = "pcbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
        .cra_init               = ablk_pcbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif
#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_xts_alg = {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
        .cra_init               = ablk_xts_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif
#ifdef CONFIG_X86_64
static int rfc4106_init(struct crypto_tfm *tfm)
{
        struct cryptd_aead *cryptd_tfm;
        struct aesni_rfc4106_gcm_ctx *ctx = (struct aesni_rfc4106_gcm_ctx *)
                PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
        struct crypto_aead *cryptd_child;
        struct aesni_rfc4106_gcm_ctx *child_ctx;

        cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);

        cryptd_child = cryptd_aead_child(cryptd_tfm);
        child_ctx = aesni_rfc4106_gcm_ctx_get(cryptd_child);
        memcpy(child_ctx, ctx, sizeof(*ctx));
        ctx->cryptd_tfm = cryptd_tfm;
        tfm->crt_aead.reqsize = sizeof(struct aead_request)
                + crypto_aead_reqsize(&cryptd_tfm->base);
        return 0;
}
static void rfc4106_exit(struct crypto_tfm *tfm)
{
        struct aesni_rfc4106_gcm_ctx *ctx =
                (struct aesni_rfc4106_gcm_ctx *)
                PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
        if (!IS_ERR(ctx->cryptd_tfm))
                cryptd_free_aead(ctx->cryptd_tfm);
}
static void
rfc4106_set_hash_subkey_done(struct crypto_async_request *req, int err)
{
        struct aesni_gcm_set_hash_subkey_result *result = req->data;

        if (err == -EINPROGRESS)
                return;
        result->err = err;
        complete(&result->completion);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
        struct crypto_ablkcipher *ctr_tfm;
        struct ablkcipher_request *req;
        int ret = -EINVAL;
        struct aesni_hash_subkey_req_data *req_data;

        ctr_tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
        if (IS_ERR(ctr_tfm))
                return PTR_ERR(ctr_tfm);

        crypto_ablkcipher_clear_flags(ctr_tfm, ~0);

        ret = crypto_ablkcipher_setkey(ctr_tfm, key, key_len);
        if (ret)
                goto out_free_ablkcipher;

        ret = -ENOMEM;
        req = ablkcipher_request_alloc(ctr_tfm, GFP_KERNEL);
        if (!req)
                goto out_free_ablkcipher;

        req_data = kmalloc(sizeof(*req_data), GFP_KERNEL);
        if (!req_data)
                goto out_free_request;

        memset(req_data->iv, 0, sizeof(req_data->iv));

        /* Clear the data in the hash sub key container to zero.*/
        /* We want to cipher all zeros to create the hash sub key. */
        memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

        init_completion(&req_data->result.completion);
        sg_init_one(&req_data->sg, hash_subkey, RFC4106_HASH_SUBKEY_SIZE);
        ablkcipher_request_set_tfm(req, ctr_tfm);
        ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
                                        CRYPTO_TFM_REQ_MAY_BACKLOG,
                                        rfc4106_set_hash_subkey_done,
                                        &req_data->result);

        ablkcipher_request_set_crypt(req, &req_data->sg,
                &req_data->sg, RFC4106_HASH_SUBKEY_SIZE, req_data->iv);

        ret = crypto_ablkcipher_encrypt(req);
        if (ret == -EINPROGRESS || ret == -EBUSY) {
                ret = wait_for_completion_interruptible
                        (&req_data->result.completion);
                if (!ret)
                        ret = req_data->result.err;
        }
        kfree(req_data);
out_free_request:
        ablkcipher_request_free(req);
out_free_ablkcipher:
        crypto_free_ablkcipher(ctr_tfm);
        return ret;
}
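
/*
 * Per the GCM specification the hash subkey is H = E_K(0^128): the
 * routine above simply runs a one-block CTR encryption of an all-zero
 * buffer with an all-zero IV, which yields exactly that value.
 */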
static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
                           unsigned int key_len)
{
        int ret = 0;
        struct crypto_tfm *tfm = crypto_aead_tfm(parent);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
        struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
        struct aesni_rfc4106_gcm_ctx *child_ctx =
                aesni_rfc4106_gcm_ctx_get(cryptd_child);
        u8 *new_key_align, *new_key_mem = NULL;

        if (key_len < 4) {
                crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        /*Account for 4 byte nonce at the end.*/
        key_len -= 4;
        if (key_len != AES_KEYSIZE_128) {
                crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
        /*This must be on a 16 byte boundary!*/
        if ((unsigned long)(&(ctx->aes_key_expanded.key_enc[0])) % AESNI_ALIGN)
                return -EINVAL;

        if ((unsigned long)key % AESNI_ALIGN) {
                /*key is not aligned: use an auxiliary aligned pointer*/
                new_key_mem = kmalloc(key_len+AESNI_ALIGN, GFP_KERNEL);
                if (!new_key_mem)
                        return -ENOMEM;

                /* keep new_key_mem as allocated so kfree() gets the
                 * original pointer, not the aligned one */
                new_key_align = PTR_ALIGN(new_key_mem, AESNI_ALIGN);
                memcpy(new_key_align, key, key_len);
                key = new_key_align;
        }

        if (!irq_fpu_usable())
                ret = crypto_aes_expand_key(&(ctx->aes_key_expanded),
                                            key, key_len);
        else {
                kernel_fpu_begin();
                ret = aesni_set_key(&(ctx->aes_key_expanded), key, key_len);
                kernel_fpu_end();
        }
        /*This must be on a 16 byte boundary!*/
        if ((unsigned long)(&(ctx->hash_subkey[0])) % AESNI_ALIGN) {
                ret = -EINVAL;
                goto exit;
        }
        ret = rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
        memcpy(child_ctx, ctx, sizeof(*ctx));
exit:
        kfree(new_key_mem);
        return ret;
}
/* This is the Integrity Check Value (aka the authentication tag) length
 * and can be 8, 12 or 16 bytes long. */
static int rfc4106_set_authsize(struct crypto_aead *parent,
                                unsigned int authsize)
{
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
        struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);

        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }
        crypto_aead_crt(parent)->authsize = authsize;
        crypto_aead_crt(cryptd_child)->authsize = authsize;
        return 0;
}
static int rfc4106_encrypt(struct aead_request *req)
{
        int ret;
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);

        if (!irq_fpu_usable()) {
                struct aead_request *cryptd_req =
                        (struct aead_request *) aead_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_aead_encrypt(cryptd_req);
        } else {
                struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
                kernel_fpu_begin();
                ret = cryptd_child->base.crt_aead.encrypt(req);
                kernel_fpu_end();
                return ret;
        }
}
static int rfc4106_decrypt(struct aead_request *req)
{
        int ret;
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);

        if (!irq_fpu_usable()) {
                struct aead_request *cryptd_req =
                        (struct aead_request *) aead_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_aead_decrypt(cryptd_req);
        } else {
                struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
                kernel_fpu_begin();
                ret = cryptd_child->base.crt_aead.decrypt(req);
                kernel_fpu_end();
                return ret;
        }
}
static struct crypto_alg rfc4106_alg = {
        .cra_name               = "rfc4106(gcm(aes))",
        .cra_driver_name        = "rfc4106-gcm-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize    = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_nivaead_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(rfc4106_alg.cra_list),
        .cra_init               = rfc4106_init,
        .cra_exit               = rfc4106_exit,
        .cra_u = {
                .aead = {
                        .setkey         = rfc4106_set_key,
                        .setauthsize    = rfc4106_set_authsize,
                        .encrypt        = rfc4106_encrypt,
                        .decrypt        = rfc4106_decrypt,
                        .geniv          = "seqiv",
                        .ivsize         = 8,
                        .maxauthsize    = 16,
                },
        },
};
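
/*
 * A minimal usage sketch (hypothetical key material, error handling
 * omitted): RFC4106 keys carry the 4-byte nonce/salt appended to the
 * AES key, so a 128-bit key is passed in as 20 bytes.
 */
#if 0
        struct crypto_aead *aead = crypto_alloc_aead("rfc4106(gcm(aes))",
                                                     0, 0);
        u8 key[20];     /* 16 bytes of AES key + 4 bytes of salt */

        get_random_bytes(key, sizeof(key));
        crypto_aead_setkey(aead, key, sizeof(key));
        crypto_aead_setauthsize(aead, 16);
        /* build an aead_request, then call crypto_aead_encrypt() */
        crypto_free_aead(aead);
#endif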
static int __driver_rfc4106_encrypt(struct aead_request *req)
{
        u8 one_entry_in_sg = 0;
        u8 *src, *dst, *assoc;
        __be32 counter = cpu_to_be32(1);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        unsigned long auth_tag_len = crypto_aead_authsize(tfm);
        u8 iv_tab[16+AESNI_ALIGN];
        u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_tab, AESNI_ALIGN);
        struct scatter_walk src_sg_walk;
        struct scatter_walk assoc_sg_walk;
        struct scatter_walk dst_sg_walk;
        unsigned int i;

        /* Assuming we are supporting rfc4106 64-bit extended */
        /* sequence numbers. We need to have the AAD length equal */
        /* to 8 or 12 bytes */
        if (unlikely(req->assoclen != 8 && req->assoclen != 12))
                return -EINVAL;
        /* IV below built */
        for (i = 0; i < 4; i++)
                *(iv+i) = ctx->nonce[i];
        for (i = 0; i < 8; i++)
                *(iv+4+i) = req->iv[i];
        *((__be32 *)(iv+12)) = counter;

        if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
                one_entry_in_sg = 1;
                scatterwalk_start(&src_sg_walk, req->src);
                scatterwalk_start(&assoc_sg_walk, req->assoc);
                src = scatterwalk_map(&src_sg_walk, 0);
                assoc = scatterwalk_map(&assoc_sg_walk, 0);
                dst = src;
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_start(&dst_sg_walk, req->dst);
                        dst = scatterwalk_map(&dst_sg_walk, 0);
                }

        } else {
                /* Allocate memory for src, dst, assoc */
                src = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
                              GFP_ATOMIC);
                if (unlikely(!src))
                        return -ENOMEM;
                assoc = (src + req->cryptlen + auth_tag_len);
                scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
                scatterwalk_map_and_copy(assoc, req->assoc, 0,
                                         req->assoclen, 0);
                dst = src;
        }

        aesni_gcm_enc(aes_ctx, dst, src, (unsigned long)req->cryptlen, iv,
                ctx->hash_subkey, assoc, (unsigned long)req->assoclen, dst
                + ((unsigned long)req->cryptlen), auth_tag_len);

        /* The authTag (aka the Integrity Check Value) needs to be written
         * back to the packet. */
        if (one_entry_in_sg) {
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_unmap(dst, 0);
                        scatterwalk_done(&dst_sg_walk, 0, 0);
                }
                scatterwalk_unmap(src, 0);
                scatterwalk_unmap(assoc, 0);
                scatterwalk_done(&src_sg_walk, 0, 0);
                scatterwalk_done(&assoc_sg_walk, 0, 0);
        } else {
                scatterwalk_map_and_copy(dst, req->dst, 0,
                                         req->cryptlen + auth_tag_len, 1);
                kfree(src);
        }
        return 0;
}
static int __driver_rfc4106_decrypt(struct aead_request *req)
{
        u8 one_entry_in_sg = 0;
        u8 *src, *dst, *assoc;
        unsigned long tempCipherLen = 0;
        __be32 counter = cpu_to_be32(1);
        int retval = 0;
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
        void *aes_ctx = &(ctx->aes_key_expanded);
        unsigned long auth_tag_len = crypto_aead_authsize(tfm);
        u8 iv_and_authTag[32+AESNI_ALIGN];
        u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_and_authTag, AESNI_ALIGN);
        u8 *authTag = iv + 16;
        struct scatter_walk src_sg_walk;
        struct scatter_walk assoc_sg_walk;
        struct scatter_walk dst_sg_walk;
        unsigned int i;

        if (unlikely((req->cryptlen < auth_tag_len) ||
                (req->assoclen != 8 && req->assoclen != 12)))
                return -EINVAL;
        /* Assuming we are supporting rfc4106 64-bit extended */
        /* sequence numbers. We need to have the AAD length */
        /* equal to 8 or 12 bytes */

        tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
        /* IV below built */
        for (i = 0; i < 4; i++)
                *(iv+i) = ctx->nonce[i];
        for (i = 0; i < 8; i++)
                *(iv+4+i) = req->iv[i];
        *((__be32 *)(iv+12)) = counter;

        if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
                one_entry_in_sg = 1;
                scatterwalk_start(&src_sg_walk, req->src);
                scatterwalk_start(&assoc_sg_walk, req->assoc);
                src = scatterwalk_map(&src_sg_walk, 0);
                assoc = scatterwalk_map(&assoc_sg_walk, 0);
                dst = src;
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_start(&dst_sg_walk, req->dst);
                        dst = scatterwalk_map(&dst_sg_walk, 0);
                }

        } else {
                /* Allocate memory for src, dst, assoc */
                src = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
                if (!src)
                        return -ENOMEM;
                /* assoc follows the full ciphertext (which still holds
                 * the tag), so both regions fit the allocation above */
                assoc = (src + req->cryptlen);
                scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
                scatterwalk_map_and_copy(assoc, req->assoc, 0,
                                         req->assoclen, 0);
                dst = src;
        }

        aesni_gcm_dec(aes_ctx, dst, src, tempCipherLen, iv,
                ctx->hash_subkey, assoc, (unsigned long)req->assoclen,
                authTag, auth_tag_len);

        /* Compare generated tag with passed in tag. */
        retval = memcmp(src + tempCipherLen, authTag, auth_tag_len) ?
                -EBADMSG : 0;

        if (one_entry_in_sg) {
                if (unlikely(req->src != req->dst)) {
                        scatterwalk_unmap(dst, 0);
                        scatterwalk_done(&dst_sg_walk, 0, 0);
                }
                scatterwalk_unmap(src, 0);
                scatterwalk_unmap(assoc, 0);
                scatterwalk_done(&src_sg_walk, 0, 0);
                scatterwalk_done(&assoc_sg_walk, 0, 0);
        } else {
                scatterwalk_map_and_copy(dst, req->dst, 0, req->cryptlen, 1);
                kfree(src);
        }
        return retval;
}
static struct crypto_alg __rfc4106_alg = {
        .cra_name               = "__gcm-aes-aesni",
        .cra_driver_name        = "__driver-gcm-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_AEAD,
        .cra_blocksize          = 1,
        .cra_ctxsize    = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_aead_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(__rfc4106_alg.cra_list),
        .cra_u = {
                .aead = {
                        .encrypt        = __driver_rfc4106_encrypt,
                        .decrypt        = __driver_rfc4106_decrypt,
                },
        },
};
#endif
static int __init aesni_init(void)
{
        int err;

        if (!cpu_has_aes) {
                printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if ((err = crypto_fpu_init()))
                goto fpu_err;
        if ((err = crypto_register_alg(&aesni_alg)))
                goto aes_err;
        if ((err = crypto_register_alg(&__aesni_alg)))
                goto __aes_err;
        if ((err = crypto_register_alg(&blk_ecb_alg)))
                goto blk_ecb_err;
        if ((err = crypto_register_alg(&blk_cbc_alg)))
                goto blk_cbc_err;
        if ((err = crypto_register_alg(&ablk_ecb_alg)))
                goto ablk_ecb_err;
        if ((err = crypto_register_alg(&ablk_cbc_alg)))
                goto ablk_cbc_err;
#ifdef CONFIG_X86_64
        if ((err = crypto_register_alg(&blk_ctr_alg)))
                goto blk_ctr_err;
        if ((err = crypto_register_alg(&ablk_ctr_alg)))
                goto ablk_ctr_err;
        if ((err = crypto_register_alg(&__rfc4106_alg)))
                goto __aead_gcm_err;
        if ((err = crypto_register_alg(&rfc4106_alg)))
                goto aead_gcm_err;
#ifdef HAS_CTR
        if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
                goto ablk_rfc3686_ctr_err;
#endif
#endif
#ifdef HAS_LRW
        if ((err = crypto_register_alg(&ablk_lrw_alg)))
                goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
        if ((err = crypto_register_alg(&ablk_pcbc_alg)))
                goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
        if ((err = crypto_register_alg(&ablk_xts_alg)))
                goto ablk_xts_err;
#endif
        return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef CONFIG_X86_64
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
ablk_rfc3686_ctr_err:
#endif
        crypto_unregister_alg(&rfc4106_alg);
aead_gcm_err:
        crypto_unregister_alg(&__rfc4106_alg);
__aead_gcm_err:
        crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
        crypto_unregister_alg(&blk_ctr_alg);
blk_ctr_err:
#endif
        crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
        crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
        crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
        crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
        crypto_unregister_alg(&__aesni_alg);
__aes_err:
        crypto_unregister_alg(&aesni_alg);
aes_err:
fpu_err:
        return err;
}
static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
        crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef CONFIG_X86_64
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
#endif
        crypto_unregister_alg(&rfc4106_alg);
        crypto_unregister_alg(&__rfc4106_alg);
        crypto_unregister_alg(&ablk_ctr_alg);
        crypto_unregister_alg(&blk_ctr_alg);
#endif
        crypto_unregister_alg(&ablk_cbc_alg);
        crypto_unregister_alg(&ablk_ecb_alg);
        crypto_unregister_alg(&blk_cbc_alg);
        crypto_unregister_alg(&blk_ecb_alg);
        crypto_unregister_alg(&__aesni_alg);
        crypto_unregister_alg(&aesni_alg);

        crypto_fpu_exit();
}
module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");