/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <asm/i387.h>
#include <asm/aes.h>

#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

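/*
 * Context for the async "ablk" wrappers: holds the cryptd transform
 * used to defer requests when the FPU is not usable in the caller's
 * context.
 */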
struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};

#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

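/*
 * Primitives implemented in intel-aes_asm.S. All of them use SSE
 * registers, so callers must hold the kernel FPU (kernel_fpu_begin())
 * or otherwise know the FPU state is safe to clobber.
 */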
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

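/*
 * cra_ctxsize reserves AESNI_ALIGN - 1 spare bytes, so the raw context
 * pointer can always be rounded up here to the 16-byte boundary the
 * AES-NI key schedule requires.
 */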
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

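/*
 * AES-NI may only run when the FPU is usable in the current context;
 * when it is not (e.g. in an interrupt that arrived while user FPU
 * state was live), fall back to the generic x86 implementation.
 */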
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static struct crypto_alg aesni_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};

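/*
 * "__" variants skip the irq_fpu_usable() check and call the AES-NI
 * routines directly; they are only reachable through paths that have
 * already taken the kernel FPU.
 */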
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(__aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
};

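/*
 * The ECB/CBC helpers walk the scatterlists in chunks; each iteration
 * hands the assembler routines as many whole AES blocks as the chunk
 * holds and reports the partial tail back to blkcipher_walk_done().
 */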
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};

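/*
 * The async "ablk" wrappers service requests synchronously via the
 * internal blkcipher when the FPU is usable, and otherwise defer them
 * to a cryptd workqueue where kernel FPU use is always allowed.
 */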
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
				    & CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(child, key, key_len);
	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
				    & CRYPTO_TFM_RES_MASK);

	return err;
}

static int ablk_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->encrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (!irq_fpu_usable()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_decrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->decrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static void ablk_exit(struct crypto_tfm *tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

static void ablk_init_common(struct crypto_tfm *tfm,
			     struct cryptd_ablkcipher *cryptd_tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ecb_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

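/*
 * The remaining modes reuse the generic ctr/lrw/pcbc/xts templates
 * wrapped in the "fpu" template, which brackets the generic code with
 * kernel_fpu_begin()/kernel_fpu_end() so the underlying
 * "__driver-aes-aesni" cipher can use AES-NI.
 */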
#ifdef HAS_CTR
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ctr_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_lrw_alg = {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
	.cra_init		= ablk_lrw_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_xts_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_xts_alg.cra_list),
	.cra_init		= ablk_xts_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

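/*
 * Registration order matters: the plain cipher and the internal
 * blkcipher helpers must exist before the async wrappers that look
 * them up by name. Errors unwind in reverse order.
 */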
static int __init aesni_init(void)
{
	int err;

	if (!cpu_has_aes) {
		printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif

	return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
	return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
	crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
	crypto_unregister_alg(&ablk_ecb_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&__aesni_alg);
	crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");