/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <asm/i387.h>
#include <asm/aes.h>

#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};

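/*
 * The SSE loads used by the assembly implementation expect the expanded
 * key schedule on a 16-byte boundary.  cra_ctxsize is therefore padded
 * by AESNI_ALIGN - 1 below, and aes_ctx() rounds the raw context pointer
 * up to that alignment.
 */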
#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

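/*
 * The AES-NI instructions work on SSE registers.  In interrupt context,
 * CR0.TS being clear means the interrupted task's FPU state is live in
 * those registers, so kernel_fpu_begin() must not be used and the fast
 * path has to be avoided.
 */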
static inline int kernel_fpu_using(void)
{
	if (in_interrupt() && !(read_cr0() & X86_CR0_TS))
		return 1;
	return 0;
}

static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

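/*
 * Key expansion touches the SSE state as well: when the FPU is
 * unavailable, the generic C key-expansion routine is used instead of
 * aesni_set_key().
 */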
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (kernel_fpu_using())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (kernel_fpu_using())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (kernel_fpu_using())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static struct crypto_alg aesni_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};

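/*
 * The "__aes-aesni" variants below skip the FPU-availability checks;
 * they are intended to be reached only through wrappers (cryptd and the
 * "fpu" template) that guarantee the SSE registers are safe to use.
 */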
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(__aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
};

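/*
 * ECB/CBC helpers: a single kernel_fpu_begin()/end() pair covers the
 * whole scatterlist walk.  AES_BLOCK_MASK trims each chunk to a multiple
 * of the block size; any remainder is handed back to
 * blkcipher_walk_done() for the next iteration.
 */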
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};

static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
				    & CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(child, key, key_len);
	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
				    & CRYPTO_TFM_RES_MASK);
	return err;
}

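/*
 * If the FPU is unavailable, the request is copied into the request
 * context and queued to cryptd, whose worker runs in process context
 * where kernel_fpu_begin() is safe.  Otherwise the underlying
 * synchronous blkcipher is invoked directly.
 */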
static int ablk_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (kernel_fpu_using()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;

		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->encrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (kernel_fpu_using()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_decrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;

		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->decrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static void ablk_exit(struct crypto_tfm *tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

static void ablk_init_common(struct crypto_tfm *tfm,
			     struct cryptd_ablkcipher *cryptd_tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

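/*
 * Each exported mode allocates a cryptd wrapper around the matching
 * internal "__driver-..." algorithm.  The request size is set so a
 * deferred cryptd request can be embedded in the caller's request
 * context.
 */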
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ecb_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

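/*
 * CTR, LRW, PCBC and XTS reuse the generic mode templates on top of the
 * "__aes-aesni" cipher.  The "fpu" template wraps each one so the whole
 * operation runs inside a single kernel_fpu_begin()/end() section
 * instead of toggling the FPU per block.
 */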
#ifdef HAS_CTR
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ctr_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
			.geniv		= "chainiv",
		},
	},
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_lrw_alg = {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
	.cra_init		= ablk_lrw_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_xts_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_xts_alg.cra_list),
	.cra_init		= ablk_xts_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

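/*
 * Register the plain cipher first, then the internal blkcipher helpers,
 * then the exported async wrappers; on failure, unwind the registrations
 * in reverse order.
 */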
static int __init aesni_init(void)
{
	int err;

	if (!cpu_has_aes) {
		printk(KERN_ERR "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif

	return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
	return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
	crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
	crypto_unregister_alg(&ablk_ecb_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&__aesni_alg);
	crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
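
/*
 * Usage sketch (illustrative only, not part of this driver): a kernel
 * caller reaches this implementation through the generic crypto API,
 * e.g. for the async CBC wrapper registered above:
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	... build an ablkcipher_request and call
 *	    crypto_ablkcipher_encrypt() ...
 *	crypto_free_ablkcipher(tfm);
 *
 * With AES-NI present, "cbc-aes-aesni" wins the priority-based selection
 * over the generic C implementation.
 */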