/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <asm/i387.h>
#include <asm/aes.h>
struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};
#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
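
/*
 * AES-NI instructions use the SSE register file. In interrupt context
 * a cleared CR0.TS bit means the interrupted task's FPU/SSE state is
 * live in the registers and must not be clobbered, so in that case the
 * glue code falls back to the non-AES-NI x86 assembler implementation
 * instead of calling kernel_fpu_begin().
 */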
static inline int kernel_fpu_using(void)
{
	if (in_interrupt() && !(read_cr0() & X86_CR0_TS))
		return 1;
	return 0;
}
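
/*
 * The crypto API only guarantees crypto_tfm_ctx_alignment() for the
 * tfm context. The AES-NI key schedule wants 16-byte alignment, so
 * cra_ctxsize reserves AESNI_ALIGN - 1 bytes of slack and aes_ctx()
 * rounds the raw context pointer up to the next aligned address.
 */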
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
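
/*
 * Key expansion can run without the SSE registers: when the FPU is
 * busy, fall back to the generic C key expansion instead of the
 * assembler aesni_set_key() routine.
 */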
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (kernel_fpu_using())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (kernel_fpu_using())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (kernel_fpu_using())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static struct crypto_alg aesni_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};
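
/*
 * The ECB/CBC helpers walk the scatterlists a chunk at a time and hand
 * whole blocks to the assembler routines; kernel_fpu_begin()/end()
 * brackets the loop so the SSE state is saved and restored once per
 * request rather than once per block.
 */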
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};
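
/*
 * The CBC paths mirror the ECB ones except that the current IV from
 * the walk state is threaded through the assembler routines, which
 * update it as they go so chaining continues correctly across walk
 * iterations.
 */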
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};
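
/*
 * Asynchronous ("ablkcipher") front ends. The "__driver-*" blkcipher
 * algorithms above are internal only; these wrappers expose them as
 * "ecb(aes)"/"cbc(aes)". When the FPU is free, the inner blkcipher is
 * called synchronously; otherwise the request is bounced to a cryptd
 * worker thread, which runs in process context where the SSE registers
 * can be used safely.
 */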
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	return crypto_ablkcipher_setkey(&ctx->cryptd_tfm->base, key, key_len);
}
static int ablk_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (kernel_fpu_using()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->encrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}
static int ablk_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (kernel_fpu_using()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_decrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->decrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}
static void ablk_exit(struct crypto_tfm *tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ablkcipher(ctx->cryptd_tfm);
}
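
/*
 * reqsize reserves room for a nested ablkcipher_request at the end of
 * every request, so ablk_encrypt()/ablk_decrypt() can re-target a
 * request at the cryptd tfm without allocating memory.
 */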
static void ablk_init_common(struct crypto_tfm *tfm,
			     struct cryptd_ablkcipher *cryptd_tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}
static struct crypto_alg ablk_ecb_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}
static struct crypto_alg ablk_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
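
/*
 * Typical use (a sketch, not part of this driver): a kernel user
 * reaches these algorithms through the regular crypto API, letting
 * priority-based selection pick this driver when AES-NI is present:
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	...
 *	crypto_free_ablkcipher(tfm);
 */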
static int __init aesni_init(void)
{
	int err;

	if (!cpu_has_aes) {
		printk(KERN_ERR "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;

	return err;

ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
	return err;
}
static void __exit aesni_exit(void)
{
	crypto_unregister_alg(&ablk_cbc_alg);
	crypto_unregister_alg(&ablk_ecb_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&aesni_alg);
}
module_init(aesni_init);
module_exit(aesni_exit);
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");