/*
 * Glue Code for AVX assembler versions of Serpent Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */
#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/serpent.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>
/* 8-way parallel cipher functions */
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);
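/*
 * Single-block CTR helper: big-endian-encode the counter, encrypt it with
 * the block cipher, XOR the resulting keystream block into the source and
 * advance the counter for the next call.
 */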
void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);
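/*
 * Single-block XTS helpers; these serve as the one-block fallback entries
 * in the XTS dispatch tables below and are exported for use by other
 * serpent glue modules.
 */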
void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);

void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);
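/*
 * Dispatch tables for the glue_helper framework: each table lists the
 * available implementations from widest (8-way AVX) down to the one-block
 * C fallback; fpu_blocks_limit sets the minimum number of blocks for which
 * saving the FPU/AVX state is considered worthwhile.
 */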
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}
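/*
 * FPU-section helpers for the LRW callbacks below: lrw_crypt() invokes
 * encrypt_callback()/decrypt_callback() once per chunk, so the callbacks
 * track the FPU state themselves across invocations.
 */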
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
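/* Per-request state shared with the LRW callbacks. */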
struct crypt_priv {
	struct serpent_ctx *ctx;
	bool fpu_enabled;
};
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_decrypt(ctx->ctx, srcdst, srcdst);
}
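/*
 * An LRW key is the Serpent key followed by one block of tweak-table key
 * material, so the last SERPENT_BLOCK_SIZE bytes feed lrw_init_table().
 */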
int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
							SERPENT_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen -
						SERPENT_BLOCK_SIZE);
}
EXPORT_SYMBOL_GPL(lrw_serpent_setkey);
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
void lrw_serpent_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
EXPORT_SYMBOL_GPL(lrw_serpent_exit_tfm);
int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
EXPORT_SYMBOL_GPL(xts_serpent_setkey);
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
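/*
 * The first five entries are the internal "__"-prefixed synchronous
 * blkciphers, which must only run where the FPU is usable; the last five
 * are the async ablkcipher front-ends that run the internal versions
 * directly when the FPU is usable and defer to cryptd otherwise.
 */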
static struct crypto_alg serpent_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx",
	.cra_driver_name	= "__driver-ecb-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx",
	.cra_driver_name	= "__driver-cbc-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx",
	.cra_driver_name	= "__driver-ctr-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx",
	.cra_driver_name	= "__driver-lrw-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx",
	.cra_driver_name	= "__driver-xts-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
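/*
 * AVX support alone is not sufficient: the OS must also have enabled
 * saving of the YMM state (OSXSAVE), so XGETBV is consulted before
 * registering the algorithms.
 */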
static int __init serpent_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		printk(KERN_INFO "AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		printk(KERN_INFO "AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}
static void __exit serpent_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("serpent");