/*
 * Glue Code for AVX assembler version of Twofish Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/i387.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/twofish.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/scatterwalk.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>

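/* The AVX assembler routines operate on this many 16-byte blocks at once. */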
#define TWOFISH_PARALLEL_BLOCKS 8

/* 8-way parallel cipher functions */
asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);

asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst,
				 const u8 *src, le128 *iv);

static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__twofish_enc_blk_3way(ctx, dst, src, false);
}

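/*
 * Dispatch tables for the shared x86 glue layer.  The entries are tried
 * in order: while at least .num_blocks blocks of input remain, the
 * matching routine is used, so bulk data takes the 8-way AVX path and
 * the tail falls back to the 3-way and single-block implementations.
 */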
static const struct common_glue_ctx twofish_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_enc_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
	} }
};

static const struct common_glue_ctx twofish_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_ctr_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr) }
	} }
};

static const struct common_glue_ctx twofish_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_dec_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
	} }
};

static const struct common_glue_ctx twofish_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_cbc_dec_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk_cbc_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk) }
	} }
};

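/*
 * Synchronous blkcipher entry points for the internal "__*-twofish-avx"
 * algorithms; they hand the scatterlists to the generic 128-bit glue
 * helpers together with the dispatch tables above.
 */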
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(twofish_enc_blk), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&twofish_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);
}

static inline bool twofish_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(TF_BLOCK_SIZE, TWOFISH_PARALLEL_BLOCKS, NULL,
			      fpu_enabled, nbytes);
}

static inline void twofish_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

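/*
 * Per-request state for the LRW/XTS callbacks below; fpu_enabled tracks
 * whether the SIMD section has already been entered so that
 * twofish_fpu_end() only cleans up when it actually has to.
 */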
struct crypt_priv {
	struct twofish_ctx *ctx;
	bool fpu_enabled;
};

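/*
 * In-place callbacks used by lrw_crypt()/xts_crypt().  The walker hands
 * over at most TWOFISH_PARALLEL_BLOCKS blocks at a time; a full batch is
 * processed by the 8-way AVX routine, smaller tails in 3-block and then
 * single-block steps.
 */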
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
		twofish_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
		twofish_enc_blk_3way(ctx->ctx, srcdst, srcdst);

	nbytes %= bsize * 3;

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_enc_blk(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
		twofish_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
		twofish_dec_blk_3way(ctx->ctx, srcdst, srcdst);

	nbytes %= bsize * 3;

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_dec_blk(ctx->ctx, srcdst, srcdst);
}

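/*
 * LRW drives the callbacks above through the generic lrw_crypt() walker;
 * the on-stack tweak buffer covers one full 8-way batch.  Sleeping is
 * disallowed because the FPU may be held across the walk.
 */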
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->twofish_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->twofish_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

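/*
 * XTS works the same way via xts_crypt(); the tweak is computed with the
 * plain single-block encryption function rather than the AVX path.
 */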
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(twofish_enc_blk),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(twofish_enc_blk),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

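/*
 * Ten registrations: the first five are the internal, synchronous "__*"
 * variants (priority 0, not meant to be selected directly), the last five
 * are the async ablkcipher wrappers (priority 400) that dispatch to them
 * through the ablk/cryptd helper so the AVX code never runs in a context
 * where the FPU is unusable.
 */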
static struct crypto_alg twofish_algs[10] = { {
	.cra_name = "__ecb-twofish-avx",
	.cra_driver_name = "__driver-ecb-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	.cra_name = "__cbc-twofish-avx",
	.cra_driver_name = "__driver-cbc-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = twofish_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	.cra_name = "__ctr-twofish-avx",
	.cra_driver_name = "__driver-ctr-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
}, {
	.cra_name = "__lrw-twofish-avx",
	.cra_driver_name = "__driver-lrw-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_exit = lrw_twofish_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = lrw_twofish_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
}, {
	.cra_name = "__xts-twofish-avx",
	.cra_driver_name = "__driver-xts-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE * 2,
			.max_keysize = TF_MAX_KEY_SIZE * 2,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = xts_twofish_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
}, {
	.cra_name = "ecb(twofish)",
	.cra_driver_name = "ecb-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "cbc(twofish)",
	.cra_driver_name = "cbc-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = __ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "ctr(twofish)",
	.cra_driver_name = "ctr-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
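			/* CTR decryption applies the same keystream operation as encryption */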
			.decrypt = ablk_encrypt,
			.geniv = "chainiv",
		},
	},
}, {
	.cra_name = "lrw(twofish)",
	.cra_driver_name = "lrw-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "xts(twofish)",
	.cra_driver_name = "xts-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE * 2,
			.max_keysize = TF_MAX_KEY_SIZE * 2,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
} };

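/* Register the algorithms only when AVX is present and enabled for use in XCR0. */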
static int __init twofish_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		printk(KERN_INFO "AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		printk(KERN_INFO "AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

static void __exit twofish_exit(void)
{
	crypto_unregister_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

module_init(twofish_init);
module_exit(twofish_exit);

MODULE_DESCRIPTION("Twofish Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("twofish");