arch/x86/crypto/twofish_avx_glue.c
/*
 * Glue Code for AVX assembler version of Twofish Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/twofish.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/scatterwalk.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>

#define TWOFISH_PARALLEL_BLOCKS 8
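
/*
 * TWOFISH_PARALLEL_BLOCKS is the batch width of the 8-way AVX assembler
 * routines declared below: each call processes eight 16-byte blocks.
 */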

/* 8-way parallel cipher functions */
asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst,
        const u8 *src);
asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst,
        const u8 *src);

asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst,
        const u8 *src);
asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst,
        const u8 *src, le128 *iv);

asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst,
        const u8 *src, le128 *iv);
asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst,
        const u8 *src, le128 *iv);

static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
        const u8 *src)
{
        __twofish_enc_blk_3way(ctx, dst, src, false);
}

static void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(twofish_enc_blk));
}

static void twofish_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(twofish_dec_blk));
}
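
/*
 * Dispatch tables for the shared glue_helper code.  Entries are ordered by
 * decreasing batch size: the helper uses the 8-way AVX routines while at
 * least TWOFISH_PARALLEL_BLOCKS blocks remain, then falls back to the 3-way
 * and single-block implementations for the tail.  fpu_blocks_limit controls
 * how much data must be pending before a kernel FPU section is entered.
 */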

static const struct common_glue_ctx twofish_enc = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_enc_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
        } }
};

static const struct common_glue_ctx twofish_ctr = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_ctr_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr) }
        } }
};

static const struct common_glue_ctx twofish_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc) }
        } }
};

static const struct common_glue_ctx twofish_dec = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_dec_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
        } }
};

static const struct common_glue_ctx twofish_dec_cbc = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_cbc_dec_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk_cbc_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk) }
        } }
};

static const struct common_glue_ctx twofish_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec) }
        } }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(twofish_enc_blk), desc,
                                       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_decrypt_128bit(&twofish_dec_cbc, desc, dst, src,
                                       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                     struct scatterlist *src, unsigned int nbytes)
{
        return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);
}
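
/*
 * A kernel FPU section is only entered when at least a full 8-way batch is
 * pending, so the cost of saving and restoring the FPU state is not paid
 * for short requests; those are handled by the scalar implementations.
 */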

static inline bool twofish_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
        return glue_fpu_begin(TF_BLOCK_SIZE, TWOFISH_PARALLEL_BLOCKS, NULL,
                              fpu_enabled, nbytes);
}

static inline void twofish_fpu_end(bool fpu_enabled)
{
        glue_fpu_end(fpu_enabled);
}

struct crypt_priv {
        struct twofish_ctx *ctx;
        bool fpu_enabled;
};
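
/*
 * Callbacks used by lrw_crypt(): each call encrypts or decrypts an in-place
 * buffer of at most TWOFISH_PARALLEL_BLOCKS blocks, preferring the 8-way AVX
 * path and falling back to the 3-way and single-block routines for partial
 * batches.
 */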

static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = TF_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
                twofish_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
                twofish_enc_blk_3way(ctx->ctx, srcdst, srcdst);

        nbytes %= bsize * 3;

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                twofish_enc_blk(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = TF_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
                twofish_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
                twofish_dec_blk_3way(ctx->ctx, srcdst, srcdst);

        nbytes %= bsize * 3;

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                twofish_dec_blk(ctx->ctx, srcdst, srcdst);
}
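
/*
 * CRYPTO_TFM_REQ_MAY_SLEEP is cleared below because the callbacks above may
 * run inside a kernel FPU section, where sleeping is not allowed.
 */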

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[TWOFISH_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->twofish_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = encrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        twofish_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[TWOFISH_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->twofish_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = decrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        twofish_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}
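
/*
 * XTS is handled by the glue helper directly.  The tweak is always computed
 * with the single-block encryption function, for decryption as well, as
 * required by the XTS construction.
 */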

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&twofish_enc_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(twofish_enc_blk),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&twofish_dec_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(twofish_enc_blk),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
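
/*
 * The first five entries are synchronous blkciphers marked
 * CRYPTO_ALG_INTERNAL (priority 0); they may only be used from contexts
 * where the FPU is available.  The remaining five are the user-visible
 * async ablkcipher wrappers (priority 400) built on ablk_helper, which run
 * the internal algorithm directly when the FPU is usable and defer to a
 * cryptd worker otherwise.
 */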

static struct crypto_alg twofish_algs[10] = { {
        .cra_name = "__ecb-twofish-avx",
        .cra_driver_name = "__driver-ecb-twofish-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .setkey = twofish_setkey,
                        .encrypt = ecb_encrypt,
                        .decrypt = ecb_decrypt,
                },
        },
}, {
        .cra_name = "__cbc-twofish-avx",
        .cra_driver_name = "__driver-cbc-twofish-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .setkey = twofish_setkey,
                        .encrypt = cbc_encrypt,
                        .decrypt = cbc_decrypt,
                },
        },
}, {
        .cra_name = "__ctr-twofish-avx",
        .cra_driver_name = "__driver-ctr-twofish-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct twofish_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = twofish_setkey,
                        .encrypt = ctr_crypt,
                        .decrypt = ctr_crypt,
                },
        },
}, {
        .cra_name = "__lrw-twofish-avx",
        .cra_driver_name = "__driver-lrw-twofish-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_lrw_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_exit = lrw_twofish_exit_tfm,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = lrw_twofish_setkey,
                        .encrypt = lrw_encrypt,
                        .decrypt = lrw_decrypt,
                },
        },
}, {
        .cra_name = "__xts-twofish-avx",
        .cra_driver_name = "__driver-xts-twofish-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_xts_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE * 2,
                        .max_keysize = TF_MAX_KEY_SIZE * 2,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = xts_twofish_setkey,
                        .encrypt = xts_encrypt,
                        .decrypt = xts_decrypt,
                },
        },
}, {
        .cra_name = "ecb(twofish)",
        .cra_driver_name = "ecb-twofish-avx",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "cbc(twofish)",
        .cra_driver_name = "cbc-twofish-avx",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = __ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "ctr(twofish)",
        .cra_driver_name = "ctr-twofish-avx",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_encrypt,
                        .geniv = "chainiv",
                },
        },
}, {
        .cra_name = "lrw(twofish)",
        .cra_driver_name = "lrw-twofish-avx",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "xts(twofish)",
        .cra_driver_name = "xts-twofish-avx",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE * 2,
                        .max_keysize = TF_MAX_KEY_SIZE * 2,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
} };
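
/*
 * Registration is gated on XSAVE-managed SSE and YMM state being enabled,
 * i.e. both the CPU and the kernel support AVX; otherwise the module bails
 * out and lower-priority Twofish implementations remain in use.
 */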

static int __init twofish_init(void)
{
        const char *feature_name;

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return crypto_register_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

static void __exit twofish_exit(void)
{
        crypto_unregister_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

module_init(twofish_init);
module_exit(twofish_exit);

MODULE_DESCRIPTION("Twofish Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("twofish");