crypto/tcrypt.c
/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"
#include "internal.h"
/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
        "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
        "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
        "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
        "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
        "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
        "lzo", "cts", "zlib", NULL
};
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
                               struct scatterlist *sg, int blen, int sec)
{
        unsigned long start, end;
        int bcount;
        int ret;

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                if (enc)
                        ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
                else
                        ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

                if (ret)
                        return ret;
        }

        printk("%d operations in %d seconds (%ld bytes)\n",
               bcount, sec, (long)bcount * blen);
        return 0;
}
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
                              struct scatterlist *sg, int blen)
{
        unsigned long cycles = 0;
        int ret = 0;
        int i;

        local_bh_disable();
        local_irq_disable();

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                if (enc)
                        ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
                else
                        ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

                if (ret)
                        goto out;
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                cycles_t start, end;

                start = get_cycles();
                if (enc)
                        ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
                else
                        ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
                end = get_cycles();

                if (ret)
                        goto out;

                cycles += end - start;
        }

out:
        local_irq_enable();
        local_bh_enable();

        if (ret == 0)
                printk("1 operation in %lu cycles (%d bytes)\n",
                       (cycles + 4) / 8, blen);

        return ret;
}

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
                              struct cipher_speed_template *template,
                              unsigned int tcount, u8 *keysize)
{
        unsigned int ret, i, j, iv_len;
        const char *key;
        char iv[128];
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        const char *e;
        u32 *b_size;

        if (enc == ENCRYPT)
                e = "encryption";
        else
                e = "decryption";

        printk("\ntesting speed of %s %s\n", algo, e);

        tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);

        if (IS_ERR(tfm)) {
                printk("failed to load transform for %s: %ld\n", algo,
                       PTR_ERR(tfm));
                return;
        }
        desc.tfm = tfm;
        desc.flags = 0;

        i = 0;
        do {

                b_size = block_sizes;
                do {
                        struct scatterlist sg[TVMEMSIZE];

                        if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
                                printk("template (%u) too big for "
                                       "tvmem (%lu)\n", *keysize + *b_size,
                                       TVMEMSIZE * PAGE_SIZE);
                                goto out;
                        }

                        printk("test %u (%d bit key, %d byte blocks): ", i,
                               *keysize * 8, *b_size);

                        memset(tvmem[0], 0xff, PAGE_SIZE);

                        /* set key, plain text and IV */
                        key = tvmem[0];
                        for (j = 0; j < tcount; j++) {
                                if (template[j].klen == *keysize) {
                                        key = template[j].key;
                                        break;
                                }
                        }

                        ret = crypto_blkcipher_setkey(tfm, key, *keysize);
                        if (ret) {
                                printk("setkey() failed flags=%x\n",
                                       crypto_blkcipher_get_flags(tfm));
                                goto out;
                        }

                        sg_init_table(sg, TVMEMSIZE);
                        sg_set_buf(sg, tvmem[0] + *keysize,
                                   PAGE_SIZE - *keysize);
                        for (j = 1; j < TVMEMSIZE; j++) {
                                sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
                                memset(tvmem[j], 0xff, PAGE_SIZE);
                        }

                        iv_len = crypto_blkcipher_ivsize(tfm);
                        if (iv_len) {
                                memset(&iv, 0xff, iv_len);
                                crypto_blkcipher_set_iv(tfm, iv, iv_len);
                        }

                        if (sec)
                                ret = test_cipher_jiffies(&desc, enc, sg,
                                                          *b_size, sec);
                        else
                                ret = test_cipher_cycles(&desc, enc, sg,
                                                         *b_size);

                        if (ret) {
                                printk("%s() failed flags=%x\n", e, desc.flags);
                                break;
                        }
                        b_size++;
                        i++;
                } while (*b_size);
                keysize++;
        } while (*keysize);

out:
        crypto_free_blkcipher(tfm);
}
static int test_hash_jiffies_digest(struct hash_desc *desc,
                                    struct scatterlist *sg, int blen,
                                    char *out, int sec)
{
        unsigned long start, end;
        int bcount;
        int ret;

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = crypto_hash_digest(desc, sg, blen, out);
                if (ret)
                        return ret;
        }

        printk("%6u opers/sec, %9lu bytes/sec\n",
               bcount / sec, ((long)bcount * blen) / sec);

        return 0;
}
static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
                             int blen, int plen, char *out, int sec)
{
        unsigned long start, end;
        int bcount, pcount;
        int ret;

        if (plen == blen)
                return test_hash_jiffies_digest(desc, sg, blen, out, sec);

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = crypto_hash_init(desc);
                if (ret)
                        return ret;
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = crypto_hash_update(desc, sg, plen);
                        if (ret)
                                return ret;
                }
                /* we assume there is enough space in 'out' for the result */
                ret = crypto_hash_final(desc, out);
                if (ret)
                        return ret;
        }

        printk("%6u opers/sec, %9lu bytes/sec\n",
               bcount / sec, ((long)bcount * blen) / sec);

        return 0;
}
static int test_hash_cycles_digest(struct hash_desc *desc,
                                   struct scatterlist *sg, int blen, char *out)
{
        unsigned long cycles = 0;
        int i;
        int ret;

        local_bh_disable();
        local_irq_disable();

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = crypto_hash_digest(desc, sg, blen, out);
                if (ret)
                        goto out;
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                cycles_t start, end;

                start = get_cycles();

                ret = crypto_hash_digest(desc, sg, blen, out);
                if (ret)
                        goto out;

                end = get_cycles();

                cycles += end - start;
        }

out:
        local_irq_enable();
        local_bh_enable();

        if (ret)
                return ret;

        printk("%6lu cycles/operation, %4lu cycles/byte\n",
               cycles / 8, cycles / (8 * blen));

        return 0;
}
static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
                            int blen, int plen, char *out)
{
        unsigned long cycles = 0;
        int i, pcount;
        int ret;

        if (plen == blen)
                return test_hash_cycles_digest(desc, sg, blen, out);

        local_bh_disable();
        local_irq_disable();

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = crypto_hash_init(desc);
                if (ret)
                        goto out;
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = crypto_hash_update(desc, sg, plen);
                        if (ret)
                                goto out;
                }
                ret = crypto_hash_final(desc, out);
                if (ret)
                        goto out;
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                cycles_t start, end;

                start = get_cycles();

                ret = crypto_hash_init(desc);
                if (ret)
                        goto out;
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = crypto_hash_update(desc, sg, plen);
                        if (ret)
                                goto out;
                }
                ret = crypto_hash_final(desc, out);
                if (ret)
                        goto out;

                end = get_cycles();

                cycles += end - start;
        }

out:
        local_irq_enable();
        local_bh_enable();

        if (ret)
                return ret;

        printk("%6lu cycles/operation, %4lu cycles/byte\n",
               cycles / 8, cycles / (8 * blen));

        return 0;
}
static void test_hash_sg_init(struct scatterlist *sg)
{
        int i;

        sg_init_table(sg, TVMEMSIZE);
        for (i = 0; i < TVMEMSIZE; i++) {
                sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
                memset(tvmem[i], 0xff, PAGE_SIZE);
        }
}
static void test_hash_speed(const char *algo, unsigned int sec,
                            struct hash_speed *speed)
{
        struct scatterlist sg[TVMEMSIZE];
        struct crypto_hash *tfm;
        struct hash_desc desc;
        static char output[1024];
        int i;
        int ret;

        printk(KERN_INFO "\ntesting speed of %s\n", algo);

        tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);

        if (IS_ERR(tfm)) {
                printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
                       PTR_ERR(tfm));
                return;
        }

        desc.tfm = tfm;
        desc.flags = 0;

        if (crypto_hash_digestsize(tfm) > sizeof(output)) {
                printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
                       crypto_hash_digestsize(tfm), sizeof(output));
                goto out;
        }

        test_hash_sg_init(sg);
        for (i = 0; speed[i].blen != 0; i++) {
                if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
                        printk(KERN_ERR
                               "template (%u) too big for tvmem (%lu)\n",
                               speed[i].blen, TVMEMSIZE * PAGE_SIZE);
                        goto out;
                }

                if (speed[i].klen)
                        crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);

                printk(KERN_INFO "test%3u "
                       "(%5u byte blocks,%5u bytes per update,%4u updates): ",
                       i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

                if (sec)
                        ret = test_hash_jiffies(&desc, sg, speed[i].blen,
                                                speed[i].plen, output, sec);
                else
                        ret = test_hash_cycles(&desc, sg, speed[i].blen,
                                               speed[i].plen, output);

                if (ret) {
                        printk(KERN_ERR "hashing failed ret=%d\n", ret);
                        break;
                }
        }

out:
        crypto_free_hash(tfm);
}
struct tcrypt_result {
        struct completion completion;
        int err;
};

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
        struct tcrypt_result *res = req->data;

        if (err == -EINPROGRESS)
                return;

        res->err = err;
        complete(&res->completion);
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
        if (ret == -EINPROGRESS || ret == -EBUSY) {
                struct tcrypt_result *tr = req->base.data;

                ret = wait_for_completion_interruptible(&tr->completion);
                if (!ret)
                        ret = tr->err;
                INIT_COMPLETION(tr->completion);
        }
        return ret;
}
static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
                                     char *out, int sec)
{
        unsigned long start, end;
        int bcount;
        int ret;

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = do_one_ahash_op(req, crypto_ahash_digest(req));
                if (ret)
                        return ret;
        }

        printk("%6u opers/sec, %9lu bytes/sec\n",
               bcount / sec, ((long)bcount * blen) / sec);

        return 0;
}
static int test_ahash_jiffies(struct ahash_request *req, int blen,
                              int plen, char *out, int sec)
{
        unsigned long start, end;
        int bcount, pcount;
        int ret;

        if (plen == blen)
                return test_ahash_jiffies_digest(req, blen, out, sec);

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = crypto_ahash_init(req);
                if (ret)
                        return ret;
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = do_one_ahash_op(req, crypto_ahash_update(req));
                        if (ret)
                                return ret;
                }
                /* we assume there is enough space in 'out' for the result */
                ret = do_one_ahash_op(req, crypto_ahash_final(req));
                if (ret)
                        return ret;
        }

        pr_cont("%6u opers/sec, %9lu bytes/sec\n",
                bcount / sec, ((long)bcount * blen) / sec);

        return 0;
}
static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
                                    char *out)
{
        unsigned long cycles = 0;
        int ret, i;

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = do_one_ahash_op(req, crypto_ahash_digest(req));
                if (ret)
                        goto out;
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                cycles_t start, end;

                start = get_cycles();

                ret = do_one_ahash_op(req, crypto_ahash_digest(req));
                if (ret)
                        goto out;

                end = get_cycles();

                cycles += end - start;
        }

out:
        if (ret)
                return ret;

        pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
                cycles / 8, cycles / (8 * blen));

        return 0;
}
static int test_ahash_cycles(struct ahash_request *req, int blen,
                             int plen, char *out)
{
        unsigned long cycles = 0;
        int i, pcount, ret;

        if (plen == blen)
                return test_ahash_cycles_digest(req, blen, out);

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = crypto_ahash_init(req);
                if (ret)
                        goto out;
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = do_one_ahash_op(req, crypto_ahash_update(req));
                        if (ret)
                                goto out;
                }
                ret = do_one_ahash_op(req, crypto_ahash_final(req));
                if (ret)
                        goto out;
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                cycles_t start, end;

                start = get_cycles();

                ret = crypto_ahash_init(req);
                if (ret)
                        goto out;
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = do_one_ahash_op(req, crypto_ahash_update(req));
                        if (ret)
                                goto out;
                }
                ret = do_one_ahash_op(req, crypto_ahash_final(req));
                if (ret)
                        goto out;

                end = get_cycles();

                cycles += end - start;
        }

out:
        if (ret)
                return ret;

        pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
                cycles / 8, cycles / (8 * blen));

        return 0;
}
static void test_ahash_speed(const char *algo, unsigned int sec,
                             struct hash_speed *speed)
{
        struct scatterlist sg[TVMEMSIZE];
        struct tcrypt_result tresult;
        struct ahash_request *req;
        struct crypto_ahash *tfm;
        static char output[1024];
        int i, ret;

        printk(KERN_INFO "\ntesting speed of async %s\n", algo);

        tfm = crypto_alloc_ahash(algo, 0, 0);
        if (IS_ERR(tfm)) {
                pr_err("failed to load transform for %s: %ld\n",
                       algo, PTR_ERR(tfm));
                return;
        }

        if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
                pr_err("digestsize(%u) > outputbuffer(%zu)\n",
                       crypto_ahash_digestsize(tfm), sizeof(output));
                goto out;
        }

        test_hash_sg_init(sg);
        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                pr_err("ahash request allocation failure\n");
                goto out;
        }

        init_completion(&tresult.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   tcrypt_complete, &tresult);

        for (i = 0; speed[i].blen != 0; i++) {
                if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
                        pr_err("template (%u) too big for tvmem (%lu)\n",
                               speed[i].blen, TVMEMSIZE * PAGE_SIZE);
                        break;
                }

                pr_info("test%3u "
                        "(%5u byte blocks,%5u bytes per update,%4u updates): ",
                        i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

                ahash_request_set_crypt(req, sg, output, speed[i].plen);

                if (sec)
                        ret = test_ahash_jiffies(req, speed[i].blen,
                                                 speed[i].plen, output, sec);
                else
                        ret = test_ahash_cycles(req, speed[i].blen,
                                                speed[i].plen, output);

                if (ret) {
                        pr_err("hashing failed ret=%d\n", ret);
                        break;
                }
        }

        ahash_request_free(req);

out:
        crypto_free_ahash(tfm);
}
static void test_available(void)
{
        char **name = check;

        while (*name) {
                printk("alg %s ", *name);
                printk(crypto_has_alg(*name, 0, 0) ?
                       "found\n" : "not found\n");
                name++;
        }
}

static inline int tcrypt_test(const char *alg)
{
        int ret;

        ret = alg_test(alg, alg, 0, 0);
        /* non-fips algs return -EINVAL in fips mode */
        if (fips_enabled && ret == -EINVAL)
                ret = 0;
        return ret;
}
static int do_test(int m)
{
        int i;
        int ret = 0;

        switch (m) {
        case 0:
                for (i = 1; i < 200; i++)
                        ret += do_test(i);
                break;

        case 1:
                ret += tcrypt_test("md5");
                break;

        case 2:
                ret += tcrypt_test("sha1");
                break;

        case 3:
                ret += tcrypt_test("ecb(des)");
                ret += tcrypt_test("cbc(des)");
                break;

        case 4:
                ret += tcrypt_test("ecb(des3_ede)");
                ret += tcrypt_test("cbc(des3_ede)");
                break;

        case 5:
                ret += tcrypt_test("md4");
                break;

        case 6:
                ret += tcrypt_test("sha256");
                break;

        case 7:
                ret += tcrypt_test("ecb(blowfish)");
                ret += tcrypt_test("cbc(blowfish)");
                break;

        case 8:
                ret += tcrypt_test("ecb(twofish)");
                ret += tcrypt_test("cbc(twofish)");
                break;

        case 9:
                ret += tcrypt_test("ecb(serpent)");
                break;

        case 10:
                ret += tcrypt_test("ecb(aes)");
                ret += tcrypt_test("cbc(aes)");
                ret += tcrypt_test("lrw(aes)");
                ret += tcrypt_test("xts(aes)");
                ret += tcrypt_test("ctr(aes)");
                ret += tcrypt_test("rfc3686(ctr(aes))");
                break;

        case 11:
                ret += tcrypt_test("sha384");
                break;

        case 12:
                ret += tcrypt_test("sha512");
                break;

        case 13:
                ret += tcrypt_test("deflate");
                break;

        case 14:
                ret += tcrypt_test("ecb(cast5)");
                break;

        case 15:
                ret += tcrypt_test("ecb(cast6)");
                break;

        case 16:
                ret += tcrypt_test("ecb(arc4)");
                break;

        case 17:
                ret += tcrypt_test("michael_mic");
                break;

        case 18:
                ret += tcrypt_test("crc32c");
                break;

        case 19:
                ret += tcrypt_test("ecb(tea)");
                break;

        case 20:
                ret += tcrypt_test("ecb(xtea)");
                break;

        case 21:
                ret += tcrypt_test("ecb(khazad)");
                break;

        case 22:
                ret += tcrypt_test("wp512");
                break;

        case 23:
                ret += tcrypt_test("wp384");
                break;

        case 24:
                ret += tcrypt_test("wp256");
                break;

        case 25:
                ret += tcrypt_test("ecb(tnepres)");
                break;

        case 26:
                ret += tcrypt_test("ecb(anubis)");
                ret += tcrypt_test("cbc(anubis)");
                break;

        case 27:
                ret += tcrypt_test("tgr192");
                break;

        case 28:
                ret += tcrypt_test("tgr160");
                break;

        case 29:
                ret += tcrypt_test("tgr128");
                break;

        case 30:
                ret += tcrypt_test("ecb(xeta)");
                break;

        case 31:
                ret += tcrypt_test("pcbc(fcrypt)");
                break;

        case 32:
                ret += tcrypt_test("ecb(camellia)");
                ret += tcrypt_test("cbc(camellia)");
                break;

        case 33:
                ret += tcrypt_test("sha224");
                break;

        case 34:
                ret += tcrypt_test("salsa20");
                break;

        case 35:
                ret += tcrypt_test("gcm(aes)");
                break;

        case 36:
                ret += tcrypt_test("lzo");
                break;

        case 37:
                ret += tcrypt_test("ccm(aes)");
                break;

        case 38:
                ret += tcrypt_test("cts(cbc(aes))");
                break;

        case 39:
                ret += tcrypt_test("rmd128");
                break;

        case 40:
                ret += tcrypt_test("rmd160");
                break;

        case 41:
                ret += tcrypt_test("rmd256");
                break;

        case 42:
                ret += tcrypt_test("rmd320");
                break;

        case 43:
                ret += tcrypt_test("ecb(seed)");
                break;

        case 44:
                ret += tcrypt_test("zlib");
                break;

        case 45:
                ret += tcrypt_test("rfc4309(ccm(aes))");
                break;

        case 100:
                ret += tcrypt_test("hmac(md5)");
                break;

        case 101:
                ret += tcrypt_test("hmac(sha1)");
                break;

        case 102:
                ret += tcrypt_test("hmac(sha256)");
                break;

        case 103:
                ret += tcrypt_test("hmac(sha384)");
                break;

        case 104:
                ret += tcrypt_test("hmac(sha512)");
                break;

        case 105:
                ret += tcrypt_test("hmac(sha224)");
                break;

        case 106:
                ret += tcrypt_test("xcbc(aes)");
                break;

        case 107:
                ret += tcrypt_test("hmac(rmd128)");
                break;

        case 108:
                ret += tcrypt_test("hmac(rmd160)");
                break;

        case 109:
                ret += tcrypt_test("vmac(aes)");
                break;

        case 150:
                ret += tcrypt_test("ansi_cprng");
                break;

        case 151:
                ret += tcrypt_test("rfc4106(gcm(aes))");
                break;
        case 200:
                test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
                                  speed_template_32_40_48);
                test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
                                  speed_template_32_40_48);
                test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
                                  speed_template_32_48_64);
                test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
                                  speed_template_32_48_64);
                break;

        case 201:
                test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
                                  des3_speed_template, DES3_SPEED_VECTORS,
                                  speed_template_24);
                test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
                                  des3_speed_template, DES3_SPEED_VECTORS,
                                  speed_template_24);
                test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
                                  des3_speed_template, DES3_SPEED_VECTORS,
                                  speed_template_24);
                test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
                                  des3_speed_template, DES3_SPEED_VECTORS,
                                  speed_template_24);
                break;

        case 202:
                test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                break;

        case 203:
                test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
                                  speed_template_8_32);
                test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
                                  speed_template_8_32);
                test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
                                  speed_template_8_32);
                test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
                                  speed_template_8_32);
                break;

        case 204:
                test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
                                  speed_template_8);
                test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
                                  speed_template_8);
                test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
                                  speed_template_8);
                test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
                                  speed_template_8);
                break;

        case 205:
                test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
                                  speed_template_16_24_32);
                break;

        case 206:
                test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
                                  speed_template_16_32);
                break;
        case 300:
                /* fall through */

        case 301:
                test_hash_speed("md4", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 302:
                test_hash_speed("md5", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 303:
                test_hash_speed("sha1", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 304:
                test_hash_speed("sha256", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 305:
                test_hash_speed("sha384", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 306:
                test_hash_speed("sha512", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 307:
                test_hash_speed("wp256", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 308:
                test_hash_speed("wp384", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 309:
                test_hash_speed("wp512", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 310:
                test_hash_speed("tgr128", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 311:
                test_hash_speed("tgr160", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 312:
                test_hash_speed("tgr192", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 313:
                test_hash_speed("sha224", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 314:
                test_hash_speed("rmd128", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 315:
                test_hash_speed("rmd160", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 316:
                test_hash_speed("rmd256", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 317:
                test_hash_speed("rmd320", sec, generic_hash_speed_template);
                if (mode > 300 && mode < 400) break;

        case 318:
                test_hash_speed("ghash-generic", sec, hash_speed_template_16);
                if (mode > 300 && mode < 400) break;

        case 399:
                break;

        case 400:
                /* fall through */

        case 401:
                test_ahash_speed("md4", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 402:
                test_ahash_speed("md5", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 403:
                test_ahash_speed("sha1", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 404:
                test_ahash_speed("sha256", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 405:
                test_ahash_speed("sha384", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 406:
                test_ahash_speed("sha512", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 407:
                test_ahash_speed("wp256", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 408:
                test_ahash_speed("wp384", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 409:
                test_ahash_speed("wp512", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 410:
                test_ahash_speed("tgr128", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 411:
                test_ahash_speed("tgr160", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 412:
                test_ahash_speed("tgr192", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 413:
                test_ahash_speed("sha224", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 414:
                test_ahash_speed("rmd128", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 415:
                test_ahash_speed("rmd160", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 416:
                test_ahash_speed("rmd256", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 417:
                test_ahash_speed("rmd320", sec, generic_hash_speed_template);
                if (mode > 400 && mode < 500) break;

        case 499:
                break;

        case 1000:
                test_available();
                break;
        }

        return ret;
}
static int do_alg_test(const char *alg, u32 type, u32 mask)
{
        return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
               0 : -ENOENT;
}
static int __init tcrypt_mod_init(void)
{
        int err = -ENOMEM;
        int i;

        for (i = 0; i < TVMEMSIZE; i++) {
                tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
                if (!tvmem[i])
                        goto err_free_tv;
        }

        if (alg)
                err = do_alg_test(alg, type, mask);
        else
                err = do_test(mode);

        if (err) {
                printk(KERN_ERR "tcrypt: one or more tests failed!\n");
                goto err_free_tv;
        }

        /* We intentionally return -EAGAIN to prevent keeping the module,
         * unless we're running in fips mode. It does all its work from
         * init() and doesn't offer any runtime functionality, but in
         * the fips case, checking for a successful load is helpful.
         * => we don't need it in the memory, do we?
         *                                        -- mludvig
         */
        if (!fips_enabled)
                err = -EAGAIN;

err_free_tv:
        for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
                free_page((unsigned long)tvmem[i]);

        return err;
}
/*
 * If an init function is provided, an exit function must also be provided
 * to allow module unload.
 */
static void __exit tcrypt_mod_fini(void) { }

module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);
module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
                      "(defaults to zero which uses CPU cycles instead)");
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Quick & dirty crypto testing module");
MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");