crypto/testmgr.c  (linux-2.6/linux-2.6-openrd.git)
1 /*
2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
13 *
14 */
16 #include <crypto/hash.h>
17 #include <linux/err.h>
18 #include <linux/module.h>
19 #include <linux/scatterlist.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <crypto/rng.h>
24 #include "internal.h"
25 #include "testmgr.h"
27 /*
28 * Need slab memory for testing (size in number of pages).
29 */
30 #define XBUFSIZE 8
32 /*
33 * Indexes into the xbuf to simulate cross-page access.
34 */
35 #define IDX1 32
36 #define IDX2 32400
37 #define IDX3 1
38 #define IDX4 8193
39 #define IDX5 22222
40 #define IDX6 17101
41 #define IDX7 27333
42 #define IDX8 3000
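/*
 * Each IDX value is split below as xbuf[IDX[k] >> PAGE_SHIFT] plus
 * offset_in_page(IDX[k]), so the chunked ("np") vectors land at assorted
 * offsets within the eight test pages and exercise scatterlist walks
 * that cross page boundaries.
 */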
44 /*
45 * Used by test_cipher()
46 */
47 #define ENCRYPT 1
48 #define DECRYPT 0
50 struct tcrypt_result {
51 struct completion completion;
52 int err;
55 struct aead_test_suite {
56 struct {
57 struct aead_testvec *vecs;
58 unsigned int count;
59 } enc, dec;
62 struct cipher_test_suite {
63 struct {
64 struct cipher_testvec *vecs;
65 unsigned int count;
66 } enc, dec;
69 struct comp_test_suite {
70 struct {
71 struct comp_testvec *vecs;
72 unsigned int count;
73 } comp, decomp;
76 struct pcomp_test_suite {
77 struct {
78 struct pcomp_testvec *vecs;
79 unsigned int count;
80 } comp, decomp;
83 struct hash_test_suite {
84 struct hash_testvec *vecs;
85 unsigned int count;
88 struct cprng_test_suite {
89 struct cprng_testvec *vecs;
90 unsigned int count;
93 struct alg_test_desc {
94 const char *alg;
95 int (*test)(const struct alg_test_desc *desc, const char *driver,
96 u32 type, u32 mask);
97 int fips_allowed; /* set if alg is allowed in fips mode */
99 union {
100 struct aead_test_suite aead;
101 struct cipher_test_suite cipher;
102 struct comp_test_suite comp;
103 struct pcomp_test_suite pcomp;
104 struct hash_test_suite hash;
105 struct cprng_test_suite cprng;
106 } suite;
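/*
 * One alg_test_desc describes the self-test for a single algorithm
 * name: alg_test() looks the name up, checks fips_allowed when
 * fips_enabled is set, and dispatches through ->test() to one of the
 * alg_test_*() helpers, which pull the matching vectors out of the
 * suite union.
 */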
109 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
111 static void hexdump(unsigned char *buf, unsigned int len)
113 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
114 16, 1,
115 buf, len, false);
118 static void tcrypt_complete(struct crypto_async_request *req, int err)
120 struct tcrypt_result *res = req->data;
122 if (err == -EINPROGRESS)
123 return;
125 res->err = err;
126 complete(&res->completion);
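/*
 * Completion callback shared by all the asynchronous tests: any final
 * status other than -EINPROGRESS is stored in the tcrypt_result and the
 * waiter is woken.  The callers below follow the usual async-request
 * pattern, roughly (a sketch of the loop used in test_hash() and friends):
 *
 *	ret = crypto_ahash_digest(req);
 *	switch (ret) {
 *	case -EINPROGRESS:
 *	case -EBUSY:
 *		ret = wait_for_completion_interruptible(&tresult.completion);
 *		if (!ret && !(ret = tresult.err)) {
 *			INIT_COMPLETION(tresult.completion);
 *			break;
 *		}
 *	default:
 *		... report the error and bail out ...
 *	}
 */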
129 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
131 int i;
133 for (i = 0; i < XBUFSIZE; i++) {
134 buf[i] = (void *)__get_free_page(GFP_KERNEL);
135 if (!buf[i])
136 goto err_free_buf;
139 return 0;
141 err_free_buf:
142 while (i-- > 0)
143 free_page((unsigned long)buf[i]);
145 return -ENOMEM;
148 static void testmgr_free_buf(char *buf[XBUFSIZE])
150 int i;
152 for (i = 0; i < XBUFSIZE; i++)
153 free_page((unsigned long)buf[i]);
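/*
 * xbuf and axbuf are arrays of XBUFSIZE individually allocated pages,
 * not one contiguous buffer: testmgr_alloc_buf() unwinds on failure and
 * the buffer-based tests below WARN_ON() any vector that would run past
 * the end of a single page.
 */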
156 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
157 unsigned int tcount)
159 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
160 unsigned int i, j, k, temp;
161 struct scatterlist sg[8];
162 char result[64];
163 struct ahash_request *req;
164 struct tcrypt_result tresult;
165 void *hash_buff;
166 char *xbuf[XBUFSIZE];
167 int ret = -ENOMEM;
169 if (testmgr_alloc_buf(xbuf))
170 goto out_nobuf;
172 init_completion(&tresult.completion);
174 req = ahash_request_alloc(tfm, GFP_KERNEL);
175 if (!req) {
176 printk(KERN_ERR "alg: hash: Failed to allocate request for "
177 "%s\n", algo);
178 goto out_noreq;
180 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
181 tcrypt_complete, &tresult);
183 j = 0;
184 for (i = 0; i < tcount; i++) {
185 if (template[i].np)
186 continue;
188 j++;
189 memset(result, 0, 64);
191 hash_buff = xbuf[0];
193 ret = -EINVAL;
194 if (WARN_ON(template[i].psize > PAGE_SIZE))
195 goto out;
197 memcpy(hash_buff, template[i].plaintext, template[i].psize);
198 sg_init_one(&sg[0], hash_buff, template[i].psize);
200 if (template[i].ksize) {
201 crypto_ahash_clear_flags(tfm, ~0);
202 ret = crypto_ahash_setkey(tfm, template[i].key,
203 template[i].ksize);
204 if (ret) {
205 printk(KERN_ERR "alg: hash: setkey failed on "
206 "test %d for %s: ret=%d\n", j, algo,
207 -ret);
208 goto out;
212 ahash_request_set_crypt(req, sg, result, template[i].psize);
213 ret = crypto_ahash_digest(req);
214 switch (ret) {
215 case 0:
216 break;
217 case -EINPROGRESS:
218 case -EBUSY:
219 ret = wait_for_completion_interruptible(
220 &tresult.completion);
221 if (!ret && !(ret = tresult.err)) {
222 INIT_COMPLETION(tresult.completion);
223 break;
225 /* fall through */
226 default:
227 printk(KERN_ERR "alg: hash: digest failed on test %d "
228 "for %s: ret=%d\n", j, algo, -ret);
229 goto out;
232 if (memcmp(result, template[i].digest,
233 crypto_ahash_digestsize(tfm))) {
234 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
235 j, algo);
236 hexdump(result, crypto_ahash_digestsize(tfm));
237 ret = -EINVAL;
238 goto out;
242 j = 0;
243 for (i = 0; i < tcount; i++) {
244 if (template[i].np) {
245 j++;
246 memset(result, 0, 64);
248 temp = 0;
249 sg_init_table(sg, template[i].np);
250 ret = -EINVAL;
251 for (k = 0; k < template[i].np; k++) {
252 if (WARN_ON(offset_in_page(IDX[k]) +
253 template[i].tap[k] > PAGE_SIZE))
254 goto out;
255 sg_set_buf(&sg[k],
256 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
257 offset_in_page(IDX[k]),
258 template[i].plaintext + temp,
259 template[i].tap[k]),
260 template[i].tap[k]);
261 temp += template[i].tap[k];
264 if (template[i].ksize) {
265 crypto_ahash_clear_flags(tfm, ~0);
266 ret = crypto_ahash_setkey(tfm, template[i].key,
267 template[i].ksize);
269 if (ret) {
270 printk(KERN_ERR "alg: hash: setkey "
271 "failed on chunking test %d "
272 "for %s: ret=%d\n", j, algo,
273 -ret);
274 goto out;
278 ahash_request_set_crypt(req, sg, result,
279 template[i].psize);
280 ret = crypto_ahash_digest(req);
281 switch (ret) {
282 case 0:
283 break;
284 case -EINPROGRESS:
285 case -EBUSY:
286 ret = wait_for_completion_interruptible(
287 &tresult.completion);
288 if (!ret && !(ret = tresult.err)) {
289 INIT_COMPLETION(tresult.completion);
290 break;
292 /* fall through */
293 default:
294 printk(KERN_ERR "alg: hash: digest failed "
295 "on chunking test %d for %s: "
296 "ret=%d\n", j, algo, -ret);
297 goto out;
300 if (memcmp(result, template[i].digest,
301 crypto_ahash_digestsize(tfm))) {
302 printk(KERN_ERR "alg: hash: Chunking test %d "
303 "failed for %s\n", j, algo);
304 hexdump(result, crypto_ahash_digestsize(tfm));
305 ret = -EINVAL;
306 goto out;
311 ret = 0;
313 out:
314 ahash_request_free(req);
315 out_noreq:
316 testmgr_free_buf(xbuf);
317 out_nobuf:
318 return ret;
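/*
 * test_hash() makes two passes over the template: vectors with np == 0
 * are hashed from a single linear buffer in xbuf[0], and vectors with
 * np != 0 are re-run with the plaintext scattered over np scatterlist
 * entries sized by tap[] to cover page-crossing input.
 */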
321 static int test_aead(struct crypto_aead *tfm, int enc,
322 struct aead_testvec *template, unsigned int tcount)
324 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
325 unsigned int i, j, k, n, temp;
326 int ret = -ENOMEM;
327 char *q;
328 char *key;
329 struct aead_request *req;
330 struct scatterlist sg[8];
331 struct scatterlist asg[8];
332 const char *e;
333 struct tcrypt_result result;
334 unsigned int authsize;
335 void *input;
336 void *assoc;
337 char iv[MAX_IVLEN];
338 char *xbuf[XBUFSIZE];
339 char *axbuf[XBUFSIZE];
341 if (testmgr_alloc_buf(xbuf))
342 goto out_noxbuf;
343 if (testmgr_alloc_buf(axbuf))
344 goto out_noaxbuf;
346 if (enc == ENCRYPT)
347 e = "encryption";
348 else
349 e = "decryption";
351 init_completion(&result.completion);
353 req = aead_request_alloc(tfm, GFP_KERNEL);
354 if (!req) {
355 printk(KERN_ERR "alg: aead: Failed to allocate request for "
356 "%s\n", algo);
357 goto out;
360 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
361 tcrypt_complete, &result);
363 for (i = 0, j = 0; i < tcount; i++) {
364 if (!template[i].np) {
365 j++;
367 /* some templates have no input data but they will
368 * touch input
369 */
370 input = xbuf[0];
371 assoc = axbuf[0];
373 ret = -EINVAL;
374 if (WARN_ON(template[i].ilen > PAGE_SIZE ||
375 template[i].alen > PAGE_SIZE))
376 goto out;
378 memcpy(input, template[i].input, template[i].ilen);
379 memcpy(assoc, template[i].assoc, template[i].alen);
380 if (template[i].iv)
381 memcpy(iv, template[i].iv, MAX_IVLEN);
382 else
383 memset(iv, 0, MAX_IVLEN);
385 crypto_aead_clear_flags(tfm, ~0);
386 if (template[i].wk)
387 crypto_aead_set_flags(
388 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
390 key = template[i].key;
392 ret = crypto_aead_setkey(tfm, key,
393 template[i].klen);
394 if (!ret == template[i].fail) {
395 printk(KERN_ERR "alg: aead: setkey failed on "
396 "test %d for %s: flags=%x\n", j, algo,
397 crypto_aead_get_flags(tfm));
398 goto out;
399 } else if (ret)
400 continue;
402 authsize = abs(template[i].rlen - template[i].ilen);
403 ret = crypto_aead_setauthsize(tfm, authsize);
404 if (ret) {
405 printk(KERN_ERR "alg: aead: Failed to set "
406 "authsize to %u on test %d for %s\n",
407 authsize, j, algo);
408 goto out;
411 sg_init_one(&sg[0], input,
412 template[i].ilen + (enc ? authsize : 0));
414 sg_init_one(&asg[0], assoc, template[i].alen);
416 aead_request_set_crypt(req, sg, sg,
417 template[i].ilen, iv);
419 aead_request_set_assoc(req, asg, template[i].alen);
421 ret = enc ?
422 crypto_aead_encrypt(req) :
423 crypto_aead_decrypt(req);
425 switch (ret) {
426 case 0:
427 if (template[i].novrfy) {
428 /* verification was supposed to fail */
429 printk(KERN_ERR "alg: aead: %s failed "
430 "on test %d for %s: ret was 0, "
431 "expected -EBADMSG\n",
432 e, j, algo);
433 /* so really, we got a bad message */
434 ret = -EBADMSG;
435 goto out;
437 break;
438 case -EINPROGRESS:
439 case -EBUSY:
440 ret = wait_for_completion_interruptible(
441 &result.completion);
442 if (!ret && !(ret = result.err)) {
443 INIT_COMPLETION(result.completion);
444 break;
446 case -EBADMSG:
447 if (template[i].novrfy)
448 /* verification failure was expected */
449 continue;
450 /* fall through */
451 default:
452 printk(KERN_ERR "alg: aead: %s failed on test "
453 "%d for %s: ret=%d\n", e, j, algo, -ret);
454 goto out;
457 q = input;
458 if (memcmp(q, template[i].result, template[i].rlen)) {
459 printk(KERN_ERR "alg: aead: Test %d failed on "
460 "%s for %s\n", j, e, algo);
461 hexdump(q, template[i].rlen);
462 ret = -EINVAL;
463 goto out;
468 for (i = 0, j = 0; i < tcount; i++) {
469 if (template[i].np) {
470 j++;
472 if (template[i].iv)
473 memcpy(iv, template[i].iv, MAX_IVLEN);
474 else
475 memset(iv, 0, MAX_IVLEN);
477 crypto_aead_clear_flags(tfm, ~0);
478 if (template[i].wk)
479 crypto_aead_set_flags(
480 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
481 key = template[i].key;
483 ret = crypto_aead_setkey(tfm, key, template[i].klen);
484 if (!ret == template[i].fail) {
485 printk(KERN_ERR "alg: aead: setkey failed on "
486 "chunk test %d for %s: flags=%x\n", j,
487 algo, crypto_aead_get_flags(tfm));
488 goto out;
489 } else if (ret)
490 continue;
492 authsize = abs(template[i].rlen - template[i].ilen);
494 ret = -EINVAL;
495 sg_init_table(sg, template[i].np);
496 for (k = 0, temp = 0; k < template[i].np; k++) {
497 if (WARN_ON(offset_in_page(IDX[k]) +
498 template[i].tap[k] > PAGE_SIZE))
499 goto out;
501 q = xbuf[IDX[k] >> PAGE_SHIFT] +
502 offset_in_page(IDX[k]);
504 memcpy(q, template[i].input + temp,
505 template[i].tap[k]);
507 n = template[i].tap[k];
508 if (k == template[i].np - 1 && enc)
509 n += authsize;
510 if (offset_in_page(q) + n < PAGE_SIZE)
511 q[n] = 0;
513 sg_set_buf(&sg[k], q, template[i].tap[k]);
514 temp += template[i].tap[k];
517 ret = crypto_aead_setauthsize(tfm, authsize);
518 if (ret) {
519 printk(KERN_ERR "alg: aead: Failed to set "
520 "authsize to %u on chunk test %d for "
521 "%s\n", authsize, j, algo);
522 goto out;
525 if (enc) {
526 if (WARN_ON(sg[k - 1].offset +
527 sg[k - 1].length + authsize >
528 PAGE_SIZE)) {
529 ret = -EINVAL;
530 goto out;
533 sg[k - 1].length += authsize;
536 sg_init_table(asg, template[i].anp);
537 ret = -EINVAL;
538 for (k = 0, temp = 0; k < template[i].anp; k++) {
539 if (WARN_ON(offset_in_page(IDX[k]) +
540 template[i].atap[k] > PAGE_SIZE))
541 goto out;
542 sg_set_buf(&asg[k],
543 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
544 offset_in_page(IDX[k]),
545 template[i].assoc + temp,
546 template[i].atap[k]),
547 template[i].atap[k]);
548 temp += template[i].atap[k];
551 aead_request_set_crypt(req, sg, sg,
552 template[i].ilen,
553 iv);
555 aead_request_set_assoc(req, asg, template[i].alen);
557 ret = enc ?
558 crypto_aead_encrypt(req) :
559 crypto_aead_decrypt(req);
561 switch (ret) {
562 case 0:
563 if (template[i].novrfy) {
564 /* verification was supposed to fail */
565 printk(KERN_ERR "alg: aead: %s failed "
566 "on chunk test %d for %s: ret "
567 "was 0, expected -EBADMSG\n",
568 e, j, algo);
569 /* so really, we got a bad message */
570 ret = -EBADMSG;
571 goto out;
573 break;
574 case -EINPROGRESS:
575 case -EBUSY:
576 ret = wait_for_completion_interruptible(
577 &result.completion);
578 if (!ret && !(ret = result.err)) {
579 INIT_COMPLETION(result.completion);
580 break;
582 case -EBADMSG:
583 if (template[i].novrfy)
584 /* verification failure was expected */
585 continue;
586 /* fall through */
587 default:
588 printk(KERN_ERR "alg: aead: %s failed on "
589 "chunk test %d for %s: ret=%d\n", e, j,
590 algo, -ret);
591 goto out;
594 ret = -EINVAL;
595 for (k = 0, temp = 0; k < template[i].np; k++) {
596 q = xbuf[IDX[k] >> PAGE_SHIFT] +
597 offset_in_page(IDX[k]);
599 n = template[i].tap[k];
600 if (k == template[i].np - 1)
601 n += enc ? authsize : -authsize;
603 if (memcmp(q, template[i].result + temp, n)) {
604 printk(KERN_ERR "alg: aead: Chunk "
605 "test %d failed on %s at page "
606 "%u for %s\n", j, e, k, algo);
607 hexdump(q, n);
608 goto out;
611 q += n;
612 if (k == template[i].np - 1 && !enc) {
613 if (memcmp(q, template[i].input +
614 temp + n, authsize))
615 n = authsize;
616 else
617 n = 0;
618 } else {
619 for (n = 0; offset_in_page(q + n) &&
620 q[n]; n++)
623 if (n) {
624 printk(KERN_ERR "alg: aead: Result "
625 "buffer corruption in chunk "
626 "test %d on %s at page %u for "
627 "%s: %u bytes:\n", j, e, k,
628 algo, n);
629 hexdump(q, n);
630 goto out;
633 temp += template[i].tap[k];
638 ret = 0;
640 out:
641 aead_request_free(req);
642 testmgr_free_buf(axbuf);
643 out_noaxbuf:
644 testmgr_free_buf(xbuf);
645 out_noxbuf:
646 return ret;
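/*
 * test_aead() derives the authentication tag length from the vectors
 * themselves (authsize = abs(rlen - ilen)): on encryption the last
 * scatterlist entry is extended by authsize so the generated tag fits,
 * and on decryption a vector marked novrfy must fail verification with
 * -EBADMSG instead of producing output.
 */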
649 static int test_cipher(struct crypto_cipher *tfm, int enc,
650 struct cipher_testvec *template, unsigned int tcount)
652 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
653 unsigned int i, j, k;
654 char *q;
655 const char *e;
656 void *data;
657 char *xbuf[XBUFSIZE];
658 int ret = -ENOMEM;
660 if (testmgr_alloc_buf(xbuf))
661 goto out_nobuf;
663 if (enc == ENCRYPT)
664 e = "encryption";
665 else
666 e = "decryption";
668 j = 0;
669 for (i = 0; i < tcount; i++) {
670 if (template[i].np)
671 continue;
673 j++;
675 ret = -EINVAL;
676 if (WARN_ON(template[i].ilen > PAGE_SIZE))
677 goto out;
679 data = xbuf[0];
680 memcpy(data, template[i].input, template[i].ilen);
682 crypto_cipher_clear_flags(tfm, ~0);
683 if (template[i].wk)
684 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
686 ret = crypto_cipher_setkey(tfm, template[i].key,
687 template[i].klen);
688 if (!ret == template[i].fail) {
689 printk(KERN_ERR "alg: cipher: setkey failed "
690 "on test %d for %s: flags=%x\n", j,
691 algo, crypto_cipher_get_flags(tfm));
692 goto out;
693 } else if (ret)
694 continue;
696 for (k = 0; k < template[i].ilen;
697 k += crypto_cipher_blocksize(tfm)) {
698 if (enc)
699 crypto_cipher_encrypt_one(tfm, data + k,
700 data + k);
701 else
702 crypto_cipher_decrypt_one(tfm, data + k,
703 data + k);
706 q = data;
707 if (memcmp(q, template[i].result, template[i].rlen)) {
708 printk(KERN_ERR "alg: cipher: Test %d failed "
709 "on %s for %s\n", j, e, algo);
710 hexdump(q, template[i].rlen);
711 ret = -EINVAL;
712 goto out;
716 ret = 0;
718 out:
719 testmgr_free_buf(xbuf);
720 out_nobuf:
721 return ret;
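/*
 * test_cipher() drives the single-block crypto_cipher interface one
 * block at a time and skips the chunked (np != 0) vectors, whereas
 * test_skcipher() below feeds whole scatterlists to the ablkcipher
 * interface, including the np/tap[] layouts spread across pages.
 */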
724 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
725 struct cipher_testvec *template, unsigned int tcount)
727 const char *algo =
728 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
729 unsigned int i, j, k, n, temp;
730 char *q;
731 struct ablkcipher_request *req;
732 struct scatterlist sg[8];
733 const char *e;
734 struct tcrypt_result result;
735 void *data;
736 char iv[MAX_IVLEN];
737 char *xbuf[XBUFSIZE];
738 int ret = -ENOMEM;
740 if (testmgr_alloc_buf(xbuf))
741 goto out_nobuf;
743 if (enc == ENCRYPT)
744 e = "encryption";
745 else
746 e = "decryption";
748 init_completion(&result.completion);
750 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
751 if (!req) {
752 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
753 "for %s\n", algo);
754 goto out;
757 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
758 tcrypt_complete, &result);
760 j = 0;
761 for (i = 0; i < tcount; i++) {
762 if (template[i].iv)
763 memcpy(iv, template[i].iv, MAX_IVLEN);
764 else
765 memset(iv, 0, MAX_IVLEN);
767 if (!(template[i].np)) {
768 j++;
770 ret = -EINVAL;
771 if (WARN_ON(template[i].ilen > PAGE_SIZE))
772 goto out;
774 data = xbuf[0];
775 memcpy(data, template[i].input, template[i].ilen);
777 crypto_ablkcipher_clear_flags(tfm, ~0);
778 if (template[i].wk)
779 crypto_ablkcipher_set_flags(
780 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
782 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
783 template[i].klen);
784 if (!ret == template[i].fail) {
785 printk(KERN_ERR "alg: skcipher: setkey failed "
786 "on test %d for %s: flags=%x\n", j,
787 algo, crypto_ablkcipher_get_flags(tfm));
788 goto out;
789 } else if (ret)
790 continue;
792 sg_init_one(&sg[0], data, template[i].ilen);
794 ablkcipher_request_set_crypt(req, sg, sg,
795 template[i].ilen, iv);
796 ret = enc ?
797 crypto_ablkcipher_encrypt(req) :
798 crypto_ablkcipher_decrypt(req);
800 switch (ret) {
801 case 0:
802 break;
803 case -EINPROGRESS:
804 case -EBUSY:
805 ret = wait_for_completion_interruptible(
806 &result.completion);
807 if (!ret && !((ret = result.err))) {
808 INIT_COMPLETION(result.completion);
809 break;
811 /* fall through */
812 default:
813 printk(KERN_ERR "alg: skcipher: %s failed on "
814 "test %d for %s: ret=%d\n", e, j, algo,
815 -ret);
816 goto out;
819 q = data;
820 if (memcmp(q, template[i].result, template[i].rlen)) {
821 printk(KERN_ERR "alg: skcipher: Test %d "
822 "failed on %s for %s\n", j, e, algo);
823 hexdump(q, template[i].rlen);
824 ret = -EINVAL;
825 goto out;
830 j = 0;
831 for (i = 0; i < tcount; i++) {
833 if (template[i].iv)
834 memcpy(iv, template[i].iv, MAX_IVLEN);
835 else
836 memset(iv, 0, MAX_IVLEN);
838 if (template[i].np) {
839 j++;
841 crypto_ablkcipher_clear_flags(tfm, ~0);
842 if (template[i].wk)
843 crypto_ablkcipher_set_flags(
844 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
846 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
847 template[i].klen);
848 if (!ret == template[i].fail) {
849 printk(KERN_ERR "alg: skcipher: setkey failed "
850 "on chunk test %d for %s: flags=%x\n",
851 j, algo,
852 crypto_ablkcipher_get_flags(tfm));
853 goto out;
854 } else if (ret)
855 continue;
857 temp = 0;
858 ret = -EINVAL;
859 sg_init_table(sg, template[i].np);
860 for (k = 0; k < template[i].np; k++) {
861 if (WARN_ON(offset_in_page(IDX[k]) +
862 template[i].tap[k] > PAGE_SIZE))
863 goto out;
865 q = xbuf[IDX[k] >> PAGE_SHIFT] +
866 offset_in_page(IDX[k]);
868 memcpy(q, template[i].input + temp,
869 template[i].tap[k]);
871 if (offset_in_page(q) + template[i].tap[k] <
872 PAGE_SIZE)
873 q[template[i].tap[k]] = 0;
875 sg_set_buf(&sg[k], q, template[i].tap[k]);
877 temp += template[i].tap[k];
880 ablkcipher_request_set_crypt(req, sg, sg,
881 template[i].ilen, iv);
883 ret = enc ?
884 crypto_ablkcipher_encrypt(req) :
885 crypto_ablkcipher_decrypt(req);
887 switch (ret) {
888 case 0:
889 break;
890 case -EINPROGRESS:
891 case -EBUSY:
892 ret = wait_for_completion_interruptible(
893 &result.completion);
894 if (!ret && !((ret = result.err))) {
895 INIT_COMPLETION(result.completion);
896 break;
898 /* fall through */
899 default:
900 printk(KERN_ERR "alg: skcipher: %s failed on "
901 "chunk test %d for %s: ret=%d\n", e, j,
902 algo, -ret);
903 goto out;
906 temp = 0;
907 ret = -EINVAL;
908 for (k = 0; k < template[i].np; k++) {
909 q = xbuf[IDX[k] >> PAGE_SHIFT] +
910 offset_in_page(IDX[k]);
912 if (memcmp(q, template[i].result + temp,
913 template[i].tap[k])) {
914 printk(KERN_ERR "alg: skcipher: Chunk "
915 "test %d failed on %s at page "
916 "%u for %s\n", j, e, k, algo);
917 hexdump(q, template[i].tap[k]);
918 goto out;
921 q += template[i].tap[k];
922 for (n = 0; offset_in_page(q + n) && q[n]; n++)
924 if (n) {
925 printk(KERN_ERR "alg: skcipher: "
926 "Result buffer corruption in "
927 "chunk test %d on %s at page "
928 "%u for %s: %u bytes:\n", j, e,
929 k, algo, n);
930 hexdump(q, n);
931 goto out;
933 temp += template[i].tap[k];
938 ret = 0;
940 out:
941 ablkcipher_request_free(req);
942 testmgr_free_buf(xbuf);
943 out_nobuf:
944 return ret;
947 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
948 struct comp_testvec *dtemplate, int ctcount, int dtcount)
950 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
951 unsigned int i;
952 char result[COMP_BUF_SIZE];
953 int ret;
955 for (i = 0; i < ctcount; i++) {
956 int ilen;
957 unsigned int dlen = COMP_BUF_SIZE;
959 memset(result, 0, sizeof (result));
961 ilen = ctemplate[i].inlen;
962 ret = crypto_comp_compress(tfm, ctemplate[i].input,
963 ilen, result, &dlen);
964 if (ret) {
965 printk(KERN_ERR "alg: comp: compression failed "
966 "on test %d for %s: ret=%d\n", i + 1, algo,
967 -ret);
968 goto out;
971 if (dlen != ctemplate[i].outlen) {
972 printk(KERN_ERR "alg: comp: Compression test %d "
973 "failed for %s: output len = %d\n", i + 1, algo,
974 dlen);
975 ret = -EINVAL;
976 goto out;
979 if (memcmp(result, ctemplate[i].output, dlen)) {
980 printk(KERN_ERR "alg: comp: Compression test %d "
981 "failed for %s\n", i + 1, algo);
982 hexdump(result, dlen);
983 ret = -EINVAL;
984 goto out;
988 for (i = 0; i < dtcount; i++) {
989 int ilen;
990 unsigned int dlen = COMP_BUF_SIZE;
992 memset(result, 0, sizeof (result));
994 ilen = dtemplate[i].inlen;
995 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
996 ilen, result, &dlen);
997 if (ret) {
998 printk(KERN_ERR "alg: comp: decompression failed "
999 "on test %d for %s: ret=%d\n", i + 1, algo,
1000 -ret);
1001 goto out;
1004 if (dlen != dtemplate[i].outlen) {
1005 printk(KERN_ERR "alg: comp: Decompression test %d "
1006 "failed for %s: output len = %d\n", i + 1, algo,
1007 dlen);
1008 ret = -EINVAL;
1009 goto out;
1012 if (memcmp(result, dtemplate[i].output, dlen)) {
1013 printk(KERN_ERR "alg: comp: Decompression test %d "
1014 "failed for %s\n", i + 1, algo);
1015 hexdump(result, dlen);
1016 ret = -EINVAL;
1017 goto out;
1021 ret = 0;
1023 out:
1024 return ret;
1027 static int test_pcomp(struct crypto_pcomp *tfm,
1028 struct pcomp_testvec *ctemplate,
1029 struct pcomp_testvec *dtemplate, int ctcount,
1030 int dtcount)
1032 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1033 unsigned int i;
1034 char result[COMP_BUF_SIZE];
1035 int res;
1037 for (i = 0; i < ctcount; i++) {
1038 struct comp_request req;
1039 unsigned int produced = 0;
1041 res = crypto_compress_setup(tfm, ctemplate[i].params,
1042 ctemplate[i].paramsize);
1043 if (res) {
1044 pr_err("alg: pcomp: compression setup failed on test "
1045 "%d for %s: error=%d\n", i + 1, algo, res);
1046 return res;
1049 res = crypto_compress_init(tfm);
1050 if (res) {
1051 pr_err("alg: pcomp: compression init failed on test "
1052 "%d for %s: error=%d\n", i + 1, algo, res);
1053 return res;
1056 memset(result, 0, sizeof(result));
1058 req.next_in = ctemplate[i].input;
1059 req.avail_in = ctemplate[i].inlen / 2;
1060 req.next_out = result;
1061 req.avail_out = ctemplate[i].outlen / 2;
1063 res = crypto_compress_update(tfm, &req);
1064 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1065 pr_err("alg: pcomp: compression update failed on test "
1066 "%d for %s: error=%d\n", i + 1, algo, res);
1067 return res;
1069 if (res > 0)
1070 produced += res;
1072 /* Add remaining input data */
1073 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1075 res = crypto_compress_update(tfm, &req);
1076 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1077 pr_err("alg: pcomp: compression update failed on test "
1078 "%d for %s: error=%d\n", i + 1, algo, res);
1079 return res;
1081 if (res > 0)
1082 produced += res;
1084 /* Provide remaining output space */
1085 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1087 res = crypto_compress_final(tfm, &req);
1088 if (res < 0) {
1089 pr_err("alg: pcomp: compression final failed on test "
1090 "%d for %s: error=%d\n", i + 1, algo, res);
1091 return res;
1093 produced += res;
1095 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1096 pr_err("alg: comp: Compression test %d failed for %s: "
1097 "output len = %d (expected %d)\n", i + 1, algo,
1098 COMP_BUF_SIZE - req.avail_out,
1099 ctemplate[i].outlen);
1100 return -EINVAL;
1103 if (produced != ctemplate[i].outlen) {
1104 pr_err("alg: comp: Compression test %d failed for %s: "
1105 "returned len = %u (expected %d)\n", i + 1,
1106 algo, produced, ctemplate[i].outlen);
1107 return -EINVAL;
1110 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1111 pr_err("alg: pcomp: Compression test %d failed for "
1112 "%s\n", i + 1, algo);
1113 hexdump(result, ctemplate[i].outlen);
1114 return -EINVAL;
1118 for (i = 0; i < dtcount; i++) {
1119 struct comp_request req;
1120 unsigned int produced = 0;
1122 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1123 dtemplate[i].paramsize);
1124 if (res) {
1125 pr_err("alg: pcomp: decompression setup failed on "
1126 "test %d for %s: error=%d\n", i + 1, algo, res);
1127 return res;
1130 res = crypto_decompress_init(tfm);
1131 if (res) {
1132 pr_err("alg: pcomp: decompression init failed on test "
1133 "%d for %s: error=%d\n", i + 1, algo, res);
1134 return res;
1137 memset(result, 0, sizeof(result));
1139 req.next_in = dtemplate[i].input;
1140 req.avail_in = dtemplate[i].inlen / 2;
1141 req.next_out = result;
1142 req.avail_out = dtemplate[i].outlen / 2;
1144 res = crypto_decompress_update(tfm, &req);
1145 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1146 pr_err("alg: pcomp: decompression update failed on "
1147 "test %d for %s: error=%d\n", i + 1, algo, res);
1148 return res;
1150 if (res > 0)
1151 produced += res;
1153 /* Add remaining input data */
1154 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1156 res = crypto_decompress_update(tfm, &req);
1157 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1158 pr_err("alg: pcomp: decompression update failed on "
1159 "test %d for %s: error=%d\n", i + 1, algo, res);
1160 return res;
1162 if (res > 0)
1163 produced += res;
1165 /* Provide remaining output space */
1166 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1168 res = crypto_decompress_final(tfm, &req);
1169 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1170 pr_err("alg: pcomp: decompression final failed on "
1171 "test %d for %s: error=%d\n", i + 1, algo, res);
1172 return res;
1174 if (res > 0)
1175 produced += res;
1177 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1178 pr_err("alg: comp: Decompression test %d failed for "
1179 "%s: output len = %d (expected %d)\n", i + 1,
1180 algo, COMP_BUF_SIZE - req.avail_out,
1181 dtemplate[i].outlen);
1182 return -EINVAL;
1185 if (produced != dtemplate[i].outlen) {
1186 pr_err("alg: comp: Decompression test %d failed for "
1187 "%s: returned len = %u (expected %d)\n", i + 1,
1188 algo, produced, dtemplate[i].outlen);
1189 return -EINVAL;
1192 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1193 pr_err("alg: pcomp: Decompression test %d failed for "
1194 "%s\n", i + 1, algo);
1195 hexdump(result, dtemplate[i].outlen);
1196 return -EINVAL;
1200 return 0;
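/*
 * The two compression tests differ in API coverage: test_comp() uses the
 * one-shot crypto_comp_compress()/_decompress() calls, while test_pcomp()
 * exercises the partial-(de)compression API by handing over only half of
 * the input and half of the output space first, then finishing the stream
 * with a second update and a final() call, checking both the consumed
 * output space and the returned byte counts.
 */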
1204 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1205 unsigned int tcount)
1207 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1208 int err, i, j, seedsize;
1209 u8 *seed;
1210 char result[32];
1212 seedsize = crypto_rng_seedsize(tfm);
1214 seed = kmalloc(seedsize, GFP_KERNEL);
1215 if (!seed) {
1216 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1217 "for %s\n", algo);
1218 return -ENOMEM;
1221 for (i = 0; i < tcount; i++) {
1222 memset(result, 0, 32);
1224 memcpy(seed, template[i].v, template[i].vlen);
1225 memcpy(seed + template[i].vlen, template[i].key,
1226 template[i].klen);
1227 memcpy(seed + template[i].vlen + template[i].klen,
1228 template[i].dt, template[i].dtlen);
1230 err = crypto_rng_reset(tfm, seed, seedsize);
1231 if (err) {
1232 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1233 "for %s\n", algo);
1234 goto out;
1237 for (j = 0; j < template[i].loops; j++) {
1238 err = crypto_rng_get_bytes(tfm, result,
1239 template[i].rlen);
1240 if (err != template[i].rlen) {
1241 printk(KERN_ERR "alg: cprng: Failed to obtain "
1242 "the correct amount of random data for "
1243 "%s (requested %d, got %d)\n", algo,
1244 template[i].rlen, err);
1245 goto out;
1249 err = memcmp(result, template[i].result,
1250 template[i].rlen);
1251 if (err) {
1252 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1253 i, algo);
1254 hexdump(result, template[i].rlen);
1255 err = -EINVAL;
1256 goto out;
1260 out:
1261 kfree(seed);
1262 return err;
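/*
 * For the ansi_cprng vectors the seed handed to crypto_rng_reset() is
 * simply V, the key and DT concatenated in that order, so the buffer is
 * allocated once at crypto_rng_seedsize() bytes and refilled from
 * template[i].v, .key and .dt for every test.
 */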
1265 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1266 u32 type, u32 mask)
1268 struct crypto_aead *tfm;
1269 int err = 0;
1271 tfm = crypto_alloc_aead(driver, type, mask);
1272 if (IS_ERR(tfm)) {
1273 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1274 "%ld\n", driver, PTR_ERR(tfm));
1275 return PTR_ERR(tfm);
1278 if (desc->suite.aead.enc.vecs) {
1279 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1280 desc->suite.aead.enc.count);
1281 if (err)
1282 goto out;
1285 if (!err && desc->suite.aead.dec.vecs)
1286 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1287 desc->suite.aead.dec.count);
1289 out:
1290 crypto_free_aead(tfm);
1291 return err;
1294 static int alg_test_cipher(const struct alg_test_desc *desc,
1295 const char *driver, u32 type, u32 mask)
1297 struct crypto_cipher *tfm;
1298 int err = 0;
1300 tfm = crypto_alloc_cipher(driver, type, mask);
1301 if (IS_ERR(tfm)) {
1302 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1303 "%s: %ld\n", driver, PTR_ERR(tfm));
1304 return PTR_ERR(tfm);
1307 if (desc->suite.cipher.enc.vecs) {
1308 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1309 desc->suite.cipher.enc.count);
1310 if (err)
1311 goto out;
1314 if (desc->suite.cipher.dec.vecs)
1315 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1316 desc->suite.cipher.dec.count);
1318 out:
1319 crypto_free_cipher(tfm);
1320 return err;
1323 static int alg_test_skcipher(const struct alg_test_desc *desc,
1324 const char *driver, u32 type, u32 mask)
1326 struct crypto_ablkcipher *tfm;
1327 int err = 0;
1329 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1330 if (IS_ERR(tfm)) {
1331 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1332 "%s: %ld\n", driver, PTR_ERR(tfm));
1333 return PTR_ERR(tfm);
1336 if (desc->suite.cipher.enc.vecs) {
1337 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1338 desc->suite.cipher.enc.count);
1339 if (err)
1340 goto out;
1343 if (desc->suite.cipher.dec.vecs)
1344 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1345 desc->suite.cipher.dec.count);
1347 out:
1348 crypto_free_ablkcipher(tfm);
1349 return err;
1352 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1353 u32 type, u32 mask)
1355 struct crypto_comp *tfm;
1356 int err;
1358 tfm = crypto_alloc_comp(driver, type, mask);
1359 if (IS_ERR(tfm)) {
1360 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1361 "%ld\n", driver, PTR_ERR(tfm));
1362 return PTR_ERR(tfm);
1365 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1366 desc->suite.comp.decomp.vecs,
1367 desc->suite.comp.comp.count,
1368 desc->suite.comp.decomp.count);
1370 crypto_free_comp(tfm);
1371 return err;
1374 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1375 u32 type, u32 mask)
1377 struct crypto_pcomp *tfm;
1378 int err;
1380 tfm = crypto_alloc_pcomp(driver, type, mask);
1381 if (IS_ERR(tfm)) {
1382 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1383 driver, PTR_ERR(tfm));
1384 return PTR_ERR(tfm);
1387 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1388 desc->suite.pcomp.decomp.vecs,
1389 desc->suite.pcomp.comp.count,
1390 desc->suite.pcomp.decomp.count);
1392 crypto_free_pcomp(tfm);
1393 return err;
1396 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1397 u32 type, u32 mask)
1399 struct crypto_ahash *tfm;
1400 int err;
1402 tfm = crypto_alloc_ahash(driver, type, mask);
1403 if (IS_ERR(tfm)) {
1404 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1405 "%ld\n", driver, PTR_ERR(tfm));
1406 return PTR_ERR(tfm);
1409 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1411 crypto_free_ahash(tfm);
1412 return err;
1415 static int alg_test_crc32c(const struct alg_test_desc *desc,
1416 const char *driver, u32 type, u32 mask)
1418 struct crypto_shash *tfm;
1419 u32 val;
1420 int err;
1422 err = alg_test_hash(desc, driver, type, mask);
1423 if (err)
1424 goto out;
1426 tfm = crypto_alloc_shash(driver, type, mask);
1427 if (IS_ERR(tfm)) {
1428 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1429 "%ld\n", driver, PTR_ERR(tfm));
1430 err = PTR_ERR(tfm);
1431 goto out;
1434 do {
1435 struct {
1436 struct shash_desc shash;
1437 char ctx[crypto_shash_descsize(tfm)];
1438 } sdesc;
1440 sdesc.shash.tfm = tfm;
1441 sdesc.shash.flags = 0;
1443 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1444 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1445 if (err) {
1446 printk(KERN_ERR "alg: crc32c: Operation failed for "
1447 "%s: %d\n", driver, err);
1448 break;
1451 if (val != ~420553207) {
1452 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1453 "%d\n", driver, val);
1454 err = -EINVAL;
1456 } while (0);
1458 crypto_free_shash(tfm);
1460 out:
1461 return err;
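/*
 * On top of the generic hash vectors, crc32c gets one extra shash check:
 * the partial state in the descriptor context is seeded with the
 * constant 420553207 (via le32_to_cpu) and crypto_shash_final() must
 * return its bitwise complement, matching the inverted result that
 * crc32c produces from its internal state.
 */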
1464 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1465 u32 type, u32 mask)
1467 struct crypto_rng *rng;
1468 int err;
1470 rng = crypto_alloc_rng(driver, type, mask);
1471 if (IS_ERR(rng)) {
1472 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1473 "%ld\n", driver, PTR_ERR(rng));
1474 return PTR_ERR(rng);
1477 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1479 crypto_free_rng(rng);
1481 return err;
1484 /* Please keep this list sorted by algorithm name. */
1485 static const struct alg_test_desc alg_test_descs[] = {
1487 .alg = "ansi_cprng",
1488 .test = alg_test_cprng,
1489 .fips_allowed = 1,
1490 .suite = {
1491 .cprng = {
1492 .vecs = ansi_cprng_aes_tv_template,
1493 .count = ANSI_CPRNG_AES_TEST_VECTORS
1496 }, {
1497 .alg = "cbc(aes)",
1498 .test = alg_test_skcipher,
1499 .fips_allowed = 1,
1500 .suite = {
1501 .cipher = {
1502 .enc = {
1503 .vecs = aes_cbc_enc_tv_template,
1504 .count = AES_CBC_ENC_TEST_VECTORS
1506 .dec = {
1507 .vecs = aes_cbc_dec_tv_template,
1508 .count = AES_CBC_DEC_TEST_VECTORS
1512 }, {
1513 .alg = "cbc(anubis)",
1514 .test = alg_test_skcipher,
1515 .suite = {
1516 .cipher = {
1517 .enc = {
1518 .vecs = anubis_cbc_enc_tv_template,
1519 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1521 .dec = {
1522 .vecs = anubis_cbc_dec_tv_template,
1523 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1527 }, {
1528 .alg = "cbc(blowfish)",
1529 .test = alg_test_skcipher,
1530 .suite = {
1531 .cipher = {
1532 .enc = {
1533 .vecs = bf_cbc_enc_tv_template,
1534 .count = BF_CBC_ENC_TEST_VECTORS
1536 .dec = {
1537 .vecs = bf_cbc_dec_tv_template,
1538 .count = BF_CBC_DEC_TEST_VECTORS
1542 }, {
1543 .alg = "cbc(camellia)",
1544 .test = alg_test_skcipher,
1545 .suite = {
1546 .cipher = {
1547 .enc = {
1548 .vecs = camellia_cbc_enc_tv_template,
1549 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1551 .dec = {
1552 .vecs = camellia_cbc_dec_tv_template,
1553 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1557 }, {
1558 .alg = "cbc(des)",
1559 .test = alg_test_skcipher,
1560 .suite = {
1561 .cipher = {
1562 .enc = {
1563 .vecs = des_cbc_enc_tv_template,
1564 .count = DES_CBC_ENC_TEST_VECTORS
1566 .dec = {
1567 .vecs = des_cbc_dec_tv_template,
1568 .count = DES_CBC_DEC_TEST_VECTORS
1572 }, {
1573 .alg = "cbc(des3_ede)",
1574 .test = alg_test_skcipher,
1575 .fips_allowed = 1,
1576 .suite = {
1577 .cipher = {
1578 .enc = {
1579 .vecs = des3_ede_cbc_enc_tv_template,
1580 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1582 .dec = {
1583 .vecs = des3_ede_cbc_dec_tv_template,
1584 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1588 }, {
1589 .alg = "cbc(twofish)",
1590 .test = alg_test_skcipher,
1591 .suite = {
1592 .cipher = {
1593 .enc = {
1594 .vecs = tf_cbc_enc_tv_template,
1595 .count = TF_CBC_ENC_TEST_VECTORS
1597 .dec = {
1598 .vecs = tf_cbc_dec_tv_template,
1599 .count = TF_CBC_DEC_TEST_VECTORS
1603 }, {
1604 .alg = "ccm(aes)",
1605 .test = alg_test_aead,
1606 .fips_allowed = 1,
1607 .suite = {
1608 .aead = {
1609 .enc = {
1610 .vecs = aes_ccm_enc_tv_template,
1611 .count = AES_CCM_ENC_TEST_VECTORS
1613 .dec = {
1614 .vecs = aes_ccm_dec_tv_template,
1615 .count = AES_CCM_DEC_TEST_VECTORS
1619 }, {
1620 .alg = "crc32c",
1621 .test = alg_test_crc32c,
1622 .fips_allowed = 1,
1623 .suite = {
1624 .hash = {
1625 .vecs = crc32c_tv_template,
1626 .count = CRC32C_TEST_VECTORS
1629 }, {
1630 .alg = "ctr(aes)",
1631 .test = alg_test_skcipher,
1632 .fips_allowed = 1,
1633 .suite = {
1634 .cipher = {
1635 .enc = {
1636 .vecs = aes_ctr_enc_tv_template,
1637 .count = AES_CTR_ENC_TEST_VECTORS
1639 .dec = {
1640 .vecs = aes_ctr_dec_tv_template,
1641 .count = AES_CTR_DEC_TEST_VECTORS
1645 }, {
1646 .alg = "cts(cbc(aes))",
1647 .test = alg_test_skcipher,
1648 .suite = {
1649 .cipher = {
1650 .enc = {
1651 .vecs = cts_mode_enc_tv_template,
1652 .count = CTS_MODE_ENC_TEST_VECTORS
1654 .dec = {
1655 .vecs = cts_mode_dec_tv_template,
1656 .count = CTS_MODE_DEC_TEST_VECTORS
1660 }, {
1661 .alg = "deflate",
1662 .test = alg_test_comp,
1663 .suite = {
1664 .comp = {
1665 .comp = {
1666 .vecs = deflate_comp_tv_template,
1667 .count = DEFLATE_COMP_TEST_VECTORS
1669 .decomp = {
1670 .vecs = deflate_decomp_tv_template,
1671 .count = DEFLATE_DECOMP_TEST_VECTORS
1675 }, {
1676 .alg = "ecb(aes)",
1677 .test = alg_test_skcipher,
1678 .fips_allowed = 1,
1679 .suite = {
1680 .cipher = {
1681 .enc = {
1682 .vecs = aes_enc_tv_template,
1683 .count = AES_ENC_TEST_VECTORS
1685 .dec = {
1686 .vecs = aes_dec_tv_template,
1687 .count = AES_DEC_TEST_VECTORS
1691 }, {
1692 .alg = "ecb(anubis)",
1693 .test = alg_test_skcipher,
1694 .suite = {
1695 .cipher = {
1696 .enc = {
1697 .vecs = anubis_enc_tv_template,
1698 .count = ANUBIS_ENC_TEST_VECTORS
1700 .dec = {
1701 .vecs = anubis_dec_tv_template,
1702 .count = ANUBIS_DEC_TEST_VECTORS
1706 }, {
1707 .alg = "ecb(arc4)",
1708 .test = alg_test_skcipher,
1709 .suite = {
1710 .cipher = {
1711 .enc = {
1712 .vecs = arc4_enc_tv_template,
1713 .count = ARC4_ENC_TEST_VECTORS
1715 .dec = {
1716 .vecs = arc4_dec_tv_template,
1717 .count = ARC4_DEC_TEST_VECTORS
1721 }, {
1722 .alg = "ecb(blowfish)",
1723 .test = alg_test_skcipher,
1724 .suite = {
1725 .cipher = {
1726 .enc = {
1727 .vecs = bf_enc_tv_template,
1728 .count = BF_ENC_TEST_VECTORS
1730 .dec = {
1731 .vecs = bf_dec_tv_template,
1732 .count = BF_DEC_TEST_VECTORS
1736 }, {
1737 .alg = "ecb(camellia)",
1738 .test = alg_test_skcipher,
1739 .suite = {
1740 .cipher = {
1741 .enc = {
1742 .vecs = camellia_enc_tv_template,
1743 .count = CAMELLIA_ENC_TEST_VECTORS
1745 .dec = {
1746 .vecs = camellia_dec_tv_template,
1747 .count = CAMELLIA_DEC_TEST_VECTORS
1751 }, {
1752 .alg = "ecb(cast5)",
1753 .test = alg_test_skcipher,
1754 .suite = {
1755 .cipher = {
1756 .enc = {
1757 .vecs = cast5_enc_tv_template,
1758 .count = CAST5_ENC_TEST_VECTORS
1760 .dec = {
1761 .vecs = cast5_dec_tv_template,
1762 .count = CAST5_DEC_TEST_VECTORS
1766 }, {
1767 .alg = "ecb(cast6)",
1768 .test = alg_test_skcipher,
1769 .suite = {
1770 .cipher = {
1771 .enc = {
1772 .vecs = cast6_enc_tv_template,
1773 .count = CAST6_ENC_TEST_VECTORS
1775 .dec = {
1776 .vecs = cast6_dec_tv_template,
1777 .count = CAST6_DEC_TEST_VECTORS
1781 }, {
1782 .alg = "ecb(des)",
1783 .test = alg_test_skcipher,
1784 .fips_allowed = 1,
1785 .suite = {
1786 .cipher = {
1787 .enc = {
1788 .vecs = des_enc_tv_template,
1789 .count = DES_ENC_TEST_VECTORS
1791 .dec = {
1792 .vecs = des_dec_tv_template,
1793 .count = DES_DEC_TEST_VECTORS
1797 }, {
1798 .alg = "ecb(des3_ede)",
1799 .test = alg_test_skcipher,
1800 .fips_allowed = 1,
1801 .suite = {
1802 .cipher = {
1803 .enc = {
1804 .vecs = des3_ede_enc_tv_template,
1805 .count = DES3_EDE_ENC_TEST_VECTORS
1807 .dec = {
1808 .vecs = des3_ede_dec_tv_template,
1809 .count = DES3_EDE_DEC_TEST_VECTORS
1813 }, {
1814 .alg = "ecb(khazad)",
1815 .test = alg_test_skcipher,
1816 .suite = {
1817 .cipher = {
1818 .enc = {
1819 .vecs = khazad_enc_tv_template,
1820 .count = KHAZAD_ENC_TEST_VECTORS
1822 .dec = {
1823 .vecs = khazad_dec_tv_template,
1824 .count = KHAZAD_DEC_TEST_VECTORS
1828 }, {
1829 .alg = "ecb(seed)",
1830 .test = alg_test_skcipher,
1831 .suite = {
1832 .cipher = {
1833 .enc = {
1834 .vecs = seed_enc_tv_template,
1835 .count = SEED_ENC_TEST_VECTORS
1837 .dec = {
1838 .vecs = seed_dec_tv_template,
1839 .count = SEED_DEC_TEST_VECTORS
1843 }, {
1844 .alg = "ecb(serpent)",
1845 .test = alg_test_skcipher,
1846 .suite = {
1847 .cipher = {
1848 .enc = {
1849 .vecs = serpent_enc_tv_template,
1850 .count = SERPENT_ENC_TEST_VECTORS
1852 .dec = {
1853 .vecs = serpent_dec_tv_template,
1854 .count = SERPENT_DEC_TEST_VECTORS
1858 }, {
1859 .alg = "ecb(tea)",
1860 .test = alg_test_skcipher,
1861 .suite = {
1862 .cipher = {
1863 .enc = {
1864 .vecs = tea_enc_tv_template,
1865 .count = TEA_ENC_TEST_VECTORS
1867 .dec = {
1868 .vecs = tea_dec_tv_template,
1869 .count = TEA_DEC_TEST_VECTORS
1873 }, {
1874 .alg = "ecb(tnepres)",
1875 .test = alg_test_skcipher,
1876 .suite = {
1877 .cipher = {
1878 .enc = {
1879 .vecs = tnepres_enc_tv_template,
1880 .count = TNEPRES_ENC_TEST_VECTORS
1882 .dec = {
1883 .vecs = tnepres_dec_tv_template,
1884 .count = TNEPRES_DEC_TEST_VECTORS
1888 }, {
1889 .alg = "ecb(twofish)",
1890 .test = alg_test_skcipher,
1891 .suite = {
1892 .cipher = {
1893 .enc = {
1894 .vecs = tf_enc_tv_template,
1895 .count = TF_ENC_TEST_VECTORS
1897 .dec = {
1898 .vecs = tf_dec_tv_template,
1899 .count = TF_DEC_TEST_VECTORS
1903 }, {
1904 .alg = "ecb(xeta)",
1905 .test = alg_test_skcipher,
1906 .suite = {
1907 .cipher = {
1908 .enc = {
1909 .vecs = xeta_enc_tv_template,
1910 .count = XETA_ENC_TEST_VECTORS
1912 .dec = {
1913 .vecs = xeta_dec_tv_template,
1914 .count = XETA_DEC_TEST_VECTORS
1918 }, {
1919 .alg = "ecb(xtea)",
1920 .test = alg_test_skcipher,
1921 .suite = {
1922 .cipher = {
1923 .enc = {
1924 .vecs = xtea_enc_tv_template,
1925 .count = XTEA_ENC_TEST_VECTORS
1927 .dec = {
1928 .vecs = xtea_dec_tv_template,
1929 .count = XTEA_DEC_TEST_VECTORS
1933 }, {
1934 .alg = "gcm(aes)",
1935 .test = alg_test_aead,
1936 .fips_allowed = 1,
1937 .suite = {
1938 .aead = {
1939 .enc = {
1940 .vecs = aes_gcm_enc_tv_template,
1941 .count = AES_GCM_ENC_TEST_VECTORS
1943 .dec = {
1944 .vecs = aes_gcm_dec_tv_template,
1945 .count = AES_GCM_DEC_TEST_VECTORS
1949 }, {
1950 .alg = "hmac(md5)",
1951 .test = alg_test_hash,
1952 .suite = {
1953 .hash = {
1954 .vecs = hmac_md5_tv_template,
1955 .count = HMAC_MD5_TEST_VECTORS
1958 }, {
1959 .alg = "hmac(rmd128)",
1960 .test = alg_test_hash,
1961 .suite = {
1962 .hash = {
1963 .vecs = hmac_rmd128_tv_template,
1964 .count = HMAC_RMD128_TEST_VECTORS
1967 }, {
1968 .alg = "hmac(rmd160)",
1969 .test = alg_test_hash,
1970 .suite = {
1971 .hash = {
1972 .vecs = hmac_rmd160_tv_template,
1973 .count = HMAC_RMD160_TEST_VECTORS
1976 }, {
1977 .alg = "hmac(sha1)",
1978 .test = alg_test_hash,
1979 .fips_allowed = 1,
1980 .suite = {
1981 .hash = {
1982 .vecs = hmac_sha1_tv_template,
1983 .count = HMAC_SHA1_TEST_VECTORS
1986 }, {
1987 .alg = "hmac(sha224)",
1988 .test = alg_test_hash,
1989 .fips_allowed = 1,
1990 .suite = {
1991 .hash = {
1992 .vecs = hmac_sha224_tv_template,
1993 .count = HMAC_SHA224_TEST_VECTORS
1996 }, {
1997 .alg = "hmac(sha256)",
1998 .test = alg_test_hash,
1999 .fips_allowed = 1,
2000 .suite = {
2001 .hash = {
2002 .vecs = hmac_sha256_tv_template,
2003 .count = HMAC_SHA256_TEST_VECTORS
2006 }, {
2007 .alg = "hmac(sha384)",
2008 .test = alg_test_hash,
2009 .fips_allowed = 1,
2010 .suite = {
2011 .hash = {
2012 .vecs = hmac_sha384_tv_template,
2013 .count = HMAC_SHA384_TEST_VECTORS
2016 }, {
2017 .alg = "hmac(sha512)",
2018 .test = alg_test_hash,
2019 .fips_allowed = 1,
2020 .suite = {
2021 .hash = {
2022 .vecs = hmac_sha512_tv_template,
2023 .count = HMAC_SHA512_TEST_VECTORS
2026 }, {
2027 .alg = "lrw(aes)",
2028 .test = alg_test_skcipher,
2029 .suite = {
2030 .cipher = {
2031 .enc = {
2032 .vecs = aes_lrw_enc_tv_template,
2033 .count = AES_LRW_ENC_TEST_VECTORS
2035 .dec = {
2036 .vecs = aes_lrw_dec_tv_template,
2037 .count = AES_LRW_DEC_TEST_VECTORS
2041 }, {
2042 .alg = "lzo",
2043 .test = alg_test_comp,
2044 .suite = {
2045 .comp = {
2046 .comp = {
2047 .vecs = lzo_comp_tv_template,
2048 .count = LZO_COMP_TEST_VECTORS
2050 .decomp = {
2051 .vecs = lzo_decomp_tv_template,
2052 .count = LZO_DECOMP_TEST_VECTORS
2056 }, {
2057 .alg = "md4",
2058 .test = alg_test_hash,
2059 .suite = {
2060 .hash = {
2061 .vecs = md4_tv_template,
2062 .count = MD4_TEST_VECTORS
2065 }, {
2066 .alg = "md5",
2067 .test = alg_test_hash,
2068 .suite = {
2069 .hash = {
2070 .vecs = md5_tv_template,
2071 .count = MD5_TEST_VECTORS
2074 }, {
2075 .alg = "michael_mic",
2076 .test = alg_test_hash,
2077 .suite = {
2078 .hash = {
2079 .vecs = michael_mic_tv_template,
2080 .count = MICHAEL_MIC_TEST_VECTORS
2083 }, {
2084 .alg = "pcbc(fcrypt)",
2085 .test = alg_test_skcipher,
2086 .suite = {
2087 .cipher = {
2088 .enc = {
2089 .vecs = fcrypt_pcbc_enc_tv_template,
2090 .count = FCRYPT_ENC_TEST_VECTORS
2092 .dec = {
2093 .vecs = fcrypt_pcbc_dec_tv_template,
2094 .count = FCRYPT_DEC_TEST_VECTORS
2098 }, {
2099 .alg = "rfc3686(ctr(aes))",
2100 .test = alg_test_skcipher,
2101 .fips_allowed = 1,
2102 .suite = {
2103 .cipher = {
2104 .enc = {
2105 .vecs = aes_ctr_rfc3686_enc_tv_template,
2106 .count = AES_CTR_3686_ENC_TEST_VECTORS
2108 .dec = {
2109 .vecs = aes_ctr_rfc3686_dec_tv_template,
2110 .count = AES_CTR_3686_DEC_TEST_VECTORS
2114 }, {
2115 .alg = "rfc4309(ccm(aes))",
2116 .test = alg_test_aead,
2117 .fips_allowed = 1,
2118 .suite = {
2119 .aead = {
2120 .enc = {
2121 .vecs = aes_ccm_rfc4309_enc_tv_template,
2122 .count = AES_CCM_4309_ENC_TEST_VECTORS
2124 .dec = {
2125 .vecs = aes_ccm_rfc4309_dec_tv_template,
2126 .count = AES_CCM_4309_DEC_TEST_VECTORS
2130 }, {
2131 .alg = "rmd128",
2132 .test = alg_test_hash,
2133 .suite = {
2134 .hash = {
2135 .vecs = rmd128_tv_template,
2136 .count = RMD128_TEST_VECTORS
2139 }, {
2140 .alg = "rmd160",
2141 .test = alg_test_hash,
2142 .suite = {
2143 .hash = {
2144 .vecs = rmd160_tv_template,
2145 .count = RMD160_TEST_VECTORS
2148 }, {
2149 .alg = "rmd256",
2150 .test = alg_test_hash,
2151 .suite = {
2152 .hash = {
2153 .vecs = rmd256_tv_template,
2154 .count = RMD256_TEST_VECTORS
2157 }, {
2158 .alg = "rmd320",
2159 .test = alg_test_hash,
2160 .suite = {
2161 .hash = {
2162 .vecs = rmd320_tv_template,
2163 .count = RMD320_TEST_VECTORS
2166 }, {
2167 .alg = "salsa20",
2168 .test = alg_test_skcipher,
2169 .suite = {
2170 .cipher = {
2171 .enc = {
2172 .vecs = salsa20_stream_enc_tv_template,
2173 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2177 }, {
2178 .alg = "sha1",
2179 .test = alg_test_hash,
2180 .fips_allowed = 1,
2181 .suite = {
2182 .hash = {
2183 .vecs = sha1_tv_template,
2184 .count = SHA1_TEST_VECTORS
2187 }, {
2188 .alg = "sha224",
2189 .test = alg_test_hash,
2190 .fips_allowed = 1,
2191 .suite = {
2192 .hash = {
2193 .vecs = sha224_tv_template,
2194 .count = SHA224_TEST_VECTORS
2197 }, {
2198 .alg = "sha256",
2199 .test = alg_test_hash,
2200 .fips_allowed = 1,
2201 .suite = {
2202 .hash = {
2203 .vecs = sha256_tv_template,
2204 .count = SHA256_TEST_VECTORS
2207 }, {
2208 .alg = "sha384",
2209 .test = alg_test_hash,
2210 .fips_allowed = 1,
2211 .suite = {
2212 .hash = {
2213 .vecs = sha384_tv_template,
2214 .count = SHA384_TEST_VECTORS
2217 }, {
2218 .alg = "sha512",
2219 .test = alg_test_hash,
2220 .fips_allowed = 1,
2221 .suite = {
2222 .hash = {
2223 .vecs = sha512_tv_template,
2224 .count = SHA512_TEST_VECTORS
2227 }, {
2228 .alg = "tgr128",
2229 .test = alg_test_hash,
2230 .suite = {
2231 .hash = {
2232 .vecs = tgr128_tv_template,
2233 .count = TGR128_TEST_VECTORS
2236 }, {
2237 .alg = "tgr160",
2238 .test = alg_test_hash,
2239 .suite = {
2240 .hash = {
2241 .vecs = tgr160_tv_template,
2242 .count = TGR160_TEST_VECTORS
2245 }, {
2246 .alg = "tgr192",
2247 .test = alg_test_hash,
2248 .suite = {
2249 .hash = {
2250 .vecs = tgr192_tv_template,
2251 .count = TGR192_TEST_VECTORS
2254 }, {
2255 .alg = "wp256",
2256 .test = alg_test_hash,
2257 .suite = {
2258 .hash = {
2259 .vecs = wp256_tv_template,
2260 .count = WP256_TEST_VECTORS
2263 }, {
2264 .alg = "wp384",
2265 .test = alg_test_hash,
2266 .suite = {
2267 .hash = {
2268 .vecs = wp384_tv_template,
2269 .count = WP384_TEST_VECTORS
2272 }, {
2273 .alg = "wp512",
2274 .test = alg_test_hash,
2275 .suite = {
2276 .hash = {
2277 .vecs = wp512_tv_template,
2278 .count = WP512_TEST_VECTORS
2281 }, {
2282 .alg = "xcbc(aes)",
2283 .test = alg_test_hash,
2284 .suite = {
2285 .hash = {
2286 .vecs = aes_xcbc128_tv_template,
2287 .count = XCBC_AES_TEST_VECTORS
2290 }, {
2291 .alg = "xts(aes)",
2292 .test = alg_test_skcipher,
2293 .suite = {
2294 .cipher = {
2295 .enc = {
2296 .vecs = aes_xts_enc_tv_template,
2297 .count = AES_XTS_ENC_TEST_VECTORS
2299 .dec = {
2300 .vecs = aes_xts_dec_tv_template,
2301 .count = AES_XTS_DEC_TEST_VECTORS
2305 }, {
2306 .alg = "zlib",
2307 .test = alg_test_pcomp,
2308 .suite = {
2309 .pcomp = {
2310 .comp = {
2311 .vecs = zlib_comp_tv_template,
2312 .count = ZLIB_COMP_TEST_VECTORS
2314 .decomp = {
2315 .vecs = zlib_decomp_tv_template,
2316 .count = ZLIB_DECOMP_TEST_VECTORS
2323 static int alg_find_test(const char *alg)
2325 int start = 0;
2326 int end = ARRAY_SIZE(alg_test_descs);
2328 while (start < end) {
2329 int i = (start + end) / 2;
2330 int diff = strcmp(alg_test_descs[i].alg, alg);
2332 if (diff > 0) {
2333 end = i;
2334 continue;
2337 if (diff < 0) {
2338 start = i + 1;
2339 continue;
2342 return i;
2345 return -1;
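/*
 * alg_find_test() is a plain binary search over alg_test_descs[], which
 * is why the table above must stay sorted by algorithm name.  A miss
 * returns -1, and alg_test() then logs "No test for ..." and returns 0
 * rather than failing the registration.
 */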
2348 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2350 int i;
2351 int rc;
2353 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
2354 char nalg[CRYPTO_MAX_ALG_NAME];
2356 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
2357 sizeof(nalg))
2358 return -ENAMETOOLONG;
2360 i = alg_find_test(nalg);
2361 if (i < 0)
2362 goto notest;
2364 if (fips_enabled && !alg_test_descs[i].fips_allowed)
2365 goto non_fips_alg;
2367 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
2368 goto test_done;
2371 i = alg_find_test(alg);
2372 if (i < 0)
2373 goto notest;
2375 if (fips_enabled && !alg_test_descs[i].fips_allowed)
2376 goto non_fips_alg;
2378 rc = alg_test_descs[i].test(alg_test_descs + i, driver,
2379 type, mask);
2380 test_done:
2381 if (fips_enabled && rc)
2382 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2384 if (fips_enabled && !rc)
2385 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
2386 driver, alg);
2388 return rc;
2390 notest:
2391 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2392 return 0;
2393 non_fips_alg:
2394 return -EINVAL;
2396 EXPORT_SYMBOL_GPL(alg_test);
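/*
 * alg_test() is the exported entry point; the kernel's cryptomgr invokes
 * it when a new algorithm instance is registered.  A minimal sketch of a
 * call site (driver name and flags illustrative only):
 *
 *	int err = alg_test("cbc(aes-generic)", "cbc(aes)", 0, 0);
 *	if (err)
 *		pr_err("alg: cbc(aes) self-test failed: %d\n", err);
 *
 * A return of 0 means either the tests passed or no test vectors exist
 * for the name; in fips mode a failing self-test panics instead of
 * returning.
 */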