crypto: testmgr - don't print info about missing test for gcm-aes-aesni
crypto/testmgr.c
1 /*
2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <linux/err.h>
26 #include <linux/fips.h>
27 #include <linux/module.h>
28 #include <linux/scatterlist.h>
29 #include <linux/slab.h>
30 #include <linux/string.h>
31 #include <crypto/rng.h>
32 #include <crypto/drbg.h>
33 #include <crypto/akcipher.h>
35 #include "internal.h"
37 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
39 /* a perfect nop */
40 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
42 return 0;
45 #else
47 #include "testmgr.h"
50 * Need slab memory for testing (size in number of pages).
52 #define XBUFSIZE 8
55 * Indexes into the xbuf to simulate cross-page access.
57 #define IDX1 32
58 #define IDX2 32400
59 #define IDX3 1
60 #define IDX4 8193
61 #define IDX5 22222
62 #define IDX6 17101
63 #define IDX7 27333
64 #define IDX8 3000
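/*
 * Each IDX value encodes a page index (IDX[k] >> PAGE_SHIFT) and an offset
 * within that page (offset_in_page(IDX[k])), so chunked test vectors are
 * copied to scattered, unaligned positions across the xbuf pages.
 */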
67 * Used by test_cipher()
69 #define ENCRYPT 1
70 #define DECRYPT 0
72 struct tcrypt_result {
73 struct completion completion;
74 int err;
77 struct aead_test_suite {
78 struct {
79 struct aead_testvec *vecs;
80 unsigned int count;
81 } enc, dec;
84 struct cipher_test_suite {
85 struct {
86 struct cipher_testvec *vecs;
87 unsigned int count;
88 } enc, dec;
91 struct comp_test_suite {
92 struct {
93 struct comp_testvec *vecs;
94 unsigned int count;
95 } comp, decomp;
98 struct pcomp_test_suite {
99 struct {
100 struct pcomp_testvec *vecs;
101 unsigned int count;
102 } comp, decomp;
105 struct hash_test_suite {
106 struct hash_testvec *vecs;
107 unsigned int count;
110 struct cprng_test_suite {
111 struct cprng_testvec *vecs;
112 unsigned int count;
115 struct drbg_test_suite {
116 struct drbg_testvec *vecs;
117 unsigned int count;
120 struct akcipher_test_suite {
121 struct akcipher_testvec *vecs;
122 unsigned int count;
125 struct alg_test_desc {
126 const char *alg;
127 int (*test)(const struct alg_test_desc *desc, const char *driver,
128 u32 type, u32 mask);
129 int fips_allowed; /* set if alg is allowed in fips mode */
131 union {
132 struct aead_test_suite aead;
133 struct cipher_test_suite cipher;
134 struct comp_test_suite comp;
135 struct pcomp_test_suite pcomp;
136 struct hash_test_suite hash;
137 struct cprng_test_suite cprng;
138 struct drbg_test_suite drbg;
139 struct akcipher_test_suite akcipher;
140 } suite;
143 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
145 static void hexdump(unsigned char *buf, unsigned int len)
147 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
148 16, 1,
149 buf, len, false);
152 static void tcrypt_complete(struct crypto_async_request *req, int err)
154 struct tcrypt_result *res = req->data;
156 if (err == -EINPROGRESS)
157 return;
159 res->err = err;
160 complete(&res->completion);
163 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
165 int i;
167 for (i = 0; i < XBUFSIZE; i++) {
168 buf[i] = (void *)__get_free_page(GFP_KERNEL);
169 if (!buf[i])
170 goto err_free_buf;
173 return 0;
175 err_free_buf:
176 while (i-- > 0)
177 free_page((unsigned long)buf[i]);
179 return -ENOMEM;
182 static void testmgr_free_buf(char *buf[XBUFSIZE])
184 int i;
186 for (i = 0; i < XBUFSIZE; i++)
187 free_page((unsigned long)buf[i]);
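/*
 * Collapse an asynchronous crypto return code into a synchronous one: if the
 * request was queued, wait for the completion and report the final error.
 */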
190 static int wait_async_op(struct tcrypt_result *tr, int ret)
192 if (ret == -EINPROGRESS || ret == -EBUSY) {
193 wait_for_completion(&tr->completion);
194 reinit_completion(&tr->completion);
195 ret = tr->err;
197 return ret;
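/*
 * Run the hash test vectors against one ahash transform.  Single-segment
 * vectors (np == 0) are hashed from a (possibly misaligned) linear buffer;
 * vectors with np chunks are split across scatterlist entries at the IDX
 * offsets to exercise cross-page walks.
 */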
200 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
201 unsigned int tcount, bool use_digest,
202 const int align_offset)
204 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
205 unsigned int i, j, k, temp;
206 struct scatterlist sg[8];
207 char *result;
208 char *key;
209 struct ahash_request *req;
210 struct tcrypt_result tresult;
211 void *hash_buff;
212 char *xbuf[XBUFSIZE];
213 int ret = -ENOMEM;
215 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
216 if (!result)
217 return ret;
218 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
219 if (!key)
220 goto out_nobuf;
221 if (testmgr_alloc_buf(xbuf))
222 goto out_nobuf;
224 init_completion(&tresult.completion);
226 req = ahash_request_alloc(tfm, GFP_KERNEL);
227 if (!req) {
228 printk(KERN_ERR "alg: hash: Failed to allocate request for "
229 "%s\n", algo);
230 goto out_noreq;
232 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
233 tcrypt_complete, &tresult);
235 j = 0;
236 for (i = 0; i < tcount; i++) {
237 if (template[i].np)
238 continue;
240 ret = -EINVAL;
241 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
242 goto out;
244 j++;
245 memset(result, 0, MAX_DIGEST_SIZE);
247 hash_buff = xbuf[0];
248 hash_buff += align_offset;
250 memcpy(hash_buff, template[i].plaintext, template[i].psize);
251 sg_init_one(&sg[0], hash_buff, template[i].psize);
253 if (template[i].ksize) {
254 crypto_ahash_clear_flags(tfm, ~0);
255 if (template[i].ksize > MAX_KEYLEN) {
256 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
257 j, algo, template[i].ksize, MAX_KEYLEN);
258 ret = -EINVAL;
259 goto out;
261 memcpy(key, template[i].key, template[i].ksize);
262 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
263 if (ret) {
264 printk(KERN_ERR "alg: hash: setkey failed on "
265 "test %d for %s: ret=%d\n", j, algo,
266 -ret);
267 goto out;
271 ahash_request_set_crypt(req, sg, result, template[i].psize);
272 if (use_digest) {
273 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
274 if (ret) {
275 pr_err("alg: hash: digest failed on test %d "
276 "for %s: ret=%d\n", j, algo, -ret);
277 goto out;
279 } else {
280 ret = wait_async_op(&tresult, crypto_ahash_init(req));
281 if (ret) {
282 pr_err("alt: hash: init failed on test %d "
283 "for %s: ret=%d\n", j, algo, -ret);
284 goto out;
286 ret = wait_async_op(&tresult, crypto_ahash_update(req));
287 if (ret) {
288 pr_err("alt: hash: update failed on test %d "
289 "for %s: ret=%d\n", j, algo, -ret);
290 goto out;
292 ret = wait_async_op(&tresult, crypto_ahash_final(req));
293 if (ret) {
294 pr_err("alt: hash: final failed on test %d "
295 "for %s: ret=%d\n", j, algo, -ret);
296 goto out;
300 if (memcmp(result, template[i].digest,
301 crypto_ahash_digestsize(tfm))) {
302 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
303 j, algo);
304 hexdump(result, crypto_ahash_digestsize(tfm));
305 ret = -EINVAL;
306 goto out;
310 j = 0;
311 for (i = 0; i < tcount; i++) {
312 /* alignment tests are only done with continuous buffers */
313 if (align_offset != 0)
314 break;
316 if (!template[i].np)
317 continue;
319 j++;
320 memset(result, 0, MAX_DIGEST_SIZE);
322 temp = 0;
323 sg_init_table(sg, template[i].np);
324 ret = -EINVAL;
325 for (k = 0; k < template[i].np; k++) {
326 if (WARN_ON(offset_in_page(IDX[k]) +
327 template[i].tap[k] > PAGE_SIZE))
328 goto out;
329 sg_set_buf(&sg[k],
330 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
331 offset_in_page(IDX[k]),
332 template[i].plaintext + temp,
333 template[i].tap[k]),
334 template[i].tap[k]);
335 temp += template[i].tap[k];
338 if (template[i].ksize) {
339 if (template[i].ksize > MAX_KEYLEN) {
340 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
341 j, algo, template[i].ksize, MAX_KEYLEN);
342 ret = -EINVAL;
343 goto out;
345 crypto_ahash_clear_flags(tfm, ~0);
346 memcpy(key, template[i].key, template[i].ksize);
347 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
349 if (ret) {
350 printk(KERN_ERR "alg: hash: setkey "
351 "failed on chunking test %d "
352 "for %s: ret=%d\n", j, algo, -ret);
353 goto out;
357 ahash_request_set_crypt(req, sg, result, template[i].psize);
358 ret = crypto_ahash_digest(req);
359 switch (ret) {
360 case 0:
361 break;
362 case -EINPROGRESS:
363 case -EBUSY:
364 wait_for_completion(&tresult.completion);
365 reinit_completion(&tresult.completion);
366 ret = tresult.err;
367 if (!ret)
368 break;
369 /* fall through */
370 default:
371 printk(KERN_ERR "alg: hash: digest failed "
372 "on chunking test %d for %s: "
373 "ret=%d\n", j, algo, -ret);
374 goto out;
377 if (memcmp(result, template[i].digest,
378 crypto_ahash_digestsize(tfm))) {
379 printk(KERN_ERR "alg: hash: Chunking test %d "
380 "failed for %s\n", j, algo);
381 hexdump(result, crypto_ahash_digestsize(tfm));
382 ret = -EINVAL;
383 goto out;
387 ret = 0;
389 out:
390 ahash_request_free(req);
391 out_noreq:
392 testmgr_free_buf(xbuf);
393 out_nobuf:
394 kfree(key);
395 kfree(result);
396 return ret;
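/*
 * Wrapper around __test_hash(): run once aligned, once with a one-byte
 * offset, and once just past the transform's advertised alignment mask.
 */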
399 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
400 unsigned int tcount, bool use_digest)
402 unsigned int alignmask;
403 int ret;
405 ret = __test_hash(tfm, template, tcount, use_digest, 0);
406 if (ret)
407 return ret;
409 /* test unaligned buffers, check with one byte offset */
410 ret = __test_hash(tfm, template, tcount, use_digest, 1);
411 if (ret)
412 return ret;
414 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
415 if (alignmask) {
416 /* Check if alignment mask for tfm is correctly set. */
417 ret = __test_hash(tfm, template, tcount, use_digest,
418 alignmask + 1);
419 if (ret)
420 return ret;
423 return 0;
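/*
 * Core AEAD test loop.  The first pass covers linear (non-chunked) vectors,
 * optionally with dst != src and a deliberate misalignment; the second pass
 * rebuilds each vector as a scatterlist split at the tap[]/atap[] boundaries
 * and checks the output page by page, including the authentication tag.
 */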
426 static int __test_aead(struct crypto_aead *tfm, int enc,
427 struct aead_testvec *template, unsigned int tcount,
428 const bool diff_dst, const int align_offset)
430 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
431 unsigned int i, j, k, n, temp;
432 int ret = -ENOMEM;
433 char *q;
434 char *key;
435 struct aead_request *req;
436 struct scatterlist *sg;
437 struct scatterlist *sgout;
438 const char *e, *d;
439 struct tcrypt_result result;
440 unsigned int authsize, iv_len;
441 void *input;
442 void *output;
443 void *assoc;
444 char *iv;
445 char *xbuf[XBUFSIZE];
446 char *xoutbuf[XBUFSIZE];
447 char *axbuf[XBUFSIZE];
449 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
450 if (!iv)
451 return ret;
452 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
453 if (!key)
454 goto out_noxbuf;
455 if (testmgr_alloc_buf(xbuf))
456 goto out_noxbuf;
457 if (testmgr_alloc_buf(axbuf))
458 goto out_noaxbuf;
459 if (diff_dst && testmgr_alloc_buf(xoutbuf))
460 goto out_nooutbuf;
462 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
463 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
464 if (!sg)
465 goto out_nosg;
466 sgout = &sg[16];
468 if (diff_dst)
469 d = "-ddst";
470 else
471 d = "";
473 if (enc == ENCRYPT)
474 e = "encryption";
475 else
476 e = "decryption";
478 init_completion(&result.completion);
480 req = aead_request_alloc(tfm, GFP_KERNEL);
481 if (!req) {
482 pr_err("alg: aead%s: Failed to allocate request for %s\n",
483 d, algo);
484 goto out;
487 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
488 tcrypt_complete, &result);
490 for (i = 0, j = 0; i < tcount; i++) {
491 if (template[i].np)
492 continue;
494 j++;
496 /* some templates have no input data but they will
497 * touch input */
499 input = xbuf[0];
500 input += align_offset;
501 assoc = axbuf[0];
503 ret = -EINVAL;
504 if (WARN_ON(align_offset + template[i].ilen >
505 PAGE_SIZE || template[i].alen > PAGE_SIZE))
506 goto out;
508 memcpy(input, template[i].input, template[i].ilen);
509 memcpy(assoc, template[i].assoc, template[i].alen);
510 iv_len = crypto_aead_ivsize(tfm);
511 if (template[i].iv)
512 memcpy(iv, template[i].iv, iv_len);
513 else
514 memset(iv, 0, iv_len);
516 crypto_aead_clear_flags(tfm, ~0);
517 if (template[i].wk)
518 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
520 if (template[i].klen > MAX_KEYLEN) {
521 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
522 d, j, algo, template[i].klen,
523 MAX_KEYLEN);
524 ret = -EINVAL;
525 goto out;
527 memcpy(key, template[i].key, template[i].klen);
529 ret = crypto_aead_setkey(tfm, key, template[i].klen);
530 if (!ret == template[i].fail) {
531 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
532 d, j, algo, crypto_aead_get_flags(tfm));
533 goto out;
534 } else if (ret)
535 continue;
537 authsize = abs(template[i].rlen - template[i].ilen);
538 ret = crypto_aead_setauthsize(tfm, authsize);
539 if (ret) {
540 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
541 d, authsize, j, algo);
542 goto out;
545 k = !!template[i].alen;
546 sg_init_table(sg, k + 1);
547 sg_set_buf(&sg[0], assoc, template[i].alen);
548 sg_set_buf(&sg[k], input,
549 template[i].ilen + (enc ? authsize : 0));
550 output = input;
552 if (diff_dst) {
553 sg_init_table(sgout, k + 1);
554 sg_set_buf(&sgout[0], assoc, template[i].alen);
556 output = xoutbuf[0];
557 output += align_offset;
558 sg_set_buf(&sgout[k], output,
559 template[i].rlen + (enc ? 0 : authsize));
562 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
563 template[i].ilen, iv);
565 aead_request_set_ad(req, template[i].alen);
567 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
569 switch (ret) {
570 case 0:
571 if (template[i].novrfy) {
572 /* verification was supposed to fail */
573 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
574 d, e, j, algo);
575 /* so really, we got a bad message */
576 ret = -EBADMSG;
577 goto out;
579 break;
580 case -EINPROGRESS:
581 case -EBUSY:
582 wait_for_completion(&result.completion);
583 reinit_completion(&result.completion);
584 ret = result.err;
585 if (!ret)
586 break;
587 case -EBADMSG:
588 if (template[i].novrfy)
589 /* verification failure was expected */
590 continue;
591 /* fall through */
592 default:
593 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
594 d, e, j, algo, -ret);
595 goto out;
598 q = output;
599 if (memcmp(q, template[i].result, template[i].rlen)) {
600 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
601 d, j, e, algo);
602 hexdump(q, template[i].rlen);
603 ret = -EINVAL;
604 goto out;
608 for (i = 0, j = 0; i < tcount; i++) {
609 /* alignment tests are only done with continuous buffers */
610 if (align_offset != 0)
611 break;
613 if (!template[i].np)
614 continue;
616 j++;
618 if (template[i].iv)
619 memcpy(iv, template[i].iv, MAX_IVLEN);
620 else
621 memset(iv, 0, MAX_IVLEN);
623 crypto_aead_clear_flags(tfm, ~0);
624 if (template[i].wk)
625 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
626 if (template[i].klen > MAX_KEYLEN) {
627 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
628 d, j, algo, template[i].klen, MAX_KEYLEN);
629 ret = -EINVAL;
630 goto out;
632 memcpy(key, template[i].key, template[i].klen);
634 ret = crypto_aead_setkey(tfm, key, template[i].klen);
635 if (!ret == template[i].fail) {
636 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
637 d, j, algo, crypto_aead_get_flags(tfm));
638 goto out;
639 } else if (ret)
640 continue;
642 authsize = abs(template[i].rlen - template[i].ilen);
644 ret = -EINVAL;
645 sg_init_table(sg, template[i].anp + template[i].np);
646 if (diff_dst)
647 sg_init_table(sgout, template[i].anp + template[i].np);
649 ret = -EINVAL;
650 for (k = 0, temp = 0; k < template[i].anp; k++) {
651 if (WARN_ON(offset_in_page(IDX[k]) +
652 template[i].atap[k] > PAGE_SIZE))
653 goto out;
654 sg_set_buf(&sg[k],
655 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
656 offset_in_page(IDX[k]),
657 template[i].assoc + temp,
658 template[i].atap[k]),
659 template[i].atap[k]);
660 if (diff_dst)
661 sg_set_buf(&sgout[k],
662 axbuf[IDX[k] >> PAGE_SHIFT] +
663 offset_in_page(IDX[k]),
664 template[i].atap[k]);
665 temp += template[i].atap[k];
668 for (k = 0, temp = 0; k < template[i].np; k++) {
669 if (WARN_ON(offset_in_page(IDX[k]) +
670 template[i].tap[k] > PAGE_SIZE))
671 goto out;
673 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
674 memcpy(q, template[i].input + temp, template[i].tap[k]);
675 sg_set_buf(&sg[template[i].anp + k],
676 q, template[i].tap[k]);
678 if (diff_dst) {
679 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
680 offset_in_page(IDX[k]);
682 memset(q, 0, template[i].tap[k]);
684 sg_set_buf(&sgout[template[i].anp + k],
685 q, template[i].tap[k]);
688 n = template[i].tap[k];
689 if (k == template[i].np - 1 && enc)
690 n += authsize;
691 if (offset_in_page(q) + n < PAGE_SIZE)
692 q[n] = 0;
694 temp += template[i].tap[k];
697 ret = crypto_aead_setauthsize(tfm, authsize);
698 if (ret) {
699 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
700 d, authsize, j, algo);
701 goto out;
704 if (enc) {
705 if (WARN_ON(sg[template[i].anp + k - 1].offset +
706 sg[template[i].anp + k - 1].length +
707 authsize > PAGE_SIZE)) {
708 ret = -EINVAL;
709 goto out;
712 if (diff_dst)
713 sgout[template[i].anp + k - 1].length +=
714 authsize;
715 sg[template[i].anp + k - 1].length += authsize;
718 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
719 template[i].ilen,
720 iv);
722 aead_request_set_ad(req, template[i].alen);
724 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
726 switch (ret) {
727 case 0:
728 if (template[i].novrfy) {
729 /* verification was supposed to fail */
730 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
731 d, e, j, algo);
732 /* so really, we got a bad message */
733 ret = -EBADMSG;
734 goto out;
736 break;
737 case -EINPROGRESS:
738 case -EBUSY:
739 wait_for_completion(&result.completion);
740 reinit_completion(&result.completion);
741 ret = result.err;
742 if (!ret)
743 break;
744 case -EBADMSG:
745 if (template[i].novrfy)
746 /* verification failure was expected */
747 continue;
748 /* fall through */
749 default:
750 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
751 d, e, j, algo, -ret);
752 goto out;
755 ret = -EINVAL;
756 for (k = 0, temp = 0; k < template[i].np; k++) {
757 if (diff_dst)
758 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
759 offset_in_page(IDX[k]);
760 else
761 q = xbuf[IDX[k] >> PAGE_SHIFT] +
762 offset_in_page(IDX[k]);
764 n = template[i].tap[k];
765 if (k == template[i].np - 1)
766 n += enc ? authsize : -authsize;
768 if (memcmp(q, template[i].result + temp, n)) {
769 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
770 d, j, e, k, algo);
771 hexdump(q, n);
772 goto out;
775 q += n;
776 if (k == template[i].np - 1 && !enc) {
777 if (!diff_dst &&
778 memcmp(q, template[i].input +
779 temp + n, authsize))
780 n = authsize;
781 else
782 n = 0;
783 } else {
784 for (n = 0; offset_in_page(q + n) && q[n]; n++)
787 if (n) {
788 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
789 d, j, e, k, algo, n);
790 hexdump(q, n);
791 goto out;
794 temp += template[i].tap[k];
798 ret = 0;
800 out:
801 aead_request_free(req);
802 kfree(sg);
803 out_nosg:
804 if (diff_dst)
805 testmgr_free_buf(xoutbuf);
806 out_nooutbuf:
807 testmgr_free_buf(axbuf);
808 out_noaxbuf:
809 testmgr_free_buf(xbuf);
810 out_noxbuf:
811 kfree(key);
812 kfree(iv);
813 return ret;
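/*
 * Exercise __test_aead() with dst == src, dst != src, a one-byte offset and
 * the transform's alignment mask, mirroring test_hash()/test_skcipher().
 */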
816 static int test_aead(struct crypto_aead *tfm, int enc,
817 struct aead_testvec *template, unsigned int tcount)
819 unsigned int alignmask;
820 int ret;
822 /* test 'dst == src' case */
823 ret = __test_aead(tfm, enc, template, tcount, false, 0);
824 if (ret)
825 return ret;
827 /* test 'dst != src' case */
828 ret = __test_aead(tfm, enc, template, tcount, true, 0);
829 if (ret)
830 return ret;
832 /* test unaligned buffers, check with one byte offset */
833 ret = __test_aead(tfm, enc, template, tcount, true, 1);
834 if (ret)
835 return ret;
837 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
838 if (alignmask) {
839 /* Check if alignment mask for tfm is correctly set. */
840 ret = __test_aead(tfm, enc, template, tcount, true,
841 alignmask + 1);
842 if (ret)
843 return ret;
846 return 0;
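/*
 * Test a single-block cipher: run each vector one block at a time through
 * crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one() and compare the
 * output against the expected result.
 */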
849 static int test_cipher(struct crypto_cipher *tfm, int enc,
850 struct cipher_testvec *template, unsigned int tcount)
852 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
853 unsigned int i, j, k;
854 char *q;
855 const char *e;
856 void *data;
857 char *xbuf[XBUFSIZE];
858 int ret = -ENOMEM;
860 if (testmgr_alloc_buf(xbuf))
861 goto out_nobuf;
863 if (enc == ENCRYPT)
864 e = "encryption";
865 else
866 e = "decryption";
868 j = 0;
869 for (i = 0; i < tcount; i++) {
870 if (template[i].np)
871 continue;
873 j++;
875 ret = -EINVAL;
876 if (WARN_ON(template[i].ilen > PAGE_SIZE))
877 goto out;
879 data = xbuf[0];
880 memcpy(data, template[i].input, template[i].ilen);
882 crypto_cipher_clear_flags(tfm, ~0);
883 if (template[i].wk)
884 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
886 ret = crypto_cipher_setkey(tfm, template[i].key,
887 template[i].klen);
888 if (!ret == template[i].fail) {
889 printk(KERN_ERR "alg: cipher: setkey failed "
890 "on test %d for %s: flags=%x\n", j,
891 algo, crypto_cipher_get_flags(tfm));
892 goto out;
893 } else if (ret)
894 continue;
896 for (k = 0; k < template[i].ilen;
897 k += crypto_cipher_blocksize(tfm)) {
898 if (enc)
899 crypto_cipher_encrypt_one(tfm, data + k,
900 data + k);
901 else
902 crypto_cipher_decrypt_one(tfm, data + k,
903 data + k);
906 q = data;
907 if (memcmp(q, template[i].result, template[i].rlen)) {
908 printk(KERN_ERR "alg: cipher: Test %d failed "
909 "on %s for %s\n", j, e, algo);
910 hexdump(q, template[i].rlen);
911 ret = -EINVAL;
912 goto out;
916 ret = 0;
918 out:
919 testmgr_free_buf(xbuf);
920 out_nobuf:
921 return ret;
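/*
 * Core skcipher test loop, analogous to __test_aead(): linear vectors first
 * (with optional separate destination and misalignment), then chunked
 * vectors scattered across pages at the IDX offsets.
 */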
924 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
925 struct cipher_testvec *template, unsigned int tcount,
926 const bool diff_dst, const int align_offset)
928 const char *algo =
929 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
930 unsigned int i, j, k, n, temp;
931 char *q;
932 struct ablkcipher_request *req;
933 struct scatterlist sg[8];
934 struct scatterlist sgout[8];
935 const char *e, *d;
936 struct tcrypt_result result;
937 void *data;
938 char iv[MAX_IVLEN];
939 char *xbuf[XBUFSIZE];
940 char *xoutbuf[XBUFSIZE];
941 int ret = -ENOMEM;
943 if (testmgr_alloc_buf(xbuf))
944 goto out_nobuf;
946 if (diff_dst && testmgr_alloc_buf(xoutbuf))
947 goto out_nooutbuf;
949 if (diff_dst)
950 d = "-ddst";
951 else
952 d = "";
954 if (enc == ENCRYPT)
955 e = "encryption";
956 else
957 e = "decryption";
959 init_completion(&result.completion);
961 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
962 if (!req) {
963 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
964 d, algo);
965 goto out;
968 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
969 tcrypt_complete, &result);
971 j = 0;
972 for (i = 0; i < tcount; i++) {
973 if (template[i].np && !template[i].also_non_np)
974 continue;
976 if (template[i].iv)
977 memcpy(iv, template[i].iv, MAX_IVLEN);
978 else
979 memset(iv, 0, MAX_IVLEN);
981 j++;
982 ret = -EINVAL;
983 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
984 goto out;
986 data = xbuf[0];
987 data += align_offset;
988 memcpy(data, template[i].input, template[i].ilen);
990 crypto_ablkcipher_clear_flags(tfm, ~0);
991 if (template[i].wk)
992 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
994 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
995 template[i].klen);
996 if (!ret == template[i].fail) {
997 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
998 d, j, algo, crypto_ablkcipher_get_flags(tfm));
999 goto out;
1000 } else if (ret)
1001 continue;
1003 sg_init_one(&sg[0], data, template[i].ilen);
1004 if (diff_dst) {
1005 data = xoutbuf[0];
1006 data += align_offset;
1007 sg_init_one(&sgout[0], data, template[i].ilen);
1010 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1011 template[i].ilen, iv);
1012 ret = enc ? crypto_ablkcipher_encrypt(req) :
1013 crypto_ablkcipher_decrypt(req);
1015 switch (ret) {
1016 case 0:
1017 break;
1018 case -EINPROGRESS:
1019 case -EBUSY:
1020 wait_for_completion(&result.completion);
1021 reinit_completion(&result.completion);
1022 ret = result.err;
1023 if (!ret)
1024 break;
1025 /* fall through */
1026 default:
1027 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1028 d, e, j, algo, -ret);
1029 goto out;
1032 q = data;
1033 if (memcmp(q, template[i].result, template[i].rlen)) {
1034 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
1035 d, j, e, algo);
1036 hexdump(q, template[i].rlen);
1037 ret = -EINVAL;
1038 goto out;
1042 j = 0;
1043 for (i = 0; i < tcount; i++) {
1044 /* alignment tests are only done with continuous buffers */
1045 if (align_offset != 0)
1046 break;
1048 if (!template[i].np)
1049 continue;
1051 if (template[i].iv)
1052 memcpy(iv, template[i].iv, MAX_IVLEN);
1053 else
1054 memset(iv, 0, MAX_IVLEN);
1056 j++;
1057 crypto_ablkcipher_clear_flags(tfm, ~0);
1058 if (template[i].wk)
1059 crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1061 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1062 template[i].klen);
1063 if (!ret == template[i].fail) {
1064 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1065 d, j, algo, crypto_ablkcipher_get_flags(tfm));
1066 goto out;
1067 } else if (ret)
1068 continue;
1070 temp = 0;
1071 ret = -EINVAL;
1072 sg_init_table(sg, template[i].np);
1073 if (diff_dst)
1074 sg_init_table(sgout, template[i].np);
1075 for (k = 0; k < template[i].np; k++) {
1076 if (WARN_ON(offset_in_page(IDX[k]) +
1077 template[i].tap[k] > PAGE_SIZE))
1078 goto out;
1080 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1082 memcpy(q, template[i].input + temp, template[i].tap[k]);
1084 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1085 q[template[i].tap[k]] = 0;
1087 sg_set_buf(&sg[k], q, template[i].tap[k]);
1088 if (diff_dst) {
1089 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1090 offset_in_page(IDX[k]);
1092 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1094 memset(q, 0, template[i].tap[k]);
1095 if (offset_in_page(q) +
1096 template[i].tap[k] < PAGE_SIZE)
1097 q[template[i].tap[k]] = 0;
1100 temp += template[i].tap[k];
1103 ablkcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1104 template[i].ilen, iv);
1106 ret = enc ? crypto_ablkcipher_encrypt(req) :
1107 crypto_ablkcipher_decrypt(req);
1109 switch (ret) {
1110 case 0:
1111 break;
1112 case -EINPROGRESS:
1113 case -EBUSY:
1114 wait_for_completion(&result.completion);
1115 reinit_completion(&result.completion);
1116 ret = result.err;
1117 if (!ret)
1118 break;
1119 /* fall through */
1120 default:
1121 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1122 d, e, j, algo, -ret);
1123 goto out;
1126 temp = 0;
1127 ret = -EINVAL;
1128 for (k = 0; k < template[i].np; k++) {
1129 if (diff_dst)
1130 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1131 offset_in_page(IDX[k]);
1132 else
1133 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1134 offset_in_page(IDX[k]);
1136 if (memcmp(q, template[i].result + temp,
1137 template[i].tap[k])) {
1138 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1139 d, j, e, k, algo);
1140 hexdump(q, template[i].tap[k]);
1141 goto out;
1144 q += template[i].tap[k];
1145 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1147 if (n) {
1148 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1149 d, j, e, k, algo, n);
1150 hexdump(q, n);
1151 goto out;
1153 temp += template[i].tap[k];
1157 ret = 0;
1159 out:
1160 ablkcipher_request_free(req);
1161 if (diff_dst)
1162 testmgr_free_buf(xoutbuf);
1163 out_nooutbuf:
1164 testmgr_free_buf(xbuf);
1165 out_nobuf:
1166 return ret;
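/*
 * Run __test_skcipher() for dst == src, dst != src, a one-byte offset and
 * the transform's alignment mask.
 */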
1169 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1170 struct cipher_testvec *template, unsigned int tcount)
1172 unsigned int alignmask;
1173 int ret;
1175 /* test 'dst == src' case */
1176 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1177 if (ret)
1178 return ret;
1180 /* test 'dst != src' case */
1181 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1182 if (ret)
1183 return ret;
1185 /* test unaligned buffers, check with one byte offset */
1186 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1187 if (ret)
1188 return ret;
1190 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1191 if (alignmask) {
1192 /* Check if alignment mask for tfm is correctly set. */
1193 ret = __test_skcipher(tfm, enc, template, tcount, true,
1194 alignmask + 1);
1195 if (ret)
1196 return ret;
1199 return 0;
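/*
 * One-shot compression/decompression test: compress (or decompress) each
 * vector into a COMP_BUF_SIZE buffer and check both the output length and
 * the output bytes.
 */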
1202 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1203 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1205 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1206 unsigned int i;
1207 char result[COMP_BUF_SIZE];
1208 int ret;
1210 for (i = 0; i < ctcount; i++) {
1211 int ilen;
1212 unsigned int dlen = COMP_BUF_SIZE;
1214 memset(result, 0, sizeof (result));
1216 ilen = ctemplate[i].inlen;
1217 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1218 ilen, result, &dlen);
1219 if (ret) {
1220 printk(KERN_ERR "alg: comp: compression failed "
1221 "on test %d for %s: ret=%d\n", i + 1, algo,
1222 -ret);
1223 goto out;
1226 if (dlen != ctemplate[i].outlen) {
1227 printk(KERN_ERR "alg: comp: Compression test %d "
1228 "failed for %s: output len = %d\n", i + 1, algo,
1229 dlen);
1230 ret = -EINVAL;
1231 goto out;
1234 if (memcmp(result, ctemplate[i].output, dlen)) {
1235 printk(KERN_ERR "alg: comp: Compression test %d "
1236 "failed for %s\n", i + 1, algo);
1237 hexdump(result, dlen);
1238 ret = -EINVAL;
1239 goto out;
1243 for (i = 0; i < dtcount; i++) {
1244 int ilen;
1245 unsigned int dlen = COMP_BUF_SIZE;
1247 memset(result, 0, sizeof (result));
1249 ilen = dtemplate[i].inlen;
1250 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1251 ilen, result, &dlen);
1252 if (ret) {
1253 printk(KERN_ERR "alg: comp: decompression failed "
1254 "on test %d for %s: ret=%d\n", i + 1, algo,
1255 -ret);
1256 goto out;
1259 if (dlen != dtemplate[i].outlen) {
1260 printk(KERN_ERR "alg: comp: Decompression test %d "
1261 "failed for %s: output len = %d\n", i + 1, algo,
1262 dlen);
1263 ret = -EINVAL;
1264 goto out;
1267 if (memcmp(result, dtemplate[i].output, dlen)) {
1268 printk(KERN_ERR "alg: comp: Decompression test %d "
1269 "failed for %s\n", i + 1, algo);
1270 hexdump(result, dlen);
1271 ret = -EINVAL;
1272 goto out;
1276 ret = 0;
1278 out:
1279 return ret;
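/*
 * Partial (streaming) compression test: feed each vector in two halves via
 * crypto_compress_update()/crypto_decompress_update(), finish with the
 * corresponding *_final() call, and verify both the produced length and the
 * output bytes against the template.
 */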
1282 static int test_pcomp(struct crypto_pcomp *tfm,
1283 struct pcomp_testvec *ctemplate,
1284 struct pcomp_testvec *dtemplate, int ctcount,
1285 int dtcount)
1287 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1288 unsigned int i;
1289 char result[COMP_BUF_SIZE];
1290 int res;
1292 for (i = 0; i < ctcount; i++) {
1293 struct comp_request req;
1294 unsigned int produced = 0;
1296 res = crypto_compress_setup(tfm, ctemplate[i].params,
1297 ctemplate[i].paramsize);
1298 if (res) {
1299 pr_err("alg: pcomp: compression setup failed on test "
1300 "%d for %s: error=%d\n", i + 1, algo, res);
1301 return res;
1304 res = crypto_compress_init(tfm);
1305 if (res) {
1306 pr_err("alg: pcomp: compression init failed on test "
1307 "%d for %s: error=%d\n", i + 1, algo, res);
1308 return res;
1311 memset(result, 0, sizeof(result));
1313 req.next_in = ctemplate[i].input;
1314 req.avail_in = ctemplate[i].inlen / 2;
1315 req.next_out = result;
1316 req.avail_out = ctemplate[i].outlen / 2;
1318 res = crypto_compress_update(tfm, &req);
1319 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1320 pr_err("alg: pcomp: compression update failed on test "
1321 "%d for %s: error=%d\n", i + 1, algo, res);
1322 return res;
1324 if (res > 0)
1325 produced += res;
1327 /* Add remaining input data */
1328 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1330 res = crypto_compress_update(tfm, &req);
1331 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1332 pr_err("alg: pcomp: compression update failed on test "
1333 "%d for %s: error=%d\n", i + 1, algo, res);
1334 return res;
1336 if (res > 0)
1337 produced += res;
1339 /* Provide remaining output space */
1340 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1342 res = crypto_compress_final(tfm, &req);
1343 if (res < 0) {
1344 pr_err("alg: pcomp: compression final failed on test "
1345 "%d for %s: error=%d\n", i + 1, algo, res);
1346 return res;
1348 produced += res;
1350 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1351 pr_err("alg: comp: Compression test %d failed for %s: "
1352 "output len = %d (expected %d)\n", i + 1, algo,
1353 COMP_BUF_SIZE - req.avail_out,
1354 ctemplate[i].outlen);
1355 return -EINVAL;
1358 if (produced != ctemplate[i].outlen) {
1359 pr_err("alg: comp: Compression test %d failed for %s: "
1360 "returned len = %u (expected %d)\n", i + 1,
1361 algo, produced, ctemplate[i].outlen);
1362 return -EINVAL;
1365 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1366 pr_err("alg: pcomp: Compression test %d failed for "
1367 "%s\n", i + 1, algo);
1368 hexdump(result, ctemplate[i].outlen);
1369 return -EINVAL;
1373 for (i = 0; i < dtcount; i++) {
1374 struct comp_request req;
1375 unsigned int produced = 0;
1377 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1378 dtemplate[i].paramsize);
1379 if (res) {
1380 pr_err("alg: pcomp: decompression setup failed on "
1381 "test %d for %s: error=%d\n", i + 1, algo, res);
1382 return res;
1385 res = crypto_decompress_init(tfm);
1386 if (res) {
1387 pr_err("alg: pcomp: decompression init failed on test "
1388 "%d for %s: error=%d\n", i + 1, algo, res);
1389 return res;
1392 memset(result, 0, sizeof(result));
1394 req.next_in = dtemplate[i].input;
1395 req.avail_in = dtemplate[i].inlen / 2;
1396 req.next_out = result;
1397 req.avail_out = dtemplate[i].outlen / 2;
1399 res = crypto_decompress_update(tfm, &req);
1400 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1401 pr_err("alg: pcomp: decompression update failed on "
1402 "test %d for %s: error=%d\n", i + 1, algo, res);
1403 return res;
1405 if (res > 0)
1406 produced += res;
1408 /* Add remaining input data */
1409 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1411 res = crypto_decompress_update(tfm, &req);
1412 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1413 pr_err("alg: pcomp: decompression update failed on "
1414 "test %d for %s: error=%d\n", i + 1, algo, res);
1415 return res;
1417 if (res > 0)
1418 produced += res;
1420 /* Provide remaining output space */
1421 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1423 res = crypto_decompress_final(tfm, &req);
1424 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1425 pr_err("alg: pcomp: decompression final failed on "
1426 "test %d for %s: error=%d\n", i + 1, algo, res);
1427 return res;
1429 if (res > 0)
1430 produced += res;
1432 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1433 pr_err("alg: comp: Decompression test %d failed for "
1434 "%s: output len = %d (expected %d)\n", i + 1,
1435 algo, COMP_BUF_SIZE - req.avail_out,
1436 dtemplate[i].outlen);
1437 return -EINVAL;
1440 if (produced != dtemplate[i].outlen) {
1441 pr_err("alg: comp: Decompression test %d failed for "
1442 "%s: returned len = %u (expected %d)\n", i + 1,
1443 algo, produced, dtemplate[i].outlen);
1444 return -EINVAL;
1447 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1448 pr_err("alg: pcomp: Decompression test %d failed for "
1449 "%s\n", i + 1, algo);
1450 hexdump(result, dtemplate[i].outlen);
1451 return -EINVAL;
1455 return 0;
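/*
 * Deterministic CPRNG test: seed the RNG with V || key || DT from the
 * template, pull rlen bytes 'loops' times and compare the last block with
 * the expected output.
 */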
1459 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1460 unsigned int tcount)
1462 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1463 int err = 0, i, j, seedsize;
1464 u8 *seed;
1465 char result[32];
1467 seedsize = crypto_rng_seedsize(tfm);
1469 seed = kmalloc(seedsize, GFP_KERNEL);
1470 if (!seed) {
1471 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1472 "for %s\n", algo);
1473 return -ENOMEM;
1476 for (i = 0; i < tcount; i++) {
1477 memset(result, 0, 32);
1479 memcpy(seed, template[i].v, template[i].vlen);
1480 memcpy(seed + template[i].vlen, template[i].key,
1481 template[i].klen);
1482 memcpy(seed + template[i].vlen + template[i].klen,
1483 template[i].dt, template[i].dtlen);
1485 err = crypto_rng_reset(tfm, seed, seedsize);
1486 if (err) {
1487 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1488 "for %s\n", algo);
1489 goto out;
1492 for (j = 0; j < template[i].loops; j++) {
1493 err = crypto_rng_get_bytes(tfm, result,
1494 template[i].rlen);
1495 if (err < 0) {
1496 printk(KERN_ERR "alg: cprng: Failed to obtain "
1497 "the correct amount of random data for "
1498 "%s (requested %d)\n", algo,
1499 template[i].rlen);
1500 goto out;
1504 err = memcmp(result, template[i].result,
1505 template[i].rlen);
1506 if (err) {
1507 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1508 i, algo);
1509 hexdump(result, template[i].rlen);
1510 err = -EINVAL;
1511 goto out;
1515 out:
1516 kfree(seed);
1517 return err;
1520 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1521 u32 type, u32 mask)
1523 struct crypto_aead *tfm;
1524 int err = 0;
1526 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1527 if (IS_ERR(tfm)) {
1528 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1529 "%ld\n", driver, PTR_ERR(tfm));
1530 return PTR_ERR(tfm);
1533 if (desc->suite.aead.enc.vecs) {
1534 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1535 desc->suite.aead.enc.count);
1536 if (err)
1537 goto out;
1540 if (!err && desc->suite.aead.dec.vecs)
1541 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1542 desc->suite.aead.dec.count);
1544 out:
1545 crypto_free_aead(tfm);
1546 return err;
1549 static int alg_test_cipher(const struct alg_test_desc *desc,
1550 const char *driver, u32 type, u32 mask)
1552 struct crypto_cipher *tfm;
1553 int err = 0;
1555 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1556 if (IS_ERR(tfm)) {
1557 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1558 "%s: %ld\n", driver, PTR_ERR(tfm));
1559 return PTR_ERR(tfm);
1562 if (desc->suite.cipher.enc.vecs) {
1563 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1564 desc->suite.cipher.enc.count);
1565 if (err)
1566 goto out;
1569 if (desc->suite.cipher.dec.vecs)
1570 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1571 desc->suite.cipher.dec.count);
1573 out:
1574 crypto_free_cipher(tfm);
1575 return err;
1578 static int alg_test_skcipher(const struct alg_test_desc *desc,
1579 const char *driver, u32 type, u32 mask)
1581 struct crypto_ablkcipher *tfm;
1582 int err = 0;
1584 tfm = crypto_alloc_ablkcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1585 if (IS_ERR(tfm)) {
1586 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1587 "%s: %ld\n", driver, PTR_ERR(tfm));
1588 return PTR_ERR(tfm);
1591 if (desc->suite.cipher.enc.vecs) {
1592 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1593 desc->suite.cipher.enc.count);
1594 if (err)
1595 goto out;
1598 if (desc->suite.cipher.dec.vecs)
1599 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1600 desc->suite.cipher.dec.count);
1602 out:
1603 crypto_free_ablkcipher(tfm);
1604 return err;
1607 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1608 u32 type, u32 mask)
1610 struct crypto_comp *tfm;
1611 int err;
1613 tfm = crypto_alloc_comp(driver, type, mask);
1614 if (IS_ERR(tfm)) {
1615 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1616 "%ld\n", driver, PTR_ERR(tfm));
1617 return PTR_ERR(tfm);
1620 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1621 desc->suite.comp.decomp.vecs,
1622 desc->suite.comp.comp.count,
1623 desc->suite.comp.decomp.count);
1625 crypto_free_comp(tfm);
1626 return err;
1629 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1630 u32 type, u32 mask)
1632 struct crypto_pcomp *tfm;
1633 int err;
1635 tfm = crypto_alloc_pcomp(driver, type, mask);
1636 if (IS_ERR(tfm)) {
1637 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1638 driver, PTR_ERR(tfm));
1639 return PTR_ERR(tfm);
1642 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1643 desc->suite.pcomp.decomp.vecs,
1644 desc->suite.pcomp.comp.count,
1645 desc->suite.pcomp.decomp.count);
1647 crypto_free_pcomp(tfm);
1648 return err;
1651 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1652 u32 type, u32 mask)
1654 struct crypto_ahash *tfm;
1655 int err;
1657 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1658 if (IS_ERR(tfm)) {
1659 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1660 "%ld\n", driver, PTR_ERR(tfm));
1661 return PTR_ERR(tfm);
1664 err = test_hash(tfm, desc->suite.hash.vecs,
1665 desc->suite.hash.count, true);
1666 if (!err)
1667 err = test_hash(tfm, desc->suite.hash.vecs,
1668 desc->suite.hash.count, false);
1670 crypto_free_ahash(tfm);
1671 return err;
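/*
 * In addition to the generic hash tests, poke a known partial state into the
 * shash context and check that crypto_shash_final() yields the expected
 * (complemented) CRC value.
 */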
1674 static int alg_test_crc32c(const struct alg_test_desc *desc,
1675 const char *driver, u32 type, u32 mask)
1677 struct crypto_shash *tfm;
1678 u32 val;
1679 int err;
1681 err = alg_test_hash(desc, driver, type, mask);
1682 if (err)
1683 goto out;
1685 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1686 if (IS_ERR(tfm)) {
1687 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1688 "%ld\n", driver, PTR_ERR(tfm));
1689 err = PTR_ERR(tfm);
1690 goto out;
1693 do {
1694 SHASH_DESC_ON_STACK(shash, tfm);
1695 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1697 shash->tfm = tfm;
1698 shash->flags = 0;
1700 *ctx = le32_to_cpu(420553207);
1701 err = crypto_shash_final(shash, (u8 *)&val);
1702 if (err) {
1703 printk(KERN_ERR "alg: crc32c: Operation failed for "
1704 "%s: %d\n", driver, err);
1705 break;
1708 if (val != ~420553207) {
1709 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1710 "%d\n", driver, val);
1711 err = -EINVAL;
1713 } while (0);
1715 crypto_free_shash(tfm);
1717 out:
1718 return err;
1721 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1722 u32 type, u32 mask)
1724 struct crypto_rng *rng;
1725 int err;
1727 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1728 if (IS_ERR(rng)) {
1729 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1730 "%ld\n", driver, PTR_ERR(rng));
1731 return PTR_ERR(rng);
1734 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1736 crypto_free_rng(rng);
1738 return err;
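/*
 * Single CAVS-style DRBG test: reset the DRBG with the template's entropy
 * and personalization string, generate two blocks (using the test entropy
 * when prediction resistance is requested) and compare the second block
 * against the expected output.
 */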
1742 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1743 const char *driver, u32 type, u32 mask)
1745 int ret = -EAGAIN;
1746 struct crypto_rng *drng;
1747 struct drbg_test_data test_data;
1748 struct drbg_string addtl, pers, testentropy;
1749 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1751 if (!buf)
1752 return -ENOMEM;
1754 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1755 if (IS_ERR(drng)) {
1756 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1757 "%s\n", driver);
1758 kzfree(buf);
1759 return -ENOMEM;
1762 test_data.testentropy = &testentropy;
1763 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1764 drbg_string_fill(&pers, test->pers, test->perslen);
1765 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1766 if (ret) {
1767 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1768 goto outbuf;
1771 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1772 if (pr) {
1773 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1774 ret = crypto_drbg_get_bytes_addtl_test(drng,
1775 buf, test->expectedlen, &addtl, &test_data);
1776 } else {
1777 ret = crypto_drbg_get_bytes_addtl(drng,
1778 buf, test->expectedlen, &addtl);
1780 if (ret < 0) {
1781 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1782 "driver %s\n", driver);
1783 goto outbuf;
1786 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1787 if (pr) {
1788 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1789 ret = crypto_drbg_get_bytes_addtl_test(drng,
1790 buf, test->expectedlen, &addtl, &test_data);
1791 } else {
1792 ret = crypto_drbg_get_bytes_addtl(drng,
1793 buf, test->expectedlen, &addtl);
1795 if (ret < 0) {
1796 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1797 "driver %s\n", driver);
1798 goto outbuf;
1801 ret = memcmp(test->expected, buf, test->expectedlen);
1803 outbuf:
1804 crypto_free_rng(drng);
1805 kzfree(buf);
1806 return ret;
1810 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1811 u32 type, u32 mask)
1813 int err = 0;
1814 int pr = 0;
1815 int i = 0;
1816 struct drbg_testvec *template = desc->suite.drbg.vecs;
1817 unsigned int tcount = desc->suite.drbg.count;
1819 if (0 == memcmp(driver, "drbg_pr_", 8))
1820 pr = 1;
1822 for (i = 0; i < tcount; i++) {
1823 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1824 if (err) {
1825 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1826 i, driver);
1827 err = -EINVAL;
1828 break;
1831 return err;
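/*
 * Encrypt the test message with the RSA key, compare against the expected
 * ciphertext, then (for private-key vectors) decrypt it again and check that
 * the original message comes back.  The first encrypt call is issued with a
 * NULL output buffer only to learn the required dst_len.
 */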
1835 static int do_test_rsa(struct crypto_akcipher *tfm,
1836 struct akcipher_testvec *vecs)
1838 struct akcipher_request *req;
1839 void *outbuf_enc = NULL;
1840 void *outbuf_dec = NULL;
1841 struct tcrypt_result result;
1842 unsigned int out_len_max, out_len = 0;
1843 int err = -ENOMEM;
1845 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1846 if (!req)
1847 return err;
1849 init_completion(&result.completion);
1850 err = crypto_akcipher_setkey(tfm, vecs->key, vecs->key_len);
1851 if (err)
1852 goto free_req;
1854 akcipher_request_set_crypt(req, vecs->m, outbuf_enc, vecs->m_size,
1855 out_len);
1856 /* expect this to fail, and update the required buf len */
1857 crypto_akcipher_encrypt(req);
1858 out_len = req->dst_len;
1859 if (!out_len) {
1860 err = -EINVAL;
1861 goto free_req;
1864 out_len_max = out_len;
1865 err = -ENOMEM;
1866 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1867 if (!outbuf_enc)
1868 goto free_req;
1870 akcipher_request_set_crypt(req, vecs->m, outbuf_enc, vecs->m_size,
1871 out_len);
1872 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1873 tcrypt_complete, &result);
1875 /* Run RSA encrypt - c = m^e mod n;*/
1876 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1877 if (err) {
1878 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1879 goto free_all;
1881 if (out_len != vecs->c_size) {
1882 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1883 err = -EINVAL;
1884 goto free_all;
1886 /* verify that encrypted message is equal to expected */
1887 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1888 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1889 err = -EINVAL;
1890 goto free_all;
1892 /* Don't invoke decrypt for vectors with public key */
1893 if (vecs->public_key_vec) {
1894 err = 0;
1895 goto free_all;
1897 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1898 if (!outbuf_dec) {
1899 err = -ENOMEM;
1900 goto free_all;
1902 init_completion(&result.completion);
1903 akcipher_request_set_crypt(req, outbuf_enc, outbuf_dec, vecs->c_size,
1904 out_len);
1906 /* Run RSA decrypt - m = c^d mod n;*/
1907 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1908 if (err) {
1909 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1910 goto free_all;
1912 out_len = req->dst_len;
1913 if (out_len != vecs->m_size) {
1914 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1915 err = -EINVAL;
1916 goto free_all;
1918 /* verify that decrypted message is equal to the original msg */
1919 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1920 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1921 err = -EINVAL;
1923 free_all:
1924 kfree(outbuf_dec);
1925 kfree(outbuf_enc);
1926 free_req:
1927 akcipher_request_free(req);
1928 return err;
1931 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1932 unsigned int tcount)
1934 int ret, i;
1936 for (i = 0; i < tcount; i++) {
1937 ret = do_test_rsa(tfm, vecs++);
1938 if (ret) {
1939 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1940 i + 1, ret);
1941 return ret;
1944 return 0;
1947 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1948 struct akcipher_testvec *vecs, unsigned int tcount)
1950 if (strncmp(alg, "rsa", 3) == 0)
1951 return test_rsa(tfm, vecs, tcount);
1953 return 0;
1956 static int alg_test_akcipher(const struct alg_test_desc *desc,
1957 const char *driver, u32 type, u32 mask)
1959 struct crypto_akcipher *tfm;
1960 int err = 0;
1962 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1963 if (IS_ERR(tfm)) {
1964 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1965 driver, PTR_ERR(tfm));
1966 return PTR_ERR(tfm);
1968 if (desc->suite.akcipher.vecs)
1969 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
1970 desc->suite.akcipher.count);
1972 crypto_free_akcipher(tfm);
1973 return err;
1976 static int alg_test_null(const struct alg_test_desc *desc,
1977 const char *driver, u32 type, u32 mask)
1979 return 0;
1982 /* Please keep this list sorted by algorithm name. */
1983 static const struct alg_test_desc alg_test_descs[] = {
1985 .alg = "__cbc-cast5-avx",
1986 .test = alg_test_null,
1987 }, {
1988 .alg = "__cbc-cast6-avx",
1989 .test = alg_test_null,
1990 }, {
1991 .alg = "__cbc-serpent-avx",
1992 .test = alg_test_null,
1993 }, {
1994 .alg = "__cbc-serpent-avx2",
1995 .test = alg_test_null,
1996 }, {
1997 .alg = "__cbc-serpent-sse2",
1998 .test = alg_test_null,
1999 }, {
2000 .alg = "__cbc-twofish-avx",
2001 .test = alg_test_null,
2002 }, {
2003 .alg = "__driver-cbc-aes-aesni",
2004 .test = alg_test_null,
2005 .fips_allowed = 1,
2006 }, {
2007 .alg = "__driver-cbc-camellia-aesni",
2008 .test = alg_test_null,
2009 }, {
2010 .alg = "__driver-cbc-camellia-aesni-avx2",
2011 .test = alg_test_null,
2012 }, {
2013 .alg = "__driver-cbc-cast5-avx",
2014 .test = alg_test_null,
2015 }, {
2016 .alg = "__driver-cbc-cast6-avx",
2017 .test = alg_test_null,
2018 }, {
2019 .alg = "__driver-cbc-serpent-avx",
2020 .test = alg_test_null,
2021 }, {
2022 .alg = "__driver-cbc-serpent-avx2",
2023 .test = alg_test_null,
2024 }, {
2025 .alg = "__driver-cbc-serpent-sse2",
2026 .test = alg_test_null,
2027 }, {
2028 .alg = "__driver-cbc-twofish-avx",
2029 .test = alg_test_null,
2030 }, {
2031 .alg = "__driver-ecb-aes-aesni",
2032 .test = alg_test_null,
2033 .fips_allowed = 1,
2034 }, {
2035 .alg = "__driver-ecb-camellia-aesni",
2036 .test = alg_test_null,
2037 }, {
2038 .alg = "__driver-ecb-camellia-aesni-avx2",
2039 .test = alg_test_null,
2040 }, {
2041 .alg = "__driver-ecb-cast5-avx",
2042 .test = alg_test_null,
2043 }, {
2044 .alg = "__driver-ecb-cast6-avx",
2045 .test = alg_test_null,
2046 }, {
2047 .alg = "__driver-ecb-serpent-avx",
2048 .test = alg_test_null,
2049 }, {
2050 .alg = "__driver-ecb-serpent-avx2",
2051 .test = alg_test_null,
2052 }, {
2053 .alg = "__driver-ecb-serpent-sse2",
2054 .test = alg_test_null,
2055 }, {
2056 .alg = "__driver-ecb-twofish-avx",
2057 .test = alg_test_null,
2058 }, {
2059 .alg = "__driver-gcm-aes-aesni",
2060 .test = alg_test_null,
2061 .fips_allowed = 1,
2062 }, {
2063 .alg = "__ghash-pclmulqdqni",
2064 .test = alg_test_null,
2065 .fips_allowed = 1,
2066 }, {
2067 .alg = "ansi_cprng",
2068 .test = alg_test_cprng,
2069 .fips_allowed = 1,
2070 .suite = {
2071 .cprng = {
2072 .vecs = ansi_cprng_aes_tv_template,
2073 .count = ANSI_CPRNG_AES_TEST_VECTORS
2076 }, {
2077 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2078 .test = alg_test_aead,
2079 .fips_allowed = 1,
2080 .suite = {
2081 .aead = {
2082 .enc = {
2083 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2084 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2086 .dec = {
2087 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2088 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2092 }, {
2093 .alg = "authenc(hmac(sha1),cbc(aes))",
2094 .test = alg_test_aead,
2095 .fips_allowed = 1,
2096 .suite = {
2097 .aead = {
2098 .enc = {
2099 .vecs =
2100 hmac_sha1_aes_cbc_enc_tv_temp,
2101 .count =
2102 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2106 }, {
2107 .alg = "authenc(hmac(sha1),cbc(des))",
2108 .test = alg_test_aead,
2109 .fips_allowed = 1,
2110 .suite = {
2111 .aead = {
2112 .enc = {
2113 .vecs =
2114 hmac_sha1_des_cbc_enc_tv_temp,
2115 .count =
2116 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2120 }, {
2121 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2122 .test = alg_test_aead,
2123 .fips_allowed = 1,
2124 .suite = {
2125 .aead = {
2126 .enc = {
2127 .vecs =
2128 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2129 .count =
2130 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2134 }, {
2135 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2136 .test = alg_test_aead,
2137 .fips_allowed = 1,
2138 .suite = {
2139 .aead = {
2140 .enc = {
2141 .vecs =
2142 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2143 .count =
2144 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2146 .dec = {
2147 .vecs =
2148 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2149 .count =
2150 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2154 }, {
2155 .alg = "authenc(hmac(sha224),cbc(des))",
2156 .test = alg_test_aead,
2157 .fips_allowed = 1,
2158 .suite = {
2159 .aead = {
2160 .enc = {
2161 .vecs =
2162 hmac_sha224_des_cbc_enc_tv_temp,
2163 .count =
2164 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2168 }, {
2169 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2170 .test = alg_test_aead,
2171 .fips_allowed = 1,
2172 .suite = {
2173 .aead = {
2174 .enc = {
2175 .vecs =
2176 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2177 .count =
2178 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2182 }, {
2183 .alg = "authenc(hmac(sha256),cbc(aes))",
2184 .test = alg_test_aead,
2185 .fips_allowed = 1,
2186 .suite = {
2187 .aead = {
2188 .enc = {
2189 .vecs =
2190 hmac_sha256_aes_cbc_enc_tv_temp,
2191 .count =
2192 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2196 }, {
2197 .alg = "authenc(hmac(sha256),cbc(des))",
2198 .test = alg_test_aead,
2199 .fips_allowed = 1,
2200 .suite = {
2201 .aead = {
2202 .enc = {
2203 .vecs =
2204 hmac_sha256_des_cbc_enc_tv_temp,
2205 .count =
2206 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2210 }, {
2211 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2212 .test = alg_test_aead,
2213 .fips_allowed = 1,
2214 .suite = {
2215 .aead = {
2216 .enc = {
2217 .vecs =
2218 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2219 .count =
2220 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2224 }, {
2225 .alg = "authenc(hmac(sha384),cbc(des))",
2226 .test = alg_test_aead,
2227 .fips_allowed = 1,
2228 .suite = {
2229 .aead = {
2230 .enc = {
2231 .vecs =
2232 hmac_sha384_des_cbc_enc_tv_temp,
2233 .count =
2234 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2238 }, {
2239 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2240 .test = alg_test_aead,
2241 .fips_allowed = 1,
2242 .suite = {
2243 .aead = {
2244 .enc = {
2245 .vecs =
2246 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2247 .count =
2248 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2252 }, {
2253 .alg = "authenc(hmac(sha512),cbc(aes))",
2254 .test = alg_test_aead,
2255 .fips_allowed = 1,
2256 .suite = {
2257 .aead = {
2258 .enc = {
2259 .vecs =
2260 hmac_sha512_aes_cbc_enc_tv_temp,
2261 .count =
2262 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2266 }, {
2267 .alg = "authenc(hmac(sha512),cbc(des))",
2268 .test = alg_test_aead,
2269 .fips_allowed = 1,
2270 .suite = {
2271 .aead = {
2272 .enc = {
2273 .vecs =
2274 hmac_sha512_des_cbc_enc_tv_temp,
2275 .count =
2276 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2280 }, {
2281 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2282 .test = alg_test_aead,
2283 .fips_allowed = 1,
2284 .suite = {
2285 .aead = {
2286 .enc = {
2287 .vecs =
2288 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2289 .count =
2290 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2294 }, {
2295 .alg = "cbc(aes)",
2296 .test = alg_test_skcipher,
2297 .fips_allowed = 1,
2298 .suite = {
2299 .cipher = {
2300 .enc = {
2301 .vecs = aes_cbc_enc_tv_template,
2302 .count = AES_CBC_ENC_TEST_VECTORS
2304 .dec = {
2305 .vecs = aes_cbc_dec_tv_template,
2306 .count = AES_CBC_DEC_TEST_VECTORS
2310 }, {
2311 .alg = "cbc(anubis)",
2312 .test = alg_test_skcipher,
2313 .suite = {
2314 .cipher = {
2315 .enc = {
2316 .vecs = anubis_cbc_enc_tv_template,
2317 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2319 .dec = {
2320 .vecs = anubis_cbc_dec_tv_template,
2321 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2325 }, {
2326 .alg = "cbc(blowfish)",
2327 .test = alg_test_skcipher,
2328 .suite = {
2329 .cipher = {
2330 .enc = {
2331 .vecs = bf_cbc_enc_tv_template,
2332 .count = BF_CBC_ENC_TEST_VECTORS
2334 .dec = {
2335 .vecs = bf_cbc_dec_tv_template,
2336 .count = BF_CBC_DEC_TEST_VECTORS
2340 }, {
2341 .alg = "cbc(camellia)",
2342 .test = alg_test_skcipher,
2343 .suite = {
2344 .cipher = {
2345 .enc = {
2346 .vecs = camellia_cbc_enc_tv_template,
2347 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2349 .dec = {
2350 .vecs = camellia_cbc_dec_tv_template,
2351 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2355 }, {
2356 .alg = "cbc(cast5)",
2357 .test = alg_test_skcipher,
2358 .suite = {
2359 .cipher = {
2360 .enc = {
2361 .vecs = cast5_cbc_enc_tv_template,
2362 .count = CAST5_CBC_ENC_TEST_VECTORS
2364 .dec = {
2365 .vecs = cast5_cbc_dec_tv_template,
2366 .count = CAST5_CBC_DEC_TEST_VECTORS
2370 }, {
2371 .alg = "cbc(cast6)",
2372 .test = alg_test_skcipher,
2373 .suite = {
2374 .cipher = {
2375 .enc = {
2376 .vecs = cast6_cbc_enc_tv_template,
2377 .count = CAST6_CBC_ENC_TEST_VECTORS
2379 .dec = {
2380 .vecs = cast6_cbc_dec_tv_template,
2381 .count = CAST6_CBC_DEC_TEST_VECTORS
2385 }, {
2386 .alg = "cbc(des)",
2387 .test = alg_test_skcipher,
2388 .suite = {
2389 .cipher = {
2390 .enc = {
2391 .vecs = des_cbc_enc_tv_template,
2392 .count = DES_CBC_ENC_TEST_VECTORS
2394 .dec = {
2395 .vecs = des_cbc_dec_tv_template,
2396 .count = DES_CBC_DEC_TEST_VECTORS
2400 }, {
2401 .alg = "cbc(des3_ede)",
2402 .test = alg_test_skcipher,
2403 .fips_allowed = 1,
2404 .suite = {
2405 .cipher = {
2406 .enc = {
2407 .vecs = des3_ede_cbc_enc_tv_template,
2408 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2410 .dec = {
2411 .vecs = des3_ede_cbc_dec_tv_template,
2412 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2416 }, {
2417 .alg = "cbc(serpent)",
2418 .test = alg_test_skcipher,
2419 .suite = {
2420 .cipher = {
2421 .enc = {
2422 .vecs = serpent_cbc_enc_tv_template,
2423 .count = SERPENT_CBC_ENC_TEST_VECTORS
2425 .dec = {
2426 .vecs = serpent_cbc_dec_tv_template,
2427 .count = SERPENT_CBC_DEC_TEST_VECTORS
2431 }, {
2432 .alg = "cbc(twofish)",
2433 .test = alg_test_skcipher,
2434 .suite = {
2435 .cipher = {
2436 .enc = {
2437 .vecs = tf_cbc_enc_tv_template,
2438 .count = TF_CBC_ENC_TEST_VECTORS
2440 .dec = {
2441 .vecs = tf_cbc_dec_tv_template,
2442 .count = TF_CBC_DEC_TEST_VECTORS
2446 }, {
2447 .alg = "ccm(aes)",
2448 .test = alg_test_aead,
2449 .fips_allowed = 1,
2450 .suite = {
2451 .aead = {
2452 .enc = {
2453 .vecs = aes_ccm_enc_tv_template,
2454 .count = AES_CCM_ENC_TEST_VECTORS
2456 .dec = {
2457 .vecs = aes_ccm_dec_tv_template,
2458 .count = AES_CCM_DEC_TEST_VECTORS
2462 }, {
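/*
 * chacha20 is a stream cipher; decryption is the same keystream XOR,
 * so the .dec entry below reuses chacha20_enc_tv_template.
 */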
2463 .alg = "chacha20",
2464 .test = alg_test_skcipher,
2465 .suite = {
2466 .cipher = {
2467 .enc = {
2468 .vecs = chacha20_enc_tv_template,
2469 .count = CHACHA20_ENC_TEST_VECTORS
2471 .dec = {
2472 .vecs = chacha20_enc_tv_template,
2473 .count = CHACHA20_ENC_TEST_VECTORS
2477 }, {
2478 .alg = "cmac(aes)",
2479 .test = alg_test_hash,
2480 .suite = {
2481 .hash = {
2482 .vecs = aes_cmac128_tv_template,
2483 .count = CMAC_AES_TEST_VECTORS
2486 }, {
2487 .alg = "cmac(des3_ede)",
2488 .test = alg_test_hash,
2489 .suite = {
2490 .hash = {
2491 .vecs = des3_ede_cmac64_tv_template,
2492 .count = CMAC_DES3_EDE_TEST_VECTORS
2495 }, {
2496 .alg = "compress_null",
2497 .test = alg_test_null,
2498 }, {
2499 .alg = "crc32",
2500 .test = alg_test_hash,
2501 .suite = {
2502 .hash = {
2503 .vecs = crc32_tv_template,
2504 .count = CRC32_TEST_VECTORS
2507 }, {
2508 .alg = "crc32c",
2509 .test = alg_test_crc32c,
2510 .fips_allowed = 1,
2511 .suite = {
2512 .hash = {
2513 .vecs = crc32c_tv_template,
2514 .count = CRC32C_TEST_VECTORS
2517 }, {
2518 .alg = "crct10dif",
2519 .test = alg_test_hash,
2520 .fips_allowed = 1,
2521 .suite = {
2522 .hash = {
2523 .vecs = crct10dif_tv_template,
2524 .count = CRCT10DIF_TEST_VECTORS
2527 }, {
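/*
 * The cryptd() and "__driver-*" entries below are internal helper
 * implementations that are normally only reached through the exported
 * algorithms wrapping them, so only a nop test is registered for them.
 */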
2528 .alg = "cryptd(__driver-cbc-aes-aesni)",
2529 .test = alg_test_null,
2530 .fips_allowed = 1,
2531 }, {
2532 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2533 .test = alg_test_null,
2534 }, {
2535 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2536 .test = alg_test_null,
2537 }, {
2538 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2539 .test = alg_test_null,
2540 }, {
2541 .alg = "cryptd(__driver-ecb-aes-aesni)",
2542 .test = alg_test_null,
2543 .fips_allowed = 1,
2544 }, {
2545 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2546 .test = alg_test_null,
2547 }, {
2548 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2549 .test = alg_test_null,
2550 }, {
2551 .alg = "cryptd(__driver-ecb-cast5-avx)",
2552 .test = alg_test_null,
2553 }, {
2554 .alg = "cryptd(__driver-ecb-cast6-avx)",
2555 .test = alg_test_null,
2556 }, {
2557 .alg = "cryptd(__driver-ecb-serpent-avx)",
2558 .test = alg_test_null,
2559 }, {
2560 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2561 .test = alg_test_null,
2562 }, {
2563 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2564 .test = alg_test_null,
2565 }, {
2566 .alg = "cryptd(__driver-ecb-twofish-avx)",
2567 .test = alg_test_null,
2568 }, {
2569 .alg = "cryptd(__driver-gcm-aes-aesni)",
2570 .test = alg_test_null,
2571 .fips_allowed = 1,
2572 }, {
2573 .alg = "cryptd(__ghash-pclmulqdqni)",
2574 .test = alg_test_null,
2575 .fips_allowed = 1,
2576 }, {
2577 .alg = "ctr(aes)",
2578 .test = alg_test_skcipher,
2579 .fips_allowed = 1,
2580 .suite = {
2581 .cipher = {
2582 .enc = {
2583 .vecs = aes_ctr_enc_tv_template,
2584 .count = AES_CTR_ENC_TEST_VECTORS
2586 .dec = {
2587 .vecs = aes_ctr_dec_tv_template,
2588 .count = AES_CTR_DEC_TEST_VECTORS
2592 }, {
2593 .alg = "ctr(blowfish)",
2594 .test = alg_test_skcipher,
2595 .suite = {
2596 .cipher = {
2597 .enc = {
2598 .vecs = bf_ctr_enc_tv_template,
2599 .count = BF_CTR_ENC_TEST_VECTORS
2601 .dec = {
2602 .vecs = bf_ctr_dec_tv_template,
2603 .count = BF_CTR_DEC_TEST_VECTORS
2607 }, {
2608 .alg = "ctr(camellia)",
2609 .test = alg_test_skcipher,
2610 .suite = {
2611 .cipher = {
2612 .enc = {
2613 .vecs = camellia_ctr_enc_tv_template,
2614 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2616 .dec = {
2617 .vecs = camellia_ctr_dec_tv_template,
2618 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2622 }, {
2623 .alg = "ctr(cast5)",
2624 .test = alg_test_skcipher,
2625 .suite = {
2626 .cipher = {
2627 .enc = {
2628 .vecs = cast5_ctr_enc_tv_template,
2629 .count = CAST5_CTR_ENC_TEST_VECTORS
2631 .dec = {
2632 .vecs = cast5_ctr_dec_tv_template,
2633 .count = CAST5_CTR_DEC_TEST_VECTORS
2637 }, {
2638 .alg = "ctr(cast6)",
2639 .test = alg_test_skcipher,
2640 .suite = {
2641 .cipher = {
2642 .enc = {
2643 .vecs = cast6_ctr_enc_tv_template,
2644 .count = CAST6_CTR_ENC_TEST_VECTORS
2646 .dec = {
2647 .vecs = cast6_ctr_dec_tv_template,
2648 .count = CAST6_CTR_DEC_TEST_VECTORS
2652 }, {
2653 .alg = "ctr(des)",
2654 .test = alg_test_skcipher,
2655 .suite = {
2656 .cipher = {
2657 .enc = {
2658 .vecs = des_ctr_enc_tv_template,
2659 .count = DES_CTR_ENC_TEST_VECTORS
2661 .dec = {
2662 .vecs = des_ctr_dec_tv_template,
2663 .count = DES_CTR_DEC_TEST_VECTORS
2667 }, {
2668 .alg = "ctr(des3_ede)",
2669 .test = alg_test_skcipher,
2670 .suite = {
2671 .cipher = {
2672 .enc = {
2673 .vecs = des3_ede_ctr_enc_tv_template,
2674 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2676 .dec = {
2677 .vecs = des3_ede_ctr_dec_tv_template,
2678 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2682 }, {
2683 .alg = "ctr(serpent)",
2684 .test = alg_test_skcipher,
2685 .suite = {
2686 .cipher = {
2687 .enc = {
2688 .vecs = serpent_ctr_enc_tv_template,
2689 .count = SERPENT_CTR_ENC_TEST_VECTORS
2691 .dec = {
2692 .vecs = serpent_ctr_dec_tv_template,
2693 .count = SERPENT_CTR_DEC_TEST_VECTORS
2697 }, {
2698 .alg = "ctr(twofish)",
2699 .test = alg_test_skcipher,
2700 .suite = {
2701 .cipher = {
2702 .enc = {
2703 .vecs = tf_ctr_enc_tv_template,
2704 .count = TF_CTR_ENC_TEST_VECTORS
2706 .dec = {
2707 .vecs = tf_ctr_dec_tv_template,
2708 .count = TF_CTR_DEC_TEST_VECTORS
2712 }, {
2713 .alg = "cts(cbc(aes))",
2714 .test = alg_test_skcipher,
2715 .suite = {
2716 .cipher = {
2717 .enc = {
2718 .vecs = cts_mode_enc_tv_template,
2719 .count = CTS_MODE_ENC_TEST_VECTORS
2721 .dec = {
2722 .vecs = cts_mode_dec_tv_template,
2723 .count = CTS_MODE_DEC_TEST_VECTORS
2727 }, {
2728 .alg = "deflate",
2729 .test = alg_test_comp,
2730 .fips_allowed = 1,
2731 .suite = {
2732 .comp = {
2733 .comp = {
2734 .vecs = deflate_comp_tv_template,
2735 .count = DEFLATE_COMP_TEST_VECTORS
2737 .decomp = {
2738 .vecs = deflate_decomp_tv_template,
2739 .count = DEFLATE_DECOMP_TEST_VECTORS
2743 }, {
2744 .alg = "digest_null",
2745 .test = alg_test_null,
2746 }, {
2747 .alg = "drbg_nopr_ctr_aes128",
2748 .test = alg_test_drbg,
2749 .fips_allowed = 1,
2750 .suite = {
2751 .drbg = {
2752 .vecs = drbg_nopr_ctr_aes128_tv_template,
2753 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2756 }, {
2757 .alg = "drbg_nopr_ctr_aes192",
2758 .test = alg_test_drbg,
2759 .fips_allowed = 1,
2760 .suite = {
2761 .drbg = {
2762 .vecs = drbg_nopr_ctr_aes192_tv_template,
2763 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2766 }, {
2767 .alg = "drbg_nopr_ctr_aes256",
2768 .test = alg_test_drbg,
2769 .fips_allowed = 1,
2770 .suite = {
2771 .drbg = {
2772 .vecs = drbg_nopr_ctr_aes256_tv_template,
2773 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2776 }, {
2777 /*
2778 * There is no need to specifically test the DRBG with every
2779 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2780 */
2781 .alg = "drbg_nopr_hmac_sha1",
2782 .fips_allowed = 1,
2783 .test = alg_test_null,
2784 }, {
2785 .alg = "drbg_nopr_hmac_sha256",
2786 .test = alg_test_drbg,
2787 .fips_allowed = 1,
2788 .suite = {
2789 .drbg = {
2790 .vecs = drbg_nopr_hmac_sha256_tv_template,
2791 .count =
2792 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2795 }, {
2796 /* covered by drbg_nopr_hmac_sha256 test */
2797 .alg = "drbg_nopr_hmac_sha384",
2798 .fips_allowed = 1,
2799 .test = alg_test_null,
2800 }, {
2801 .alg = "drbg_nopr_hmac_sha512",
2802 .test = alg_test_null,
2803 .fips_allowed = 1,
2804 }, {
2805 .alg = "drbg_nopr_sha1",
2806 .fips_allowed = 1,
2807 .test = alg_test_null,
2808 }, {
2809 .alg = "drbg_nopr_sha256",
2810 .test = alg_test_drbg,
2811 .fips_allowed = 1,
2812 .suite = {
2813 .drbg = {
2814 .vecs = drbg_nopr_sha256_tv_template,
2815 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2818 }, {
2819 /* covered by drbg_nopr_sha256 test */
2820 .alg = "drbg_nopr_sha384",
2821 .fips_allowed = 1,
2822 .test = alg_test_null,
2823 }, {
2824 .alg = "drbg_nopr_sha512",
2825 .fips_allowed = 1,
2826 .test = alg_test_null,
2827 }, {
2828 .alg = "drbg_pr_ctr_aes128",
2829 .test = alg_test_drbg,
2830 .fips_allowed = 1,
2831 .suite = {
2832 .drbg = {
2833 .vecs = drbg_pr_ctr_aes128_tv_template,
2834 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2837 }, {
2838 /* covered by drbg_pr_ctr_aes128 test */
2839 .alg = "drbg_pr_ctr_aes192",
2840 .fips_allowed = 1,
2841 .test = alg_test_null,
2842 }, {
2843 .alg = "drbg_pr_ctr_aes256",
2844 .fips_allowed = 1,
2845 .test = alg_test_null,
2846 }, {
2847 .alg = "drbg_pr_hmac_sha1",
2848 .fips_allowed = 1,
2849 .test = alg_test_null,
2850 }, {
2851 .alg = "drbg_pr_hmac_sha256",
2852 .test = alg_test_drbg,
2853 .fips_allowed = 1,
2854 .suite = {
2855 .drbg = {
2856 .vecs = drbg_pr_hmac_sha256_tv_template,
2857 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2860 }, {
2861 /* covered by drbg_pr_hmac_sha256 test */
2862 .alg = "drbg_pr_hmac_sha384",
2863 .fips_allowed = 1,
2864 .test = alg_test_null,
2865 }, {
2866 .alg = "drbg_pr_hmac_sha512",
2867 .test = alg_test_null,
2868 .fips_allowed = 1,
2869 }, {
2870 .alg = "drbg_pr_sha1",
2871 .fips_allowed = 1,
2872 .test = alg_test_null,
2873 }, {
2874 .alg = "drbg_pr_sha256",
2875 .test = alg_test_drbg,
2876 .fips_allowed = 1,
2877 .suite = {
2878 .drbg = {
2879 .vecs = drbg_pr_sha256_tv_template,
2880 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2883 }, {
2884 /* covered by drbg_pr_sha256 test */
2885 .alg = "drbg_pr_sha384",
2886 .fips_allowed = 1,
2887 .test = alg_test_null,
2888 }, {
2889 .alg = "drbg_pr_sha512",
2890 .fips_allowed = 1,
2891 .test = alg_test_null,
2892 }, {
2893 .alg = "ecb(__aes-aesni)",
2894 .test = alg_test_null,
2895 .fips_allowed = 1,
2896 }, {
2897 .alg = "ecb(aes)",
2898 .test = alg_test_skcipher,
2899 .fips_allowed = 1,
2900 .suite = {
2901 .cipher = {
2902 .enc = {
2903 .vecs = aes_enc_tv_template,
2904 .count = AES_ENC_TEST_VECTORS
2906 .dec = {
2907 .vecs = aes_dec_tv_template,
2908 .count = AES_DEC_TEST_VECTORS
2912 }, {
2913 .alg = "ecb(anubis)",
2914 .test = alg_test_skcipher,
2915 .suite = {
2916 .cipher = {
2917 .enc = {
2918 .vecs = anubis_enc_tv_template,
2919 .count = ANUBIS_ENC_TEST_VECTORS
2921 .dec = {
2922 .vecs = anubis_dec_tv_template,
2923 .count = ANUBIS_DEC_TEST_VECTORS
2927 }, {
2928 .alg = "ecb(arc4)",
2929 .test = alg_test_skcipher,
2930 .suite = {
2931 .cipher = {
2932 .enc = {
2933 .vecs = arc4_enc_tv_template,
2934 .count = ARC4_ENC_TEST_VECTORS
2936 .dec = {
2937 .vecs = arc4_dec_tv_template,
2938 .count = ARC4_DEC_TEST_VECTORS
2942 }, {
2943 .alg = "ecb(blowfish)",
2944 .test = alg_test_skcipher,
2945 .suite = {
2946 .cipher = {
2947 .enc = {
2948 .vecs = bf_enc_tv_template,
2949 .count = BF_ENC_TEST_VECTORS
2951 .dec = {
2952 .vecs = bf_dec_tv_template,
2953 .count = BF_DEC_TEST_VECTORS
2957 }, {
2958 .alg = "ecb(camellia)",
2959 .test = alg_test_skcipher,
2960 .suite = {
2961 .cipher = {
2962 .enc = {
2963 .vecs = camellia_enc_tv_template,
2964 .count = CAMELLIA_ENC_TEST_VECTORS
2966 .dec = {
2967 .vecs = camellia_dec_tv_template,
2968 .count = CAMELLIA_DEC_TEST_VECTORS
2972 }, {
2973 .alg = "ecb(cast5)",
2974 .test = alg_test_skcipher,
2975 .suite = {
2976 .cipher = {
2977 .enc = {
2978 .vecs = cast5_enc_tv_template,
2979 .count = CAST5_ENC_TEST_VECTORS
2981 .dec = {
2982 .vecs = cast5_dec_tv_template,
2983 .count = CAST5_DEC_TEST_VECTORS
2987 }, {
2988 .alg = "ecb(cast6)",
2989 .test = alg_test_skcipher,
2990 .suite = {
2991 .cipher = {
2992 .enc = {
2993 .vecs = cast6_enc_tv_template,
2994 .count = CAST6_ENC_TEST_VECTORS
2996 .dec = {
2997 .vecs = cast6_dec_tv_template,
2998 .count = CAST6_DEC_TEST_VECTORS
3002 }, {
3003 .alg = "ecb(cipher_null)",
3004 .test = alg_test_null,
3005 }, {
3006 .alg = "ecb(des)",
3007 .test = alg_test_skcipher,
3008 .fips_allowed = 1,
3009 .suite = {
3010 .cipher = {
3011 .enc = {
3012 .vecs = des_enc_tv_template,
3013 .count = DES_ENC_TEST_VECTORS
3015 .dec = {
3016 .vecs = des_dec_tv_template,
3017 .count = DES_DEC_TEST_VECTORS
3021 }, {
3022 .alg = "ecb(des3_ede)",
3023 .test = alg_test_skcipher,
3024 .fips_allowed = 1,
3025 .suite = {
3026 .cipher = {
3027 .enc = {
3028 .vecs = des3_ede_enc_tv_template,
3029 .count = DES3_EDE_ENC_TEST_VECTORS
3031 .dec = {
3032 .vecs = des3_ede_dec_tv_template,
3033 .count = DES3_EDE_DEC_TEST_VECTORS
3037 }, {
3038 .alg = "ecb(fcrypt)",
3039 .test = alg_test_skcipher,
3040 .suite = {
3041 .cipher = {
3042 .enc = {
3043 .vecs = fcrypt_pcbc_enc_tv_template,
3044 .count = 1
3046 .dec = {
3047 .vecs = fcrypt_pcbc_dec_tv_template,
3048 .count = 1
3052 }, {
3053 .alg = "ecb(khazad)",
3054 .test = alg_test_skcipher,
3055 .suite = {
3056 .cipher = {
3057 .enc = {
3058 .vecs = khazad_enc_tv_template,
3059 .count = KHAZAD_ENC_TEST_VECTORS
3061 .dec = {
3062 .vecs = khazad_dec_tv_template,
3063 .count = KHAZAD_DEC_TEST_VECTORS
3067 }, {
3068 .alg = "ecb(seed)",
3069 .test = alg_test_skcipher,
3070 .suite = {
3071 .cipher = {
3072 .enc = {
3073 .vecs = seed_enc_tv_template,
3074 .count = SEED_ENC_TEST_VECTORS
3076 .dec = {
3077 .vecs = seed_dec_tv_template,
3078 .count = SEED_DEC_TEST_VECTORS
3082 }, {
3083 .alg = "ecb(serpent)",
3084 .test = alg_test_skcipher,
3085 .suite = {
3086 .cipher = {
3087 .enc = {
3088 .vecs = serpent_enc_tv_template,
3089 .count = SERPENT_ENC_TEST_VECTORS
3091 .dec = {
3092 .vecs = serpent_dec_tv_template,
3093 .count = SERPENT_DEC_TEST_VECTORS
3097 }, {
3098 .alg = "ecb(tea)",
3099 .test = alg_test_skcipher,
3100 .suite = {
3101 .cipher = {
3102 .enc = {
3103 .vecs = tea_enc_tv_template,
3104 .count = TEA_ENC_TEST_VECTORS
3106 .dec = {
3107 .vecs = tea_dec_tv_template,
3108 .count = TEA_DEC_TEST_VECTORS
3112 }, {
3113 .alg = "ecb(tnepres)",
3114 .test = alg_test_skcipher,
3115 .suite = {
3116 .cipher = {
3117 .enc = {
3118 .vecs = tnepres_enc_tv_template,
3119 .count = TNEPRES_ENC_TEST_VECTORS
3121 .dec = {
3122 .vecs = tnepres_dec_tv_template,
3123 .count = TNEPRES_DEC_TEST_VECTORS
3127 }, {
3128 .alg = "ecb(twofish)",
3129 .test = alg_test_skcipher,
3130 .suite = {
3131 .cipher = {
3132 .enc = {
3133 .vecs = tf_enc_tv_template,
3134 .count = TF_ENC_TEST_VECTORS
3136 .dec = {
3137 .vecs = tf_dec_tv_template,
3138 .count = TF_DEC_TEST_VECTORS
3142 }, {
3143 .alg = "ecb(xeta)",
3144 .test = alg_test_skcipher,
3145 .suite = {
3146 .cipher = {
3147 .enc = {
3148 .vecs = xeta_enc_tv_template,
3149 .count = XETA_ENC_TEST_VECTORS
3151 .dec = {
3152 .vecs = xeta_dec_tv_template,
3153 .count = XETA_DEC_TEST_VECTORS
3157 }, {
3158 .alg = "ecb(xtea)",
3159 .test = alg_test_skcipher,
3160 .suite = {
3161 .cipher = {
3162 .enc = {
3163 .vecs = xtea_enc_tv_template,
3164 .count = XTEA_ENC_TEST_VECTORS
3166 .dec = {
3167 .vecs = xtea_dec_tv_template,
3168 .count = XTEA_DEC_TEST_VECTORS
3172 }, {
3173 .alg = "gcm(aes)",
3174 .test = alg_test_aead,
3175 .fips_allowed = 1,
3176 .suite = {
3177 .aead = {
3178 .enc = {
3179 .vecs = aes_gcm_enc_tv_template,
3180 .count = AES_GCM_ENC_TEST_VECTORS
3182 .dec = {
3183 .vecs = aes_gcm_dec_tv_template,
3184 .count = AES_GCM_DEC_TEST_VECTORS
3188 }, {
3189 .alg = "ghash",
3190 .test = alg_test_hash,
3191 .fips_allowed = 1,
3192 .suite = {
3193 .hash = {
3194 .vecs = ghash_tv_template,
3195 .count = GHASH_TEST_VECTORS
3198 }, {
3199 .alg = "hmac(crc32)",
3200 .test = alg_test_hash,
3201 .suite = {
3202 .hash = {
3203 .vecs = bfin_crc_tv_template,
3204 .count = BFIN_CRC_TEST_VECTORS
3207 }, {
3208 .alg = "hmac(md5)",
3209 .test = alg_test_hash,
3210 .suite = {
3211 .hash = {
3212 .vecs = hmac_md5_tv_template,
3213 .count = HMAC_MD5_TEST_VECTORS
3216 }, {
3217 .alg = "hmac(rmd128)",
3218 .test = alg_test_hash,
3219 .suite = {
3220 .hash = {
3221 .vecs = hmac_rmd128_tv_template,
3222 .count = HMAC_RMD128_TEST_VECTORS
3225 }, {
3226 .alg = "hmac(rmd160)",
3227 .test = alg_test_hash,
3228 .suite = {
3229 .hash = {
3230 .vecs = hmac_rmd160_tv_template,
3231 .count = HMAC_RMD160_TEST_VECTORS
3234 }, {
3235 .alg = "hmac(sha1)",
3236 .test = alg_test_hash,
3237 .fips_allowed = 1,
3238 .suite = {
3239 .hash = {
3240 .vecs = hmac_sha1_tv_template,
3241 .count = HMAC_SHA1_TEST_VECTORS
3244 }, {
3245 .alg = "hmac(sha224)",
3246 .test = alg_test_hash,
3247 .fips_allowed = 1,
3248 .suite = {
3249 .hash = {
3250 .vecs = hmac_sha224_tv_template,
3251 .count = HMAC_SHA224_TEST_VECTORS
3254 }, {
3255 .alg = "hmac(sha256)",
3256 .test = alg_test_hash,
3257 .fips_allowed = 1,
3258 .suite = {
3259 .hash = {
3260 .vecs = hmac_sha256_tv_template,
3261 .count = HMAC_SHA256_TEST_VECTORS
3264 }, {
3265 .alg = "hmac(sha384)",
3266 .test = alg_test_hash,
3267 .fips_allowed = 1,
3268 .suite = {
3269 .hash = {
3270 .vecs = hmac_sha384_tv_template,
3271 .count = HMAC_SHA384_TEST_VECTORS
3274 }, {
3275 .alg = "hmac(sha512)",
3276 .test = alg_test_hash,
3277 .fips_allowed = 1,
3278 .suite = {
3279 .hash = {
3280 .vecs = hmac_sha512_tv_template,
3281 .count = HMAC_SHA512_TEST_VECTORS
3284 }, {
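/*
 * The jitter entropy RNG is non-deterministic, so there are no fixed
 * test vectors for it; only a nop test is registered.
 */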
3285 .alg = "jitterentropy_rng",
3286 .fips_allowed = 1,
3287 .test = alg_test_null,
3288 }, {
3289 .alg = "lrw(aes)",
3290 .test = alg_test_skcipher,
3291 .suite = {
3292 .cipher = {
3293 .enc = {
3294 .vecs = aes_lrw_enc_tv_template,
3295 .count = AES_LRW_ENC_TEST_VECTORS
3297 .dec = {
3298 .vecs = aes_lrw_dec_tv_template,
3299 .count = AES_LRW_DEC_TEST_VECTORS
3303 }, {
3304 .alg = "lrw(camellia)",
3305 .test = alg_test_skcipher,
3306 .suite = {
3307 .cipher = {
3308 .enc = {
3309 .vecs = camellia_lrw_enc_tv_template,
3310 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3312 .dec = {
3313 .vecs = camellia_lrw_dec_tv_template,
3314 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3318 }, {
3319 .alg = "lrw(cast6)",
3320 .test = alg_test_skcipher,
3321 .suite = {
3322 .cipher = {
3323 .enc = {
3324 .vecs = cast6_lrw_enc_tv_template,
3325 .count = CAST6_LRW_ENC_TEST_VECTORS
3327 .dec = {
3328 .vecs = cast6_lrw_dec_tv_template,
3329 .count = CAST6_LRW_DEC_TEST_VECTORS
3333 }, {
3334 .alg = "lrw(serpent)",
3335 .test = alg_test_skcipher,
3336 .suite = {
3337 .cipher = {
3338 .enc = {
3339 .vecs = serpent_lrw_enc_tv_template,
3340 .count = SERPENT_LRW_ENC_TEST_VECTORS
3342 .dec = {
3343 .vecs = serpent_lrw_dec_tv_template,
3344 .count = SERPENT_LRW_DEC_TEST_VECTORS
3348 }, {
3349 .alg = "lrw(twofish)",
3350 .test = alg_test_skcipher,
3351 .suite = {
3352 .cipher = {
3353 .enc = {
3354 .vecs = tf_lrw_enc_tv_template,
3355 .count = TF_LRW_ENC_TEST_VECTORS
3357 .dec = {
3358 .vecs = tf_lrw_dec_tv_template,
3359 .count = TF_LRW_DEC_TEST_VECTORS
3363 }, {
3364 .alg = "lz4",
3365 .test = alg_test_comp,
3366 .fips_allowed = 1,
3367 .suite = {
3368 .comp = {
3369 .comp = {
3370 .vecs = lz4_comp_tv_template,
3371 .count = LZ4_COMP_TEST_VECTORS
3373 .decomp = {
3374 .vecs = lz4_decomp_tv_template,
3375 .count = LZ4_DECOMP_TEST_VECTORS
3379 }, {
3380 .alg = "lz4hc",
3381 .test = alg_test_comp,
3382 .fips_allowed = 1,
3383 .suite = {
3384 .comp = {
3385 .comp = {
3386 .vecs = lz4hc_comp_tv_template,
3387 .count = LZ4HC_COMP_TEST_VECTORS
3389 .decomp = {
3390 .vecs = lz4hc_decomp_tv_template,
3391 .count = LZ4HC_DECOMP_TEST_VECTORS
3395 }, {
3396 .alg = "lzo",
3397 .test = alg_test_comp,
3398 .fips_allowed = 1,
3399 .suite = {
3400 .comp = {
3401 .comp = {
3402 .vecs = lzo_comp_tv_template,
3403 .count = LZO_COMP_TEST_VECTORS
3405 .decomp = {
3406 .vecs = lzo_decomp_tv_template,
3407 .count = LZO_DECOMP_TEST_VECTORS
3411 }, {
3412 .alg = "md4",
3413 .test = alg_test_hash,
3414 .suite = {
3415 .hash = {
3416 .vecs = md4_tv_template,
3417 .count = MD4_TEST_VECTORS
3420 }, {
3421 .alg = "md5",
3422 .test = alg_test_hash,
3423 .suite = {
3424 .hash = {
3425 .vecs = md5_tv_template,
3426 .count = MD5_TEST_VECTORS
3429 }, {
3430 .alg = "michael_mic",
3431 .test = alg_test_hash,
3432 .suite = {
3433 .hash = {
3434 .vecs = michael_mic_tv_template,
3435 .count = MICHAEL_MIC_TEST_VECTORS
3438 }, {
3439 .alg = "ofb(aes)",
3440 .test = alg_test_skcipher,
3441 .fips_allowed = 1,
3442 .suite = {
3443 .cipher = {
3444 .enc = {
3445 .vecs = aes_ofb_enc_tv_template,
3446 .count = AES_OFB_ENC_TEST_VECTORS
3448 .dec = {
3449 .vecs = aes_ofb_dec_tv_template,
3450 .count = AES_OFB_DEC_TEST_VECTORS
3454 }, {
3455 .alg = "pcbc(fcrypt)",
3456 .test = alg_test_skcipher,
3457 .suite = {
3458 .cipher = {
3459 .enc = {
3460 .vecs = fcrypt_pcbc_enc_tv_template,
3461 .count = FCRYPT_ENC_TEST_VECTORS
3463 .dec = {
3464 .vecs = fcrypt_pcbc_dec_tv_template,
3465 .count = FCRYPT_DEC_TEST_VECTORS
3469 }, {
3470 .alg = "poly1305",
3471 .test = alg_test_hash,
3472 .suite = {
3473 .hash = {
3474 .vecs = poly1305_tv_template,
3475 .count = POLY1305_TEST_VECTORS
3478 }, {
3479 .alg = "rfc3686(ctr(aes))",
3480 .test = alg_test_skcipher,
3481 .fips_allowed = 1,
3482 .suite = {
3483 .cipher = {
3484 .enc = {
3485 .vecs = aes_ctr_rfc3686_enc_tv_template,
3486 .count = AES_CTR_3686_ENC_TEST_VECTORS
3488 .dec = {
3489 .vecs = aes_ctr_rfc3686_dec_tv_template,
3490 .count = AES_CTR_3686_DEC_TEST_VECTORS
3494 }, {
3495 .alg = "rfc4106(gcm(aes))",
3496 .test = alg_test_aead,
3497 .fips_allowed = 1,
3498 .suite = {
3499 .aead = {
3500 .enc = {
3501 .vecs = aes_gcm_rfc4106_enc_tv_template,
3502 .count = AES_GCM_4106_ENC_TEST_VECTORS
3504 .dec = {
3505 .vecs = aes_gcm_rfc4106_dec_tv_template,
3506 .count = AES_GCM_4106_DEC_TEST_VECTORS
3510 }, {
3511 .alg = "rfc4309(ccm(aes))",
3512 .test = alg_test_aead,
3513 .fips_allowed = 1,
3514 .suite = {
3515 .aead = {
3516 .enc = {
3517 .vecs = aes_ccm_rfc4309_enc_tv_template,
3518 .count = AES_CCM_4309_ENC_TEST_VECTORS
3520 .dec = {
3521 .vecs = aes_ccm_rfc4309_dec_tv_template,
3522 .count = AES_CCM_4309_DEC_TEST_VECTORS
3526 }, {
3527 .alg = "rfc4543(gcm(aes))",
3528 .test = alg_test_aead,
3529 .suite = {
3530 .aead = {
3531 .enc = {
3532 .vecs = aes_gcm_rfc4543_enc_tv_template,
3533 .count = AES_GCM_4543_ENC_TEST_VECTORS
3535 .dec = {
3536 .vecs = aes_gcm_rfc4543_dec_tv_template,
3537 .count = AES_GCM_4543_DEC_TEST_VECTORS
3541 }, {
3542 .alg = "rfc7539(chacha20,poly1305)",
3543 .test = alg_test_aead,
3544 .suite = {
3545 .aead = {
3546 .enc = {
3547 .vecs = rfc7539_enc_tv_template,
3548 .count = RFC7539_ENC_TEST_VECTORS
3550 .dec = {
3551 .vecs = rfc7539_dec_tv_template,
3552 .count = RFC7539_DEC_TEST_VECTORS
3556 }, {
3557 .alg = "rfc7539esp(chacha20,poly1305)",
3558 .test = alg_test_aead,
3559 .suite = {
3560 .aead = {
3561 .enc = {
3562 .vecs = rfc7539esp_enc_tv_template,
3563 .count = RFC7539ESP_ENC_TEST_VECTORS
3565 .dec = {
3566 .vecs = rfc7539esp_dec_tv_template,
3567 .count = RFC7539ESP_DEC_TEST_VECTORS
3571 }, {
3572 .alg = "rmd128",
3573 .test = alg_test_hash,
3574 .suite = {
3575 .hash = {
3576 .vecs = rmd128_tv_template,
3577 .count = RMD128_TEST_VECTORS
3580 }, {
3581 .alg = "rmd160",
3582 .test = alg_test_hash,
3583 .suite = {
3584 .hash = {
3585 .vecs = rmd160_tv_template,
3586 .count = RMD160_TEST_VECTORS
3589 }, {
3590 .alg = "rmd256",
3591 .test = alg_test_hash,
3592 .suite = {
3593 .hash = {
3594 .vecs = rmd256_tv_template,
3595 .count = RMD256_TEST_VECTORS
3598 }, {
3599 .alg = "rmd320",
3600 .test = alg_test_hash,
3601 .suite = {
3602 .hash = {
3603 .vecs = rmd320_tv_template,
3604 .count = RMD320_TEST_VECTORS
3607 }, {
3608 .alg = "rsa",
3609 .test = alg_test_akcipher,
3610 .fips_allowed = 1,
3611 .suite = {
3612 .akcipher = {
3613 .vecs = rsa_tv_template,
3614 .count = RSA_TEST_VECTORS
3617 }, {
3618 .alg = "salsa20",
3619 .test = alg_test_skcipher,
3620 .suite = {
3621 .cipher = {
3622 .enc = {
3623 .vecs = salsa20_stream_enc_tv_template,
3624 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3628 }, {
3629 .alg = "sha1",
3630 .test = alg_test_hash,
3631 .fips_allowed = 1,
3632 .suite = {
3633 .hash = {
3634 .vecs = sha1_tv_template,
3635 .count = SHA1_TEST_VECTORS
3638 }, {
3639 .alg = "sha224",
3640 .test = alg_test_hash,
3641 .fips_allowed = 1,
3642 .suite = {
3643 .hash = {
3644 .vecs = sha224_tv_template,
3645 .count = SHA224_TEST_VECTORS
3648 }, {
3649 .alg = "sha256",
3650 .test = alg_test_hash,
3651 .fips_allowed = 1,
3652 .suite = {
3653 .hash = {
3654 .vecs = sha256_tv_template,
3655 .count = SHA256_TEST_VECTORS
3658 }, {
3659 .alg = "sha384",
3660 .test = alg_test_hash,
3661 .fips_allowed = 1,
3662 .suite = {
3663 .hash = {
3664 .vecs = sha384_tv_template,
3665 .count = SHA384_TEST_VECTORS
3668 }, {
3669 .alg = "sha512",
3670 .test = alg_test_hash,
3671 .fips_allowed = 1,
3672 .suite = {
3673 .hash = {
3674 .vecs = sha512_tv_template,
3675 .count = SHA512_TEST_VECTORS
3678 }, {
3679 .alg = "tgr128",
3680 .test = alg_test_hash,
3681 .suite = {
3682 .hash = {
3683 .vecs = tgr128_tv_template,
3684 .count = TGR128_TEST_VECTORS
3687 }, {
3688 .alg = "tgr160",
3689 .test = alg_test_hash,
3690 .suite = {
3691 .hash = {
3692 .vecs = tgr160_tv_template,
3693 .count = TGR160_TEST_VECTORS
3696 }, {
3697 .alg = "tgr192",
3698 .test = alg_test_hash,
3699 .suite = {
3700 .hash = {
3701 .vecs = tgr192_tv_template,
3702 .count = TGR192_TEST_VECTORS
3705 }, {
3706 .alg = "vmac(aes)",
3707 .test = alg_test_hash,
3708 .suite = {
3709 .hash = {
3710 .vecs = aes_vmac128_tv_template,
3711 .count = VMAC_AES_TEST_VECTORS
3714 }, {
3715 .alg = "wp256",
3716 .test = alg_test_hash,
3717 .suite = {
3718 .hash = {
3719 .vecs = wp256_tv_template,
3720 .count = WP256_TEST_VECTORS
3723 }, {
3724 .alg = "wp384",
3725 .test = alg_test_hash,
3726 .suite = {
3727 .hash = {
3728 .vecs = wp384_tv_template,
3729 .count = WP384_TEST_VECTORS
3732 }, {
3733 .alg = "wp512",
3734 .test = alg_test_hash,
3735 .suite = {
3736 .hash = {
3737 .vecs = wp512_tv_template,
3738 .count = WP512_TEST_VECTORS
3741 }, {
3742 .alg = "xcbc(aes)",
3743 .test = alg_test_hash,
3744 .suite = {
3745 .hash = {
3746 .vecs = aes_xcbc128_tv_template,
3747 .count = XCBC_AES_TEST_VECTORS
3750 }, {
3751 .alg = "xts(aes)",
3752 .test = alg_test_skcipher,
3753 .fips_allowed = 1,
3754 .suite = {
3755 .cipher = {
3756 .enc = {
3757 .vecs = aes_xts_enc_tv_template,
3758 .count = AES_XTS_ENC_TEST_VECTORS
3760 .dec = {
3761 .vecs = aes_xts_dec_tv_template,
3762 .count = AES_XTS_DEC_TEST_VECTORS
3766 }, {
3767 .alg = "xts(camellia)",
3768 .test = alg_test_skcipher,
3769 .suite = {
3770 .cipher = {
3771 .enc = {
3772 .vecs = camellia_xts_enc_tv_template,
3773 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3775 .dec = {
3776 .vecs = camellia_xts_dec_tv_template,
3777 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3781 }, {
3782 .alg = "xts(cast6)",
3783 .test = alg_test_skcipher,
3784 .suite = {
3785 .cipher = {
3786 .enc = {
3787 .vecs = cast6_xts_enc_tv_template,
3788 .count = CAST6_XTS_ENC_TEST_VECTORS
3790 .dec = {
3791 .vecs = cast6_xts_dec_tv_template,
3792 .count = CAST6_XTS_DEC_TEST_VECTORS
3796 }, {
3797 .alg = "xts(serpent)",
3798 .test = alg_test_skcipher,
3799 .suite = {
3800 .cipher = {
3801 .enc = {
3802 .vecs = serpent_xts_enc_tv_template,
3803 .count = SERPENT_XTS_ENC_TEST_VECTORS
3805 .dec = {
3806 .vecs = serpent_xts_dec_tv_template,
3807 .count = SERPENT_XTS_DEC_TEST_VECTORS
3811 }, {
3812 .alg = "xts(twofish)",
3813 .test = alg_test_skcipher,
3814 .suite = {
3815 .cipher = {
3816 .enc = {
3817 .vecs = tf_xts_enc_tv_template,
3818 .count = TF_XTS_ENC_TEST_VECTORS
3820 .dec = {
3821 .vecs = tf_xts_dec_tv_template,
3822 .count = TF_XTS_DEC_TEST_VECTORS
3826 }, {
3827 .alg = "zlib",
3828 .test = alg_test_pcomp,
3829 .fips_allowed = 1,
3830 .suite = {
3831 .pcomp = {
3832 .comp = {
3833 .vecs = zlib_comp_tv_template,
3834 .count = ZLIB_COMP_TEST_VECTORS
3836 .decomp = {
3837 .vecs = zlib_decomp_tv_template,
3838 .count = ZLIB_DECOMP_TEST_VECTORS
3845 static bool alg_test_descs_checked;
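/*
 * alg_test_descs[] above must be kept sorted by .alg, since alg_find_test()
 * binary-searches it.  This check runs once, on first use, and warns about
 * out-of-order or duplicate entries.
 */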
3847 static void alg_test_descs_check_order(void)
3848 {
3849 int i;
3851 /* only check once */
3852 if (alg_test_descs_checked)
3853 return;
3855 alg_test_descs_checked = true;
3857 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3858 int diff = strcmp(alg_test_descs[i - 1].alg,
3859 alg_test_descs[i].alg);
3861 if (WARN_ON(diff > 0)) {
3862 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3863 alg_test_descs[i - 1].alg,
3864 alg_test_descs[i].alg);
3865 }
3867 if (WARN_ON(diff == 0)) {
3868 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3869 alg_test_descs[i].alg);
3870 }
3871 }
3872 }
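/*
 * Binary search of the sorted alg_test_descs[] table: returns the index of
 * the matching entry, or -1 if the algorithm has no registered test.
 */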
3874 static int alg_find_test(const char *alg)
3875 {
3876 int start = 0;
3877 int end = ARRAY_SIZE(alg_test_descs);
3879 while (start < end) {
3880 int i = (start + end) / 2;
3881 int diff = strcmp(alg_test_descs[i].alg, alg);
3883 if (diff > 0) {
3884 end = i;
3885 continue;
3886 }
3888 if (diff < 0) {
3889 start = i + 1;
3890 continue;
3891 }
3893 return i;
3894 }
3896 return -1;
3897 }
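/*
 * Self-test entry point.  Plain block ciphers (CRYPTO_ALG_TYPE_CIPHER) are
 * tested through their "ecb(<alg>)" entry; for everything else both the
 * algorithm name and the driver name are looked up and any matching tests
 * are run.  Algorithms without a test entry are reported via "No test for"
 * and pass by default; in FIPS mode a test failure panics the kernel and a
 * non-approved algorithm is rejected with -EINVAL.
 */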
3899 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3900 {
3901 int i;
3902 int j;
3903 int rc;
3905 alg_test_descs_check_order();
3907 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3908 char nalg[CRYPTO_MAX_ALG_NAME];
3910 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3911 sizeof(nalg))
3912 return -ENAMETOOLONG;
3914 i = alg_find_test(nalg);
3915 if (i < 0)
3916 goto notest;
3918 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3919 goto non_fips_alg;
3921 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3922 goto test_done;
3923 }
3925 i = alg_find_test(alg);
3926 j = alg_find_test(driver);
3927 if (i < 0 && j < 0)
3928 goto notest;
3930 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3931 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3932 goto non_fips_alg;
3934 rc = 0;
3935 if (i >= 0)
3936 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3937 type, mask);
3938 if (j >= 0 && j != i)
3939 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3940 type, mask);
3942 test_done:
3943 if (fips_enabled && rc)
3944 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3946 if (fips_enabled && !rc)
3947 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3949 return rc;
3951 notest:
3952 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3953 return 0;
3954 non_fips_alg:
3955 return -EINVAL;
3956 }
3958 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3960 EXPORT_SYMBOL_GPL(alg_test);
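/*
 * Rough call-path sketch (assumption, not part of this file): when a new
 * algorithm is registered, the crypto manager ends up calling something like
 *
 *	err = alg_test(alg->cra_driver_name, alg->cra_name, type, mask);
 *
 * and treats a non-zero return as a failed self-test.
 */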