crypto/testmgr.c
1 /*
2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
23 #include <crypto/hash.h>
24 #include <linux/err.h>
25 #include <linux/module.h>
26 #include <linux/scatterlist.h>
27 #include <linux/slab.h>
28 #include <linux/string.h>
29 #include <crypto/rng.h>
30 #include <crypto/drbg.h>
32 #include "internal.h"
34 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
36 /* a perfect nop */
37 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
39 return 0;
42 #else
44 #include "testmgr.h"
47 * Need slab memory for testing (size in number of pages).
49 #define XBUFSIZE 8
52 * Indexes into the xbuf to simulate cross-page access.
54 #define IDX1 32
55 #define IDX2 32400
56 #define IDX3 1
57 #define IDX4 8193
58 #define IDX5 22222
59 #define IDX6 17101
60 #define IDX7 27333
61 #define IDX8 3000
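/*
 * Each IDX value is a byte offset into the xbuf page array: the page is
 * picked with IDX[k] >> PAGE_SHIFT and the offset within that page with
 * offset_in_page(IDX[k]), so chunked test data deliberately straddles
 * page boundaries.
 */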
64 * Used by test_cipher()
66 #define ENCRYPT 1
67 #define DECRYPT 0
69 struct tcrypt_result {
70 struct completion completion;
71 int err;
74 struct aead_test_suite {
75 struct {
76 struct aead_testvec *vecs;
77 unsigned int count;
78 } enc, dec;
81 struct cipher_test_suite {
82 struct {
83 struct cipher_testvec *vecs;
84 unsigned int count;
85 } enc, dec;
88 struct comp_test_suite {
89 struct {
90 struct comp_testvec *vecs;
91 unsigned int count;
92 } comp, decomp;
95 struct pcomp_test_suite {
96 struct {
97 struct pcomp_testvec *vecs;
98 unsigned int count;
99 } comp, decomp;
102 struct hash_test_suite {
103 struct hash_testvec *vecs;
104 unsigned int count;
107 struct cprng_test_suite {
108 struct cprng_testvec *vecs;
109 unsigned int count;
112 struct drbg_test_suite {
113 struct drbg_testvec *vecs;
114 unsigned int count;
117 struct alg_test_desc {
118 const char *alg;
119 int (*test)(const struct alg_test_desc *desc, const char *driver,
120 u32 type, u32 mask);
121 int fips_allowed; /* set if alg is allowed in fips mode */
123 union {
124 struct aead_test_suite aead;
125 struct cipher_test_suite cipher;
126 struct comp_test_suite comp;
127 struct pcomp_test_suite pcomp;
128 struct hash_test_suite hash;
129 struct cprng_test_suite cprng;
130 struct drbg_test_suite drbg;
131 } suite;
134 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
136 static void hexdump(unsigned char *buf, unsigned int len)
138 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
139 16, 1,
140 buf, len, false);
143 static void tcrypt_complete(struct crypto_async_request *req, int err)
145 struct tcrypt_result *res = req->data;
147 if (err == -EINPROGRESS)
148 return;
150 res->err = err;
151 complete(&res->completion);
154 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
156 int i;
158 for (i = 0; i < XBUFSIZE; i++) {
159 buf[i] = (void *)__get_free_page(GFP_KERNEL);
160 if (!buf[i])
161 goto err_free_buf;
164 return 0;
166 err_free_buf:
167 while (i-- > 0)
168 free_page((unsigned long)buf[i]);
170 return -ENOMEM;
173 static void testmgr_free_buf(char *buf[XBUFSIZE])
175 int i;
177 for (i = 0; i < XBUFSIZE; i++)
178 free_page((unsigned long)buf[i]);
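/*
 * Wait for an asynchronous hash operation that returned -EINPROGRESS or
 * -EBUSY to complete, then return the completion status reported through
 * the tcrypt_result callback.
 */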
181 static int do_one_async_hash_op(struct ahash_request *req,
182 struct tcrypt_result *tr,
183 int ret)
185 if (ret == -EINPROGRESS || ret == -EBUSY) {
186 ret = wait_for_completion_interruptible(&tr->completion);
187 if (!ret)
188 ret = tr->err;
189 reinit_completion(&tr->completion);
191 return ret;
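/*
 * __test_hash() makes two passes over the template: linear vectors
 * (np == 0) are hashed from a single buffer, either via digest() or via
 * init()/update()/final() depending on use_digest, while chunked vectors
 * (np != 0) are re-run with the plaintext scattered across pages at the
 * IDX offsets to exercise scatterlist handling.
 */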
194 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
195 unsigned int tcount, bool use_digest,
196 const int align_offset)
198 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
199 unsigned int i, j, k, temp;
200 struct scatterlist sg[8];
201 char *result;
202 char *key;
203 struct ahash_request *req;
204 struct tcrypt_result tresult;
205 void *hash_buff;
206 char *xbuf[XBUFSIZE];
207 int ret = -ENOMEM;
209 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
210 if (!result)
211 return ret;
212 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
213 if (!key)
214 goto out_nobuf;
215 if (testmgr_alloc_buf(xbuf))
216 goto out_nobuf;
218 init_completion(&tresult.completion);
220 req = ahash_request_alloc(tfm, GFP_KERNEL);
221 if (!req) {
222 printk(KERN_ERR "alg: hash: Failed to allocate request for "
223 "%s\n", algo);
224 goto out_noreq;
226 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
227 tcrypt_complete, &tresult);
229 j = 0;
230 for (i = 0; i < tcount; i++) {
231 if (template[i].np)
232 continue;
234 ret = -EINVAL;
235 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
236 goto out;
238 j++;
239 memset(result, 0, MAX_DIGEST_SIZE);
241 hash_buff = xbuf[0];
242 hash_buff += align_offset;
244 memcpy(hash_buff, template[i].plaintext, template[i].psize);
245 sg_init_one(&sg[0], hash_buff, template[i].psize);
247 if (template[i].ksize) {
248 crypto_ahash_clear_flags(tfm, ~0);
249 if (template[i].ksize > MAX_KEYLEN) {
250 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
251 j, algo, template[i].ksize, MAX_KEYLEN);
252 ret = -EINVAL;
253 goto out;
255 memcpy(key, template[i].key, template[i].ksize);
256 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
257 if (ret) {
258 printk(KERN_ERR "alg: hash: setkey failed on "
259 "test %d for %s: ret=%d\n", j, algo,
260 -ret);
261 goto out;
265 ahash_request_set_crypt(req, sg, result, template[i].psize);
266 if (use_digest) {
267 ret = do_one_async_hash_op(req, &tresult,
268 crypto_ahash_digest(req));
269 if (ret) {
270 pr_err("alg: hash: digest failed on test %d "
271 "for %s: ret=%d\n", j, algo, -ret);
272 goto out;
274 } else {
275 ret = do_one_async_hash_op(req, &tresult,
276 crypto_ahash_init(req));
277 if (ret) {
278 pr_err("alt: hash: init failed on test %d "
279 "for %s: ret=%d\n", j, algo, -ret);
280 goto out;
282 ret = do_one_async_hash_op(req, &tresult,
283 crypto_ahash_update(req));
284 if (ret) {
285 pr_err("alt: hash: update failed on test %d "
286 "for %s: ret=%d\n", j, algo, -ret);
287 goto out;
289 ret = do_one_async_hash_op(req, &tresult,
290 crypto_ahash_final(req));
291 if (ret) {
292 pr_err("alt: hash: final failed on test %d "
293 "for %s: ret=%d\n", j, algo, -ret);
294 goto out;
298 if (memcmp(result, template[i].digest,
299 crypto_ahash_digestsize(tfm))) {
300 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
301 j, algo);
302 hexdump(result, crypto_ahash_digestsize(tfm));
303 ret = -EINVAL;
304 goto out;
308 j = 0;
309 for (i = 0; i < tcount; i++) {
310                 /* alignment tests are only done with contiguous buffers */
311 if (align_offset != 0)
312 break;
314 if (template[i].np) {
315 j++;
316 memset(result, 0, MAX_DIGEST_SIZE);
318 temp = 0;
319 sg_init_table(sg, template[i].np);
320 ret = -EINVAL;
321 for (k = 0; k < template[i].np; k++) {
322 if (WARN_ON(offset_in_page(IDX[k]) +
323 template[i].tap[k] > PAGE_SIZE))
324 goto out;
325 sg_set_buf(&sg[k],
326 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
327 offset_in_page(IDX[k]),
328 template[i].plaintext + temp,
329 template[i].tap[k]),
330 template[i].tap[k]);
331 temp += template[i].tap[k];
334 if (template[i].ksize) {
335 if (template[i].ksize > MAX_KEYLEN) {
336 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
337 j, algo, template[i].ksize,
338 MAX_KEYLEN);
339 ret = -EINVAL;
340 goto out;
342 crypto_ahash_clear_flags(tfm, ~0);
343 memcpy(key, template[i].key, template[i].ksize);
344 ret = crypto_ahash_setkey(tfm, key,
345 template[i].ksize);
347 if (ret) {
348 printk(KERN_ERR "alg: hash: setkey "
349 "failed on chunking test %d "
350 "for %s: ret=%d\n", j, algo,
351 -ret);
352 goto out;
356 ahash_request_set_crypt(req, sg, result,
357 template[i].psize);
358 ret = crypto_ahash_digest(req);
359 switch (ret) {
360 case 0:
361 break;
362 case -EINPROGRESS:
363 case -EBUSY:
364 ret = wait_for_completion_interruptible(
365 &tresult.completion);
366 if (!ret && !(ret = tresult.err)) {
367 reinit_completion(&tresult.completion);
368 break;
370 /* fall through */
371 default:
372 printk(KERN_ERR "alg: hash: digest failed "
373 "on chunking test %d for %s: "
374 "ret=%d\n", j, algo, -ret);
375 goto out;
378 if (memcmp(result, template[i].digest,
379 crypto_ahash_digestsize(tfm))) {
380 printk(KERN_ERR "alg: hash: Chunking test %d "
381 "failed for %s\n", j, algo);
382 hexdump(result, crypto_ahash_digestsize(tfm));
383 ret = -EINVAL;
384 goto out;
389 ret = 0;
391 out:
392 ahash_request_free(req);
393 out_noreq:
394 testmgr_free_buf(xbuf);
395 out_nobuf:
396 kfree(key);
397 kfree(result);
398 return ret;
401 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
402 unsigned int tcount, bool use_digest)
404 unsigned int alignmask;
405 int ret;
407 ret = __test_hash(tfm, template, tcount, use_digest, 0);
408 if (ret)
409 return ret;
411 /* test unaligned buffers, check with one byte offset */
412 ret = __test_hash(tfm, template, tcount, use_digest, 1);
413 if (ret)
414 return ret;
416 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
417 if (alignmask) {
418 /* Check if alignment mask for tfm is correctly set. */
419 ret = __test_hash(tfm, template, tcount, use_digest,
420 alignmask + 1);
421 if (ret)
422 return ret;
425 return 0;
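/*
 * __test_aead() mirrors __test_hash(): linear vectors are run first from
 * contiguous buffers, then chunked vectors are replayed through
 * scatterlists built at the IDX offsets.  The authentication tag size is
 * derived from the vectors as abs(rlen - ilen), and diff_dst selects a
 * separate destination buffer so both in-place and out-of-place paths are
 * covered.
 */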
428 static int __test_aead(struct crypto_aead *tfm, int enc,
429 struct aead_testvec *template, unsigned int tcount,
430 const bool diff_dst, const int align_offset)
432 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
433 unsigned int i, j, k, n, temp;
434 int ret = -ENOMEM;
435 char *q;
436 char *key;
437 struct aead_request *req;
438 struct scatterlist *sg;
439 struct scatterlist *asg;
440 struct scatterlist *sgout;
441 const char *e, *d;
442 struct tcrypt_result result;
443 unsigned int authsize;
444 void *input;
445 void *output;
446 void *assoc;
447 char *iv;
448 char *xbuf[XBUFSIZE];
449 char *xoutbuf[XBUFSIZE];
450 char *axbuf[XBUFSIZE];
452 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
453 if (!iv)
454 return ret;
455 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
456 if (!key)
457 goto out_noxbuf;
458 if (testmgr_alloc_buf(xbuf))
459 goto out_noxbuf;
460 if (testmgr_alloc_buf(axbuf))
461 goto out_noaxbuf;
462 if (diff_dst && testmgr_alloc_buf(xoutbuf))
463 goto out_nooutbuf;
465 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
466 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
467 if (!sg)
468 goto out_nosg;
469 asg = &sg[8];
470 sgout = &asg[8];
472 if (diff_dst)
473 d = "-ddst";
474 else
475 d = "";
477 if (enc == ENCRYPT)
478 e = "encryption";
479 else
480 e = "decryption";
482 init_completion(&result.completion);
484 req = aead_request_alloc(tfm, GFP_KERNEL);
485 if (!req) {
486 pr_err("alg: aead%s: Failed to allocate request for %s\n",
487 d, algo);
488 goto out;
491 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
492 tcrypt_complete, &result);
494 for (i = 0, j = 0; i < tcount; i++) {
495 if (!template[i].np) {
496 j++;
498 /* some templates have no input data but they will
499 * touch input
501 input = xbuf[0];
502 input += align_offset;
503 assoc = axbuf[0];
505 ret = -EINVAL;
506 if (WARN_ON(align_offset + template[i].ilen >
507 PAGE_SIZE || template[i].alen > PAGE_SIZE))
508 goto out;
510 memcpy(input, template[i].input, template[i].ilen);
511 memcpy(assoc, template[i].assoc, template[i].alen);
512 if (template[i].iv)
513 memcpy(iv, template[i].iv, MAX_IVLEN);
514 else
515 memset(iv, 0, MAX_IVLEN);
517 crypto_aead_clear_flags(tfm, ~0);
518 if (template[i].wk)
519 crypto_aead_set_flags(
520 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
522 if (template[i].klen > MAX_KEYLEN) {
523 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
524 d, j, algo, template[i].klen,
525 MAX_KEYLEN);
526 ret = -EINVAL;
527 goto out;
529 memcpy(key, template[i].key, template[i].klen);
531 ret = crypto_aead_setkey(tfm, key,
532 template[i].klen);
533 if (!ret == template[i].fail) {
534 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
535 d, j, algo, crypto_aead_get_flags(tfm));
536 goto out;
537 } else if (ret)
538 continue;
540 authsize = abs(template[i].rlen - template[i].ilen);
541 ret = crypto_aead_setauthsize(tfm, authsize);
542 if (ret) {
543 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
544 d, authsize, j, algo);
545 goto out;
548 if (diff_dst) {
549 output = xoutbuf[0];
550 output += align_offset;
551 sg_init_one(&sg[0], input, template[i].ilen);
552 sg_init_one(&sgout[0], output,
553 template[i].rlen);
554 } else {
555 sg_init_one(&sg[0], input,
556 template[i].ilen +
557 (enc ? authsize : 0));
558 output = input;
561 sg_init_one(&asg[0], assoc, template[i].alen);
563 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
564 template[i].ilen, iv);
566 aead_request_set_assoc(req, asg, template[i].alen);
568 ret = enc ?
569 crypto_aead_encrypt(req) :
570 crypto_aead_decrypt(req);
572 switch (ret) {
573 case 0:
574 if (template[i].novrfy) {
575 /* verification was supposed to fail */
576 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
577 d, e, j, algo);
578 /* so really, we got a bad message */
579 ret = -EBADMSG;
580 goto out;
582 break;
583 case -EINPROGRESS:
584 case -EBUSY:
585 ret = wait_for_completion_interruptible(
586 &result.completion);
587 if (!ret && !(ret = result.err)) {
588 reinit_completion(&result.completion);
589 break;
591 case -EBADMSG:
592 if (template[i].novrfy)
593 /* verification failure was expected */
594 continue;
595 /* fall through */
596 default:
597 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
598 d, e, j, algo, -ret);
599 goto out;
602 q = output;
603 if (memcmp(q, template[i].result, template[i].rlen)) {
604 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
605 d, j, e, algo);
606 hexdump(q, template[i].rlen);
607 ret = -EINVAL;
608 goto out;
613 for (i = 0, j = 0; i < tcount; i++) {
614                 /* alignment tests are only done with contiguous buffers */
615 if (align_offset != 0)
616 break;
618 if (template[i].np) {
619 j++;
621 if (template[i].iv)
622 memcpy(iv, template[i].iv, MAX_IVLEN);
623 else
624 memset(iv, 0, MAX_IVLEN);
626 crypto_aead_clear_flags(tfm, ~0);
627 if (template[i].wk)
628 crypto_aead_set_flags(
629 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
630 if (template[i].klen > MAX_KEYLEN) {
631 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
632 d, j, algo, template[i].klen,
633 MAX_KEYLEN);
634 ret = -EINVAL;
635 goto out;
637 memcpy(key, template[i].key, template[i].klen);
639 ret = crypto_aead_setkey(tfm, key, template[i].klen);
640 if (!ret == template[i].fail) {
641 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
642 d, j, algo, crypto_aead_get_flags(tfm));
643 goto out;
644 } else if (ret)
645 continue;
647 authsize = abs(template[i].rlen - template[i].ilen);
649 ret = -EINVAL;
650 sg_init_table(sg, template[i].np);
651 if (diff_dst)
652 sg_init_table(sgout, template[i].np);
653 for (k = 0, temp = 0; k < template[i].np; k++) {
654 if (WARN_ON(offset_in_page(IDX[k]) +
655 template[i].tap[k] > PAGE_SIZE))
656 goto out;
658 q = xbuf[IDX[k] >> PAGE_SHIFT] +
659 offset_in_page(IDX[k]);
661 memcpy(q, template[i].input + temp,
662 template[i].tap[k]);
664 sg_set_buf(&sg[k], q, template[i].tap[k]);
666 if (diff_dst) {
667 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
668 offset_in_page(IDX[k]);
670 memset(q, 0, template[i].tap[k]);
672 sg_set_buf(&sgout[k], q,
673 template[i].tap[k]);
676 n = template[i].tap[k];
677 if (k == template[i].np - 1 && enc)
678 n += authsize;
679 if (offset_in_page(q) + n < PAGE_SIZE)
680 q[n] = 0;
682 temp += template[i].tap[k];
685 ret = crypto_aead_setauthsize(tfm, authsize);
686 if (ret) {
687 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
688 d, authsize, j, algo);
689 goto out;
692 if (enc) {
693 if (WARN_ON(sg[k - 1].offset +
694 sg[k - 1].length + authsize >
695 PAGE_SIZE)) {
696 ret = -EINVAL;
697 goto out;
700 if (diff_dst)
701 sgout[k - 1].length += authsize;
702 else
703 sg[k - 1].length += authsize;
706 sg_init_table(asg, template[i].anp);
707 ret = -EINVAL;
708 for (k = 0, temp = 0; k < template[i].anp; k++) {
709 if (WARN_ON(offset_in_page(IDX[k]) +
710 template[i].atap[k] > PAGE_SIZE))
711 goto out;
712 sg_set_buf(&asg[k],
713 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
714 offset_in_page(IDX[k]),
715 template[i].assoc + temp,
716 template[i].atap[k]),
717 template[i].atap[k]);
718 temp += template[i].atap[k];
721 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
722 template[i].ilen,
723 iv);
725 aead_request_set_assoc(req, asg, template[i].alen);
727 ret = enc ?
728 crypto_aead_encrypt(req) :
729 crypto_aead_decrypt(req);
731 switch (ret) {
732 case 0:
733 if (template[i].novrfy) {
734 /* verification was supposed to fail */
735 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
736 d, e, j, algo);
737 /* so really, we got a bad message */
738 ret = -EBADMSG;
739 goto out;
741 break;
742 case -EINPROGRESS:
743 case -EBUSY:
744 ret = wait_for_completion_interruptible(
745 &result.completion);
746 if (!ret && !(ret = result.err)) {
747 reinit_completion(&result.completion);
748 break;
750 case -EBADMSG:
751 if (template[i].novrfy)
752 /* verification failure was expected */
753 continue;
754 /* fall through */
755 default:
756 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
757 d, e, j, algo, -ret);
758 goto out;
761 ret = -EINVAL;
762 for (k = 0, temp = 0; k < template[i].np; k++) {
763 if (diff_dst)
764 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
765 offset_in_page(IDX[k]);
766 else
767 q = xbuf[IDX[k] >> PAGE_SHIFT] +
768 offset_in_page(IDX[k]);
770 n = template[i].tap[k];
771 if (k == template[i].np - 1)
772 n += enc ? authsize : -authsize;
774 if (memcmp(q, template[i].result + temp, n)) {
775 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
776 d, j, e, k, algo);
777 hexdump(q, n);
778 goto out;
781 q += n;
782 if (k == template[i].np - 1 && !enc) {
783 if (!diff_dst &&
784 memcmp(q, template[i].input +
785 temp + n, authsize))
786 n = authsize;
787 else
788 n = 0;
789 } else {
790 for (n = 0; offset_in_page(q + n) &&
791 q[n]; n++)
794 if (n) {
795 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
796 d, j, e, k, algo, n);
797 hexdump(q, n);
798 goto out;
801 temp += template[i].tap[k];
806 ret = 0;
808 out:
809 aead_request_free(req);
810 kfree(sg);
811 out_nosg:
812 if (diff_dst)
813 testmgr_free_buf(xoutbuf);
814 out_nooutbuf:
815 testmgr_free_buf(axbuf);
816 out_noaxbuf:
817 testmgr_free_buf(xbuf);
818 out_noxbuf:
819 kfree(key);
820 kfree(iv);
821 return ret;
824 static int test_aead(struct crypto_aead *tfm, int enc,
825 struct aead_testvec *template, unsigned int tcount)
827 unsigned int alignmask;
828 int ret;
830 /* test 'dst == src' case */
831 ret = __test_aead(tfm, enc, template, tcount, false, 0);
832 if (ret)
833 return ret;
835 /* test 'dst != src' case */
836 ret = __test_aead(tfm, enc, template, tcount, true, 0);
837 if (ret)
838 return ret;
840 /* test unaligned buffers, check with one byte offset */
841 ret = __test_aead(tfm, enc, template, tcount, true, 1);
842 if (ret)
843 return ret;
845 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
846 if (alignmask) {
847 /* Check if alignment mask for tfm is correctly set. */
848 ret = __test_aead(tfm, enc, template, tcount, true,
849 alignmask + 1);
850 if (ret)
851 return ret;
854 return 0;
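/*
 * test_cipher() exercises the single-block cipher interface: each vector
 * is encrypted or decrypted in place one block at a time with
 * crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one() and the result
 * is compared against the expected output.
 */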
857 static int test_cipher(struct crypto_cipher *tfm, int enc,
858 struct cipher_testvec *template, unsigned int tcount)
860 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
861 unsigned int i, j, k;
862 char *q;
863 const char *e;
864 void *data;
865 char *xbuf[XBUFSIZE];
866 int ret = -ENOMEM;
868 if (testmgr_alloc_buf(xbuf))
869 goto out_nobuf;
871 if (enc == ENCRYPT)
872 e = "encryption";
873 else
874 e = "decryption";
876 j = 0;
877 for (i = 0; i < tcount; i++) {
878 if (template[i].np)
879 continue;
881 j++;
883 ret = -EINVAL;
884 if (WARN_ON(template[i].ilen > PAGE_SIZE))
885 goto out;
887 data = xbuf[0];
888 memcpy(data, template[i].input, template[i].ilen);
890 crypto_cipher_clear_flags(tfm, ~0);
891 if (template[i].wk)
892 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
894 ret = crypto_cipher_setkey(tfm, template[i].key,
895 template[i].klen);
896 if (!ret == template[i].fail) {
897 printk(KERN_ERR "alg: cipher: setkey failed "
898 "on test %d for %s: flags=%x\n", j,
899 algo, crypto_cipher_get_flags(tfm));
900 goto out;
901 } else if (ret)
902 continue;
904 for (k = 0; k < template[i].ilen;
905 k += crypto_cipher_blocksize(tfm)) {
906 if (enc)
907 crypto_cipher_encrypt_one(tfm, data + k,
908 data + k);
909 else
910 crypto_cipher_decrypt_one(tfm, data + k,
911 data + k);
914 q = data;
915 if (memcmp(q, template[i].result, template[i].rlen)) {
916 printk(KERN_ERR "alg: cipher: Test %d failed "
917 "on %s for %s\n", j, e, algo);
918 hexdump(q, template[i].rlen);
919 ret = -EINVAL;
920 goto out;
924 ret = 0;
926 out:
927 testmgr_free_buf(xbuf);
928 out_nobuf:
929 return ret;
932 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
933 struct cipher_testvec *template, unsigned int tcount,
934 const bool diff_dst, const int align_offset)
936 const char *algo =
937 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
938 unsigned int i, j, k, n, temp;
939 char *q;
940 struct ablkcipher_request *req;
941 struct scatterlist sg[8];
942 struct scatterlist sgout[8];
943 const char *e, *d;
944 struct tcrypt_result result;
945 void *data;
946 char iv[MAX_IVLEN];
947 char *xbuf[XBUFSIZE];
948 char *xoutbuf[XBUFSIZE];
949 int ret = -ENOMEM;
951 if (testmgr_alloc_buf(xbuf))
952 goto out_nobuf;
954 if (diff_dst && testmgr_alloc_buf(xoutbuf))
955 goto out_nooutbuf;
957 if (diff_dst)
958 d = "-ddst";
959 else
960 d = "";
962 if (enc == ENCRYPT)
963 e = "encryption";
964 else
965 e = "decryption";
967 init_completion(&result.completion);
969 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
970 if (!req) {
971 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
972 d, algo);
973 goto out;
976 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
977 tcrypt_complete, &result);
979 j = 0;
980 for (i = 0; i < tcount; i++) {
981 if (template[i].iv)
982 memcpy(iv, template[i].iv, MAX_IVLEN);
983 else
984 memset(iv, 0, MAX_IVLEN);
986 if (!(template[i].np) || (template[i].also_non_np)) {
987 j++;
989 ret = -EINVAL;
990 if (WARN_ON(align_offset + template[i].ilen >
991 PAGE_SIZE))
992 goto out;
994 data = xbuf[0];
995 data += align_offset;
996 memcpy(data, template[i].input, template[i].ilen);
998 crypto_ablkcipher_clear_flags(tfm, ~0);
999 if (template[i].wk)
1000 crypto_ablkcipher_set_flags(
1001 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1003 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1004 template[i].klen);
1005 if (!ret == template[i].fail) {
1006 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1007 d, j, algo,
1008 crypto_ablkcipher_get_flags(tfm));
1009 goto out;
1010 } else if (ret)
1011 continue;
1013 sg_init_one(&sg[0], data, template[i].ilen);
1014 if (diff_dst) {
1015 data = xoutbuf[0];
1016 data += align_offset;
1017 sg_init_one(&sgout[0], data, template[i].ilen);
1020 ablkcipher_request_set_crypt(req, sg,
1021 (diff_dst) ? sgout : sg,
1022 template[i].ilen, iv);
1023 ret = enc ?
1024 crypto_ablkcipher_encrypt(req) :
1025 crypto_ablkcipher_decrypt(req);
1027 switch (ret) {
1028 case 0:
1029 break;
1030 case -EINPROGRESS:
1031 case -EBUSY:
1032 ret = wait_for_completion_interruptible(
1033 &result.completion);
1034 if (!ret && !((ret = result.err))) {
1035 reinit_completion(&result.completion);
1036 break;
1038 /* fall through */
1039 default:
1040 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1041 d, e, j, algo, -ret);
1042 goto out;
1045 q = data;
1046 if (memcmp(q, template[i].result, template[i].rlen)) {
1047 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
1048 d, j, e, algo);
1049 hexdump(q, template[i].rlen);
1050 ret = -EINVAL;
1051 goto out;
1056 j = 0;
1057 for (i = 0; i < tcount; i++) {
1058                 /* alignment tests are only done with contiguous buffers */
1059 if (align_offset != 0)
1060 break;
1062 if (template[i].iv)
1063 memcpy(iv, template[i].iv, MAX_IVLEN);
1064 else
1065 memset(iv, 0, MAX_IVLEN);
1067 if (template[i].np) {
1068 j++;
1070 crypto_ablkcipher_clear_flags(tfm, ~0);
1071 if (template[i].wk)
1072 crypto_ablkcipher_set_flags(
1073 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1075 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1076 template[i].klen);
1077 if (!ret == template[i].fail) {
1078 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1079 d, j, algo,
1080 crypto_ablkcipher_get_flags(tfm));
1081 goto out;
1082 } else if (ret)
1083 continue;
1085 temp = 0;
1086 ret = -EINVAL;
1087 sg_init_table(sg, template[i].np);
1088 if (diff_dst)
1089 sg_init_table(sgout, template[i].np);
1090 for (k = 0; k < template[i].np; k++) {
1091 if (WARN_ON(offset_in_page(IDX[k]) +
1092 template[i].tap[k] > PAGE_SIZE))
1093 goto out;
1095 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1096 offset_in_page(IDX[k]);
1098 memcpy(q, template[i].input + temp,
1099 template[i].tap[k]);
1101 if (offset_in_page(q) + template[i].tap[k] <
1102 PAGE_SIZE)
1103 q[template[i].tap[k]] = 0;
1105 sg_set_buf(&sg[k], q, template[i].tap[k]);
1106 if (diff_dst) {
1107 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1108 offset_in_page(IDX[k]);
1110 sg_set_buf(&sgout[k], q,
1111 template[i].tap[k]);
1113 memset(q, 0, template[i].tap[k]);
1114 if (offset_in_page(q) +
1115 template[i].tap[k] < PAGE_SIZE)
1116 q[template[i].tap[k]] = 0;
1119 temp += template[i].tap[k];
1122 ablkcipher_request_set_crypt(req, sg,
1123 (diff_dst) ? sgout : sg,
1124 template[i].ilen, iv);
1126 ret = enc ?
1127 crypto_ablkcipher_encrypt(req) :
1128 crypto_ablkcipher_decrypt(req);
1130 switch (ret) {
1131 case 0:
1132 break;
1133 case -EINPROGRESS:
1134 case -EBUSY:
1135 ret = wait_for_completion_interruptible(
1136 &result.completion);
1137 if (!ret && !((ret = result.err))) {
1138 reinit_completion(&result.completion);
1139 break;
1141 /* fall through */
1142 default:
1143 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1144 d, e, j, algo, -ret);
1145 goto out;
1148 temp = 0;
1149 ret = -EINVAL;
1150 for (k = 0; k < template[i].np; k++) {
1151 if (diff_dst)
1152 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1153 offset_in_page(IDX[k]);
1154 else
1155 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1156 offset_in_page(IDX[k]);
1158 if (memcmp(q, template[i].result + temp,
1159 template[i].tap[k])) {
1160 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1161 d, j, e, k, algo);
1162 hexdump(q, template[i].tap[k]);
1163 goto out;
1166 q += template[i].tap[k];
1167 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1169 if (n) {
1170 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1171 d, j, e, k, algo, n);
1172 hexdump(q, n);
1173 goto out;
1175 temp += template[i].tap[k];
1180 ret = 0;
1182 out:
1183 ablkcipher_request_free(req);
1184 if (diff_dst)
1185 testmgr_free_buf(xoutbuf);
1186 out_nooutbuf:
1187 testmgr_free_buf(xbuf);
1188 out_nobuf:
1189 return ret;
1192 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1193 struct cipher_testvec *template, unsigned int tcount)
1195 unsigned int alignmask;
1196 int ret;
1198 /* test 'dst == src' case */
1199 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1200 if (ret)
1201 return ret;
1203 /* test 'dst != src' case */
1204 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1205 if (ret)
1206 return ret;
1208 /* test unaligned buffers, check with one byte offset */
1209 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1210 if (ret)
1211 return ret;
1213 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1214 if (alignmask) {
1215 /* Check if alignment mask for tfm is correctly set. */
1216 ret = __test_skcipher(tfm, enc, template, tcount, true,
1217 alignmask + 1);
1218 if (ret)
1219 return ret;
1222 return 0;
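/*
 * test_comp() feeds each compression and decompression vector through the
 * synchronous crypto_comp API and requires both the output length and the
 * output bytes to match the template exactly.
 */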
1225 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1226 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1228 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1229 unsigned int i;
1230 char result[COMP_BUF_SIZE];
1231 int ret;
1233 for (i = 0; i < ctcount; i++) {
1234 int ilen;
1235 unsigned int dlen = COMP_BUF_SIZE;
1237 memset(result, 0, sizeof (result));
1239 ilen = ctemplate[i].inlen;
1240 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1241 ilen, result, &dlen);
1242 if (ret) {
1243 printk(KERN_ERR "alg: comp: compression failed "
1244 "on test %d for %s: ret=%d\n", i + 1, algo,
1245 -ret);
1246 goto out;
1249 if (dlen != ctemplate[i].outlen) {
1250 printk(KERN_ERR "alg: comp: Compression test %d "
1251 "failed for %s: output len = %d\n", i + 1, algo,
1252 dlen);
1253 ret = -EINVAL;
1254 goto out;
1257 if (memcmp(result, ctemplate[i].output, dlen)) {
1258 printk(KERN_ERR "alg: comp: Compression test %d "
1259 "failed for %s\n", i + 1, algo);
1260 hexdump(result, dlen);
1261 ret = -EINVAL;
1262 goto out;
1266 for (i = 0; i < dtcount; i++) {
1267 int ilen;
1268 unsigned int dlen = COMP_BUF_SIZE;
1270 memset(result, 0, sizeof (result));
1272 ilen = dtemplate[i].inlen;
1273 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1274 ilen, result, &dlen);
1275 if (ret) {
1276 printk(KERN_ERR "alg: comp: decompression failed "
1277 "on test %d for %s: ret=%d\n", i + 1, algo,
1278 -ret);
1279 goto out;
1282 if (dlen != dtemplate[i].outlen) {
1283 printk(KERN_ERR "alg: comp: Decompression test %d "
1284 "failed for %s: output len = %d\n", i + 1, algo,
1285 dlen);
1286 ret = -EINVAL;
1287 goto out;
1290 if (memcmp(result, dtemplate[i].output, dlen)) {
1291 printk(KERN_ERR "alg: comp: Decompression test %d "
1292 "failed for %s\n", i + 1, algo);
1293 hexdump(result, dlen);
1294 ret = -EINVAL;
1295 goto out;
1299 ret = 0;
1301 out:
1302 return ret;
1305 static int test_pcomp(struct crypto_pcomp *tfm,
1306 struct pcomp_testvec *ctemplate,
1307 struct pcomp_testvec *dtemplate, int ctcount,
1308 int dtcount)
1310 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1311 unsigned int i;
1312 char result[COMP_BUF_SIZE];
1313 int res;
1315 for (i = 0; i < ctcount; i++) {
1316 struct comp_request req;
1317 unsigned int produced = 0;
1319 res = crypto_compress_setup(tfm, ctemplate[i].params,
1320 ctemplate[i].paramsize);
1321 if (res) {
1322 pr_err("alg: pcomp: compression setup failed on test "
1323 "%d for %s: error=%d\n", i + 1, algo, res);
1324 return res;
1327 res = crypto_compress_init(tfm);
1328 if (res) {
1329 pr_err("alg: pcomp: compression init failed on test "
1330 "%d for %s: error=%d\n", i + 1, algo, res);
1331 return res;
1334 memset(result, 0, sizeof(result));
1336 req.next_in = ctemplate[i].input;
1337 req.avail_in = ctemplate[i].inlen / 2;
1338 req.next_out = result;
1339 req.avail_out = ctemplate[i].outlen / 2;
1341 res = crypto_compress_update(tfm, &req);
1342 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1343 pr_err("alg: pcomp: compression update failed on test "
1344 "%d for %s: error=%d\n", i + 1, algo, res);
1345 return res;
1347 if (res > 0)
1348 produced += res;
1350 /* Add remaining input data */
1351 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1353 res = crypto_compress_update(tfm, &req);
1354 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1355 pr_err("alg: pcomp: compression update failed on test "
1356 "%d for %s: error=%d\n", i + 1, algo, res);
1357 return res;
1359 if (res > 0)
1360 produced += res;
1362 /* Provide remaining output space */
1363 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1365 res = crypto_compress_final(tfm, &req);
1366 if (res < 0) {
1367 pr_err("alg: pcomp: compression final failed on test "
1368 "%d for %s: error=%d\n", i + 1, algo, res);
1369 return res;
1371 produced += res;
1373 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1374 pr_err("alg: comp: Compression test %d failed for %s: "
1375 "output len = %d (expected %d)\n", i + 1, algo,
1376 COMP_BUF_SIZE - req.avail_out,
1377 ctemplate[i].outlen);
1378 return -EINVAL;
1381 if (produced != ctemplate[i].outlen) {
1382 pr_err("alg: comp: Compression test %d failed for %s: "
1383 "returned len = %u (expected %d)\n", i + 1,
1384 algo, produced, ctemplate[i].outlen);
1385 return -EINVAL;
1388 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1389 pr_err("alg: pcomp: Compression test %d failed for "
1390 "%s\n", i + 1, algo);
1391 hexdump(result, ctemplate[i].outlen);
1392 return -EINVAL;
1396 for (i = 0; i < dtcount; i++) {
1397 struct comp_request req;
1398 unsigned int produced = 0;
1400 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1401 dtemplate[i].paramsize);
1402 if (res) {
1403 pr_err("alg: pcomp: decompression setup failed on "
1404 "test %d for %s: error=%d\n", i + 1, algo, res);
1405 return res;
1408 res = crypto_decompress_init(tfm);
1409 if (res) {
1410 pr_err("alg: pcomp: decompression init failed on test "
1411 "%d for %s: error=%d\n", i + 1, algo, res);
1412 return res;
1415 memset(result, 0, sizeof(result));
1417 req.next_in = dtemplate[i].input;
1418 req.avail_in = dtemplate[i].inlen / 2;
1419 req.next_out = result;
1420 req.avail_out = dtemplate[i].outlen / 2;
1422 res = crypto_decompress_update(tfm, &req);
1423 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1424 pr_err("alg: pcomp: decompression update failed on "
1425 "test %d for %s: error=%d\n", i + 1, algo, res);
1426 return res;
1428 if (res > 0)
1429 produced += res;
1431 /* Add remaining input data */
1432 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1434 res = crypto_decompress_update(tfm, &req);
1435 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1436 pr_err("alg: pcomp: decompression update failed on "
1437 "test %d for %s: error=%d\n", i + 1, algo, res);
1438 return res;
1440 if (res > 0)
1441 produced += res;
1443 /* Provide remaining output space */
1444 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1446 res = crypto_decompress_final(tfm, &req);
1447 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1448 pr_err("alg: pcomp: decompression final failed on "
1449 "test %d for %s: error=%d\n", i + 1, algo, res);
1450 return res;
1452 if (res > 0)
1453 produced += res;
1455 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1456 pr_err("alg: comp: Decompression test %d failed for "
1457 "%s: output len = %d (expected %d)\n", i + 1,
1458 algo, COMP_BUF_SIZE - req.avail_out,
1459 dtemplate[i].outlen);
1460 return -EINVAL;
1463 if (produced != dtemplate[i].outlen) {
1464 pr_err("alg: comp: Decompression test %d failed for "
1465 "%s: returned len = %u (expected %d)\n", i + 1,
1466 algo, produced, dtemplate[i].outlen);
1467 return -EINVAL;
1470 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1471 pr_err("alg: pcomp: Decompression test %d failed for "
1472 "%s\n", i + 1, algo);
1473 hexdump(result, dtemplate[i].outlen);
1474 return -EINVAL;
1478 return 0;
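/*
 * test_cprng() reseeds the RNG for every vector with V || key || DT and
 * then compares the final block produced after template[i].loops calls to
 * crypto_rng_get_bytes() against the expected output.
 */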
1482 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1483 unsigned int tcount)
1485 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1486 int err = 0, i, j, seedsize;
1487 u8 *seed;
1488 char result[32];
1490 seedsize = crypto_rng_seedsize(tfm);
1492 seed = kmalloc(seedsize, GFP_KERNEL);
1493 if (!seed) {
1494 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1495 "for %s\n", algo);
1496 return -ENOMEM;
1499 for (i = 0; i < tcount; i++) {
1500 memset(result, 0, 32);
1502 memcpy(seed, template[i].v, template[i].vlen);
1503 memcpy(seed + template[i].vlen, template[i].key,
1504 template[i].klen);
1505 memcpy(seed + template[i].vlen + template[i].klen,
1506 template[i].dt, template[i].dtlen);
1508 err = crypto_rng_reset(tfm, seed, seedsize);
1509 if (err) {
1510 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1511 "for %s\n", algo);
1512 goto out;
1515 for (j = 0; j < template[i].loops; j++) {
1516 err = crypto_rng_get_bytes(tfm, result,
1517 template[i].rlen);
1518 if (err != template[i].rlen) {
1519 printk(KERN_ERR "alg: cprng: Failed to obtain "
1520 "the correct amount of random data for "
1521 "%s (requested %d, got %d)\n", algo,
1522 template[i].rlen, err);
1523 goto out;
1527 err = memcmp(result, template[i].result,
1528 template[i].rlen);
1529 if (err) {
1530 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1531 i, algo);
1532 hexdump(result, template[i].rlen);
1533 err = -EINVAL;
1534 goto out;
1538 out:
1539 kfree(seed);
1540 return err;
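/*
 * The alg_test_*() wrappers below allocate a transform for the requested
 * driver, run the matching test suite from the descriptor, and free the
 * transform again; they are the entry points referenced by the
 * alg_test_descs[] table.
 */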
1543 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1544 u32 type, u32 mask)
1546 struct crypto_aead *tfm;
1547 int err = 0;
1549 tfm = crypto_alloc_aead(driver, type, mask);
1550 if (IS_ERR(tfm)) {
1551 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1552 "%ld\n", driver, PTR_ERR(tfm));
1553 return PTR_ERR(tfm);
1556 if (desc->suite.aead.enc.vecs) {
1557 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1558 desc->suite.aead.enc.count);
1559 if (err)
1560 goto out;
1563 if (!err && desc->suite.aead.dec.vecs)
1564 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1565 desc->suite.aead.dec.count);
1567 out:
1568 crypto_free_aead(tfm);
1569 return err;
1572 static int alg_test_cipher(const struct alg_test_desc *desc,
1573 const char *driver, u32 type, u32 mask)
1575 struct crypto_cipher *tfm;
1576 int err = 0;
1578 tfm = crypto_alloc_cipher(driver, type, mask);
1579 if (IS_ERR(tfm)) {
1580 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1581 "%s: %ld\n", driver, PTR_ERR(tfm));
1582 return PTR_ERR(tfm);
1585 if (desc->suite.cipher.enc.vecs) {
1586 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1587 desc->suite.cipher.enc.count);
1588 if (err)
1589 goto out;
1592 if (desc->suite.cipher.dec.vecs)
1593 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1594 desc->suite.cipher.dec.count);
1596 out:
1597 crypto_free_cipher(tfm);
1598 return err;
1601 static int alg_test_skcipher(const struct alg_test_desc *desc,
1602 const char *driver, u32 type, u32 mask)
1604 struct crypto_ablkcipher *tfm;
1605 int err = 0;
1607 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1608 if (IS_ERR(tfm)) {
1609 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1610 "%s: %ld\n", driver, PTR_ERR(tfm));
1611 return PTR_ERR(tfm);
1614 if (desc->suite.cipher.enc.vecs) {
1615 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1616 desc->suite.cipher.enc.count);
1617 if (err)
1618 goto out;
1621 if (desc->suite.cipher.dec.vecs)
1622 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1623 desc->suite.cipher.dec.count);
1625 out:
1626 crypto_free_ablkcipher(tfm);
1627 return err;
1630 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1631 u32 type, u32 mask)
1633 struct crypto_comp *tfm;
1634 int err;
1636 tfm = crypto_alloc_comp(driver, type, mask);
1637 if (IS_ERR(tfm)) {
1638 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1639 "%ld\n", driver, PTR_ERR(tfm));
1640 return PTR_ERR(tfm);
1643 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1644 desc->suite.comp.decomp.vecs,
1645 desc->suite.comp.comp.count,
1646 desc->suite.comp.decomp.count);
1648 crypto_free_comp(tfm);
1649 return err;
1652 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1653 u32 type, u32 mask)
1655 struct crypto_pcomp *tfm;
1656 int err;
1658 tfm = crypto_alloc_pcomp(driver, type, mask);
1659 if (IS_ERR(tfm)) {
1660 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1661 driver, PTR_ERR(tfm));
1662 return PTR_ERR(tfm);
1665 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1666 desc->suite.pcomp.decomp.vecs,
1667 desc->suite.pcomp.comp.count,
1668 desc->suite.pcomp.decomp.count);
1670 crypto_free_pcomp(tfm);
1671 return err;
1674 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1675 u32 type, u32 mask)
1677 struct crypto_ahash *tfm;
1678 int err;
1680 tfm = crypto_alloc_ahash(driver, type, mask);
1681 if (IS_ERR(tfm)) {
1682 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1683 "%ld\n", driver, PTR_ERR(tfm));
1684 return PTR_ERR(tfm);
1687 err = test_hash(tfm, desc->suite.hash.vecs,
1688 desc->suite.hash.count, true);
1689 if (!err)
1690 err = test_hash(tfm, desc->suite.hash.vecs,
1691 desc->suite.hash.count, false);
1693 crypto_free_ahash(tfm);
1694 return err;
1697 static int alg_test_crc32c(const struct alg_test_desc *desc,
1698 const char *driver, u32 type, u32 mask)
1700 struct crypto_shash *tfm;
1701 u32 val;
1702 int err;
1704 err = alg_test_hash(desc, driver, type, mask);
1705 if (err)
1706 goto out;
1708 tfm = crypto_alloc_shash(driver, type, mask);
1709 if (IS_ERR(tfm)) {
1710 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1711 "%ld\n", driver, PTR_ERR(tfm));
1712 err = PTR_ERR(tfm);
1713 goto out;
1716 do {
1717 struct {
1718 struct shash_desc shash;
1719 char ctx[crypto_shash_descsize(tfm)];
1720 } sdesc;
1722 sdesc.shash.tfm = tfm;
1723 sdesc.shash.flags = 0;
1725 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1726 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1727 if (err) {
1728 printk(KERN_ERR "alg: crc32c: Operation failed for "
1729 "%s: %d\n", driver, err);
1730 break;
1733 if (val != ~420553207) {
1734 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1735 "%d\n", driver, val);
1736 err = -EINVAL;
1738 } while (0);
1740 crypto_free_shash(tfm);
1742 out:
1743 return err;
1746 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1747 u32 type, u32 mask)
1749 struct crypto_rng *rng;
1750 int err;
1752 rng = crypto_alloc_rng(driver, type, mask);
1753 if (IS_ERR(rng)) {
1754 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1755 "%ld\n", driver, PTR_ERR(rng));
1756 return PTR_ERR(rng);
1759 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1761 crypto_free_rng(rng);
1763 return err;
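/*
 * drbg_cavs_test() runs one CAVS-style DRBG vector: the DRNG is reset with
 * the test entropy and personalization string, random data is generated
 * twice with the additional input strings (using the *_test entry points
 * that inject known entropy when prediction resistance is requested), and
 * the final buffer is compared with the expected output.
 */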
1767 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1768 const char *driver, u32 type, u32 mask)
1770 int ret = -EAGAIN;
1771 struct crypto_rng *drng;
1772 struct drbg_test_data test_data;
1773 struct drbg_string addtl, pers, testentropy;
1774 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1776 if (!buf)
1777 return -ENOMEM;
1779 drng = crypto_alloc_rng(driver, type, mask);
1780 if (IS_ERR(drng)) {
1781 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1782 "%s\n", driver);
1783 kzfree(buf);
1784 return -ENOMEM;
1787 test_data.testentropy = &testentropy;
1788 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1789 drbg_string_fill(&pers, test->pers, test->perslen);
1790 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1791 if (ret) {
1792 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1793 goto outbuf;
1796 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1797 if (pr) {
1798 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1799 ret = crypto_drbg_get_bytes_addtl_test(drng,
1800 buf, test->expectedlen, &addtl, &test_data);
1801 } else {
1802 ret = crypto_drbg_get_bytes_addtl(drng,
1803 buf, test->expectedlen, &addtl);
1805 if (ret <= 0) {
1806 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1807 "driver %s\n", driver);
1808 goto outbuf;
1811 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1812 if (pr) {
1813 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1814 ret = crypto_drbg_get_bytes_addtl_test(drng,
1815 buf, test->expectedlen, &addtl, &test_data);
1816 } else {
1817 ret = crypto_drbg_get_bytes_addtl(drng,
1818 buf, test->expectedlen, &addtl);
1820 if (ret <= 0) {
1821 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1822 "driver %s\n", driver);
1823 goto outbuf;
1826 ret = memcmp(test->expected, buf, test->expectedlen);
1828 outbuf:
1829 crypto_free_rng(drng);
1830 kzfree(buf);
1831 return ret;
1835 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1836 u32 type, u32 mask)
1838 int err = 0;
1839 int pr = 0;
1840 int i = 0;
1841 struct drbg_testvec *template = desc->suite.drbg.vecs;
1842 unsigned int tcount = desc->suite.drbg.count;
1844 if (0 == memcmp(driver, "drbg_pr_", 8))
1845 pr = 1;
1847 for (i = 0; i < tcount; i++) {
1848 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1849 if (err) {
1850 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1851 i, driver);
1852 err = -EINVAL;
1853 break;
1856 return err;
1860 static int alg_test_null(const struct alg_test_desc *desc,
1861 const char *driver, u32 type, u32 mask)
1863 return 0;
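/*
 * alg_test_null() is a no-op used for table entries (such as the
 * __driver-* and cryptd() helpers below) that have no test vectors of
 * their own.
 */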
1866 /* Please keep this list sorted by algorithm name. */
1867 static const struct alg_test_desc alg_test_descs[] = {
1869 .alg = "__cbc-cast5-avx",
1870 .test = alg_test_null,
1871 }, {
1872 .alg = "__cbc-cast6-avx",
1873 .test = alg_test_null,
1874 }, {
1875 .alg = "__cbc-serpent-avx",
1876 .test = alg_test_null,
1877 }, {
1878 .alg = "__cbc-serpent-avx2",
1879 .test = alg_test_null,
1880 }, {
1881 .alg = "__cbc-serpent-sse2",
1882 .test = alg_test_null,
1883 }, {
1884 .alg = "__cbc-twofish-avx",
1885 .test = alg_test_null,
1886 }, {
1887 .alg = "__driver-cbc-aes-aesni",
1888 .test = alg_test_null,
1889 .fips_allowed = 1,
1890 }, {
1891 .alg = "__driver-cbc-camellia-aesni",
1892 .test = alg_test_null,
1893 }, {
1894 .alg = "__driver-cbc-camellia-aesni-avx2",
1895 .test = alg_test_null,
1896 }, {
1897 .alg = "__driver-cbc-cast5-avx",
1898 .test = alg_test_null,
1899 }, {
1900 .alg = "__driver-cbc-cast6-avx",
1901 .test = alg_test_null,
1902 }, {
1903 .alg = "__driver-cbc-serpent-avx",
1904 .test = alg_test_null,
1905 }, {
1906 .alg = "__driver-cbc-serpent-avx2",
1907 .test = alg_test_null,
1908 }, {
1909 .alg = "__driver-cbc-serpent-sse2",
1910 .test = alg_test_null,
1911 }, {
1912 .alg = "__driver-cbc-twofish-avx",
1913 .test = alg_test_null,
1914 }, {
1915 .alg = "__driver-ecb-aes-aesni",
1916 .test = alg_test_null,
1917 .fips_allowed = 1,
1918 }, {
1919 .alg = "__driver-ecb-camellia-aesni",
1920 .test = alg_test_null,
1921 }, {
1922 .alg = "__driver-ecb-camellia-aesni-avx2",
1923 .test = alg_test_null,
1924 }, {
1925 .alg = "__driver-ecb-cast5-avx",
1926 .test = alg_test_null,
1927 }, {
1928 .alg = "__driver-ecb-cast6-avx",
1929 .test = alg_test_null,
1930 }, {
1931 .alg = "__driver-ecb-serpent-avx",
1932 .test = alg_test_null,
1933 }, {
1934 .alg = "__driver-ecb-serpent-avx2",
1935 .test = alg_test_null,
1936 }, {
1937 .alg = "__driver-ecb-serpent-sse2",
1938 .test = alg_test_null,
1939 }, {
1940 .alg = "__driver-ecb-twofish-avx",
1941 .test = alg_test_null,
1942 }, {
1943 .alg = "__ghash-pclmulqdqni",
1944 .test = alg_test_null,
1945 .fips_allowed = 1,
1946 }, {
1947 .alg = "ansi_cprng",
1948 .test = alg_test_cprng,
1949 .fips_allowed = 1,
1950 .suite = {
1951 .cprng = {
1952 .vecs = ansi_cprng_aes_tv_template,
1953 .count = ANSI_CPRNG_AES_TEST_VECTORS
1956 }, {
1957 .alg = "authenc(hmac(md5),ecb(cipher_null))",
1958 .test = alg_test_aead,
1959 .fips_allowed = 1,
1960 .suite = {
1961 .aead = {
1962 .enc = {
1963 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
1964 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1966 .dec = {
1967 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
1968 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1972 }, {
1973 .alg = "authenc(hmac(sha1),cbc(aes))",
1974 .test = alg_test_aead,
1975 .fips_allowed = 1,
1976 .suite = {
1977 .aead = {
1978 .enc = {
1979 .vecs =
1980 hmac_sha1_aes_cbc_enc_tv_temp,
1981 .count =
1982 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
1986 }, {
1987 .alg = "authenc(hmac(sha1),cbc(des))",
1988 .test = alg_test_aead,
1989 .fips_allowed = 1,
1990 .suite = {
1991 .aead = {
1992 .enc = {
1993 .vecs =
1994 hmac_sha1_des_cbc_enc_tv_temp,
1995 .count =
1996 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2000 }, {
2001 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2002 .test = alg_test_aead,
2003 .fips_allowed = 1,
2004 .suite = {
2005 .aead = {
2006 .enc = {
2007 .vecs =
2008 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2009 .count =
2010 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2014 }, {
2015 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2016 .test = alg_test_aead,
2017 .fips_allowed = 1,
2018 .suite = {
2019 .aead = {
2020 .enc = {
2021 .vecs =
2022 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2023 .count =
2024 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2026 .dec = {
2027 .vecs =
2028 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2029 .count =
2030 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2034 }, {
2035 .alg = "authenc(hmac(sha224),cbc(des))",
2036 .test = alg_test_aead,
2037 .fips_allowed = 1,
2038 .suite = {
2039 .aead = {
2040 .enc = {
2041 .vecs =
2042 hmac_sha224_des_cbc_enc_tv_temp,
2043 .count =
2044 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2048 }, {
2049 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2050 .test = alg_test_aead,
2051 .fips_allowed = 1,
2052 .suite = {
2053 .aead = {
2054 .enc = {
2055 .vecs =
2056 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2057 .count =
2058 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2062 }, {
2063 .alg = "authenc(hmac(sha256),cbc(aes))",
2064 .test = alg_test_aead,
2065 .fips_allowed = 1,
2066 .suite = {
2067 .aead = {
2068 .enc = {
2069 .vecs =
2070 hmac_sha256_aes_cbc_enc_tv_temp,
2071 .count =
2072 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2076 }, {
2077 .alg = "authenc(hmac(sha256),cbc(des))",
2078 .test = alg_test_aead,
2079 .fips_allowed = 1,
2080 .suite = {
2081 .aead = {
2082 .enc = {
2083 .vecs =
2084 hmac_sha256_des_cbc_enc_tv_temp,
2085 .count =
2086 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2090 }, {
2091 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2092 .test = alg_test_aead,
2093 .fips_allowed = 1,
2094 .suite = {
2095 .aead = {
2096 .enc = {
2097 .vecs =
2098 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2099 .count =
2100 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2104 }, {
2105 .alg = "authenc(hmac(sha384),cbc(des))",
2106 .test = alg_test_aead,
2107 .fips_allowed = 1,
2108 .suite = {
2109 .aead = {
2110 .enc = {
2111 .vecs =
2112 hmac_sha384_des_cbc_enc_tv_temp,
2113 .count =
2114 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2118 }, {
2119 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2120 .test = alg_test_aead,
2121 .fips_allowed = 1,
2122 .suite = {
2123 .aead = {
2124 .enc = {
2125 .vecs =
2126 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2127 .count =
2128 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2132 }, {
2133 .alg = "authenc(hmac(sha512),cbc(aes))",
2134 .test = alg_test_aead,
2135 .fips_allowed = 1,
2136 .suite = {
2137 .aead = {
2138 .enc = {
2139 .vecs =
2140 hmac_sha512_aes_cbc_enc_tv_temp,
2141 .count =
2142 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2146 }, {
2147 .alg = "authenc(hmac(sha512),cbc(des))",
2148 .test = alg_test_aead,
2149 .fips_allowed = 1,
2150 .suite = {
2151 .aead = {
2152 .enc = {
2153 .vecs =
2154 hmac_sha512_des_cbc_enc_tv_temp,
2155 .count =
2156 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2160 }, {
2161 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2162 .test = alg_test_aead,
2163 .fips_allowed = 1,
2164 .suite = {
2165 .aead = {
2166 .enc = {
2167 .vecs =
2168 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2169 .count =
2170 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2174 }, {
2175 .alg = "cbc(aes)",
2176 .test = alg_test_skcipher,
2177 .fips_allowed = 1,
2178 .suite = {
2179 .cipher = {
2180 .enc = {
2181 .vecs = aes_cbc_enc_tv_template,
2182 .count = AES_CBC_ENC_TEST_VECTORS
2184 .dec = {
2185 .vecs = aes_cbc_dec_tv_template,
2186 .count = AES_CBC_DEC_TEST_VECTORS
2190 }, {
2191 .alg = "cbc(anubis)",
2192 .test = alg_test_skcipher,
2193 .suite = {
2194 .cipher = {
2195 .enc = {
2196 .vecs = anubis_cbc_enc_tv_template,
2197 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2199 .dec = {
2200 .vecs = anubis_cbc_dec_tv_template,
2201 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2205 }, {
2206 .alg = "cbc(blowfish)",
2207 .test = alg_test_skcipher,
2208 .suite = {
2209 .cipher = {
2210 .enc = {
2211 .vecs = bf_cbc_enc_tv_template,
2212 .count = BF_CBC_ENC_TEST_VECTORS
2214 .dec = {
2215 .vecs = bf_cbc_dec_tv_template,
2216 .count = BF_CBC_DEC_TEST_VECTORS
2220 }, {
2221 .alg = "cbc(camellia)",
2222 .test = alg_test_skcipher,
2223 .suite = {
2224 .cipher = {
2225 .enc = {
2226 .vecs = camellia_cbc_enc_tv_template,
2227 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2229 .dec = {
2230 .vecs = camellia_cbc_dec_tv_template,
2231 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2235 }, {
2236 .alg = "cbc(cast5)",
2237 .test = alg_test_skcipher,
2238 .suite = {
2239 .cipher = {
2240 .enc = {
2241 .vecs = cast5_cbc_enc_tv_template,
2242 .count = CAST5_CBC_ENC_TEST_VECTORS
2244 .dec = {
2245 .vecs = cast5_cbc_dec_tv_template,
2246 .count = CAST5_CBC_DEC_TEST_VECTORS
2250 }, {
2251 .alg = "cbc(cast6)",
2252 .test = alg_test_skcipher,
2253 .suite = {
2254 .cipher = {
2255 .enc = {
2256 .vecs = cast6_cbc_enc_tv_template,
2257 .count = CAST6_CBC_ENC_TEST_VECTORS
2259 .dec = {
2260 .vecs = cast6_cbc_dec_tv_template,
2261 .count = CAST6_CBC_DEC_TEST_VECTORS
2265 }, {
2266 .alg = "cbc(des)",
2267 .test = alg_test_skcipher,
2268 .suite = {
2269 .cipher = {
2270 .enc = {
2271 .vecs = des_cbc_enc_tv_template,
2272 .count = DES_CBC_ENC_TEST_VECTORS
2274 .dec = {
2275 .vecs = des_cbc_dec_tv_template,
2276 .count = DES_CBC_DEC_TEST_VECTORS
2280 }, {
2281 .alg = "cbc(des3_ede)",
2282 .test = alg_test_skcipher,
2283 .fips_allowed = 1,
2284 .suite = {
2285 .cipher = {
2286 .enc = {
2287 .vecs = des3_ede_cbc_enc_tv_template,
2288 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2290 .dec = {
2291 .vecs = des3_ede_cbc_dec_tv_template,
2292 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2296 }, {
2297 .alg = "cbc(serpent)",
2298 .test = alg_test_skcipher,
2299 .suite = {
2300 .cipher = {
2301 .enc = {
2302 .vecs = serpent_cbc_enc_tv_template,
2303 .count = SERPENT_CBC_ENC_TEST_VECTORS
2305 .dec = {
2306 .vecs = serpent_cbc_dec_tv_template,
2307 .count = SERPENT_CBC_DEC_TEST_VECTORS
2311 }, {
2312 .alg = "cbc(twofish)",
2313 .test = alg_test_skcipher,
2314 .suite = {
2315 .cipher = {
2316 .enc = {
2317 .vecs = tf_cbc_enc_tv_template,
2318 .count = TF_CBC_ENC_TEST_VECTORS
2320 .dec = {
2321 .vecs = tf_cbc_dec_tv_template,
2322 .count = TF_CBC_DEC_TEST_VECTORS
2326 }, {
2327 .alg = "ccm(aes)",
2328 .test = alg_test_aead,
2329 .fips_allowed = 1,
2330 .suite = {
2331 .aead = {
2332 .enc = {
2333 .vecs = aes_ccm_enc_tv_template,
2334 .count = AES_CCM_ENC_TEST_VECTORS
2336 .dec = {
2337 .vecs = aes_ccm_dec_tv_template,
2338 .count = AES_CCM_DEC_TEST_VECTORS
2342 }, {
2343 .alg = "cmac(aes)",
2344 .test = alg_test_hash,
2345 .suite = {
2346 .hash = {
2347 .vecs = aes_cmac128_tv_template,
2348 .count = CMAC_AES_TEST_VECTORS
2351 }, {
2352 .alg = "cmac(des3_ede)",
2353 .test = alg_test_hash,
2354 .suite = {
2355 .hash = {
2356 .vecs = des3_ede_cmac64_tv_template,
2357 .count = CMAC_DES3_EDE_TEST_VECTORS
2360 }, {
2361 .alg = "compress_null",
2362 .test = alg_test_null,
2363 }, {
2364 .alg = "crc32c",
2365 .test = alg_test_crc32c,
2366 .fips_allowed = 1,
2367 .suite = {
2368 .hash = {
2369 .vecs = crc32c_tv_template,
2370 .count = CRC32C_TEST_VECTORS
2373 }, {
2374 .alg = "crct10dif",
2375 .test = alg_test_hash,
2376 .fips_allowed = 1,
2377 .suite = {
2378 .hash = {
2379 .vecs = crct10dif_tv_template,
2380 .count = CRCT10DIF_TEST_VECTORS
2383 }, {
2384 .alg = "cryptd(__driver-cbc-aes-aesni)",
2385 .test = alg_test_null,
2386 .fips_allowed = 1,
2387 }, {
2388 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2389 .test = alg_test_null,
2390 }, {
2391 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2392 .test = alg_test_null,
2393 }, {
2394 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2395 .test = alg_test_null,
2396 }, {
2397 .alg = "cryptd(__driver-ecb-aes-aesni)",
2398 .test = alg_test_null,
2399 .fips_allowed = 1,
2400 }, {
2401 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2402 .test = alg_test_null,
2403 }, {
2404 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2405 .test = alg_test_null,
2406 }, {
2407 .alg = "cryptd(__driver-ecb-cast5-avx)",
2408 .test = alg_test_null,
2409 }, {
2410 .alg = "cryptd(__driver-ecb-cast6-avx)",
2411 .test = alg_test_null,
2412 }, {
2413 .alg = "cryptd(__driver-ecb-serpent-avx)",
2414 .test = alg_test_null,
2415 }, {
2416 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2417 .test = alg_test_null,
2418 }, {
2419 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2420 .test = alg_test_null,
2421 }, {
2422 .alg = "cryptd(__driver-ecb-twofish-avx)",
2423 .test = alg_test_null,
2424 }, {
2425 .alg = "cryptd(__driver-gcm-aes-aesni)",
2426 .test = alg_test_null,
2427 .fips_allowed = 1,
2428 }, {
2429 .alg = "cryptd(__ghash-pclmulqdqni)",
2430 .test = alg_test_null,
2431 .fips_allowed = 1,
2432 }, {
2433 .alg = "ctr(aes)",
2434 .test = alg_test_skcipher,
2435 .fips_allowed = 1,
2436 .suite = {
2437 .cipher = {
2438 .enc = {
2439 .vecs = aes_ctr_enc_tv_template,
2440 .count = AES_CTR_ENC_TEST_VECTORS
2442 .dec = {
2443 .vecs = aes_ctr_dec_tv_template,
2444 .count = AES_CTR_DEC_TEST_VECTORS
2448 }, {
2449 .alg = "ctr(blowfish)",
2450 .test = alg_test_skcipher,
2451 .suite = {
2452 .cipher = {
2453 .enc = {
2454 .vecs = bf_ctr_enc_tv_template,
2455 .count = BF_CTR_ENC_TEST_VECTORS
2457 .dec = {
2458 .vecs = bf_ctr_dec_tv_template,
2459 .count = BF_CTR_DEC_TEST_VECTORS
2463 }, {
2464 .alg = "ctr(camellia)",
2465 .test = alg_test_skcipher,
2466 .suite = {
2467 .cipher = {
2468 .enc = {
2469 .vecs = camellia_ctr_enc_tv_template,
2470 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2472 .dec = {
2473 .vecs = camellia_ctr_dec_tv_template,
2474 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2478 }, {
2479 .alg = "ctr(cast5)",
2480 .test = alg_test_skcipher,
2481 .suite = {
2482 .cipher = {
2483 .enc = {
2484 .vecs = cast5_ctr_enc_tv_template,
2485 .count = CAST5_CTR_ENC_TEST_VECTORS
2487 .dec = {
2488 .vecs = cast5_ctr_dec_tv_template,
2489 .count = CAST5_CTR_DEC_TEST_VECTORS
2493 }, {
2494 .alg = "ctr(cast6)",
2495 .test = alg_test_skcipher,
2496 .suite = {
2497 .cipher = {
2498 .enc = {
2499 .vecs = cast6_ctr_enc_tv_template,
2500 .count = CAST6_CTR_ENC_TEST_VECTORS
2502 .dec = {
2503 .vecs = cast6_ctr_dec_tv_template,
2504 .count = CAST6_CTR_DEC_TEST_VECTORS
2508 }, {
2509 .alg = "ctr(des)",
2510 .test = alg_test_skcipher,
2511 .suite = {
2512 .cipher = {
2513 .enc = {
2514 .vecs = des_ctr_enc_tv_template,
2515 .count = DES_CTR_ENC_TEST_VECTORS
2517 .dec = {
2518 .vecs = des_ctr_dec_tv_template,
2519 .count = DES_CTR_DEC_TEST_VECTORS
2523 }, {
2524 .alg = "ctr(des3_ede)",
2525 .test = alg_test_skcipher,
2526 .suite = {
2527 .cipher = {
2528 .enc = {
2529 .vecs = des3_ede_ctr_enc_tv_template,
2530 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2532 .dec = {
2533 .vecs = des3_ede_ctr_dec_tv_template,
2534 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2538 }, {
2539 .alg = "ctr(serpent)",
2540 .test = alg_test_skcipher,
2541 .suite = {
2542 .cipher = {
2543 .enc = {
2544 .vecs = serpent_ctr_enc_tv_template,
2545 .count = SERPENT_CTR_ENC_TEST_VECTORS
2547 .dec = {
2548 .vecs = serpent_ctr_dec_tv_template,
2549 .count = SERPENT_CTR_DEC_TEST_VECTORS
2553 }, {
2554 .alg = "ctr(twofish)",
2555 .test = alg_test_skcipher,
2556 .suite = {
2557 .cipher = {
2558 .enc = {
2559 .vecs = tf_ctr_enc_tv_template,
2560 .count = TF_CTR_ENC_TEST_VECTORS
2562 .dec = {
2563 .vecs = tf_ctr_dec_tv_template,
2564 .count = TF_CTR_DEC_TEST_VECTORS
2568 }, {
2569 .alg = "cts(cbc(aes))",
2570 .test = alg_test_skcipher,
2571 .suite = {
2572 .cipher = {
2573 .enc = {
2574 .vecs = cts_mode_enc_tv_template,
2575 .count = CTS_MODE_ENC_TEST_VECTORS
2577 .dec = {
2578 .vecs = cts_mode_dec_tv_template,
2579 .count = CTS_MODE_DEC_TEST_VECTORS
2583 }, {
2584 .alg = "deflate",
2585 .test = alg_test_comp,
2586 .fips_allowed = 1,
2587 .suite = {
2588 .comp = {
2589 .comp = {
2590 .vecs = deflate_comp_tv_template,
2591 .count = DEFLATE_COMP_TEST_VECTORS
2593 .decomp = {
2594 .vecs = deflate_decomp_tv_template,
2595 .count = DEFLATE_DECOMP_TEST_VECTORS
2599 }, {
2600 .alg = "digest_null",
2601 .test = alg_test_null,
2602 }, {
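/*
 * SP800-90A DRBG tests: "nopr" entries run without prediction resistance,
 * "pr" entries with it.  Their .count fields use ARRAY_SIZE() because the
 * vector arrays are declared in testmgr.h rather than being paired with
 * *_TEST_VECTORS constants.
 */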
2603 .alg = "drbg_nopr_ctr_aes128",
2604 .test = alg_test_drbg,
2605 .fips_allowed = 1,
2606 .suite = {
2607 .drbg = {
2608 .vecs = drbg_nopr_ctr_aes128_tv_template,
2609 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2612 }, {
2613 .alg = "drbg_nopr_ctr_aes192",
2614 .test = alg_test_drbg,
2615 .fips_allowed = 1,
2616 .suite = {
2617 .drbg = {
2618 .vecs = drbg_nopr_ctr_aes192_tv_template,
2619 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2622 }, {
2623 .alg = "drbg_nopr_ctr_aes256",
2624 .test = alg_test_drbg,
2625 .fips_allowed = 1,
2626 .suite = {
2627 .drbg = {
2628 .vecs = drbg_nopr_ctr_aes256_tv_template,
2629 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2632 }, {
2633 /*
2634 * There is no need to specifically test the DRBG with every
2635 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2636 */
2637 .alg = "drbg_nopr_hmac_sha1",
2638 .fips_allowed = 1,
2639 .test = alg_test_null,
2640 }, {
2641 .alg = "drbg_nopr_hmac_sha256",
2642 .test = alg_test_drbg,
2643 .fips_allowed = 1,
2644 .suite = {
2645 .drbg = {
2646 .vecs = drbg_nopr_hmac_sha256_tv_template,
2647 .count =
2648 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2651 }, {
2652 /* covered by drbg_nopr_hmac_sha256 test */
2653 .alg = "drbg_nopr_hmac_sha384",
2654 .fips_allowed = 1,
2655 .test = alg_test_null,
2656 }, {
2657 .alg = "drbg_nopr_hmac_sha512",
2658 .test = alg_test_null,
2659 .fips_allowed = 1,
2660 }, {
2661 .alg = "drbg_nopr_sha1",
2662 .fips_allowed = 1,
2663 .test = alg_test_null,
2664 }, {
2665 .alg = "drbg_nopr_sha256",
2666 .test = alg_test_drbg,
2667 .fips_allowed = 1,
2668 .suite = {
2669 .drbg = {
2670 .vecs = drbg_nopr_sha256_tv_template,
2671 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2674 }, {
2675 /* covered by drbg_nopr_sha256 test */
2676 .alg = "drbg_nopr_sha384",
2677 .fips_allowed = 1,
2678 .test = alg_test_null,
2679 }, {
2680 .alg = "drbg_nopr_sha512",
2681 .fips_allowed = 1,
2682 .test = alg_test_null,
2683 }, {
2684 .alg = "drbg_pr_ctr_aes128",
2685 .test = alg_test_drbg,
2686 .fips_allowed = 1,
2687 .suite = {
2688 .drbg = {
2689 .vecs = drbg_pr_ctr_aes128_tv_template,
2690 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2693 }, {
2694 /* covered by drbg_pr_ctr_aes128 test */
2695 .alg = "drbg_pr_ctr_aes192",
2696 .fips_allowed = 1,
2697 .test = alg_test_null,
2698 }, {
2699 .alg = "drbg_pr_ctr_aes256",
2700 .fips_allowed = 1,
2701 .test = alg_test_null,
2702 }, {
2703 .alg = "drbg_pr_hmac_sha1",
2704 .fips_allowed = 1,
2705 .test = alg_test_null,
2706 }, {
2707 .alg = "drbg_pr_hmac_sha256",
2708 .test = alg_test_drbg,
2709 .fips_allowed = 1,
2710 .suite = {
2711 .drbg = {
2712 .vecs = drbg_pr_hmac_sha256_tv_template,
2713 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2716 }, {
2717 /* covered by drbg_pr_hmac_sha256 test */
2718 .alg = "drbg_pr_hmac_sha384",
2719 .fips_allowed = 1,
2720 .test = alg_test_null,
2721 }, {
2722 .alg = "drbg_pr_hmac_sha512",
2723 .test = alg_test_null,
2724 .fips_allowed = 1,
2725 }, {
2726 .alg = "drbg_pr_sha1",
2727 .fips_allowed = 1,
2728 .test = alg_test_null,
2729 }, {
2730 .alg = "drbg_pr_sha256",
2731 .test = alg_test_drbg,
2732 .fips_allowed = 1,
2733 .suite = {
2734 .drbg = {
2735 .vecs = drbg_pr_sha256_tv_template,
2736 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2739 }, {
2740 /* covered by drbg_pr_sha256 test */
2741 .alg = "drbg_pr_sha384",
2742 .fips_allowed = 1,
2743 .test = alg_test_null,
2744 }, {
2745 .alg = "drbg_pr_sha512",
2746 .fips_allowed = 1,
2747 .test = alg_test_null,
2748 }, {
2749 .alg = "ecb(__aes-aesni)",
2750 .test = alg_test_null,
2751 .fips_allowed = 1,
2752 }, {
2753 .alg = "ecb(aes)",
2754 .test = alg_test_skcipher,
2755 .fips_allowed = 1,
2756 .suite = {
2757 .cipher = {
2758 .enc = {
2759 .vecs = aes_enc_tv_template,
2760 .count = AES_ENC_TEST_VECTORS
2762 .dec = {
2763 .vecs = aes_dec_tv_template,
2764 .count = AES_DEC_TEST_VECTORS
2768 }, {
2769 .alg = "ecb(anubis)",
2770 .test = alg_test_skcipher,
2771 .suite = {
2772 .cipher = {
2773 .enc = {
2774 .vecs = anubis_enc_tv_template,
2775 .count = ANUBIS_ENC_TEST_VECTORS
2777 .dec = {
2778 .vecs = anubis_dec_tv_template,
2779 .count = ANUBIS_DEC_TEST_VECTORS
2783 }, {
2784 .alg = "ecb(arc4)",
2785 .test = alg_test_skcipher,
2786 .suite = {
2787 .cipher = {
2788 .enc = {
2789 .vecs = arc4_enc_tv_template,
2790 .count = ARC4_ENC_TEST_VECTORS
2792 .dec = {
2793 .vecs = arc4_dec_tv_template,
2794 .count = ARC4_DEC_TEST_VECTORS
2798 }, {
2799 .alg = "ecb(blowfish)",
2800 .test = alg_test_skcipher,
2801 .suite = {
2802 .cipher = {
2803 .enc = {
2804 .vecs = bf_enc_tv_template,
2805 .count = BF_ENC_TEST_VECTORS
2807 .dec = {
2808 .vecs = bf_dec_tv_template,
2809 .count = BF_DEC_TEST_VECTORS
2813 }, {
2814 .alg = "ecb(camellia)",
2815 .test = alg_test_skcipher,
2816 .suite = {
2817 .cipher = {
2818 .enc = {
2819 .vecs = camellia_enc_tv_template,
2820 .count = CAMELLIA_ENC_TEST_VECTORS
2822 .dec = {
2823 .vecs = camellia_dec_tv_template,
2824 .count = CAMELLIA_DEC_TEST_VECTORS
2828 }, {
2829 .alg = "ecb(cast5)",
2830 .test = alg_test_skcipher,
2831 .suite = {
2832 .cipher = {
2833 .enc = {
2834 .vecs = cast5_enc_tv_template,
2835 .count = CAST5_ENC_TEST_VECTORS
2837 .dec = {
2838 .vecs = cast5_dec_tv_template,
2839 .count = CAST5_DEC_TEST_VECTORS
2843 }, {
2844 .alg = "ecb(cast6)",
2845 .test = alg_test_skcipher,
2846 .suite = {
2847 .cipher = {
2848 .enc = {
2849 .vecs = cast6_enc_tv_template,
2850 .count = CAST6_ENC_TEST_VECTORS
2852 .dec = {
2853 .vecs = cast6_dec_tv_template,
2854 .count = CAST6_DEC_TEST_VECTORS
2858 }, {
2859 .alg = "ecb(cipher_null)",
2860 .test = alg_test_null,
2861 }, {
2862 .alg = "ecb(des)",
2863 .test = alg_test_skcipher,
2864 .fips_allowed = 1,
2865 .suite = {
2866 .cipher = {
2867 .enc = {
2868 .vecs = des_enc_tv_template,
2869 .count = DES_ENC_TEST_VECTORS
2871 .dec = {
2872 .vecs = des_dec_tv_template,
2873 .count = DES_DEC_TEST_VECTORS
2877 }, {
2878 .alg = "ecb(des3_ede)",
2879 .test = alg_test_skcipher,
2880 .fips_allowed = 1,
2881 .suite = {
2882 .cipher = {
2883 .enc = {
2884 .vecs = des3_ede_enc_tv_template,
2885 .count = DES3_EDE_ENC_TEST_VECTORS
2887 .dec = {
2888 .vecs = des3_ede_dec_tv_template,
2889 .count = DES3_EDE_DEC_TEST_VECTORS
2893 }, {
2894 .alg = "ecb(fcrypt)",
2895 .test = alg_test_skcipher,
2896 .suite = {
2897 .cipher = {
2898 .enc = {
2899 .vecs = fcrypt_pcbc_enc_tv_template,
2900 .count = 1
2902 .dec = {
2903 .vecs = fcrypt_pcbc_dec_tv_template,
2904 .count = 1
2908 }, {
2909 .alg = "ecb(khazad)",
2910 .test = alg_test_skcipher,
2911 .suite = {
2912 .cipher = {
2913 .enc = {
2914 .vecs = khazad_enc_tv_template,
2915 .count = KHAZAD_ENC_TEST_VECTORS
2917 .dec = {
2918 .vecs = khazad_dec_tv_template,
2919 .count = KHAZAD_DEC_TEST_VECTORS
2923 }, {
2924 .alg = "ecb(seed)",
2925 .test = alg_test_skcipher,
2926 .suite = {
2927 .cipher = {
2928 .enc = {
2929 .vecs = seed_enc_tv_template,
2930 .count = SEED_ENC_TEST_VECTORS
2932 .dec = {
2933 .vecs = seed_dec_tv_template,
2934 .count = SEED_DEC_TEST_VECTORS
2938 }, {
2939 .alg = "ecb(serpent)",
2940 .test = alg_test_skcipher,
2941 .suite = {
2942 .cipher = {
2943 .enc = {
2944 .vecs = serpent_enc_tv_template,
2945 .count = SERPENT_ENC_TEST_VECTORS
2947 .dec = {
2948 .vecs = serpent_dec_tv_template,
2949 .count = SERPENT_DEC_TEST_VECTORS
2953 }, {
2954 .alg = "ecb(tea)",
2955 .test = alg_test_skcipher,
2956 .suite = {
2957 .cipher = {
2958 .enc = {
2959 .vecs = tea_enc_tv_template,
2960 .count = TEA_ENC_TEST_VECTORS
2962 .dec = {
2963 .vecs = tea_dec_tv_template,
2964 .count = TEA_DEC_TEST_VECTORS
2968 }, {
2969 .alg = "ecb(tnepres)",
2970 .test = alg_test_skcipher,
2971 .suite = {
2972 .cipher = {
2973 .enc = {
2974 .vecs = tnepres_enc_tv_template,
2975 .count = TNEPRES_ENC_TEST_VECTORS
2977 .dec = {
2978 .vecs = tnepres_dec_tv_template,
2979 .count = TNEPRES_DEC_TEST_VECTORS
2983 }, {
2984 .alg = "ecb(twofish)",
2985 .test = alg_test_skcipher,
2986 .suite = {
2987 .cipher = {
2988 .enc = {
2989 .vecs = tf_enc_tv_template,
2990 .count = TF_ENC_TEST_VECTORS
2992 .dec = {
2993 .vecs = tf_dec_tv_template,
2994 .count = TF_DEC_TEST_VECTORS
2998 }, {
2999 .alg = "ecb(xeta)",
3000 .test = alg_test_skcipher,
3001 .suite = {
3002 .cipher = {
3003 .enc = {
3004 .vecs = xeta_enc_tv_template,
3005 .count = XETA_ENC_TEST_VECTORS
3007 .dec = {
3008 .vecs = xeta_dec_tv_template,
3009 .count = XETA_DEC_TEST_VECTORS
3013 }, {
3014 .alg = "ecb(xtea)",
3015 .test = alg_test_skcipher,
3016 .suite = {
3017 .cipher = {
3018 .enc = {
3019 .vecs = xtea_enc_tv_template,
3020 .count = XTEA_ENC_TEST_VECTORS
3022 .dec = {
3023 .vecs = xtea_dec_tv_template,
3024 .count = XTEA_DEC_TEST_VECTORS
3028 }, {
3029 .alg = "gcm(aes)",
3030 .test = alg_test_aead,
3031 .fips_allowed = 1,
3032 .suite = {
3033 .aead = {
3034 .enc = {
3035 .vecs = aes_gcm_enc_tv_template,
3036 .count = AES_GCM_ENC_TEST_VECTORS
3038 .dec = {
3039 .vecs = aes_gcm_dec_tv_template,
3040 .count = AES_GCM_DEC_TEST_VECTORS
3044 }, {
3045 .alg = "ghash",
3046 .test = alg_test_hash,
3047 .fips_allowed = 1,
3048 .suite = {
3049 .hash = {
3050 .vecs = ghash_tv_template,
3051 .count = GHASH_TEST_VECTORS
3054 }, {
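/*
 * The "hmac(crc32)" entry appears to carry vectors for the Blackfin
 * hardware CRC driver (bfin_crc_tv_template).
 */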
3055 .alg = "hmac(crc32)",
3056 .test = alg_test_hash,
3057 .suite = {
3058 .hash = {
3059 .vecs = bfin_crc_tv_template,
3060 .count = BFIN_CRC_TEST_VECTORS
3063 }, {
3064 .alg = "hmac(md5)",
3065 .test = alg_test_hash,
3066 .suite = {
3067 .hash = {
3068 .vecs = hmac_md5_tv_template,
3069 .count = HMAC_MD5_TEST_VECTORS
3072 }, {
3073 .alg = "hmac(rmd128)",
3074 .test = alg_test_hash,
3075 .suite = {
3076 .hash = {
3077 .vecs = hmac_rmd128_tv_template,
3078 .count = HMAC_RMD128_TEST_VECTORS
3081 }, {
3082 .alg = "hmac(rmd160)",
3083 .test = alg_test_hash,
3084 .suite = {
3085 .hash = {
3086 .vecs = hmac_rmd160_tv_template,
3087 .count = HMAC_RMD160_TEST_VECTORS
3090 }, {
3091 .alg = "hmac(sha1)",
3092 .test = alg_test_hash,
3093 .fips_allowed = 1,
3094 .suite = {
3095 .hash = {
3096 .vecs = hmac_sha1_tv_template,
3097 .count = HMAC_SHA1_TEST_VECTORS
3100 }, {
3101 .alg = "hmac(sha224)",
3102 .test = alg_test_hash,
3103 .fips_allowed = 1,
3104 .suite = {
3105 .hash = {
3106 .vecs = hmac_sha224_tv_template,
3107 .count = HMAC_SHA224_TEST_VECTORS
3110 }, {
3111 .alg = "hmac(sha256)",
3112 .test = alg_test_hash,
3113 .fips_allowed = 1,
3114 .suite = {
3115 .hash = {
3116 .vecs = hmac_sha256_tv_template,
3117 .count = HMAC_SHA256_TEST_VECTORS
3120 }, {
3121 .alg = "hmac(sha384)",
3122 .test = alg_test_hash,
3123 .fips_allowed = 1,
3124 .suite = {
3125 .hash = {
3126 .vecs = hmac_sha384_tv_template,
3127 .count = HMAC_SHA384_TEST_VECTORS
3130 }, {
3131 .alg = "hmac(sha512)",
3132 .test = alg_test_hash,
3133 .fips_allowed = 1,
3134 .suite = {
3135 .hash = {
3136 .vecs = hmac_sha512_tv_template,
3137 .count = HMAC_SHA512_TEST_VECTORS
3140 }, {
3141 .alg = "lrw(aes)",
3142 .test = alg_test_skcipher,
3143 .suite = {
3144 .cipher = {
3145 .enc = {
3146 .vecs = aes_lrw_enc_tv_template,
3147 .count = AES_LRW_ENC_TEST_VECTORS
3149 .dec = {
3150 .vecs = aes_lrw_dec_tv_template,
3151 .count = AES_LRW_DEC_TEST_VECTORS
3155 }, {
3156 .alg = "lrw(camellia)",
3157 .test = alg_test_skcipher,
3158 .suite = {
3159 .cipher = {
3160 .enc = {
3161 .vecs = camellia_lrw_enc_tv_template,
3162 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3164 .dec = {
3165 .vecs = camellia_lrw_dec_tv_template,
3166 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3170 }, {
3171 .alg = "lrw(cast6)",
3172 .test = alg_test_skcipher,
3173 .suite = {
3174 .cipher = {
3175 .enc = {
3176 .vecs = cast6_lrw_enc_tv_template,
3177 .count = CAST6_LRW_ENC_TEST_VECTORS
3179 .dec = {
3180 .vecs = cast6_lrw_dec_tv_template,
3181 .count = CAST6_LRW_DEC_TEST_VECTORS
3185 }, {
3186 .alg = "lrw(serpent)",
3187 .test = alg_test_skcipher,
3188 .suite = {
3189 .cipher = {
3190 .enc = {
3191 .vecs = serpent_lrw_enc_tv_template,
3192 .count = SERPENT_LRW_ENC_TEST_VECTORS
3194 .dec = {
3195 .vecs = serpent_lrw_dec_tv_template,
3196 .count = SERPENT_LRW_DEC_TEST_VECTORS
3200 }, {
3201 .alg = "lrw(twofish)",
3202 .test = alg_test_skcipher,
3203 .suite = {
3204 .cipher = {
3205 .enc = {
3206 .vecs = tf_lrw_enc_tv_template,
3207 .count = TF_LRW_ENC_TEST_VECTORS
3209 .dec = {
3210 .vecs = tf_lrw_dec_tv_template,
3211 .count = TF_LRW_DEC_TEST_VECTORS
3215 }, {
3216 .alg = "lzo",
3217 .test = alg_test_comp,
3218 .fips_allowed = 1,
3219 .suite = {
3220 .comp = {
3221 .comp = {
3222 .vecs = lzo_comp_tv_template,
3223 .count = LZO_COMP_TEST_VECTORS
3225 .decomp = {
3226 .vecs = lzo_decomp_tv_template,
3227 .count = LZO_DECOMP_TEST_VECTORS
3231 }, {
3232 .alg = "md4",
3233 .test = alg_test_hash,
3234 .suite = {
3235 .hash = {
3236 .vecs = md4_tv_template,
3237 .count = MD4_TEST_VECTORS
3240 }, {
3241 .alg = "md5",
3242 .test = alg_test_hash,
3243 .suite = {
3244 .hash = {
3245 .vecs = md5_tv_template,
3246 .count = MD5_TEST_VECTORS
3249 }, {
3250 .alg = "michael_mic",
3251 .test = alg_test_hash,
3252 .suite = {
3253 .hash = {
3254 .vecs = michael_mic_tv_template,
3255 .count = MICHAEL_MIC_TEST_VECTORS
3258 }, {
3259 .alg = "ofb(aes)",
3260 .test = alg_test_skcipher,
3261 .fips_allowed = 1,
3262 .suite = {
3263 .cipher = {
3264 .enc = {
3265 .vecs = aes_ofb_enc_tv_template,
3266 .count = AES_OFB_ENC_TEST_VECTORS
3268 .dec = {
3269 .vecs = aes_ofb_dec_tv_template,
3270 .count = AES_OFB_DEC_TEST_VECTORS
3274 }, {
3275 .alg = "pcbc(fcrypt)",
3276 .test = alg_test_skcipher,
3277 .suite = {
3278 .cipher = {
3279 .enc = {
3280 .vecs = fcrypt_pcbc_enc_tv_template,
3281 .count = FCRYPT_ENC_TEST_VECTORS
3283 .dec = {
3284 .vecs = fcrypt_pcbc_dec_tv_template,
3285 .count = FCRYPT_DEC_TEST_VECTORS
3289 }, {
3290 .alg = "rfc3686(ctr(aes))",
3291 .test = alg_test_skcipher,
3292 .fips_allowed = 1,
3293 .suite = {
3294 .cipher = {
3295 .enc = {
3296 .vecs = aes_ctr_rfc3686_enc_tv_template,
3297 .count = AES_CTR_3686_ENC_TEST_VECTORS
3299 .dec = {
3300 .vecs = aes_ctr_rfc3686_dec_tv_template,
3301 .count = AES_CTR_3686_DEC_TEST_VECTORS
3305 }, {
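/*
 * rfc4106, rfc4309 and rfc4543 are the IPsec ESP encodings of GCM, CCM
 * and GMAC; they fold a nonce salt into the key material, hence the
 * separate vector sets from plain gcm(aes)/ccm(aes).
 */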
3306 .alg = "rfc4106(gcm(aes))",
3307 .test = alg_test_aead,
3308 .suite = {
3309 .aead = {
3310 .enc = {
3311 .vecs = aes_gcm_rfc4106_enc_tv_template,
3312 .count = AES_GCM_4106_ENC_TEST_VECTORS
3314 .dec = {
3315 .vecs = aes_gcm_rfc4106_dec_tv_template,
3316 .count = AES_GCM_4106_DEC_TEST_VECTORS
3320 }, {
3321 .alg = "rfc4309(ccm(aes))",
3322 .test = alg_test_aead,
3323 .fips_allowed = 1,
3324 .suite = {
3325 .aead = {
3326 .enc = {
3327 .vecs = aes_ccm_rfc4309_enc_tv_template,
3328 .count = AES_CCM_4309_ENC_TEST_VECTORS
3330 .dec = {
3331 .vecs = aes_ccm_rfc4309_dec_tv_template,
3332 .count = AES_CCM_4309_DEC_TEST_VECTORS
3336 }, {
3337 .alg = "rfc4543(gcm(aes))",
3338 .test = alg_test_aead,
3339 .suite = {
3340 .aead = {
3341 .enc = {
3342 .vecs = aes_gcm_rfc4543_enc_tv_template,
3343 .count = AES_GCM_4543_ENC_TEST_VECTORS
3345 .dec = {
3346 .vecs = aes_gcm_rfc4543_dec_tv_template,
3347 .count = AES_GCM_4543_DEC_TEST_VECTORS
3351 }, {
3352 .alg = "rmd128",
3353 .test = alg_test_hash,
3354 .suite = {
3355 .hash = {
3356 .vecs = rmd128_tv_template,
3357 .count = RMD128_TEST_VECTORS
3360 }, {
3361 .alg = "rmd160",
3362 .test = alg_test_hash,
3363 .suite = {
3364 .hash = {
3365 .vecs = rmd160_tv_template,
3366 .count = RMD160_TEST_VECTORS
3369 }, {
3370 .alg = "rmd256",
3371 .test = alg_test_hash,
3372 .suite = {
3373 .hash = {
3374 .vecs = rmd256_tv_template,
3375 .count = RMD256_TEST_VECTORS
3378 }, {
3379 .alg = "rmd320",
3380 .test = alg_test_hash,
3381 .suite = {
3382 .hash = {
3383 .vecs = rmd320_tv_template,
3384 .count = RMD320_TEST_VECTORS
3387 }, {
3388 .alg = "salsa20",
3389 .test = alg_test_skcipher,
3390 .suite = {
3391 .cipher = {
3392 .enc = {
3393 .vecs = salsa20_stream_enc_tv_template,
3394 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3398 }, {
3399 .alg = "sha1",
3400 .test = alg_test_hash,
3401 .fips_allowed = 1,
3402 .suite = {
3403 .hash = {
3404 .vecs = sha1_tv_template,
3405 .count = SHA1_TEST_VECTORS
3408 }, {
3409 .alg = "sha224",
3410 .test = alg_test_hash,
3411 .fips_allowed = 1,
3412 .suite = {
3413 .hash = {
3414 .vecs = sha224_tv_template,
3415 .count = SHA224_TEST_VECTORS
3418 }, {
3419 .alg = "sha256",
3420 .test = alg_test_hash,
3421 .fips_allowed = 1,
3422 .suite = {
3423 .hash = {
3424 .vecs = sha256_tv_template,
3425 .count = SHA256_TEST_VECTORS
3428 }, {
3429 .alg = "sha384",
3430 .test = alg_test_hash,
3431 .fips_allowed = 1,
3432 .suite = {
3433 .hash = {
3434 .vecs = sha384_tv_template,
3435 .count = SHA384_TEST_VECTORS
3438 }, {
3439 .alg = "sha512",
3440 .test = alg_test_hash,
3441 .fips_allowed = 1,
3442 .suite = {
3443 .hash = {
3444 .vecs = sha512_tv_template,
3445 .count = SHA512_TEST_VECTORS
3448 }, {
3449 .alg = "tgr128",
3450 .test = alg_test_hash,
3451 .suite = {
3452 .hash = {
3453 .vecs = tgr128_tv_template,
3454 .count = TGR128_TEST_VECTORS
3457 }, {
3458 .alg = "tgr160",
3459 .test = alg_test_hash,
3460 .suite = {
3461 .hash = {
3462 .vecs = tgr160_tv_template,
3463 .count = TGR160_TEST_VECTORS
3466 }, {
3467 .alg = "tgr192",
3468 .test = alg_test_hash,
3469 .suite = {
3470 .hash = {
3471 .vecs = tgr192_tv_template,
3472 .count = TGR192_TEST_VECTORS
3475 }, {
3476 .alg = "vmac(aes)",
3477 .test = alg_test_hash,
3478 .suite = {
3479 .hash = {
3480 .vecs = aes_vmac128_tv_template,
3481 .count = VMAC_AES_TEST_VECTORS
3484 }, {
3485 .alg = "wp256",
3486 .test = alg_test_hash,
3487 .suite = {
3488 .hash = {
3489 .vecs = wp256_tv_template,
3490 .count = WP256_TEST_VECTORS
3493 }, {
3494 .alg = "wp384",
3495 .test = alg_test_hash,
3496 .suite = {
3497 .hash = {
3498 .vecs = wp384_tv_template,
3499 .count = WP384_TEST_VECTORS
3502 }, {
3503 .alg = "wp512",
3504 .test = alg_test_hash,
3505 .suite = {
3506 .hash = {
3507 .vecs = wp512_tv_template,
3508 .count = WP512_TEST_VECTORS
3511 }, {
3512 .alg = "xcbc(aes)",
3513 .test = alg_test_hash,
3514 .suite = {
3515 .hash = {
3516 .vecs = aes_xcbc128_tv_template,
3517 .count = XCBC_AES_TEST_VECTORS
3520 }, {
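/* Among the XTS entries below, only xts(aes) is marked fips_allowed. */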
3521 .alg = "xts(aes)",
3522 .test = alg_test_skcipher,
3523 .fips_allowed = 1,
3524 .suite = {
3525 .cipher = {
3526 .enc = {
3527 .vecs = aes_xts_enc_tv_template,
3528 .count = AES_XTS_ENC_TEST_VECTORS
3530 .dec = {
3531 .vecs = aes_xts_dec_tv_template,
3532 .count = AES_XTS_DEC_TEST_VECTORS
3536 }, {
3537 .alg = "xts(camellia)",
3538 .test = alg_test_skcipher,
3539 .suite = {
3540 .cipher = {
3541 .enc = {
3542 .vecs = camellia_xts_enc_tv_template,
3543 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3545 .dec = {
3546 .vecs = camellia_xts_dec_tv_template,
3547 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3551 }, {
3552 .alg = "xts(cast6)",
3553 .test = alg_test_skcipher,
3554 .suite = {
3555 .cipher = {
3556 .enc = {
3557 .vecs = cast6_xts_enc_tv_template,
3558 .count = CAST6_XTS_ENC_TEST_VECTORS
3560 .dec = {
3561 .vecs = cast6_xts_dec_tv_template,
3562 .count = CAST6_XTS_DEC_TEST_VECTORS
3566 }, {
3567 .alg = "xts(serpent)",
3568 .test = alg_test_skcipher,
3569 .suite = {
3570 .cipher = {
3571 .enc = {
3572 .vecs = serpent_xts_enc_tv_template,
3573 .count = SERPENT_XTS_ENC_TEST_VECTORS
3575 .dec = {
3576 .vecs = serpent_xts_dec_tv_template,
3577 .count = SERPENT_XTS_DEC_TEST_VECTORS
3581 }, {
3582 .alg = "xts(twofish)",
3583 .test = alg_test_skcipher,
3584 .suite = {
3585 .cipher = {
3586 .enc = {
3587 .vecs = tf_xts_enc_tv_template,
3588 .count = TF_XTS_ENC_TEST_VECTORS
3590 .dec = {
3591 .vecs = tf_xts_dec_tv_template,
3592 .count = TF_XTS_DEC_TEST_VECTORS
3596 }, {
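/*
 * zlib uses the partial (streaming) compression interface, so it is
 * tested via alg_test_pcomp and the .pcomp suite, unlike deflate and lzo
 * which use alg_test_comp with the simple .comp suite.
 */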
3597 .alg = "zlib",
3598 .test = alg_test_pcomp,
3599 .fips_allowed = 1,
3600 .suite = {
3601 .pcomp = {
3602 .comp = {
3603 .vecs = zlib_comp_tv_template,
3604 .count = ZLIB_COMP_TEST_VECTORS
3606 .decomp = {
3607 .vecs = zlib_decomp_tv_template,
3608 					.count = ZLIB_DECOMP_TEST_VECTORS
3609 				}
3610 			}
3611 		}
3612 	}
3613 };
3614
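/*
 * New entries must keep the table above sorted by .alg
 * (alg_test_descs_check_order() below warns otherwise).  A purely
 * hypothetical skcipher entry -- not part of this table -- would be
 * slotted in alphabetically and look like:
 *
 *	}, {
 *		.alg = "cbc(newcipher)",
 *		.test = alg_test_skcipher,
 *		.suite = {
 *			.cipher = {
 *				.enc = { .vecs = ..., .count = ... },
 *				.dec = { .vecs = ..., .count = ... }
 *			}
 *		}
 *	}, {
 */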
3615 static bool alg_test_descs_checked;
3617 static void alg_test_descs_check_order(void)
3618 {
3619 	int i;
3620
3621 	/* only check once */
3622 	if (alg_test_descs_checked)
3623 		return;
3624
3625 	alg_test_descs_checked = true;
3626
3627 	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3628 		int diff = strcmp(alg_test_descs[i - 1].alg,
3629 				  alg_test_descs[i].alg);
3630
3631 		if (WARN_ON(diff > 0)) {
3632 			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3633 				alg_test_descs[i - 1].alg,
3634 				alg_test_descs[i].alg);
3635 		}
3636
3637 		if (WARN_ON(diff == 0)) {
3638 			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3639 				alg_test_descs[i].alg);
3640 		}
3641 	}
3642 }
3643
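/*
 * alg_find_test() binary-searches the sorted alg_test_descs[] table by
 * algorithm name and returns the matching index, or -1 if the name has
 * no test description.
 */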
3644 static int alg_find_test(const char *alg)
3645 {
3646 	int start = 0;
3647 	int end = ARRAY_SIZE(alg_test_descs);
3648
3649 	while (start < end) {
3650 		int i = (start + end) / 2;
3651 		int diff = strcmp(alg_test_descs[i].alg, alg);
3652
3653 		if (diff > 0) {
3654 			end = i;
3655 			continue;
3656 		}
3657
3658 		if (diff < 0) {
3659 			start = i + 1;
3660 			continue;
3661 		}
3662
3663 		return i;
3664 	}
3665
3666 	return -1;
3667 }
3668
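/*
 * alg_test() is the crypto manager's entry point: bare block ciphers are
 * looked up through their "ecb(<alg>)" template entry, everything else by
 * both the algorithm and driver names.  In FIPS mode a failed self-test
 * panics the kernel, and algorithms not flagged fips_allowed are rejected
 * with -EINVAL.
 */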
3669 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3670 {
3671 	int i;
3672 	int j;
3673 	int rc;
3674
3675 	alg_test_descs_check_order();
3676
3677 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3678 		char nalg[CRYPTO_MAX_ALG_NAME];
3679
3680 		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3681 		    sizeof(nalg))
3682 			return -ENAMETOOLONG;
3683
3684 		i = alg_find_test(nalg);
3685 		if (i < 0)
3686 			goto notest;
3687
3688 		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3689 			goto non_fips_alg;
3690
3691 		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3692 		goto test_done;
3693 	}
3694
3695 	i = alg_find_test(alg);
3696 	j = alg_find_test(driver);
3697 	if (i < 0 && j < 0)
3698 		goto notest;
3699
3700 	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3701 			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3702 		goto non_fips_alg;
3703
3704 	rc = 0;
3705 	if (i >= 0)
3706 		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3707 					     type, mask);
3708 	if (j >= 0 && j != i)
3709 		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3710 					     type, mask);
3711
3712 test_done:
3713 	if (fips_enabled && rc)
3714 		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3715
3716 	if (fips_enabled && !rc)
3717 		pr_info("alg: self-tests for %s (%s) passed\n",
3718 			driver, alg);
3719
3720 	return rc;
3721
3722 notest:
3723 	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3724 	return 0;
3725 non_fips_alg:
3726 	return -EINVAL;
3727 }
3728
3729 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3731 EXPORT_SYMBOL_GPL(alg_test);