 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
/* Need slab memory for testing (size in number of pages). */

/* Indexes into the xbuf to simulate cross-page access. */

/* Used by test_cipher() */
struct tcrypt_result {
	struct completion completion;
struct aead_test_suite {
		struct aead_testvec *vecs;

struct cipher_test_suite {
		struct cipher_testvec *vecs;

struct comp_test_suite {
		struct comp_testvec *vecs;

struct pcomp_test_suite {
		struct pcomp_testvec *vecs;

struct hash_test_suite {
	struct hash_testvec *vecs;

struct cprng_test_suite {
	struct cprng_testvec *vecs;
struct alg_test_desc {
	int (*test)(const struct alg_test_desc *desc, const char *driver,
	int fips_allowed;	/* set if alg is allowed in fips mode */

		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
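/*
 * The IDX offsets below deliberately straddle page boundaries: the chunked
 * tests use xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]) to place each
 * scatterlist segment, so test data lands at odd offsets on several pages.
 */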
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
static void hexdump(unsigned char *buf, unsigned int len)
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
static void tcrypt_complete(struct crypto_async_request *req, int err)
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)

	complete(&res->completion);
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);

		free_page((unsigned long)buf[i]);
static void testmgr_free_buf(char *buf[XBUFSIZE])
	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
static int do_one_async_hash_op(struct ahash_request *req,
				struct tcrypt_result *tr,
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible(&tr->completion);
		INIT_COMPLETION(tr->completion);
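/*
 * test_hash(): run the hash test vectors against @tfm in two passes. The
 * first pass hashes each vector's plaintext from a single linear buffer,
 * using crypto_ahash_digest() when @use_digest is set and the
 * init/update/final sequence otherwise.  The second pass repeats vectors
 * with a non-zero .np, this time splitting the input across scatterlist
 * chunks placed at the IDX offsets.  Both passes compare against .digest.
 */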
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	struct ahash_request *req;
	struct tcrypt_result tresult;
	char *xbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
		printk(KERN_ERR "alg: hash: Failed to allocate request for "

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 64);

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,

		ahash_request_set_crypt(req, sg, result, template[i].psize);
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_digest(req));
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);

			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_init(req));
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_update(req));
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_final(req));
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			hexdump(result, crypto_ahash_digestsize(tfm));
	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			memset(result, 0, 64);

			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
				memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
				       offset_in_page(IDX[k]),
				       template[i].plaintext + temp,
				temp += template[i].tap[k];

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,

			ahash_request_set_crypt(req, sg, result,
			ret = crypto_ahash_digest(req);
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));

	ahash_request_free(req);

	testmgr_free_buf(xbuf);
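/*
 * __test_aead(): run the AEAD vectors against @tfm for one direction (@enc).
 * When @diff_dst is true the output goes to a separate sgout scatterlist
 * rather than being transformed in place.  As in test_hash(), vectors are
 * exercised once from linear buffers and once split into scatterlist chunks
 * at the IDX offsets; vectors marked .novrfy are expected to fail
 * verification with -EBADMSG.
 */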
static int __test_aead(struct crypto_aead *tfm, int enc,
		       struct aead_testvec *template, unsigned int tcount,
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *asg;
	struct scatterlist *sgout;
	struct tcrypt_result result;
	unsigned int authsize;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
	if (testmgr_alloc_buf(axbuf))
	if (diff_dst && testmgr_alloc_buf(xoutbuf))

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
		pr_err("alg: aead%s: Failed to allocate request for %s\n",

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);
	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			/* some templates have no input data but they will
			if (WARN_ON(template[i].ilen > PAGE_SIZE ||
				    template[i].alen > PAGE_SIZE))

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
				memcpy(iv, template[i].iv, MAX_IVLEN);
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
			if (!ret == template[i].fail) {
				pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
				       d, j, algo, crypto_aead_get_flags(tfm));

			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
				pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
				       d, authsize, j, algo);

			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));
				sg_init_one(&sgout[0], output,
					    (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

				if (template[i].novrfy) {
					/* verification was supposed to fail */
					pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
					/* so really, we got a bad message */

				ret = wait_for_completion_interruptible(
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
				if (template[i].novrfy)
					/* verification failure was expected */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
				       d, e, j, algo, -ret);

			if (memcmp(q, template[i].result, template[i].rlen)) {
				pr_err("alg: aead%s: Test %d failed on %s for %s\n",
				hexdump(q, template[i].rlen);
	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
				memcpy(iv, template[i].iv, MAX_IVLEN);
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
				       d, j, algo, crypto_aead_get_flags(tfm));

			authsize = abs(template[i].rlen - template[i].ilen);

			sg_init_table(sg, template[i].np);
				sg_init_table(sgout, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,

				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
				if (offset_in_page(q) + n < PAGE_SIZE)

				sg_set_buf(&sg[k], q, template[i].tap[k]);
					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);

					memset(q, 0, template[i].tap[k]);
					if (offset_in_page(q) + n < PAGE_SIZE)

					sg_set_buf(&sgout[k], q,

				temp += template[i].tap[k];

			ret = crypto_aead_setauthsize(tfm, authsize);
				pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
				       d, authsize, j, algo);

				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >

				sg[k - 1].length += authsize;
					sgout[k - 1].length += authsize;

			sg_init_table(asg, template[i].anp);
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].atap[k] > PAGE_SIZE))
				memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
				       offset_in_page(IDX[k]),
				       template[i].assoc + temp,
				       template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];

			aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,

			aead_request_set_assoc(req, asg, template[i].alen);

				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

				if (template[i].novrfy) {
					/* verification was supposed to fail */
					pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
					/* so really, we got a bad message */

				ret = wait_for_completion_interruptible(
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
				if (template[i].novrfy)
					/* verification failure was expected */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
				       d, e, j, algo, -ret);

			for (k = 0, temp = 0; k < template[i].np; k++) {
					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);
					q = xbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",

				if (k == template[i].np - 1 && !enc) {
					    memcmp(q, template[i].input +

					for (n = 0; offset_in_page(q + n) &&
					pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
					       d, j, e, k, algo, n);

				temp += template[i].tap[k];

	aead_request_free(req);
		testmgr_free_buf(xoutbuf);
	testmgr_free_buf(axbuf);
	testmgr_free_buf(xbuf);
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
	/* test 'dst == src' case */
	ret = __test_aead(tfm, enc, template, tcount, false);

	/* test 'dst != src' case */
	return __test_aead(tfm, enc, template, tcount, true);
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *xbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))

	for (i = 0; i < tcount; i++) {
		if (WARN_ON(template[i].ilen > PAGE_SIZE))

		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
				crypto_cipher_encrypt_one(tfm, data + k,
				crypto_cipher_decrypt_one(tfm, data + k,

		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);

	testmgr_free_buf(xbuf);
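/*
 * __test_skcipher(): same overall structure as __test_aead() but for
 * ablkcipher transforms: a linear pass over the vectors followed by a
 * chunked scatterlist pass, with @diff_dst selecting separate source and
 * destination buffers.  The chunked pass also checks that bytes just past
 * each output segment were left untouched.
 */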
static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			   struct cipher_testvec *template, unsigned int tcount,
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	struct tcrypt_result result;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
	if (diff_dst && testmgr_alloc_buf(xoutbuf))

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	for (i = 0; i < tcount; i++) {
			memcpy(iv, template[i].iv, MAX_IVLEN);
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np) || (template[i].also_non_np)) {
			if (WARN_ON(template[i].ilen > PAGE_SIZE))

			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
			if (!ret == template[i].fail) {
				pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
				       crypto_ablkcipher_get_flags(tfm));

			sg_init_one(&sg[0], data, template[i].ilen);
				sg_init_one(&sgout[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg,
						     (diff_dst) ? sgout : sg,
						     template[i].ilen, iv);
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

				ret = wait_for_completion_interruptible(
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
				pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
				       d, e, j, algo, -ret);

			if (memcmp(q, template[i].result, template[i].rlen)) {
				pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
				hexdump(q, template[i].rlen);
	for (i = 0; i < tcount; i++) {
			memcpy(iv, template[i].iv, MAX_IVLEN);
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			crypto_ablkcipher_clear_flags(tfm, ~0);
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
			if (!ret == template[i].fail) {
				pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
				       crypto_ablkcipher_get_flags(tfm));

			sg_init_table(sg, template[i].np);
				sg_init_table(sgout, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,

				if (offset_in_page(q) + template[i].tap[k] <
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);

					sg_set_buf(&sgout[k], q,

					memset(q, 0, template[i].tap[k]);
					if (offset_in_page(q) +
					    template[i].tap[k] < PAGE_SIZE)
						q[template[i].tap[k]] = 0;

				temp += template[i].tap[k];

			ablkcipher_request_set_crypt(req, sg,
						     (diff_dst) ? sgout : sg,
						     template[i].ilen, iv);
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
				pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
				       d, e, j, algo, -ret);

			for (k = 0; k < template[i].np; k++) {
					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);
					q = xbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
					hexdump(q, template[i].tap[k]);

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
					       d, j, e, k, algo, n);

				temp += template[i].tap[k];

	ablkcipher_request_free(req);
		testmgr_free_buf(xoutbuf);
	testmgr_free_buf(xbuf);
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
	/* test 'dst == src' case */
	ret = __test_skcipher(tfm, enc, template, tcount, false);

	/* test 'dst != src' case */
	return __test_skcipher(tfm, enc, template, tcount, true);
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char result[COMP_BUF_SIZE];

	for (i = 0; i < ctcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);

	for (i = 0; i < dtcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
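/*
 * test_pcomp(): partial (streaming) compression checks.  Input and output
 * are fed in two halves through crypto_{de}compress_update() followed by
 * crypto_{de}compress_final(), so the implementation's ability to resume
 * with the remaining input and a limited output buffer is exercised too.
 */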
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	char result[COMP_BUF_SIZE];

	for (i = 0; i < ctcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_compress_setup(tfm, ctemplate[i].params,
					    ctemplate[i].paramsize);
			pr_err("alg: pcomp: compression setup failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		res = crypto_compress_init(tfm);
			pr_err("alg: pcomp: compression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		memset(result, 0, sizeof(result));

		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		res = crypto_compress_final(tfm, &req);
			pr_err("alg: pcomp: compression final failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "output len = %d (expected %d)\n", i + 1, algo,
			       COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);

		if (produced != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "returned len = %u (expected %d)\n", i + 1,
			       algo, produced, ctemplate[i].outlen);

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, ctemplate[i].outlen);
	for (i = 0; i < dtcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_decompress_setup(tfm, dtemplate[i].params,
					      dtemplate[i].paramsize);
			pr_err("alg: pcomp: decompression setup failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		res = crypto_decompress_init(tfm);
			pr_err("alg: pcomp: decompression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		res = crypto_decompress_final(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);

		if (produced != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: returned len = %u (expected %d)\n", i + 1,
			       algo, produced, dtemplate[i].outlen);

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
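/*
 * test_cprng(): seed the RNG with the vector's V, key and DT material, then
 * request template[i].rlen bytes template[i].loops times and compare the
 * generated output against the expected result.
 */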
static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
		      unsigned int tcount)
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
	int err = 0, i, j, seedsize;

	seedsize = crypto_rng_seedsize(tfm);

	seed = kmalloc(seedsize, GFP_KERNEL);
		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 32);

		memcpy(seed, template[i].v, template[i].vlen);
		memcpy(seed + template[i].vlen, template[i].key,
		memcpy(seed + template[i].vlen + template[i].klen,
		       template[i].dt, template[i].dtlen);

		err = crypto_rng_reset(tfm, seed, seedsize);
			printk(KERN_ERR "alg: cprng: Failed to reset rng "

		for (j = 0; j < template[i].loops; j++) {
			err = crypto_rng_get_bytes(tfm, result,
			if (err != template[i].rlen) {
				printk(KERN_ERR "alg: cprng: Failed to obtain "
				       "the correct amount of random data for "
				       "%s (requested %d, got %d)\n", algo,
				       template[i].rlen, err);

		err = memcmp(result, template[i].result,
			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
			hexdump(result, template[i].rlen);
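/*
 * The alg_test_*() wrappers below share one pattern: allocate the transform
 * by driver name, run the encryption vectors and then (where present) the
 * decryption vectors from the test suite, and free the transform again.
 */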
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
	struct crypto_aead *tfm;

	tfm = crypto_alloc_aead(driver, type, mask);
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);

	if (!err && desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

	crypto_free_aead(tfm);
static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
	struct crypto_cipher *tfm;

	tfm = crypto_alloc_cipher(driver, type, mask);
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);

	if (desc->suite.cipher.enc.vecs) {
		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				  desc->suite.cipher.enc.count);

	if (desc->suite.cipher.dec.vecs)
		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				  desc->suite.cipher.dec.count);

	crypto_free_cipher(tfm);
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
	struct crypto_ablkcipher *tfm;

	tfm = crypto_alloc_ablkcipher(driver, type, mask);
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);

	if (desc->suite.cipher.enc.vecs) {
		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				    desc->suite.cipher.enc.count);

	if (desc->suite.cipher.dec.vecs)
		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				    desc->suite.cipher.dec.count);

	crypto_free_ablkcipher(tfm);
static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
	struct crypto_comp *tfm;

	tfm = crypto_alloc_comp(driver, type, mask);
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);

	err = test_comp(tfm, desc->suite.comp.comp.vecs,
			desc->suite.comp.decomp.vecs,
			desc->suite.comp.comp.count,
			desc->suite.comp.decomp.count);

	crypto_free_comp(tfm);

static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
	struct crypto_pcomp *tfm;

	tfm = crypto_alloc_pcomp(driver, type, mask);
		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);

	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
			 desc->suite.pcomp.decomp.vecs,
			 desc->suite.pcomp.comp.count,
			 desc->suite.pcomp.decomp.count);

	crypto_free_pcomp(tfm);
static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
	struct crypto_ahash *tfm;

	tfm = crypto_alloc_ahash(driver, type, mask);
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);

	err = test_hash(tfm, desc->suite.hash.vecs,
			desc->suite.hash.count, true);
		err = test_hash(tfm, desc->suite.hash.vecs,
				desc->suite.hash.count, false);

	crypto_free_ahash(tfm);
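/*
 * alg_test_crc32c() runs the normal hash vectors and additionally checks
 * that the shash variant honours a seed value pre-loaded into its
 * descriptor context, finalising it and comparing against the expected
 * complemented value.
 */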
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
	struct crypto_shash *tfm;

	err = alg_test_hash(desc, driver, type, mask);

	tfm = crypto_alloc_shash(driver, type, mask);
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));

		struct shash_desc shash;
		char ctx[crypto_shash_descsize(tfm)];

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);

		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);

	crypto_free_shash(tfm);
static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
	struct crypto_rng *rng;

	rng = crypto_alloc_rng(driver, type, mask);
		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(rng));
		return PTR_ERR(rng);

	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);

	crypto_free_rng(rng);

static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
		.alg = "__cbc-cast5-avx",
		.test = alg_test_null,
		.alg = "__cbc-cast6-avx",
		.test = alg_test_null,
		.alg = "__cbc-serpent-avx",
		.test = alg_test_null,
		.alg = "__cbc-serpent-sse2",
		.test = alg_test_null,
		.alg = "__cbc-twofish-avx",
		.test = alg_test_null,
		.alg = "__driver-cbc-aes-aesni",
		.test = alg_test_null,
		.alg = "__driver-cbc-cast5-avx",
		.test = alg_test_null,
		.alg = "__driver-cbc-cast6-avx",
		.test = alg_test_null,
		.alg = "__driver-cbc-serpent-avx",
		.test = alg_test_null,
		.alg = "__driver-cbc-serpent-sse2",
		.test = alg_test_null,
		.alg = "__driver-cbc-twofish-avx",
		.test = alg_test_null,
		.alg = "__driver-ecb-aes-aesni",
		.test = alg_test_null,
		.alg = "__driver-ecb-cast5-avx",
		.test = alg_test_null,
		.alg = "__driver-ecb-cast6-avx",
		.test = alg_test_null,
		.alg = "__driver-ecb-serpent-avx",
		.test = alg_test_null,
		.alg = "__driver-ecb-serpent-sse2",
		.test = alg_test_null,
		.alg = "__driver-ecb-twofish-avx",
		.test = alg_test_null,
		.alg = "__ghash-pclmulqdqni",
		.test = alg_test_null,
		.alg = "ansi_cprng",
		.test = alg_test_cprng,
				.vecs = ansi_cprng_aes_tv_template,
				.count = ANSI_CPRNG_AES_TEST_VECTORS
		.alg = "authenc(hmac(sha1),cbc(aes))",
		.test = alg_test_aead,
				.vecs = hmac_sha1_aes_cbc_enc_tv_template,
				.count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
		.alg = "authenc(hmac(sha256),cbc(aes))",
		.test = alg_test_aead,
				.vecs = hmac_sha256_aes_cbc_enc_tv_template,
				.count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
		.alg = "authenc(hmac(sha512),cbc(aes))",
		.test = alg_test_aead,
				.vecs = hmac_sha512_aes_cbc_enc_tv_template,
				.count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = aes_cbc_enc_tv_template,
				.count = AES_CBC_ENC_TEST_VECTORS
				.vecs = aes_cbc_dec_tv_template,
				.count = AES_CBC_DEC_TEST_VECTORS
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
				.vecs = anubis_cbc_enc_tv_template,
				.count = ANUBIS_CBC_ENC_TEST_VECTORS
				.vecs = anubis_cbc_dec_tv_template,
				.count = ANUBIS_CBC_DEC_TEST_VECTORS
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
				.vecs = bf_cbc_enc_tv_template,
				.count = BF_CBC_ENC_TEST_VECTORS
				.vecs = bf_cbc_dec_tv_template,
				.count = BF_CBC_DEC_TEST_VECTORS
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
				.vecs = camellia_cbc_enc_tv_template,
				.count = CAMELLIA_CBC_ENC_TEST_VECTORS
				.vecs = camellia_cbc_dec_tv_template,
				.count = CAMELLIA_CBC_DEC_TEST_VECTORS
		.alg = "cbc(cast5)",
		.test = alg_test_skcipher,
				.vecs = cast5_cbc_enc_tv_template,
				.count = CAST5_CBC_ENC_TEST_VECTORS
				.vecs = cast5_cbc_dec_tv_template,
				.count = CAST5_CBC_DEC_TEST_VECTORS
		.alg = "cbc(cast6)",
		.test = alg_test_skcipher,
				.vecs = cast6_cbc_enc_tv_template,
				.count = CAST6_CBC_ENC_TEST_VECTORS
				.vecs = cast6_cbc_dec_tv_template,
				.count = CAST6_CBC_DEC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = des_cbc_enc_tv_template,
				.count = DES_CBC_ENC_TEST_VECTORS
				.vecs = des_cbc_dec_tv_template,
				.count = DES_CBC_DEC_TEST_VECTORS
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
				.vecs = des3_ede_cbc_enc_tv_template,
				.count = DES3_EDE_CBC_ENC_TEST_VECTORS
				.vecs = des3_ede_cbc_dec_tv_template,
				.count = DES3_EDE_CBC_DEC_TEST_VECTORS
		.alg = "cbc(serpent)",
		.test = alg_test_skcipher,
				.vecs = serpent_cbc_enc_tv_template,
				.count = SERPENT_CBC_ENC_TEST_VECTORS
				.vecs = serpent_cbc_dec_tv_template,
				.count = SERPENT_CBC_DEC_TEST_VECTORS
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
				.vecs = tf_cbc_enc_tv_template,
				.count = TF_CBC_ENC_TEST_VECTORS
				.vecs = tf_cbc_dec_tv_template,
				.count = TF_CBC_DEC_TEST_VECTORS
		.test = alg_test_aead,
				.vecs = aes_ccm_enc_tv_template,
				.count = AES_CCM_ENC_TEST_VECTORS
				.vecs = aes_ccm_dec_tv_template,
				.count = AES_CCM_DEC_TEST_VECTORS
		.test = alg_test_crc32c,
				.vecs = crc32c_tv_template,
				.count = CRC32C_TEST_VECTORS
		.alg = "cryptd(__driver-cbc-aes-aesni)",
		.test = alg_test_null,
		.alg = "cryptd(__driver-ecb-aes-aesni)",
		.test = alg_test_null,
		.alg = "cryptd(__driver-ecb-cast5-avx)",
		.test = alg_test_null,
		.alg = "cryptd(__driver-ecb-cast6-avx)",
		.test = alg_test_null,
		.alg = "cryptd(__driver-ecb-serpent-avx)",
		.test = alg_test_null,
		.alg = "cryptd(__driver-ecb-serpent-sse2)",
		.test = alg_test_null,
		.alg = "cryptd(__driver-ecb-twofish-avx)",
		.test = alg_test_null,
		.alg = "cryptd(__driver-gcm-aes-aesni)",
		.test = alg_test_null,
		.alg = "cryptd(__ghash-pclmulqdqni)",
		.test = alg_test_null,
		.test = alg_test_skcipher,
				.vecs = aes_ctr_enc_tv_template,
				.count = AES_CTR_ENC_TEST_VECTORS
				.vecs = aes_ctr_dec_tv_template,
				.count = AES_CTR_DEC_TEST_VECTORS
		.alg = "ctr(blowfish)",
		.test = alg_test_skcipher,
				.vecs = bf_ctr_enc_tv_template,
				.count = BF_CTR_ENC_TEST_VECTORS
				.vecs = bf_ctr_dec_tv_template,
				.count = BF_CTR_DEC_TEST_VECTORS
		.alg = "ctr(camellia)",
		.test = alg_test_skcipher,
				.vecs = camellia_ctr_enc_tv_template,
				.count = CAMELLIA_CTR_ENC_TEST_VECTORS
				.vecs = camellia_ctr_dec_tv_template,
				.count = CAMELLIA_CTR_DEC_TEST_VECTORS
		.alg = "ctr(cast5)",
		.test = alg_test_skcipher,
				.vecs = cast5_ctr_enc_tv_template,
				.count = CAST5_CTR_ENC_TEST_VECTORS
				.vecs = cast5_ctr_dec_tv_template,
				.count = CAST5_CTR_DEC_TEST_VECTORS
		.alg = "ctr(cast6)",
		.test = alg_test_skcipher,
				.vecs = cast6_ctr_enc_tv_template,
				.count = CAST6_CTR_ENC_TEST_VECTORS
				.vecs = cast6_ctr_dec_tv_template,
				.count = CAST6_CTR_DEC_TEST_VECTORS
		.alg = "ctr(serpent)",
		.test = alg_test_skcipher,
				.vecs = serpent_ctr_enc_tv_template,
				.count = SERPENT_CTR_ENC_TEST_VECTORS
				.vecs = serpent_ctr_dec_tv_template,
				.count = SERPENT_CTR_DEC_TEST_VECTORS
		.alg = "ctr(twofish)",
		.test = alg_test_skcipher,
				.vecs = tf_ctr_enc_tv_template,
				.count = TF_CTR_ENC_TEST_VECTORS
				.vecs = tf_ctr_dec_tv_template,
				.count = TF_CTR_DEC_TEST_VECTORS
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
				.vecs = cts_mode_enc_tv_template,
				.count = CTS_MODE_ENC_TEST_VECTORS
				.vecs = cts_mode_dec_tv_template,
				.count = CTS_MODE_DEC_TEST_VECTORS
		.test = alg_test_comp,
				.vecs = deflate_comp_tv_template,
				.count = DEFLATE_COMP_TEST_VECTORS
				.vecs = deflate_decomp_tv_template,
				.count = DEFLATE_DECOMP_TEST_VECTORS
		.alg = "ecb(__aes-aesni)",
		.test = alg_test_null,
		.test = alg_test_skcipher,
				.vecs = aes_enc_tv_template,
				.count = AES_ENC_TEST_VECTORS
				.vecs = aes_dec_tv_template,
				.count = AES_DEC_TEST_VECTORS
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
				.vecs = anubis_enc_tv_template,
				.count = ANUBIS_ENC_TEST_VECTORS
				.vecs = anubis_dec_tv_template,
				.count = ANUBIS_DEC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = arc4_enc_tv_template,
				.count = ARC4_ENC_TEST_VECTORS
				.vecs = arc4_dec_tv_template,
				.count = ARC4_DEC_TEST_VECTORS
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
				.vecs = bf_enc_tv_template,
				.count = BF_ENC_TEST_VECTORS
				.vecs = bf_dec_tv_template,
				.count = BF_DEC_TEST_VECTORS
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
				.vecs = camellia_enc_tv_template,
				.count = CAMELLIA_ENC_TEST_VECTORS
				.vecs = camellia_dec_tv_template,
				.count = CAMELLIA_DEC_TEST_VECTORS
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
				.vecs = cast5_enc_tv_template,
				.count = CAST5_ENC_TEST_VECTORS
				.vecs = cast5_dec_tv_template,
				.count = CAST5_DEC_TEST_VECTORS
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
				.vecs = cast6_enc_tv_template,
				.count = CAST6_ENC_TEST_VECTORS
				.vecs = cast6_dec_tv_template,
				.count = CAST6_DEC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = des_enc_tv_template,
				.count = DES_ENC_TEST_VECTORS
				.vecs = des_dec_tv_template,
				.count = DES_DEC_TEST_VECTORS
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
				.vecs = des3_ede_enc_tv_template,
				.count = DES3_EDE_ENC_TEST_VECTORS
				.vecs = des3_ede_dec_tv_template,
				.count = DES3_EDE_DEC_TEST_VECTORS
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
				.vecs = khazad_enc_tv_template,
				.count = KHAZAD_ENC_TEST_VECTORS
				.vecs = khazad_dec_tv_template,
				.count = KHAZAD_DEC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = seed_enc_tv_template,
				.count = SEED_ENC_TEST_VECTORS
				.vecs = seed_dec_tv_template,
				.count = SEED_DEC_TEST_VECTORS
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
				.vecs = serpent_enc_tv_template,
				.count = SERPENT_ENC_TEST_VECTORS
				.vecs = serpent_dec_tv_template,
				.count = SERPENT_DEC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = tea_enc_tv_template,
				.count = TEA_ENC_TEST_VECTORS
				.vecs = tea_dec_tv_template,
				.count = TEA_DEC_TEST_VECTORS
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
				.vecs = tnepres_enc_tv_template,
				.count = TNEPRES_ENC_TEST_VECTORS
				.vecs = tnepres_dec_tv_template,
				.count = TNEPRES_DEC_TEST_VECTORS
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
				.vecs = tf_enc_tv_template,
				.count = TF_ENC_TEST_VECTORS
				.vecs = tf_dec_tv_template,
				.count = TF_DEC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = xeta_enc_tv_template,
				.count = XETA_ENC_TEST_VECTORS
				.vecs = xeta_dec_tv_template,
				.count = XETA_DEC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = xtea_enc_tv_template,
				.count = XTEA_ENC_TEST_VECTORS
				.vecs = xtea_dec_tv_template,
				.count = XTEA_DEC_TEST_VECTORS
		.test = alg_test_aead,
				.vecs = aes_gcm_enc_tv_template,
				.count = AES_GCM_ENC_TEST_VECTORS
				.vecs = aes_gcm_dec_tv_template,
				.count = AES_GCM_DEC_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
		.alg = "hmac(crc32)",
		.test = alg_test_hash,
				.vecs = bfin_crc_tv_template,
				.count = BFIN_CRC_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = aes_lrw_enc_tv_template,
				.count = AES_LRW_ENC_TEST_VECTORS
				.vecs = aes_lrw_dec_tv_template,
				.count = AES_LRW_DEC_TEST_VECTORS
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
				.vecs = camellia_lrw_enc_tv_template,
				.count = CAMELLIA_LRW_ENC_TEST_VECTORS
				.vecs = camellia_lrw_dec_tv_template,
				.count = CAMELLIA_LRW_DEC_TEST_VECTORS
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
				.vecs = cast6_lrw_enc_tv_template,
				.count = CAST6_LRW_ENC_TEST_VECTORS
				.vecs = cast6_lrw_dec_tv_template,
				.count = CAST6_LRW_DEC_TEST_VECTORS
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
				.vecs = serpent_lrw_enc_tv_template,
				.count = SERPENT_LRW_ENC_TEST_VECTORS
				.vecs = serpent_lrw_dec_tv_template,
				.count = SERPENT_LRW_DEC_TEST_VECTORS
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
				.vecs = tf_lrw_enc_tv_template,
				.count = TF_LRW_ENC_TEST_VECTORS
				.vecs = tf_lrw_dec_tv_template,
				.count = TF_LRW_DEC_TEST_VECTORS
		.test = alg_test_comp,
				.vecs = lzo_comp_tv_template,
				.count = LZO_COMP_TEST_VECTORS
				.vecs = lzo_decomp_tv_template,
				.count = LZO_DECOMP_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
		.alg = "michael_mic",
		.test = alg_test_hash,
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = aes_ofb_enc_tv_template,
				.count = AES_OFB_ENC_TEST_VECTORS
				.vecs = aes_ofb_dec_tv_template,
				.count = AES_OFB_DEC_TEST_VECTORS
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
				.vecs = fcrypt_pcbc_enc_tv_template,
				.count = FCRYPT_ENC_TEST_VECTORS
				.vecs = fcrypt_pcbc_dec_tv_template,
				.count = FCRYPT_DEC_TEST_VECTORS
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
				.vecs = aes_ctr_rfc3686_enc_tv_template,
				.count = AES_CTR_3686_ENC_TEST_VECTORS
				.vecs = aes_ctr_rfc3686_dec_tv_template,
				.count = AES_CTR_3686_DEC_TEST_VECTORS
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
				.vecs = aes_gcm_rfc4106_enc_tv_template,
				.count = AES_GCM_4106_ENC_TEST_VECTORS
				.vecs = aes_gcm_rfc4106_dec_tv_template,
				.count = AES_GCM_4106_DEC_TEST_VECTORS
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
				.vecs = aes_ccm_rfc4309_enc_tv_template,
				.count = AES_CCM_4309_ENC_TEST_VECTORS
				.vecs = aes_ccm_rfc4309_dec_tv_template,
				.count = AES_CCM_4309_DEC_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = salsa20_stream_enc_tv_template,
				.count = SALSA20_STREAM_ENC_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = aes_vmac128_tv_template,
				.count = VMAC_AES_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
		.test = alg_test_hash,
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
		.test = alg_test_skcipher,
				.vecs = aes_xts_enc_tv_template,
				.count = AES_XTS_ENC_TEST_VECTORS
				.vecs = aes_xts_dec_tv_template,
				.count = AES_XTS_DEC_TEST_VECTORS
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
				.vecs = camellia_xts_enc_tv_template,
				.count = CAMELLIA_XTS_ENC_TEST_VECTORS
				.vecs = camellia_xts_dec_tv_template,
				.count = CAMELLIA_XTS_DEC_TEST_VECTORS
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
				.vecs = cast6_xts_enc_tv_template,
				.count = CAST6_XTS_ENC_TEST_VECTORS
				.vecs = cast6_xts_dec_tv_template,
				.count = CAST6_XTS_DEC_TEST_VECTORS
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
				.vecs = serpent_xts_enc_tv_template,
				.count = SERPENT_XTS_ENC_TEST_VECTORS
				.vecs = serpent_xts_dec_tv_template,
				.count = SERPENT_XTS_DEC_TEST_VECTORS
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
				.vecs = tf_xts_enc_tv_template,
				.count = TF_XTS_ENC_TEST_VECTORS
				.vecs = tf_xts_dec_tv_template,
				.count = TF_XTS_DEC_TEST_VECTORS
		.test = alg_test_pcomp,
				.vecs = zlib_comp_tv_template,
				.count = ZLIB_COMP_TEST_VECTORS
				.vecs = zlib_decomp_tv_template,
				.count = ZLIB_DECOMP_TEST_VECTORS
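/*
 * alg_find_test(): binary search over alg_test_descs[], which is why the
 * table above must stay sorted by algorithm name.
 */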
static int alg_find_test(const char *alg)
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);

		if (fips_enabled && !alg_test_descs[i].fips_allowed)

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);

	i = alg_find_test(alg);
	j = alg_find_test(driver);

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))

		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,

	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",

	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);