/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
/*
 * Need slab memory for testing (size in number of pages).
 */

/*
 * Indexes into the xbuf to simulate cross-page access.
 */

/*
 * Used by test_cipher()
 */
struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct aead_testvec *vecs;
};

struct cipher_test_suite {
	struct cipher_testvec *vecs;
};

struct comp_test_suite {
	struct comp_testvec *vecs;
};

struct pcomp_test_suite {
	struct pcomp_testvec *vecs;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
};

struct alg_test_desc {
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	struct aead_test_suite aead;
	struct cipher_test_suite cipher;
	struct comp_test_suite comp;
	struct pcomp_test_suite pcomp;
	struct hash_test_suite hash;
	struct cprng_test_suite cprng;
};
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
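
/*
 * How the IDX offsets are consumed below: each IDX value is split into a
 * page number (IDX[k] >> PAGE_SHIFT, an index into the xbuf[]/axbuf[]
 * arrays) and a byte offset within that page (offset_in_page(IDX[k])).
 * For example, with 4 KiB pages an index of 0x3214 selects page 3 at
 * offset 0x214.  (Worked example only; the IDX1..IDX8 macros themselves
 * are not shown in this excerpt.)
 */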
static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
		       16, 1, buf, len, false);
}
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}
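
/*
 * Completion convention used by the async tests below (a summary of the
 * existing code, not new behaviour): the request callback above ignores
 * -EINPROGRESS notifications, records the final status in
 * tcrypt_result.err and signals the completion.  The test loops then
 * wait_for_completion_interruptible(), pick up ->err, and re-arm the
 * completion with INIT_COMPLETION() before issuing the next request.
 */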
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}
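
/*
 * Sketch of how the two helpers above are used by every test routine in
 * this file (the error label name is illustrative, not part of the API):
 *
 *	char *xbuf[XBUFSIZE];
 *
 *	if (testmgr_alloc_buf(xbuf))
 *		goto out_nobuf;
 *	...	use xbuf[0..XBUFSIZE-1], one page each	...
 *	testmgr_free_buf(xbuf);
 */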
static int do_one_async_hash_op(struct ahash_request *req,
				struct tcrypt_result *tr,
				int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		INIT_COMPLETION(tr->completion);
	}
	return ret;
}
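
/*
 * do_one_async_hash_op() folds the two possible outcomes of an async hash
 * call into one synchronous result: if the operation was queued
 * (-EINPROGRESS or -EBUSY) it waits for tcrypt_complete() to fire and
 * returns the status recorded there; otherwise it returns the immediate
 * return code unchanged.
 */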
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	struct ahash_request *req;
	struct tcrypt_result tresult;
	char *xbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 64);

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);

		ahash_request_set_crypt(req, sg, result, template[i].psize);

		ret = do_one_async_hash_op(req, &tresult,
					   crypto_ahash_digest(req));
			pr_err("alg: hash: digest failed on test %d "
			       "for %s: ret=%d\n", j, algo, -ret);

		ret = do_one_async_hash_op(req, &tresult,
					   crypto_ahash_init(req));
			pr_err("alg: hash: init failed on test %d "
			       "for %s: ret=%d\n", j, algo, -ret);

		ret = do_one_async_hash_op(req, &tresult,
					   crypto_ahash_update(req));
			pr_err("alg: hash: update failed on test %d "
			       "for %s: ret=%d\n", j, algo, -ret);

		ret = do_one_async_hash_op(req, &tresult,
					   crypto_ahash_final(req));
			pr_err("alg: hash: final failed on test %d "
			       "for %s: ret=%d\n", j, algo, -ret);

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));

	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			memset(result, 0, 64);

			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))

				memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
				       offset_in_page(IDX[k]),
				       template[i].plaintext + temp,
				       template[i].tap[k]);
				temp += template[i].tap[k];

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,
					       -ret);

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));

	ahash_request_free(req);
	testmgr_free_buf(xbuf);
}
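
/*
 * test_aead() below follows the same two-pass structure as test_hash():
 * a first pass feeding each vector from a single linear buffer, then a
 * second pass that scatters the input and the associated data across
 * pages according to template[i].tap[]/atap[] so that scatter-gather and
 * page-crossing paths get exercised.  For decryption vectors with .novrfy
 * set, the expected outcome is -EBADMSG from the authentication check.
 */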
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];
	struct tcrypt_result result;
	unsigned int authsize;
	char *xbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
	if (testmgr_alloc_buf(axbuf))

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
		printk(KERN_ERR "alg: aead: Failed to allocate request for "
		       "%s\n", algo);

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			/* some templates have no input data but they will
			 * touch the last byte of the result buffer
			 */
			if (WARN_ON(template[i].ilen > PAGE_SIZE ||
				    template[i].alen > PAGE_SIZE))

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "test %d for %s: flags=%x\n", j, algo,
				       crypto_aead_get_flags(tfm));

			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on test %d for %s\n",
				       authsize, j, algo);

			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));
			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);
			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			if (template[i].novrfy) {
				/* verification was supposed to fail */
				printk(KERN_ERR "alg: aead: %s failed "
				       "on test %d for %s: ret was 0, "
				       "expected -EBADMSG\n",
				       e, j, algo);
				/* so really, we got a bad message */

				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);

				if (template[i].novrfy)
					/* verification failure was expected */

				printk(KERN_ERR "alg: aead: %s failed on test "
				       "%d for %s: ret=%d\n", e, j, algo, -ret);

			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on "
				       "%s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "chunk test %d for %s: flags=%x\n", j,
				       algo, crypto_aead_get_flags(tfm));

			authsize = abs(template[i].rlen - template[i].ilen);

			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
				if (offset_in_page(q) + n < PAGE_SIZE)

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];

			ret = crypto_aead_setauthsize(tfm, authsize);
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on chunk test %d for "
				       "%s\n", authsize, j, algo);

			if (WARN_ON(sg[k - 1].offset +
				    sg[k - 1].length + authsize > PAGE_SIZE))

			sg[k - 1].length += authsize;

			sg_init_table(asg, template[i].anp);
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].atap[k] > PAGE_SIZE))

				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);
			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			if (template[i].novrfy) {
				/* verification was supposed to fail */
				printk(KERN_ERR "alg: aead: %s failed "
				       "on chunk test %d for %s: ret "
				       "was 0, expected -EBADMSG\n",
				       e, j, algo);
				/* so really, we got a bad message */

				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);

				if (template[i].novrfy)
					/* verification failure was expected */

				printk(KERN_ERR "alg: aead: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);

			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);

				if (k == template[i].np - 1 && !enc) {
					if (memcmp(q, template[i].input +

				for (n = 0; offset_in_page(q + n) &&
					    q[n]; n++)

					printk(KERN_ERR "alg: aead: Result "
					       "buffer corruption in chunk "
					       "test %d on %s at page %u for "
					       "%s: %u bytes:\n", j, e, k,
					       algo, n);

				temp += template[i].tap[k];

	aead_request_free(req);
	testmgr_free_buf(axbuf);
	testmgr_free_buf(xbuf);
}
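
/*
 * test_cipher() exercises the low-level synchronous single-block API
 * (crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one()): each vector
 * is processed in place, one crypto_cipher_blocksize() sized block at a
 * time, and the output is compared against template[i].result.
 */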
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *xbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))

	for (i = 0; i < tcount; i++) {
		if (WARN_ON(template[i].ilen > PAGE_SIZE))

		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);

		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);

	testmgr_free_buf(xbuf);
}
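
/*
 * test_skcipher() drives the asynchronous ablkcipher interface.  As in
 * test_hash()/test_aead(), vectors with .np == 0 run from one linear
 * buffer, while vectors with .np > 0 are split into template[i].tap[k]
 * sized chunks placed at the IDX[] offsets.  The byte immediately after
 * each chunk is zeroed when it still fits in the page, and the area past
 * each chunk is scanned afterwards for unexpected non-zero bytes to catch
 * result-buffer overruns.
 */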
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	struct tcrypt_result result;
	char *xbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			if (WARN_ON(template[i].ilen > PAGE_SIZE))

			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			crypto_ablkcipher_set_flags(tfm,
						    CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);

				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);

			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);

	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			crypto_ablkcipher_clear_flags(tfm, ~0);
			crypto_ablkcipher_set_flags(tfm,
						    CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));

			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);

				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);

			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, template[i].tap[k]);

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)

					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);

				temp += template[i].tap[k];

	ablkcipher_request_free(req);
	testmgr_free_buf(xbuf);
}
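
/*
 * test_comp() uses the one-shot synchronous crypto_comp API: each vector
 * is compressed (or decompressed) into a COMP_BUF_SIZE buffer in a single
 * call, and both the produced length and the output bytes are checked
 * against the test vector.
 */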
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char result[COMP_BUF_SIZE];

	for (i = 0; i < ctcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);

	for (i = 0; i < dtcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
}
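
/*
 * test_pcomp() exercises the partial (streaming) compression API.  The
 * input is deliberately fed in two halves and the output space is also
 * granted in two instalments, so the setup/init/update/final sequence and
 * the -EAGAIN "need more output space" path all get exercised.  The byte
 * count reported back by update/final (apparently accumulated in
 * 'produced') must match both COMP_BUF_SIZE - avail_out and the expected
 * output length.
 */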
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	char result[COMP_BUF_SIZE];

	for (i = 0; i < ctcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_compress_setup(tfm, ctemplate[i].params,
					    ctemplate[i].paramsize);
			pr_err("alg: pcomp: compression setup failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		res = crypto_compress_init(tfm);
			pr_err("alg: pcomp: compression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		memset(result, 0, sizeof(result));

		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		res = crypto_compress_final(tfm, &req);
			pr_err("alg: pcomp: compression final failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "output len = %d (expected %d)\n", i + 1, algo,
			       COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);

		if (produced != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "returned len = %u (expected %d)\n", i + 1,
			       algo, produced, ctemplate[i].outlen);

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, ctemplate[i].outlen);

	for (i = 0; i < dtcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_decompress_setup(tfm, dtemplate[i].params,
					      dtemplate[i].paramsize);
			pr_err("alg: pcomp: decompression setup failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		res = crypto_decompress_init(tfm);
			pr_err("alg: pcomp: decompression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		res = crypto_decompress_final(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);

		if (produced != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: returned len = %u (expected %d)\n", i + 1,
			       algo, produced, dtemplate[i].outlen);

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
}
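
/*
 * test_cprng(): the seed handed to crypto_rng_reset() is the concatenation
 * V || key || DT taken from the test vector (vlen + klen + dtlen bytes).
 * Each vector is then expanded template[i].loops times with
 * crypto_rng_get_bytes() and compared against the expected output.
 */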
static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
		      unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
	int err = 0, i, j, seedsize;

	seedsize = crypto_rng_seedsize(tfm);

	seed = kmalloc(seedsize, GFP_KERNEL);
		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
		       "for %s\n", algo);

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 32);

		memcpy(seed, template[i].v, template[i].vlen);
		memcpy(seed + template[i].vlen, template[i].key,
		       template[i].klen);
		memcpy(seed + template[i].vlen + template[i].klen,
		       template[i].dt, template[i].dtlen);

		err = crypto_rng_reset(tfm, seed, seedsize);
			printk(KERN_ERR "alg: cprng: Failed to reset rng "
			       "for %s\n", algo);

		for (j = 0; j < template[i].loops; j++) {
			err = crypto_rng_get_bytes(tfm, result,
						   template[i].rlen);
			if (err != template[i].rlen) {
				printk(KERN_ERR "alg: cprng: Failed to obtain "
				       "the correct amount of random data for "
				       "%s (requested %d, got %d)\n", algo,
				       template[i].rlen, err);

		err = memcmp(result, template[i].result,
			     template[i].rlen);
			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
			       i, algo);
			hexdump(result, template[i].rlen);
}
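
/*
 * The alg_test_*() wrappers below all follow the same pattern: allocate a
 * transform for the requested driver with the matching crypto_alloc_*()
 * call, run the encryption vectors and then the decryption vectors (where
 * present) from the descriptor's test suite, and free the transform again.
 */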
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_aead *tfm;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);

	if (!err && desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

	crypto_free_aead(tfm);
}
static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_cipher *tfm;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				  desc->suite.cipher.enc.count);

	if (desc->suite.cipher.dec.vecs)
		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				  desc->suite.cipher.dec.count);

	crypto_free_cipher(tfm);
}
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_ablkcipher *tfm;

	tfm = crypto_alloc_ablkcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				    desc->suite.cipher.enc.count);

	if (desc->suite.cipher.dec.vecs)
		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				    desc->suite.cipher.dec.count);

	crypto_free_ablkcipher(tfm);
}
static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *tfm;

	tfm = crypto_alloc_comp(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_comp(tfm, desc->suite.comp.comp.vecs,
			desc->suite.comp.decomp.vecs,
			desc->suite.comp.comp.count,
			desc->suite.comp.decomp.count);

	crypto_free_comp(tfm);
}
static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_pcomp *tfm;

	tfm = crypto_alloc_pcomp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
			 desc->suite.pcomp.decomp.vecs,
			 desc->suite.pcomp.comp.count,
			 desc->suite.pcomp.decomp.count);

	crypto_free_pcomp(tfm);
}
static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_ahash *tfm;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, desc->suite.hash.vecs,
			desc->suite.hash.count, true);
	if (!err)
		err = test_hash(tfm, desc->suite.hash.vecs,
				desc->suite.hash.count, false);

	crypto_free_ahash(tfm);
}
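
/*
 * alg_test_crc32c() runs the normal hash vectors and then adds one extra
 * sanity check on the shash interface: it seeds the descriptor context
 * with a known 32-bit value and verifies that crypto_shash_final() returns
 * the bitwise complement of that value, which is what a correct crc32c
 * final step must produce.
 */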
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;

	err = alg_test_hash(desc, driver, type, mask);

	tfm = crypto_alloc_shash(driver, type, mask);
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));

	struct {
		struct shash_desc shash;
		char ctx[crypto_shash_descsize(tfm)];
	} sdesc;

	sdesc.shash.tfm = tfm;
	sdesc.shash.flags = 0;

	*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
	err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
		printk(KERN_ERR "alg: crc32c: Operation failed for "
		       "%s: %d\n", driver, err);

	if (val != ~420553207) {
		printk(KERN_ERR "alg: crc32c: Test failed for %s: "
		       "%d\n", driver, val);

	crypto_free_shash(tfm);
}
static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_rng *rng;

	rng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(rng)) {
		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(rng));
		return PTR_ERR(rng);
	}

	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);

	crypto_free_rng(rng);
}
static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}

/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
	.alg = "__driver-cbc-aes-aesni",
	.test = alg_test_null,

	.alg = "__driver-ecb-aes-aesni",
	.test = alg_test_null,

	.alg = "__ghash-pclmulqdqni",
	.test = alg_test_null,

	.alg = "ansi_cprng",
	.test = alg_test_cprng,
		.vecs = ansi_cprng_aes_tv_template,
		.count = ANSI_CPRNG_AES_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = aes_cbc_enc_tv_template,
		.count = AES_CBC_ENC_TEST_VECTORS
		.vecs = aes_cbc_dec_tv_template,
		.count = AES_CBC_DEC_TEST_VECTORS

	.alg = "cbc(anubis)",
	.test = alg_test_skcipher,
		.vecs = anubis_cbc_enc_tv_template,
		.count = ANUBIS_CBC_ENC_TEST_VECTORS
		.vecs = anubis_cbc_dec_tv_template,
		.count = ANUBIS_CBC_DEC_TEST_VECTORS

	.alg = "cbc(blowfish)",
	.test = alg_test_skcipher,
		.vecs = bf_cbc_enc_tv_template,
		.count = BF_CBC_ENC_TEST_VECTORS
		.vecs = bf_cbc_dec_tv_template,
		.count = BF_CBC_DEC_TEST_VECTORS

	.alg = "cbc(camellia)",
	.test = alg_test_skcipher,
		.vecs = camellia_cbc_enc_tv_template,
		.count = CAMELLIA_CBC_ENC_TEST_VECTORS
		.vecs = camellia_cbc_dec_tv_template,
		.count = CAMELLIA_CBC_DEC_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = des_cbc_enc_tv_template,
		.count = DES_CBC_ENC_TEST_VECTORS
		.vecs = des_cbc_dec_tv_template,
		.count = DES_CBC_DEC_TEST_VECTORS

	.alg = "cbc(des3_ede)",
	.test = alg_test_skcipher,
		.vecs = des3_ede_cbc_enc_tv_template,
		.count = DES3_EDE_CBC_ENC_TEST_VECTORS
		.vecs = des3_ede_cbc_dec_tv_template,
		.count = DES3_EDE_CBC_DEC_TEST_VECTORS

	.alg = "cbc(twofish)",
	.test = alg_test_skcipher,
		.vecs = tf_cbc_enc_tv_template,
		.count = TF_CBC_ENC_TEST_VECTORS
		.vecs = tf_cbc_dec_tv_template,
		.count = TF_CBC_DEC_TEST_VECTORS

	.test = alg_test_aead,
		.vecs = aes_ccm_enc_tv_template,
		.count = AES_CCM_ENC_TEST_VECTORS
		.vecs = aes_ccm_dec_tv_template,
		.count = AES_CCM_DEC_TEST_VECTORS

	.test = alg_test_crc32c,
		.vecs = crc32c_tv_template,
		.count = CRC32C_TEST_VECTORS

	.alg = "cryptd(__driver-ecb-aes-aesni)",
	.test = alg_test_null,

	.alg = "cryptd(__ghash-pclmulqdqni)",
	.test = alg_test_null,

	.test = alg_test_skcipher,
		.vecs = aes_ctr_enc_tv_template,
		.count = AES_CTR_ENC_TEST_VECTORS
		.vecs = aes_ctr_dec_tv_template,
		.count = AES_CTR_DEC_TEST_VECTORS

	.alg = "cts(cbc(aes))",
	.test = alg_test_skcipher,
		.vecs = cts_mode_enc_tv_template,
		.count = CTS_MODE_ENC_TEST_VECTORS
		.vecs = cts_mode_dec_tv_template,
		.count = CTS_MODE_DEC_TEST_VECTORS

	.test = alg_test_comp,
		.vecs = deflate_comp_tv_template,
		.count = DEFLATE_COMP_TEST_VECTORS
		.vecs = deflate_decomp_tv_template,
		.count = DEFLATE_DECOMP_TEST_VECTORS

	.alg = "ecb(__aes-aesni)",
	.test = alg_test_null,

	.test = alg_test_skcipher,
		.vecs = aes_enc_tv_template,
		.count = AES_ENC_TEST_VECTORS
		.vecs = aes_dec_tv_template,
		.count = AES_DEC_TEST_VECTORS

	.alg = "ecb(anubis)",
	.test = alg_test_skcipher,
		.vecs = anubis_enc_tv_template,
		.count = ANUBIS_ENC_TEST_VECTORS
		.vecs = anubis_dec_tv_template,
		.count = ANUBIS_DEC_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = arc4_enc_tv_template,
		.count = ARC4_ENC_TEST_VECTORS
		.vecs = arc4_dec_tv_template,
		.count = ARC4_DEC_TEST_VECTORS

	.alg = "ecb(blowfish)",
	.test = alg_test_skcipher,
		.vecs = bf_enc_tv_template,
		.count = BF_ENC_TEST_VECTORS
		.vecs = bf_dec_tv_template,
		.count = BF_DEC_TEST_VECTORS

	.alg = "ecb(camellia)",
	.test = alg_test_skcipher,
		.vecs = camellia_enc_tv_template,
		.count = CAMELLIA_ENC_TEST_VECTORS
		.vecs = camellia_dec_tv_template,
		.count = CAMELLIA_DEC_TEST_VECTORS

	.alg = "ecb(cast5)",
	.test = alg_test_skcipher,
		.vecs = cast5_enc_tv_template,
		.count = CAST5_ENC_TEST_VECTORS
		.vecs = cast5_dec_tv_template,
		.count = CAST5_DEC_TEST_VECTORS

	.alg = "ecb(cast6)",
	.test = alg_test_skcipher,
		.vecs = cast6_enc_tv_template,
		.count = CAST6_ENC_TEST_VECTORS
		.vecs = cast6_dec_tv_template,
		.count = CAST6_DEC_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = des_enc_tv_template,
		.count = DES_ENC_TEST_VECTORS
		.vecs = des_dec_tv_template,
		.count = DES_DEC_TEST_VECTORS

	.alg = "ecb(des3_ede)",
	.test = alg_test_skcipher,
		.vecs = des3_ede_enc_tv_template,
		.count = DES3_EDE_ENC_TEST_VECTORS
		.vecs = des3_ede_dec_tv_template,
		.count = DES3_EDE_DEC_TEST_VECTORS

	.alg = "ecb(khazad)",
	.test = alg_test_skcipher,
		.vecs = khazad_enc_tv_template,
		.count = KHAZAD_ENC_TEST_VECTORS
		.vecs = khazad_dec_tv_template,
		.count = KHAZAD_DEC_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = seed_enc_tv_template,
		.count = SEED_ENC_TEST_VECTORS
		.vecs = seed_dec_tv_template,
		.count = SEED_DEC_TEST_VECTORS

	.alg = "ecb(serpent)",
	.test = alg_test_skcipher,
		.vecs = serpent_enc_tv_template,
		.count = SERPENT_ENC_TEST_VECTORS
		.vecs = serpent_dec_tv_template,
		.count = SERPENT_DEC_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = tea_enc_tv_template,
		.count = TEA_ENC_TEST_VECTORS
		.vecs = tea_dec_tv_template,
		.count = TEA_DEC_TEST_VECTORS

	.alg = "ecb(tnepres)",
	.test = alg_test_skcipher,
		.vecs = tnepres_enc_tv_template,
		.count = TNEPRES_ENC_TEST_VECTORS
		.vecs = tnepres_dec_tv_template,
		.count = TNEPRES_DEC_TEST_VECTORS

	.alg = "ecb(twofish)",
	.test = alg_test_skcipher,
		.vecs = tf_enc_tv_template,
		.count = TF_ENC_TEST_VECTORS
		.vecs = tf_dec_tv_template,
		.count = TF_DEC_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = xeta_enc_tv_template,
		.count = XETA_ENC_TEST_VECTORS
		.vecs = xeta_dec_tv_template,
		.count = XETA_DEC_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = xtea_enc_tv_template,
		.count = XTEA_ENC_TEST_VECTORS
		.vecs = xtea_dec_tv_template,
		.count = XTEA_DEC_TEST_VECTORS

	.test = alg_test_aead,
		.vecs = aes_gcm_enc_tv_template,
		.count = AES_GCM_ENC_TEST_VECTORS
		.vecs = aes_gcm_dec_tv_template,
		.count = AES_GCM_DEC_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = ghash_tv_template,
		.count = GHASH_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = hmac_md5_tv_template,
		.count = HMAC_MD5_TEST_VECTORS

	.alg = "hmac(rmd128)",
	.test = alg_test_hash,
		.vecs = hmac_rmd128_tv_template,
		.count = HMAC_RMD128_TEST_VECTORS

	.alg = "hmac(rmd160)",
	.test = alg_test_hash,
		.vecs = hmac_rmd160_tv_template,
		.count = HMAC_RMD160_TEST_VECTORS

	.alg = "hmac(sha1)",
	.test = alg_test_hash,
		.vecs = hmac_sha1_tv_template,
		.count = HMAC_SHA1_TEST_VECTORS

	.alg = "hmac(sha224)",
	.test = alg_test_hash,
		.vecs = hmac_sha224_tv_template,
		.count = HMAC_SHA224_TEST_VECTORS

	.alg = "hmac(sha256)",
	.test = alg_test_hash,
		.vecs = hmac_sha256_tv_template,
		.count = HMAC_SHA256_TEST_VECTORS

	.alg = "hmac(sha384)",
	.test = alg_test_hash,
		.vecs = hmac_sha384_tv_template,
		.count = HMAC_SHA384_TEST_VECTORS

	.alg = "hmac(sha512)",
	.test = alg_test_hash,
		.vecs = hmac_sha512_tv_template,
		.count = HMAC_SHA512_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = aes_lrw_enc_tv_template,
		.count = AES_LRW_ENC_TEST_VECTORS
		.vecs = aes_lrw_dec_tv_template,
		.count = AES_LRW_DEC_TEST_VECTORS

	.test = alg_test_comp,
		.vecs = lzo_comp_tv_template,
		.count = LZO_COMP_TEST_VECTORS
		.vecs = lzo_decomp_tv_template,
		.count = LZO_DECOMP_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = md4_tv_template,
		.count = MD4_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = md5_tv_template,
		.count = MD5_TEST_VECTORS

	.alg = "michael_mic",
	.test = alg_test_hash,
		.vecs = michael_mic_tv_template,
		.count = MICHAEL_MIC_TEST_VECTORS

	.alg = "pcbc(fcrypt)",
	.test = alg_test_skcipher,
		.vecs = fcrypt_pcbc_enc_tv_template,
		.count = FCRYPT_ENC_TEST_VECTORS
		.vecs = fcrypt_pcbc_dec_tv_template,
		.count = FCRYPT_DEC_TEST_VECTORS

	.alg = "rfc3686(ctr(aes))",
	.test = alg_test_skcipher,
		.vecs = aes_ctr_rfc3686_enc_tv_template,
		.count = AES_CTR_3686_ENC_TEST_VECTORS
		.vecs = aes_ctr_rfc3686_dec_tv_template,
		.count = AES_CTR_3686_DEC_TEST_VECTORS

	.alg = "rfc4309(ccm(aes))",
	.test = alg_test_aead,
		.vecs = aes_ccm_rfc4309_enc_tv_template,
		.count = AES_CCM_4309_ENC_TEST_VECTORS
		.vecs = aes_ccm_rfc4309_dec_tv_template,
		.count = AES_CCM_4309_DEC_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = rmd128_tv_template,
		.count = RMD128_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = rmd160_tv_template,
		.count = RMD160_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = rmd256_tv_template,
		.count = RMD256_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = rmd320_tv_template,
		.count = RMD320_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = salsa20_stream_enc_tv_template,
		.count = SALSA20_STREAM_ENC_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = sha1_tv_template,
		.count = SHA1_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = sha224_tv_template,
		.count = SHA224_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = sha256_tv_template,
		.count = SHA256_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = sha384_tv_template,
		.count = SHA384_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = sha512_tv_template,
		.count = SHA512_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = tgr128_tv_template,
		.count = TGR128_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = tgr160_tv_template,
		.count = TGR160_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = tgr192_tv_template,
		.count = TGR192_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = aes_vmac128_tv_template,
		.count = VMAC_AES_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = wp256_tv_template,
		.count = WP256_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = wp384_tv_template,
		.count = WP384_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = wp512_tv_template,
		.count = WP512_TEST_VECTORS

	.test = alg_test_hash,
		.vecs = aes_xcbc128_tv_template,
		.count = XCBC_AES_TEST_VECTORS

	.test = alg_test_skcipher,
		.vecs = aes_xts_enc_tv_template,
		.count = AES_XTS_ENC_TEST_VECTORS
		.vecs = aes_xts_dec_tv_template,
		.count = AES_XTS_DEC_TEST_VECTORS

	.test = alg_test_pcomp,
		.vecs = zlib_comp_tv_template,
		.count = ZLIB_COMP_TEST_VECTORS
		.vecs = zlib_decomp_tv_template,
		.count = ZLIB_DECOMP_TEST_VECTORS
};
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);
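
/*
 * alg_find_test() is a binary search over alg_test_descs[], which is why
 * the table above must stay sorted by algorithm name; it returns the
 * index of the matching entry, or a negative value when the algorithm has
 * no test description.
 */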
2476 int alg_test(const char *driver
, const char *alg
, u32 type
, u32 mask
)
2482 if ((type
& CRYPTO_ALG_TYPE_MASK
) == CRYPTO_ALG_TYPE_CIPHER
) {
2483 char nalg
[CRYPTO_MAX_ALG_NAME
];
2485 if (snprintf(nalg
, sizeof(nalg
), "ecb(%s)", alg
) >=
2487 return -ENAMETOOLONG
;
2489 i
= alg_find_test(nalg
);
2493 if (fips_enabled
&& !alg_test_descs
[i
].fips_allowed
)
2496 rc
= alg_test_cipher(alg_test_descs
+ i
, driver
, type
, mask
);
2500 i
= alg_find_test(alg
);
2501 j
= alg_find_test(driver
);
2505 if (fips_enabled
&& ((i
>= 0 && !alg_test_descs
[i
].fips_allowed
) ||
2506 (j
>= 0 && !alg_test_descs
[j
].fips_allowed
)))
2511 rc
|= alg_test_descs
[i
].test(alg_test_descs
+ i
, driver
,
2514 rc
|= alg_test_descs
[j
].test(alg_test_descs
+ j
, driver
,
2518 if (fips_enabled
&& rc
)
2519 panic("%s: %s alg self test failed in fips mode!\n", driver
, alg
);
2521 if (fips_enabled
&& !rc
)
2522 printk(KERN_INFO
"alg: self-tests for %s (%s) passed\n",
2528 printk(KERN_INFO
"alg: No test for %s (%s)\n", alg
, driver
);
2533 EXPORT_SYMBOL_GPL(alg_test
);