/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

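/*
 * With CONFIG_CRYPTO_MANAGER_DISABLE_TESTS set, the self-tests are compiled
 * out and alg_test() is reduced to a stub.
 */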
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

/*
 * Need slab memory for testing (size in number of pages).
 */

/*
 * Indexes into the xbuf to simulate cross-page access.
 */

/*
 * Used by test_cipher()
 */

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

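/*
 * One alg_test_desc entry describes how a single algorithm is tested: the
 * test() callback to invoke and the suite of known-answer vectors it uses.
 */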
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
		       16, 1, buf, len, false);
}

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

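/* Allocate the XBUFSIZE scratch pages used to build the test scatterlists. */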
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

static int do_one_async_hash_op(struct ahash_request *req,
				struct tcrypt_result *tr,
				int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		INIT_COMPLETION(tr->completion);
	}
	return ret;
}

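/*
 * Known-answer test for one ahash transform.  Each vector is hashed first
 * from a linear buffer (via digest(), or init/update/final when use_digest
 * is false) and then, for vectors with a non-zero np, from a scatterlist
 * split at the tap[] offsets to exercise cross-page access.
 */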
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char result[64];
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 64);

		hash_buff = xbuf[0];

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_digest(req));
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_init(req));
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_update(req));
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = do_one_async_hash_op(req, &tresult,
						   crypto_ahash_final(req));
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			memset(result, 0, 64);

			temp = 0;
			sg_init_table(sg, template[i].np);
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;
				sg_set_buf(&sg[k],
					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].plaintext + temp,
						  template[i].tap[k]),
					   template[i].tap[k]);
				temp += template[i].tap[k];
			}

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);
				if (ret) {
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,
					       -ret);
					goto out;
				}
			}

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);
				goto out;
			}

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));
				ret = -EINVAL;
				goto out;
			}
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

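/*
 * AEAD known-answer test: encrypt or decrypt each vector (enc selects the
 * direction), first from linear buffers and then from chunked scatterlists
 * built from tap[]/atap[], checking the output, the authentication tag
 * handling and, for novrfy vectors, that verification fails with -EBADMSG.
 */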
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];
	const char *e;
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *assoc;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: aead: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			assoc = axbuf[0];

			ret = -EINVAL;
			if (WARN_ON(template[i].ilen > PAGE_SIZE ||
				    template[i].alen > PAGE_SIZE))
				goto out;

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "test %d for %s: flags=%x\n", j, algo,
				       crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed "
					       "on test %d for %s: ret was 0, "
					       "expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on test "
				       "%d for %s: ret=%d\n", e, j, algo, -ret);
				goto out;
			}

			q = input;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on "
				       "%s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "chunk test %d for %s: flags=%x\n", j,
				       algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on chunk test %d for "
				       "%s\n", authsize, j, algo);
				goto out;
			}

			if (enc) {
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				sg[k - 1].length += authsize;
			}

			sg_init_table(asg, template[i].anp);
			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].atap[k] > PAGE_SIZE))
					goto out;
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					printk(KERN_ERR "alg: aead: %s failed "
					       "on chunk test %d for %s: ret "
					       "was 0, expected -EBADMSG\n",
					       e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					if (memcmp(q, template[i].input +
						   temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					printk(KERN_ERR "alg: aead: Result "
					       "buffer corruption in chunk "
					       "test %d on %s at page %u for "
					       "%s: %u bytes:\n", j, e, k,
					       algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
out_noreq:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	return ret;
}

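/*
 * Single-block cipher test: run each vector through
 * crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one() one block at a
 * time and compare the result against the expected output.
 */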
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	for (i = 0; i < tcount; i++) {
		ret = -EINVAL;
		if (WARN_ON(template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

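/*
 * Block cipher (ablkcipher) test: linear buffers first, then chunked
 * scatterlists built from the tap[] layout, in the direction selected by
 * enc, with a check for writes past the end of each chunk.
 */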
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	const char *e;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			ret = -EINVAL;
			if (WARN_ON(template[i].ilen > PAGE_SIZE))
				goto out;

			data = xbuf[0];
			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);
				goto out;
			}

			q = data;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			temp = 0;
			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];
			}

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			temp = 0;
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, template[i].tap[k]);
					goto out;
				}

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
				if (n) {
					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);
					hexdump(q, n);
					goto out;
				}
				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	ablkcipher_request_free(req);
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

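/*
 * Synchronous compression test: compress and decompress the given vectors
 * and check both the output length and the output bytes.
 */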
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}

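/*
 * Partial (streaming) compression test: each vector is fed to the pcomp
 * transform in two halves with a restricted output buffer, so the
 * setup/init/update/final sequence and the -EAGAIN handling are exercised.
 */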
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int res;

	for (i = 0; i < ctcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_compress_setup(tfm, ctemplate[i].params,
					    ctemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: compression setup failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		res = crypto_compress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: compression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		res = crypto_compress_final(tfm, &req);
		if (res < 0) {
			pr_err("alg: pcomp: compression final failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		produced += res;

		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "output len = %d (expected %d)\n", i + 1, algo,
			       COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);
			return -EINVAL;
		}

		if (produced != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "returned len = %u (expected %d)\n", i + 1,
			       algo, produced, ctemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, ctemplate[i].outlen);
			return -EINVAL;
		}
	}

	for (i = 0; i < dtcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_decompress_setup(tfm, dtemplate[i].params,
					      dtemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: decompression setup failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		res = crypto_decompress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: decompression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		res = crypto_decompress_final(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);
			return -EINVAL;
		}

		if (produced != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: returned len = %u (expected %d)\n", i + 1,
			       algo, produced, dtemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
			return -EINVAL;
		}
	}

	return 0;
}

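/*
 * Deterministic RNG test: seed the generator with V, the key and DT from
 * each vector, pull template[i].rlen bytes template[i].loops times and
 * compare the final block against the expected output.
 */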
static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
		      unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
	int err = 0, i, j, seedsize;
	u8 *seed;
	char result[32];

	seedsize = crypto_rng_seedsize(tfm);

	seed = kmalloc(seedsize, GFP_KERNEL);
	if (!seed) {
		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
		       "for %s\n", algo);
		return -ENOMEM;
	}

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 32);

		memcpy(seed, template[i].v, template[i].vlen);
		memcpy(seed + template[i].vlen, template[i].key,
		       template[i].klen);
		memcpy(seed + template[i].vlen + template[i].klen,
		       template[i].dt, template[i].dtlen);

		err = crypto_rng_reset(tfm, seed, seedsize);
		if (err) {
			printk(KERN_ERR "alg: cprng: Failed to reset rng "
			       "for %s\n", algo);
			goto out;
		}

		for (j = 0; j < template[i].loops; j++) {
			err = crypto_rng_get_bytes(tfm, result,
						   template[i].rlen);
			if (err != template[i].rlen) {
				printk(KERN_ERR "alg: cprng: Failed to obtain "
				       "the correct amount of random data for "
				       "%s (requested %d, got %d)\n", algo,
				       template[i].rlen, err);
				goto out;
			}
		}

		err = memcmp(result, template[i].result,
			     template[i].rlen);
		if (err) {
			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
			       i, algo);
			hexdump(result, template[i].rlen);
			err = -EINVAL;
			goto out;
		}
	}

out:
	kfree(seed);
	return err;
}

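/*
 * The alg_test_*() wrappers below allocate a transform of the right type
 * for the driver under test, run the matching test_*() routine over the
 * vectors in desc->suite, and free the transform again.
 */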
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_aead *tfm;
	int err = 0;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);
		if (err)
			goto out;
	}

	if (!err && desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

out:
	crypto_free_aead(tfm);
	return err;
}

static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_cipher *tfm;
	int err = 0;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				  desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				  desc->suite.cipher.dec.count);

out:
	crypto_free_cipher(tfm);
	return err;
}

static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_ablkcipher *tfm;
	int err = 0;

	tfm = crypto_alloc_ablkcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				    desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				    desc->suite.cipher.dec.count);

out:
	crypto_free_ablkcipher(tfm);
	return err;
}

static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *tfm;
	int err;

	tfm = crypto_alloc_comp(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_comp(tfm, desc->suite.comp.comp.vecs,
			desc->suite.comp.decomp.vecs,
			desc->suite.comp.comp.count,
			desc->suite.comp.decomp.count);

	crypto_free_comp(tfm);
	return err;
}

static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_pcomp *tfm;
	int err;

	tfm = crypto_alloc_pcomp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
			 desc->suite.pcomp.decomp.vecs,
			 desc->suite.pcomp.comp.count,
			 desc->suite.pcomp.decomp.count);

	crypto_free_pcomp(tfm);
	return err;
}

static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, desc->suite.hash.vecs,
			desc->suite.hash.count, true);
	if (!err)
		err = test_hash(tfm, desc->suite.hash.vecs,
				desc->suite.hash.count, false);

	crypto_free_ahash(tfm);
	return err;
}

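/*
 * crc32c gets the generic hash test plus an extra sanity check: the shash
 * descriptor context is seeded with a known value and crypto_shash_final()
 * must produce its bitwise complement.
 */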
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		struct {
			struct shash_desc shash;
			char ctx[crypto_shash_descsize(tfm)];
		} sdesc;

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}

static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_rng *rng;
	int err;

	rng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(rng)) {
		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(rng));
		return PTR_ERR(rng);
	}

	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);

	crypto_free_rng(rng);
	return err;
}

static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}

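/*
 * Algorithm and driver names are looked up in this table by alg_test() via
 * a binary search, so the entries must stay sorted.  alg_test_null entries
 * cover internal helper implementations that are not tested directly.
 */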
/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
	{
		.alg = "__driver-cbc-aes-aesni",
		.test = alg_test_null,
	}, {
		.alg = "__driver-ecb-aes-aesni",
		.test = alg_test_null,
	}, {
		.alg = "__ghash-pclmulqdqni",
		.test = alg_test_null,
	}, {
		.alg = "ansi_cprng",
		.test = alg_test_cprng,
			.vecs = ansi_cprng_aes_tv_template,
			.count = ANSI_CPRNG_AES_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_cbc_enc_tv_template,
			.count = AES_CBC_ENC_TEST_VECTORS
			.vecs = aes_cbc_dec_tv_template,
			.count = AES_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
			.vecs = anubis_cbc_enc_tv_template,
			.count = ANUBIS_CBC_ENC_TEST_VECTORS
			.vecs = anubis_cbc_dec_tv_template,
			.count = ANUBIS_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
			.vecs = bf_cbc_enc_tv_template,
			.count = BF_CBC_ENC_TEST_VECTORS
			.vecs = bf_cbc_dec_tv_template,
			.count = BF_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
			.vecs = camellia_cbc_enc_tv_template,
			.count = CAMELLIA_CBC_ENC_TEST_VECTORS
			.vecs = camellia_cbc_dec_tv_template,
			.count = CAMELLIA_CBC_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = des_cbc_enc_tv_template,
			.count = DES_CBC_ENC_TEST_VECTORS
			.vecs = des_cbc_dec_tv_template,
			.count = DES_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
			.vecs = des3_ede_cbc_enc_tv_template,
			.count = DES3_EDE_CBC_ENC_TEST_VECTORS
			.vecs = des3_ede_cbc_dec_tv_template,
			.count = DES3_EDE_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
			.vecs = tf_cbc_enc_tv_template,
			.count = TF_CBC_ENC_TEST_VECTORS
			.vecs = tf_cbc_dec_tv_template,
			.count = TF_CBC_DEC_TEST_VECTORS
	}, {
		.test = alg_test_aead,
			.vecs = aes_ccm_enc_tv_template,
			.count = AES_CCM_ENC_TEST_VECTORS
			.vecs = aes_ccm_dec_tv_template,
			.count = AES_CCM_DEC_TEST_VECTORS
	}, {
		.test = alg_test_crc32c,
			.vecs = crc32c_tv_template,
			.count = CRC32C_TEST_VECTORS
	}, {
		.alg = "cryptd(__driver-ecb-aes-aesni)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__ghash-pclmulqdqni)",
		.test = alg_test_null,
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_ctr_enc_tv_template,
			.count = AES_CTR_ENC_TEST_VECTORS
			.vecs = aes_ctr_dec_tv_template,
			.count = AES_CTR_DEC_TEST_VECTORS
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
			.vecs = cts_mode_enc_tv_template,
			.count = CTS_MODE_ENC_TEST_VECTORS
			.vecs = cts_mode_dec_tv_template,
			.count = CTS_MODE_DEC_TEST_VECTORS
	}, {
		.test = alg_test_comp,
			.vecs = deflate_comp_tv_template,
			.count = DEFLATE_COMP_TEST_VECTORS
			.vecs = deflate_decomp_tv_template,
			.count = DEFLATE_DECOMP_TEST_VECTORS
	}, {
		.alg = "ecb(__aes-aesni)",
		.test = alg_test_null,
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_enc_tv_template,
			.count = AES_ENC_TEST_VECTORS
			.vecs = aes_dec_tv_template,
			.count = AES_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
			.vecs = anubis_enc_tv_template,
			.count = ANUBIS_ENC_TEST_VECTORS
			.vecs = anubis_dec_tv_template,
			.count = ANUBIS_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = arc4_enc_tv_template,
			.count = ARC4_ENC_TEST_VECTORS
			.vecs = arc4_dec_tv_template,
			.count = ARC4_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
			.vecs = bf_enc_tv_template,
			.count = BF_ENC_TEST_VECTORS
			.vecs = bf_dec_tv_template,
			.count = BF_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
			.vecs = camellia_enc_tv_template,
			.count = CAMELLIA_ENC_TEST_VECTORS
			.vecs = camellia_dec_tv_template,
			.count = CAMELLIA_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
			.vecs = cast5_enc_tv_template,
			.count = CAST5_ENC_TEST_VECTORS
			.vecs = cast5_dec_tv_template,
			.count = CAST5_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
			.vecs = cast6_enc_tv_template,
			.count = CAST6_ENC_TEST_VECTORS
			.vecs = cast6_dec_tv_template,
			.count = CAST6_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = des_enc_tv_template,
			.count = DES_ENC_TEST_VECTORS
			.vecs = des_dec_tv_template,
			.count = DES_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
			.vecs = des3_ede_enc_tv_template,
			.count = DES3_EDE_ENC_TEST_VECTORS
			.vecs = des3_ede_dec_tv_template,
			.count = DES3_EDE_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
			.vecs = khazad_enc_tv_template,
			.count = KHAZAD_ENC_TEST_VECTORS
			.vecs = khazad_dec_tv_template,
			.count = KHAZAD_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = seed_enc_tv_template,
			.count = SEED_ENC_TEST_VECTORS
			.vecs = seed_dec_tv_template,
			.count = SEED_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
			.vecs = serpent_enc_tv_template,
			.count = SERPENT_ENC_TEST_VECTORS
			.vecs = serpent_dec_tv_template,
			.count = SERPENT_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = tea_enc_tv_template,
			.count = TEA_ENC_TEST_VECTORS
			.vecs = tea_dec_tv_template,
			.count = TEA_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
			.vecs = tnepres_enc_tv_template,
			.count = TNEPRES_ENC_TEST_VECTORS
			.vecs = tnepres_dec_tv_template,
			.count = TNEPRES_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
			.vecs = tf_enc_tv_template,
			.count = TF_ENC_TEST_VECTORS
			.vecs = tf_dec_tv_template,
			.count = TF_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = xeta_enc_tv_template,
			.count = XETA_ENC_TEST_VECTORS
			.vecs = xeta_dec_tv_template,
			.count = XETA_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = xtea_enc_tv_template,
			.count = XTEA_ENC_TEST_VECTORS
			.vecs = xtea_dec_tv_template,
			.count = XTEA_DEC_TEST_VECTORS
	}, {
		.test = alg_test_aead,
			.vecs = aes_gcm_enc_tv_template,
			.count = AES_GCM_ENC_TEST_VECTORS
			.vecs = aes_gcm_dec_tv_template,
			.count = AES_GCM_DEC_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = ghash_tv_template,
			.count = GHASH_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = hmac_md5_tv_template,
			.count = HMAC_MD5_TEST_VECTORS
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
			.vecs = hmac_rmd128_tv_template,
			.count = HMAC_RMD128_TEST_VECTORS
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
			.vecs = hmac_rmd160_tv_template,
			.count = HMAC_RMD160_TEST_VECTORS
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
			.vecs = hmac_sha1_tv_template,
			.count = HMAC_SHA1_TEST_VECTORS
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
			.vecs = hmac_sha224_tv_template,
			.count = HMAC_SHA224_TEST_VECTORS
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
			.vecs = hmac_sha256_tv_template,
			.count = HMAC_SHA256_TEST_VECTORS
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
			.vecs = hmac_sha384_tv_template,
			.count = HMAC_SHA384_TEST_VECTORS
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
			.vecs = hmac_sha512_tv_template,
			.count = HMAC_SHA512_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_lrw_enc_tv_template,
			.count = AES_LRW_ENC_TEST_VECTORS
			.vecs = aes_lrw_dec_tv_template,
			.count = AES_LRW_DEC_TEST_VECTORS
	}, {
		.test = alg_test_comp,
			.vecs = lzo_comp_tv_template,
			.count = LZO_COMP_TEST_VECTORS
			.vecs = lzo_decomp_tv_template,
			.count = LZO_DECOMP_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = md4_tv_template,
			.count = MD4_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = md5_tv_template,
			.count = MD5_TEST_VECTORS
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
			.vecs = michael_mic_tv_template,
			.count = MICHAEL_MIC_TEST_VECTORS
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
			.vecs = fcrypt_pcbc_enc_tv_template,
			.count = FCRYPT_ENC_TEST_VECTORS
			.vecs = fcrypt_pcbc_dec_tv_template,
			.count = FCRYPT_DEC_TEST_VECTORS
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
			.vecs = aes_ctr_rfc3686_enc_tv_template,
			.count = AES_CTR_3686_ENC_TEST_VECTORS
			.vecs = aes_ctr_rfc3686_dec_tv_template,
			.count = AES_CTR_3686_DEC_TEST_VECTORS
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
			.vecs = aes_ccm_rfc4309_enc_tv_template,
			.count = AES_CCM_4309_ENC_TEST_VECTORS
			.vecs = aes_ccm_rfc4309_dec_tv_template,
			.count = AES_CCM_4309_DEC_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd128_tv_template,
			.count = RMD128_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd160_tv_template,
			.count = RMD160_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd256_tv_template,
			.count = RMD256_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd320_tv_template,
			.count = RMD320_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = salsa20_stream_enc_tv_template,
			.count = SALSA20_STREAM_ENC_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha1_tv_template,
			.count = SHA1_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha224_tv_template,
			.count = SHA224_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha256_tv_template,
			.count = SHA256_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha384_tv_template,
			.count = SHA384_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha512_tv_template,
			.count = SHA512_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = tgr128_tv_template,
			.count = TGR128_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = tgr160_tv_template,
			.count = TGR160_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = tgr192_tv_template,
			.count = TGR192_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = aes_vmac128_tv_template,
			.count = VMAC_AES_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = wp256_tv_template,
			.count = WP256_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = wp384_tv_template,
			.count = WP384_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = wp512_tv_template,
			.count = WP512_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = aes_xcbc128_tv_template,
			.count = XCBC_AES_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_xts_enc_tv_template,
			.count = AES_XTS_ENC_TEST_VECTORS
			.vecs = aes_xts_dec_tv_template,
			.count = AES_XTS_DEC_TEST_VECTORS
	}, {
		.test = alg_test_pcomp,
			.vecs = zlib_comp_tv_template,
			.count = ZLIB_COMP_TEST_VECTORS
			.vecs = zlib_decomp_tv_template,
			.count = ZLIB_DECOMP_TEST_VECTORS
	}
};

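/* Binary search for an algorithm name in the sorted alg_test_descs[] table. */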
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;

		int diff = strcmp(alg_test_descs[i].alg, alg);
		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

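/*
 * Entry point called by the crypto manager: look up tests for both the
 * algorithm name and the driver name, run whichever are present, and in
 * FIPS mode panic if a self-test fails.
 */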
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
		       driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);