/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
/*
 * Need slab memory for testing (size in number of pages).
 */

/*
 * Indexes into the xbuf to simulate cross-page access.
 */

/*
 * Used by test_cipher()
 */
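/*
 * The #define constants that the comments above describe did not survive in
 * this excerpt. The values below are a sketch of what mainline testmgr.c
 * uses here and should be treated as assumptions rather than recovered text
 * (XBUFSIZE also sizes the xbuf[]/axbuf[] page arrays further down).
 */
#define XBUFSIZE	8
#define MAX_IVLEN	32

#define ENCRYPT		1
#define DECRYPT		0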
struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
	} suite;
};
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static char *xbuf[XBUFSIZE];
static char *axbuf[XBUFSIZE];

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
		       16, 1, buf, len, false);
}
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}
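/*
 * Note on the completion pattern used by the test routines below: requests
 * are issued asynchronously, so -EINPROGRESS (or a backlogged -EBUSY, given
 * that CRYPTO_TFM_REQ_MAY_BACKLOG is set) means "wait on the
 * tcrypt_result.completion and pick up the real status from
 * tcrypt_result.err", after which the completion is re-armed with
 * INIT_COMPLETION() so the same request can be reused for the next vector.
 */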
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char result[64];
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	int ret = -ENOMEM;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 64);

		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", i + 1, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			ret = wait_for_completion_interruptible(
				&tresult.completion);
			if (!ret && !(ret = tresult.err)) {
				INIT_COMPLETION(tresult.completion);
				break;
			}
			/* fall through */
		default:
			printk(KERN_ERR "alg: hash: digest failed on test %d "
			       "for %s: ret=%d\n", i + 1, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;
			memset(result, 0, 64);

			temp = 0;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				sg_set_buf(&sg[k],
					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].plaintext + temp,
						  template[i].tap[k]),
					   template[i].tap[k]);
				temp += template[i].tap[k];
			}

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);
				if (ret) {
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,
					       -ret);
					goto out;
				}
			}

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);
				goto out;
			}

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));
				ret = -EINVAL;
				goto out;
			}
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	return ret;
}
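/*
 * test_aead() mirrors test_hash(): a first pass runs each vector through a
 * single linear buffer, and a second pass replays the vectors whose .np
 * field is set by scattering the plaintext and associated data across the
 * xbuf/axbuf pages at the IDX offsets, so page-crossing scatterlist
 * handling is exercised as well.
 */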
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];
	const char *e;
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *assoc;
	char iv[MAX_IVLEN];

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: aead: Failed to allocate request for "
		       "%s\n", algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			j++;

			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			assoc = axbuf[0];

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "test %d for %s: flags=%x\n", j, algo,
				       crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on test "
				       "%d for %s: ret=%d\n", e, j, algo, -ret);
				goto out;
			}

			q = input;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on "
				       "%s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;

			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "chunk test %d for %s: flags=%x\n", j,
				       algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on chunk test %d for "
				       "%s\n", authsize, j, algo);
				goto out;
			}

			if (enc) {
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				sg[k - 1].length += authsize;
			}

			sg_init_table(asg, template[i].anp);
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					if (memcmp(q, template[i].input +
						      temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					printk(KERN_ERR "alg: aead: Result "
					       "buffer corruption in chunk "
					       "test %d on %s at page %u for "
					       "%s: %u bytes:\n", j, e, k,
					       algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	return ret;
}
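/*
 * test_cipher() drives the synchronous single-block cipher interface: each
 * vector is processed one block at a time with crypto_cipher_encrypt_one()
 * or crypto_cipher_decrypt_one(), so only the in-place linear case is
 * covered here (vectors with .np set are skipped and left to
 * test_skcipher()).
 */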
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	void *data;
	int ret = -ENOMEM;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}
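/*
 * test_skcipher() covers the asynchronous ablkcipher interface with the
 * same two-pass scheme as test_hash() and test_aead(): linear buffers
 * first, then the .np vectors scattered across the xbuf pages, checking
 * both the transformed chunks and the guard bytes after each chunk for
 * result-buffer corruption.
 */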
static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	const char *e;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			j++;

			data = xbuf[0];
			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);
				goto out;
			}

			q = data;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			j++;

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			temp = 0;
			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];
			}

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			temp = 0;
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, template[i].tap[k]);
					goto out;
				}

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
				if (n) {
					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);
					hexdump(q, n);
					goto out;
				}
				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	ablkcipher_request_free(req);
	return ret;
}
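/*
 * test_comp() exercises the one-shot crypto_comp API: each vector is
 * compressed or decompressed in a single call and both the produced length
 * and the output bytes are compared against the template.
 */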
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen, dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen, dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}
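/*
 * test_pcomp() targets the partial (streaming) compression API instead:
 * setup and init are followed by two update calls that each get only half
 * of the input and half of the output space, then final() gets the rest,
 * which checks that drivers handle -EAGAIN and split buffers correctly.
 */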
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int error;

	for (i = 0; i < ctcount; i++) {
		struct comp_request req;

		error = crypto_compress_setup(tfm, ctemplate[i].params,
					      ctemplate[i].paramsize);
		if (error) {
			pr_err("alg: pcomp: compression setup failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		error = crypto_compress_init(tfm);
		if (error) {
			pr_err("alg: pcomp: compression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		memset(result, 0, sizeof(result));

		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		error = crypto_compress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		error = crypto_compress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		error = crypto_compress_final(tfm, &req);
		if (error) {
			pr_err("alg: pcomp: compression final failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "output len = %d (expected %d)\n", i + 1, algo,
			       COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, ctemplate[i].outlen);
			return -EINVAL;
		}
	}

	for (i = 0; i < dtcount; i++) {
		struct comp_request req;

		error = crypto_decompress_setup(tfm, dtemplate[i].params,
						dtemplate[i].paramsize);
		if (error) {
			pr_err("alg: pcomp: decompression setup failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		error = crypto_decompress_init(tfm);
		if (error) {
			pr_err("alg: pcomp: decompression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		error = crypto_decompress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		error = crypto_decompress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		error = crypto_decompress_final(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
			return -EINVAL;
		}
	}

	return 0;
}
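/*
 * The alg_test_<type>() helpers below share one shape: allocate the
 * transform for the driver under test, feed the encryption vectors (and,
 * where present, the decryption vectors) from the matching test suite to
 * the test_<type>() routine above, then free the transform and return the
 * first error seen.
 */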
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_aead *tfm;
	int err = 0;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);
		if (err)
			goto out;
	}

	if (!err && desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

out:
	crypto_free_aead(tfm);
	return err;
}
static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_cipher *tfm;
	int err = 0;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				  desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				  desc->suite.cipher.dec.count);

out:
	crypto_free_cipher(tfm);
	return err;
}
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_ablkcipher *tfm;
	int err = 0;

	tfm = crypto_alloc_ablkcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				    desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				    desc->suite.cipher.dec.count);

out:
	crypto_free_ablkcipher(tfm);
	return err;
}
static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *tfm;
	int err;

	tfm = crypto_alloc_comp(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_comp(tfm, desc->suite.comp.comp.vecs,
			desc->suite.comp.decomp.vecs,
			desc->suite.comp.comp.count,
			desc->suite.comp.decomp.count);

	crypto_free_comp(tfm);
	return err;
}
static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_pcomp *tfm;
	int err;

	tfm = crypto_alloc_pcomp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
			 desc->suite.pcomp.decomp.vecs,
			 desc->suite.pcomp.comp.count,
			 desc->suite.pcomp.decomp.count);

	crypto_free_pcomp(tfm);
	return err;
}
static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);

	crypto_free_ahash(tfm);
	return err;
}
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		struct {
			struct shash_desc shash;
			char ctx[crypto_shash_descsize(tfm)];
		} sdesc;

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}
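/*
 * The extra crc32c check above seeds the shash partial state directly with
 * a known 32-bit value and asks for the final result without feeding any
 * data; a conforming driver keeps the running CRC in its context and
 * returns its bitwise complement, so anything other than ~420553207 points
 * at a descsize or context-layout mismatch with the generic crc32c.
 */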
/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
	{
		.test = alg_test_skcipher,
			.vecs = aes_cbc_enc_tv_template,
			.count = AES_CBC_ENC_TEST_VECTORS
			.vecs = aes_cbc_dec_tv_template,
			.count = AES_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
			.vecs = anubis_cbc_enc_tv_template,
			.count = ANUBIS_CBC_ENC_TEST_VECTORS
			.vecs = anubis_cbc_dec_tv_template,
			.count = ANUBIS_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
			.vecs = bf_cbc_enc_tv_template,
			.count = BF_CBC_ENC_TEST_VECTORS
			.vecs = bf_cbc_dec_tv_template,
			.count = BF_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
			.vecs = camellia_cbc_enc_tv_template,
			.count = CAMELLIA_CBC_ENC_TEST_VECTORS
			.vecs = camellia_cbc_dec_tv_template,
			.count = CAMELLIA_CBC_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = des_cbc_enc_tv_template,
			.count = DES_CBC_ENC_TEST_VECTORS
			.vecs = des_cbc_dec_tv_template,
			.count = DES_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
			.vecs = des3_ede_cbc_enc_tv_template,
			.count = DES3_EDE_CBC_ENC_TEST_VECTORS
			.vecs = des3_ede_cbc_dec_tv_template,
			.count = DES3_EDE_CBC_DEC_TEST_VECTORS
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
			.vecs = tf_cbc_enc_tv_template,
			.count = TF_CBC_ENC_TEST_VECTORS
			.vecs = tf_cbc_dec_tv_template,
			.count = TF_CBC_DEC_TEST_VECTORS
	}, {
		.test = alg_test_aead,
			.vecs = aes_ccm_enc_tv_template,
			.count = AES_CCM_ENC_TEST_VECTORS
			.vecs = aes_ccm_dec_tv_template,
			.count = AES_CCM_DEC_TEST_VECTORS
	}, {
		.test = alg_test_crc32c,
			.vecs = crc32c_tv_template,
			.count = CRC32C_TEST_VECTORS
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
			.vecs = cts_mode_enc_tv_template,
			.count = CTS_MODE_ENC_TEST_VECTORS
			.vecs = cts_mode_dec_tv_template,
			.count = CTS_MODE_DEC_TEST_VECTORS
	}, {
		.test = alg_test_comp,
			.vecs = deflate_comp_tv_template,
			.count = DEFLATE_COMP_TEST_VECTORS
			.vecs = deflate_decomp_tv_template,
			.count = DEFLATE_DECOMP_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_enc_tv_template,
			.count = AES_ENC_TEST_VECTORS
			.vecs = aes_dec_tv_template,
			.count = AES_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
			.vecs = anubis_enc_tv_template,
			.count = ANUBIS_ENC_TEST_VECTORS
			.vecs = anubis_dec_tv_template,
			.count = ANUBIS_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = arc4_enc_tv_template,
			.count = ARC4_ENC_TEST_VECTORS
			.vecs = arc4_dec_tv_template,
			.count = ARC4_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
			.vecs = bf_enc_tv_template,
			.count = BF_ENC_TEST_VECTORS
			.vecs = bf_dec_tv_template,
			.count = BF_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
			.vecs = camellia_enc_tv_template,
			.count = CAMELLIA_ENC_TEST_VECTORS
			.vecs = camellia_dec_tv_template,
			.count = CAMELLIA_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
			.vecs = cast5_enc_tv_template,
			.count = CAST5_ENC_TEST_VECTORS
			.vecs = cast5_dec_tv_template,
			.count = CAST5_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
			.vecs = cast6_enc_tv_template,
			.count = CAST6_ENC_TEST_VECTORS
			.vecs = cast6_dec_tv_template,
			.count = CAST6_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = des_enc_tv_template,
			.count = DES_ENC_TEST_VECTORS
			.vecs = des_dec_tv_template,
			.count = DES_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
			.vecs = des3_ede_enc_tv_template,
			.count = DES3_EDE_ENC_TEST_VECTORS
			.vecs = des3_ede_dec_tv_template,
			.count = DES3_EDE_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
			.vecs = khazad_enc_tv_template,
			.count = KHAZAD_ENC_TEST_VECTORS
			.vecs = khazad_dec_tv_template,
			.count = KHAZAD_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = seed_enc_tv_template,
			.count = SEED_ENC_TEST_VECTORS
			.vecs = seed_dec_tv_template,
			.count = SEED_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
			.vecs = serpent_enc_tv_template,
			.count = SERPENT_ENC_TEST_VECTORS
			.vecs = serpent_dec_tv_template,
			.count = SERPENT_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = tea_enc_tv_template,
			.count = TEA_ENC_TEST_VECTORS
			.vecs = tea_dec_tv_template,
			.count = TEA_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
			.vecs = tnepres_enc_tv_template,
			.count = TNEPRES_ENC_TEST_VECTORS
			.vecs = tnepres_dec_tv_template,
			.count = TNEPRES_DEC_TEST_VECTORS
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
			.vecs = tf_enc_tv_template,
			.count = TF_ENC_TEST_VECTORS
			.vecs = tf_dec_tv_template,
			.count = TF_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = xeta_enc_tv_template,
			.count = XETA_ENC_TEST_VECTORS
			.vecs = xeta_dec_tv_template,
			.count = XETA_DEC_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = xtea_enc_tv_template,
			.count = XTEA_ENC_TEST_VECTORS
			.vecs = xtea_dec_tv_template,
			.count = XTEA_DEC_TEST_VECTORS
	}, {
		.test = alg_test_aead,
			.vecs = aes_gcm_enc_tv_template,
			.count = AES_GCM_ENC_TEST_VECTORS
			.vecs = aes_gcm_dec_tv_template,
			.count = AES_GCM_DEC_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = hmac_md5_tv_template,
			.count = HMAC_MD5_TEST_VECTORS
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
			.vecs = hmac_rmd128_tv_template,
			.count = HMAC_RMD128_TEST_VECTORS
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
			.vecs = hmac_rmd160_tv_template,
			.count = HMAC_RMD160_TEST_VECTORS
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
			.vecs = hmac_sha1_tv_template,
			.count = HMAC_SHA1_TEST_VECTORS
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
			.vecs = hmac_sha224_tv_template,
			.count = HMAC_SHA224_TEST_VECTORS
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
			.vecs = hmac_sha256_tv_template,
			.count = HMAC_SHA256_TEST_VECTORS
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
			.vecs = hmac_sha384_tv_template,
			.count = HMAC_SHA384_TEST_VECTORS
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
			.vecs = hmac_sha512_tv_template,
			.count = HMAC_SHA512_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_lrw_enc_tv_template,
			.count = AES_LRW_ENC_TEST_VECTORS
			.vecs = aes_lrw_dec_tv_template,
			.count = AES_LRW_DEC_TEST_VECTORS
	}, {
		.test = alg_test_comp,
			.vecs = lzo_comp_tv_template,
			.count = LZO_COMP_TEST_VECTORS
			.vecs = lzo_decomp_tv_template,
			.count = LZO_DECOMP_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = md4_tv_template,
			.count = MD4_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = md5_tv_template,
			.count = MD5_TEST_VECTORS
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
			.vecs = michael_mic_tv_template,
			.count = MICHAEL_MIC_TEST_VECTORS
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
			.vecs = fcrypt_pcbc_enc_tv_template,
			.count = FCRYPT_ENC_TEST_VECTORS
			.vecs = fcrypt_pcbc_dec_tv_template,
			.count = FCRYPT_DEC_TEST_VECTORS
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
			.vecs = aes_ctr_enc_tv_template,
			.count = AES_CTR_ENC_TEST_VECTORS
			.vecs = aes_ctr_dec_tv_template,
			.count = AES_CTR_DEC_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd128_tv_template,
			.count = RMD128_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd160_tv_template,
			.count = RMD160_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd256_tv_template,
			.count = RMD256_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = rmd320_tv_template,
			.count = RMD320_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = salsa20_stream_enc_tv_template,
			.count = SALSA20_STREAM_ENC_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha1_tv_template,
			.count = SHA1_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha224_tv_template,
			.count = SHA224_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha256_tv_template,
			.count = SHA256_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha384_tv_template,
			.count = SHA384_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = sha512_tv_template,
			.count = SHA512_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = tgr128_tv_template,
			.count = TGR128_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = tgr160_tv_template,
			.count = TGR160_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = tgr192_tv_template,
			.count = TGR192_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = wp256_tv_template,
			.count = WP256_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = wp384_tv_template,
			.count = WP384_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = wp512_tv_template,
			.count = WP512_TEST_VECTORS
	}, {
		.test = alg_test_hash,
			.vecs = aes_xcbc128_tv_template,
			.count = XCBC_AES_TEST_VECTORS
	}, {
		.test = alg_test_skcipher,
			.vecs = aes_xts_enc_tv_template,
			.count = AES_XTS_ENC_TEST_VECTORS
			.vecs = aes_xts_dec_tv_template,
			.count = AES_XTS_DEC_TEST_VECTORS
	}, {
		.test = alg_test_pcomp,
			.vecs = zlib_comp_tv_template,
			.count = ZLIB_COMP_TEST_VECTORS
			.vecs = zlib_decomp_tv_template,
			.count = ZLIB_DECOMP_TEST_VECTORS
	}
};
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int rc;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		return alg_test_cipher(alg_test_descs + i, driver, type, mask);
	}

	i = alg_find_test(alg);
	if (i < 0)
		goto notest;

	rc = alg_test_descs[i].test(alg_test_descs + i, driver,
				    type, mask);
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
}
EXPORT_SYMBOL_GPL(alg_test);
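/*
 * Dispatch summary: plain CRYPTO_ALG_TYPE_CIPHER algorithms are looked up
 * under an "ecb(<alg>)" name and routed to alg_test_cipher(); everything
 * else is found by a binary search of alg_test_descs[] (hence the sorted
 * list above) and run through its .test() callback. With fips_enabled set,
 * any self-test failure panics the kernel, while an algorithm without an
 * entry is merely reported as untested.
 */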
int __init testmgr_init(void)
{
	int i;
	int err = -ENOMEM;

	for (i = 0; i < XBUFSIZE; i++) {
		xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!xbuf[i])
			goto err_free_xbuf;
	}

	for (i = 0; i < XBUFSIZE; i++) {
		axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!axbuf[i])
			goto err_free_axbuf;
	}

	return 0;

err_free_axbuf:
	for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
		free_page((unsigned long)axbuf[i]);
err_free_xbuf:
	for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
		free_page((unsigned long)xbuf[i]);

	return err;
}
void testmgr_exit(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)axbuf[i]);
	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)xbuf[i]);
}