/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
18 #include <crypto/hash.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/module.h>
23 #include <linux/slab.h>
24 #include <linux/scatterlist.h>
25 #include <linux/string.h>
26 #include <linux/crypto.h>
27 #include <linux/moduleparam.h>
28 #include <linux/jiffies.h>
29 #include <linux/timex.h>
30 #include <linux/interrupt.h>
34 * Need slab memory for testing (size in number of pages).
40 * Indexes into the xbuf to simulate cross-page access.
52 * Used by test_cipher()
57 struct tcrypt_result
{
58 struct completion completion
;
62 struct aead_test_suite
{
64 struct aead_testvec
*vecs
;
69 struct cipher_test_suite
{
71 struct cipher_testvec
*vecs
;
76 struct comp_test_suite
{
78 struct comp_testvec
*vecs
;
83 struct hash_test_suite
{
84 struct hash_testvec
*vecs
;
88 struct alg_test_desc
{
90 int (*test
)(const struct alg_test_desc
*desc
, const char *driver
,
94 struct aead_test_suite aead
;
95 struct cipher_test_suite cipher
;
96 struct comp_test_suite comp
;
97 struct hash_test_suite hash
;
101 static unsigned int IDX
[8] = { IDX1
, IDX2
, IDX3
, IDX4
, IDX5
, IDX6
, IDX7
, IDX8
};
104 * Used by test_cipher_speed()
106 static unsigned int sec
;
109 static char *xbuf
[XBUFSIZE
];
110 static char *axbuf
[XBUFSIZE
];
111 static char *tvmem
[TVMEMSIZE
];
113 static char *check
[] = {
114 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
115 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
116 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
117 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
118 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
122 static void hexdump(unsigned char *buf
, unsigned int len
)
124 print_hex_dump(KERN_CONT
, "", DUMP_PREFIX_OFFSET
,
129 static void tcrypt_complete(struct crypto_async_request
*req
, int err
)
131 struct tcrypt_result
*res
= req
->data
;
133 if (err
== -EINPROGRESS
)
137 complete(&res
->completion
);
140 static int test_hash(struct crypto_ahash
*tfm
, struct hash_testvec
*template,
143 const char *algo
= crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm
));
144 unsigned int i
, j
, k
, temp
;
145 struct scatterlist sg
[8];
147 struct ahash_request
*req
;
148 struct tcrypt_result tresult
;
152 init_completion(&tresult
.completion
);
154 req
= ahash_request_alloc(tfm
, GFP_KERNEL
);
156 printk(KERN_ERR
"alg: hash: Failed to allocate request for "
161 ahash_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
162 tcrypt_complete
, &tresult
);
164 for (i
= 0; i
< tcount
; i
++) {
165 memset(result
, 0, 64);
169 memcpy(hash_buff
, template[i
].plaintext
, template[i
].psize
);
170 sg_init_one(&sg
[0], hash_buff
, template[i
].psize
);
172 if (template[i
].ksize
) {
173 crypto_ahash_clear_flags(tfm
, ~0);
174 ret
= crypto_ahash_setkey(tfm
, template[i
].key
,
177 printk(KERN_ERR
"alg: hash: setkey failed on "
178 "test %d for %s: ret=%d\n", i
+ 1, algo
,
184 ahash_request_set_crypt(req
, sg
, result
, template[i
].psize
);
185 ret
= crypto_ahash_digest(req
);
191 ret
= wait_for_completion_interruptible(
192 &tresult
.completion
);
193 if (!ret
&& !(ret
= tresult
.err
)) {
194 INIT_COMPLETION(tresult
.completion
);
199 printk(KERN_ERR
"alg: hash: digest failed on test %d "
200 "for %s: ret=%d\n", i
+ 1, algo
, -ret
);
204 if (memcmp(result
, template[i
].digest
,
205 crypto_ahash_digestsize(tfm
))) {
206 printk(KERN_ERR
"alg: hash: Test %d failed for %s\n",
208 hexdump(result
, crypto_ahash_digestsize(tfm
));
215 for (i
= 0; i
< tcount
; i
++) {
216 if (template[i
].np
) {
218 memset(result
, 0, 64);
221 sg_init_table(sg
, template[i
].np
);
222 for (k
= 0; k
< template[i
].np
; k
++) {
224 memcpy(xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
225 offset_in_page(IDX
[k
]),
226 template[i
].plaintext
+ temp
,
229 temp
+= template[i
].tap
[k
];
232 if (template[i
].ksize
) {
233 crypto_ahash_clear_flags(tfm
, ~0);
234 ret
= crypto_ahash_setkey(tfm
, template[i
].key
,
238 printk(KERN_ERR
"alg: hash: setkey "
239 "failed on chunking test %d "
240 "for %s: ret=%d\n", j
, algo
,
246 ahash_request_set_crypt(req
, sg
, result
,
248 ret
= crypto_ahash_digest(req
);
254 ret
= wait_for_completion_interruptible(
255 &tresult
.completion
);
256 if (!ret
&& !(ret
= tresult
.err
)) {
257 INIT_COMPLETION(tresult
.completion
);
262 printk(KERN_ERR
"alg: hash: digest failed "
263 "on chunking test %d for %s: "
264 "ret=%d\n", j
, algo
, -ret
);
268 if (memcmp(result
, template[i
].digest
,
269 crypto_ahash_digestsize(tfm
))) {
270 printk(KERN_ERR
"alg: hash: Chunking test %d "
271 "failed for %s\n", j
, algo
);
272 hexdump(result
, crypto_ahash_digestsize(tfm
));
282 ahash_request_free(req
);
287 static int test_aead(struct crypto_aead
*tfm
, int enc
,
288 struct aead_testvec
*template, unsigned int tcount
)
290 const char *algo
= crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm
));
291 unsigned int i
, j
, k
, n
, temp
;
295 struct aead_request
*req
;
296 struct scatterlist sg
[8];
297 struct scatterlist asg
[8];
299 struct tcrypt_result result
;
300 unsigned int authsize
;
310 init_completion(&result
.completion
);
312 req
= aead_request_alloc(tfm
, GFP_KERNEL
);
314 printk(KERN_ERR
"alg: aead: Failed to allocate request for "
320 aead_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
321 tcrypt_complete
, &result
);
323 for (i
= 0, j
= 0; i
< tcount
; i
++) {
324 if (!template[i
].np
) {
327 /* some tepmplates have no input data but they will
333 memcpy(input
, template[i
].input
, template[i
].ilen
);
334 memcpy(assoc
, template[i
].assoc
, template[i
].alen
);
336 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
338 memset(iv
, 0, MAX_IVLEN
);
340 crypto_aead_clear_flags(tfm
, ~0);
342 crypto_aead_set_flags(
343 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
345 key
= template[i
].key
;
347 ret
= crypto_aead_setkey(tfm
, key
,
349 if (!ret
== template[i
].fail
) {
350 printk(KERN_ERR
"alg: aead: setkey failed on "
351 "test %d for %s: flags=%x\n", j
, algo
,
352 crypto_aead_get_flags(tfm
));
357 authsize
= abs(template[i
].rlen
- template[i
].ilen
);
358 ret
= crypto_aead_setauthsize(tfm
, authsize
);
360 printk(KERN_ERR
"alg: aead: Failed to set "
361 "authsize to %u on test %d for %s\n",
366 sg_init_one(&sg
[0], input
,
367 template[i
].ilen
+ (enc
? authsize
: 0));
369 sg_init_one(&asg
[0], assoc
, template[i
].alen
);
371 aead_request_set_crypt(req
, sg
, sg
,
372 template[i
].ilen
, iv
);
374 aead_request_set_assoc(req
, asg
, template[i
].alen
);
377 crypto_aead_encrypt(req
) :
378 crypto_aead_decrypt(req
);
385 ret
= wait_for_completion_interruptible(
387 if (!ret
&& !(ret
= result
.err
)) {
388 INIT_COMPLETION(result
.completion
);
393 printk(KERN_ERR
"alg: aead: %s failed on test "
394 "%d for %s: ret=%d\n", e
, j
, algo
, -ret
);
399 if (memcmp(q
, template[i
].result
, template[i
].rlen
)) {
400 printk(KERN_ERR
"alg: aead: Test %d failed on "
401 "%s for %s\n", j
, e
, algo
);
402 hexdump(q
, template[i
].rlen
);
409 for (i
= 0, j
= 0; i
< tcount
; i
++) {
410 if (template[i
].np
) {
414 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
416 memset(iv
, 0, MAX_IVLEN
);
418 crypto_aead_clear_flags(tfm
, ~0);
420 crypto_aead_set_flags(
421 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
422 key
= template[i
].key
;
424 ret
= crypto_aead_setkey(tfm
, key
, template[i
].klen
);
425 if (!ret
== template[i
].fail
) {
426 printk(KERN_ERR
"alg: aead: setkey failed on "
427 "chunk test %d for %s: flags=%x\n", j
,
428 algo
, crypto_aead_get_flags(tfm
));
433 authsize
= abs(template[i
].rlen
- template[i
].ilen
);
436 sg_init_table(sg
, template[i
].np
);
437 for (k
= 0, temp
= 0; k
< template[i
].np
; k
++) {
438 if (WARN_ON(offset_in_page(IDX
[k
]) +
439 template[i
].tap
[k
] > PAGE_SIZE
))
442 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
443 offset_in_page(IDX
[k
]);
445 memcpy(q
, template[i
].input
+ temp
,
448 n
= template[i
].tap
[k
];
449 if (k
== template[i
].np
- 1 && enc
)
451 if (offset_in_page(q
) + n
< PAGE_SIZE
)
454 sg_set_buf(&sg
[k
], q
, template[i
].tap
[k
]);
455 temp
+= template[i
].tap
[k
];
458 ret
= crypto_aead_setauthsize(tfm
, authsize
);
460 printk(KERN_ERR
"alg: aead: Failed to set "
461 "authsize to %u on chunk test %d for "
462 "%s\n", authsize
, j
, algo
);
467 if (WARN_ON(sg
[k
- 1].offset
+
468 sg
[k
- 1].length
+ authsize
>
474 sg
[k
- 1].length
+= authsize
;
477 sg_init_table(asg
, template[i
].anp
);
478 for (k
= 0, temp
= 0; k
< template[i
].anp
; k
++) {
480 memcpy(axbuf
[IDX
[k
] >> PAGE_SHIFT
] +
481 offset_in_page(IDX
[k
]),
482 template[i
].assoc
+ temp
,
483 template[i
].atap
[k
]),
484 template[i
].atap
[k
]);
485 temp
+= template[i
].atap
[k
];
488 aead_request_set_crypt(req
, sg
, sg
,
492 aead_request_set_assoc(req
, asg
, template[i
].alen
);
495 crypto_aead_encrypt(req
) :
496 crypto_aead_decrypt(req
);
503 ret
= wait_for_completion_interruptible(
505 if (!ret
&& !(ret
= result
.err
)) {
506 INIT_COMPLETION(result
.completion
);
511 printk(KERN_ERR
"alg: aead: %s failed on "
512 "chunk test %d for %s: ret=%d\n", e
, j
,
518 for (k
= 0, temp
= 0; k
< template[i
].np
; k
++) {
519 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
520 offset_in_page(IDX
[k
]);
522 n
= template[i
].tap
[k
];
523 if (k
== template[i
].np
- 1)
524 n
+= enc
? authsize
: -authsize
;
526 if (memcmp(q
, template[i
].result
+ temp
, n
)) {
527 printk(KERN_ERR
"alg: aead: Chunk "
528 "test %d failed on %s at page "
529 "%u for %s\n", j
, e
, k
, algo
);
535 if (k
== template[i
].np
- 1 && !enc
) {
536 if (memcmp(q
, template[i
].input
+
542 for (n
= 0; offset_in_page(q
+ n
) &&
547 printk(KERN_ERR
"alg: aead: Result "
548 "buffer corruption in chunk "
549 "test %d on %s at page %u for "
550 "%s: %u bytes:\n", j
, e
, k
,
556 temp
+= template[i
].tap
[k
];
564 aead_request_free(req
);
568 static int test_cipher(struct crypto_ablkcipher
*tfm
, int enc
,
569 struct cipher_testvec
*template, unsigned int tcount
)
572 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm
));
573 unsigned int i
, j
, k
, n
, temp
;
576 struct ablkcipher_request
*req
;
577 struct scatterlist sg
[8];
579 struct tcrypt_result result
;
588 init_completion(&result
.completion
);
590 req
= ablkcipher_request_alloc(tfm
, GFP_KERNEL
);
592 printk(KERN_ERR
"alg: cipher: Failed to allocate request for "
598 ablkcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
599 tcrypt_complete
, &result
);
602 for (i
= 0; i
< tcount
; i
++) {
604 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
606 memset(iv
, 0, MAX_IVLEN
);
608 if (!(template[i
].np
)) {
612 memcpy(data
, template[i
].input
, template[i
].ilen
);
614 crypto_ablkcipher_clear_flags(tfm
, ~0);
616 crypto_ablkcipher_set_flags(
617 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
619 ret
= crypto_ablkcipher_setkey(tfm
, template[i
].key
,
621 if (!ret
== template[i
].fail
) {
622 printk(KERN_ERR
"alg: cipher: setkey failed "
623 "on test %d for %s: flags=%x\n", j
,
624 algo
, crypto_ablkcipher_get_flags(tfm
));
629 sg_init_one(&sg
[0], data
, template[i
].ilen
);
631 ablkcipher_request_set_crypt(req
, sg
, sg
,
632 template[i
].ilen
, iv
);
634 crypto_ablkcipher_encrypt(req
) :
635 crypto_ablkcipher_decrypt(req
);
642 ret
= wait_for_completion_interruptible(
644 if (!ret
&& !((ret
= result
.err
))) {
645 INIT_COMPLETION(result
.completion
);
650 printk(KERN_ERR
"alg: cipher: %s failed on "
651 "test %d for %s: ret=%d\n", e
, j
, algo
,
657 if (memcmp(q
, template[i
].result
, template[i
].rlen
)) {
658 printk(KERN_ERR
"alg: cipher: Test %d failed "
659 "on %s for %s\n", j
, e
, algo
);
660 hexdump(q
, template[i
].rlen
);
668 for (i
= 0; i
< tcount
; i
++) {
671 memcpy(iv
, template[i
].iv
, MAX_IVLEN
);
673 memset(iv
, 0, MAX_IVLEN
);
675 if (template[i
].np
) {
678 crypto_ablkcipher_clear_flags(tfm
, ~0);
680 crypto_ablkcipher_set_flags(
681 tfm
, CRYPTO_TFM_REQ_WEAK_KEY
);
683 ret
= crypto_ablkcipher_setkey(tfm
, template[i
].key
,
685 if (!ret
== template[i
].fail
) {
686 printk(KERN_ERR
"alg: cipher: setkey failed "
687 "on chunk test %d for %s: flags=%x\n",
689 crypto_ablkcipher_get_flags(tfm
));
696 sg_init_table(sg
, template[i
].np
);
697 for (k
= 0; k
< template[i
].np
; k
++) {
698 if (WARN_ON(offset_in_page(IDX
[k
]) +
699 template[i
].tap
[k
] > PAGE_SIZE
))
702 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
703 offset_in_page(IDX
[k
]);
705 memcpy(q
, template[i
].input
+ temp
,
708 if (offset_in_page(q
) + template[i
].tap
[k
] <
710 q
[template[i
].tap
[k
]] = 0;
712 sg_set_buf(&sg
[k
], q
, template[i
].tap
[k
]);
714 temp
+= template[i
].tap
[k
];
717 ablkcipher_request_set_crypt(req
, sg
, sg
,
718 template[i
].ilen
, iv
);
721 crypto_ablkcipher_encrypt(req
) :
722 crypto_ablkcipher_decrypt(req
);
729 ret
= wait_for_completion_interruptible(
731 if (!ret
&& !((ret
= result
.err
))) {
732 INIT_COMPLETION(result
.completion
);
737 printk(KERN_ERR
"alg: cipher: %s failed on "
738 "chunk test %d for %s: ret=%d\n", e
, j
,
745 for (k
= 0; k
< template[i
].np
; k
++) {
746 q
= xbuf
[IDX
[k
] >> PAGE_SHIFT
] +
747 offset_in_page(IDX
[k
]);
749 if (memcmp(q
, template[i
].result
+ temp
,
750 template[i
].tap
[k
])) {
751 printk(KERN_ERR
"alg: cipher: Chunk "
752 "test %d failed on %s at page "
753 "%u for %s\n", j
, e
, k
, algo
);
754 hexdump(q
, template[i
].tap
[k
]);
758 q
+= template[i
].tap
[k
];
759 for (n
= 0; offset_in_page(q
+ n
) && q
[n
]; n
++)
762 printk(KERN_ERR
"alg: cipher: "
763 "Result buffer corruption in "
764 "chunk test %d on %s at page "
765 "%u for %s: %u bytes:\n", j
, e
,
770 temp
+= template[i
].tap
[k
];
778 ablkcipher_request_free(req
);
782 static int test_cipher_jiffies(struct blkcipher_desc
*desc
, int enc
,
783 struct scatterlist
*sg
, int blen
, int sec
)
785 unsigned long start
, end
;
789 for (start
= jiffies
, end
= start
+ sec
* HZ
, bcount
= 0;
790 time_before(jiffies
, end
); bcount
++) {
792 ret
= crypto_blkcipher_encrypt(desc
, sg
, sg
, blen
);
794 ret
= crypto_blkcipher_decrypt(desc
, sg
, sg
, blen
);
800 printk("%d operations in %d seconds (%ld bytes)\n",
801 bcount
, sec
, (long)bcount
* blen
);
805 static int test_cipher_cycles(struct blkcipher_desc
*desc
, int enc
,
806 struct scatterlist
*sg
, int blen
)
808 unsigned long cycles
= 0;
816 for (i
= 0; i
< 4; i
++) {
818 ret
= crypto_blkcipher_encrypt(desc
, sg
, sg
, blen
);
820 ret
= crypto_blkcipher_decrypt(desc
, sg
, sg
, blen
);
826 /* The real thing. */
827 for (i
= 0; i
< 8; i
++) {
830 start
= get_cycles();
832 ret
= crypto_blkcipher_encrypt(desc
, sg
, sg
, blen
);
834 ret
= crypto_blkcipher_decrypt(desc
, sg
, sg
, blen
);
840 cycles
+= end
- start
;
848 printk("1 operation in %lu cycles (%d bytes)\n",
849 (cycles
+ 4) / 8, blen
);
854 static u32 block_sizes
[] = { 16, 64, 256, 1024, 8192, 0 };
856 static void test_cipher_speed(const char *algo
, int enc
, unsigned int sec
,
857 struct cipher_testvec
*template,
858 unsigned int tcount
, u8
*keysize
)
860 unsigned int ret
, i
, j
, iv_len
;
861 unsigned char *key
, iv
[128];
862 struct crypto_blkcipher
*tfm
;
863 struct blkcipher_desc desc
;
872 printk("\ntesting speed of %s %s\n", algo
, e
);
874 tfm
= crypto_alloc_blkcipher(algo
, 0, CRYPTO_ALG_ASYNC
);
877 printk("failed to load transform for %s: %ld\n", algo
,
887 b_size
= block_sizes
;
889 struct scatterlist sg
[TVMEMSIZE
];
891 if ((*keysize
+ *b_size
) > TVMEMSIZE
* PAGE_SIZE
) {
892 printk("template (%u) too big for "
893 "tvmem (%lu)\n", *keysize
+ *b_size
,
894 TVMEMSIZE
* PAGE_SIZE
);
898 printk("test %u (%d bit key, %d byte blocks): ", i
,
899 *keysize
* 8, *b_size
);
901 memset(tvmem
[0], 0xff, PAGE_SIZE
);
903 /* set key, plain text and IV */
904 key
= (unsigned char *)tvmem
[0];
905 for (j
= 0; j
< tcount
; j
++) {
906 if (template[j
].klen
== *keysize
) {
907 key
= template[j
].key
;
912 ret
= crypto_blkcipher_setkey(tfm
, key
, *keysize
);
914 printk("setkey() failed flags=%x\n",
915 crypto_blkcipher_get_flags(tfm
));
919 sg_init_table(sg
, TVMEMSIZE
);
920 sg_set_buf(sg
, tvmem
[0] + *keysize
,
921 PAGE_SIZE
- *keysize
);
922 for (j
= 1; j
< TVMEMSIZE
; j
++) {
923 sg_set_buf(sg
+ j
, tvmem
[j
], PAGE_SIZE
);
924 memset (tvmem
[j
], 0xff, PAGE_SIZE
);
927 iv_len
= crypto_blkcipher_ivsize(tfm
);
929 memset(&iv
, 0xff, iv_len
);
930 crypto_blkcipher_set_iv(tfm
, iv
, iv_len
);
934 ret
= test_cipher_jiffies(&desc
, enc
, sg
,
937 ret
= test_cipher_cycles(&desc
, enc
, sg
,
941 printk("%s() failed flags=%x\n", e
, desc
.flags
);
951 crypto_free_blkcipher(tfm
);
954 static int test_hash_jiffies_digest(struct hash_desc
*desc
,
955 struct scatterlist
*sg
, int blen
,
958 unsigned long start
, end
;
962 for (start
= jiffies
, end
= start
+ sec
* HZ
, bcount
= 0;
963 time_before(jiffies
, end
); bcount
++) {
964 ret
= crypto_hash_digest(desc
, sg
, blen
, out
);
969 printk("%6u opers/sec, %9lu bytes/sec\n",
970 bcount
/ sec
, ((long)bcount
* blen
) / sec
);
975 static int test_hash_jiffies(struct hash_desc
*desc
, struct scatterlist
*sg
,
976 int blen
, int plen
, char *out
, int sec
)
978 unsigned long start
, end
;
983 return test_hash_jiffies_digest(desc
, sg
, blen
, out
, sec
);
985 for (start
= jiffies
, end
= start
+ sec
* HZ
, bcount
= 0;
986 time_before(jiffies
, end
); bcount
++) {
987 ret
= crypto_hash_init(desc
);
990 for (pcount
= 0; pcount
< blen
; pcount
+= plen
) {
991 ret
= crypto_hash_update(desc
, sg
, plen
);
995 /* we assume there is enough space in 'out' for the result */
996 ret
= crypto_hash_final(desc
, out
);
1001 printk("%6u opers/sec, %9lu bytes/sec\n",
1002 bcount
/ sec
, ((long)bcount
* blen
) / sec
);
1007 static int test_hash_cycles_digest(struct hash_desc
*desc
,
1008 struct scatterlist
*sg
, int blen
, char *out
)
1010 unsigned long cycles
= 0;
1015 local_irq_disable();
1018 for (i
= 0; i
< 4; i
++) {
1019 ret
= crypto_hash_digest(desc
, sg
, blen
, out
);
1024 /* The real thing. */
1025 for (i
= 0; i
< 8; i
++) {
1026 cycles_t start
, end
;
1028 start
= get_cycles();
1030 ret
= crypto_hash_digest(desc
, sg
, blen
, out
);
1036 cycles
+= end
- start
;
1046 printk("%6lu cycles/operation, %4lu cycles/byte\n",
1047 cycles
/ 8, cycles
/ (8 * blen
));
1052 static int test_hash_cycles(struct hash_desc
*desc
, struct scatterlist
*sg
,
1053 int blen
, int plen
, char *out
)
1055 unsigned long cycles
= 0;
1060 return test_hash_cycles_digest(desc
, sg
, blen
, out
);
1063 local_irq_disable();
1066 for (i
= 0; i
< 4; i
++) {
1067 ret
= crypto_hash_init(desc
);
1070 for (pcount
= 0; pcount
< blen
; pcount
+= plen
) {
1071 ret
= crypto_hash_update(desc
, sg
, plen
);
1075 ret
= crypto_hash_final(desc
, out
);
1080 /* The real thing. */
1081 for (i
= 0; i
< 8; i
++) {
1082 cycles_t start
, end
;
1084 start
= get_cycles();
1086 ret
= crypto_hash_init(desc
);
1089 for (pcount
= 0; pcount
< blen
; pcount
+= plen
) {
1090 ret
= crypto_hash_update(desc
, sg
, plen
);
1094 ret
= crypto_hash_final(desc
, out
);
1100 cycles
+= end
- start
;
1110 printk("%6lu cycles/operation, %4lu cycles/byte\n",
1111 cycles
/ 8, cycles
/ (8 * blen
));
1116 static void test_hash_speed(const char *algo
, unsigned int sec
,
1117 struct hash_speed
*speed
)
1119 struct scatterlist sg
[TVMEMSIZE
];
1120 struct crypto_hash
*tfm
;
1121 struct hash_desc desc
;
1126 printk("\ntesting speed of %s\n", algo
);
1128 tfm
= crypto_alloc_hash(algo
, 0, CRYPTO_ALG_ASYNC
);
1131 printk("failed to load transform for %s: %ld\n", algo
,
1139 if (crypto_hash_digestsize(tfm
) > sizeof(output
)) {
1140 printk("digestsize(%u) > outputbuffer(%zu)\n",
1141 crypto_hash_digestsize(tfm
), sizeof(output
));
1145 sg_init_table(sg
, TVMEMSIZE
);
1146 for (i
= 0; i
< TVMEMSIZE
; i
++) {
1147 sg_set_buf(sg
+ i
, tvmem
[i
], PAGE_SIZE
);
1148 memset(tvmem
[i
], 0xff, PAGE_SIZE
);
1151 for (i
= 0; speed
[i
].blen
!= 0; i
++) {
1152 if (speed
[i
].blen
> TVMEMSIZE
* PAGE_SIZE
) {
1153 printk("template (%u) too big for tvmem (%lu)\n",
1154 speed
[i
].blen
, TVMEMSIZE
* PAGE_SIZE
);
1158 printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
1159 i
, speed
[i
].blen
, speed
[i
].plen
, speed
[i
].blen
/ speed
[i
].plen
);
1162 ret
= test_hash_jiffies(&desc
, sg
, speed
[i
].blen
,
1163 speed
[i
].plen
, output
, sec
);
1165 ret
= test_hash_cycles(&desc
, sg
, speed
[i
].blen
,
1166 speed
[i
].plen
, output
);
1169 printk("hashing failed ret=%d\n", ret
);
1175 crypto_free_hash(tfm
);
1178 static int test_comp(struct crypto_comp
*tfm
, struct comp_testvec
*ctemplate
,
1179 struct comp_testvec
*dtemplate
, int ctcount
, int dtcount
)
1181 const char *algo
= crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm
));
1183 char result
[COMP_BUF_SIZE
];
1186 for (i
= 0; i
< ctcount
; i
++) {
1187 int ilen
, dlen
= COMP_BUF_SIZE
;
1189 memset(result
, 0, sizeof (result
));
1191 ilen
= ctemplate
[i
].inlen
;
1192 ret
= crypto_comp_compress(tfm
, ctemplate
[i
].input
,
1193 ilen
, result
, &dlen
);
1195 printk(KERN_ERR
"alg: comp: compression failed "
1196 "on test %d for %s: ret=%d\n", i
+ 1, algo
,
1201 if (memcmp(result
, ctemplate
[i
].output
, dlen
)) {
1202 printk(KERN_ERR
"alg: comp: Compression test %d "
1203 "failed for %s\n", i
+ 1, algo
);
1204 hexdump(result
, dlen
);
1210 for (i
= 0; i
< dtcount
; i
++) {
1211 int ilen
, ret
, dlen
= COMP_BUF_SIZE
;
1213 memset(result
, 0, sizeof (result
));
1215 ilen
= dtemplate
[i
].inlen
;
1216 ret
= crypto_comp_decompress(tfm
, dtemplate
[i
].input
,
1217 ilen
, result
, &dlen
);
1219 printk(KERN_ERR
"alg: comp: decompression failed "
1220 "on test %d for %s: ret=%d\n", i
+ 1, algo
,
1225 if (memcmp(result
, dtemplate
[i
].output
, dlen
)) {
1226 printk(KERN_ERR
"alg: comp: Decompression test %d "
1227 "failed for %s\n", i
+ 1, algo
);
1228 hexdump(result
, dlen
);
1240 static void test_available(void)
1242 char **name
= check
;
1245 printk("alg %s ", *name
);
1246 printk(crypto_has_alg(*name
, 0, 0) ?
1247 "found\n" : "not found\n");
1252 static int alg_test_aead(const struct alg_test_desc
*desc
, const char *driver
,
1255 struct crypto_aead
*tfm
;
1258 tfm
= crypto_alloc_aead(driver
, type
, mask
);
1260 printk(KERN_ERR
"alg: aead: Failed to load transform for %s: "
1261 "%ld\n", driver
, PTR_ERR(tfm
));
1262 return PTR_ERR(tfm
);
1265 if (desc
->suite
.aead
.enc
.vecs
) {
1266 err
= test_aead(tfm
, ENCRYPT
, desc
->suite
.aead
.enc
.vecs
,
1267 desc
->suite
.aead
.enc
.count
);
1272 if (!err
&& desc
->suite
.aead
.dec
.vecs
)
1273 err
= test_aead(tfm
, DECRYPT
, desc
->suite
.aead
.dec
.vecs
,
1274 desc
->suite
.aead
.dec
.count
);
1277 crypto_free_aead(tfm
);
1281 static int alg_test_cipher(const struct alg_test_desc
*desc
,
1282 const char *driver
, u32 type
, u32 mask
)
1284 struct crypto_ablkcipher
*tfm
;
1287 tfm
= crypto_alloc_ablkcipher(driver
, type
, mask
);
1289 printk(KERN_ERR
"alg: cipher: Failed to load transform for "
1290 "%s: %ld\n", driver
, PTR_ERR(tfm
));
1291 return PTR_ERR(tfm
);
1294 if (desc
->suite
.cipher
.enc
.vecs
) {
1295 err
= test_cipher(tfm
, ENCRYPT
, desc
->suite
.cipher
.enc
.vecs
,
1296 desc
->suite
.cipher
.enc
.count
);
1301 if (desc
->suite
.cipher
.dec
.vecs
)
1302 err
= test_cipher(tfm
, DECRYPT
, desc
->suite
.cipher
.dec
.vecs
,
1303 desc
->suite
.cipher
.dec
.count
);
1306 crypto_free_ablkcipher(tfm
);
1310 static int alg_test_comp(const struct alg_test_desc
*desc
, const char *driver
,
1313 struct crypto_comp
*tfm
;
1316 tfm
= crypto_alloc_comp(driver
, type
, mask
);
1318 printk(KERN_ERR
"alg: comp: Failed to load transform for %s: "
1319 "%ld\n", driver
, PTR_ERR(tfm
));
1320 return PTR_ERR(tfm
);
1323 err
= test_comp(tfm
, desc
->suite
.comp
.comp
.vecs
,
1324 desc
->suite
.comp
.decomp
.vecs
,
1325 desc
->suite
.comp
.comp
.count
,
1326 desc
->suite
.comp
.decomp
.count
);
1328 crypto_free_comp(tfm
);
1332 static int alg_test_hash(const struct alg_test_desc
*desc
, const char *driver
,
1335 struct crypto_ahash
*tfm
;
1338 tfm
= crypto_alloc_ahash(driver
, type
, mask
);
1340 printk(KERN_ERR
"alg: hash: Failed to load transform for %s: "
1341 "%ld\n", driver
, PTR_ERR(tfm
));
1342 return PTR_ERR(tfm
);
1345 err
= test_hash(tfm
, desc
->suite
.hash
.vecs
, desc
->suite
.hash
.count
);
1347 crypto_free_ahash(tfm
);
1351 /* Please keep this list sorted by algorithm name. */
1352 static const struct alg_test_desc alg_test_descs
[] = {
1355 .test
= alg_test_cipher
,
1359 .vecs
= aes_cbc_enc_tv_template
,
1360 .count
= AES_CBC_ENC_TEST_VECTORS
1363 .vecs
= aes_cbc_dec_tv_template
,
1364 .count
= AES_CBC_DEC_TEST_VECTORS
1369 .alg
= "cbc(anubis)",
1370 .test
= alg_test_cipher
,
1374 .vecs
= anubis_cbc_enc_tv_template
,
1375 .count
= ANUBIS_CBC_ENC_TEST_VECTORS
1378 .vecs
= anubis_cbc_dec_tv_template
,
1379 .count
= ANUBIS_CBC_DEC_TEST_VECTORS
1384 .alg
= "cbc(blowfish)",
1385 .test
= alg_test_cipher
,
1389 .vecs
= bf_cbc_enc_tv_template
,
1390 .count
= BF_CBC_ENC_TEST_VECTORS
1393 .vecs
= bf_cbc_dec_tv_template
,
1394 .count
= BF_CBC_DEC_TEST_VECTORS
1399 .alg
= "cbc(camellia)",
1400 .test
= alg_test_cipher
,
1404 .vecs
= camellia_cbc_enc_tv_template
,
1405 .count
= CAMELLIA_CBC_ENC_TEST_VECTORS
1408 .vecs
= camellia_cbc_dec_tv_template
,
1409 .count
= CAMELLIA_CBC_DEC_TEST_VECTORS
1415 .test
= alg_test_cipher
,
1419 .vecs
= des_cbc_enc_tv_template
,
1420 .count
= DES_CBC_ENC_TEST_VECTORS
1423 .vecs
= des_cbc_dec_tv_template
,
1424 .count
= DES_CBC_DEC_TEST_VECTORS
1429 .alg
= "cbc(des3_ede)",
1430 .test
= alg_test_cipher
,
1434 .vecs
= des3_ede_cbc_enc_tv_template
,
1435 .count
= DES3_EDE_CBC_ENC_TEST_VECTORS
1438 .vecs
= des3_ede_cbc_dec_tv_template
,
1439 .count
= DES3_EDE_CBC_DEC_TEST_VECTORS
1444 .alg
= "cbc(twofish)",
1445 .test
= alg_test_cipher
,
1449 .vecs
= tf_cbc_enc_tv_template
,
1450 .count
= TF_CBC_ENC_TEST_VECTORS
1453 .vecs
= tf_cbc_dec_tv_template
,
1454 .count
= TF_CBC_DEC_TEST_VECTORS
1460 .test
= alg_test_aead
,
1464 .vecs
= aes_ccm_enc_tv_template
,
1465 .count
= AES_CCM_ENC_TEST_VECTORS
1468 .vecs
= aes_ccm_dec_tv_template
,
1469 .count
= AES_CCM_DEC_TEST_VECTORS
1475 .test
= alg_test_hash
,
1478 .vecs
= crc32c_tv_template
,
1479 .count
= CRC32C_TEST_VECTORS
1483 .alg
= "cts(cbc(aes))",
1484 .test
= alg_test_cipher
,
1488 .vecs
= cts_mode_enc_tv_template
,
1489 .count
= CTS_MODE_ENC_TEST_VECTORS
1492 .vecs
= cts_mode_dec_tv_template
,
1493 .count
= CTS_MODE_DEC_TEST_VECTORS
1499 .test
= alg_test_comp
,
1503 .vecs
= deflate_comp_tv_template
,
1504 .count
= DEFLATE_COMP_TEST_VECTORS
1507 .vecs
= deflate_decomp_tv_template
,
1508 .count
= DEFLATE_DECOMP_TEST_VECTORS
1514 .test
= alg_test_cipher
,
1518 .vecs
= aes_enc_tv_template
,
1519 .count
= AES_ENC_TEST_VECTORS
1522 .vecs
= aes_dec_tv_template
,
1523 .count
= AES_DEC_TEST_VECTORS
1528 .alg
= "ecb(anubis)",
1529 .test
= alg_test_cipher
,
1533 .vecs
= anubis_enc_tv_template
,
1534 .count
= ANUBIS_ENC_TEST_VECTORS
1537 .vecs
= anubis_dec_tv_template
,
1538 .count
= ANUBIS_DEC_TEST_VECTORS
1544 .test
= alg_test_cipher
,
1548 .vecs
= arc4_enc_tv_template
,
1549 .count
= ARC4_ENC_TEST_VECTORS
1552 .vecs
= arc4_dec_tv_template
,
1553 .count
= ARC4_DEC_TEST_VECTORS
1558 .alg
= "ecb(blowfish)",
1559 .test
= alg_test_cipher
,
1563 .vecs
= bf_enc_tv_template
,
1564 .count
= BF_ENC_TEST_VECTORS
1567 .vecs
= bf_dec_tv_template
,
1568 .count
= BF_DEC_TEST_VECTORS
1573 .alg
= "ecb(camellia)",
1574 .test
= alg_test_cipher
,
1578 .vecs
= camellia_enc_tv_template
,
1579 .count
= CAMELLIA_ENC_TEST_VECTORS
1582 .vecs
= camellia_dec_tv_template
,
1583 .count
= CAMELLIA_DEC_TEST_VECTORS
1588 .alg
= "ecb(cast5)",
1589 .test
= alg_test_cipher
,
1593 .vecs
= cast5_enc_tv_template
,
1594 .count
= CAST5_ENC_TEST_VECTORS
1597 .vecs
= cast5_dec_tv_template
,
1598 .count
= CAST5_DEC_TEST_VECTORS
1603 .alg
= "ecb(cast6)",
1604 .test
= alg_test_cipher
,
1608 .vecs
= cast6_enc_tv_template
,
1609 .count
= CAST6_ENC_TEST_VECTORS
1612 .vecs
= cast6_dec_tv_template
,
1613 .count
= CAST6_DEC_TEST_VECTORS
1619 .test
= alg_test_cipher
,
1623 .vecs
= des_enc_tv_template
,
1624 .count
= DES_ENC_TEST_VECTORS
1627 .vecs
= des_dec_tv_template
,
1628 .count
= DES_DEC_TEST_VECTORS
1633 .alg
= "ecb(des3_ede)",
1634 .test
= alg_test_cipher
,
1638 .vecs
= des3_ede_enc_tv_template
,
1639 .count
= DES3_EDE_ENC_TEST_VECTORS
1642 .vecs
= des3_ede_dec_tv_template
,
1643 .count
= DES3_EDE_DEC_TEST_VECTORS
1648 .alg
= "ecb(khazad)",
1649 .test
= alg_test_cipher
,
1653 .vecs
= khazad_enc_tv_template
,
1654 .count
= KHAZAD_ENC_TEST_VECTORS
1657 .vecs
= khazad_dec_tv_template
,
1658 .count
= KHAZAD_DEC_TEST_VECTORS
1664 .test
= alg_test_cipher
,
1668 .vecs
= seed_enc_tv_template
,
1669 .count
= SEED_ENC_TEST_VECTORS
1672 .vecs
= seed_dec_tv_template
,
1673 .count
= SEED_DEC_TEST_VECTORS
1678 .alg
= "ecb(serpent)",
1679 .test
= alg_test_cipher
,
1683 .vecs
= serpent_enc_tv_template
,
1684 .count
= SERPENT_ENC_TEST_VECTORS
1687 .vecs
= serpent_dec_tv_template
,
1688 .count
= SERPENT_DEC_TEST_VECTORS
1694 .test
= alg_test_cipher
,
1698 .vecs
= tea_enc_tv_template
,
1699 .count
= TEA_ENC_TEST_VECTORS
1702 .vecs
= tea_dec_tv_template
,
1703 .count
= TEA_DEC_TEST_VECTORS
1708 .alg
= "ecb(tnepres)",
1709 .test
= alg_test_cipher
,
1713 .vecs
= tnepres_enc_tv_template
,
1714 .count
= TNEPRES_ENC_TEST_VECTORS
1717 .vecs
= tnepres_dec_tv_template
,
1718 .count
= TNEPRES_DEC_TEST_VECTORS
1723 .alg
= "ecb(twofish)",
1724 .test
= alg_test_cipher
,
1728 .vecs
= tf_enc_tv_template
,
1729 .count
= TF_ENC_TEST_VECTORS
1732 .vecs
= tf_dec_tv_template
,
1733 .count
= TF_DEC_TEST_VECTORS
1739 .test
= alg_test_cipher
,
1743 .vecs
= xeta_enc_tv_template
,
1744 .count
= XETA_ENC_TEST_VECTORS
1747 .vecs
= xeta_dec_tv_template
,
1748 .count
= XETA_DEC_TEST_VECTORS
1754 .test
= alg_test_cipher
,
1758 .vecs
= xtea_enc_tv_template
,
1759 .count
= XTEA_ENC_TEST_VECTORS
1762 .vecs
= xtea_dec_tv_template
,
1763 .count
= XTEA_DEC_TEST_VECTORS
1769 .test
= alg_test_aead
,
1773 .vecs
= aes_gcm_enc_tv_template
,
1774 .count
= AES_GCM_ENC_TEST_VECTORS
1777 .vecs
= aes_gcm_dec_tv_template
,
1778 .count
= AES_GCM_DEC_TEST_VECTORS
1784 .test
= alg_test_hash
,
1787 .vecs
= hmac_md5_tv_template
,
1788 .count
= HMAC_MD5_TEST_VECTORS
1792 .alg
= "hmac(rmd128)",
1793 .test
= alg_test_hash
,
1796 .vecs
= hmac_rmd128_tv_template
,
1797 .count
= HMAC_RMD128_TEST_VECTORS
1801 .alg
= "hmac(rmd160)",
1802 .test
= alg_test_hash
,
1805 .vecs
= hmac_rmd160_tv_template
,
1806 .count
= HMAC_RMD160_TEST_VECTORS
1810 .alg
= "hmac(sha1)",
1811 .test
= alg_test_hash
,
1814 .vecs
= hmac_sha1_tv_template
,
1815 .count
= HMAC_SHA1_TEST_VECTORS
1819 .alg
= "hmac(sha224)",
1820 .test
= alg_test_hash
,
1823 .vecs
= hmac_sha224_tv_template
,
1824 .count
= HMAC_SHA224_TEST_VECTORS
1828 .alg
= "hmac(sha256)",
1829 .test
= alg_test_hash
,
1832 .vecs
= hmac_sha256_tv_template
,
1833 .count
= HMAC_SHA256_TEST_VECTORS
1837 .alg
= "hmac(sha384)",
1838 .test
= alg_test_hash
,
1841 .vecs
= hmac_sha384_tv_template
,
1842 .count
= HMAC_SHA384_TEST_VECTORS
1846 .alg
= "hmac(sha512)",
1847 .test
= alg_test_hash
,
1850 .vecs
= hmac_sha512_tv_template
,
1851 .count
= HMAC_SHA512_TEST_VECTORS
1856 .test
= alg_test_cipher
,
1860 .vecs
= aes_lrw_enc_tv_template
,
1861 .count
= AES_LRW_ENC_TEST_VECTORS
1864 .vecs
= aes_lrw_dec_tv_template
,
1865 .count
= AES_LRW_DEC_TEST_VECTORS
1871 .test
= alg_test_comp
,
1875 .vecs
= lzo_comp_tv_template
,
1876 .count
= LZO_COMP_TEST_VECTORS
1879 .vecs
= lzo_decomp_tv_template
,
1880 .count
= LZO_DECOMP_TEST_VECTORS
1886 .test
= alg_test_hash
,
1889 .vecs
= md4_tv_template
,
1890 .count
= MD4_TEST_VECTORS
1895 .test
= alg_test_hash
,
1898 .vecs
= md5_tv_template
,
1899 .count
= MD5_TEST_VECTORS
1903 .alg
= "michael_mic",
1904 .test
= alg_test_hash
,
1907 .vecs
= michael_mic_tv_template
,
1908 .count
= MICHAEL_MIC_TEST_VECTORS
1912 .alg
= "pcbc(fcrypt)",
1913 .test
= alg_test_cipher
,
1917 .vecs
= fcrypt_pcbc_enc_tv_template
,
1918 .count
= FCRYPT_ENC_TEST_VECTORS
1921 .vecs
= fcrypt_pcbc_dec_tv_template
,
1922 .count
= FCRYPT_DEC_TEST_VECTORS
1927 .alg
= "rfc3686(ctr(aes))",
1928 .test
= alg_test_cipher
,
1932 .vecs
= aes_ctr_enc_tv_template
,
1933 .count
= AES_CTR_ENC_TEST_VECTORS
1936 .vecs
= aes_ctr_dec_tv_template
,
1937 .count
= AES_CTR_DEC_TEST_VECTORS
1943 .test
= alg_test_hash
,
1946 .vecs
= rmd128_tv_template
,
1947 .count
= RMD128_TEST_VECTORS
1952 .test
= alg_test_hash
,
1955 .vecs
= rmd160_tv_template
,
1956 .count
= RMD160_TEST_VECTORS
1961 .test
= alg_test_hash
,
1964 .vecs
= rmd256_tv_template
,
1965 .count
= RMD256_TEST_VECTORS
1970 .test
= alg_test_hash
,
1973 .vecs
= rmd320_tv_template
,
1974 .count
= RMD320_TEST_VECTORS
1979 .test
= alg_test_cipher
,
1983 .vecs
= salsa20_stream_enc_tv_template
,
1984 .count
= SALSA20_STREAM_ENC_TEST_VECTORS
1990 .test
= alg_test_hash
,
1993 .vecs
= sha1_tv_template
,
1994 .count
= SHA1_TEST_VECTORS
1999 .test
= alg_test_hash
,
2002 .vecs
= sha224_tv_template
,
2003 .count
= SHA224_TEST_VECTORS
2008 .test
= alg_test_hash
,
2011 .vecs
= sha256_tv_template
,
2012 .count
= SHA256_TEST_VECTORS
2017 .test
= alg_test_hash
,
2020 .vecs
= sha384_tv_template
,
2021 .count
= SHA384_TEST_VECTORS
2026 .test
= alg_test_hash
,
2029 .vecs
= sha512_tv_template
,
2030 .count
= SHA512_TEST_VECTORS
2035 .test
= alg_test_hash
,
2038 .vecs
= tgr128_tv_template
,
2039 .count
= TGR128_TEST_VECTORS
2044 .test
= alg_test_hash
,
2047 .vecs
= tgr160_tv_template
,
2048 .count
= TGR160_TEST_VECTORS
2053 .test
= alg_test_hash
,
2056 .vecs
= tgr192_tv_template
,
2057 .count
= TGR192_TEST_VECTORS
2062 .test
= alg_test_hash
,
2065 .vecs
= wp256_tv_template
,
2066 .count
= WP256_TEST_VECTORS
2071 .test
= alg_test_hash
,
2074 .vecs
= wp384_tv_template
,
2075 .count
= WP384_TEST_VECTORS
2080 .test
= alg_test_hash
,
2083 .vecs
= wp512_tv_template
,
2084 .count
= WP512_TEST_VECTORS
2089 .test
= alg_test_hash
,
2092 .vecs
= aes_xcbc128_tv_template
,
2093 .count
= XCBC_AES_TEST_VECTORS
2098 .test
= alg_test_cipher
,
2102 .vecs
= aes_xts_enc_tv_template
,
2103 .count
= AES_XTS_ENC_TEST_VECTORS
2106 .vecs
= aes_xts_dec_tv_template
,
2107 .count
= AES_XTS_DEC_TEST_VECTORS
/*
 * alg_test() - locate and run the registered self-test for @alg.
 *
 * Performs a binary search over the alg_test_descs[] table (which is
 * therefore expected to be kept sorted by .alg) and, on a match,
 * dispatches to that entry's ->test() callback with @driver, @type and
 * @mask.  If no entry matches, an informational message is printed.
 *
 * NOTE(review): several interior lines were lost in extraction — the
 * declaration of `start`, the start/end adjustments on the strcmp()
 * result, and the final return value.  Confirm against upstream
 * tcrypt.c before modifying.
 */
2114 static int alg_test(const char *driver
, const char *alg
, u32 type
, u32 mask
)
2117 int end
= ARRAY_SIZE(alg_test_descs
);
/* Binary search over the (sorted) descriptor table. */
2119 while (start
< end
) {
2120 int i
= (start
+ end
) / 2;
2121 int diff
= strcmp(alg_test_descs
[i
].alg
, alg
);
/* Exact match: run the per-type test routine for this entry. */
2133 return alg_test_descs
[i
].test(alg_test_descs
+ i
, driver
,
/* Fell out of the search loop: no test registered for @alg. */
2137 printk(KERN_INFO
"alg: No test for %s (%s)\n", alg
, driver
);
/*
 * Run the self-test for a single algorithm.  The algorithm name is also
 * passed as the driver name, and no type/mask constraints are applied.
 */
static inline int tcrypt_test(const char *alg)
{
	const char *driver = alg;	/* driver name doubles as the alg name */

	return alg_test(driver, alg, 0, 0);
}
/*
 * do_test() - dispatch the test group selected by the "mode" module
 * parameter (passed in as @m).  Each group either runs correctness
 * self-tests via tcrypt_test() or throughput measurements via
 * test_cipher_speed()/test_hash_speed().
 *
 * NOTE(review): the switch statement and all of its case labels were
 * lost in extraction; only the calls inside each case survive.  Confirm
 * the mode numbering against upstream tcrypt.c before relying on it.
 */
2146 static void do_test(int m
)
/* Presumably re-invokes do_test() for every individual mode — TODO
 * confirm; the loop body and enclosing case label are elided. */
2152 for (i
= 1; i
< 200; i
++)
/* Correctness self-tests, one tcrypt_test() call per algorithm. */
2161 tcrypt_test("sha1");
2165 tcrypt_test("ecb(des)");
2166 tcrypt_test("cbc(des)");
2170 tcrypt_test("ecb(des3_ede)");
2171 tcrypt_test("cbc(des3_ede)");
2179 tcrypt_test("sha256");
2183 tcrypt_test("ecb(blowfish)");
2184 tcrypt_test("cbc(blowfish)");
2188 tcrypt_test("ecb(twofish)");
2189 tcrypt_test("cbc(twofish)");
2193 tcrypt_test("ecb(serpent)");
2197 tcrypt_test("ecb(aes)");
2198 tcrypt_test("cbc(aes)");
2199 tcrypt_test("lrw(aes)");
2200 tcrypt_test("xts(aes)");
2201 tcrypt_test("rfc3686(ctr(aes))");
2205 tcrypt_test("sha384");
2209 tcrypt_test("sha512");
2213 tcrypt_test("deflate");
2217 tcrypt_test("ecb(cast5)");
2221 tcrypt_test("ecb(cast6)");
2225 tcrypt_test("ecb(arc4)");
2229 tcrypt_test("michael_mic");
2233 tcrypt_test("crc32c");
2237 tcrypt_test("ecb(tea)");
2241 tcrypt_test("ecb(xtea)");
2245 tcrypt_test("ecb(khazad)");
2249 tcrypt_test("wp512");
2253 tcrypt_test("wp384");
2257 tcrypt_test("wp256");
2261 tcrypt_test("ecb(tnepres)");
2265 tcrypt_test("ecb(anubis)");
2266 tcrypt_test("cbc(anubis)");
2270 tcrypt_test("tgr192");
2275 tcrypt_test("tgr160");
2279 tcrypt_test("tgr128");
2283 tcrypt_test("ecb(xeta)");
2287 tcrypt_test("pcbc(fcrypt)");
2291 tcrypt_test("ecb(camellia)");
2292 tcrypt_test("cbc(camellia)");
2295 tcrypt_test("sha224");
2299 tcrypt_test("salsa20");
2303 tcrypt_test("gcm(aes)");
2311 tcrypt_test("ccm(aes)");
2315 tcrypt_test("cts(cbc(aes))");
2319 tcrypt_test("rmd128");
2323 tcrypt_test("rmd160");
2327 tcrypt_test("rmd256");
2331 tcrypt_test("rmd320");
2335 tcrypt_test("ecb(seed)");
2339 tcrypt_test("hmac(md5)");
2343 tcrypt_test("hmac(sha1)");
2347 tcrypt_test("hmac(sha256)");
2351 tcrypt_test("hmac(sha384)");
2355 tcrypt_test("hmac(sha512)");
2359 tcrypt_test("hmac(sha224)");
2363 tcrypt_test("xcbc(aes)");
2367 tcrypt_test("hmac(rmd128)");
2371 tcrypt_test("hmac(rmd160)");
/* Cipher throughput tests: run for `sec` seconds each (or count CPU
 * cycles when sec == 0 — see the MODULE_PARM_DESC for "sec"). */
2375 test_cipher_speed("ecb(aes)", ENCRYPT
, sec
, NULL
, 0,
2376 speed_template_16_24_32
);
2377 test_cipher_speed("ecb(aes)", DECRYPT
, sec
, NULL
, 0,
2378 speed_template_16_24_32
);
2379 test_cipher_speed("cbc(aes)", ENCRYPT
, sec
, NULL
, 0,
2380 speed_template_16_24_32
);
2381 test_cipher_speed("cbc(aes)", DECRYPT
, sec
, NULL
, 0,
2382 speed_template_16_24_32
);
2383 test_cipher_speed("lrw(aes)", ENCRYPT
, sec
, NULL
, 0,
2384 speed_template_32_40_48
);
2385 test_cipher_speed("lrw(aes)", DECRYPT
, sec
, NULL
, 0,
2386 speed_template_32_40_48
);
2387 test_cipher_speed("xts(aes)", ENCRYPT
, sec
, NULL
, 0,
2388 speed_template_32_48_64
);
2389 test_cipher_speed("xts(aes)", DECRYPT
, sec
, NULL
, 0,
2390 speed_template_32_48_64
);
/* des3_ede speed tests pass an explicit test-vector template instead of
 * NULL; note the enc template is reused for the DECRYPT runs too. */
2394 test_cipher_speed("ecb(des3_ede)", ENCRYPT
, sec
,
2395 des3_ede_enc_tv_template
, DES3_EDE_ENC_TEST_VECTORS
,
2397 test_cipher_speed("ecb(des3_ede)", DECRYPT
, sec
,
2398 des3_ede_enc_tv_template
, DES3_EDE_ENC_TEST_VECTORS
,
2400 test_cipher_speed("cbc(des3_ede)", ENCRYPT
, sec
,
2401 des3_ede_enc_tv_template
, DES3_EDE_ENC_TEST_VECTORS
,
2403 test_cipher_speed("cbc(des3_ede)", DECRYPT
, sec
,
2404 des3_ede_enc_tv_template
, DES3_EDE_ENC_TEST_VECTORS
,
2409 test_cipher_speed("ecb(twofish)", ENCRYPT
, sec
, NULL
, 0,
2410 speed_template_16_24_32
);
2411 test_cipher_speed("ecb(twofish)", DECRYPT
, sec
, NULL
, 0,
2412 speed_template_16_24_32
);
2413 test_cipher_speed("cbc(twofish)", ENCRYPT
, sec
, NULL
, 0,
2414 speed_template_16_24_32
);
2415 test_cipher_speed("cbc(twofish)", DECRYPT
, sec
, NULL
, 0,
2416 speed_template_16_24_32
);
2420 test_cipher_speed("ecb(blowfish)", ENCRYPT
, sec
, NULL
, 0,
2421 speed_template_8_32
);
2422 test_cipher_speed("ecb(blowfish)", DECRYPT
, sec
, NULL
, 0,
2423 speed_template_8_32
);
2424 test_cipher_speed("cbc(blowfish)", ENCRYPT
, sec
, NULL
, 0,
2425 speed_template_8_32
);
2426 test_cipher_speed("cbc(blowfish)", DECRYPT
, sec
, NULL
, 0,
2427 speed_template_8_32
);
/* NOTE(review): the speed_template argument lines for the DES calls
 * below were lost in extraction. */
2431 test_cipher_speed("ecb(des)", ENCRYPT
, sec
, NULL
, 0,
2433 test_cipher_speed("ecb(des)", DECRYPT
, sec
, NULL
, 0,
2435 test_cipher_speed("cbc(des)", ENCRYPT
, sec
, NULL
, 0,
2437 test_cipher_speed("cbc(des)", DECRYPT
, sec
, NULL
, 0,
2442 test_cipher_speed("ecb(camellia)", ENCRYPT
, sec
, NULL
, 0,
2443 speed_template_16_24_32
);
2444 test_cipher_speed("ecb(camellia)", DECRYPT
, sec
, NULL
, 0,
2445 speed_template_16_24_32
);
2446 test_cipher_speed("cbc(camellia)", ENCRYPT
, sec
, NULL
, 0,
2447 speed_template_16_24_32
);
2448 test_cipher_speed("cbc(camellia)", DECRYPT
, sec
, NULL
, 0,
2449 speed_template_16_24_32
);
2453 test_cipher_speed("salsa20", ENCRYPT
, sec
, NULL
, 0,
2454 speed_template_16_32
);
/* Digest throughput tests.  After each one, a mode value in the
 * exclusive range (300, 400) means a single specific hash was
 * requested, so bail out instead of falling through to the next. */
2461 test_hash_speed("md4", sec
, generic_hash_speed_template
);
2462 if (mode
> 300 && mode
< 400) break;
2465 test_hash_speed("md5", sec
, generic_hash_speed_template
);
2466 if (mode
> 300 && mode
< 400) break;
2469 test_hash_speed("sha1", sec
, generic_hash_speed_template
);
2470 if (mode
> 300 && mode
< 400) break;
2473 test_hash_speed("sha256", sec
, generic_hash_speed_template
);
2474 if (mode
> 300 && mode
< 400) break;
2477 test_hash_speed("sha384", sec
, generic_hash_speed_template
);
2478 if (mode
> 300 && mode
< 400) break;
2481 test_hash_speed("sha512", sec
, generic_hash_speed_template
);
2482 if (mode
> 300 && mode
< 400) break;
2485 test_hash_speed("wp256", sec
, generic_hash_speed_template
);
2486 if (mode
> 300 && mode
< 400) break;
2489 test_hash_speed("wp384", sec
, generic_hash_speed_template
);
2490 if (mode
> 300 && mode
< 400) break;
2493 test_hash_speed("wp512", sec
, generic_hash_speed_template
);
2494 if (mode
> 300 && mode
< 400) break;
2497 test_hash_speed("tgr128", sec
, generic_hash_speed_template
);
2498 if (mode
> 300 && mode
< 400) break;
2501 test_hash_speed("tgr160", sec
, generic_hash_speed_template
);
2502 if (mode
> 300 && mode
< 400) break;
2505 test_hash_speed("tgr192", sec
, generic_hash_speed_template
);
2506 if (mode
> 300 && mode
< 400) break;
2509 test_hash_speed("sha224", sec
, generic_hash_speed_template
);
2510 if (mode
> 300 && mode
< 400) break;
2513 test_hash_speed("rmd128", sec
, generic_hash_speed_template
);
2514 if (mode
> 300 && mode
< 400) break;
2517 test_hash_speed("rmd160", sec
, generic_hash_speed_template
);
2518 if (mode
> 300 && mode
< 400) break;
2521 test_hash_speed("rmd256", sec
, generic_hash_speed_template
);
2522 if (mode
> 300 && mode
< 400) break;
2525 test_hash_speed("rmd320", sec
, generic_hash_speed_template
);
2526 if (mode
> 300 && mode
< 400) break;
/*
 * tcrypt_mod_init() - module entry point.
 *
 * Allocates one page for each tvmem/xbuf/axbuf slot, then (per the
 * comment preserved below) deliberately fails with -EAGAIN after the
 * tests have run so the module never stays loaded.  The trailing loops
 * are the error-unwind path, freeing only the pages that were actually
 * allocated (the "&& buf[i]" guard stops at the first NULL slot).
 *
 * NOTE(review): the per-allocation NULL checks, the do_test() call and
 * the return statements were lost in extraction — confirm against
 * upstream tcrypt.c.
 */
2537 static int __init
tcrypt_mod_init(void)
/* One page per test-vector memory slot. */
2542 for (i
= 0; i
< TVMEMSIZE
; i
++) {
2543 tvmem
[i
] = (void *)__get_free_page(GFP_KERNEL
);
/* Cross-page scatterlist buffers. */
2548 for (i
= 0; i
< XBUFSIZE
; i
++) {
2549 xbuf
[i
] = (void *)__get_free_page(GFP_KERNEL
);
/* Associated-data buffers for AEAD tests. */
2554 for (i
= 0; i
< XBUFSIZE
; i
++) {
2555 axbuf
[i
] = (void *)__get_free_page(GFP_KERNEL
);
2557 goto err_free_axbuf
;
2562 /* We intentionally return -EAGAIN to prevent keeping
2563 * the module. It does all its work from init()
2564 * and doesn't offer any runtime functionality
2565 * => we don't need it in the memory, do we?
/* Error unwind: release whatever was allocated before the failure. */
2571 for (i
= 0; i
< XBUFSIZE
&& axbuf
[i
]; i
++)
2572 free_page((unsigned long)axbuf
[i
]);
2574 for (i
= 0; i
< XBUFSIZE
&& xbuf
[i
]; i
++)
2575 free_page((unsigned long)xbuf
[i
]);
2577 for (i
= 0; i
< TVMEMSIZE
&& tvmem
[i
]; i
++)
2578 free_page((unsigned long)tvmem
[i
]);
2584 * If an init function is provided, an exit function must also be provided
2585 * to allow module unload.
2587 static void __exit
tcrypt_mod_fini(void) { }
/* Register the module entry/exit points. */
2589 module_init(tcrypt_mod_init
);
2590 module_exit(tcrypt_mod_fini
);
/* "mode": selects which test group do_test() runs — presumably 0 means
 * all groups; TODO confirm (the switch labels are not visible here). */
2592 module_param(mode
, int, 0);
/* "sec": duration of each speed test; 0 switches to cycle counting. */
2593 module_param(sec
, uint
, 0);
2594 MODULE_PARM_DESC(sec
, "Length in seconds of speed tests "
2595 "(defaults to zero which uses CPU cycles instead)");
2597 MODULE_LICENSE("GPL");
2598 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2599 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");