crypto: tcrypt - Add alg_test interface
crypto/tcrypt.c
1 /*
2 * Quick & dirty crypto testing module.
3 *
4 * This will only exist until we have a better testing mechanism
5 * (e.g. a char device).
6 *
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
9 * Copyright (c) 2007 Nokia Siemens Networks
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17
18 #include <crypto/hash.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/module.h>
22 #include <linux/mm.h>
23 #include <linux/slab.h>
24 #include <linux/scatterlist.h>
25 #include <linux/string.h>
26 #include <linux/crypto.h>
27 #include <linux/moduleparam.h>
28 #include <linux/jiffies.h>
29 #include <linux/timex.h>
30 #include <linux/interrupt.h>
31 #include "tcrypt.h"
32
33 /*
34 * Need slab memory for testing (size in number of pages).
35 */
36 #define TVMEMSIZE 4
37 #define XBUFSIZE 8
38
39 /*
40 * Indexes into the xbuf to simulate cross-page access.
41 */
42 #define IDX1 32
43 #define IDX2 32400
44 #define IDX3 1
45 #define IDX4 8193
46 #define IDX5 22222
47 #define IDX6 17101
48 #define IDX7 27333
49 #define IDX8 3000
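/*
 * Each IDX value encodes both a page number (IDX >> PAGE_SHIFT) and an
 * offset within that page (offset_in_page(IDX)), so the chunked tests can
 * scatter data at odd offsets across the xbuf/axbuf pages.
 */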
50
51 /*
52 * Used by test_cipher()
53 */
54 #define ENCRYPT 1
55 #define DECRYPT 0
56
57 struct tcrypt_result {
58 struct completion completion;
59 int err;
60 };
61
62 struct aead_test_suite {
63 struct {
64 struct aead_testvec *vecs;
65 unsigned int count;
66 } enc, dec;
67 };
68
69 struct cipher_test_suite {
70 struct {
71 struct cipher_testvec *vecs;
72 unsigned int count;
73 } enc, dec;
74 };
75
76 struct comp_test_suite {
77 struct {
78 struct comp_testvec *vecs;
79 unsigned int count;
80 } comp, decomp;
81 };
82
83 struct hash_test_suite {
84 struct hash_testvec *vecs;
85 unsigned int count;
86 };
87
88 struct alg_test_desc {
89 const char *alg;
90 int (*test)(const struct alg_test_desc *desc, const char *driver,
91 u32 type, u32 mask);
92
93 union {
94 struct aead_test_suite aead;
95 struct cipher_test_suite cipher;
96 struct comp_test_suite comp;
97 struct hash_test_suite hash;
98 } suite;
99 };
100
101 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
102
103 /*
104 * Used by the speed tests (test_cipher_speed() and test_hash_speed())
105 */
106 static unsigned int sec;
107
108 static int mode;
109 static char *xbuf[XBUFSIZE];
110 static char *axbuf[XBUFSIZE];
111 static char *tvmem[TVMEMSIZE];
112
113 static char *check[] = {
114 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
115 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
116 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
117 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
118 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
119 "lzo", "cts", NULL
120 };
121
122 static void hexdump(unsigned char *buf, unsigned int len)
123 {
124 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
125 16, 1,
126 buf, len, false);
127 }
128
129 static void tcrypt_complete(struct crypto_async_request *req, int err)
130 {
131 struct tcrypt_result *res = req->data;
132
133 if (err == -EINPROGRESS)
134 return;
135
136 res->err = err;
137 complete(&res->completion);
138 }
139
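/*
 * Run the hash test vectors through the ahash API: first each vector as a
 * single linear buffer, then again (for vectors with .np set) split across
 * several scatterlist entries at cross-page offsets, comparing the computed
 * digest against the expected one each time.
 */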
140 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
141 unsigned int tcount)
142 {
143 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
144 unsigned int i, j, k, temp;
145 struct scatterlist sg[8];
146 char result[64];
147 struct ahash_request *req;
148 struct tcrypt_result tresult;
149 int ret;
150 void *hash_buff;
151
152 init_completion(&tresult.completion);
153
154 req = ahash_request_alloc(tfm, GFP_KERNEL);
155 if (!req) {
156 printk(KERN_ERR "alg: hash: Failed to allocate request for "
157 "%s\n", algo);
158 ret = -ENOMEM;
159 goto out_noreq;
160 }
161 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
162 tcrypt_complete, &tresult);
163
164 for (i = 0; i < tcount; i++) {
165 memset(result, 0, 64);
166
167 hash_buff = xbuf[0];
168
169 memcpy(hash_buff, template[i].plaintext, template[i].psize);
170 sg_init_one(&sg[0], hash_buff, template[i].psize);
171
172 if (template[i].ksize) {
173 crypto_ahash_clear_flags(tfm, ~0);
174 ret = crypto_ahash_setkey(tfm, template[i].key,
175 template[i].ksize);
176 if (ret) {
177 printk(KERN_ERR "alg: hash: setkey failed on "
178 "test %d for %s: ret=%d\n", i + 1, algo,
179 -ret);
180 goto out;
181 }
182 }
183
184 ahash_request_set_crypt(req, sg, result, template[i].psize);
185 ret = crypto_ahash_digest(req);
186 switch (ret) {
187 case 0:
188 break;
189 case -EINPROGRESS:
190 case -EBUSY:
191 ret = wait_for_completion_interruptible(
192 &tresult.completion);
193 if (!ret && !(ret = tresult.err)) {
194 INIT_COMPLETION(tresult.completion);
195 break;
196 }
197 /* fall through */
198 default:
199 printk(KERN_ERR "alg: hash: digest failed on test %d "
200 "for %s: ret=%d\n", i + 1, algo, -ret);
201 goto out;
202 }
203
204 if (memcmp(result, template[i].digest,
205 crypto_ahash_digestsize(tfm))) {
206 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
207 i + 1, algo);
208 hexdump(result, crypto_ahash_digestsize(tfm));
209 ret = -EINVAL;
210 goto out;
211 }
212 }
213
214 j = 0;
215 for (i = 0; i < tcount; i++) {
216 if (template[i].np) {
217 j++;
218 memset(result, 0, 64);
219
220 temp = 0;
221 sg_init_table(sg, template[i].np);
222 for (k = 0; k < template[i].np; k++) {
223 sg_set_buf(&sg[k],
224 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
225 offset_in_page(IDX[k]),
226 template[i].plaintext + temp,
227 template[i].tap[k]),
228 template[i].tap[k]);
229 temp += template[i].tap[k];
230 }
231
232 if (template[i].ksize) {
233 crypto_ahash_clear_flags(tfm, ~0);
234 ret = crypto_ahash_setkey(tfm, template[i].key,
235 template[i].ksize);
236
237 if (ret) {
238 printk(KERN_ERR "alg: hash: setkey "
239 "failed on chunking test %d "
240 "for %s: ret=%d\n", j, algo,
241 -ret);
242 goto out;
243 }
244 }
245
246 ahash_request_set_crypt(req, sg, result,
247 template[i].psize);
248 ret = crypto_ahash_digest(req);
249 switch (ret) {
250 case 0:
251 break;
252 case -EINPROGRESS:
253 case -EBUSY:
254 ret = wait_for_completion_interruptible(
255 &tresult.completion);
256 if (!ret && !(ret = tresult.err)) {
257 INIT_COMPLETION(tresult.completion);
258 break;
259 }
260 /* fall through */
261 default:
262 printk(KERN_ERR "alg: hash: digest failed "
263 "on chunking test %d for %s: "
264 "ret=%d\n", j, algo, -ret);
265 goto out;
266 }
267
268 if (memcmp(result, template[i].digest,
269 crypto_ahash_digestsize(tfm))) {
270 printk(KERN_ERR "alg: hash: Chunking test %d "
271 "failed for %s\n", j, algo);
272 hexdump(result, crypto_ahash_digestsize(tfm));
273 ret = -EINVAL;
274 goto out;
275 }
276 }
277 }
278
279 ret = 0;
280
281 out:
282 ahash_request_free(req);
283 out_noreq:
284 return ret;
285 }
286
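/*
 * Exercise an AEAD transform with the given test vectors: set the key and
 * authentication tag size, run encryption or decryption over linear and
 * chunked scatterlists, and verify the result; the chunked pass also checks
 * that nothing was written past the expected output in each page.
 */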
287 static int test_aead(struct crypto_aead *tfm, int enc,
288 struct aead_testvec *template, unsigned int tcount)
289 {
290 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
291 unsigned int i, j, k, n, temp;
292 int ret = 0;
293 char *q;
294 char *key;
295 struct aead_request *req;
296 struct scatterlist sg[8];
297 struct scatterlist asg[8];
298 const char *e;
299 struct tcrypt_result result;
300 unsigned int authsize;
301 void *input;
302 void *assoc;
303 char iv[MAX_IVLEN];
304
305 if (enc == ENCRYPT)
306 e = "encryption";
307 else
308 e = "decryption";
309
310 init_completion(&result.completion);
311
312 req = aead_request_alloc(tfm, GFP_KERNEL);
313 if (!req) {
314 printk(KERN_ERR "alg: aead: Failed to allocate request for "
315 "%s\n", algo);
316 ret = -ENOMEM;
317 goto out;
318 }
319
320 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
321 tcrypt_complete, &result);
322
323 for (i = 0, j = 0; i < tcount; i++) {
324 if (!template[i].np) {
325 j++;
326
327 /* some templates have no input data but they will
328 * still touch the input buffer
329 */
330 input = xbuf[0];
331 assoc = axbuf[0];
332
333 memcpy(input, template[i].input, template[i].ilen);
334 memcpy(assoc, template[i].assoc, template[i].alen);
335 if (template[i].iv)
336 memcpy(iv, template[i].iv, MAX_IVLEN);
337 else
338 memset(iv, 0, MAX_IVLEN);
339
340 crypto_aead_clear_flags(tfm, ~0);
341 if (template[i].wk)
342 crypto_aead_set_flags(
343 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
344
345 key = template[i].key;
346
347 ret = crypto_aead_setkey(tfm, key,
348 template[i].klen);
349 if (!ret == template[i].fail) {
350 printk(KERN_ERR "alg: aead: setkey failed on "
351 "test %d for %s: flags=%x\n", j, algo,
352 crypto_aead_get_flags(tfm));
353 goto out;
354 } else if (ret)
355 continue;
356
357 authsize = abs(template[i].rlen - template[i].ilen);
358 ret = crypto_aead_setauthsize(tfm, authsize);
359 if (ret) {
360 printk(KERN_ERR "alg: aead: Failed to set "
361 "authsize to %u on test %d for %s\n",
362 authsize, j, algo);
363 goto out;
364 }
365
366 sg_init_one(&sg[0], input,
367 template[i].ilen + (enc ? authsize : 0));
368
369 sg_init_one(&asg[0], assoc, template[i].alen);
370
371 aead_request_set_crypt(req, sg, sg,
372 template[i].ilen, iv);
373
374 aead_request_set_assoc(req, asg, template[i].alen);
375
376 ret = enc ?
377 crypto_aead_encrypt(req) :
378 crypto_aead_decrypt(req);
379
380 switch (ret) {
381 case 0:
382 break;
383 case -EINPROGRESS:
384 case -EBUSY:
385 ret = wait_for_completion_interruptible(
386 &result.completion);
387 if (!ret && !(ret = result.err)) {
388 INIT_COMPLETION(result.completion);
389 break;
390 }
391 /* fall through */
392 default:
393 printk(KERN_ERR "alg: aead: %s failed on test "
394 "%d for %s: ret=%d\n", e, j, algo, -ret);
395 goto out;
396 }
397
398 q = input;
399 if (memcmp(q, template[i].result, template[i].rlen)) {
400 printk(KERN_ERR "alg: aead: Test %d failed on "
401 "%s for %s\n", j, e, algo);
402 hexdump(q, template[i].rlen);
403 ret = -EINVAL;
404 goto out;
405 }
406 }
407 }
408
409 for (i = 0, j = 0; i < tcount; i++) {
410 if (template[i].np) {
411 j++;
412
413 if (template[i].iv)
414 memcpy(iv, template[i].iv, MAX_IVLEN);
415 else
416 memset(iv, 0, MAX_IVLEN);
417
418 crypto_aead_clear_flags(tfm, ~0);
419 if (template[i].wk)
420 crypto_aead_set_flags(
421 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
422 key = template[i].key;
423
424 ret = crypto_aead_setkey(tfm, key, template[i].klen);
425 if (!ret == template[i].fail) {
426 printk(KERN_ERR "alg: aead: setkey failed on "
427 "chunk test %d for %s: flags=%x\n", j,
428 algo, crypto_aead_get_flags(tfm));
429 goto out;
430 } else if (ret)
431 continue;
432
433 authsize = abs(template[i].rlen - template[i].ilen);
434
435 ret = -EINVAL;
436 sg_init_table(sg, template[i].np);
437 for (k = 0, temp = 0; k < template[i].np; k++) {
438 if (WARN_ON(offset_in_page(IDX[k]) +
439 template[i].tap[k] > PAGE_SIZE))
440 goto out;
441
442 q = xbuf[IDX[k] >> PAGE_SHIFT] +
443 offset_in_page(IDX[k]);
444
445 memcpy(q, template[i].input + temp,
446 template[i].tap[k]);
447
448 n = template[i].tap[k];
449 if (k == template[i].np - 1 && enc)
450 n += authsize;
451 if (offset_in_page(q) + n < PAGE_SIZE)
452 q[n] = 0;
453
454 sg_set_buf(&sg[k], q, template[i].tap[k]);
455 temp += template[i].tap[k];
456 }
457
458 ret = crypto_aead_setauthsize(tfm, authsize);
459 if (ret) {
460 printk(KERN_ERR "alg: aead: Failed to set "
461 "authsize to %u on chunk test %d for "
462 "%s\n", authsize, j, algo);
463 goto out;
464 }
465
466 if (enc) {
467 if (WARN_ON(sg[k - 1].offset +
468 sg[k - 1].length + authsize >
469 PAGE_SIZE)) {
470 ret = -EINVAL;
471 goto out;
472 }
473
474 sg[k - 1].length += authsize;
475 }
476
477 sg_init_table(asg, template[i].anp);
478 for (k = 0, temp = 0; k < template[i].anp; k++) {
479 sg_set_buf(&asg[k],
480 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
481 offset_in_page(IDX[k]),
482 template[i].assoc + temp,
483 template[i].atap[k]),
484 template[i].atap[k]);
485 temp += template[i].atap[k];
486 }
487
488 aead_request_set_crypt(req, sg, sg,
489 template[i].ilen,
490 iv);
491
492 aead_request_set_assoc(req, asg, template[i].alen);
493
494 ret = enc ?
495 crypto_aead_encrypt(req) :
496 crypto_aead_decrypt(req);
497
498 switch (ret) {
499 case 0:
500 break;
501 case -EINPROGRESS:
502 case -EBUSY:
503 ret = wait_for_completion_interruptible(
504 &result.completion);
505 if (!ret && !(ret = result.err)) {
506 INIT_COMPLETION(result.completion);
507 break;
508 }
509 /* fall through */
510 default:
511 printk(KERN_ERR "alg: aead: %s failed on "
512 "chunk test %d for %s: ret=%d\n", e, j,
513 algo, -ret);
514 goto out;
515 }
516
517 ret = -EINVAL;
518 for (k = 0, temp = 0; k < template[i].np; k++) {
519 q = xbuf[IDX[k] >> PAGE_SHIFT] +
520 offset_in_page(IDX[k]);
521
522 n = template[i].tap[k];
523 if (k == template[i].np - 1)
524 n += enc ? authsize : -authsize;
525
526 if (memcmp(q, template[i].result + temp, n)) {
527 printk(KERN_ERR "alg: aead: Chunk "
528 "test %d failed on %s at page "
529 "%u for %s\n", j, e, k, algo);
530 hexdump(q, n);
531 goto out;
532 }
533
534 q += n;
535 if (k == template[i].np - 1 && !enc) {
536 if (memcmp(q, template[i].input +
537 temp + n, authsize))
538 n = authsize;
539 else
540 n = 0;
541 } else {
542 for (n = 0; offset_in_page(q + n) &&
543 q[n]; n++)
544 ;
545 }
546 if (n) {
547 printk(KERN_ERR "alg: aead: Result "
548 "buffer corruption in chunk "
549 "test %d on %s at page %u for "
550 "%s: %u bytes:\n", j, e, k,
551 algo, n);
552 hexdump(q, n);
553 goto out;
554 }
555
556 temp += template[i].tap[k];
557 }
558 }
559 }
560
561 ret = 0;
562
563 out:
564 aead_request_free(req);
565 return ret;
566 }
567
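/*
 * Exercise an ablkcipher transform with the given test vectors, first over
 * a single linear buffer and then over chunked scatterlists, checking the
 * output and guarding against writes beyond each chunk.
 */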
568 static int test_cipher(struct crypto_ablkcipher *tfm, int enc,
569 struct cipher_testvec *template, unsigned int tcount)
570 {
571 const char *algo =
572 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
573 unsigned int i, j, k, n, temp;
574 int ret;
575 char *q;
576 struct ablkcipher_request *req;
577 struct scatterlist sg[8];
578 const char *e;
579 struct tcrypt_result result;
580 void *data;
581 char iv[MAX_IVLEN];
582
583 if (enc == ENCRYPT)
584 e = "encryption";
585 else
586 e = "decryption";
587
588 init_completion(&result.completion);
589
590 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
591 if (!req) {
592 printk(KERN_ERR "alg: cipher: Failed to allocate request for "
593 "%s\n", algo);
594 ret = -ENOMEM;
595 goto out;
596 }
597
598 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
599 tcrypt_complete, &result);
600
601 j = 0;
602 for (i = 0; i < tcount; i++) {
603 if (template[i].iv)
604 memcpy(iv, template[i].iv, MAX_IVLEN);
605 else
606 memset(iv, 0, MAX_IVLEN);
607
608 if (!(template[i].np)) {
609 j++;
610
611 data = xbuf[0];
612 memcpy(data, template[i].input, template[i].ilen);
613
614 crypto_ablkcipher_clear_flags(tfm, ~0);
615 if (template[i].wk)
616 crypto_ablkcipher_set_flags(
617 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
618
619 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
620 template[i].klen);
621 if (!ret == template[i].fail) {
622 printk(KERN_ERR "alg: cipher: setkey failed "
623 "on test %d for %s: flags=%x\n", j,
624 algo, crypto_ablkcipher_get_flags(tfm));
625 goto out;
626 } else if (ret)
627 continue;
628
629 sg_init_one(&sg[0], data, template[i].ilen);
630
631 ablkcipher_request_set_crypt(req, sg, sg,
632 template[i].ilen, iv);
633 ret = enc ?
634 crypto_ablkcipher_encrypt(req) :
635 crypto_ablkcipher_decrypt(req);
636
637 switch (ret) {
638 case 0:
639 break;
640 case -EINPROGRESS:
641 case -EBUSY:
642 ret = wait_for_completion_interruptible(
643 &result.completion);
644 if (!ret && !((ret = result.err))) {
645 INIT_COMPLETION(result.completion);
646 break;
647 }
648 /* fall through */
649 default:
650 printk(KERN_ERR "alg: cipher: %s failed on "
651 "test %d for %s: ret=%d\n", e, j, algo,
652 -ret);
653 goto out;
654 }
655
656 q = data;
657 if (memcmp(q, template[i].result, template[i].rlen)) {
658 printk(KERN_ERR "alg: cipher: Test %d failed "
659 "on %s for %s\n", j, e, algo);
660 hexdump(q, template[i].rlen);
661 ret = -EINVAL;
662 goto out;
663 }
664 }
665 }
666
667 j = 0;
668 for (i = 0; i < tcount; i++) {
669
670 if (template[i].iv)
671 memcpy(iv, template[i].iv, MAX_IVLEN);
672 else
673 memset(iv, 0, MAX_IVLEN);
674
675 if (template[i].np) {
676 j++;
677
678 crypto_ablkcipher_clear_flags(tfm, ~0);
679 if (template[i].wk)
680 crypto_ablkcipher_set_flags(
681 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
682
683 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
684 template[i].klen);
685 if (!ret == template[i].fail) {
686 printk(KERN_ERR "alg: cipher: setkey failed "
687 "on chunk test %d for %s: flags=%x\n",
688 j, algo,
689 crypto_ablkcipher_get_flags(tfm));
690 goto out;
691 } else if (ret)
692 continue;
693
694 temp = 0;
695 ret = -EINVAL;
696 sg_init_table(sg, template[i].np);
697 for (k = 0; k < template[i].np; k++) {
698 if (WARN_ON(offset_in_page(IDX[k]) +
699 template[i].tap[k] > PAGE_SIZE))
700 goto out;
701
702 q = xbuf[IDX[k] >> PAGE_SHIFT] +
703 offset_in_page(IDX[k]);
704
705 memcpy(q, template[i].input + temp,
706 template[i].tap[k]);
707
708 if (offset_in_page(q) + template[i].tap[k] <
709 PAGE_SIZE)
710 q[template[i].tap[k]] = 0;
711
712 sg_set_buf(&sg[k], q, template[i].tap[k]);
713
714 temp += template[i].tap[k];
715 }
716
717 ablkcipher_request_set_crypt(req, sg, sg,
718 template[i].ilen, iv);
719
720 ret = enc ?
721 crypto_ablkcipher_encrypt(req) :
722 crypto_ablkcipher_decrypt(req);
723
724 switch (ret) {
725 case 0:
726 break;
727 case -EINPROGRESS:
728 case -EBUSY:
729 ret = wait_for_completion_interruptible(
730 &result.completion);
731 if (!ret && !((ret = result.err))) {
732 INIT_COMPLETION(result.completion);
733 break;
734 }
735 /* fall through */
736 default:
737 printk(KERN_ERR "alg: cipher: %s failed on "
738 "chunk test %d for %s: ret=%d\n", e, j,
739 algo, -ret);
740 goto out;
741 }
742
743 temp = 0;
744 ret = -EINVAL;
745 for (k = 0; k < template[i].np; k++) {
746 q = xbuf[IDX[k] >> PAGE_SHIFT] +
747 offset_in_page(IDX[k]);
748
749 if (memcmp(q, template[i].result + temp,
750 template[i].tap[k])) {
751 printk(KERN_ERR "alg: cipher: Chunk "
752 "test %d failed on %s at page "
753 "%u for %s\n", j, e, k, algo);
754 hexdump(q, template[i].tap[k]);
755 goto out;
756 }
757
758 q += template[i].tap[k];
759 for (n = 0; offset_in_page(q + n) && q[n]; n++)
760 ;
761 if (n) {
762 printk(KERN_ERR "alg: cipher: "
763 "Result buffer corruption in "
764 "chunk test %d on %s at page "
765 "%u for %s: %u bytes:\n", j, e,
766 k, algo, n);
767 hexdump(q, n);
768 goto out;
769 }
770 temp += template[i].tap[k];
771 }
772 }
773 }
774
775 ret = 0;
776
777 out:
778 ablkcipher_request_free(req);
779 return ret;
780 }
781
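/*
 * Speed test helper: encrypt or decrypt the same scatterlist repeatedly for
 * 'sec' seconds (measured in jiffies) and report the number of operations
 * and bytes processed.
 */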
782 static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
783 struct scatterlist *sg, int blen, int sec)
784 {
785 unsigned long start, end;
786 int bcount;
787 int ret;
788
789 for (start = jiffies, end = start + sec * HZ, bcount = 0;
790 time_before(jiffies, end); bcount++) {
791 if (enc)
792 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
793 else
794 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
795
796 if (ret)
797 return ret;
798 }
799
800 printk("%d operations in %d seconds (%ld bytes)\n",
801 bcount, sec, (long)bcount * blen);
802 return 0;
803 }
804
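/*
 * Speed test helper: after four warm-up operations, time eight
 * encrypt/decrypt operations with get_cycles() and report the average
 * cycles per operation, with softirqs and local interrupts disabled to
 * reduce measurement noise.
 */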
805 static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
806 struct scatterlist *sg, int blen)
807 {
808 unsigned long cycles = 0;
809 int ret = 0;
810 int i;
811
812 local_bh_disable();
813 local_irq_disable();
814
815 /* Warm-up run. */
816 for (i = 0; i < 4; i++) {
817 if (enc)
818 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
819 else
820 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
821
822 if (ret)
823 goto out;
824 }
825
826 /* The real thing. */
827 for (i = 0; i < 8; i++) {
828 cycles_t start, end;
829
830 start = get_cycles();
831 if (enc)
832 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
833 else
834 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
835 end = get_cycles();
836
837 if (ret)
838 goto out;
839
840 cycles += end - start;
841 }
842
843 out:
844 local_irq_enable();
845 local_bh_enable();
846
847 if (ret == 0)
848 printk("1 operation in %lu cycles (%d bytes)\n",
849 (cycles + 4) / 8, blen);
850
851 return ret;
852 }
853
854 static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
855
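/*
 * Cipher speed test: for each key size and each entry in block_sizes, pick
 * a key of matching length from the test vectors (or 0xff filler data),
 * set the key and IV, and time the transform over the tvmem scatterlist
 * either for 'sec' seconds or in CPU cycles.
 */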
856 static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
857 struct cipher_testvec *template,
858 unsigned int tcount, u8 *keysize)
859 {
860 unsigned int ret, i, j, iv_len;
861 unsigned char *key, iv[128];
862 struct crypto_blkcipher *tfm;
863 struct blkcipher_desc desc;
864 const char *e;
865 u32 *b_size;
866
867 if (enc == ENCRYPT)
868 e = "encryption";
869 else
870 e = "decryption";
871
872 printk("\ntesting speed of %s %s\n", algo, e);
873
874 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
875
876 if (IS_ERR(tfm)) {
877 printk("failed to load transform for %s: %ld\n", algo,
878 PTR_ERR(tfm));
879 return;
880 }
881 desc.tfm = tfm;
882 desc.flags = 0;
883
884 i = 0;
885 do {
886
887 b_size = block_sizes;
888 do {
889 struct scatterlist sg[TVMEMSIZE];
890
891 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
892 printk("template (%u) too big for "
893 "tvmem (%lu)\n", *keysize + *b_size,
894 TVMEMSIZE * PAGE_SIZE);
895 goto out;
896 }
897
898 printk("test %u (%d bit key, %d byte blocks): ", i,
899 *keysize * 8, *b_size);
900
901 memset(tvmem[0], 0xff, PAGE_SIZE);
902
903 /* set key, plain text and IV */
904 key = (unsigned char *)tvmem[0];
905 for (j = 0; j < tcount; j++) {
906 if (template[j].klen == *keysize) {
907 key = template[j].key;
908 break;
909 }
910 }
911
912 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
913 if (ret) {
914 printk("setkey() failed flags=%x\n",
915 crypto_blkcipher_get_flags(tfm));
916 goto out;
917 }
918
919 sg_init_table(sg, TVMEMSIZE);
920 sg_set_buf(sg, tvmem[0] + *keysize,
921 PAGE_SIZE - *keysize);
922 for (j = 1; j < TVMEMSIZE; j++) {
923 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
924 memset (tvmem[j], 0xff, PAGE_SIZE);
925 }
926
927 iv_len = crypto_blkcipher_ivsize(tfm);
928 if (iv_len) {
929 memset(&iv, 0xff, iv_len);
930 crypto_blkcipher_set_iv(tfm, iv, iv_len);
931 }
932
933 if (sec)
934 ret = test_cipher_jiffies(&desc, enc, sg,
935 *b_size, sec);
936 else
937 ret = test_cipher_cycles(&desc, enc, sg,
938 *b_size);
939
940 if (ret) {
941 printk("%s() failed flags=%x\n", e, desc.flags);
942 break;
943 }
944 b_size++;
945 i++;
946 } while (*b_size);
947 keysize++;
948 } while (*keysize);
949
950 out:
951 crypto_free_blkcipher(tfm);
952 }
953
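/*
 * Hash speed helpers: the _digest variants hash each block with a single
 * crypto_hash_digest() call, while the plain variants split the block into
 * 'plen'-sized updates; both report either throughput over 'sec' seconds
 * or cycle counts.
 */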
954 static int test_hash_jiffies_digest(struct hash_desc *desc,
955 struct scatterlist *sg, int blen,
956 char *out, int sec)
957 {
958 unsigned long start, end;
959 int bcount;
960 int ret;
961
962 for (start = jiffies, end = start + sec * HZ, bcount = 0;
963 time_before(jiffies, end); bcount++) {
964 ret = crypto_hash_digest(desc, sg, blen, out);
965 if (ret)
966 return ret;
967 }
968
969 printk("%6u opers/sec, %9lu bytes/sec\n",
970 bcount / sec, ((long)bcount * blen) / sec);
971
972 return 0;
973 }
974
975 static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
976 int blen, int plen, char *out, int sec)
977 {
978 unsigned long start, end;
979 int bcount, pcount;
980 int ret;
981
982 if (plen == blen)
983 return test_hash_jiffies_digest(desc, sg, blen, out, sec);
984
985 for (start = jiffies, end = start + sec * HZ, bcount = 0;
986 time_before(jiffies, end); bcount++) {
987 ret = crypto_hash_init(desc);
988 if (ret)
989 return ret;
990 for (pcount = 0; pcount < blen; pcount += plen) {
991 ret = crypto_hash_update(desc, sg, plen);
992 if (ret)
993 return ret;
994 }
995 /* we assume there is enough space in 'out' for the result */
996 ret = crypto_hash_final(desc, out);
997 if (ret)
998 return ret;
999 }
1000
1001 printk("%6u opers/sec, %9lu bytes/sec\n",
1002 bcount / sec, ((long)bcount * blen) / sec);
1003
1004 return 0;
1005 }
1006
1007 static int test_hash_cycles_digest(struct hash_desc *desc,
1008 struct scatterlist *sg, int blen, char *out)
1009 {
1010 unsigned long cycles = 0;
1011 int i;
1012 int ret;
1013
1014 local_bh_disable();
1015 local_irq_disable();
1016
1017 /* Warm-up run. */
1018 for (i = 0; i < 4; i++) {
1019 ret = crypto_hash_digest(desc, sg, blen, out);
1020 if (ret)
1021 goto out;
1022 }
1023
1024 /* The real thing. */
1025 for (i = 0; i < 8; i++) {
1026 cycles_t start, end;
1027
1028 start = get_cycles();
1029
1030 ret = crypto_hash_digest(desc, sg, blen, out);
1031 if (ret)
1032 goto out;
1033
1034 end = get_cycles();
1035
1036 cycles += end - start;
1037 }
1038
1039 out:
1040 local_irq_enable();
1041 local_bh_enable();
1042
1043 if (ret)
1044 return ret;
1045
1046 printk("%6lu cycles/operation, %4lu cycles/byte\n",
1047 cycles / 8, cycles / (8 * blen));
1048
1049 return 0;
1050 }
1051
1052 static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
1053 int blen, int plen, char *out)
1054 {
1055 unsigned long cycles = 0;
1056 int i, pcount;
1057 int ret;
1058
1059 if (plen == blen)
1060 return test_hash_cycles_digest(desc, sg, blen, out);
1061
1062 local_bh_disable();
1063 local_irq_disable();
1064
1065 /* Warm-up run. */
1066 for (i = 0; i < 4; i++) {
1067 ret = crypto_hash_init(desc);
1068 if (ret)
1069 goto out;
1070 for (pcount = 0; pcount < blen; pcount += plen) {
1071 ret = crypto_hash_update(desc, sg, plen);
1072 if (ret)
1073 goto out;
1074 }
1075 ret = crypto_hash_final(desc, out);
1076 if (ret)
1077 goto out;
1078 }
1079
1080 /* The real thing. */
1081 for (i = 0; i < 8; i++) {
1082 cycles_t start, end;
1083
1084 start = get_cycles();
1085
1086 ret = crypto_hash_init(desc);
1087 if (ret)
1088 goto out;
1089 for (pcount = 0; pcount < blen; pcount += plen) {
1090 ret = crypto_hash_update(desc, sg, plen);
1091 if (ret)
1092 goto out;
1093 }
1094 ret = crypto_hash_final(desc, out);
1095 if (ret)
1096 goto out;
1097
1098 end = get_cycles();
1099
1100 cycles += end - start;
1101 }
1102
1103 out:
1104 local_irq_enable();
1105 local_bh_enable();
1106
1107 if (ret)
1108 return ret;
1109
1110 printk("%6lu cycles/operation, %4lu cycles/byte\n",
1111 cycles / 8, cycles / (8 * blen));
1112
1113 return 0;
1114 }
1115
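/*
 * Hash speed test: hash buffers of the sizes given in the hash_speed table
 * out of the 0xff-filled tvmem pages, reporting throughput or cycle counts
 * for each block/update length combination.
 */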
1116 static void test_hash_speed(const char *algo, unsigned int sec,
1117 struct hash_speed *speed)
1118 {
1119 struct scatterlist sg[TVMEMSIZE];
1120 struct crypto_hash *tfm;
1121 struct hash_desc desc;
1122 char output[1024];
1123 int i;
1124 int ret;
1125
1126 printk("\ntesting speed of %s\n", algo);
1127
1128 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
1129
1130 if (IS_ERR(tfm)) {
1131 printk("failed to load transform for %s: %ld\n", algo,
1132 PTR_ERR(tfm));
1133 return;
1134 }
1135
1136 desc.tfm = tfm;
1137 desc.flags = 0;
1138
1139 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
1140 printk("digestsize(%u) > outputbuffer(%zu)\n",
1141 crypto_hash_digestsize(tfm), sizeof(output));
1142 goto out;
1143 }
1144
1145 sg_init_table(sg, TVMEMSIZE);
1146 for (i = 0; i < TVMEMSIZE; i++) {
1147 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
1148 memset(tvmem[i], 0xff, PAGE_SIZE);
1149 }
1150
1151 for (i = 0; speed[i].blen != 0; i++) {
1152 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
1153 printk("template (%u) too big for tvmem (%lu)\n",
1154 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
1155 goto out;
1156 }
1157
1158 printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
1159 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
1160
1161 if (sec)
1162 ret = test_hash_jiffies(&desc, sg, speed[i].blen,
1163 speed[i].plen, output, sec);
1164 else
1165 ret = test_hash_cycles(&desc, sg, speed[i].blen,
1166 speed[i].plen, output);
1167
1168 if (ret) {
1169 printk("hashing failed ret=%d\n", ret);
1170 break;
1171 }
1172 }
1173
1174 out:
1175 crypto_free_hash(tfm);
1176 }
1177
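/*
 * Run the compression and decompression test vectors through a crypto_comp
 * transform and compare the output against the expected data.
 */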
1178 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1179 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1180 {
1181 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1182 unsigned int i;
1183 char result[COMP_BUF_SIZE];
1184 int ret;
1185
1186 for (i = 0; i < ctcount; i++) {
1187 int ilen, dlen = COMP_BUF_SIZE;
1188
1189 memset(result, 0, sizeof (result));
1190
1191 ilen = ctemplate[i].inlen;
1192 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1193 ilen, result, &dlen);
1194 if (ret) {
1195 printk(KERN_ERR "alg: comp: compression failed "
1196 "on test %d for %s: ret=%d\n", i + 1, algo,
1197 -ret);
1198 goto out;
1199 }
1200
1201 if (memcmp(result, ctemplate[i].output, dlen)) {
1202 printk(KERN_ERR "alg: comp: Compression test %d "
1203 "failed for %s\n", i + 1, algo);
1204 hexdump(result, dlen);
1205 ret = -EINVAL;
1206 goto out;
1207 }
1208 }
1209
1210 for (i = 0; i < dtcount; i++) {
1211 int ilen, ret, dlen = COMP_BUF_SIZE;
1212
1213 memset(result, 0, sizeof (result));
1214
1215 ilen = dtemplate[i].inlen;
1216 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1217 ilen, result, &dlen);
1218 if (ret) {
1219 printk(KERN_ERR "alg: comp: decompression failed "
1220 "on test %d for %s: ret=%d\n", i + 1, algo,
1221 -ret);
1222 goto out;
1223 }
1224
1225 if (memcmp(result, dtemplate[i].output, dlen)) {
1226 printk(KERN_ERR "alg: comp: Decompression test %d "
1227 "failed for %s\n", i + 1, algo);
1228 hexdump(result, dlen);
1229 ret = -EINVAL;
1230 goto out;
1231 }
1232 }
1233
1234 ret = 0;
1235
1236 out:
1237 return ret;
1238 }
1239
1240 static void test_available(void)
1241 {
1242 char **name = check;
1243
1244 while (*name) {
1245 printk("alg %s ", *name);
1246 printk(crypto_has_alg(*name, 0, 0) ?
1247 "found\n" : "not found\n");
1248 name++;
1249 }
1250 }
1251
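/*
 * alg_test_*() wrappers: allocate the transform named by 'driver', run the
 * test vectors from the matching suite in the descriptor, and free the
 * transform again.
 */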
1252 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1253 u32 type, u32 mask)
1254 {
1255 struct crypto_aead *tfm;
1256 int err = 0;
1257
1258 tfm = crypto_alloc_aead(driver, type, mask);
1259 if (IS_ERR(tfm)) {
1260 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1261 "%ld\n", driver, PTR_ERR(tfm));
1262 return PTR_ERR(tfm);
1263 }
1264
1265 if (desc->suite.aead.enc.vecs) {
1266 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1267 desc->suite.aead.enc.count);
1268 if (err)
1269 goto out;
1270 }
1271
1272 if (!err && desc->suite.aead.dec.vecs)
1273 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1274 desc->suite.aead.dec.count);
1275
1276 out:
1277 crypto_free_aead(tfm);
1278 return err;
1279 }
1280
1281 static int alg_test_cipher(const struct alg_test_desc *desc,
1282 const char *driver, u32 type, u32 mask)
1283 {
1284 struct crypto_ablkcipher *tfm;
1285 int err = 0;
1286
1287 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1288 if (IS_ERR(tfm)) {
1289 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1290 "%s: %ld\n", driver, PTR_ERR(tfm));
1291 return PTR_ERR(tfm);
1292 }
1293
1294 if (desc->suite.cipher.enc.vecs) {
1295 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1296 desc->suite.cipher.enc.count);
1297 if (err)
1298 goto out;
1299 }
1300
1301 if (desc->suite.cipher.dec.vecs)
1302 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1303 desc->suite.cipher.dec.count);
1304
1305 out:
1306 crypto_free_ablkcipher(tfm);
1307 return err;
1308 }
1309
1310 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1311 u32 type, u32 mask)
1312 {
1313 struct crypto_comp *tfm;
1314 int err;
1315
1316 tfm = crypto_alloc_comp(driver, type, mask);
1317 if (IS_ERR(tfm)) {
1318 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1319 "%ld\n", driver, PTR_ERR(tfm));
1320 return PTR_ERR(tfm);
1321 }
1322
1323 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1324 desc->suite.comp.decomp.vecs,
1325 desc->suite.comp.comp.count,
1326 desc->suite.comp.decomp.count);
1327
1328 crypto_free_comp(tfm);
1329 return err;
1330 }
1331
1332 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1333 u32 type, u32 mask)
1334 {
1335 struct crypto_ahash *tfm;
1336 int err;
1337
1338 tfm = crypto_alloc_ahash(driver, type, mask);
1339 if (IS_ERR(tfm)) {
1340 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1341 "%ld\n", driver, PTR_ERR(tfm));
1342 return PTR_ERR(tfm);
1343 }
1344
1345 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1346
1347 crypto_free_ahash(tfm);
1348 return err;
1349 }
1350
1351 /* Please keep this list sorted by algorithm name. */
1352 static const struct alg_test_desc alg_test_descs[] = {
1353 {
1354 .alg = "cbc(aes)",
1355 .test = alg_test_cipher,
1356 .suite = {
1357 .cipher = {
1358 .enc = {
1359 .vecs = aes_cbc_enc_tv_template,
1360 .count = AES_CBC_ENC_TEST_VECTORS
1361 },
1362 .dec = {
1363 .vecs = aes_cbc_dec_tv_template,
1364 .count = AES_CBC_DEC_TEST_VECTORS
1365 }
1366 }
1367 }
1368 }, {
1369 .alg = "cbc(anubis)",
1370 .test = alg_test_cipher,
1371 .suite = {
1372 .cipher = {
1373 .enc = {
1374 .vecs = anubis_cbc_enc_tv_template,
1375 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1376 },
1377 .dec = {
1378 .vecs = anubis_cbc_dec_tv_template,
1379 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1380 }
1381 }
1382 }
1383 }, {
1384 .alg = "cbc(blowfish)",
1385 .test = alg_test_cipher,
1386 .suite = {
1387 .cipher = {
1388 .enc = {
1389 .vecs = bf_cbc_enc_tv_template,
1390 .count = BF_CBC_ENC_TEST_VECTORS
1391 },
1392 .dec = {
1393 .vecs = bf_cbc_dec_tv_template,
1394 .count = BF_CBC_DEC_TEST_VECTORS
1395 }
1396 }
1397 }
1398 }, {
1399 .alg = "cbc(camellia)",
1400 .test = alg_test_cipher,
1401 .suite = {
1402 .cipher = {
1403 .enc = {
1404 .vecs = camellia_cbc_enc_tv_template,
1405 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1406 },
1407 .dec = {
1408 .vecs = camellia_cbc_dec_tv_template,
1409 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1410 }
1411 }
1412 }
1413 }, {
1414 .alg = "cbc(des)",
1415 .test = alg_test_cipher,
1416 .suite = {
1417 .cipher = {
1418 .enc = {
1419 .vecs = des_cbc_enc_tv_template,
1420 .count = DES_CBC_ENC_TEST_VECTORS
1421 },
1422 .dec = {
1423 .vecs = des_cbc_dec_tv_template,
1424 .count = DES_CBC_DEC_TEST_VECTORS
1425 }
1426 }
1427 }
1428 }, {
1429 .alg = "cbc(des3_ede)",
1430 .test = alg_test_cipher,
1431 .suite = {
1432 .cipher = {
1433 .enc = {
1434 .vecs = des3_ede_cbc_enc_tv_template,
1435 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1436 },
1437 .dec = {
1438 .vecs = des3_ede_cbc_dec_tv_template,
1439 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1440 }
1441 }
1442 }
1443 }, {
1444 .alg = "cbc(twofish)",
1445 .test = alg_test_cipher,
1446 .suite = {
1447 .cipher = {
1448 .enc = {
1449 .vecs = tf_cbc_enc_tv_template,
1450 .count = TF_CBC_ENC_TEST_VECTORS
1451 },
1452 .dec = {
1453 .vecs = tf_cbc_dec_tv_template,
1454 .count = TF_CBC_DEC_TEST_VECTORS
1455 }
1456 }
1457 }
1458 }, {
1459 .alg = "ccm(aes)",
1460 .test = alg_test_aead,
1461 .suite = {
1462 .aead = {
1463 .enc = {
1464 .vecs = aes_ccm_enc_tv_template,
1465 .count = AES_CCM_ENC_TEST_VECTORS
1466 },
1467 .dec = {
1468 .vecs = aes_ccm_dec_tv_template,
1469 .count = AES_CCM_DEC_TEST_VECTORS
1470 }
1471 }
1472 }
1473 }, {
1474 .alg = "crc32c",
1475 .test = alg_test_hash,
1476 .suite = {
1477 .hash = {
1478 .vecs = crc32c_tv_template,
1479 .count = CRC32C_TEST_VECTORS
1480 }
1481 }
1482 }, {
1483 .alg = "cts(cbc(aes))",
1484 .test = alg_test_cipher,
1485 .suite = {
1486 .cipher = {
1487 .enc = {
1488 .vecs = cts_mode_enc_tv_template,
1489 .count = CTS_MODE_ENC_TEST_VECTORS
1490 },
1491 .dec = {
1492 .vecs = cts_mode_dec_tv_template,
1493 .count = CTS_MODE_DEC_TEST_VECTORS
1494 }
1495 }
1496 }
1497 }, {
1498 .alg = "deflate",
1499 .test = alg_test_comp,
1500 .suite = {
1501 .comp = {
1502 .comp = {
1503 .vecs = deflate_comp_tv_template,
1504 .count = DEFLATE_COMP_TEST_VECTORS
1505 },
1506 .decomp = {
1507 .vecs = deflate_decomp_tv_template,
1508 .count = DEFLATE_DECOMP_TEST_VECTORS
1509 }
1510 }
1511 }
1512 }, {
1513 .alg = "ecb(aes)",
1514 .test = alg_test_cipher,
1515 .suite = {
1516 .cipher = {
1517 .enc = {
1518 .vecs = aes_enc_tv_template,
1519 .count = AES_ENC_TEST_VECTORS
1520 },
1521 .dec = {
1522 .vecs = aes_dec_tv_template,
1523 .count = AES_DEC_TEST_VECTORS
1524 }
1525 }
1526 }
1527 }, {
1528 .alg = "ecb(anubis)",
1529 .test = alg_test_cipher,
1530 .suite = {
1531 .cipher = {
1532 .enc = {
1533 .vecs = anubis_enc_tv_template,
1534 .count = ANUBIS_ENC_TEST_VECTORS
1535 },
1536 .dec = {
1537 .vecs = anubis_dec_tv_template,
1538 .count = ANUBIS_DEC_TEST_VECTORS
1539 }
1540 }
1541 }
1542 }, {
1543 .alg = "ecb(arc4)",
1544 .test = alg_test_cipher,
1545 .suite = {
1546 .cipher = {
1547 .enc = {
1548 .vecs = arc4_enc_tv_template,
1549 .count = ARC4_ENC_TEST_VECTORS
1550 },
1551 .dec = {
1552 .vecs = arc4_dec_tv_template,
1553 .count = ARC4_DEC_TEST_VECTORS
1554 }
1555 }
1556 }
1557 }, {
1558 .alg = "ecb(blowfish)",
1559 .test = alg_test_cipher,
1560 .suite = {
1561 .cipher = {
1562 .enc = {
1563 .vecs = bf_enc_tv_template,
1564 .count = BF_ENC_TEST_VECTORS
1565 },
1566 .dec = {
1567 .vecs = bf_dec_tv_template,
1568 .count = BF_DEC_TEST_VECTORS
1569 }
1570 }
1571 }
1572 }, {
1573 .alg = "ecb(camellia)",
1574 .test = alg_test_cipher,
1575 .suite = {
1576 .cipher = {
1577 .enc = {
1578 .vecs = camellia_enc_tv_template,
1579 .count = CAMELLIA_ENC_TEST_VECTORS
1580 },
1581 .dec = {
1582 .vecs = camellia_dec_tv_template,
1583 .count = CAMELLIA_DEC_TEST_VECTORS
1584 }
1585 }
1586 }
1587 }, {
1588 .alg = "ecb(cast5)",
1589 .test = alg_test_cipher,
1590 .suite = {
1591 .cipher = {
1592 .enc = {
1593 .vecs = cast5_enc_tv_template,
1594 .count = CAST5_ENC_TEST_VECTORS
1595 },
1596 .dec = {
1597 .vecs = cast5_dec_tv_template,
1598 .count = CAST5_DEC_TEST_VECTORS
1599 }
1600 }
1601 }
1602 }, {
1603 .alg = "ecb(cast6)",
1604 .test = alg_test_cipher,
1605 .suite = {
1606 .cipher = {
1607 .enc = {
1608 .vecs = cast6_enc_tv_template,
1609 .count = CAST6_ENC_TEST_VECTORS
1610 },
1611 .dec = {
1612 .vecs = cast6_dec_tv_template,
1613 .count = CAST6_DEC_TEST_VECTORS
1614 }
1615 }
1616 }
1617 }, {
1618 .alg = "ecb(des)",
1619 .test = alg_test_cipher,
1620 .suite = {
1621 .cipher = {
1622 .enc = {
1623 .vecs = des_enc_tv_template,
1624 .count = DES_ENC_TEST_VECTORS
1625 },
1626 .dec = {
1627 .vecs = des_dec_tv_template,
1628 .count = DES_DEC_TEST_VECTORS
1629 }
1630 }
1631 }
1632 }, {
1633 .alg = "ecb(des3_ede)",
1634 .test = alg_test_cipher,
1635 .suite = {
1636 .cipher = {
1637 .enc = {
1638 .vecs = des3_ede_enc_tv_template,
1639 .count = DES3_EDE_ENC_TEST_VECTORS
1640 },
1641 .dec = {
1642 .vecs = des3_ede_dec_tv_template,
1643 .count = DES3_EDE_DEC_TEST_VECTORS
1644 }
1645 }
1646 }
1647 }, {
1648 .alg = "ecb(khazad)",
1649 .test = alg_test_cipher,
1650 .suite = {
1651 .cipher = {
1652 .enc = {
1653 .vecs = khazad_enc_tv_template,
1654 .count = KHAZAD_ENC_TEST_VECTORS
1655 },
1656 .dec = {
1657 .vecs = khazad_dec_tv_template,
1658 .count = KHAZAD_DEC_TEST_VECTORS
1659 }
1660 }
1661 }
1662 }, {
1663 .alg = "ecb(seed)",
1664 .test = alg_test_cipher,
1665 .suite = {
1666 .cipher = {
1667 .enc = {
1668 .vecs = seed_enc_tv_template,
1669 .count = SEED_ENC_TEST_VECTORS
1670 },
1671 .dec = {
1672 .vecs = seed_dec_tv_template,
1673 .count = SEED_DEC_TEST_VECTORS
1674 }
1675 }
1676 }
1677 }, {
1678 .alg = "ecb(serpent)",
1679 .test = alg_test_cipher,
1680 .suite = {
1681 .cipher = {
1682 .enc = {
1683 .vecs = serpent_enc_tv_template,
1684 .count = SERPENT_ENC_TEST_VECTORS
1685 },
1686 .dec = {
1687 .vecs = serpent_dec_tv_template,
1688 .count = SERPENT_DEC_TEST_VECTORS
1689 }
1690 }
1691 }
1692 }, {
1693 .alg = "ecb(tea)",
1694 .test = alg_test_cipher,
1695 .suite = {
1696 .cipher = {
1697 .enc = {
1698 .vecs = tea_enc_tv_template,
1699 .count = TEA_ENC_TEST_VECTORS
1700 },
1701 .dec = {
1702 .vecs = tea_dec_tv_template,
1703 .count = TEA_DEC_TEST_VECTORS
1704 }
1705 }
1706 }
1707 }, {
1708 .alg = "ecb(tnepres)",
1709 .test = alg_test_cipher,
1710 .suite = {
1711 .cipher = {
1712 .enc = {
1713 .vecs = tnepres_enc_tv_template,
1714 .count = TNEPRES_ENC_TEST_VECTORS
1715 },
1716 .dec = {
1717 .vecs = tnepres_dec_tv_template,
1718 .count = TNEPRES_DEC_TEST_VECTORS
1719 }
1720 }
1721 }
1722 }, {
1723 .alg = "ecb(twofish)",
1724 .test = alg_test_cipher,
1725 .suite = {
1726 .cipher = {
1727 .enc = {
1728 .vecs = tf_enc_tv_template,
1729 .count = TF_ENC_TEST_VECTORS
1730 },
1731 .dec = {
1732 .vecs = tf_dec_tv_template,
1733 .count = TF_DEC_TEST_VECTORS
1734 }
1735 }
1736 }
1737 }, {
1738 .alg = "ecb(xeta)",
1739 .test = alg_test_cipher,
1740 .suite = {
1741 .cipher = {
1742 .enc = {
1743 .vecs = xeta_enc_tv_template,
1744 .count = XETA_ENC_TEST_VECTORS
1745 },
1746 .dec = {
1747 .vecs = xeta_dec_tv_template,
1748 .count = XETA_DEC_TEST_VECTORS
1749 }
1750 }
1751 }
1752 }, {
1753 .alg = "ecb(xtea)",
1754 .test = alg_test_cipher,
1755 .suite = {
1756 .cipher = {
1757 .enc = {
1758 .vecs = xtea_enc_tv_template,
1759 .count = XTEA_ENC_TEST_VECTORS
1760 },
1761 .dec = {
1762 .vecs = xtea_dec_tv_template,
1763 .count = XTEA_DEC_TEST_VECTORS
1764 }
1765 }
1766 }
1767 }, {
1768 .alg = "gcm(aes)",
1769 .test = alg_test_aead,
1770 .suite = {
1771 .aead = {
1772 .enc = {
1773 .vecs = aes_gcm_enc_tv_template,
1774 .count = AES_GCM_ENC_TEST_VECTORS
1775 },
1776 .dec = {
1777 .vecs = aes_gcm_dec_tv_template,
1778 .count = AES_GCM_DEC_TEST_VECTORS
1779 }
1780 }
1781 }
1782 }, {
1783 .alg = "hmac(md5)",
1784 .test = alg_test_hash,
1785 .suite = {
1786 .hash = {
1787 .vecs = hmac_md5_tv_template,
1788 .count = HMAC_MD5_TEST_VECTORS
1789 }
1790 }
1791 }, {
1792 .alg = "hmac(rmd128)",
1793 .test = alg_test_hash,
1794 .suite = {
1795 .hash = {
1796 .vecs = hmac_rmd128_tv_template,
1797 .count = HMAC_RMD128_TEST_VECTORS
1798 }
1799 }
1800 }, {
1801 .alg = "hmac(rmd160)",
1802 .test = alg_test_hash,
1803 .suite = {
1804 .hash = {
1805 .vecs = hmac_rmd160_tv_template,
1806 .count = HMAC_RMD160_TEST_VECTORS
1807 }
1808 }
1809 }, {
1810 .alg = "hmac(sha1)",
1811 .test = alg_test_hash,
1812 .suite = {
1813 .hash = {
1814 .vecs = hmac_sha1_tv_template,
1815 .count = HMAC_SHA1_TEST_VECTORS
1816 }
1817 }
1818 }, {
1819 .alg = "hmac(sha224)",
1820 .test = alg_test_hash,
1821 .suite = {
1822 .hash = {
1823 .vecs = hmac_sha224_tv_template,
1824 .count = HMAC_SHA224_TEST_VECTORS
1825 }
1826 }
1827 }, {
1828 .alg = "hmac(sha256)",
1829 .test = alg_test_hash,
1830 .suite = {
1831 .hash = {
1832 .vecs = hmac_sha256_tv_template,
1833 .count = HMAC_SHA256_TEST_VECTORS
1834 }
1835 }
1836 }, {
1837 .alg = "hmac(sha384)",
1838 .test = alg_test_hash,
1839 .suite = {
1840 .hash = {
1841 .vecs = hmac_sha384_tv_template,
1842 .count = HMAC_SHA384_TEST_VECTORS
1843 }
1844 }
1845 }, {
1846 .alg = "hmac(sha512)",
1847 .test = alg_test_hash,
1848 .suite = {
1849 .hash = {
1850 .vecs = hmac_sha512_tv_template,
1851 .count = HMAC_SHA512_TEST_VECTORS
1852 }
1853 }
1854 }, {
1855 .alg = "lrw(aes)",
1856 .test = alg_test_cipher,
1857 .suite = {
1858 .cipher = {
1859 .enc = {
1860 .vecs = aes_lrw_enc_tv_template,
1861 .count = AES_LRW_ENC_TEST_VECTORS
1862 },
1863 .dec = {
1864 .vecs = aes_lrw_dec_tv_template,
1865 .count = AES_LRW_DEC_TEST_VECTORS
1866 }
1867 }
1868 }
1869 }, {
1870 .alg = "lzo",
1871 .test = alg_test_comp,
1872 .suite = {
1873 .comp = {
1874 .comp = {
1875 .vecs = lzo_comp_tv_template,
1876 .count = LZO_COMP_TEST_VECTORS
1877 },
1878 .decomp = {
1879 .vecs = lzo_decomp_tv_template,
1880 .count = LZO_DECOMP_TEST_VECTORS
1881 }
1882 }
1883 }
1884 }, {
1885 .alg = "md4",
1886 .test = alg_test_hash,
1887 .suite = {
1888 .hash = {
1889 .vecs = md4_tv_template,
1890 .count = MD4_TEST_VECTORS
1891 }
1892 }
1893 }, {
1894 .alg = "md5",
1895 .test = alg_test_hash,
1896 .suite = {
1897 .hash = {
1898 .vecs = md5_tv_template,
1899 .count = MD5_TEST_VECTORS
1900 }
1901 }
1902 }, {
1903 .alg = "michael_mic",
1904 .test = alg_test_hash,
1905 .suite = {
1906 .hash = {
1907 .vecs = michael_mic_tv_template,
1908 .count = MICHAEL_MIC_TEST_VECTORS
1909 }
1910 }
1911 }, {
1912 .alg = "pcbc(fcrypt)",
1913 .test = alg_test_cipher,
1914 .suite = {
1915 .cipher = {
1916 .enc = {
1917 .vecs = fcrypt_pcbc_enc_tv_template,
1918 .count = FCRYPT_ENC_TEST_VECTORS
1919 },
1920 .dec = {
1921 .vecs = fcrypt_pcbc_dec_tv_template,
1922 .count = FCRYPT_DEC_TEST_VECTORS
1923 }
1924 }
1925 }
1926 }, {
1927 .alg = "rfc3686(ctr(aes))",
1928 .test = alg_test_cipher,
1929 .suite = {
1930 .cipher = {
1931 .enc = {
1932 .vecs = aes_ctr_enc_tv_template,
1933 .count = AES_CTR_ENC_TEST_VECTORS
1934 },
1935 .dec = {
1936 .vecs = aes_ctr_dec_tv_template,
1937 .count = AES_CTR_DEC_TEST_VECTORS
1938 }
1939 }
1940 }
1941 }, {
1942 .alg = "rmd128",
1943 .test = alg_test_hash,
1944 .suite = {
1945 .hash = {
1946 .vecs = rmd128_tv_template,
1947 .count = RMD128_TEST_VECTORS
1948 }
1949 }
1950 }, {
1951 .alg = "rmd160",
1952 .test = alg_test_hash,
1953 .suite = {
1954 .hash = {
1955 .vecs = rmd160_tv_template,
1956 .count = RMD160_TEST_VECTORS
1957 }
1958 }
1959 }, {
1960 .alg = "rmd256",
1961 .test = alg_test_hash,
1962 .suite = {
1963 .hash = {
1964 .vecs = rmd256_tv_template,
1965 .count = RMD256_TEST_VECTORS
1966 }
1967 }
1968 }, {
1969 .alg = "rmd320",
1970 .test = alg_test_hash,
1971 .suite = {
1972 .hash = {
1973 .vecs = rmd320_tv_template,
1974 .count = RMD320_TEST_VECTORS
1975 }
1976 }
1977 }, {
1978 .alg = "salsa20",
1979 .test = alg_test_cipher,
1980 .suite = {
1981 .cipher = {
1982 .enc = {
1983 .vecs = salsa20_stream_enc_tv_template,
1984 .count = SALSA20_STREAM_ENC_TEST_VECTORS
1985 }
1986 }
1987 }
1988 }, {
1989 .alg = "sha1",
1990 .test = alg_test_hash,
1991 .suite = {
1992 .hash = {
1993 .vecs = sha1_tv_template,
1994 .count = SHA1_TEST_VECTORS
1995 }
1996 }
1997 }, {
1998 .alg = "sha224",
1999 .test = alg_test_hash,
2000 .suite = {
2001 .hash = {
2002 .vecs = sha224_tv_template,
2003 .count = SHA224_TEST_VECTORS
2004 }
2005 }
2006 }, {
2007 .alg = "sha256",
2008 .test = alg_test_hash,
2009 .suite = {
2010 .hash = {
2011 .vecs = sha256_tv_template,
2012 .count = SHA256_TEST_VECTORS
2013 }
2014 }
2015 }, {
2016 .alg = "sha384",
2017 .test = alg_test_hash,
2018 .suite = {
2019 .hash = {
2020 .vecs = sha384_tv_template,
2021 .count = SHA384_TEST_VECTORS
2022 }
2023 }
2024 }, {
2025 .alg = "sha512",
2026 .test = alg_test_hash,
2027 .suite = {
2028 .hash = {
2029 .vecs = sha512_tv_template,
2030 .count = SHA512_TEST_VECTORS
2031 }
2032 }
2033 }, {
2034 .alg = "tgr128",
2035 .test = alg_test_hash,
2036 .suite = {
2037 .hash = {
2038 .vecs = tgr128_tv_template,
2039 .count = TGR128_TEST_VECTORS
2040 }
2041 }
2042 }, {
2043 .alg = "tgr160",
2044 .test = alg_test_hash,
2045 .suite = {
2046 .hash = {
2047 .vecs = tgr160_tv_template,
2048 .count = TGR160_TEST_VECTORS
2049 }
2050 }
2051 }, {
2052 .alg = "tgr192",
2053 .test = alg_test_hash,
2054 .suite = {
2055 .hash = {
2056 .vecs = tgr192_tv_template,
2057 .count = TGR192_TEST_VECTORS
2058 }
2059 }
2060 }, {
2061 .alg = "wp256",
2062 .test = alg_test_hash,
2063 .suite = {
2064 .hash = {
2065 .vecs = wp256_tv_template,
2066 .count = WP256_TEST_VECTORS
2067 }
2068 }
2069 }, {
2070 .alg = "wp384",
2071 .test = alg_test_hash,
2072 .suite = {
2073 .hash = {
2074 .vecs = wp384_tv_template,
2075 .count = WP384_TEST_VECTORS
2076 }
2077 }
2078 }, {
2079 .alg = "wp512",
2080 .test = alg_test_hash,
2081 .suite = {
2082 .hash = {
2083 .vecs = wp512_tv_template,
2084 .count = WP512_TEST_VECTORS
2085 }
2086 }
2087 }, {
2088 .alg = "xcbc(aes)",
2089 .test = alg_test_hash,
2090 .suite = {
2091 .hash = {
2092 .vecs = aes_xcbc128_tv_template,
2093 .count = XCBC_AES_TEST_VECTORS
2094 }
2095 }
2096 }, {
2097 .alg = "xts(aes)",
2098 .test = alg_test_cipher,
2099 .suite = {
2100 .cipher = {
2101 .enc = {
2102 .vecs = aes_xts_enc_tv_template,
2103 .count = AES_XTS_ENC_TEST_VECTORS
2104 },
2105 .dec = {
2106 .vecs = aes_xts_dec_tv_template,
2107 .count = AES_XTS_DEC_TEST_VECTORS
2108 }
2109 }
2110 }
2111 }
2112 };
2113
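/*
 * Entry point of the alg_test interface: binary-search the sorted
 * alg_test_descs[] table for 'alg' and run the associated test function,
 * or report that no test is known for the algorithm.
 */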
2114 static int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2115 {
2116 int start = 0;
2117 int end = ARRAY_SIZE(alg_test_descs);
2118
2119 while (start < end) {
2120 int i = (start + end) / 2;
2121 int diff = strcmp(alg_test_descs[i].alg, alg);
2122
2123 if (diff > 0) {
2124 end = i;
2125 continue;
2126 }
2127
2128 if (diff < 0) {
2129 start = i + 1;
2130 continue;
2131 }
2132
2133 return alg_test_descs[i].test(alg_test_descs + i, driver,
2134 type, mask);
2135 }
2136
2137 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2138 return 0;
2139 }
2140
2141 static inline int tcrypt_test(const char *alg)
2142 {
2143 return alg_test(alg, alg, 0, 0);
2144 }
2145
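/*
 * Dispatch on the 'mode' module parameter: mode 0 runs every correctness
 * test (1-199), the 200 range runs cipher speed tests, the 300 range runs
 * hash speed tests (300 itself runs them all), and 1000 reports which
 * algorithms are available.
 */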
2146 static void do_test(int m)
2147 {
2148 int i;
2149
2150 switch (m) {
2151 case 0:
2152 for (i = 1; i < 200; i++)
2153 do_test(i);
2154 break;
2155
2156 case 1:
2157 tcrypt_test("md5");
2158 break;
2159
2160 case 2:
2161 tcrypt_test("sha1");
2162 break;
2163
2164 case 3:
2165 tcrypt_test("ecb(des)");
2166 tcrypt_test("cbc(des)");
2167 break;
2168
2169 case 4:
2170 tcrypt_test("ecb(des3_ede)");
2171 tcrypt_test("cbc(des3_ede)");
2172 break;
2173
2174 case 5:
2175 tcrypt_test("md4");
2176 break;
2177
2178 case 6:
2179 tcrypt_test("sha256");
2180 break;
2181
2182 case 7:
2183 tcrypt_test("ecb(blowfish)");
2184 tcrypt_test("cbc(blowfish)");
2185 break;
2186
2187 case 8:
2188 tcrypt_test("ecb(twofish)");
2189 tcrypt_test("cbc(twofish)");
2190 break;
2191
2192 case 9:
2193 tcrypt_test("ecb(serpent)");
2194 break;
2195
2196 case 10:
2197 tcrypt_test("ecb(aes)");
2198 tcrypt_test("cbc(aes)");
2199 tcrypt_test("lrw(aes)");
2200 tcrypt_test("xts(aes)");
2201 tcrypt_test("rfc3686(ctr(aes))");
2202 break;
2203
2204 case 11:
2205 tcrypt_test("sha384");
2206 break;
2207
2208 case 12:
2209 tcrypt_test("sha512");
2210 break;
2211
2212 case 13:
2213 tcrypt_test("deflate");
2214 break;
2215
2216 case 14:
2217 tcrypt_test("ecb(cast5)");
2218 break;
2219
2220 case 15:
2221 tcrypt_test("ecb(cast6)");
2222 break;
2223
2224 case 16:
2225 tcrypt_test("ecb(arc4)");
2226 break;
2227
2228 case 17:
2229 tcrypt_test("michael_mic");
2230 break;
2231
2232 case 18:
2233 tcrypt_test("crc32c");
2234 break;
2235
2236 case 19:
2237 tcrypt_test("ecb(tea)");
2238 break;
2239
2240 case 20:
2241 tcrypt_test("ecb(xtea)");
2242 break;
2243
2244 case 21:
2245 tcrypt_test("ecb(khazad)");
2246 break;
2247
2248 case 22:
2249 tcrypt_test("wp512");
2250 break;
2251
2252 case 23:
2253 tcrypt_test("wp384");
2254 break;
2255
2256 case 24:
2257 tcrypt_test("wp256");
2258 break;
2259
2260 case 25:
2261 tcrypt_test("ecb(tnepres)");
2262 break;
2263
2264 case 26:
2265 tcrypt_test("ecb(anubis)");
2266 tcrypt_test("cbc(anubis)");
2267 break;
2268
2269 case 27:
2270 tcrypt_test("tgr192");
2271 break;
2272
2273 case 28:
2274
2275 tcrypt_test("tgr160");
2276 break;
2277
2278 case 29:
2279 tcrypt_test("tgr128");
2280 break;
2281
2282 case 30:
2283 tcrypt_test("ecb(xeta)");
2284 break;
2285
2286 case 31:
2287 tcrypt_test("pcbc(fcrypt)");
2288 break;
2289
2290 case 32:
2291 tcrypt_test("ecb(camellia)");
2292 tcrypt_test("cbc(camellia)");
2293 break;
2294 case 33:
2295 tcrypt_test("sha224");
2296 break;
2297
2298 case 34:
2299 tcrypt_test("salsa20");
2300 break;
2301
2302 case 35:
2303 tcrypt_test("gcm(aes)");
2304 break;
2305
2306 case 36:
2307 tcrypt_test("lzo");
2308 break;
2309
2310 case 37:
2311 tcrypt_test("ccm(aes)");
2312 break;
2313
2314 case 38:
2315 tcrypt_test("cts(cbc(aes))");
2316 break;
2317
2318 case 39:
2319 tcrypt_test("rmd128");
2320 break;
2321
2322 case 40:
2323 tcrypt_test("rmd160");
2324 break;
2325
2326 case 41:
2327 tcrypt_test("rmd256");
2328 break;
2329
2330 case 42:
2331 tcrypt_test("rmd320");
2332 break;
2333
2334 case 43:
2335 tcrypt_test("ecb(seed)");
2336 break;
2337
2338 case 100:
2339 tcrypt_test("hmac(md5)");
2340 break;
2341
2342 case 101:
2343 tcrypt_test("hmac(sha1)");
2344 break;
2345
2346 case 102:
2347 tcrypt_test("hmac(sha256)");
2348 break;
2349
2350 case 103:
2351 tcrypt_test("hmac(sha384)");
2352 break;
2353
2354 case 104:
2355 tcrypt_test("hmac(sha512)");
2356 break;
2357
2358 case 105:
2359 tcrypt_test("hmac(sha224)");
2360 break;
2361
2362 case 106:
2363 tcrypt_test("xcbc(aes)");
2364 break;
2365
2366 case 107:
2367 tcrypt_test("hmac(rmd128)");
2368 break;
2369
2370 case 108:
2371 tcrypt_test("hmac(rmd160)");
2372 break;
2373
2374 case 200:
2375 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2376 speed_template_16_24_32);
2377 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2378 speed_template_16_24_32);
2379 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2380 speed_template_16_24_32);
2381 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2382 speed_template_16_24_32);
2383 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2384 speed_template_32_40_48);
2385 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2386 speed_template_32_40_48);
2387 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2388 speed_template_32_48_64);
2389 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2390 speed_template_32_48_64);
2391 break;
2392
2393 case 201:
2394 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2395 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
2396 speed_template_24);
2397 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
2398 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
2399 speed_template_24);
2400 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2401 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
2402 speed_template_24);
2403 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
2404 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
2405 speed_template_24);
2406 break;
2407
2408 case 202:
2409 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2410 speed_template_16_24_32);
2411 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2412 speed_template_16_24_32);
2413 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2414 speed_template_16_24_32);
2415 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2416 speed_template_16_24_32);
2417 break;
2418
2419 case 203:
2420 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2421 speed_template_8_32);
2422 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2423 speed_template_8_32);
2424 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2425 speed_template_8_32);
2426 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2427 speed_template_8_32);
2428 break;
2429
2430 case 204:
2431 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2432 speed_template_8);
2433 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2434 speed_template_8);
2435 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2436 speed_template_8);
2437 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2438 speed_template_8);
2439 break;
2440
2441 case 205:
2442 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2443 speed_template_16_24_32);
2444 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2445 speed_template_16_24_32);
2446 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2447 speed_template_16_24_32);
2448 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2449 speed_template_16_24_32);
2450 break;
2451
2452 case 206:
2453 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
2454 speed_template_16_32);
2455 break;
2456
2457 case 300:
2458 /* fall through */
2459
2460 case 301:
2461 test_hash_speed("md4", sec, generic_hash_speed_template);
2462 if (mode > 300 && mode < 400) break;
2463
2464 case 302:
2465 test_hash_speed("md5", sec, generic_hash_speed_template);
2466 if (mode > 300 && mode < 400) break;
2467
2468 case 303:
2469 test_hash_speed("sha1", sec, generic_hash_speed_template);
2470 if (mode > 300 && mode < 400) break;
2471
2472 case 304:
2473 test_hash_speed("sha256", sec, generic_hash_speed_template);
2474 if (mode > 300 && mode < 400) break;
2475
2476 case 305:
2477 test_hash_speed("sha384", sec, generic_hash_speed_template);
2478 if (mode > 300 && mode < 400) break;
2479
2480 case 306:
2481 test_hash_speed("sha512", sec, generic_hash_speed_template);
2482 if (mode > 300 && mode < 400) break;
2483
2484 case 307:
2485 test_hash_speed("wp256", sec, generic_hash_speed_template);
2486 if (mode > 300 && mode < 400) break;
2487
2488 case 308:
2489 test_hash_speed("wp384", sec, generic_hash_speed_template);
2490 if (mode > 300 && mode < 400) break;
2491
2492 case 309:
2493 test_hash_speed("wp512", sec, generic_hash_speed_template);
2494 if (mode > 300 && mode < 400) break;
2495
2496 case 310:
2497 test_hash_speed("tgr128", sec, generic_hash_speed_template);
2498 if (mode > 300 && mode < 400) break;
2499
2500 case 311:
2501 test_hash_speed("tgr160", sec, generic_hash_speed_template);
2502 if (mode > 300 && mode < 400) break;
2503
2504 case 312:
2505 test_hash_speed("tgr192", sec, generic_hash_speed_template);
2506 if (mode > 300 && mode < 400) break;
2507
2508 case 313:
2509 test_hash_speed("sha224", sec, generic_hash_speed_template);
2510 if (mode > 300 && mode < 400) break;
2511
2512 case 314:
2513 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2514 if (mode > 300 && mode < 400) break;
2515
2516 case 315:
2517 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2518 if (mode > 300 && mode < 400) break;
2519
2520 case 316:
2521 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2522 if (mode > 300 && mode < 400) break;
2523
2524 case 317:
2525 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2526 if (mode > 300 && mode < 400) break;
2527
2528 case 399:
2529 break;
2530
2531 case 1000:
2532 test_available();
2533 break;
2534 }
2535 }
2536
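/*
 * Allocate one page for each tvmem, xbuf and axbuf slot, run the tests
 * selected by 'mode', then free everything and return -EAGAIN so the
 * module does not stay loaded (the error labels double as the normal
 * cleanup path).
 */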
2537 static int __init tcrypt_mod_init(void)
2538 {
2539 int err = -ENOMEM;
2540 int i;
2541
2542 for (i = 0; i < TVMEMSIZE; i++) {
2543 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2544 if (!tvmem[i])
2545 goto err_free_tv;
2546 }
2547
2548 for (i = 0; i < XBUFSIZE; i++) {
2549 xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
2550 if (!xbuf[i])
2551 goto err_free_xbuf;
2552 }
2553
2554 for (i = 0; i < XBUFSIZE; i++) {
2555 axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
2556 if (!axbuf[i])
2557 goto err_free_axbuf;
2558 }
2559
2560 do_test(mode);
2561
2562 /* We intentionally return -EAGAIN so that the module is not
2563 * kept loaded. It does all of its work from init() and offers
2564 * no runtime functionality, so there is no need to keep it in
2565 * memory.
2566 * -- mludvig
2567 */
2568 err = -EAGAIN;
2569
2570 err_free_axbuf:
2571 for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
2572 free_page((unsigned long)axbuf[i]);
2573 err_free_xbuf:
2574 for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
2575 free_page((unsigned long)xbuf[i]);
2576 err_free_tv:
2577 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2578 free_page((unsigned long)tvmem[i]);
2579
2580 return err;
2581 }
2582
2583 /*
2584 * If an init function is provided, an exit function must also be provided
2585 * to allow module unload.
2586 */
2587 static void __exit tcrypt_mod_fini(void) { }
2588
2589 module_init(tcrypt_mod_init);
2590 module_exit(tcrypt_mod_fini);
2591
2592 module_param(mode, int, 0);
2593 module_param(sec, uint, 0);
2594 MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2595 "(defaults to zero which uses CPU cycles instead)");
2596
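/*
 * Example usage (module name and exact invocation may vary by build):
 * "modprobe tcrypt mode=10" runs the AES correctness tests, and
 * "modprobe tcrypt mode=200 sec=1" runs a one-second AES speed test.
 * The module intentionally fails to stay loaded once the tests finish.
 */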
2597 MODULE_LICENSE("GPL");
2598 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2599 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");