crypto: tcrypt - Abort and only log if there is an error
crypto/tcrypt.c
1 /*
2 * Quick & dirty crypto testing module.
3 *
4 * This will only exist until we have a better testing mechanism
5 * (e.g. a char device).
6 *
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
9 * Copyright (c) 2007 Nokia Siemens Networks
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17
18 #include <crypto/hash.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/module.h>
22 #include <linux/mm.h>
23 #include <linux/slab.h>
24 #include <linux/scatterlist.h>
25 #include <linux/string.h>
26 #include <linux/crypto.h>
27 #include <linux/moduleparam.h>
28 #include <linux/jiffies.h>
29 #include <linux/timex.h>
30 #include <linux/interrupt.h>
31 #include "tcrypt.h"
32
33 /*
34 * Need slab memory for testing (size in number of pages).
35 */
36 #define TVMEMSIZE 4
37 #define XBUFSIZE 8
38
39 /*
40 * Indexes into the xbuf to simulate cross-page access.
41 */
42 #define IDX1 32
43 #define IDX2 32400
44 #define IDX3 1
45 #define IDX4 8193
46 #define IDX5 22222
47 #define IDX6 17101
48 #define IDX7 27333
49 #define IDX8 3000
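/*
 * IDX[k] >> PAGE_SHIFT selects which xbuf/axbuf page a chunk lands in and
 * offset_in_page(IDX[k]) gives the offset inside that page, so the chunked
 * tests can start buffers at odd offsets and straddle page boundaries.
 */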
50
51 /*
52 * Used by test_cipher()
53 */
54 #define ENCRYPT 1
55 #define DECRYPT 0
56
57 struct tcrypt_result {
58 struct completion completion;
59 int err;
60 };
61
62 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
63
64 /*
65 * Used by test_cipher_speed()
66 */
67 static unsigned int sec;
68
69 static int mode;
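/*
 * xbuf: pages used as input buffers by the hash, cipher and AEAD tests;
 * axbuf: pages used for AEAD associated data;
 * tvmem: pages used by the speed tests.
 */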
70 static char *xbuf[XBUFSIZE];
71 static char *axbuf[XBUFSIZE];
72 static char *tvmem[TVMEMSIZE];
73
74 static char *check[] = {
75 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
76 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
77 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
78 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
79 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
80 "lzo", "cts", NULL
81 };
82
83 static void hexdump(unsigned char *buf, unsigned int len)
84 {
85 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
86 16, 1,
87 buf, len, false);
88 }
89
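/*
 * Completion callback for asynchronous requests.  -EINPROGRESS only
 * signals that a backlogged request has started; the real completion
 * follows later, so only then record the result and wake the waiter.
 */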
90 static void tcrypt_complete(struct crypto_async_request *req, int err)
91 {
92 struct tcrypt_result *res = req->data;
93
94 if (err == -EINPROGRESS)
95 return;
96
97 res->err = err;
98 complete(&res->completion);
99 }
100
101 static int test_hash(char *algo, struct hash_testvec *template,
102 unsigned int tcount)
103 {
104 unsigned int i, j, k, temp;
105 struct scatterlist sg[8];
106 char result[64];
107 struct crypto_ahash *tfm;
108 struct ahash_request *req;
109 struct tcrypt_result tresult;
110 int ret;
111 void *hash_buff;
112
113 init_completion(&tresult.completion);
114
115 tfm = crypto_alloc_ahash(algo, 0, 0);
116 if (IS_ERR(tfm)) {
117 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
118 "%ld\n", algo, PTR_ERR(tfm));
119 return PTR_ERR(tfm);
120 }
121
122 req = ahash_request_alloc(tfm, GFP_KERNEL);
123 if (!req) {
124 printk(KERN_ERR "alg: hash: Failed to allocate request for "
125 "%s\n", algo);
126 ret = -ENOMEM;
127 goto out_noreq;
128 }
129 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
130 tcrypt_complete, &tresult);
131
132 for (i = 0; i < tcount; i++) {
133 memset(result, 0, 64);
134
135 hash_buff = xbuf[0];
136
137 memcpy(hash_buff, template[i].plaintext, template[i].psize);
138 sg_init_one(&sg[0], hash_buff, template[i].psize);
139
140 if (template[i].ksize) {
141 crypto_ahash_clear_flags(tfm, ~0);
142 ret = crypto_ahash_setkey(tfm, template[i].key,
143 template[i].ksize);
144 if (ret) {
145 printk(KERN_ERR "alg: hash: setkey failed on "
146 "test %d for %s: ret=%d\n", i + 1, algo,
147 -ret);
148 goto out;
149 }
150 }
151
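		/*
		 * Issue the digest.  A return of 0 means it completed
		 * synchronously; -EINPROGRESS or -EBUSY mean the request was
		 * queued, so wait for tcrypt_complete() to post the final
		 * status before checking the result.
		 */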
152 ahash_request_set_crypt(req, sg, result, template[i].psize);
153 ret = crypto_ahash_digest(req);
154 switch (ret) {
155 case 0:
156 break;
157 case -EINPROGRESS:
158 case -EBUSY:
159 ret = wait_for_completion_interruptible(
160 &tresult.completion);
161 if (!ret && !(ret = tresult.err)) {
162 INIT_COMPLETION(tresult.completion);
163 break;
164 }
165 /* fall through */
166 default:
167 printk(KERN_ERR "alg: hash: digest failed on test %d "
168 "for %s: ret=%d\n", i + 1, algo, -ret);
169 goto out;
170 }
171
172 if (memcmp(result, template[i].digest,
173 crypto_ahash_digestsize(tfm))) {
174 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
175 i + 1, algo);
176 hexdump(result, crypto_ahash_digestsize(tfm));
177 ret = -EINVAL;
178 goto out;
179 }
180 }
181
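	/*
	 * Second pass: templates with np set are fed through a scatterlist
	 * split into np chunks sized by tap[], placed at the IDX offsets to
	 * exercise cross-page access.
	 */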
182 j = 0;
183 for (i = 0; i < tcount; i++) {
184 if (template[i].np) {
185 j++;
186 memset(result, 0, 64);
187
188 temp = 0;
189 sg_init_table(sg, template[i].np);
190 for (k = 0; k < template[i].np; k++) {
191 sg_set_buf(&sg[k],
192 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
193 offset_in_page(IDX[k]),
194 template[i].plaintext + temp,
195 template[i].tap[k]),
196 template[i].tap[k]);
197 temp += template[i].tap[k];
198 }
199
200 if (template[i].ksize) {
201 crypto_ahash_clear_flags(tfm, ~0);
202 ret = crypto_ahash_setkey(tfm, template[i].key,
203 template[i].ksize);
204
205 if (ret) {
206 printk(KERN_ERR "alg: hash: setkey "
207 "failed on chunking test %d "
208 "for %s: ret=%d\n", j, algo,
209 -ret);
210 goto out;
211 }
212 }
213
214 ahash_request_set_crypt(req, sg, result,
215 template[i].psize);
216 ret = crypto_ahash_digest(req);
217 switch (ret) {
218 case 0:
219 break;
220 case -EINPROGRESS:
221 case -EBUSY:
222 ret = wait_for_completion_interruptible(
223 &tresult.completion);
224 if (!ret && !(ret = tresult.err)) {
225 INIT_COMPLETION(tresult.completion);
226 break;
227 }
228 /* fall through */
229 default:
230 printk(KERN_ERR "alg: hash: digest failed "
231 "on chunking test %d for %s: "
232 "ret=%d\n", j, algo, -ret);
233 goto out;
234 }
235
236 if (memcmp(result, template[i].digest,
237 crypto_ahash_digestsize(tfm))) {
238 printk(KERN_ERR "alg: hash: Chunking test %d "
239 "failed for %s\n", j, algo);
240 hexdump(result, crypto_ahash_digestsize(tfm));
241 ret = -EINVAL;
242 goto out;
243 }
244 }
245 }
246
247 ret = 0;
248
249 out:
250 ahash_request_free(req);
251 out_noreq:
252 crypto_free_ahash(tfm);
253 return ret;
254 }
255
256 static int test_aead(char *algo, int enc, struct aead_testvec *template,
257 unsigned int tcount)
258 {
259 unsigned int i, j, k, n, temp;
260 int ret = 0;
261 char *q;
262 struct crypto_aead *tfm;
263 char *key;
264 struct aead_request *req;
265 struct scatterlist sg[8];
266 struct scatterlist asg[8];
267 const char *e;
268 struct tcrypt_result result;
269 unsigned int authsize;
270 void *input;
271 void *assoc;
272 char iv[MAX_IVLEN];
273
274 if (enc == ENCRYPT)
275 e = "encryption";
276 else
277 e = "decryption";
278
279 init_completion(&result.completion);
280
281 tfm = crypto_alloc_aead(algo, 0, 0);
282
283 if (IS_ERR(tfm)) {
284 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
285 "%ld\n", algo, PTR_ERR(tfm));
286 return PTR_ERR(tfm);
287 }
288
289 req = aead_request_alloc(tfm, GFP_KERNEL);
290 if (!req) {
291 printk(KERN_ERR "alg: aead: Failed to allocate request for "
292 "%s\n", algo);
293 ret = -ENOMEM;
294 goto out;
295 }
296
297 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
298 tcrypt_complete, &result);
299
300 for (i = 0, j = 0; i < tcount; i++) {
301 if (!template[i].np) {
302 j++;
303
304 			/* some templates have no input data but they will
305 			 * still touch the input buffer
306 			 */
307 input = xbuf[0];
308 assoc = axbuf[0];
309
310 memcpy(input, template[i].input, template[i].ilen);
311 memcpy(assoc, template[i].assoc, template[i].alen);
312 if (template[i].iv)
313 memcpy(iv, template[i].iv, MAX_IVLEN);
314 else
315 memset(iv, 0, MAX_IVLEN);
316
317 crypto_aead_clear_flags(tfm, ~0);
318 if (template[i].wk)
319 crypto_aead_set_flags(
320 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
321
322 key = template[i].key;
323
324 ret = crypto_aead_setkey(tfm, key,
325 template[i].klen);
326 if (!ret == template[i].fail) {
327 printk(KERN_ERR "alg: aead: setkey failed on "
328 "test %d for %s: flags=%x\n", j, algo,
329 crypto_aead_get_flags(tfm));
330 goto out;
331 } else if (ret)
332 continue;
333
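			/*
			 * The expected result length differs from the input
			 * length by the authentication tag, so the absolute
			 * difference gives the tag size.
			 */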
334 authsize = abs(template[i].rlen - template[i].ilen);
335 ret = crypto_aead_setauthsize(tfm, authsize);
336 if (ret) {
337 printk(KERN_ERR "alg: aead: Failed to set "
338 "authsize to %u on test %d for %s\n",
339 authsize, j, algo);
340 goto out;
341 }
342
343 sg_init_one(&sg[0], input,
344 template[i].ilen + (enc ? authsize : 0));
345
346 sg_init_one(&asg[0], assoc, template[i].alen);
347
348 aead_request_set_crypt(req, sg, sg,
349 template[i].ilen, iv);
350
351 aead_request_set_assoc(req, asg, template[i].alen);
352
353 ret = enc ?
354 crypto_aead_encrypt(req) :
355 crypto_aead_decrypt(req);
356
357 switch (ret) {
358 case 0:
359 break;
360 case -EINPROGRESS:
361 case -EBUSY:
362 ret = wait_for_completion_interruptible(
363 &result.completion);
364 if (!ret && !(ret = result.err)) {
365 INIT_COMPLETION(result.completion);
366 break;
367 }
368 /* fall through */
369 default:
370 printk(KERN_ERR "alg: aead: %s failed on test "
371 "%d for %s: ret=%d\n", e, j, algo, -ret);
372 goto out;
373 }
374
375 q = input;
376 if (memcmp(q, template[i].result, template[i].rlen)) {
377 printk(KERN_ERR "alg: aead: Test %d failed on "
378 "%s for %s\n", j, e, algo);
379 hexdump(q, template[i].rlen);
380 ret = -EINVAL;
381 goto out;
382 }
383 }
384 }
385
386 for (i = 0, j = 0; i < tcount; i++) {
387 if (template[i].np) {
388 j++;
389
390 if (template[i].iv)
391 memcpy(iv, template[i].iv, MAX_IVLEN);
392 else
393 memset(iv, 0, MAX_IVLEN);
394
395 crypto_aead_clear_flags(tfm, ~0);
396 if (template[i].wk)
397 crypto_aead_set_flags(
398 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
399 key = template[i].key;
400
401 ret = crypto_aead_setkey(tfm, key, template[i].klen);
402 if (!ret == template[i].fail) {
403 printk(KERN_ERR "alg: aead: setkey failed on "
404 "chunk test %d for %s: flags=%x\n", j,
405 algo, crypto_aead_get_flags(tfm));
406 goto out;
407 } else if (ret)
408 continue;
409
410 authsize = abs(template[i].rlen - template[i].ilen);
411
412 ret = -EINVAL;
413 sg_init_table(sg, template[i].np);
414 for (k = 0, temp = 0; k < template[i].np; k++) {
415 if (WARN_ON(offset_in_page(IDX[k]) +
416 template[i].tap[k] > PAGE_SIZE))
417 goto out;
418
419 q = xbuf[IDX[k] >> PAGE_SHIFT] +
420 offset_in_page(IDX[k]);
421
422 memcpy(q, template[i].input + temp,
423 template[i].tap[k]);
424
425 n = template[i].tap[k];
426 if (k == template[i].np - 1 && enc)
427 n += authsize;
428 if (offset_in_page(q) + n < PAGE_SIZE)
429 q[n] = 0;
430
431 sg_set_buf(&sg[k], q, template[i].tap[k]);
432 temp += template[i].tap[k];
433 }
434
435 ret = crypto_aead_setauthsize(tfm, authsize);
436 if (ret) {
437 printk(KERN_ERR "alg: aead: Failed to set "
438 "authsize to %u on chunk test %d for "
439 "%s\n", authsize, j, algo);
440 goto out;
441 }
442
443 if (enc) {
444 if (WARN_ON(sg[k - 1].offset +
445 sg[k - 1].length + authsize >
446 PAGE_SIZE)) {
447 ret = -EINVAL;
448 goto out;
449 }
450
451 sg[k - 1].length += authsize;
452 }
453
454 sg_init_table(asg, template[i].anp);
455 for (k = 0, temp = 0; k < template[i].anp; k++) {
456 sg_set_buf(&asg[k],
457 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
458 offset_in_page(IDX[k]),
459 template[i].assoc + temp,
460 template[i].atap[k]),
461 template[i].atap[k]);
462 temp += template[i].atap[k];
463 }
464
465 aead_request_set_crypt(req, sg, sg,
466 template[i].ilen,
467 iv);
468
469 aead_request_set_assoc(req, asg, template[i].alen);
470
471 ret = enc ?
472 crypto_aead_encrypt(req) :
473 crypto_aead_decrypt(req);
474
475 switch (ret) {
476 case 0:
477 break;
478 case -EINPROGRESS:
479 case -EBUSY:
480 ret = wait_for_completion_interruptible(
481 &result.completion);
482 if (!ret && !(ret = result.err)) {
483 INIT_COMPLETION(result.completion);
484 break;
485 }
486 /* fall through */
487 default:
488 printk(KERN_ERR "alg: aead: %s failed on "
489 "chunk test %d for %s: ret=%d\n", e, j,
490 algo, -ret);
491 goto out;
492 }
493
494 ret = -EINVAL;
495 for (k = 0, temp = 0; k < template[i].np; k++) {
496 q = xbuf[IDX[k] >> PAGE_SHIFT] +
497 offset_in_page(IDX[k]);
498
499 n = template[i].tap[k];
500 if (k == template[i].np - 1)
501 n += enc ? authsize : -authsize;
502
503 if (memcmp(q, template[i].result + temp, n)) {
504 printk(KERN_ERR "alg: aead: Chunk "
505 "test %d failed on %s at page "
506 "%u for %s\n", j, e, k, algo);
507 hexdump(q, n);
508 goto out;
509 }
510
511 q += n;
512 if (k == template[i].np - 1 && !enc) {
513 if (memcmp(q, template[i].input +
514 temp + n, authsize))
515 n = authsize;
516 else
517 n = 0;
518 } else {
519 for (n = 0; offset_in_page(q + n) &&
520 q[n]; n++)
521 ;
522 }
523 if (n) {
524 printk(KERN_ERR "alg: aead: Result "
525 "buffer corruption in chunk "
526 "test %d on %s at page %u for "
527 "%s: %u bytes:\n", j, e, k,
528 algo, n);
529 hexdump(q, n);
530 goto out;
531 }
532
533 temp += template[i].tap[k];
534 }
535 }
536 }
537
538 ret = 0;
539
540 out:
541 	aead_request_free(req);
542 	crypto_free_aead(tfm);
543 return ret;
544 }
545
546 static int test_cipher(char *algo, int enc,
547 struct cipher_testvec *template, unsigned int tcount)
548 {
549 unsigned int i, j, k, n, temp;
550 int ret;
551 char *q;
552 struct crypto_ablkcipher *tfm;
553 struct ablkcipher_request *req;
554 struct scatterlist sg[8];
555 const char *e;
556 struct tcrypt_result result;
557 void *data;
558 char iv[MAX_IVLEN];
559
560 if (enc == ENCRYPT)
561 e = "encryption";
562 else
563 e = "decryption";
564
565 init_completion(&result.completion);
566 tfm = crypto_alloc_ablkcipher(algo, 0, 0);
567
568 if (IS_ERR(tfm)) {
569 printk(KERN_ERR "alg: cipher: Failed to load transform for "
570 "%s: %ld\n", algo, PTR_ERR(tfm));
571 return PTR_ERR(tfm);
572 }
573
574 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
575 if (!req) {
576 printk(KERN_ERR "alg: cipher: Failed to allocate request for "
577 "%s\n", algo);
578 ret = -ENOMEM;
579 goto out;
580 }
581
582 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
583 tcrypt_complete, &result);
584
585 j = 0;
586 for (i = 0; i < tcount; i++) {
587 if (template[i].iv)
588 memcpy(iv, template[i].iv, MAX_IVLEN);
589 else
590 memset(iv, 0, MAX_IVLEN);
591
592 if (!(template[i].np)) {
593 j++;
594
595 data = xbuf[0];
596 memcpy(data, template[i].input, template[i].ilen);
597
598 crypto_ablkcipher_clear_flags(tfm, ~0);
599 if (template[i].wk)
600 crypto_ablkcipher_set_flags(
601 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
602
603 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
604 template[i].klen);
605 if (!ret == template[i].fail) {
606 printk(KERN_ERR "alg: cipher: setkey failed "
607 "on test %d for %s: flags=%x\n", j,
608 algo, crypto_ablkcipher_get_flags(tfm));
609 goto out;
610 } else if (ret)
611 continue;
612
613 sg_init_one(&sg[0], data, template[i].ilen);
614
615 ablkcipher_request_set_crypt(req, sg, sg,
616 template[i].ilen, iv);
617 ret = enc ?
618 crypto_ablkcipher_encrypt(req) :
619 crypto_ablkcipher_decrypt(req);
620
621 switch (ret) {
622 case 0:
623 break;
624 case -EINPROGRESS:
625 case -EBUSY:
626 ret = wait_for_completion_interruptible(
627 &result.completion);
628 if (!ret && !((ret = result.err))) {
629 INIT_COMPLETION(result.completion);
630 break;
631 }
632 /* fall through */
633 default:
634 printk(KERN_ERR "alg: cipher: %s failed on "
635 "test %d for %s: ret=%d\n", e, j, algo,
636 -ret);
637 goto out;
638 }
639
640 q = data;
641 if (memcmp(q, template[i].result, template[i].rlen)) {
642 printk(KERN_ERR "alg: cipher: Test %d failed "
643 "on %s for %s\n", j, e, algo);
644 hexdump(q, template[i].rlen);
645 ret = -EINVAL;
646 goto out;
647 }
648 }
649 }
650
651 j = 0;
652 for (i = 0; i < tcount; i++) {
653
654 if (template[i].iv)
655 memcpy(iv, template[i].iv, MAX_IVLEN);
656 else
657 memset(iv, 0, MAX_IVLEN);
658
659 if (template[i].np) {
660 j++;
661
662 crypto_ablkcipher_clear_flags(tfm, ~0);
663 if (template[i].wk)
664 crypto_ablkcipher_set_flags(
665 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
666
667 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
668 template[i].klen);
669 if (!ret == template[i].fail) {
670 printk(KERN_ERR "alg: cipher: setkey failed "
671 "on chunk test %d for %s: flags=%x\n",
672 j, algo,
673 crypto_ablkcipher_get_flags(tfm));
674 goto out;
675 } else if (ret)
676 continue;
677
678 temp = 0;
679 ret = -EINVAL;
680 sg_init_table(sg, template[i].np);
681 for (k = 0; k < template[i].np; k++) {
682 if (WARN_ON(offset_in_page(IDX[k]) +
683 template[i].tap[k] > PAGE_SIZE))
684 goto out;
685
686 q = xbuf[IDX[k] >> PAGE_SHIFT] +
687 offset_in_page(IDX[k]);
688
689 memcpy(q, template[i].input + temp,
690 template[i].tap[k]);
691
692 if (offset_in_page(q) + template[i].tap[k] <
693 PAGE_SIZE)
694 q[template[i].tap[k]] = 0;
695
696 sg_set_buf(&sg[k], q, template[i].tap[k]);
697
698 temp += template[i].tap[k];
699 }
700
701 ablkcipher_request_set_crypt(req, sg, sg,
702 template[i].ilen, iv);
703
704 ret = enc ?
705 crypto_ablkcipher_encrypt(req) :
706 crypto_ablkcipher_decrypt(req);
707
708 switch (ret) {
709 case 0:
710 break;
711 case -EINPROGRESS:
712 case -EBUSY:
713 ret = wait_for_completion_interruptible(
714 &result.completion);
715 if (!ret && !((ret = result.err))) {
716 INIT_COMPLETION(result.completion);
717 break;
718 }
719 /* fall through */
720 default:
721 printk(KERN_ERR "alg: cipher: %s failed on "
722 "chunk test %d for %s: ret=%d\n", e, j,
723 algo, -ret);
724 goto out;
725 }
726
727 temp = 0;
728 ret = -EINVAL;
729 for (k = 0; k < template[i].np; k++) {
730 q = xbuf[IDX[k] >> PAGE_SHIFT] +
731 offset_in_page(IDX[k]);
732
733 if (memcmp(q, template[i].result + temp,
734 template[i].tap[k])) {
735 printk(KERN_ERR "alg: cipher: Chunk "
736 "test %d failed on %s at page "
737 "%u for %s\n", j, e, k, algo);
738 hexdump(q, template[i].tap[k]);
739 goto out;
740 }
741
742 q += template[i].tap[k];
743 for (n = 0; offset_in_page(q + n) && q[n]; n++)
744 ;
745 if (n) {
746 printk(KERN_ERR "alg: cipher: "
747 "Result buffer corruption in "
748 "chunk test %d on %s at page "
749 "%u for %s: %u bytes:\n", j, e,
750 k, algo, n);
751 hexdump(q, n);
752 goto out;
753 }
754 temp += template[i].tap[k];
755 }
756 }
757 }
758
759 ret = 0;
760
761 out:
762 	ablkcipher_request_free(req);
763 	crypto_free_ablkcipher(tfm);
764 return ret;
765 }
766
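/*
 * Speed tests run in one of two modes: with sec > 0 the operation is
 * repeated for roughly sec seconds of wall-clock time (jiffies), with
 * sec == 0 the cost of single operations is measured in CPU cycles
 * (4 warm-up runs, then the average over 8 timed runs).
 */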
767 static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
768 struct scatterlist *sg, int blen, int sec)
769 {
770 unsigned long start, end;
771 int bcount;
772 int ret;
773
774 for (start = jiffies, end = start + sec * HZ, bcount = 0;
775 time_before(jiffies, end); bcount++) {
776 if (enc)
777 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
778 else
779 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
780
781 if (ret)
782 return ret;
783 }
784
785 printk("%d operations in %d seconds (%ld bytes)\n",
786 bcount, sec, (long)bcount * blen);
787 return 0;
788 }
789
790 static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
791 struct scatterlist *sg, int blen)
792 {
793 unsigned long cycles = 0;
794 int ret = 0;
795 int i;
796
797 local_bh_disable();
798 local_irq_disable();
799
800 /* Warm-up run. */
801 for (i = 0; i < 4; i++) {
802 if (enc)
803 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
804 else
805 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
806
807 if (ret)
808 goto out;
809 }
810
811 /* The real thing. */
812 for (i = 0; i < 8; i++) {
813 cycles_t start, end;
814
815 start = get_cycles();
816 if (enc)
817 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
818 else
819 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
820 end = get_cycles();
821
822 if (ret)
823 goto out;
824
825 cycles += end - start;
826 }
827
828 out:
829 local_irq_enable();
830 local_bh_enable();
831
832 if (ret == 0)
833 printk("1 operation in %lu cycles (%d bytes)\n",
834 (cycles + 4) / 8, blen);
835
836 return ret;
837 }
838
839 static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
840
841 static void test_cipher_speed(char *algo, int enc, unsigned int sec,
842 struct cipher_testvec *template,
843 unsigned int tcount, u8 *keysize)
844 {
845 unsigned int ret, i, j, iv_len;
846 unsigned char *key, iv[128];
847 struct crypto_blkcipher *tfm;
848 struct blkcipher_desc desc;
849 const char *e;
850 u32 *b_size;
851
852 if (enc == ENCRYPT)
853 e = "encryption";
854 else
855 e = "decryption";
856
857 printk("\ntesting speed of %s %s\n", algo, e);
858
859 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
860
861 if (IS_ERR(tfm)) {
862 printk("failed to load transform for %s: %ld\n", algo,
863 PTR_ERR(tfm));
864 return;
865 }
866 desc.tfm = tfm;
867 desc.flags = 0;
868
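	/*
	 * For every key size in the template and every entry in
	 * block_sizes[], set up a scatterlist over tvmem and run one timed
	 * measurement.
	 */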
869 i = 0;
870 do {
871
872 b_size = block_sizes;
873 do {
874 struct scatterlist sg[TVMEMSIZE];
875
876 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
877 printk("template (%u) too big for "
878 "tvmem (%lu)\n", *keysize + *b_size,
879 TVMEMSIZE * PAGE_SIZE);
880 goto out;
881 }
882
883 printk("test %u (%d bit key, %d byte blocks): ", i,
884 *keysize * 8, *b_size);
885
886 memset(tvmem[0], 0xff, PAGE_SIZE);
887
888 /* set key, plain text and IV */
889 key = (unsigned char *)tvmem[0];
890 for (j = 0; j < tcount; j++) {
891 if (template[j].klen == *keysize) {
892 key = template[j].key;
893 break;
894 }
895 }
896
897 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
898 if (ret) {
899 printk("setkey() failed flags=%x\n",
900 crypto_blkcipher_get_flags(tfm));
901 goto out;
902 }
903
904 sg_init_table(sg, TVMEMSIZE);
905 sg_set_buf(sg, tvmem[0] + *keysize,
906 PAGE_SIZE - *keysize);
907 for (j = 1; j < TVMEMSIZE; j++) {
908 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
909 memset (tvmem[j], 0xff, PAGE_SIZE);
910 }
911
912 iv_len = crypto_blkcipher_ivsize(tfm);
913 if (iv_len) {
914 memset(&iv, 0xff, iv_len);
915 crypto_blkcipher_set_iv(tfm, iv, iv_len);
916 }
917
918 if (sec)
919 ret = test_cipher_jiffies(&desc, enc, sg,
920 *b_size, sec);
921 else
922 ret = test_cipher_cycles(&desc, enc, sg,
923 *b_size);
924
925 if (ret) {
926 printk("%s() failed flags=%x\n", e, desc.flags);
927 break;
928 }
929 b_size++;
930 i++;
931 } while (*b_size);
932 keysize++;
933 } while (*keysize);
934
935 out:
936 crypto_free_blkcipher(tfm);
937 }
938
939 static int test_hash_jiffies_digest(struct hash_desc *desc,
940 struct scatterlist *sg, int blen,
941 char *out, int sec)
942 {
943 unsigned long start, end;
944 int bcount;
945 int ret;
946
947 for (start = jiffies, end = start + sec * HZ, bcount = 0;
948 time_before(jiffies, end); bcount++) {
949 ret = crypto_hash_digest(desc, sg, blen, out);
950 if (ret)
951 return ret;
952 }
953
954 printk("%6u opers/sec, %9lu bytes/sec\n",
955 bcount / sec, ((long)bcount * blen) / sec);
956
957 return 0;
958 }
959
960 static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
961 int blen, int plen, char *out, int sec)
962 {
963 unsigned long start, end;
964 int bcount, pcount;
965 int ret;
966
967 if (plen == blen)
968 return test_hash_jiffies_digest(desc, sg, blen, out, sec);
969
970 for (start = jiffies, end = start + sec * HZ, bcount = 0;
971 time_before(jiffies, end); bcount++) {
972 ret = crypto_hash_init(desc);
973 if (ret)
974 return ret;
975 for (pcount = 0; pcount < blen; pcount += plen) {
976 ret = crypto_hash_update(desc, sg, plen);
977 if (ret)
978 return ret;
979 }
980 /* we assume there is enough space in 'out' for the result */
981 ret = crypto_hash_final(desc, out);
982 if (ret)
983 return ret;
984 }
985
986 printk("%6u opers/sec, %9lu bytes/sec\n",
987 bcount / sec, ((long)bcount * blen) / sec);
988
989 return 0;
990 }
991
992 static int test_hash_cycles_digest(struct hash_desc *desc,
993 struct scatterlist *sg, int blen, char *out)
994 {
995 unsigned long cycles = 0;
996 int i;
997 int ret;
998
999 local_bh_disable();
1000 local_irq_disable();
1001
1002 /* Warm-up run. */
1003 for (i = 0; i < 4; i++) {
1004 ret = crypto_hash_digest(desc, sg, blen, out);
1005 if (ret)
1006 goto out;
1007 }
1008
1009 /* The real thing. */
1010 for (i = 0; i < 8; i++) {
1011 cycles_t start, end;
1012
1013 start = get_cycles();
1014
1015 ret = crypto_hash_digest(desc, sg, blen, out);
1016 if (ret)
1017 goto out;
1018
1019 end = get_cycles();
1020
1021 cycles += end - start;
1022 }
1023
1024 out:
1025 local_irq_enable();
1026 local_bh_enable();
1027
1028 if (ret)
1029 return ret;
1030
1031 printk("%6lu cycles/operation, %4lu cycles/byte\n",
1032 cycles / 8, cycles / (8 * blen));
1033
1034 return 0;
1035 }
1036
1037 static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
1038 int blen, int plen, char *out)
1039 {
1040 unsigned long cycles = 0;
1041 int i, pcount;
1042 int ret;
1043
1044 if (plen == blen)
1045 return test_hash_cycles_digest(desc, sg, blen, out);
1046
1047 local_bh_disable();
1048 local_irq_disable();
1049
1050 /* Warm-up run. */
1051 for (i = 0; i < 4; i++) {
1052 ret = crypto_hash_init(desc);
1053 if (ret)
1054 goto out;
1055 for (pcount = 0; pcount < blen; pcount += plen) {
1056 ret = crypto_hash_update(desc, sg, plen);
1057 if (ret)
1058 goto out;
1059 }
1060 ret = crypto_hash_final(desc, out);
1061 if (ret)
1062 goto out;
1063 }
1064
1065 /* The real thing. */
1066 for (i = 0; i < 8; i++) {
1067 cycles_t start, end;
1068
1069 start = get_cycles();
1070
1071 ret = crypto_hash_init(desc);
1072 if (ret)
1073 goto out;
1074 for (pcount = 0; pcount < blen; pcount += plen) {
1075 ret = crypto_hash_update(desc, sg, plen);
1076 if (ret)
1077 goto out;
1078 }
1079 ret = crypto_hash_final(desc, out);
1080 if (ret)
1081 goto out;
1082
1083 end = get_cycles();
1084
1085 cycles += end - start;
1086 }
1087
1088 out:
1089 local_irq_enable();
1090 local_bh_enable();
1091
1092 if (ret)
1093 return ret;
1094
1095 printk("%6lu cycles/operation, %4lu cycles/byte\n",
1096 cycles / 8, cycles / (8 * blen));
1097
1098 return 0;
1099 }
1100
1101 static void test_hash_speed(char *algo, unsigned int sec,
1102 struct hash_speed *speed)
1103 {
1104 struct scatterlist sg[TVMEMSIZE];
1105 struct crypto_hash *tfm;
1106 struct hash_desc desc;
1107 char output[1024];
1108 int i;
1109 int ret;
1110
1111 printk("\ntesting speed of %s\n", algo);
1112
1113 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
1114
1115 if (IS_ERR(tfm)) {
1116 printk("failed to load transform for %s: %ld\n", algo,
1117 PTR_ERR(tfm));
1118 return;
1119 }
1120
1121 desc.tfm = tfm;
1122 desc.flags = 0;
1123
1124 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
1125 printk("digestsize(%u) > outputbuffer(%zu)\n",
1126 crypto_hash_digestsize(tfm), sizeof(output));
1127 goto out;
1128 }
1129
1130 sg_init_table(sg, TVMEMSIZE);
1131 for (i = 0; i < TVMEMSIZE; i++) {
1132 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
1133 memset(tvmem[i], 0xff, PAGE_SIZE);
1134 }
1135
1136 for (i = 0; speed[i].blen != 0; i++) {
1137 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
1138 printk("template (%u) too big for tvmem (%lu)\n",
1139 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
1140 goto out;
1141 }
1142
1143 printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
1144 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
1145
1146 if (sec)
1147 ret = test_hash_jiffies(&desc, sg, speed[i].blen,
1148 speed[i].plen, output, sec);
1149 else
1150 ret = test_hash_cycles(&desc, sg, speed[i].blen,
1151 speed[i].plen, output);
1152
1153 if (ret) {
1154 printk("hashing failed ret=%d\n", ret);
1155 break;
1156 }
1157 }
1158
1159 out:
1160 crypto_free_hash(tfm);
1161 }
1162
1163 static int test_comp(char *algo, struct comp_testvec *ctemplate,
1164 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1165 {
1166 unsigned int i;
1167 char result[COMP_BUF_SIZE];
1168 struct crypto_comp *tfm;
1169 int ret;
1170
1171 tfm = crypto_alloc_comp(algo, 0, CRYPTO_ALG_ASYNC);
1172 if (IS_ERR(tfm)) {
1173 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1174 "%ld\n", algo, PTR_ERR(tfm));
1175 return PTR_ERR(tfm);
1176 }
1177
1178 for (i = 0; i < ctcount; i++) {
1179 int ilen, dlen = COMP_BUF_SIZE;
1180
1181 memset(result, 0, sizeof (result));
1182
1183 ilen = ctemplate[i].inlen;
1184 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1185 ilen, result, &dlen);
1186 if (ret) {
1187 printk(KERN_ERR "alg: comp: compression failed "
1188 "on test %d for %s: ret=%d\n", i + 1, algo,
1189 -ret);
1190 goto out;
1191 }
1192
1193 if (memcmp(result, ctemplate[i].output, dlen)) {
1194 printk(KERN_ERR "alg: comp: Compression test %d "
1195 "failed for %s\n", i + 1, algo);
1196 hexdump(result, dlen);
1197 ret = -EINVAL;
1198 goto out;
1199 }
1200 }
1201
1202 for (i = 0; i < dtcount; i++) {
1203 int ilen, ret, dlen = COMP_BUF_SIZE;
1204
1205 memset(result, 0, sizeof (result));
1206
1207 ilen = dtemplate[i].inlen;
1208 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1209 ilen, result, &dlen);
1210 if (ret) {
1211 printk(KERN_ERR "alg: comp: decompression failed "
1212 "on test %d for %s: ret=%d\n", i + 1, algo,
1213 -ret);
1214 goto out;
1215 }
1216
1217 if (memcmp(result, dtemplate[i].output, dlen)) {
1218 printk(KERN_ERR "alg: comp: Decompression test %d "
1219 "failed for %s\n", i + 1, algo);
1220 hexdump(result, dlen);
1221 ret = -EINVAL;
1222 goto out;
1223 }
1224 }
1225
1226 ret = 0;
1227
1228 out:
1229 crypto_free_comp(tfm);
1230 return ret;
1231 }
1232
1233 static void test_available(void)
1234 {
1235 char **name = check;
1236
1237 while (*name) {
1238 printk("alg %s ", *name);
1239 printk(crypto_has_alg(*name, 0, 0) ?
1240 "found\n" : "not found\n");
1241 name++;
1242 }
1243 }
1244
1245 static void do_test(void)
1246 {
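	/*
	 * mode 0 runs every correctness test; modes 1-108 select individual
	 * algorithms; 200-206 are cipher speed tests; 300-399 are hash speed
	 * tests; 1000 only reports which algorithms are available.
	 */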
1247 switch (mode) {
1248
1249 case 0:
1250 test_hash("md5", md5_tv_template, MD5_TEST_VECTORS);
1251
1252 test_hash("sha1", sha1_tv_template, SHA1_TEST_VECTORS);
1253
1254 //DES
1255 test_cipher("ecb(des)", ENCRYPT, des_enc_tv_template,
1256 DES_ENC_TEST_VECTORS);
1257 test_cipher("ecb(des)", DECRYPT, des_dec_tv_template,
1258 DES_DEC_TEST_VECTORS);
1259 test_cipher("cbc(des)", ENCRYPT, des_cbc_enc_tv_template,
1260 DES_CBC_ENC_TEST_VECTORS);
1261 test_cipher("cbc(des)", DECRYPT, des_cbc_dec_tv_template,
1262 DES_CBC_DEC_TEST_VECTORS);
1263
1264 //DES3_EDE
1265 test_cipher("ecb(des3_ede)", ENCRYPT, des3_ede_enc_tv_template,
1266 DES3_EDE_ENC_TEST_VECTORS);
1267 test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
1268 DES3_EDE_DEC_TEST_VECTORS);
1269
1270 test_cipher("cbc(des3_ede)", ENCRYPT,
1271 des3_ede_cbc_enc_tv_template,
1272 DES3_EDE_CBC_ENC_TEST_VECTORS);
1273
1274 test_cipher("cbc(des3_ede)", DECRYPT,
1275 des3_ede_cbc_dec_tv_template,
1276 DES3_EDE_CBC_DEC_TEST_VECTORS);
1277
1278 test_hash("md4", md4_tv_template, MD4_TEST_VECTORS);
1279
1280 test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
1281
1282 test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS);
1283
1284 //BLOWFISH
1285 test_cipher("ecb(blowfish)", ENCRYPT, bf_enc_tv_template,
1286 BF_ENC_TEST_VECTORS);
1287 test_cipher("ecb(blowfish)", DECRYPT, bf_dec_tv_template,
1288 BF_DEC_TEST_VECTORS);
1289 test_cipher("cbc(blowfish)", ENCRYPT, bf_cbc_enc_tv_template,
1290 BF_CBC_ENC_TEST_VECTORS);
1291 test_cipher("cbc(blowfish)", DECRYPT, bf_cbc_dec_tv_template,
1292 BF_CBC_DEC_TEST_VECTORS);
1293
1294 //TWOFISH
1295 test_cipher("ecb(twofish)", ENCRYPT, tf_enc_tv_template,
1296 TF_ENC_TEST_VECTORS);
1297 test_cipher("ecb(twofish)", DECRYPT, tf_dec_tv_template,
1298 TF_DEC_TEST_VECTORS);
1299 test_cipher("cbc(twofish)", ENCRYPT, tf_cbc_enc_tv_template,
1300 TF_CBC_ENC_TEST_VECTORS);
1301 test_cipher("cbc(twofish)", DECRYPT, tf_cbc_dec_tv_template,
1302 TF_CBC_DEC_TEST_VECTORS);
1303
1304 //SERPENT
1305 test_cipher("ecb(serpent)", ENCRYPT, serpent_enc_tv_template,
1306 SERPENT_ENC_TEST_VECTORS);
1307 test_cipher("ecb(serpent)", DECRYPT, serpent_dec_tv_template,
1308 SERPENT_DEC_TEST_VECTORS);
1309
1310 //TNEPRES
1311 test_cipher("ecb(tnepres)", ENCRYPT, tnepres_enc_tv_template,
1312 TNEPRES_ENC_TEST_VECTORS);
1313 test_cipher("ecb(tnepres)", DECRYPT, tnepres_dec_tv_template,
1314 TNEPRES_DEC_TEST_VECTORS);
1315
1316 //AES
1317 test_cipher("ecb(aes)", ENCRYPT, aes_enc_tv_template,
1318 AES_ENC_TEST_VECTORS);
1319 test_cipher("ecb(aes)", DECRYPT, aes_dec_tv_template,
1320 AES_DEC_TEST_VECTORS);
1321 test_cipher("cbc(aes)", ENCRYPT, aes_cbc_enc_tv_template,
1322 AES_CBC_ENC_TEST_VECTORS);
1323 test_cipher("cbc(aes)", DECRYPT, aes_cbc_dec_tv_template,
1324 AES_CBC_DEC_TEST_VECTORS);
1325 test_cipher("lrw(aes)", ENCRYPT, aes_lrw_enc_tv_template,
1326 AES_LRW_ENC_TEST_VECTORS);
1327 test_cipher("lrw(aes)", DECRYPT, aes_lrw_dec_tv_template,
1328 AES_LRW_DEC_TEST_VECTORS);
1329 test_cipher("xts(aes)", ENCRYPT, aes_xts_enc_tv_template,
1330 AES_XTS_ENC_TEST_VECTORS);
1331 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
1332 AES_XTS_DEC_TEST_VECTORS);
1333 test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
1334 AES_CTR_ENC_TEST_VECTORS);
1335 test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
1336 AES_CTR_DEC_TEST_VECTORS);
1337 test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
1338 AES_GCM_ENC_TEST_VECTORS);
1339 test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
1340 AES_GCM_DEC_TEST_VECTORS);
1341 test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template,
1342 AES_CCM_ENC_TEST_VECTORS);
1343 test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
1344 AES_CCM_DEC_TEST_VECTORS);
1345
1346 //CAST5
1347 test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
1348 CAST5_ENC_TEST_VECTORS);
1349 test_cipher("ecb(cast5)", DECRYPT, cast5_dec_tv_template,
1350 CAST5_DEC_TEST_VECTORS);
1351
1352 //CAST6
1353 test_cipher("ecb(cast6)", ENCRYPT, cast6_enc_tv_template,
1354 CAST6_ENC_TEST_VECTORS);
1355 test_cipher("ecb(cast6)", DECRYPT, cast6_dec_tv_template,
1356 CAST6_DEC_TEST_VECTORS);
1357
1358 //ARC4
1359 test_cipher("ecb(arc4)", ENCRYPT, arc4_enc_tv_template,
1360 ARC4_ENC_TEST_VECTORS);
1361 test_cipher("ecb(arc4)", DECRYPT, arc4_dec_tv_template,
1362 ARC4_DEC_TEST_VECTORS);
1363
1364 //TEA
1365 test_cipher("ecb(tea)", ENCRYPT, tea_enc_tv_template,
1366 TEA_ENC_TEST_VECTORS);
1367 test_cipher("ecb(tea)", DECRYPT, tea_dec_tv_template,
1368 TEA_DEC_TEST_VECTORS);
1369
1370
1371 //XTEA
1372 test_cipher("ecb(xtea)", ENCRYPT, xtea_enc_tv_template,
1373 XTEA_ENC_TEST_VECTORS);
1374 test_cipher("ecb(xtea)", DECRYPT, xtea_dec_tv_template,
1375 XTEA_DEC_TEST_VECTORS);
1376
1377 //KHAZAD
1378 test_cipher("ecb(khazad)", ENCRYPT, khazad_enc_tv_template,
1379 KHAZAD_ENC_TEST_VECTORS);
1380 test_cipher("ecb(khazad)", DECRYPT, khazad_dec_tv_template,
1381 KHAZAD_DEC_TEST_VECTORS);
1382
1383 //ANUBIS
1384 test_cipher("ecb(anubis)", ENCRYPT, anubis_enc_tv_template,
1385 ANUBIS_ENC_TEST_VECTORS);
1386 test_cipher("ecb(anubis)", DECRYPT, anubis_dec_tv_template,
1387 ANUBIS_DEC_TEST_VECTORS);
1388 test_cipher("cbc(anubis)", ENCRYPT, anubis_cbc_enc_tv_template,
1389 ANUBIS_CBC_ENC_TEST_VECTORS);
1390 test_cipher("cbc(anubis)", DECRYPT, anubis_cbc_dec_tv_template,
1391 ANUBIS_CBC_ENC_TEST_VECTORS);
1392
1393 //XETA
1394 test_cipher("ecb(xeta)", ENCRYPT, xeta_enc_tv_template,
1395 XETA_ENC_TEST_VECTORS);
1396 test_cipher("ecb(xeta)", DECRYPT, xeta_dec_tv_template,
1397 XETA_DEC_TEST_VECTORS);
1398
1399 //FCrypt
1400 test_cipher("pcbc(fcrypt)", ENCRYPT, fcrypt_pcbc_enc_tv_template,
1401 FCRYPT_ENC_TEST_VECTORS);
1402 test_cipher("pcbc(fcrypt)", DECRYPT, fcrypt_pcbc_dec_tv_template,
1403 FCRYPT_DEC_TEST_VECTORS);
1404
1405 //CAMELLIA
1406 test_cipher("ecb(camellia)", ENCRYPT,
1407 camellia_enc_tv_template,
1408 CAMELLIA_ENC_TEST_VECTORS);
1409 test_cipher("ecb(camellia)", DECRYPT,
1410 camellia_dec_tv_template,
1411 CAMELLIA_DEC_TEST_VECTORS);
1412 test_cipher("cbc(camellia)", ENCRYPT,
1413 camellia_cbc_enc_tv_template,
1414 CAMELLIA_CBC_ENC_TEST_VECTORS);
1415 test_cipher("cbc(camellia)", DECRYPT,
1416 camellia_cbc_dec_tv_template,
1417 CAMELLIA_CBC_DEC_TEST_VECTORS);
1418
1419 //SEED
1420 test_cipher("ecb(seed)", ENCRYPT, seed_enc_tv_template,
1421 SEED_ENC_TEST_VECTORS);
1422 test_cipher("ecb(seed)", DECRYPT, seed_dec_tv_template,
1423 SEED_DEC_TEST_VECTORS);
1424
1425 //CTS
1426 test_cipher("cts(cbc(aes))", ENCRYPT, cts_mode_enc_tv_template,
1427 CTS_MODE_ENC_TEST_VECTORS);
1428 test_cipher("cts(cbc(aes))", DECRYPT, cts_mode_dec_tv_template,
1429 CTS_MODE_DEC_TEST_VECTORS);
1430
1431 test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS);
1432 test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS);
1433 test_hash("wp512", wp512_tv_template, WP512_TEST_VECTORS);
1434 test_hash("wp384", wp384_tv_template, WP384_TEST_VECTORS);
1435 test_hash("wp256", wp256_tv_template, WP256_TEST_VECTORS);
1436 test_hash("tgr192", tgr192_tv_template, TGR192_TEST_VECTORS);
1437 test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS);
1438 test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS);
1439 test_comp("deflate", deflate_comp_tv_template,
1440 deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
1441 DEFLATE_DECOMP_TEST_VECTORS);
1442 test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template,
1443 LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
1444 test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS);
1445 test_hash("hmac(md5)", hmac_md5_tv_template,
1446 HMAC_MD5_TEST_VECTORS);
1447 test_hash("hmac(sha1)", hmac_sha1_tv_template,
1448 HMAC_SHA1_TEST_VECTORS);
1449 test_hash("hmac(sha224)", hmac_sha224_tv_template,
1450 HMAC_SHA224_TEST_VECTORS);
1451 test_hash("hmac(sha256)", hmac_sha256_tv_template,
1452 HMAC_SHA256_TEST_VECTORS);
1453 test_hash("hmac(sha384)", hmac_sha384_tv_template,
1454 HMAC_SHA384_TEST_VECTORS);
1455 test_hash("hmac(sha512)", hmac_sha512_tv_template,
1456 HMAC_SHA512_TEST_VECTORS);
1457
1458 test_hash("xcbc(aes)", aes_xcbc128_tv_template,
1459 XCBC_AES_TEST_VECTORS);
1460
1461 test_hash("michael_mic", michael_mic_tv_template, MICHAEL_MIC_TEST_VECTORS);
1462 break;
1463
1464 case 1:
1465 test_hash("md5", md5_tv_template, MD5_TEST_VECTORS);
1466 break;
1467
1468 case 2:
1469 test_hash("sha1", sha1_tv_template, SHA1_TEST_VECTORS);
1470 break;
1471
1472 case 3:
1473 test_cipher("ecb(des)", ENCRYPT, des_enc_tv_template,
1474 DES_ENC_TEST_VECTORS);
1475 test_cipher("ecb(des)", DECRYPT, des_dec_tv_template,
1476 DES_DEC_TEST_VECTORS);
1477 test_cipher("cbc(des)", ENCRYPT, des_cbc_enc_tv_template,
1478 DES_CBC_ENC_TEST_VECTORS);
1479 test_cipher("cbc(des)", DECRYPT, des_cbc_dec_tv_template,
1480 DES_CBC_DEC_TEST_VECTORS);
1481 break;
1482
1483 case 4:
1484 test_cipher("ecb(des3_ede)", ENCRYPT, des3_ede_enc_tv_template,
1485 DES3_EDE_ENC_TEST_VECTORS);
1486 test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
1487 DES3_EDE_DEC_TEST_VECTORS);
1488
1489 test_cipher("cbc(des3_ede)", ENCRYPT,
1490 des3_ede_cbc_enc_tv_template,
1491 DES3_EDE_CBC_ENC_TEST_VECTORS);
1492
1493 test_cipher("cbc(des3_ede)", DECRYPT,
1494 des3_ede_cbc_dec_tv_template,
1495 DES3_EDE_CBC_DEC_TEST_VECTORS);
1496 break;
1497
1498 case 5:
1499 test_hash("md4", md4_tv_template, MD4_TEST_VECTORS);
1500 break;
1501
1502 case 6:
1503 test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS);
1504 break;
1505
1506 case 7:
1507 test_cipher("ecb(blowfish)", ENCRYPT, bf_enc_tv_template,
1508 BF_ENC_TEST_VECTORS);
1509 test_cipher("ecb(blowfish)", DECRYPT, bf_dec_tv_template,
1510 BF_DEC_TEST_VECTORS);
1511 test_cipher("cbc(blowfish)", ENCRYPT, bf_cbc_enc_tv_template,
1512 BF_CBC_ENC_TEST_VECTORS);
1513 test_cipher("cbc(blowfish)", DECRYPT, bf_cbc_dec_tv_template,
1514 BF_CBC_DEC_TEST_VECTORS);
1515 break;
1516
1517 case 8:
1518 test_cipher("ecb(twofish)", ENCRYPT, tf_enc_tv_template,
1519 TF_ENC_TEST_VECTORS);
1520 test_cipher("ecb(twofish)", DECRYPT, tf_dec_tv_template,
1521 TF_DEC_TEST_VECTORS);
1522 test_cipher("cbc(twofish)", ENCRYPT, tf_cbc_enc_tv_template,
1523 TF_CBC_ENC_TEST_VECTORS);
1524 test_cipher("cbc(twofish)", DECRYPT, tf_cbc_dec_tv_template,
1525 TF_CBC_DEC_TEST_VECTORS);
1526 break;
1527
1528 case 9:
1529 test_cipher("ecb(serpent)", ENCRYPT, serpent_enc_tv_template,
1530 SERPENT_ENC_TEST_VECTORS);
1531 test_cipher("ecb(serpent)", DECRYPT, serpent_dec_tv_template,
1532 SERPENT_DEC_TEST_VECTORS);
1533 break;
1534
1535 case 10:
1536 test_cipher("ecb(aes)", ENCRYPT, aes_enc_tv_template,
1537 AES_ENC_TEST_VECTORS);
1538 test_cipher("ecb(aes)", DECRYPT, aes_dec_tv_template,
1539 AES_DEC_TEST_VECTORS);
1540 test_cipher("cbc(aes)", ENCRYPT, aes_cbc_enc_tv_template,
1541 AES_CBC_ENC_TEST_VECTORS);
1542 test_cipher("cbc(aes)", DECRYPT, aes_cbc_dec_tv_template,
1543 AES_CBC_DEC_TEST_VECTORS);
1544 test_cipher("lrw(aes)", ENCRYPT, aes_lrw_enc_tv_template,
1545 AES_LRW_ENC_TEST_VECTORS);
1546 test_cipher("lrw(aes)", DECRYPT, aes_lrw_dec_tv_template,
1547 AES_LRW_DEC_TEST_VECTORS);
1548 test_cipher("xts(aes)", ENCRYPT, aes_xts_enc_tv_template,
1549 AES_XTS_ENC_TEST_VECTORS);
1550 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
1551 AES_XTS_DEC_TEST_VECTORS);
1552 test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
1553 AES_CTR_ENC_TEST_VECTORS);
1554 test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
1555 AES_CTR_DEC_TEST_VECTORS);
1556 break;
1557
1558 case 11:
1559 test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS);
1560 break;
1561
1562 case 12:
1563 test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS);
1564 break;
1565
1566 case 13:
1567 test_comp("deflate", deflate_comp_tv_template,
1568 deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
1569 DEFLATE_DECOMP_TEST_VECTORS);
1570 break;
1571
1572 case 14:
1573 test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
1574 CAST5_ENC_TEST_VECTORS);
1575 test_cipher("ecb(cast5)", DECRYPT, cast5_dec_tv_template,
1576 CAST5_DEC_TEST_VECTORS);
1577 break;
1578
1579 case 15:
1580 test_cipher("ecb(cast6)", ENCRYPT, cast6_enc_tv_template,
1581 CAST6_ENC_TEST_VECTORS);
1582 test_cipher("ecb(cast6)", DECRYPT, cast6_dec_tv_template,
1583 CAST6_DEC_TEST_VECTORS);
1584 break;
1585
1586 case 16:
1587 test_cipher("ecb(arc4)", ENCRYPT, arc4_enc_tv_template,
1588 ARC4_ENC_TEST_VECTORS);
1589 test_cipher("ecb(arc4)", DECRYPT, arc4_dec_tv_template,
1590 ARC4_DEC_TEST_VECTORS);
1591 break;
1592
1593 case 17:
1594 test_hash("michael_mic", michael_mic_tv_template, MICHAEL_MIC_TEST_VECTORS);
1595 break;
1596
1597 case 18:
1598 test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS);
1599 break;
1600
1601 case 19:
1602 test_cipher("ecb(tea)", ENCRYPT, tea_enc_tv_template,
1603 TEA_ENC_TEST_VECTORS);
1604 test_cipher("ecb(tea)", DECRYPT, tea_dec_tv_template,
1605 TEA_DEC_TEST_VECTORS);
1606 break;
1607
1608 case 20:
1609 test_cipher("ecb(xtea)", ENCRYPT, xtea_enc_tv_template,
1610 XTEA_ENC_TEST_VECTORS);
1611 test_cipher("ecb(xtea)", DECRYPT, xtea_dec_tv_template,
1612 XTEA_DEC_TEST_VECTORS);
1613 break;
1614
1615 case 21:
1616 test_cipher("ecb(khazad)", ENCRYPT, khazad_enc_tv_template,
1617 KHAZAD_ENC_TEST_VECTORS);
1618 test_cipher("ecb(khazad)", DECRYPT, khazad_dec_tv_template,
1619 KHAZAD_DEC_TEST_VECTORS);
1620 break;
1621
1622 case 22:
1623 test_hash("wp512", wp512_tv_template, WP512_TEST_VECTORS);
1624 break;
1625
1626 case 23:
1627 test_hash("wp384", wp384_tv_template, WP384_TEST_VECTORS);
1628 break;
1629
1630 case 24:
1631 test_hash("wp256", wp256_tv_template, WP256_TEST_VECTORS);
1632 break;
1633
1634 case 25:
1635 test_cipher("ecb(tnepres)", ENCRYPT, tnepres_enc_tv_template,
1636 TNEPRES_ENC_TEST_VECTORS);
1637 test_cipher("ecb(tnepres)", DECRYPT, tnepres_dec_tv_template,
1638 TNEPRES_DEC_TEST_VECTORS);
1639 break;
1640
1641 case 26:
1642 test_cipher("ecb(anubis)", ENCRYPT, anubis_enc_tv_template,
1643 ANUBIS_ENC_TEST_VECTORS);
1644 test_cipher("ecb(anubis)", DECRYPT, anubis_dec_tv_template,
1645 ANUBIS_DEC_TEST_VECTORS);
1646 test_cipher("cbc(anubis)", ENCRYPT, anubis_cbc_enc_tv_template,
1647 ANUBIS_CBC_ENC_TEST_VECTORS);
1648 test_cipher("cbc(anubis)", DECRYPT, anubis_cbc_dec_tv_template,
1649 ANUBIS_CBC_ENC_TEST_VECTORS);
1650 break;
1651
1652 case 27:
1653 test_hash("tgr192", tgr192_tv_template, TGR192_TEST_VECTORS);
1654 break;
1655
1656 case 28:
1657
1658 test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS);
1659 break;
1660
1661 case 29:
1662 test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS);
1663 break;
1664
1665 case 30:
1666 test_cipher("ecb(xeta)", ENCRYPT, xeta_enc_tv_template,
1667 XETA_ENC_TEST_VECTORS);
1668 test_cipher("ecb(xeta)", DECRYPT, xeta_dec_tv_template,
1669 XETA_DEC_TEST_VECTORS);
1670 break;
1671
1672 case 31:
1673 test_cipher("pcbc(fcrypt)", ENCRYPT, fcrypt_pcbc_enc_tv_template,
1674 FCRYPT_ENC_TEST_VECTORS);
1675 test_cipher("pcbc(fcrypt)", DECRYPT, fcrypt_pcbc_dec_tv_template,
1676 FCRYPT_DEC_TEST_VECTORS);
1677 break;
1678
1679 case 32:
1680 test_cipher("ecb(camellia)", ENCRYPT,
1681 camellia_enc_tv_template,
1682 CAMELLIA_ENC_TEST_VECTORS);
1683 test_cipher("ecb(camellia)", DECRYPT,
1684 camellia_dec_tv_template,
1685 CAMELLIA_DEC_TEST_VECTORS);
1686 test_cipher("cbc(camellia)", ENCRYPT,
1687 camellia_cbc_enc_tv_template,
1688 CAMELLIA_CBC_ENC_TEST_VECTORS);
1689 test_cipher("cbc(camellia)", DECRYPT,
1690 camellia_cbc_dec_tv_template,
1691 CAMELLIA_CBC_DEC_TEST_VECTORS);
1692 break;
1693 case 33:
1694 test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
1695 break;
1696
1697 case 34:
1698 test_cipher("salsa20", ENCRYPT,
1699 salsa20_stream_enc_tv_template,
1700 SALSA20_STREAM_ENC_TEST_VECTORS);
1701 break;
1702
1703 case 35:
1704 test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
1705 AES_GCM_ENC_TEST_VECTORS);
1706 test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
1707 AES_GCM_DEC_TEST_VECTORS);
1708 break;
1709
1710 case 36:
1711 test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template,
1712 LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
1713 break;
1714
1715 case 37:
1716 test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template,
1717 AES_CCM_ENC_TEST_VECTORS);
1718 test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
1719 AES_CCM_DEC_TEST_VECTORS);
1720 break;
1721
1722 case 38:
1723 test_cipher("cts(cbc(aes))", ENCRYPT, cts_mode_enc_tv_template,
1724 CTS_MODE_ENC_TEST_VECTORS);
1725 test_cipher("cts(cbc(aes))", DECRYPT, cts_mode_dec_tv_template,
1726 CTS_MODE_DEC_TEST_VECTORS);
1727 break;
1728
1729 case 39:
1730 test_hash("rmd128", rmd128_tv_template, RMD128_TEST_VECTORS);
1731 break;
1732
1733 case 40:
1734 test_hash("rmd160", rmd160_tv_template, RMD160_TEST_VECTORS);
1735 break;
1736
1737 case 41:
1738 test_hash("rmd256", rmd256_tv_template, RMD256_TEST_VECTORS);
1739 break;
1740
1741 case 42:
1742 test_hash("rmd320", rmd320_tv_template, RMD320_TEST_VECTORS);
1743 break;
1744
1745 case 100:
1746 test_hash("hmac(md5)", hmac_md5_tv_template,
1747 HMAC_MD5_TEST_VECTORS);
1748 break;
1749
1750 case 101:
1751 test_hash("hmac(sha1)", hmac_sha1_tv_template,
1752 HMAC_SHA1_TEST_VECTORS);
1753 break;
1754
1755 case 102:
1756 test_hash("hmac(sha256)", hmac_sha256_tv_template,
1757 HMAC_SHA256_TEST_VECTORS);
1758 break;
1759
1760 case 103:
1761 test_hash("hmac(sha384)", hmac_sha384_tv_template,
1762 HMAC_SHA384_TEST_VECTORS);
1763 break;
1764
1765 case 104:
1766 test_hash("hmac(sha512)", hmac_sha512_tv_template,
1767 HMAC_SHA512_TEST_VECTORS);
1768 break;
1769
1770 case 105:
1771 test_hash("hmac(sha224)", hmac_sha224_tv_template,
1772 HMAC_SHA224_TEST_VECTORS);
1773 break;
1774
1775 case 106:
1776 test_hash("xcbc(aes)", aes_xcbc128_tv_template,
1777 XCBC_AES_TEST_VECTORS);
1778 break;
1779
1780 case 107:
1781 test_hash("hmac(rmd128)", hmac_rmd128_tv_template,
1782 HMAC_RMD128_TEST_VECTORS);
1783 break;
1784
1785 case 108:
1786 test_hash("hmac(rmd160)", hmac_rmd160_tv_template,
1787 HMAC_RMD160_TEST_VECTORS);
1788 break;
1789
1790 case 200:
1791 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1792 speed_template_16_24_32);
1793 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1794 speed_template_16_24_32);
1795 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1796 speed_template_16_24_32);
1797 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1798 speed_template_16_24_32);
1799 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1800 speed_template_32_40_48);
1801 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1802 speed_template_32_40_48);
1803 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1804 speed_template_32_48_64);
1805 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1806 speed_template_32_48_64);
1807 break;
1808
1809 case 201:
1810 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1811 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
1812 speed_template_24);
1813 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
1814 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
1815 speed_template_24);
1816 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1817 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
1818 speed_template_24);
1819 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
1820 des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS,
1821 speed_template_24);
1822 break;
1823
1824 case 202:
1825 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
1826 speed_template_16_24_32);
1827 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
1828 speed_template_16_24_32);
1829 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
1830 speed_template_16_24_32);
1831 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
1832 speed_template_16_24_32);
1833 break;
1834
1835 case 203:
1836 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
1837 speed_template_8_32);
1838 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
1839 speed_template_8_32);
1840 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
1841 speed_template_8_32);
1842 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
1843 speed_template_8_32);
1844 break;
1845
1846 case 204:
1847 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1848 speed_template_8);
1849 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
1850 speed_template_8);
1851 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
1852 speed_template_8);
1853 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
1854 speed_template_8);
1855 break;
1856
1857 case 205:
1858 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
1859 speed_template_16_24_32);
1860 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
1861 speed_template_16_24_32);
1862 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
1863 speed_template_16_24_32);
1864 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
1865 speed_template_16_24_32);
1866 break;
1867
1868 case 206:
1869 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
1870 speed_template_16_32);
1871 break;
1872
1873 case 300:
1874 /* fall through */
1875
1876 case 301:
1877 test_hash_speed("md4", sec, generic_hash_speed_template);
1878 if (mode > 300 && mode < 400) break;
1879
1880 case 302:
1881 test_hash_speed("md5", sec, generic_hash_speed_template);
1882 if (mode > 300 && mode < 400) break;
1883
1884 case 303:
1885 test_hash_speed("sha1", sec, generic_hash_speed_template);
1886 if (mode > 300 && mode < 400) break;
1887
1888 case 304:
1889 test_hash_speed("sha256", sec, generic_hash_speed_template);
1890 if (mode > 300 && mode < 400) break;
1891
1892 case 305:
1893 test_hash_speed("sha384", sec, generic_hash_speed_template);
1894 if (mode > 300 && mode < 400) break;
1895
1896 case 306:
1897 test_hash_speed("sha512", sec, generic_hash_speed_template);
1898 if (mode > 300 && mode < 400) break;
1899
1900 case 307:
1901 test_hash_speed("wp256", sec, generic_hash_speed_template);
1902 if (mode > 300 && mode < 400) break;
1903
1904 case 308:
1905 test_hash_speed("wp384", sec, generic_hash_speed_template);
1906 if (mode > 300 && mode < 400) break;
1907
1908 case 309:
1909 test_hash_speed("wp512", sec, generic_hash_speed_template);
1910 if (mode > 300 && mode < 400) break;
1911
1912 case 310:
1913 test_hash_speed("tgr128", sec, generic_hash_speed_template);
1914 if (mode > 300 && mode < 400) break;
1915
1916 case 311:
1917 test_hash_speed("tgr160", sec, generic_hash_speed_template);
1918 if (mode > 300 && mode < 400) break;
1919
1920 case 312:
1921 test_hash_speed("tgr192", sec, generic_hash_speed_template);
1922 if (mode > 300 && mode < 400) break;
1923
1924 case 313:
1925 test_hash_speed("sha224", sec, generic_hash_speed_template);
1926 if (mode > 300 && mode < 400) break;
1927
1928 case 314:
1929 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1930 if (mode > 300 && mode < 400) break;
1931
1932 case 315:
1933 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1934 if (mode > 300 && mode < 400) break;
1935
1936 case 316:
1937 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1938 if (mode > 300 && mode < 400) break;
1939
1940 case 317:
1941 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1942 if (mode > 300 && mode < 400) break;
1943
1944 case 399:
1945 break;
1946
1947 case 1000:
1948 test_available();
1949 break;
1950
1951 default:
1952 /* useful for debugging */
1953 printk("not testing anything\n");
1954 break;
1955 }
1956 }
1957
1958 static int __init tcrypt_mod_init(void)
1959 {
1960 int err = -ENOMEM;
1961 int i;
1962
1963 for (i = 0; i < TVMEMSIZE; i++) {
1964 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
1965 if (!tvmem[i])
1966 goto err_free_tv;
1967 }
1968
1969 for (i = 0; i < XBUFSIZE; i++) {
1970 xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
1971 if (!xbuf[i])
1972 goto err_free_xbuf;
1973 }
1974
1975 for (i = 0; i < XBUFSIZE; i++) {
1976 axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
1977 if (!axbuf[i])
1978 goto err_free_axbuf;
1979 }
1980
1981 do_test();
1982
1983 	/* We intentionally return -EAGAIN to prevent keeping the
1984 	 * module loaded. It does all its work from init() and
1985 	 * doesn't offer any runtime functionality, so there is no
1986 	 * need to keep it in memory.
1987 	 * -- mludvig
1988 	 */
1989 err = -EAGAIN;
1990
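	/*
	 * The labels below are reached on both the success and the error
	 * path: every page is freed unconditionally because nothing is
	 * needed after do_test() has run.
	 */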
1991 err_free_axbuf:
1992 for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
1993 free_page((unsigned long)axbuf[i]);
1994 err_free_xbuf:
1995 for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
1996 free_page((unsigned long)xbuf[i]);
1997 err_free_tv:
1998 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
1999 free_page((unsigned long)tvmem[i]);
2000
2001 return err;
2002 }
2003
2004 /*
2005 * If an init function is provided, an exit function must also be provided
2006 * to allow module unload.
2007 */
2008 static void __exit tcrypt_mod_fini(void) { }
2009
2010 module_init(tcrypt_mod_init);
2011 module_exit(tcrypt_mod_fini);
2012
2013 module_param(mode, int, 0);
2014 module_param(sec, uint, 0);
2015 MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2016 "(defaults to zero which uses CPU cycles instead)");
2017
2018 MODULE_LICENSE("GPL");
2019 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2020 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");