crypto: testmgr - white space removal on __test_hash
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/hash.h>
24 #include <linux/err.h>
25 #include <linux/module.h>
26 #include <linux/scatterlist.h>
27 #include <linux/slab.h>
28 #include <linux/string.h>
29 #include <crypto/rng.h>
30 #include <crypto/drbg.h>
31
32 #include "internal.h"
33
34 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
35
36 /* a perfect nop */
37 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
38 {
39 return 0;
40 }
41
42 #else
43
44 #include "testmgr.h"
45
46 /*
47 * Need slab memory for testing (size in number of pages).
48 */
49 #define XBUFSIZE 8
50
51 /*
52 * Indexes into the xbuf to simulate cross-page access.
53 */
54 #define IDX1 32
55 #define IDX2 32400
56 #define IDX3 1
57 #define IDX4 8193
58 #define IDX5 22222
59 #define IDX6 17101
60 #define IDX7 27333
61 #define IDX8 3000
62
63 /*
64 * Used by test_cipher()
65 */
66 #define ENCRYPT 1
67 #define DECRYPT 0
68
69 struct tcrypt_result {
70 struct completion completion;
71 int err;
72 };
73
74 struct aead_test_suite {
75 struct {
76 struct aead_testvec *vecs;
77 unsigned int count;
78 } enc, dec;
79 };
80
81 struct cipher_test_suite {
82 struct {
83 struct cipher_testvec *vecs;
84 unsigned int count;
85 } enc, dec;
86 };
87
88 struct comp_test_suite {
89 struct {
90 struct comp_testvec *vecs;
91 unsigned int count;
92 } comp, decomp;
93 };
94
95 struct pcomp_test_suite {
96 struct {
97 struct pcomp_testvec *vecs;
98 unsigned int count;
99 } comp, decomp;
100 };
101
102 struct hash_test_suite {
103 struct hash_testvec *vecs;
104 unsigned int count;
105 };
106
107 struct cprng_test_suite {
108 struct cprng_testvec *vecs;
109 unsigned int count;
110 };
111
112 struct drbg_test_suite {
113 struct drbg_testvec *vecs;
114 unsigned int count;
115 };
116
117 struct alg_test_desc {
118 const char *alg;
119 int (*test)(const struct alg_test_desc *desc, const char *driver,
120 u32 type, u32 mask);
121 int fips_allowed; /* set if alg is allowed in fips mode */
122
123 union {
124 struct aead_test_suite aead;
125 struct cipher_test_suite cipher;
126 struct comp_test_suite comp;
127 struct pcomp_test_suite pcomp;
128 struct hash_test_suite hash;
129 struct cprng_test_suite cprng;
130 struct drbg_test_suite drbg;
131 } suite;
132 };
133
134 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
135
136 static void hexdump(unsigned char *buf, unsigned int len)
137 {
138 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
139 16, 1,
140 buf, len, false);
141 }
142
143 static void tcrypt_complete(struct crypto_async_request *req, int err)
144 {
145 struct tcrypt_result *res = req->data;
146
147 if (err == -EINPROGRESS)
148 return;
149
150 res->err = err;
151 complete(&res->completion);
152 }
153
154 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
155 {
156 int i;
157
158 for (i = 0; i < XBUFSIZE; i++) {
159 buf[i] = (void *)__get_free_page(GFP_KERNEL);
160 if (!buf[i])
161 goto err_free_buf;
162 }
163
164 return 0;
165
166 err_free_buf:
167 while (i-- > 0)
168 free_page((unsigned long)buf[i]);
169
170 return -ENOMEM;
171 }
172
173 static void testmgr_free_buf(char *buf[XBUFSIZE])
174 {
175 int i;
176
177 for (i = 0; i < XBUFSIZE; i++)
178 free_page((unsigned long)buf[i]);
179 }
180
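/*
 * Helper for the asynchronous hash calls below: if the operation was
 * queued (-EINPROGRESS or -EBUSY), wait for tcrypt_complete() to fire,
 * pick up the final status from the tcrypt_result and re-arm the
 * completion for the next call; synchronous results pass through
 * unchanged.
 */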
181 static int do_one_async_hash_op(struct ahash_request *req,
182 struct tcrypt_result *tr,
183 int ret)
184 {
185 if (ret == -EINPROGRESS || ret == -EBUSY) {
186 ret = wait_for_completion_interruptible(&tr->completion);
187 if (!ret)
188 ret = tr->err;
189 reinit_completion(&tr->completion);
190 }
191 return ret;
192 }
193
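/*
 * Core hash test: run every vector in @template against @tfm.  The
 * first loop hashes each vector from a single buffer at @align_offset,
 * using either the one-shot digest or init/update/final depending on
 * @use_digest; the second loop (skipped for the alignment runs)
 * re-runs the vectors that specify a non-zero ->np with the plaintext
 * scattered across pages according to ->tap[], to exercise
 * scatter-gather handling.
 */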
194 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
195 unsigned int tcount, bool use_digest,
196 const int align_offset)
197 {
198 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
199 unsigned int i, j, k, temp;
200 struct scatterlist sg[8];
201 char *result;
202 char *key;
203 struct ahash_request *req;
204 struct tcrypt_result tresult;
205 void *hash_buff;
206 char *xbuf[XBUFSIZE];
207 int ret = -ENOMEM;
208
209 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
210 if (!result)
211 return ret;
212 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
213 if (!key)
214 goto out_nobuf;
215 if (testmgr_alloc_buf(xbuf))
216 goto out_nobuf;
217
218 init_completion(&tresult.completion);
219
220 req = ahash_request_alloc(tfm, GFP_KERNEL);
221 if (!req) {
222 printk(KERN_ERR "alg: hash: Failed to allocate request for "
223 "%s\n", algo);
224 goto out_noreq;
225 }
226 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
227 tcrypt_complete, &tresult);
228
229 j = 0;
230 for (i = 0; i < tcount; i++) {
231 if (template[i].np)
232 continue;
233
234 ret = -EINVAL;
235 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
236 goto out;
237
238 j++;
239 memset(result, 0, MAX_DIGEST_SIZE);
240
241 hash_buff = xbuf[0];
242 hash_buff += align_offset;
243
244 memcpy(hash_buff, template[i].plaintext, template[i].psize);
245 sg_init_one(&sg[0], hash_buff, template[i].psize);
246
247 if (template[i].ksize) {
248 crypto_ahash_clear_flags(tfm, ~0);
249 if (template[i].ksize > MAX_KEYLEN) {
250 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
251 j, algo, template[i].ksize, MAX_KEYLEN);
252 ret = -EINVAL;
253 goto out;
254 }
255 memcpy(key, template[i].key, template[i].ksize);
256 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
257 if (ret) {
258 printk(KERN_ERR "alg: hash: setkey failed on "
259 "test %d for %s: ret=%d\n", j, algo,
260 -ret);
261 goto out;
262 }
263 }
264
265 ahash_request_set_crypt(req, sg, result, template[i].psize);
266 if (use_digest) {
267 ret = do_one_async_hash_op(req, &tresult,
268 crypto_ahash_digest(req));
269 if (ret) {
270 pr_err("alg: hash: digest failed on test %d "
271 "for %s: ret=%d\n", j, algo, -ret);
272 goto out;
273 }
274 } else {
275 ret = do_one_async_hash_op(req, &tresult,
276 crypto_ahash_init(req));
277 if (ret) {
278 pr_err("alt: hash: init failed on test %d "
279 "for %s: ret=%d\n", j, algo, -ret);
280 goto out;
281 }
282 ret = do_one_async_hash_op(req, &tresult,
283 crypto_ahash_update(req));
284 if (ret) {
285 pr_err("alt: hash: update failed on test %d "
286 "for %s: ret=%d\n", j, algo, -ret);
287 goto out;
288 }
289 ret = do_one_async_hash_op(req, &tresult,
290 crypto_ahash_final(req));
291 if (ret) {
292 pr_err("alt: hash: final failed on test %d "
293 "for %s: ret=%d\n", j, algo, -ret);
294 goto out;
295 }
296 }
297
298 if (memcmp(result, template[i].digest,
299 crypto_ahash_digestsize(tfm))) {
300 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
301 j, algo);
302 hexdump(result, crypto_ahash_digestsize(tfm));
303 ret = -EINVAL;
304 goto out;
305 }
306 }
307
308 j = 0;
309 for (i = 0; i < tcount; i++) {
310 /* alignment tests are only done with contiguous buffers */
311 if (align_offset != 0)
312 break;
313
314 if (!template[i].np)
315 continue;
316
317 j++;
318 memset(result, 0, MAX_DIGEST_SIZE);
319
320 temp = 0;
321 sg_init_table(sg, template[i].np);
322 ret = -EINVAL;
323 for (k = 0; k < template[i].np; k++) {
324 if (WARN_ON(offset_in_page(IDX[k]) +
325 template[i].tap[k] > PAGE_SIZE))
326 goto out;
327 sg_set_buf(&sg[k],
328 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
329 offset_in_page(IDX[k]),
330 template[i].plaintext + temp,
331 template[i].tap[k]),
332 template[i].tap[k]);
333 temp += template[i].tap[k];
334 }
335
336 if (template[i].ksize) {
337 if (template[i].ksize > MAX_KEYLEN) {
338 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
339 j, algo, template[i].ksize, MAX_KEYLEN);
340 ret = -EINVAL;
341 goto out;
342 }
343 crypto_ahash_clear_flags(tfm, ~0);
344 memcpy(key, template[i].key, template[i].ksize);
345 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
346
347 if (ret) {
348 printk(KERN_ERR "alg: hash: setkey "
349 "failed on chunking test %d "
350 "for %s: ret=%d\n", j, algo, -ret);
351 goto out;
352 }
353 }
354
355 ahash_request_set_crypt(req, sg, result, template[i].psize);
356 ret = crypto_ahash_digest(req);
357 switch (ret) {
358 case 0:
359 break;
360 case -EINPROGRESS:
361 case -EBUSY:
362 ret = wait_for_completion_interruptible(
363 &tresult.completion);
364 if (!ret && !(ret = tresult.err)) {
365 reinit_completion(&tresult.completion);
366 break;
367 }
368 /* fall through */
369 default:
370 printk(KERN_ERR "alg: hash: digest failed "
371 "on chunking test %d for %s: "
372 "ret=%d\n", j, algo, -ret);
373 goto out;
374 }
375
376 if (memcmp(result, template[i].digest,
377 crypto_ahash_digestsize(tfm))) {
378 printk(KERN_ERR "alg: hash: Chunking test %d "
379 "failed for %s\n", j, algo);
380 hexdump(result, crypto_ahash_digestsize(tfm));
381 ret = -EINVAL;
382 goto out;
383 }
384 }
385
386 ret = 0;
387
388 out:
389 ahash_request_free(req);
390 out_noreq:
391 testmgr_free_buf(xbuf);
392 out_nobuf:
393 kfree(key);
394 kfree(result);
395 return ret;
396 }
397
398 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
399 unsigned int tcount, bool use_digest)
400 {
401 unsigned int alignmask;
402 int ret;
403
404 ret = __test_hash(tfm, template, tcount, use_digest, 0);
405 if (ret)
406 return ret;
407
408 /* test unaligned buffers, check with one byte offset */
409 ret = __test_hash(tfm, template, tcount, use_digest, 1);
410 if (ret)
411 return ret;
412
413 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
414 if (alignmask) {
415 /* Check if alignment mask for tfm is correctly set. */
416 ret = __test_hash(tfm, template, tcount, use_digest,
417 alignmask + 1);
418 if (ret)
419 return ret;
420 }
421
422 return 0;
423 }
424
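/*
 * Core AEAD test: encrypt or decrypt every vector in @template and
 * compare against the expected result.  The first loop uses single
 * linear buffers (optionally at @align_offset, optionally with a
 * separate destination when @diff_dst is set); the second loop splits
 * the input and associated data across pages via ->tap[]/->atap[] to
 * exercise scatter-gather handling.
 */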
425 static int __test_aead(struct crypto_aead *tfm, int enc,
426 struct aead_testvec *template, unsigned int tcount,
427 const bool diff_dst, const int align_offset)
428 {
429 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
430 unsigned int i, j, k, n, temp;
431 int ret = -ENOMEM;
432 char *q;
433 char *key;
434 struct aead_request *req;
435 struct scatterlist *sg;
436 struct scatterlist *asg;
437 struct scatterlist *sgout;
438 const char *e, *d;
439 struct tcrypt_result result;
440 unsigned int authsize;
441 void *input;
442 void *output;
443 void *assoc;
444 char *iv;
445 char *xbuf[XBUFSIZE];
446 char *xoutbuf[XBUFSIZE];
447 char *axbuf[XBUFSIZE];
448
449 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
450 if (!iv)
451 return ret;
452 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
453 if (!key)
454 goto out_noxbuf;
455 if (testmgr_alloc_buf(xbuf))
456 goto out_noxbuf;
457 if (testmgr_alloc_buf(axbuf))
458 goto out_noaxbuf;
459 if (diff_dst && testmgr_alloc_buf(xoutbuf))
460 goto out_nooutbuf;
461
462 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
463 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
464 if (!sg)
465 goto out_nosg;
466 asg = &sg[8];
467 sgout = &asg[8];
468
469 if (diff_dst)
470 d = "-ddst";
471 else
472 d = "";
473
474 if (enc == ENCRYPT)
475 e = "encryption";
476 else
477 e = "decryption";
478
479 init_completion(&result.completion);
480
481 req = aead_request_alloc(tfm, GFP_KERNEL);
482 if (!req) {
483 pr_err("alg: aead%s: Failed to allocate request for %s\n",
484 d, algo);
485 goto out;
486 }
487
488 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
489 tcrypt_complete, &result);
490
491 for (i = 0, j = 0; i < tcount; i++) {
492 if (template[i].np)
493 continue;
494
495 j++;
496
497 /* some templates have no input data but they will
498 * touch input
499 */
500 input = xbuf[0];
501 input += align_offset;
502 assoc = axbuf[0];
503
504 ret = -EINVAL;
505 if (WARN_ON(align_offset + template[i].ilen >
506 PAGE_SIZE || template[i].alen > PAGE_SIZE))
507 goto out;
508
509 memcpy(input, template[i].input, template[i].ilen);
510 memcpy(assoc, template[i].assoc, template[i].alen);
511 if (template[i].iv)
512 memcpy(iv, template[i].iv, MAX_IVLEN);
513 else
514 memset(iv, 0, MAX_IVLEN);
515
516 crypto_aead_clear_flags(tfm, ~0);
517 if (template[i].wk)
518 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
519
520 if (template[i].klen > MAX_KEYLEN) {
521 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
522 d, j, algo, template[i].klen,
523 MAX_KEYLEN);
524 ret = -EINVAL;
525 goto out;
526 }
527 memcpy(key, template[i].key, template[i].klen);
528
529 ret = crypto_aead_setkey(tfm, key, template[i].klen);
530 if (!ret == template[i].fail) {
531 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
532 d, j, algo, crypto_aead_get_flags(tfm));
533 goto out;
534 } else if (ret)
535 continue;
536
537 authsize = abs(template[i].rlen - template[i].ilen);
538 ret = crypto_aead_setauthsize(tfm, authsize);
539 if (ret) {
540 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
541 d, authsize, j, algo);
542 goto out;
543 }
544
545 if (diff_dst) {
546 output = xoutbuf[0];
547 output += align_offset;
548 sg_init_one(&sg[0], input, template[i].ilen);
549 sg_init_one(&sgout[0], output, template[i].rlen);
550 } else {
551 sg_init_one(&sg[0], input,
552 template[i].ilen + (enc ? authsize : 0));
553 output = input;
554 }
555
556 sg_init_one(&asg[0], assoc, template[i].alen);
557
558 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
559 template[i].ilen, iv);
560
561 aead_request_set_assoc(req, asg, template[i].alen);
562
563 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
564
565 switch (ret) {
566 case 0:
567 if (template[i].novrfy) {
568 /* verification was supposed to fail */
569 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
570 d, e, j, algo);
571 /* so really, we got a bad message */
572 ret = -EBADMSG;
573 goto out;
574 }
575 break;
576 case -EINPROGRESS:
577 case -EBUSY:
578 ret = wait_for_completion_interruptible(
579 &result.completion);
580 if (!ret && !(ret = result.err)) {
581 reinit_completion(&result.completion);
582 break;
583 }
584 case -EBADMSG:
585 if (template[i].novrfy)
586 /* verification failure was expected */
587 continue;
588 /* fall through */
589 default:
590 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
591 d, e, j, algo, -ret);
592 goto out;
593 }
594
595 q = output;
596 if (memcmp(q, template[i].result, template[i].rlen)) {
597 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
598 d, j, e, algo);
599 hexdump(q, template[i].rlen);
600 ret = -EINVAL;
601 goto out;
602 }
603 }
604
605 for (i = 0, j = 0; i < tcount; i++) {
606 /* alignment tests are only done with contiguous buffers */
607 if (align_offset != 0)
608 break;
609
610 if (!template[i].np)
611 continue;
612
613 j++;
614
615 if (template[i].iv)
616 memcpy(iv, template[i].iv, MAX_IVLEN);
617 else
618 memset(iv, 0, MAX_IVLEN);
619
620 crypto_aead_clear_flags(tfm, ~0);
621 if (template[i].wk)
622 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
623 if (template[i].klen > MAX_KEYLEN) {
624 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
625 d, j, algo, template[i].klen, MAX_KEYLEN);
626 ret = -EINVAL;
627 goto out;
628 }
629 memcpy(key, template[i].key, template[i].klen);
630
631 ret = crypto_aead_setkey(tfm, key, template[i].klen);
632 if (!ret == template[i].fail) {
633 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
634 d, j, algo, crypto_aead_get_flags(tfm));
635 goto out;
636 } else if (ret)
637 continue;
638
639 authsize = abs(template[i].rlen - template[i].ilen);
640
641 ret = -EINVAL;
642 sg_init_table(sg, template[i].np);
643 if (diff_dst)
644 sg_init_table(sgout, template[i].np);
645 for (k = 0, temp = 0; k < template[i].np; k++) {
646 if (WARN_ON(offset_in_page(IDX[k]) +
647 template[i].tap[k] > PAGE_SIZE))
648 goto out;
649
650 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
651 memcpy(q, template[i].input + temp, template[i].tap[k]);
652 sg_set_buf(&sg[k], q, template[i].tap[k]);
653
654 if (diff_dst) {
655 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
656 offset_in_page(IDX[k]);
657
658 memset(q, 0, template[i].tap[k]);
659
660 sg_set_buf(&sgout[k], q, template[i].tap[k]);
661 }
662
663 n = template[i].tap[k];
664 if (k == template[i].np - 1 && enc)
665 n += authsize;
666 if (offset_in_page(q) + n < PAGE_SIZE)
667 q[n] = 0;
668
669 temp += template[i].tap[k];
670 }
671
672 ret = crypto_aead_setauthsize(tfm, authsize);
673 if (ret) {
674 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
675 d, authsize, j, algo);
676 goto out;
677 }
678
679 if (enc) {
680 if (WARN_ON(sg[k - 1].offset +
681 sg[k - 1].length + authsize >
682 PAGE_SIZE)) {
683 ret = -EINVAL;
684 goto out;
685 }
686
687 if (diff_dst)
688 sgout[k - 1].length += authsize;
689 else
690 sg[k - 1].length += authsize;
691 }
692
693 sg_init_table(asg, template[i].anp);
694 ret = -EINVAL;
695 for (k = 0, temp = 0; k < template[i].anp; k++) {
696 if (WARN_ON(offset_in_page(IDX[k]) +
697 template[i].atap[k] > PAGE_SIZE))
698 goto out;
699 sg_set_buf(&asg[k],
700 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
701 offset_in_page(IDX[k]),
702 template[i].assoc + temp,
703 template[i].atap[k]),
704 template[i].atap[k]);
705 temp += template[i].atap[k];
706 }
707
708 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
709 template[i].ilen,
710 iv);
711
712 aead_request_set_assoc(req, asg, template[i].alen);
713
714 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
715
716 switch (ret) {
717 case 0:
718 if (template[i].novrfy) {
719 /* verification was supposed to fail */
720 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
721 d, e, j, algo);
722 /* so really, we got a bad message */
723 ret = -EBADMSG;
724 goto out;
725 }
726 break;
727 case -EINPROGRESS:
728 case -EBUSY:
729 ret = wait_for_completion_interruptible(
730 &result.completion);
731 if (!ret && !(ret = result.err)) {
732 reinit_completion(&result.completion);
733 break;
734 }
735 case -EBADMSG:
736 if (template[i].novrfy)
737 /* verification failure was expected */
738 continue;
739 /* fall through */
740 default:
741 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
742 d, e, j, algo, -ret);
743 goto out;
744 }
745
746 ret = -EINVAL;
747 for (k = 0, temp = 0; k < template[i].np; k++) {
748 if (diff_dst)
749 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
750 offset_in_page(IDX[k]);
751 else
752 q = xbuf[IDX[k] >> PAGE_SHIFT] +
753 offset_in_page(IDX[k]);
754
755 n = template[i].tap[k];
756 if (k == template[i].np - 1)
757 n += enc ? authsize : -authsize;
758
759 if (memcmp(q, template[i].result + temp, n)) {
760 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
761 d, j, e, k, algo);
762 hexdump(q, n);
763 goto out;
764 }
765
766 q += n;
767 if (k == template[i].np - 1 && !enc) {
768 if (!diff_dst &&
769 memcmp(q, template[i].input +
770 temp + n, authsize))
771 n = authsize;
772 else
773 n = 0;
774 } else {
775 for (n = 0; offset_in_page(q + n) && q[n]; n++)
776 ;
777 }
778 if (n) {
779 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
780 d, j, e, k, algo, n);
781 hexdump(q, n);
782 goto out;
783 }
784
785 temp += template[i].tap[k];
786 }
787 }
788
789 ret = 0;
790
791 out:
792 aead_request_free(req);
793 kfree(sg);
794 out_nosg:
795 if (diff_dst)
796 testmgr_free_buf(xoutbuf);
797 out_nooutbuf:
798 testmgr_free_buf(axbuf);
799 out_noaxbuf:
800 testmgr_free_buf(xbuf);
801 out_noxbuf:
802 kfree(key);
803 kfree(iv);
804 return ret;
805 }
806
807 static int test_aead(struct crypto_aead *tfm, int enc,
808 struct aead_testvec *template, unsigned int tcount)
809 {
810 unsigned int alignmask;
811 int ret;
812
813 /* test 'dst == src' case */
814 ret = __test_aead(tfm, enc, template, tcount, false, 0);
815 if (ret)
816 return ret;
817
818 /* test 'dst != src' case */
819 ret = __test_aead(tfm, enc, template, tcount, true, 0);
820 if (ret)
821 return ret;
822
823 /* test unaligned buffers, check with one byte offset */
824 ret = __test_aead(tfm, enc, template, tcount, true, 1);
825 if (ret)
826 return ret;
827
828 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
829 if (alignmask) {
830 /* Check if alignment mask for tfm is correctly set. */
831 ret = __test_aead(tfm, enc, template, tcount, true,
832 alignmask + 1);
833 if (ret)
834 return ret;
835 }
836
837 return 0;
838 }
839
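/*
 * Test the single-block cipher interface: each vector is copied into a
 * page, encrypted or decrypted in place one block at a time with
 * crypto_cipher_{en,de}crypt_one(), and compared against the expected
 * result.
 */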
840 static int test_cipher(struct crypto_cipher *tfm, int enc,
841 struct cipher_testvec *template, unsigned int tcount)
842 {
843 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
844 unsigned int i, j, k;
845 char *q;
846 const char *e;
847 void *data;
848 char *xbuf[XBUFSIZE];
849 int ret = -ENOMEM;
850
851 if (testmgr_alloc_buf(xbuf))
852 goto out_nobuf;
853
854 if (enc == ENCRYPT)
855 e = "encryption";
856 else
857 e = "decryption";
858
859 j = 0;
860 for (i = 0; i < tcount; i++) {
861 if (template[i].np)
862 continue;
863
864 j++;
865
866 ret = -EINVAL;
867 if (WARN_ON(template[i].ilen > PAGE_SIZE))
868 goto out;
869
870 data = xbuf[0];
871 memcpy(data, template[i].input, template[i].ilen);
872
873 crypto_cipher_clear_flags(tfm, ~0);
874 if (template[i].wk)
875 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
876
877 ret = crypto_cipher_setkey(tfm, template[i].key,
878 template[i].klen);
879 if (!ret == template[i].fail) {
880 printk(KERN_ERR "alg: cipher: setkey failed "
881 "on test %d for %s: flags=%x\n", j,
882 algo, crypto_cipher_get_flags(tfm));
883 goto out;
884 } else if (ret)
885 continue;
886
887 for (k = 0; k < template[i].ilen;
888 k += crypto_cipher_blocksize(tfm)) {
889 if (enc)
890 crypto_cipher_encrypt_one(tfm, data + k,
891 data + k);
892 else
893 crypto_cipher_decrypt_one(tfm, data + k,
894 data + k);
895 }
896
897 q = data;
898 if (memcmp(q, template[i].result, template[i].rlen)) {
899 printk(KERN_ERR "alg: cipher: Test %d failed "
900 "on %s for %s\n", j, e, algo);
901 hexdump(q, template[i].rlen);
902 ret = -EINVAL;
903 goto out;
904 }
905 }
906
907 ret = 0;
908
909 out:
910 testmgr_free_buf(xbuf);
911 out_nobuf:
912 return ret;
913 }
914
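/*
 * Core skcipher test, mirroring __test_aead: the first loop runs each
 * vector from a linear buffer (optionally misaligned and/or with a
 * separate destination buffer), the second loop re-runs the chunked
 * vectors with the data scattered across pages according to ->tap[].
 */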
915 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
916 struct cipher_testvec *template, unsigned int tcount,
917 const bool diff_dst, const int align_offset)
918 {
919 const char *algo =
920 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
921 unsigned int i, j, k, n, temp;
922 char *q;
923 struct ablkcipher_request *req;
924 struct scatterlist sg[8];
925 struct scatterlist sgout[8];
926 const char *e, *d;
927 struct tcrypt_result result;
928 void *data;
929 char iv[MAX_IVLEN];
930 char *xbuf[XBUFSIZE];
931 char *xoutbuf[XBUFSIZE];
932 int ret = -ENOMEM;
933
934 if (testmgr_alloc_buf(xbuf))
935 goto out_nobuf;
936
937 if (diff_dst && testmgr_alloc_buf(xoutbuf))
938 goto out_nooutbuf;
939
940 if (diff_dst)
941 d = "-ddst";
942 else
943 d = "";
944
945 if (enc == ENCRYPT)
946 e = "encryption";
947 else
948 e = "decryption";
949
950 init_completion(&result.completion);
951
952 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
953 if (!req) {
954 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
955 d, algo);
956 goto out;
957 }
958
959 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
960 tcrypt_complete, &result);
961
962 j = 0;
963 for (i = 0; i < tcount; i++) {
964 if (template[i].iv)
965 memcpy(iv, template[i].iv, MAX_IVLEN);
966 else
967 memset(iv, 0, MAX_IVLEN);
968
969 if (!(template[i].np) || (template[i].also_non_np)) {
970 j++;
971
972 ret = -EINVAL;
973 if (WARN_ON(align_offset + template[i].ilen >
974 PAGE_SIZE))
975 goto out;
976
977 data = xbuf[0];
978 data += align_offset;
979 memcpy(data, template[i].input, template[i].ilen);
980
981 crypto_ablkcipher_clear_flags(tfm, ~0);
982 if (template[i].wk)
983 crypto_ablkcipher_set_flags(
984 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
985
986 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
987 template[i].klen);
988 if (!ret == template[i].fail) {
989 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
990 d, j, algo,
991 crypto_ablkcipher_get_flags(tfm));
992 goto out;
993 } else if (ret)
994 continue;
995
996 sg_init_one(&sg[0], data, template[i].ilen);
997 if (diff_dst) {
998 data = xoutbuf[0];
999 data += align_offset;
1000 sg_init_one(&sgout[0], data, template[i].ilen);
1001 }
1002
1003 ablkcipher_request_set_crypt(req, sg,
1004 (diff_dst) ? sgout : sg,
1005 template[i].ilen, iv);
1006 ret = enc ?
1007 crypto_ablkcipher_encrypt(req) :
1008 crypto_ablkcipher_decrypt(req);
1009
1010 switch (ret) {
1011 case 0:
1012 break;
1013 case -EINPROGRESS:
1014 case -EBUSY:
1015 ret = wait_for_completion_interruptible(
1016 &result.completion);
1017 if (!ret && !((ret = result.err))) {
1018 reinit_completion(&result.completion);
1019 break;
1020 }
1021 /* fall through */
1022 default:
1023 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1024 d, e, j, algo, -ret);
1025 goto out;
1026 }
1027
1028 q = data;
1029 if (memcmp(q, template[i].result, template[i].rlen)) {
1030 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
1031 d, j, e, algo);
1032 hexdump(q, template[i].rlen);
1033 ret = -EINVAL;
1034 goto out;
1035 }
1036 }
1037 }
1038
1039 j = 0;
1040 for (i = 0; i < tcount; i++) {
1041 /* alignment tests are only done with contiguous buffers */
1042 if (align_offset != 0)
1043 break;
1044
1045 if (template[i].iv)
1046 memcpy(iv, template[i].iv, MAX_IVLEN);
1047 else
1048 memset(iv, 0, MAX_IVLEN);
1049
1050 if (template[i].np) {
1051 j++;
1052
1053 crypto_ablkcipher_clear_flags(tfm, ~0);
1054 if (template[i].wk)
1055 crypto_ablkcipher_set_flags(
1056 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1057
1058 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1059 template[i].klen);
1060 if (!ret == template[i].fail) {
1061 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1062 d, j, algo,
1063 crypto_ablkcipher_get_flags(tfm));
1064 goto out;
1065 } else if (ret)
1066 continue;
1067
1068 temp = 0;
1069 ret = -EINVAL;
1070 sg_init_table(sg, template[i].np);
1071 if (diff_dst)
1072 sg_init_table(sgout, template[i].np);
1073 for (k = 0; k < template[i].np; k++) {
1074 if (WARN_ON(offset_in_page(IDX[k]) +
1075 template[i].tap[k] > PAGE_SIZE))
1076 goto out;
1077
1078 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1079 offset_in_page(IDX[k]);
1080
1081 memcpy(q, template[i].input + temp,
1082 template[i].tap[k]);
1083
1084 if (offset_in_page(q) + template[i].tap[k] <
1085 PAGE_SIZE)
1086 q[template[i].tap[k]] = 0;
1087
1088 sg_set_buf(&sg[k], q, template[i].tap[k]);
1089 if (diff_dst) {
1090 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1091 offset_in_page(IDX[k]);
1092
1093 sg_set_buf(&sgout[k], q,
1094 template[i].tap[k]);
1095
1096 memset(q, 0, template[i].tap[k]);
1097 if (offset_in_page(q) +
1098 template[i].tap[k] < PAGE_SIZE)
1099 q[template[i].tap[k]] = 0;
1100 }
1101
1102 temp += template[i].tap[k];
1103 }
1104
1105 ablkcipher_request_set_crypt(req, sg,
1106 (diff_dst) ? sgout : sg,
1107 template[i].ilen, iv);
1108
1109 ret = enc ?
1110 crypto_ablkcipher_encrypt(req) :
1111 crypto_ablkcipher_decrypt(req);
1112
1113 switch (ret) {
1114 case 0:
1115 break;
1116 case -EINPROGRESS:
1117 case -EBUSY:
1118 ret = wait_for_completion_interruptible(
1119 &result.completion);
1120 if (!ret && !((ret = result.err))) {
1121 reinit_completion(&result.completion);
1122 break;
1123 }
1124 /* fall through */
1125 default:
1126 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1127 d, e, j, algo, -ret);
1128 goto out;
1129 }
1130
1131 temp = 0;
1132 ret = -EINVAL;
1133 for (k = 0; k < template[i].np; k++) {
1134 if (diff_dst)
1135 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1136 offset_in_page(IDX[k]);
1137 else
1138 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1139 offset_in_page(IDX[k]);
1140
1141 if (memcmp(q, template[i].result + temp,
1142 template[i].tap[k])) {
1143 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1144 d, j, e, k, algo);
1145 hexdump(q, template[i].tap[k]);
1146 goto out;
1147 }
1148
1149 q += template[i].tap[k];
1150 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1151 ;
1152 if (n) {
1153 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1154 d, j, e, k, algo, n);
1155 hexdump(q, n);
1156 goto out;
1157 }
1158 temp += template[i].tap[k];
1159 }
1160 }
1161 }
1162
1163 ret = 0;
1164
1165 out:
1166 ablkcipher_request_free(req);
1167 if (diff_dst)
1168 testmgr_free_buf(xoutbuf);
1169 out_nooutbuf:
1170 testmgr_free_buf(xbuf);
1171 out_nobuf:
1172 return ret;
1173 }
1174
1175 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1176 struct cipher_testvec *template, unsigned int tcount)
1177 {
1178 unsigned int alignmask;
1179 int ret;
1180
1181 /* test 'dst == src' case */
1182 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1183 if (ret)
1184 return ret;
1185
1186 /* test 'dst != src' case */
1187 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1188 if (ret)
1189 return ret;
1190
1191 /* test unaligned buffers, check with one byte offset */
1192 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1193 if (ret)
1194 return ret;
1195
1196 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1197 if (alignmask) {
1198 /* Check if alignment mask for tfm is correctly set. */
1199 ret = __test_skcipher(tfm, enc, template, tcount, true,
1200 alignmask + 1);
1201 if (ret)
1202 return ret;
1203 }
1204
1205 return 0;
1206 }
1207
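/*
 * Test the synchronous compression interface: compress each vector in
 * @ctemplate and decompress each vector in @dtemplate, checking both
 * the output length and the output data.
 */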
1208 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1209 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1210 {
1211 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1212 unsigned int i;
1213 char result[COMP_BUF_SIZE];
1214 int ret;
1215
1216 for (i = 0; i < ctcount; i++) {
1217 int ilen;
1218 unsigned int dlen = COMP_BUF_SIZE;
1219
1220 memset(result, 0, sizeof (result));
1221
1222 ilen = ctemplate[i].inlen;
1223 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1224 ilen, result, &dlen);
1225 if (ret) {
1226 printk(KERN_ERR "alg: comp: compression failed "
1227 "on test %d for %s: ret=%d\n", i + 1, algo,
1228 -ret);
1229 goto out;
1230 }
1231
1232 if (dlen != ctemplate[i].outlen) {
1233 printk(KERN_ERR "alg: comp: Compression test %d "
1234 "failed for %s: output len = %d\n", i + 1, algo,
1235 dlen);
1236 ret = -EINVAL;
1237 goto out;
1238 }
1239
1240 if (memcmp(result, ctemplate[i].output, dlen)) {
1241 printk(KERN_ERR "alg: comp: Compression test %d "
1242 "failed for %s\n", i + 1, algo);
1243 hexdump(result, dlen);
1244 ret = -EINVAL;
1245 goto out;
1246 }
1247 }
1248
1249 for (i = 0; i < dtcount; i++) {
1250 int ilen;
1251 unsigned int dlen = COMP_BUF_SIZE;
1252
1253 memset(result, 0, sizeof (result));
1254
1255 ilen = dtemplate[i].inlen;
1256 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1257 ilen, result, &dlen);
1258 if (ret) {
1259 printk(KERN_ERR "alg: comp: decompression failed "
1260 "on test %d for %s: ret=%d\n", i + 1, algo,
1261 -ret);
1262 goto out;
1263 }
1264
1265 if (dlen != dtemplate[i].outlen) {
1266 printk(KERN_ERR "alg: comp: Decompression test %d "
1267 "failed for %s: output len = %d\n", i + 1, algo,
1268 dlen);
1269 ret = -EINVAL;
1270 goto out;
1271 }
1272
1273 if (memcmp(result, dtemplate[i].output, dlen)) {
1274 printk(KERN_ERR "alg: comp: Decompression test %d "
1275 "failed for %s\n", i + 1, algo);
1276 hexdump(result, dlen);
1277 ret = -EINVAL;
1278 goto out;
1279 }
1280 }
1281
1282 ret = 0;
1283
1284 out:
1285 return ret;
1286 }
1287
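/*
 * Test the partial (streaming) compression interface: feed roughly
 * half of the input and output space, then the remainder, through
 * crypto_(de)compress_update()/_final(), and verify both the produced
 * length and the produced data.
 */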
1288 static int test_pcomp(struct crypto_pcomp *tfm,
1289 struct pcomp_testvec *ctemplate,
1290 struct pcomp_testvec *dtemplate, int ctcount,
1291 int dtcount)
1292 {
1293 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1294 unsigned int i;
1295 char result[COMP_BUF_SIZE];
1296 int res;
1297
1298 for (i = 0; i < ctcount; i++) {
1299 struct comp_request req;
1300 unsigned int produced = 0;
1301
1302 res = crypto_compress_setup(tfm, ctemplate[i].params,
1303 ctemplate[i].paramsize);
1304 if (res) {
1305 pr_err("alg: pcomp: compression setup failed on test "
1306 "%d for %s: error=%d\n", i + 1, algo, res);
1307 return res;
1308 }
1309
1310 res = crypto_compress_init(tfm);
1311 if (res) {
1312 pr_err("alg: pcomp: compression init failed on test "
1313 "%d for %s: error=%d\n", i + 1, algo, res);
1314 return res;
1315 }
1316
1317 memset(result, 0, sizeof(result));
1318
1319 req.next_in = ctemplate[i].input;
1320 req.avail_in = ctemplate[i].inlen / 2;
1321 req.next_out = result;
1322 req.avail_out = ctemplate[i].outlen / 2;
1323
1324 res = crypto_compress_update(tfm, &req);
1325 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1326 pr_err("alg: pcomp: compression update failed on test "
1327 "%d for %s: error=%d\n", i + 1, algo, res);
1328 return res;
1329 }
1330 if (res > 0)
1331 produced += res;
1332
1333 /* Add remaining input data */
1334 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1335
1336 res = crypto_compress_update(tfm, &req);
1337 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1338 pr_err("alg: pcomp: compression update failed on test "
1339 "%d for %s: error=%d\n", i + 1, algo, res);
1340 return res;
1341 }
1342 if (res > 0)
1343 produced += res;
1344
1345 /* Provide remaining output space */
1346 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1347
1348 res = crypto_compress_final(tfm, &req);
1349 if (res < 0) {
1350 pr_err("alg: pcomp: compression final failed on test "
1351 "%d for %s: error=%d\n", i + 1, algo, res);
1352 return res;
1353 }
1354 produced += res;
1355
1356 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1357 pr_err("alg: comp: Compression test %d failed for %s: "
1358 "output len = %d (expected %d)\n", i + 1, algo,
1359 COMP_BUF_SIZE - req.avail_out,
1360 ctemplate[i].outlen);
1361 return -EINVAL;
1362 }
1363
1364 if (produced != ctemplate[i].outlen) {
1365 pr_err("alg: comp: Compression test %d failed for %s: "
1366 "returned len = %u (expected %d)\n", i + 1,
1367 algo, produced, ctemplate[i].outlen);
1368 return -EINVAL;
1369 }
1370
1371 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1372 pr_err("alg: pcomp: Compression test %d failed for "
1373 "%s\n", i + 1, algo);
1374 hexdump(result, ctemplate[i].outlen);
1375 return -EINVAL;
1376 }
1377 }
1378
1379 for (i = 0; i < dtcount; i++) {
1380 struct comp_request req;
1381 unsigned int produced = 0;
1382
1383 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1384 dtemplate[i].paramsize);
1385 if (res) {
1386 pr_err("alg: pcomp: decompression setup failed on "
1387 "test %d for %s: error=%d\n", i + 1, algo, res);
1388 return res;
1389 }
1390
1391 res = crypto_decompress_init(tfm);
1392 if (res) {
1393 pr_err("alg: pcomp: decompression init failed on test "
1394 "%d for %s: error=%d\n", i + 1, algo, res);
1395 return res;
1396 }
1397
1398 memset(result, 0, sizeof(result));
1399
1400 req.next_in = dtemplate[i].input;
1401 req.avail_in = dtemplate[i].inlen / 2;
1402 req.next_out = result;
1403 req.avail_out = dtemplate[i].outlen / 2;
1404
1405 res = crypto_decompress_update(tfm, &req);
1406 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1407 pr_err("alg: pcomp: decompression update failed on "
1408 "test %d for %s: error=%d\n", i + 1, algo, res);
1409 return res;
1410 }
1411 if (res > 0)
1412 produced += res;
1413
1414 /* Add remaining input data */
1415 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1416
1417 res = crypto_decompress_update(tfm, &req);
1418 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1419 pr_err("alg: pcomp: decompression update failed on "
1420 "test %d for %s: error=%d\n", i + 1, algo, res);
1421 return res;
1422 }
1423 if (res > 0)
1424 produced += res;
1425
1426 /* Provide remaining output space */
1427 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1428
1429 res = crypto_decompress_final(tfm, &req);
1430 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1431 pr_err("alg: pcomp: decompression final failed on "
1432 "test %d for %s: error=%d\n", i + 1, algo, res);
1433 return res;
1434 }
1435 if (res > 0)
1436 produced += res;
1437
1438 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1439 pr_err("alg: comp: Decompression test %d failed for "
1440 "%s: output len = %d (expected %d)\n", i + 1,
1441 algo, COMP_BUF_SIZE - req.avail_out,
1442 dtemplate[i].outlen);
1443 return -EINVAL;
1444 }
1445
1446 if (produced != dtemplate[i].outlen) {
1447 pr_err("alg: comp: Decompression test %d failed for "
1448 "%s: returned len = %u (expected %d)\n", i + 1,
1449 algo, produced, dtemplate[i].outlen);
1450 return -EINVAL;
1451 }
1452
1453 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1454 pr_err("alg: pcomp: Decompression test %d failed for "
1455 "%s\n", i + 1, algo);
1456 hexdump(result, dtemplate[i].outlen);
1457 return -EINVAL;
1458 }
1459 }
1460
1461 return 0;
1462 }
1463
1464
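/*
 * Test a seeded CPRNG such as ansi_cprng: build the seed from the
 * vector's V, key and DT material, reset the RNG, request ->rlen bytes
 * ->loops times and compare the last block against the expected
 * output.
 */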
1465 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1466 unsigned int tcount)
1467 {
1468 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1469 int err = 0, i, j, seedsize;
1470 u8 *seed;
1471 char result[32];
1472
1473 seedsize = crypto_rng_seedsize(tfm);
1474
1475 seed = kmalloc(seedsize, GFP_KERNEL);
1476 if (!seed) {
1477 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1478 "for %s\n", algo);
1479 return -ENOMEM;
1480 }
1481
1482 for (i = 0; i < tcount; i++) {
1483 memset(result, 0, 32);
1484
1485 memcpy(seed, template[i].v, template[i].vlen);
1486 memcpy(seed + template[i].vlen, template[i].key,
1487 template[i].klen);
1488 memcpy(seed + template[i].vlen + template[i].klen,
1489 template[i].dt, template[i].dtlen);
1490
1491 err = crypto_rng_reset(tfm, seed, seedsize);
1492 if (err) {
1493 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1494 "for %s\n", algo);
1495 goto out;
1496 }
1497
1498 for (j = 0; j < template[i].loops; j++) {
1499 err = crypto_rng_get_bytes(tfm, result,
1500 template[i].rlen);
1501 if (err != template[i].rlen) {
1502 printk(KERN_ERR "alg: cprng: Failed to obtain "
1503 "the correct amount of random data for "
1504 "%s (requested %d, got %d)\n", algo,
1505 template[i].rlen, err);
1506 goto out;
1507 }
1508 }
1509
1510 err = memcmp(result, template[i].result,
1511 template[i].rlen);
1512 if (err) {
1513 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1514 i, algo);
1515 hexdump(result, template[i].rlen);
1516 err = -EINVAL;
1517 goto out;
1518 }
1519 }
1520
1521 out:
1522 kfree(seed);
1523 return err;
1524 }
1525
1526 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1527 u32 type, u32 mask)
1528 {
1529 struct crypto_aead *tfm;
1530 int err = 0;
1531
1532 tfm = crypto_alloc_aead(driver, type, mask);
1533 if (IS_ERR(tfm)) {
1534 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1535 "%ld\n", driver, PTR_ERR(tfm));
1536 return PTR_ERR(tfm);
1537 }
1538
1539 if (desc->suite.aead.enc.vecs) {
1540 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1541 desc->suite.aead.enc.count);
1542 if (err)
1543 goto out;
1544 }
1545
1546 if (!err && desc->suite.aead.dec.vecs)
1547 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1548 desc->suite.aead.dec.count);
1549
1550 out:
1551 crypto_free_aead(tfm);
1552 return err;
1553 }
1554
1555 static int alg_test_cipher(const struct alg_test_desc *desc,
1556 const char *driver, u32 type, u32 mask)
1557 {
1558 struct crypto_cipher *tfm;
1559 int err = 0;
1560
1561 tfm = crypto_alloc_cipher(driver, type, mask);
1562 if (IS_ERR(tfm)) {
1563 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1564 "%s: %ld\n", driver, PTR_ERR(tfm));
1565 return PTR_ERR(tfm);
1566 }
1567
1568 if (desc->suite.cipher.enc.vecs) {
1569 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1570 desc->suite.cipher.enc.count);
1571 if (err)
1572 goto out;
1573 }
1574
1575 if (desc->suite.cipher.dec.vecs)
1576 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1577 desc->suite.cipher.dec.count);
1578
1579 out:
1580 crypto_free_cipher(tfm);
1581 return err;
1582 }
1583
1584 static int alg_test_skcipher(const struct alg_test_desc *desc,
1585 const char *driver, u32 type, u32 mask)
1586 {
1587 struct crypto_ablkcipher *tfm;
1588 int err = 0;
1589
1590 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1591 if (IS_ERR(tfm)) {
1592 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1593 "%s: %ld\n", driver, PTR_ERR(tfm));
1594 return PTR_ERR(tfm);
1595 }
1596
1597 if (desc->suite.cipher.enc.vecs) {
1598 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1599 desc->suite.cipher.enc.count);
1600 if (err)
1601 goto out;
1602 }
1603
1604 if (desc->suite.cipher.dec.vecs)
1605 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1606 desc->suite.cipher.dec.count);
1607
1608 out:
1609 crypto_free_ablkcipher(tfm);
1610 return err;
1611 }
1612
1613 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1614 u32 type, u32 mask)
1615 {
1616 struct crypto_comp *tfm;
1617 int err;
1618
1619 tfm = crypto_alloc_comp(driver, type, mask);
1620 if (IS_ERR(tfm)) {
1621 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1622 "%ld\n", driver, PTR_ERR(tfm));
1623 return PTR_ERR(tfm);
1624 }
1625
1626 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1627 desc->suite.comp.decomp.vecs,
1628 desc->suite.comp.comp.count,
1629 desc->suite.comp.decomp.count);
1630
1631 crypto_free_comp(tfm);
1632 return err;
1633 }
1634
1635 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1636 u32 type, u32 mask)
1637 {
1638 struct crypto_pcomp *tfm;
1639 int err;
1640
1641 tfm = crypto_alloc_pcomp(driver, type, mask);
1642 if (IS_ERR(tfm)) {
1643 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1644 driver, PTR_ERR(tfm));
1645 return PTR_ERR(tfm);
1646 }
1647
1648 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1649 desc->suite.pcomp.decomp.vecs,
1650 desc->suite.pcomp.comp.count,
1651 desc->suite.pcomp.decomp.count);
1652
1653 crypto_free_pcomp(tfm);
1654 return err;
1655 }
1656
1657 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1658 u32 type, u32 mask)
1659 {
1660 struct crypto_ahash *tfm;
1661 int err;
1662
1663 tfm = crypto_alloc_ahash(driver, type, mask);
1664 if (IS_ERR(tfm)) {
1665 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1666 "%ld\n", driver, PTR_ERR(tfm));
1667 return PTR_ERR(tfm);
1668 }
1669
1670 err = test_hash(tfm, desc->suite.hash.vecs,
1671 desc->suite.hash.count, true);
1672 if (!err)
1673 err = test_hash(tfm, desc->suite.hash.vecs,
1674 desc->suite.hash.count, false);
1675
1676 crypto_free_ahash(tfm);
1677 return err;
1678 }
1679
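/*
 * crc32c gets the generic hash tests plus an extra shash check: seed
 * the descriptor context with a known partial CRC and verify that
 * final() returns its bitwise complement, i.e. that the partial state
 * is really carried in the descriptor context.
 */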
1680 static int alg_test_crc32c(const struct alg_test_desc *desc,
1681 const char *driver, u32 type, u32 mask)
1682 {
1683 struct crypto_shash *tfm;
1684 u32 val;
1685 int err;
1686
1687 err = alg_test_hash(desc, driver, type, mask);
1688 if (err)
1689 goto out;
1690
1691 tfm = crypto_alloc_shash(driver, type, mask);
1692 if (IS_ERR(tfm)) {
1693 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1694 "%ld\n", driver, PTR_ERR(tfm));
1695 err = PTR_ERR(tfm);
1696 goto out;
1697 }
1698
1699 do {
1700 struct {
1701 struct shash_desc shash;
1702 char ctx[crypto_shash_descsize(tfm)];
1703 } sdesc;
1704
1705 sdesc.shash.tfm = tfm;
1706 sdesc.shash.flags = 0;
1707
1708 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1709 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1710 if (err) {
1711 printk(KERN_ERR "alg: crc32c: Operation failed for "
1712 "%s: %d\n", driver, err);
1713 break;
1714 }
1715
1716 if (val != ~420553207) {
1717 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1718 "%d\n", driver, val);
1719 err = -EINVAL;
1720 }
1721 } while (0);
1722
1723 crypto_free_shash(tfm);
1724
1725 out:
1726 return err;
1727 }
1728
1729 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1730 u32 type, u32 mask)
1731 {
1732 struct crypto_rng *rng;
1733 int err;
1734
1735 rng = crypto_alloc_rng(driver, type, mask);
1736 if (IS_ERR(rng)) {
1737 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1738 "%ld\n", driver, PTR_ERR(rng));
1739 return PTR_ERR(rng);
1740 }
1741
1742 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1743
1744 crypto_free_rng(rng);
1745
1746 return err;
1747 }
1748
1749
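/*
 * Run one DRBG CAVS vector: seed the DRBG with the test entropy and
 * personalization string, generate two blocks of output (with
 * prediction resistance when @pr is set, pulling fresh test entropy
 * for each request), and compare the second block against the
 * expected data.
 */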
1750 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1751 const char *driver, u32 type, u32 mask)
1752 {
1753 int ret = -EAGAIN;
1754 struct crypto_rng *drng;
1755 struct drbg_test_data test_data;
1756 struct drbg_string addtl, pers, testentropy;
1757 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1758
1759 if (!buf)
1760 return -ENOMEM;
1761
1762 drng = crypto_alloc_rng(driver, type, mask);
1763 if (IS_ERR(drng)) {
1764 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1765 "%s\n", driver);
1766 kzfree(buf);
1767 return -ENOMEM;
1768 }
1769
1770 test_data.testentropy = &testentropy;
1771 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1772 drbg_string_fill(&pers, test->pers, test->perslen);
1773 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1774 if (ret) {
1775 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1776 goto outbuf;
1777 }
1778
1779 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1780 if (pr) {
1781 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1782 ret = crypto_drbg_get_bytes_addtl_test(drng,
1783 buf, test->expectedlen, &addtl, &test_data);
1784 } else {
1785 ret = crypto_drbg_get_bytes_addtl(drng,
1786 buf, test->expectedlen, &addtl);
1787 }
1788 if (ret <= 0) {
1789 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1790 "driver %s\n", driver);
1791 goto outbuf;
1792 }
1793
1794 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1795 if (pr) {
1796 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1797 ret = crypto_drbg_get_bytes_addtl_test(drng,
1798 buf, test->expectedlen, &addtl, &test_data);
1799 } else {
1800 ret = crypto_drbg_get_bytes_addtl(drng,
1801 buf, test->expectedlen, &addtl);
1802 }
1803 if (ret <= 0) {
1804 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1805 "driver %s\n", driver);
1806 goto outbuf;
1807 }
1808
1809 ret = memcmp(test->expected, buf, test->expectedlen);
1810
1811 outbuf:
1812 crypto_free_rng(drng);
1813 kzfree(buf);
1814 return ret;
1815 }
1816
1817
1818 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1819 u32 type, u32 mask)
1820 {
1821 int err = 0;
1822 int pr = 0;
1823 int i = 0;
1824 struct drbg_testvec *template = desc->suite.drbg.vecs;
1825 unsigned int tcount = desc->suite.drbg.count;
1826
1827 if (0 == memcmp(driver, "drbg_pr_", 8))
1828 pr = 1;
1829
1830 for (i = 0; i < tcount; i++) {
1831 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1832 if (err) {
1833 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1834 i, driver);
1835 err = -EINVAL;
1836 break;
1837 }
1838 }
1839 return err;
1840
1841 }
1842
1843 static int alg_test_null(const struct alg_test_desc *desc,
1844 const char *driver, u32 type, u32 mask)
1845 {
1846 return 0;
1847 }
1848
1849 /* Please keep this list sorted by algorithm name. */
1850 static const struct alg_test_desc alg_test_descs[] = {
1851 {
1852 .alg = "__cbc-cast5-avx",
1853 .test = alg_test_null,
1854 }, {
1855 .alg = "__cbc-cast6-avx",
1856 .test = alg_test_null,
1857 }, {
1858 .alg = "__cbc-serpent-avx",
1859 .test = alg_test_null,
1860 }, {
1861 .alg = "__cbc-serpent-avx2",
1862 .test = alg_test_null,
1863 }, {
1864 .alg = "__cbc-serpent-sse2",
1865 .test = alg_test_null,
1866 }, {
1867 .alg = "__cbc-twofish-avx",
1868 .test = alg_test_null,
1869 }, {
1870 .alg = "__driver-cbc-aes-aesni",
1871 .test = alg_test_null,
1872 .fips_allowed = 1,
1873 }, {
1874 .alg = "__driver-cbc-camellia-aesni",
1875 .test = alg_test_null,
1876 }, {
1877 .alg = "__driver-cbc-camellia-aesni-avx2",
1878 .test = alg_test_null,
1879 }, {
1880 .alg = "__driver-cbc-cast5-avx",
1881 .test = alg_test_null,
1882 }, {
1883 .alg = "__driver-cbc-cast6-avx",
1884 .test = alg_test_null,
1885 }, {
1886 .alg = "__driver-cbc-serpent-avx",
1887 .test = alg_test_null,
1888 }, {
1889 .alg = "__driver-cbc-serpent-avx2",
1890 .test = alg_test_null,
1891 }, {
1892 .alg = "__driver-cbc-serpent-sse2",
1893 .test = alg_test_null,
1894 }, {
1895 .alg = "__driver-cbc-twofish-avx",
1896 .test = alg_test_null,
1897 }, {
1898 .alg = "__driver-ecb-aes-aesni",
1899 .test = alg_test_null,
1900 .fips_allowed = 1,
1901 }, {
1902 .alg = "__driver-ecb-camellia-aesni",
1903 .test = alg_test_null,
1904 }, {
1905 .alg = "__driver-ecb-camellia-aesni-avx2",
1906 .test = alg_test_null,
1907 }, {
1908 .alg = "__driver-ecb-cast5-avx",
1909 .test = alg_test_null,
1910 }, {
1911 .alg = "__driver-ecb-cast6-avx",
1912 .test = alg_test_null,
1913 }, {
1914 .alg = "__driver-ecb-serpent-avx",
1915 .test = alg_test_null,
1916 }, {
1917 .alg = "__driver-ecb-serpent-avx2",
1918 .test = alg_test_null,
1919 }, {
1920 .alg = "__driver-ecb-serpent-sse2",
1921 .test = alg_test_null,
1922 }, {
1923 .alg = "__driver-ecb-twofish-avx",
1924 .test = alg_test_null,
1925 }, {
1926 .alg = "__ghash-pclmulqdqni",
1927 .test = alg_test_null,
1928 .fips_allowed = 1,
1929 }, {
1930 .alg = "ansi_cprng",
1931 .test = alg_test_cprng,
1932 .fips_allowed = 1,
1933 .suite = {
1934 .cprng = {
1935 .vecs = ansi_cprng_aes_tv_template,
1936 .count = ANSI_CPRNG_AES_TEST_VECTORS
1937 }
1938 }
1939 }, {
1940 .alg = "authenc(hmac(md5),ecb(cipher_null))",
1941 .test = alg_test_aead,
1942 .fips_allowed = 1,
1943 .suite = {
1944 .aead = {
1945 .enc = {
1946 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
1947 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1948 },
1949 .dec = {
1950 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
1951 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1952 }
1953 }
1954 }
1955 }, {
1956 .alg = "authenc(hmac(sha1),cbc(aes))",
1957 .test = alg_test_aead,
1958 .fips_allowed = 1,
1959 .suite = {
1960 .aead = {
1961 .enc = {
1962 .vecs =
1963 hmac_sha1_aes_cbc_enc_tv_temp,
1964 .count =
1965 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
1966 }
1967 }
1968 }
1969 }, {
1970 .alg = "authenc(hmac(sha1),cbc(des))",
1971 .test = alg_test_aead,
1972 .fips_allowed = 1,
1973 .suite = {
1974 .aead = {
1975 .enc = {
1976 .vecs =
1977 hmac_sha1_des_cbc_enc_tv_temp,
1978 .count =
1979 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
1980 }
1981 }
1982 }
1983 }, {
1984 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
1985 .test = alg_test_aead,
1986 .fips_allowed = 1,
1987 .suite = {
1988 .aead = {
1989 .enc = {
1990 .vecs =
1991 hmac_sha1_des3_ede_cbc_enc_tv_temp,
1992 .count =
1993 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
1994 }
1995 }
1996 }
1997 }, {
1998 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
1999 .test = alg_test_aead,
2000 .fips_allowed = 1,
2001 .suite = {
2002 .aead = {
2003 .enc = {
2004 .vecs =
2005 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2006 .count =
2007 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2008 },
2009 .dec = {
2010 .vecs =
2011 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2012 .count =
2013 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2014 }
2015 }
2016 }
2017 }, {
2018 .alg = "authenc(hmac(sha224),cbc(des))",
2019 .test = alg_test_aead,
2020 .fips_allowed = 1,
2021 .suite = {
2022 .aead = {
2023 .enc = {
2024 .vecs =
2025 hmac_sha224_des_cbc_enc_tv_temp,
2026 .count =
2027 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2028 }
2029 }
2030 }
2031 }, {
2032 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2033 .test = alg_test_aead,
2034 .fips_allowed = 1,
2035 .suite = {
2036 .aead = {
2037 .enc = {
2038 .vecs =
2039 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2040 .count =
2041 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2042 }
2043 }
2044 }
2045 }, {
2046 .alg = "authenc(hmac(sha256),cbc(aes))",
2047 .test = alg_test_aead,
2048 .fips_allowed = 1,
2049 .suite = {
2050 .aead = {
2051 .enc = {
2052 .vecs =
2053 hmac_sha256_aes_cbc_enc_tv_temp,
2054 .count =
2055 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2056 }
2057 }
2058 }
2059 }, {
2060 .alg = "authenc(hmac(sha256),cbc(des))",
2061 .test = alg_test_aead,
2062 .fips_allowed = 1,
2063 .suite = {
2064 .aead = {
2065 .enc = {
2066 .vecs =
2067 hmac_sha256_des_cbc_enc_tv_temp,
2068 .count =
2069 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2070 }
2071 }
2072 }
2073 }, {
2074 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2075 .test = alg_test_aead,
2076 .fips_allowed = 1,
2077 .suite = {
2078 .aead = {
2079 .enc = {
2080 .vecs =
2081 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2082 .count =
2083 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2084 }
2085 }
2086 }
2087 }, {
2088 .alg = "authenc(hmac(sha384),cbc(des))",
2089 .test = alg_test_aead,
2090 .fips_allowed = 1,
2091 .suite = {
2092 .aead = {
2093 .enc = {
2094 .vecs =
2095 hmac_sha384_des_cbc_enc_tv_temp,
2096 .count =
2097 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2098 }
2099 }
2100 }
2101 }, {
2102 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2103 .test = alg_test_aead,
2104 .fips_allowed = 1,
2105 .suite = {
2106 .aead = {
2107 .enc = {
2108 .vecs =
2109 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2110 .count =
2111 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2112 }
2113 }
2114 }
2115 }, {
2116 .alg = "authenc(hmac(sha512),cbc(aes))",
2117 .test = alg_test_aead,
2118 .fips_allowed = 1,
2119 .suite = {
2120 .aead = {
2121 .enc = {
2122 .vecs =
2123 hmac_sha512_aes_cbc_enc_tv_temp,
2124 .count =
2125 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2126 }
2127 }
2128 }
2129 }, {
2130 .alg = "authenc(hmac(sha512),cbc(des))",
2131 .test = alg_test_aead,
2132 .fips_allowed = 1,
2133 .suite = {
2134 .aead = {
2135 .enc = {
2136 .vecs =
2137 hmac_sha512_des_cbc_enc_tv_temp,
2138 .count =
2139 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2140 }
2141 }
2142 }
2143 }, {
2144 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2145 .test = alg_test_aead,
2146 .fips_allowed = 1,
2147 .suite = {
2148 .aead = {
2149 .enc = {
2150 .vecs =
2151 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2152 .count =
2153 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2154 }
2155 }
2156 }
2157 }, {
2158 .alg = "cbc(aes)",
2159 .test = alg_test_skcipher,
2160 .fips_allowed = 1,
2161 .suite = {
2162 .cipher = {
2163 .enc = {
2164 .vecs = aes_cbc_enc_tv_template,
2165 .count = AES_CBC_ENC_TEST_VECTORS
2166 },
2167 .dec = {
2168 .vecs = aes_cbc_dec_tv_template,
2169 .count = AES_CBC_DEC_TEST_VECTORS
2170 }
2171 }
2172 }
2173 }, {
2174 .alg = "cbc(anubis)",
2175 .test = alg_test_skcipher,
2176 .suite = {
2177 .cipher = {
2178 .enc = {
2179 .vecs = anubis_cbc_enc_tv_template,
2180 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2181 },
2182 .dec = {
2183 .vecs = anubis_cbc_dec_tv_template,
2184 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2185 }
2186 }
2187 }
2188 }, {
2189 .alg = "cbc(blowfish)",
2190 .test = alg_test_skcipher,
2191 .suite = {
2192 .cipher = {
2193 .enc = {
2194 .vecs = bf_cbc_enc_tv_template,
2195 .count = BF_CBC_ENC_TEST_VECTORS
2196 },
2197 .dec = {
2198 .vecs = bf_cbc_dec_tv_template,
2199 .count = BF_CBC_DEC_TEST_VECTORS
2200 }
2201 }
2202 }
2203 }, {
2204 .alg = "cbc(camellia)",
2205 .test = alg_test_skcipher,
2206 .suite = {
2207 .cipher = {
2208 .enc = {
2209 .vecs = camellia_cbc_enc_tv_template,
2210 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2211 },
2212 .dec = {
2213 .vecs = camellia_cbc_dec_tv_template,
2214 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2215 }
2216 }
2217 }
2218 }, {
2219 .alg = "cbc(cast5)",
2220 .test = alg_test_skcipher,
2221 .suite = {
2222 .cipher = {
2223 .enc = {
2224 .vecs = cast5_cbc_enc_tv_template,
2225 .count = CAST5_CBC_ENC_TEST_VECTORS
2226 },
2227 .dec = {
2228 .vecs = cast5_cbc_dec_tv_template,
2229 .count = CAST5_CBC_DEC_TEST_VECTORS
2230 }
2231 }
2232 }
2233 }, {
2234 .alg = "cbc(cast6)",
2235 .test = alg_test_skcipher,
2236 .suite = {
2237 .cipher = {
2238 .enc = {
2239 .vecs = cast6_cbc_enc_tv_template,
2240 .count = CAST6_CBC_ENC_TEST_VECTORS
2241 },
2242 .dec = {
2243 .vecs = cast6_cbc_dec_tv_template,
2244 .count = CAST6_CBC_DEC_TEST_VECTORS
2245 }
2246 }
2247 }
2248 }, {
2249 .alg = "cbc(des)",
2250 .test = alg_test_skcipher,
2251 .suite = {
2252 .cipher = {
2253 .enc = {
2254 .vecs = des_cbc_enc_tv_template,
2255 .count = DES_CBC_ENC_TEST_VECTORS
2256 },
2257 .dec = {
2258 .vecs = des_cbc_dec_tv_template,
2259 .count = DES_CBC_DEC_TEST_VECTORS
2260 }
2261 }
2262 }
2263 }, {
2264 .alg = "cbc(des3_ede)",
2265 .test = alg_test_skcipher,
2266 .fips_allowed = 1,
2267 .suite = {
2268 .cipher = {
2269 .enc = {
2270 .vecs = des3_ede_cbc_enc_tv_template,
2271 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2272 },
2273 .dec = {
2274 .vecs = des3_ede_cbc_dec_tv_template,
2275 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2276 }
2277 }
2278 }
2279 }, {
2280 .alg = "cbc(serpent)",
2281 .test = alg_test_skcipher,
2282 .suite = {
2283 .cipher = {
2284 .enc = {
2285 .vecs = serpent_cbc_enc_tv_template,
2286 .count = SERPENT_CBC_ENC_TEST_VECTORS
2287 },
2288 .dec = {
2289 .vecs = serpent_cbc_dec_tv_template,
2290 .count = SERPENT_CBC_DEC_TEST_VECTORS
2291 }
2292 }
2293 }
2294 }, {
2295 .alg = "cbc(twofish)",
2296 .test = alg_test_skcipher,
2297 .suite = {
2298 .cipher = {
2299 .enc = {
2300 .vecs = tf_cbc_enc_tv_template,
2301 .count = TF_CBC_ENC_TEST_VECTORS
2302 },
2303 .dec = {
2304 .vecs = tf_cbc_dec_tv_template,
2305 .count = TF_CBC_DEC_TEST_VECTORS
2306 }
2307 }
2308 }
2309 }, {
2310 .alg = "ccm(aes)",
2311 .test = alg_test_aead,
2312 .fips_allowed = 1,
2313 .suite = {
2314 .aead = {
2315 .enc = {
2316 .vecs = aes_ccm_enc_tv_template,
2317 .count = AES_CCM_ENC_TEST_VECTORS
2318 },
2319 .dec = {
2320 .vecs = aes_ccm_dec_tv_template,
2321 .count = AES_CCM_DEC_TEST_VECTORS
2322 }
2323 }
2324 }
2325 }, {
2326 .alg = "cmac(aes)",
2327 .test = alg_test_hash,
2328 .suite = {
2329 .hash = {
2330 .vecs = aes_cmac128_tv_template,
2331 .count = CMAC_AES_TEST_VECTORS
2332 }
2333 }
2334 }, {
2335 .alg = "cmac(des3_ede)",
2336 .test = alg_test_hash,
2337 .suite = {
2338 .hash = {
2339 .vecs = des3_ede_cmac64_tv_template,
2340 .count = CMAC_DES3_EDE_TEST_VECTORS
2341 }
2342 }
2343 }, {
2344 .alg = "compress_null",
2345 .test = alg_test_null,
2346 }, {
2347 .alg = "crc32c",
2348 .test = alg_test_crc32c,
2349 .fips_allowed = 1,
2350 .suite = {
2351 .hash = {
2352 .vecs = crc32c_tv_template,
2353 .count = CRC32C_TEST_VECTORS
2354 }
2355 }
2356 }, {
2357 .alg = "crct10dif",
2358 .test = alg_test_hash,
2359 .fips_allowed = 1,
2360 .suite = {
2361 .hash = {
2362 .vecs = crct10dif_tv_template,
2363 .count = CRCT10DIF_TEST_VECTORS
2364 }
2365 }
2366 }, {
2367 .alg = "cryptd(__driver-cbc-aes-aesni)",
2368 .test = alg_test_null,
2369 .fips_allowed = 1,
2370 }, {
2371 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2372 .test = alg_test_null,
2373 }, {
2374 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2375 .test = alg_test_null,
2376 }, {
2377 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2378 .test = alg_test_null,
2379 }, {
2380 .alg = "cryptd(__driver-ecb-aes-aesni)",
2381 .test = alg_test_null,
2382 .fips_allowed = 1,
2383 }, {
2384 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2385 .test = alg_test_null,
2386 }, {
2387 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2388 .test = alg_test_null,
2389 }, {
2390 .alg = "cryptd(__driver-ecb-cast5-avx)",
2391 .test = alg_test_null,
2392 }, {
2393 .alg = "cryptd(__driver-ecb-cast6-avx)",
2394 .test = alg_test_null,
2395 }, {
2396 .alg = "cryptd(__driver-ecb-serpent-avx)",
2397 .test = alg_test_null,
2398 }, {
2399 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2400 .test = alg_test_null,
2401 }, {
2402 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2403 .test = alg_test_null,
2404 }, {
2405 .alg = "cryptd(__driver-ecb-twofish-avx)",
2406 .test = alg_test_null,
2407 }, {
2408 .alg = "cryptd(__driver-gcm-aes-aesni)",
2409 .test = alg_test_null,
2410 .fips_allowed = 1,
2411 }, {
2412 .alg = "cryptd(__ghash-pclmulqdqni)",
2413 .test = alg_test_null,
2414 .fips_allowed = 1,
2415 }, {
2416 .alg = "ctr(aes)",
2417 .test = alg_test_skcipher,
2418 .fips_allowed = 1,
2419 .suite = {
2420 .cipher = {
2421 .enc = {
2422 .vecs = aes_ctr_enc_tv_template,
2423 .count = AES_CTR_ENC_TEST_VECTORS
2424 },
2425 .dec = {
2426 .vecs = aes_ctr_dec_tv_template,
2427 .count = AES_CTR_DEC_TEST_VECTORS
2428 }
2429 }
2430 }
2431 }, {
2432 .alg = "ctr(blowfish)",
2433 .test = alg_test_skcipher,
2434 .suite = {
2435 .cipher = {
2436 .enc = {
2437 .vecs = bf_ctr_enc_tv_template,
2438 .count = BF_CTR_ENC_TEST_VECTORS
2439 },
2440 .dec = {
2441 .vecs = bf_ctr_dec_tv_template,
2442 .count = BF_CTR_DEC_TEST_VECTORS
2443 }
2444 }
2445 }
2446 }, {
2447 .alg = "ctr(camellia)",
2448 .test = alg_test_skcipher,
2449 .suite = {
2450 .cipher = {
2451 .enc = {
2452 .vecs = camellia_ctr_enc_tv_template,
2453 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2454 },
2455 .dec = {
2456 .vecs = camellia_ctr_dec_tv_template,
2457 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2458 }
2459 }
2460 }
2461 }, {
2462 .alg = "ctr(cast5)",
2463 .test = alg_test_skcipher,
2464 .suite = {
2465 .cipher = {
2466 .enc = {
2467 .vecs = cast5_ctr_enc_tv_template,
2468 .count = CAST5_CTR_ENC_TEST_VECTORS
2469 },
2470 .dec = {
2471 .vecs = cast5_ctr_dec_tv_template,
2472 .count = CAST5_CTR_DEC_TEST_VECTORS
2473 }
2474 }
2475 }
2476 }, {
2477 .alg = "ctr(cast6)",
2478 .test = alg_test_skcipher,
2479 .suite = {
2480 .cipher = {
2481 .enc = {
2482 .vecs = cast6_ctr_enc_tv_template,
2483 .count = CAST6_CTR_ENC_TEST_VECTORS
2484 },
2485 .dec = {
2486 .vecs = cast6_ctr_dec_tv_template,
2487 .count = CAST6_CTR_DEC_TEST_VECTORS
2488 }
2489 }
2490 }
2491 }, {
2492 .alg = "ctr(des)",
2493 .test = alg_test_skcipher,
2494 .suite = {
2495 .cipher = {
2496 .enc = {
2497 .vecs = des_ctr_enc_tv_template,
2498 .count = DES_CTR_ENC_TEST_VECTORS
2499 },
2500 .dec = {
2501 .vecs = des_ctr_dec_tv_template,
2502 .count = DES_CTR_DEC_TEST_VECTORS
2503 }
2504 }
2505 }
2506 }, {
2507 .alg = "ctr(des3_ede)",
2508 .test = alg_test_skcipher,
2509 .suite = {
2510 .cipher = {
2511 .enc = {
2512 .vecs = des3_ede_ctr_enc_tv_template,
2513 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2514 },
2515 .dec = {
2516 .vecs = des3_ede_ctr_dec_tv_template,
2517 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2518 }
2519 }
2520 }
2521 }, {
2522 .alg = "ctr(serpent)",
2523 .test = alg_test_skcipher,
2524 .suite = {
2525 .cipher = {
2526 .enc = {
2527 .vecs = serpent_ctr_enc_tv_template,
2528 .count = SERPENT_CTR_ENC_TEST_VECTORS
2529 },
2530 .dec = {
2531 .vecs = serpent_ctr_dec_tv_template,
2532 .count = SERPENT_CTR_DEC_TEST_VECTORS
2533 }
2534 }
2535 }
2536 }, {
2537 .alg = "ctr(twofish)",
2538 .test = alg_test_skcipher,
2539 .suite = {
2540 .cipher = {
2541 .enc = {
2542 .vecs = tf_ctr_enc_tv_template,
2543 .count = TF_CTR_ENC_TEST_VECTORS
2544 },
2545 .dec = {
2546 .vecs = tf_ctr_dec_tv_template,
2547 .count = TF_CTR_DEC_TEST_VECTORS
2548 }
2549 }
2550 }
2551 }, {
2552 .alg = "cts(cbc(aes))",
2553 .test = alg_test_skcipher,
2554 .suite = {
2555 .cipher = {
2556 .enc = {
2557 .vecs = cts_mode_enc_tv_template,
2558 .count = CTS_MODE_ENC_TEST_VECTORS
2559 },
2560 .dec = {
2561 .vecs = cts_mode_dec_tv_template,
2562 .count = CTS_MODE_DEC_TEST_VECTORS
2563 }
2564 }
2565 }
2566 }, {
2567 .alg = "deflate",
2568 .test = alg_test_comp,
2569 .fips_allowed = 1,
2570 .suite = {
2571 .comp = {
2572 .comp = {
2573 .vecs = deflate_comp_tv_template,
2574 .count = DEFLATE_COMP_TEST_VECTORS
2575 },
2576 .decomp = {
2577 .vecs = deflate_decomp_tv_template,
2578 .count = DEFLATE_DECOMP_TEST_VECTORS
2579 }
2580 }
2581 }
2582 }, {
2583 .alg = "digest_null",
2584 .test = alg_test_null,
2585 }, {
2586 .alg = "drbg_nopr_ctr_aes128",
2587 .test = alg_test_drbg,
2588 .fips_allowed = 1,
2589 .suite = {
2590 .drbg = {
2591 .vecs = drbg_nopr_ctr_aes128_tv_template,
2592 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2593 }
2594 }
2595 }, {
2596 .alg = "drbg_nopr_ctr_aes192",
2597 .test = alg_test_drbg,
2598 .fips_allowed = 1,
2599 .suite = {
2600 .drbg = {
2601 .vecs = drbg_nopr_ctr_aes192_tv_template,
2602 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2603 }
2604 }
2605 }, {
2606 .alg = "drbg_nopr_ctr_aes256",
2607 .test = alg_test_drbg,
2608 .fips_allowed = 1,
2609 .suite = {
2610 .drbg = {
2611 .vecs = drbg_nopr_ctr_aes256_tv_template,
2612 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2613 }
2614 }
2615 }, {
2616 /*
2617 * There is no need to specifically test the DRBG with every
2618 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2619 */
2620 .alg = "drbg_nopr_hmac_sha1",
2621 .fips_allowed = 1,
2622 .test = alg_test_null,
2623 }, {
2624 .alg = "drbg_nopr_hmac_sha256",
2625 .test = alg_test_drbg,
2626 .fips_allowed = 1,
2627 .suite = {
2628 .drbg = {
2629 .vecs = drbg_nopr_hmac_sha256_tv_template,
2630 .count =
2631 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2632 }
2633 }
2634 }, {
2635 /* covered by drbg_nopr_hmac_sha256 test */
2636 .alg = "drbg_nopr_hmac_sha384",
2637 .fips_allowed = 1,
2638 .test = alg_test_null,
2639 }, {
2640 .alg = "drbg_nopr_hmac_sha512",
2641 .test = alg_test_null,
2642 .fips_allowed = 1,
2643 }, {
2644 .alg = "drbg_nopr_sha1",
2645 .fips_allowed = 1,
2646 .test = alg_test_null,
2647 }, {
2648 .alg = "drbg_nopr_sha256",
2649 .test = alg_test_drbg,
2650 .fips_allowed = 1,
2651 .suite = {
2652 .drbg = {
2653 .vecs = drbg_nopr_sha256_tv_template,
2654 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2655 }
2656 }
2657 }, {
2658 /* covered by drbg_nopr_sha256 test */
2659 .alg = "drbg_nopr_sha384",
2660 .fips_allowed = 1,
2661 .test = alg_test_null,
2662 }, {
2663 .alg = "drbg_nopr_sha512",
2664 .fips_allowed = 1,
2665 .test = alg_test_null,
2666 }, {
2667 .alg = "drbg_pr_ctr_aes128",
2668 .test = alg_test_drbg,
2669 .fips_allowed = 1,
2670 .suite = {
2671 .drbg = {
2672 .vecs = drbg_pr_ctr_aes128_tv_template,
2673 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2674 }
2675 }
2676 }, {
2677 /* covered by drbg_pr_ctr_aes128 test */
2678 .alg = "drbg_pr_ctr_aes192",
2679 .fips_allowed = 1,
2680 .test = alg_test_null,
2681 }, {
2682 .alg = "drbg_pr_ctr_aes256",
2683 .fips_allowed = 1,
2684 .test = alg_test_null,
2685 }, {
2686 .alg = "drbg_pr_hmac_sha1",
2687 .fips_allowed = 1,
2688 .test = alg_test_null,
2689 }, {
2690 .alg = "drbg_pr_hmac_sha256",
2691 .test = alg_test_drbg,
2692 .fips_allowed = 1,
2693 .suite = {
2694 .drbg = {
2695 .vecs = drbg_pr_hmac_sha256_tv_template,
2696 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2697 }
2698 }
2699 }, {
2700 /* covered by drbg_pr_hmac_sha256 test */
2701 .alg = "drbg_pr_hmac_sha384",
2702 .fips_allowed = 1,
2703 .test = alg_test_null,
2704 }, {
2705 .alg = "drbg_pr_hmac_sha512",
2706 .test = alg_test_null,
2707 .fips_allowed = 1,
2708 }, {
2709 .alg = "drbg_pr_sha1",
2710 .fips_allowed = 1,
2711 .test = alg_test_null,
2712 }, {
2713 .alg = "drbg_pr_sha256",
2714 .test = alg_test_drbg,
2715 .fips_allowed = 1,
2716 .suite = {
2717 .drbg = {
2718 .vecs = drbg_pr_sha256_tv_template,
2719 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2720 }
2721 }
2722 }, {
2723 /* covered by drbg_pr_sha256 test */
2724 .alg = "drbg_pr_sha384",
2725 .fips_allowed = 1,
2726 .test = alg_test_null,
2727 }, {
2728 .alg = "drbg_pr_sha512",
2729 .fips_allowed = 1,
2730 .test = alg_test_null,
2731 }, {
2732 .alg = "ecb(__aes-aesni)",
2733 .test = alg_test_null,
2734 .fips_allowed = 1,
2735 }, {
2736 .alg = "ecb(aes)",
2737 .test = alg_test_skcipher,
2738 .fips_allowed = 1,
2739 .suite = {
2740 .cipher = {
2741 .enc = {
2742 .vecs = aes_enc_tv_template,
2743 .count = AES_ENC_TEST_VECTORS
2744 },
2745 .dec = {
2746 .vecs = aes_dec_tv_template,
2747 .count = AES_DEC_TEST_VECTORS
2748 }
2749 }
2750 }
2751 }, {
2752 .alg = "ecb(anubis)",
2753 .test = alg_test_skcipher,
2754 .suite = {
2755 .cipher = {
2756 .enc = {
2757 .vecs = anubis_enc_tv_template,
2758 .count = ANUBIS_ENC_TEST_VECTORS
2759 },
2760 .dec = {
2761 .vecs = anubis_dec_tv_template,
2762 .count = ANUBIS_DEC_TEST_VECTORS
2763 }
2764 }
2765 }
2766 }, {
2767 .alg = "ecb(arc4)",
2768 .test = alg_test_skcipher,
2769 .suite = {
2770 .cipher = {
2771 .enc = {
2772 .vecs = arc4_enc_tv_template,
2773 .count = ARC4_ENC_TEST_VECTORS
2774 },
2775 .dec = {
2776 .vecs = arc4_dec_tv_template,
2777 .count = ARC4_DEC_TEST_VECTORS
2778 }
2779 }
2780 }
2781 }, {
2782 .alg = "ecb(blowfish)",
2783 .test = alg_test_skcipher,
2784 .suite = {
2785 .cipher = {
2786 .enc = {
2787 .vecs = bf_enc_tv_template,
2788 .count = BF_ENC_TEST_VECTORS
2789 },
2790 .dec = {
2791 .vecs = bf_dec_tv_template,
2792 .count = BF_DEC_TEST_VECTORS
2793 }
2794 }
2795 }
2796 }, {
2797 .alg = "ecb(camellia)",
2798 .test = alg_test_skcipher,
2799 .suite = {
2800 .cipher = {
2801 .enc = {
2802 .vecs = camellia_enc_tv_template,
2803 .count = CAMELLIA_ENC_TEST_VECTORS
2804 },
2805 .dec = {
2806 .vecs = camellia_dec_tv_template,
2807 .count = CAMELLIA_DEC_TEST_VECTORS
2808 }
2809 }
2810 }
2811 }, {
2812 .alg = "ecb(cast5)",
2813 .test = alg_test_skcipher,
2814 .suite = {
2815 .cipher = {
2816 .enc = {
2817 .vecs = cast5_enc_tv_template,
2818 .count = CAST5_ENC_TEST_VECTORS
2819 },
2820 .dec = {
2821 .vecs = cast5_dec_tv_template,
2822 .count = CAST5_DEC_TEST_VECTORS
2823 }
2824 }
2825 }
2826 }, {
2827 .alg = "ecb(cast6)",
2828 .test = alg_test_skcipher,
2829 .suite = {
2830 .cipher = {
2831 .enc = {
2832 .vecs = cast6_enc_tv_template,
2833 .count = CAST6_ENC_TEST_VECTORS
2834 },
2835 .dec = {
2836 .vecs = cast6_dec_tv_template,
2837 .count = CAST6_DEC_TEST_VECTORS
2838 }
2839 }
2840 }
2841 }, {
2842 .alg = "ecb(cipher_null)",
2843 .test = alg_test_null,
2844 }, {
2845 .alg = "ecb(des)",
2846 .test = alg_test_skcipher,
2847 .fips_allowed = 1,
2848 .suite = {
2849 .cipher = {
2850 .enc = {
2851 .vecs = des_enc_tv_template,
2852 .count = DES_ENC_TEST_VECTORS
2853 },
2854 .dec = {
2855 .vecs = des_dec_tv_template,
2856 .count = DES_DEC_TEST_VECTORS
2857 }
2858 }
2859 }
2860 }, {
2861 .alg = "ecb(des3_ede)",
2862 .test = alg_test_skcipher,
2863 .fips_allowed = 1,
2864 .suite = {
2865 .cipher = {
2866 .enc = {
2867 .vecs = des3_ede_enc_tv_template,
2868 .count = DES3_EDE_ENC_TEST_VECTORS
2869 },
2870 .dec = {
2871 .vecs = des3_ede_dec_tv_template,
2872 .count = DES3_EDE_DEC_TEST_VECTORS
2873 }
2874 }
2875 }
2876 }, {
2877 .alg = "ecb(fcrypt)",
2878 .test = alg_test_skcipher,
2879 .suite = {
2880 .cipher = {
2881 .enc = {
2882 .vecs = fcrypt_pcbc_enc_tv_template,
2883 .count = 1
2884 },
2885 .dec = {
2886 .vecs = fcrypt_pcbc_dec_tv_template,
2887 .count = 1
2888 }
2889 }
2890 }
2891 }, {
2892 .alg = "ecb(khazad)",
2893 .test = alg_test_skcipher,
2894 .suite = {
2895 .cipher = {
2896 .enc = {
2897 .vecs = khazad_enc_tv_template,
2898 .count = KHAZAD_ENC_TEST_VECTORS
2899 },
2900 .dec = {
2901 .vecs = khazad_dec_tv_template,
2902 .count = KHAZAD_DEC_TEST_VECTORS
2903 }
2904 }
2905 }
2906 }, {
2907 .alg = "ecb(seed)",
2908 .test = alg_test_skcipher,
2909 .suite = {
2910 .cipher = {
2911 .enc = {
2912 .vecs = seed_enc_tv_template,
2913 .count = SEED_ENC_TEST_VECTORS
2914 },
2915 .dec = {
2916 .vecs = seed_dec_tv_template,
2917 .count = SEED_DEC_TEST_VECTORS
2918 }
2919 }
2920 }
2921 }, {
2922 .alg = "ecb(serpent)",
2923 .test = alg_test_skcipher,
2924 .suite = {
2925 .cipher = {
2926 .enc = {
2927 .vecs = serpent_enc_tv_template,
2928 .count = SERPENT_ENC_TEST_VECTORS
2929 },
2930 .dec = {
2931 .vecs = serpent_dec_tv_template,
2932 .count = SERPENT_DEC_TEST_VECTORS
2933 }
2934 }
2935 }
2936 }, {
2937 .alg = "ecb(tea)",
2938 .test = alg_test_skcipher,
2939 .suite = {
2940 .cipher = {
2941 .enc = {
2942 .vecs = tea_enc_tv_template,
2943 .count = TEA_ENC_TEST_VECTORS
2944 },
2945 .dec = {
2946 .vecs = tea_dec_tv_template,
2947 .count = TEA_DEC_TEST_VECTORS
2948 }
2949 }
2950 }
2951 }, {
2952 .alg = "ecb(tnepres)",
2953 .test = alg_test_skcipher,
2954 .suite = {
2955 .cipher = {
2956 .enc = {
2957 .vecs = tnepres_enc_tv_template,
2958 .count = TNEPRES_ENC_TEST_VECTORS
2959 },
2960 .dec = {
2961 .vecs = tnepres_dec_tv_template,
2962 .count = TNEPRES_DEC_TEST_VECTORS
2963 }
2964 }
2965 }
2966 }, {
2967 .alg = "ecb(twofish)",
2968 .test = alg_test_skcipher,
2969 .suite = {
2970 .cipher = {
2971 .enc = {
2972 .vecs = tf_enc_tv_template,
2973 .count = TF_ENC_TEST_VECTORS
2974 },
2975 .dec = {
2976 .vecs = tf_dec_tv_template,
2977 .count = TF_DEC_TEST_VECTORS
2978 }
2979 }
2980 }
2981 }, {
2982 .alg = "ecb(xeta)",
2983 .test = alg_test_skcipher,
2984 .suite = {
2985 .cipher = {
2986 .enc = {
2987 .vecs = xeta_enc_tv_template,
2988 .count = XETA_ENC_TEST_VECTORS
2989 },
2990 .dec = {
2991 .vecs = xeta_dec_tv_template,
2992 .count = XETA_DEC_TEST_VECTORS
2993 }
2994 }
2995 }
2996 }, {
2997 .alg = "ecb(xtea)",
2998 .test = alg_test_skcipher,
2999 .suite = {
3000 .cipher = {
3001 .enc = {
3002 .vecs = xtea_enc_tv_template,
3003 .count = XTEA_ENC_TEST_VECTORS
3004 },
3005 .dec = {
3006 .vecs = xtea_dec_tv_template,
3007 .count = XTEA_DEC_TEST_VECTORS
3008 }
3009 }
3010 }
3011 }, {
3012 .alg = "gcm(aes)",
3013 .test = alg_test_aead,
3014 .fips_allowed = 1,
3015 .suite = {
3016 .aead = {
3017 .enc = {
3018 .vecs = aes_gcm_enc_tv_template,
3019 .count = AES_GCM_ENC_TEST_VECTORS
3020 },
3021 .dec = {
3022 .vecs = aes_gcm_dec_tv_template,
3023 .count = AES_GCM_DEC_TEST_VECTORS
3024 }
3025 }
3026 }
3027 }, {
3028 .alg = "ghash",
3029 .test = alg_test_hash,
3030 .fips_allowed = 1,
3031 .suite = {
3032 .hash = {
3033 .vecs = ghash_tv_template,
3034 .count = GHASH_TEST_VECTORS
3035 }
3036 }
3037 }, {
3038 .alg = "hmac(crc32)",
3039 .test = alg_test_hash,
3040 .suite = {
3041 .hash = {
3042 .vecs = bfin_crc_tv_template,
3043 .count = BFIN_CRC_TEST_VECTORS
3044 }
3045 }
3046 }, {
3047 .alg = "hmac(md5)",
3048 .test = alg_test_hash,
3049 .suite = {
3050 .hash = {
3051 .vecs = hmac_md5_tv_template,
3052 .count = HMAC_MD5_TEST_VECTORS
3053 }
3054 }
3055 }, {
3056 .alg = "hmac(rmd128)",
3057 .test = alg_test_hash,
3058 .suite = {
3059 .hash = {
3060 .vecs = hmac_rmd128_tv_template,
3061 .count = HMAC_RMD128_TEST_VECTORS
3062 }
3063 }
3064 }, {
3065 .alg = "hmac(rmd160)",
3066 .test = alg_test_hash,
3067 .suite = {
3068 .hash = {
3069 .vecs = hmac_rmd160_tv_template,
3070 .count = HMAC_RMD160_TEST_VECTORS
3071 }
3072 }
3073 }, {
3074 .alg = "hmac(sha1)",
3075 .test = alg_test_hash,
3076 .fips_allowed = 1,
3077 .suite = {
3078 .hash = {
3079 .vecs = hmac_sha1_tv_template,
3080 .count = HMAC_SHA1_TEST_VECTORS
3081 }
3082 }
3083 }, {
3084 .alg = "hmac(sha224)",
3085 .test = alg_test_hash,
3086 .fips_allowed = 1,
3087 .suite = {
3088 .hash = {
3089 .vecs = hmac_sha224_tv_template,
3090 .count = HMAC_SHA224_TEST_VECTORS
3091 }
3092 }
3093 }, {
3094 .alg = "hmac(sha256)",
3095 .test = alg_test_hash,
3096 .fips_allowed = 1,
3097 .suite = {
3098 .hash = {
3099 .vecs = hmac_sha256_tv_template,
3100 .count = HMAC_SHA256_TEST_VECTORS
3101 }
3102 }
3103 }, {
3104 .alg = "hmac(sha384)",
3105 .test = alg_test_hash,
3106 .fips_allowed = 1,
3107 .suite = {
3108 .hash = {
3109 .vecs = hmac_sha384_tv_template,
3110 .count = HMAC_SHA384_TEST_VECTORS
3111 }
3112 }
3113 }, {
3114 .alg = "hmac(sha512)",
3115 .test = alg_test_hash,
3116 .fips_allowed = 1,
3117 .suite = {
3118 .hash = {
3119 .vecs = hmac_sha512_tv_template,
3120 .count = HMAC_SHA512_TEST_VECTORS
3121 }
3122 }
3123 }, {
3124 .alg = "lrw(aes)",
3125 .test = alg_test_skcipher,
3126 .suite = {
3127 .cipher = {
3128 .enc = {
3129 .vecs = aes_lrw_enc_tv_template,
3130 .count = AES_LRW_ENC_TEST_VECTORS
3131 },
3132 .dec = {
3133 .vecs = aes_lrw_dec_tv_template,
3134 .count = AES_LRW_DEC_TEST_VECTORS
3135 }
3136 }
3137 }
3138 }, {
3139 .alg = "lrw(camellia)",
3140 .test = alg_test_skcipher,
3141 .suite = {
3142 .cipher = {
3143 .enc = {
3144 .vecs = camellia_lrw_enc_tv_template,
3145 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3146 },
3147 .dec = {
3148 .vecs = camellia_lrw_dec_tv_template,
3149 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3150 }
3151 }
3152 }
3153 }, {
3154 .alg = "lrw(cast6)",
3155 .test = alg_test_skcipher,
3156 .suite = {
3157 .cipher = {
3158 .enc = {
3159 .vecs = cast6_lrw_enc_tv_template,
3160 .count = CAST6_LRW_ENC_TEST_VECTORS
3161 },
3162 .dec = {
3163 .vecs = cast6_lrw_dec_tv_template,
3164 .count = CAST6_LRW_DEC_TEST_VECTORS
3165 }
3166 }
3167 }
3168 }, {
3169 .alg = "lrw(serpent)",
3170 .test = alg_test_skcipher,
3171 .suite = {
3172 .cipher = {
3173 .enc = {
3174 .vecs = serpent_lrw_enc_tv_template,
3175 .count = SERPENT_LRW_ENC_TEST_VECTORS
3176 },
3177 .dec = {
3178 .vecs = serpent_lrw_dec_tv_template,
3179 .count = SERPENT_LRW_DEC_TEST_VECTORS
3180 }
3181 }
3182 }
3183 }, {
3184 .alg = "lrw(twofish)",
3185 .test = alg_test_skcipher,
3186 .suite = {
3187 .cipher = {
3188 .enc = {
3189 .vecs = tf_lrw_enc_tv_template,
3190 .count = TF_LRW_ENC_TEST_VECTORS
3191 },
3192 .dec = {
3193 .vecs = tf_lrw_dec_tv_template,
3194 .count = TF_LRW_DEC_TEST_VECTORS
3195 }
3196 }
3197 }
3198 }, {
3199 .alg = "lzo",
3200 .test = alg_test_comp,
3201 .fips_allowed = 1,
3202 .suite = {
3203 .comp = {
3204 .comp = {
3205 .vecs = lzo_comp_tv_template,
3206 .count = LZO_COMP_TEST_VECTORS
3207 },
3208 .decomp = {
3209 .vecs = lzo_decomp_tv_template,
3210 .count = LZO_DECOMP_TEST_VECTORS
3211 }
3212 }
3213 }
3214 }, {
3215 .alg = "md4",
3216 .test = alg_test_hash,
3217 .suite = {
3218 .hash = {
3219 .vecs = md4_tv_template,
3220 .count = MD4_TEST_VECTORS
3221 }
3222 }
3223 }, {
3224 .alg = "md5",
3225 .test = alg_test_hash,
3226 .suite = {
3227 .hash = {
3228 .vecs = md5_tv_template,
3229 .count = MD5_TEST_VECTORS
3230 }
3231 }
3232 }, {
3233 .alg = "michael_mic",
3234 .test = alg_test_hash,
3235 .suite = {
3236 .hash = {
3237 .vecs = michael_mic_tv_template,
3238 .count = MICHAEL_MIC_TEST_VECTORS
3239 }
3240 }
3241 }, {
3242 .alg = "ofb(aes)",
3243 .test = alg_test_skcipher,
3244 .fips_allowed = 1,
3245 .suite = {
3246 .cipher = {
3247 .enc = {
3248 .vecs = aes_ofb_enc_tv_template,
3249 .count = AES_OFB_ENC_TEST_VECTORS
3250 },
3251 .dec = {
3252 .vecs = aes_ofb_dec_tv_template,
3253 .count = AES_OFB_DEC_TEST_VECTORS
3254 }
3255 }
3256 }
3257 }, {
3258 .alg = "pcbc(fcrypt)",
3259 .test = alg_test_skcipher,
3260 .suite = {
3261 .cipher = {
3262 .enc = {
3263 .vecs = fcrypt_pcbc_enc_tv_template,
3264 .count = FCRYPT_ENC_TEST_VECTORS
3265 },
3266 .dec = {
3267 .vecs = fcrypt_pcbc_dec_tv_template,
3268 .count = FCRYPT_DEC_TEST_VECTORS
3269 }
3270 }
3271 }
3272 }, {
3273 .alg = "rfc3686(ctr(aes))",
3274 .test = alg_test_skcipher,
3275 .fips_allowed = 1,
3276 .suite = {
3277 .cipher = {
3278 .enc = {
3279 .vecs = aes_ctr_rfc3686_enc_tv_template,
3280 .count = AES_CTR_3686_ENC_TEST_VECTORS
3281 },
3282 .dec = {
3283 .vecs = aes_ctr_rfc3686_dec_tv_template,
3284 .count = AES_CTR_3686_DEC_TEST_VECTORS
3285 }
3286 }
3287 }
3288 }, {
3289 .alg = "rfc4106(gcm(aes))",
3290 .test = alg_test_aead,
3291 .suite = {
3292 .aead = {
3293 .enc = {
3294 .vecs = aes_gcm_rfc4106_enc_tv_template,
3295 .count = AES_GCM_4106_ENC_TEST_VECTORS
3296 },
3297 .dec = {
3298 .vecs = aes_gcm_rfc4106_dec_tv_template,
3299 .count = AES_GCM_4106_DEC_TEST_VECTORS
3300 }
3301 }
3302 }
3303 }, {
3304 .alg = "rfc4309(ccm(aes))",
3305 .test = alg_test_aead,
3306 .fips_allowed = 1,
3307 .suite = {
3308 .aead = {
3309 .enc = {
3310 .vecs = aes_ccm_rfc4309_enc_tv_template,
3311 .count = AES_CCM_4309_ENC_TEST_VECTORS
3312 },
3313 .dec = {
3314 .vecs = aes_ccm_rfc4309_dec_tv_template,
3315 .count = AES_CCM_4309_DEC_TEST_VECTORS
3316 }
3317 }
3318 }
3319 }, {
3320 .alg = "rfc4543(gcm(aes))",
3321 .test = alg_test_aead,
3322 .suite = {
3323 .aead = {
3324 .enc = {
3325 .vecs = aes_gcm_rfc4543_enc_tv_template,
3326 .count = AES_GCM_4543_ENC_TEST_VECTORS
3327 },
3328 .dec = {
3329 .vecs = aes_gcm_rfc4543_dec_tv_template,
3330 .count = AES_GCM_4543_DEC_TEST_VECTORS
3331 }
3332 }
3333 }
3334 }, {
3335 .alg = "rmd128",
3336 .test = alg_test_hash,
3337 .suite = {
3338 .hash = {
3339 .vecs = rmd128_tv_template,
3340 .count = RMD128_TEST_VECTORS
3341 }
3342 }
3343 }, {
3344 .alg = "rmd160",
3345 .test = alg_test_hash,
3346 .suite = {
3347 .hash = {
3348 .vecs = rmd160_tv_template,
3349 .count = RMD160_TEST_VECTORS
3350 }
3351 }
3352 }, {
3353 .alg = "rmd256",
3354 .test = alg_test_hash,
3355 .suite = {
3356 .hash = {
3357 .vecs = rmd256_tv_template,
3358 .count = RMD256_TEST_VECTORS
3359 }
3360 }
3361 }, {
3362 .alg = "rmd320",
3363 .test = alg_test_hash,
3364 .suite = {
3365 .hash = {
3366 .vecs = rmd320_tv_template,
3367 .count = RMD320_TEST_VECTORS
3368 }
3369 }
3370 }, {
3371 .alg = "salsa20",
3372 .test = alg_test_skcipher,
3373 .suite = {
3374 .cipher = {
3375 .enc = {
3376 .vecs = salsa20_stream_enc_tv_template,
3377 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3378 }
3379 }
3380 }
3381 }, {
3382 .alg = "sha1",
3383 .test = alg_test_hash,
3384 .fips_allowed = 1,
3385 .suite = {
3386 .hash = {
3387 .vecs = sha1_tv_template,
3388 .count = SHA1_TEST_VECTORS
3389 }
3390 }
3391 }, {
3392 .alg = "sha224",
3393 .test = alg_test_hash,
3394 .fips_allowed = 1,
3395 .suite = {
3396 .hash = {
3397 .vecs = sha224_tv_template,
3398 .count = SHA224_TEST_VECTORS
3399 }
3400 }
3401 }, {
3402 .alg = "sha256",
3403 .test = alg_test_hash,
3404 .fips_allowed = 1,
3405 .suite = {
3406 .hash = {
3407 .vecs = sha256_tv_template,
3408 .count = SHA256_TEST_VECTORS
3409 }
3410 }
3411 }, {
3412 .alg = "sha384",
3413 .test = alg_test_hash,
3414 .fips_allowed = 1,
3415 .suite = {
3416 .hash = {
3417 .vecs = sha384_tv_template,
3418 .count = SHA384_TEST_VECTORS
3419 }
3420 }
3421 }, {
3422 .alg = "sha512",
3423 .test = alg_test_hash,
3424 .fips_allowed = 1,
3425 .suite = {
3426 .hash = {
3427 .vecs = sha512_tv_template,
3428 .count = SHA512_TEST_VECTORS
3429 }
3430 }
3431 }, {
3432 .alg = "tgr128",
3433 .test = alg_test_hash,
3434 .suite = {
3435 .hash = {
3436 .vecs = tgr128_tv_template,
3437 .count = TGR128_TEST_VECTORS
3438 }
3439 }
3440 }, {
3441 .alg = "tgr160",
3442 .test = alg_test_hash,
3443 .suite = {
3444 .hash = {
3445 .vecs = tgr160_tv_template,
3446 .count = TGR160_TEST_VECTORS
3447 }
3448 }
3449 }, {
3450 .alg = "tgr192",
3451 .test = alg_test_hash,
3452 .suite = {
3453 .hash = {
3454 .vecs = tgr192_tv_template,
3455 .count = TGR192_TEST_VECTORS
3456 }
3457 }
3458 }, {
3459 .alg = "vmac(aes)",
3460 .test = alg_test_hash,
3461 .suite = {
3462 .hash = {
3463 .vecs = aes_vmac128_tv_template,
3464 .count = VMAC_AES_TEST_VECTORS
3465 }
3466 }
3467 }, {
3468 .alg = "wp256",
3469 .test = alg_test_hash,
3470 .suite = {
3471 .hash = {
3472 .vecs = wp256_tv_template,
3473 .count = WP256_TEST_VECTORS
3474 }
3475 }
3476 }, {
3477 .alg = "wp384",
3478 .test = alg_test_hash,
3479 .suite = {
3480 .hash = {
3481 .vecs = wp384_tv_template,
3482 .count = WP384_TEST_VECTORS
3483 }
3484 }
3485 }, {
3486 .alg = "wp512",
3487 .test = alg_test_hash,
3488 .suite = {
3489 .hash = {
3490 .vecs = wp512_tv_template,
3491 .count = WP512_TEST_VECTORS
3492 }
3493 }
3494 }, {
3495 .alg = "xcbc(aes)",
3496 .test = alg_test_hash,
3497 .suite = {
3498 .hash = {
3499 .vecs = aes_xcbc128_tv_template,
3500 .count = XCBC_AES_TEST_VECTORS
3501 }
3502 }
3503 }, {
3504 .alg = "xts(aes)",
3505 .test = alg_test_skcipher,
3506 .fips_allowed = 1,
3507 .suite = {
3508 .cipher = {
3509 .enc = {
3510 .vecs = aes_xts_enc_tv_template,
3511 .count = AES_XTS_ENC_TEST_VECTORS
3512 },
3513 .dec = {
3514 .vecs = aes_xts_dec_tv_template,
3515 .count = AES_XTS_DEC_TEST_VECTORS
3516 }
3517 }
3518 }
3519 }, {
3520 .alg = "xts(camellia)",
3521 .test = alg_test_skcipher,
3522 .suite = {
3523 .cipher = {
3524 .enc = {
3525 .vecs = camellia_xts_enc_tv_template,
3526 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3527 },
3528 .dec = {
3529 .vecs = camellia_xts_dec_tv_template,
3530 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3531 }
3532 }
3533 }
3534 }, {
3535 .alg = "xts(cast6)",
3536 .test = alg_test_skcipher,
3537 .suite = {
3538 .cipher = {
3539 .enc = {
3540 .vecs = cast6_xts_enc_tv_template,
3541 .count = CAST6_XTS_ENC_TEST_VECTORS
3542 },
3543 .dec = {
3544 .vecs = cast6_xts_dec_tv_template,
3545 .count = CAST6_XTS_DEC_TEST_VECTORS
3546 }
3547 }
3548 }
3549 }, {
3550 .alg = "xts(serpent)",
3551 .test = alg_test_skcipher,
3552 .suite = {
3553 .cipher = {
3554 .enc = {
3555 .vecs = serpent_xts_enc_tv_template,
3556 .count = SERPENT_XTS_ENC_TEST_VECTORS
3557 },
3558 .dec = {
3559 .vecs = serpent_xts_dec_tv_template,
3560 .count = SERPENT_XTS_DEC_TEST_VECTORS
3561 }
3562 }
3563 }
3564 }, {
3565 .alg = "xts(twofish)",
3566 .test = alg_test_skcipher,
3567 .suite = {
3568 .cipher = {
3569 .enc = {
3570 .vecs = tf_xts_enc_tv_template,
3571 .count = TF_XTS_ENC_TEST_VECTORS
3572 },
3573 .dec = {
3574 .vecs = tf_xts_dec_tv_template,
3575 .count = TF_XTS_DEC_TEST_VECTORS
3576 }
3577 }
3578 }
3579 }, {
3580 .alg = "zlib",
3581 .test = alg_test_pcomp,
3582 .fips_allowed = 1,
3583 .suite = {
3584 .pcomp = {
3585 .comp = {
3586 .vecs = zlib_comp_tv_template,
3587 .count = ZLIB_COMP_TEST_VECTORS
3588 },
3589 .decomp = {
3590 .vecs = zlib_decomp_tv_template,
3591 .count = ZLIB_DECOMP_TEST_VECTORS
3592 }
3593 }
3594 }
3595 }
3596 };
3597
3598 static bool alg_test_descs_checked;
3599
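/*
 * alg_test_descs[] above must stay sorted by .alg name: alg_find_test()
 * relies on that ordering to do a binary search.  This one-time sanity
 * check warns about any entry that is out of order or duplicated.
 */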
3600 static void alg_test_descs_check_order(void)
3601 {
3602 int i;
3603
3604 /* only check once */
3605 if (alg_test_descs_checked)
3606 return;
3607
3608 alg_test_descs_checked = true;
3609
3610 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3611 int diff = strcmp(alg_test_descs[i - 1].alg,
3612 alg_test_descs[i].alg);
3613
3614 if (WARN_ON(diff > 0)) {
3615 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3616 alg_test_descs[i - 1].alg,
3617 alg_test_descs[i].alg);
3618 }
3619
3620 if (WARN_ON(diff == 0)) {
3621 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3622 alg_test_descs[i].alg);
3623 }
3624 }
3625 }
3626
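/*
 * Binary search for @alg in the sorted alg_test_descs[] table.  Returns
 * the index of the matching entry, or -1 if no test entry exists for the
 * algorithm.
 */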
3627 static int alg_find_test(const char *alg)
3628 {
3629 int start = 0;
3630 int end = ARRAY_SIZE(alg_test_descs);
3631
3632 while (start < end) {
3633 int i = (start + end) / 2;
3634 int diff = strcmp(alg_test_descs[i].alg, alg);
3635
3636 if (diff > 0) {
3637 end = i;
3638 continue;
3639 }
3640
3641 if (diff < 0) {
3642 start = i + 1;
3643 continue;
3644 }
3645
3646 return i;
3647 }
3648
3649 return -1;
3650 }
3651
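/*
 * Run the registered self-tests for an algorithm.  Plain block ciphers are
 * wrapped in "ecb(...)" and handed to alg_test_cipher(); everything else is
 * looked up by both the generic name (@alg) and the implementation name
 * (@driver), and each matching entry is executed.  Returns 0 when no test
 * is defined.  In FIPS mode a non-approved algorithm is rejected with
 * -EINVAL and a failed self-test triggers a panic.
 *
 * A minimal sketch of a typical call (illustrative names and flags, not
 * taken from this file):
 *
 *	err = alg_test("aes-generic", "aes", CRYPTO_ALG_TYPE_CIPHER, 0);
 */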
3652 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3653 {
3654 int i;
3655 int j;
3656 int rc;
3657
3658 alg_test_descs_check_order();
3659
3660 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3661 char nalg[CRYPTO_MAX_ALG_NAME];
3662
3663 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3664 sizeof(nalg))
3665 return -ENAMETOOLONG;
3666
3667 i = alg_find_test(nalg);
3668 if (i < 0)
3669 goto notest;
3670
3671 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3672 goto non_fips_alg;
3673
3674 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3675 goto test_done;
3676 }
3677
3678 i = alg_find_test(alg);
3679 j = alg_find_test(driver);
3680 if (i < 0 && j < 0)
3681 goto notest;
3682
3683 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3684 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3685 goto non_fips_alg;
3686
3687 rc = 0;
3688 if (i >= 0)
3689 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3690 type, mask);
3691 if (j >= 0 && j != i)
3692 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3693 type, mask);
3694
3695 test_done:
3696 if (fips_enabled && rc)
3697 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3698
3699 if (fips_enabled && !rc)
3700 pr_info("alg: self-tests for %s (%s) passed\n",
3701 driver, alg);
3702
3703 return rc;
3704
3705 notest:
3706 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3707 return 0;
3708 non_fips_alg:
3709 return -EINVAL;
3710 }
3711
3712 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3713
3714 EXPORT_SYMBOL_GPL(alg_test);