crypto: testmgr - Add a flag allowing the self-tests to be disabled at runtime.
crypto/testmgr.c
1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35
36 #include "internal.h"
37
38 static bool notests;
39 module_param(notests, bool, 0644);
40 MODULE_PARM_DESC(notests, "disable crypto self-tests");
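/*
 * Typical usage (assuming testmgr is built into the cryptomgr module, as in
 * the usual kernel configuration): boot with "cryptomgr.notests=1" on the
 * kernel command line, or toggle /sys/module/cryptomgr/parameters/notests
 * at runtime, to skip the self-tests below.
 */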
41
42 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
43
44 /* a perfect nop */
45 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
46 {
47 return 0;
48 }
49
50 #else
51
52 #include "testmgr.h"
53
54 /*
55 * Need slab memory for testing (size in number of pages).
56 */
57 #define XBUFSIZE 8
58
59 /*
60 * Indexes into the xbuf to simulate cross-page access.
61 */
62 #define IDX1 32
63 #define IDX2 32400
64 #define IDX3 1
65 #define IDX4 8193
66 #define IDX5 22222
67 #define IDX6 17101
68 #define IDX7 27333
69 #define IDX8 3000
70
71 /*
72 * Used by test_cipher()
73 */
74 #define ENCRYPT 1
75 #define DECRYPT 0
76
77 struct tcrypt_result {
78 struct completion completion;
79 int err;
80 };
81
82 struct aead_test_suite {
83 struct {
84 struct aead_testvec *vecs;
85 unsigned int count;
86 } enc, dec;
87 };
88
89 struct cipher_test_suite {
90 struct {
91 struct cipher_testvec *vecs;
92 unsigned int count;
93 } enc, dec;
94 };
95
96 struct comp_test_suite {
97 struct {
98 struct comp_testvec *vecs;
99 unsigned int count;
100 } comp, decomp;
101 };
102
103 struct hash_test_suite {
104 struct hash_testvec *vecs;
105 unsigned int count;
106 };
107
108 struct cprng_test_suite {
109 struct cprng_testvec *vecs;
110 unsigned int count;
111 };
112
113 struct drbg_test_suite {
114 struct drbg_testvec *vecs;
115 unsigned int count;
116 };
117
118 struct akcipher_test_suite {
119 struct akcipher_testvec *vecs;
120 unsigned int count;
121 };
122
123 struct alg_test_desc {
124 const char *alg;
125 int (*test)(const struct alg_test_desc *desc, const char *driver,
126 u32 type, u32 mask);
127 int fips_allowed; /* set if alg is allowed in fips mode */
128
129 union {
130 struct aead_test_suite aead;
131 struct cipher_test_suite cipher;
132 struct comp_test_suite comp;
133 struct hash_test_suite hash;
134 struct cprng_test_suite cprng;
135 struct drbg_test_suite drbg;
136 struct akcipher_test_suite akcipher;
137 } suite;
138 };
139
140 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
141
142 static void hexdump(unsigned char *buf, unsigned int len)
143 {
144 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
145 16, 1,
146 buf, len, false);
147 }
148
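/*
 * Completion callback for asynchronous requests: ignore -EINPROGRESS
 * notifications and record the final status for the waiter.
 */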
149 static void tcrypt_complete(struct crypto_async_request *req, int err)
150 {
151 struct tcrypt_result *res = req->data;
152
153 if (err == -EINPROGRESS)
154 return;
155
156 res->err = err;
157 complete(&res->completion);
158 }
159
160 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
161 {
162 int i;
163
164 for (i = 0; i < XBUFSIZE; i++) {
165 buf[i] = (void *)__get_free_page(GFP_KERNEL);
166 if (!buf[i])
167 goto err_free_buf;
168 }
169
170 return 0;
171
172 err_free_buf:
173 while (i-- > 0)
174 free_page((unsigned long)buf[i]);
175
176 return -ENOMEM;
177 }
178
179 static void testmgr_free_buf(char *buf[XBUFSIZE])
180 {
181 int i;
182
183 for (i = 0; i < XBUFSIZE; i++)
184 free_page((unsigned long)buf[i]);
185 }
186
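/*
 * Wait for an asynchronous operation that returned -EINPROGRESS or -EBUSY
 * to finish, and return the status recorded by tcrypt_complete().
 */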
187 static int wait_async_op(struct tcrypt_result *tr, int ret)
188 {
189 if (ret == -EINPROGRESS || ret == -EBUSY) {
190 wait_for_completion(&tr->completion);
191 reinit_completion(&tr->completion);
192 ret = tr->err;
193 }
194 return ret;
195 }
196
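/*
 * Exercise the export()/import() path: save the current hash state,
 * allocate a fresh request, restore the state into it and process the
 * next chunk (tap[k]) of the template's plaintext.
 */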
197 static int ahash_partial_update(struct ahash_request **preq,
198 struct crypto_ahash *tfm, struct hash_testvec *template,
199 void *hash_buff, int k, int temp, struct scatterlist *sg,
200 const char *algo, char *result, struct tcrypt_result *tresult)
201 {
202 char *state;
203 struct ahash_request *req;
204 int statesize, ret = -EINVAL;
205
206 req = *preq;
207 statesize = crypto_ahash_statesize(
208 crypto_ahash_reqtfm(req));
209 state = kmalloc(statesize, GFP_KERNEL);
210 if (!state) {
211 pr_err("alt: hash: Failed to alloc state for %s\n", algo);
212 goto out_nostate;
213 }
214 ret = crypto_ahash_export(req, state);
215 if (ret) {
216 pr_err("alt: hash: Failed to export() for %s\n", algo);
217 goto out;
218 }
219 ahash_request_free(req);
220 req = ahash_request_alloc(tfm, GFP_KERNEL);
221 if (!req) {
222 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
223 goto out_noreq;
224 }
225 ahash_request_set_callback(req,
226 CRYPTO_TFM_REQ_MAY_BACKLOG,
227 tcrypt_complete, tresult);
228
229 memcpy(hash_buff, template->plaintext + temp,
230 template->tap[k]);
231 sg_init_one(&sg[0], hash_buff, template->tap[k]);
232 ahash_request_set_crypt(req, sg, result, template->tap[k]);
233 ret = crypto_ahash_import(req, state);
234 if (ret) {
235 pr_err("alg: hash: Failed to import() for %s\n", algo);
236 goto out;
237 }
238 ret = wait_async_op(tresult, crypto_ahash_update(req));
239 if (ret)
240 goto out;
241 *preq = req;
242 ret = 0;
243 goto out_noreq;
244 out:
245 ahash_request_free(req);
246 out_noreq:
247 kfree(state);
248 out_nostate:
249 return ret;
250 }
251
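/*
 * Run the template vectors through the ahash API at the given alignment
 * offset: one-shot and init/update/final tests, scatter-gather chunking
 * tests and partial-update (export/import) tests.
 */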
252 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
253 unsigned int tcount, bool use_digest,
254 const int align_offset)
255 {
256 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
257 unsigned int i, j, k, temp;
258 struct scatterlist sg[8];
259 char *result;
260 char *key;
261 struct ahash_request *req;
262 struct tcrypt_result tresult;
263 void *hash_buff;
264 char *xbuf[XBUFSIZE];
265 int ret = -ENOMEM;
266
267 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
268 if (!result)
269 return ret;
270 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
271 if (!key)
272 goto out_nobuf;
273 if (testmgr_alloc_buf(xbuf))
274 goto out_nobuf;
275
276 init_completion(&tresult.completion);
277
278 req = ahash_request_alloc(tfm, GFP_KERNEL);
279 if (!req) {
280 printk(KERN_ERR "alg: hash: Failed to allocate request for "
281 "%s\n", algo);
282 goto out_noreq;
283 }
284 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
285 tcrypt_complete, &tresult);
286
287 j = 0;
288 for (i = 0; i < tcount; i++) {
289 if (template[i].np)
290 continue;
291
292 ret = -EINVAL;
293 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
294 goto out;
295
296 j++;
297 memset(result, 0, MAX_DIGEST_SIZE);
298
299 hash_buff = xbuf[0];
300 hash_buff += align_offset;
301
302 memcpy(hash_buff, template[i].plaintext, template[i].psize);
303 sg_init_one(&sg[0], hash_buff, template[i].psize);
304
305 if (template[i].ksize) {
306 crypto_ahash_clear_flags(tfm, ~0);
307 if (template[i].ksize > MAX_KEYLEN) {
308 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
309 j, algo, template[i].ksize, MAX_KEYLEN);
310 ret = -EINVAL;
311 goto out;
312 }
313 memcpy(key, template[i].key, template[i].ksize);
314 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
315 if (ret) {
316 printk(KERN_ERR "alg: hash: setkey failed on "
317 "test %d for %s: ret=%d\n", j, algo,
318 -ret);
319 goto out;
320 }
321 }
322
323 ahash_request_set_crypt(req, sg, result, template[i].psize);
324 if (use_digest) {
325 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
326 if (ret) {
327 pr_err("alg: hash: digest failed on test %d "
328 "for %s: ret=%d\n", j, algo, -ret);
329 goto out;
330 }
331 } else {
332 ret = wait_async_op(&tresult, crypto_ahash_init(req));
333 if (ret) {
334 pr_err("alt: hash: init failed on test %d "
335 "for %s: ret=%d\n", j, algo, -ret);
336 goto out;
337 }
338 ret = wait_async_op(&tresult, crypto_ahash_update(req));
339 if (ret) {
340 pr_err("alt: hash: update failed on test %d "
341 "for %s: ret=%d\n", j, algo, -ret);
342 goto out;
343 }
344 ret = wait_async_op(&tresult, crypto_ahash_final(req));
345 if (ret) {
346 pr_err("alt: hash: final failed on test %d "
347 "for %s: ret=%d\n", j, algo, -ret);
348 goto out;
349 }
350 }
351
352 if (memcmp(result, template[i].digest,
353 crypto_ahash_digestsize(tfm))) {
354 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
355 j, algo);
356 hexdump(result, crypto_ahash_digestsize(tfm));
357 ret = -EINVAL;
358 goto out;
359 }
360 }
361
362 j = 0;
363 for (i = 0; i < tcount; i++) {
364 /* alignment tests are only done with contiguous buffers */
365 if (align_offset != 0)
366 break;
367
368 if (!template[i].np)
369 continue;
370
371 j++;
372 memset(result, 0, MAX_DIGEST_SIZE);
373
374 temp = 0;
375 sg_init_table(sg, template[i].np);
376 ret = -EINVAL;
377 for (k = 0; k < template[i].np; k++) {
378 if (WARN_ON(offset_in_page(IDX[k]) +
379 template[i].tap[k] > PAGE_SIZE))
380 goto out;
381 sg_set_buf(&sg[k],
382 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
383 offset_in_page(IDX[k]),
384 template[i].plaintext + temp,
385 template[i].tap[k]),
386 template[i].tap[k]);
387 temp += template[i].tap[k];
388 }
389
390 if (template[i].ksize) {
391 if (template[i].ksize > MAX_KEYLEN) {
392 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
393 j, algo, template[i].ksize, MAX_KEYLEN);
394 ret = -EINVAL;
395 goto out;
396 }
397 crypto_ahash_clear_flags(tfm, ~0);
398 memcpy(key, template[i].key, template[i].ksize);
399 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
400
401 if (ret) {
402 printk(KERN_ERR "alg: hash: setkey "
403 "failed on chunking test %d "
404 "for %s: ret=%d\n", j, algo, -ret);
405 goto out;
406 }
407 }
408
409 ahash_request_set_crypt(req, sg, result, template[i].psize);
410 ret = crypto_ahash_digest(req);
411 switch (ret) {
412 case 0:
413 break;
414 case -EINPROGRESS:
415 case -EBUSY:
416 wait_for_completion(&tresult.completion);
417 reinit_completion(&tresult.completion);
418 ret = tresult.err;
419 if (!ret)
420 break;
421 /* fall through */
422 default:
423 printk(KERN_ERR "alg: hash: digest failed "
424 "on chunking test %d for %s: "
425 "ret=%d\n", j, algo, -ret);
426 goto out;
427 }
428
429 if (memcmp(result, template[i].digest,
430 crypto_ahash_digestsize(tfm))) {
431 printk(KERN_ERR "alg: hash: Chunking test %d "
432 "failed for %s\n", j, algo);
433 hexdump(result, crypto_ahash_digestsize(tfm));
434 ret = -EINVAL;
435 goto out;
436 }
437 }
438
439 /* partial update exercise */
440 j = 0;
441 for (i = 0; i < tcount; i++) {
442 /* alignment tests are only done with contiguous buffers */
443 if (align_offset != 0)
444 break;
445
446 if (template[i].np < 2)
447 continue;
448
449 j++;
450 memset(result, 0, MAX_DIGEST_SIZE);
451
452 ret = -EINVAL;
453 hash_buff = xbuf[0];
454 memcpy(hash_buff, template[i].plaintext,
455 template[i].tap[0]);
456 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
457
458 if (template[i].ksize) {
459 crypto_ahash_clear_flags(tfm, ~0);
460 if (template[i].ksize > MAX_KEYLEN) {
461 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
462 j, algo, template[i].ksize, MAX_KEYLEN);
463 ret = -EINVAL;
464 goto out;
465 }
466 memcpy(key, template[i].key, template[i].ksize);
467 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
468 if (ret) {
469 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
470 j, algo, -ret);
471 goto out;
472 }
473 }
474
475 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
476 ret = wait_async_op(&tresult, crypto_ahash_init(req));
477 if (ret) {
478 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
479 j, algo, -ret);
480 goto out;
481 }
482 ret = wait_async_op(&tresult, crypto_ahash_update(req));
483 if (ret) {
484 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
485 j, algo, -ret);
486 goto out;
487 }
488
489 temp = template[i].tap[0];
490 for (k = 1; k < template[i].np; k++) {
491 ret = ahash_partial_update(&req, tfm, &template[i],
492 hash_buff, k, temp, &sg[0], algo, result,
493 &tresult);
494 if (ret) {
495 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
496 j, algo, -ret);
497 goto out_noreq;
498 }
499 temp += template[i].tap[k];
500 }
501 ret = wait_async_op(&tresult, crypto_ahash_final(req));
502 if (ret) {
503 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
504 j, algo, -ret);
505 goto out;
506 }
507 if (memcmp(result, template[i].digest,
508 crypto_ahash_digestsize(tfm))) {
509 pr_err("alg: hash: Partial Test %d failed for %s\n",
510 j, algo);
511 hexdump(result, crypto_ahash_digestsize(tfm));
512 ret = -EINVAL;
513 goto out;
514 }
515 }
516
517 ret = 0;
518
519 out:
520 ahash_request_free(req);
521 out_noreq:
522 testmgr_free_buf(xbuf);
523 out_nobuf:
524 kfree(key);
525 kfree(result);
526 return ret;
527 }
528
529 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
530 unsigned int tcount, bool use_digest)
531 {
532 unsigned int alignmask;
533 int ret;
534
535 ret = __test_hash(tfm, template, tcount, use_digest, 0);
536 if (ret)
537 return ret;
538
539 /* test unaligned buffers, check with one byte offset */
540 ret = __test_hash(tfm, template, tcount, use_digest, 1);
541 if (ret)
542 return ret;
543
544 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
545 if (alignmask) {
546 /* Check if alignment mask for tfm is correctly set. */
547 ret = __test_hash(tfm, template, tcount, use_digest,
548 alignmask + 1);
549 if (ret)
550 return ret;
551 }
552
553 return 0;
554 }
555
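/*
 * Run the AEAD template vectors with contiguous buffers and, when
 * align_offset is zero, with scatter-gather chunking; diff_dst selects
 * separate destination buffers.
 */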
556 static int __test_aead(struct crypto_aead *tfm, int enc,
557 struct aead_testvec *template, unsigned int tcount,
558 const bool diff_dst, const int align_offset)
559 {
560 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
561 unsigned int i, j, k, n, temp;
562 int ret = -ENOMEM;
563 char *q;
564 char *key;
565 struct aead_request *req;
566 struct scatterlist *sg;
567 struct scatterlist *sgout;
568 const char *e, *d;
569 struct tcrypt_result result;
570 unsigned int authsize, iv_len;
571 void *input;
572 void *output;
573 void *assoc;
574 char *iv;
575 char *xbuf[XBUFSIZE];
576 char *xoutbuf[XBUFSIZE];
577 char *axbuf[XBUFSIZE];
578
579 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
580 if (!iv)
581 return ret;
582 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
583 if (!key)
584 goto out_noxbuf;
585 if (testmgr_alloc_buf(xbuf))
586 goto out_noxbuf;
587 if (testmgr_alloc_buf(axbuf))
588 goto out_noaxbuf;
589 if (diff_dst && testmgr_alloc_buf(xoutbuf))
590 goto out_nooutbuf;
591
592 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
593 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
594 if (!sg)
595 goto out_nosg;
596 sgout = &sg[16];
597
598 if (diff_dst)
599 d = "-ddst";
600 else
601 d = "";
602
603 if (enc == ENCRYPT)
604 e = "encryption";
605 else
606 e = "decryption";
607
608 init_completion(&result.completion);
609
610 req = aead_request_alloc(tfm, GFP_KERNEL);
611 if (!req) {
612 pr_err("alg: aead%s: Failed to allocate request for %s\n",
613 d, algo);
614 goto out;
615 }
616
617 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
618 tcrypt_complete, &result);
619
620 iv_len = crypto_aead_ivsize(tfm);
621
622 for (i = 0, j = 0; i < tcount; i++) {
623 if (template[i].np)
624 continue;
625
626 j++;
627
628 /* some templates have no input data but they will
629 * touch input
630 */
631 input = xbuf[0];
632 input += align_offset;
633 assoc = axbuf[0];
634
635 ret = -EINVAL;
636 if (WARN_ON(align_offset + template[i].ilen >
637 PAGE_SIZE || template[i].alen > PAGE_SIZE))
638 goto out;
639
640 memcpy(input, template[i].input, template[i].ilen);
641 memcpy(assoc, template[i].assoc, template[i].alen);
642 if (template[i].iv)
643 memcpy(iv, template[i].iv, iv_len);
644 else
645 memset(iv, 0, iv_len);
646
647 crypto_aead_clear_flags(tfm, ~0);
648 if (template[i].wk)
649 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
650
651 if (template[i].klen > MAX_KEYLEN) {
652 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
653 d, j, algo, template[i].klen,
654 MAX_KEYLEN);
655 ret = -EINVAL;
656 goto out;
657 }
658 memcpy(key, template[i].key, template[i].klen);
659
660 ret = crypto_aead_setkey(tfm, key, template[i].klen);
661 if (!ret == template[i].fail) {
662 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
663 d, j, algo, crypto_aead_get_flags(tfm));
664 goto out;
665 } else if (ret)
666 continue;
667
668 authsize = abs(template[i].rlen - template[i].ilen);
669 ret = crypto_aead_setauthsize(tfm, authsize);
670 if (ret) {
671 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
672 d, authsize, j, algo);
673 goto out;
674 }
675
676 k = !!template[i].alen;
677 sg_init_table(sg, k + 1);
678 sg_set_buf(&sg[0], assoc, template[i].alen);
679 sg_set_buf(&sg[k], input,
680 template[i].ilen + (enc ? authsize : 0));
681 output = input;
682
683 if (diff_dst) {
684 sg_init_table(sgout, k + 1);
685 sg_set_buf(&sgout[0], assoc, template[i].alen);
686
687 output = xoutbuf[0];
688 output += align_offset;
689 sg_set_buf(&sgout[k], output,
690 template[i].rlen + (enc ? 0 : authsize));
691 }
692
693 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
694 template[i].ilen, iv);
695
696 aead_request_set_ad(req, template[i].alen);
697
698 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
699
700 switch (ret) {
701 case 0:
702 if (template[i].novrfy) {
703 /* verification was supposed to fail */
704 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
705 d, e, j, algo);
706 /* so really, we got a bad message */
707 ret = -EBADMSG;
708 goto out;
709 }
710 break;
711 case -EINPROGRESS:
712 case -EBUSY:
713 wait_for_completion(&result.completion);
714 reinit_completion(&result.completion);
715 ret = result.err;
716 if (!ret)
717 break;
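/* fall through */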
718 case -EBADMSG:
719 if (template[i].novrfy)
720 /* verification failure was expected */
721 continue;
722 /* fall through */
723 default:
724 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
725 d, e, j, algo, -ret);
726 goto out;
727 }
728
729 q = output;
730 if (memcmp(q, template[i].result, template[i].rlen)) {
731 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
732 d, j, e, algo);
733 hexdump(q, template[i].rlen);
734 ret = -EINVAL;
735 goto out;
736 }
737 }
738
739 for (i = 0, j = 0; i < tcount; i++) {
740 /* alignment tests are only done with contiguous buffers */
741 if (align_offset != 0)
742 break;
743
744 if (!template[i].np)
745 continue;
746
747 j++;
748
749 if (template[i].iv)
750 memcpy(iv, template[i].iv, iv_len);
751 else
752 memset(iv, 0, MAX_IVLEN);
753
754 crypto_aead_clear_flags(tfm, ~0);
755 if (template[i].wk)
756 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
757 if (template[i].klen > MAX_KEYLEN) {
758 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
759 d, j, algo, template[i].klen, MAX_KEYLEN);
760 ret = -EINVAL;
761 goto out;
762 }
763 memcpy(key, template[i].key, template[i].klen);
764
765 ret = crypto_aead_setkey(tfm, key, template[i].klen);
766 if (!ret == template[i].fail) {
767 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
768 d, j, algo, crypto_aead_get_flags(tfm));
769 goto out;
770 } else if (ret)
771 continue;
772
773 authsize = abs(template[i].rlen - template[i].ilen);
774
775 ret = -EINVAL;
776 sg_init_table(sg, template[i].anp + template[i].np);
777 if (diff_dst)
778 sg_init_table(sgout, template[i].anp + template[i].np);
779
780 ret = -EINVAL;
781 for (k = 0, temp = 0; k < template[i].anp; k++) {
782 if (WARN_ON(offset_in_page(IDX[k]) +
783 template[i].atap[k] > PAGE_SIZE))
784 goto out;
785 sg_set_buf(&sg[k],
786 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
787 offset_in_page(IDX[k]),
788 template[i].assoc + temp,
789 template[i].atap[k]),
790 template[i].atap[k]);
791 if (diff_dst)
792 sg_set_buf(&sgout[k],
793 axbuf[IDX[k] >> PAGE_SHIFT] +
794 offset_in_page(IDX[k]),
795 template[i].atap[k]);
796 temp += template[i].atap[k];
797 }
798
799 for (k = 0, temp = 0; k < template[i].np; k++) {
800 if (WARN_ON(offset_in_page(IDX[k]) +
801 template[i].tap[k] > PAGE_SIZE))
802 goto out;
803
804 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
805 memcpy(q, template[i].input + temp, template[i].tap[k]);
806 sg_set_buf(&sg[template[i].anp + k],
807 q, template[i].tap[k]);
808
809 if (diff_dst) {
810 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
811 offset_in_page(IDX[k]);
812
813 memset(q, 0, template[i].tap[k]);
814
815 sg_set_buf(&sgout[template[i].anp + k],
816 q, template[i].tap[k]);
817 }
818
819 n = template[i].tap[k];
820 if (k == template[i].np - 1 && enc)
821 n += authsize;
822 if (offset_in_page(q) + n < PAGE_SIZE)
823 q[n] = 0;
824
825 temp += template[i].tap[k];
826 }
827
828 ret = crypto_aead_setauthsize(tfm, authsize);
829 if (ret) {
830 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
831 d, authsize, j, algo);
832 goto out;
833 }
834
835 if (enc) {
836 if (WARN_ON(sg[template[i].anp + k - 1].offset +
837 sg[template[i].anp + k - 1].length +
838 authsize > PAGE_SIZE)) {
839 ret = -EINVAL;
840 goto out;
841 }
842
843 if (diff_dst)
844 sgout[template[i].anp + k - 1].length +=
845 authsize;
846 sg[template[i].anp + k - 1].length += authsize;
847 }
848
849 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
850 template[i].ilen,
851 iv);
852
853 aead_request_set_ad(req, template[i].alen);
854
855 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
856
857 switch (ret) {
858 case 0:
859 if (template[i].novrfy) {
860 /* verification was supposed to fail */
861 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
862 d, e, j, algo);
863 /* so really, we got a bad message */
864 ret = -EBADMSG;
865 goto out;
866 }
867 break;
868 case -EINPROGRESS:
869 case -EBUSY:
870 wait_for_completion(&result.completion);
871 reinit_completion(&result.completion);
872 ret = result.err;
873 if (!ret)
874 break;
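/* fall through */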
875 case -EBADMSG:
876 if (template[i].novrfy)
877 /* verification failure was expected */
878 continue;
879 /* fall through */
880 default:
881 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
882 d, e, j, algo, -ret);
883 goto out;
884 }
885
886 ret = -EINVAL;
887 for (k = 0, temp = 0; k < template[i].np; k++) {
888 if (diff_dst)
889 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
890 offset_in_page(IDX[k]);
891 else
892 q = xbuf[IDX[k] >> PAGE_SHIFT] +
893 offset_in_page(IDX[k]);
894
895 n = template[i].tap[k];
896 if (k == template[i].np - 1)
897 n += enc ? authsize : -authsize;
898
899 if (memcmp(q, template[i].result + temp, n)) {
900 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
901 d, j, e, k, algo);
902 hexdump(q, n);
903 goto out;
904 }
905
906 q += n;
907 if (k == template[i].np - 1 && !enc) {
908 if (!diff_dst &&
909 memcmp(q, template[i].input +
910 temp + n, authsize))
911 n = authsize;
912 else
913 n = 0;
914 } else {
915 for (n = 0; offset_in_page(q + n) && q[n]; n++)
916 ;
917 }
918 if (n) {
919 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
920 d, j, e, k, algo, n);
921 hexdump(q, n);
922 goto out;
923 }
924
925 temp += template[i].tap[k];
926 }
927 }
928
929 ret = 0;
930
931 out:
932 aead_request_free(req);
933 kfree(sg);
934 out_nosg:
935 if (diff_dst)
936 testmgr_free_buf(xoutbuf);
937 out_nooutbuf:
938 testmgr_free_buf(axbuf);
939 out_noaxbuf:
940 testmgr_free_buf(xbuf);
941 out_noxbuf:
942 kfree(key);
943 kfree(iv);
944 return ret;
945 }
946
947 static int test_aead(struct crypto_aead *tfm, int enc,
948 struct aead_testvec *template, unsigned int tcount)
949 {
950 unsigned int alignmask;
951 int ret;
952
953 /* test 'dst == src' case */
954 ret = __test_aead(tfm, enc, template, tcount, false, 0);
955 if (ret)
956 return ret;
957
958 /* test 'dst != src' case */
959 ret = __test_aead(tfm, enc, template, tcount, true, 0);
960 if (ret)
961 return ret;
962
963 /* test unaligned buffers, check with one byte offset */
964 ret = __test_aead(tfm, enc, template, tcount, true, 1);
965 if (ret)
966 return ret;
967
968 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
969 if (alignmask) {
970 /* Check if alignment mask for tfm is correctly set. */
971 ret = __test_aead(tfm, enc, template, tcount, true,
972 alignmask + 1);
973 if (ret)
974 return ret;
975 }
976
977 return 0;
978 }
979
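/*
 * Exercise the synchronous single-block cipher interface directly,
 * encrypting or decrypting the template input one block at a time.
 */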
980 static int test_cipher(struct crypto_cipher *tfm, int enc,
981 struct cipher_testvec *template, unsigned int tcount)
982 {
983 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
984 unsigned int i, j, k;
985 char *q;
986 const char *e;
987 void *data;
988 char *xbuf[XBUFSIZE];
989 int ret = -ENOMEM;
990
991 if (testmgr_alloc_buf(xbuf))
992 goto out_nobuf;
993
994 if (enc == ENCRYPT)
995 e = "encryption";
996 else
997 e = "decryption";
998
999 j = 0;
1000 for (i = 0; i < tcount; i++) {
1001 if (template[i].np)
1002 continue;
1003
1004 j++;
1005
1006 ret = -EINVAL;
1007 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1008 goto out;
1009
1010 data = xbuf[0];
1011 memcpy(data, template[i].input, template[i].ilen);
1012
1013 crypto_cipher_clear_flags(tfm, ~0);
1014 if (template[i].wk)
1015 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1016
1017 ret = crypto_cipher_setkey(tfm, template[i].key,
1018 template[i].klen);
1019 if (!ret == template[i].fail) {
1020 printk(KERN_ERR "alg: cipher: setkey failed "
1021 "on test %d for %s: flags=%x\n", j,
1022 algo, crypto_cipher_get_flags(tfm));
1023 goto out;
1024 } else if (ret)
1025 continue;
1026
1027 for (k = 0; k < template[i].ilen;
1028 k += crypto_cipher_blocksize(tfm)) {
1029 if (enc)
1030 crypto_cipher_encrypt_one(tfm, data + k,
1031 data + k);
1032 else
1033 crypto_cipher_decrypt_one(tfm, data + k,
1034 data + k);
1035 }
1036
1037 q = data;
1038 if (memcmp(q, template[i].result, template[i].rlen)) {
1039 printk(KERN_ERR "alg: cipher: Test %d failed "
1040 "on %s for %s\n", j, e, algo);
1041 hexdump(q, template[i].rlen);
1042 ret = -EINVAL;
1043 goto out;
1044 }
1045 }
1046
1047 ret = 0;
1048
1049 out:
1050 testmgr_free_buf(xbuf);
1051 out_nobuf:
1052 return ret;
1053 }
1054
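/*
 * Run the skcipher template vectors with contiguous buffers and, when
 * align_offset is zero, with scatter-gather chunking; diff_dst selects
 * separate destination buffers.
 */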
1055 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1056 struct cipher_testvec *template, unsigned int tcount,
1057 const bool diff_dst, const int align_offset)
1058 {
1059 const char *algo =
1060 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1061 unsigned int i, j, k, n, temp;
1062 char *q;
1063 struct skcipher_request *req;
1064 struct scatterlist sg[8];
1065 struct scatterlist sgout[8];
1066 const char *e, *d;
1067 struct tcrypt_result result;
1068 void *data;
1069 char iv[MAX_IVLEN];
1070 char *xbuf[XBUFSIZE];
1071 char *xoutbuf[XBUFSIZE];
1072 int ret = -ENOMEM;
1073 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1074
1075 if (testmgr_alloc_buf(xbuf))
1076 goto out_nobuf;
1077
1078 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1079 goto out_nooutbuf;
1080
1081 if (diff_dst)
1082 d = "-ddst";
1083 else
1084 d = "";
1085
1086 if (enc == ENCRYPT)
1087 e = "encryption";
1088 else
1089 e = "decryption";
1090
1091 init_completion(&result.completion);
1092
1093 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1094 if (!req) {
1095 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1096 d, algo);
1097 goto out;
1098 }
1099
1100 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1101 tcrypt_complete, &result);
1102
1103 j = 0;
1104 for (i = 0; i < tcount; i++) {
1105 if (template[i].np && !template[i].also_non_np)
1106 continue;
1107
1108 if (template[i].iv)
1109 memcpy(iv, template[i].iv, ivsize);
1110 else
1111 memset(iv, 0, MAX_IVLEN);
1112
1113 j++;
1114 ret = -EINVAL;
1115 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1116 goto out;
1117
1118 data = xbuf[0];
1119 data += align_offset;
1120 memcpy(data, template[i].input, template[i].ilen);
1121
1122 crypto_skcipher_clear_flags(tfm, ~0);
1123 if (template[i].wk)
1124 crypto_skcipher_set_flags(tfm,
1125 CRYPTO_TFM_REQ_WEAK_KEY);
1126
1127 ret = crypto_skcipher_setkey(tfm, template[i].key,
1128 template[i].klen);
1129 if (!ret == template[i].fail) {
1130 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1131 d, j, algo, crypto_skcipher_get_flags(tfm));
1132 goto out;
1133 } else if (ret)
1134 continue;
1135
1136 sg_init_one(&sg[0], data, template[i].ilen);
1137 if (diff_dst) {
1138 data = xoutbuf[0];
1139 data += align_offset;
1140 sg_init_one(&sgout[0], data, template[i].ilen);
1141 }
1142
1143 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1144 template[i].ilen, iv);
1145 ret = enc ? crypto_skcipher_encrypt(req) :
1146 crypto_skcipher_decrypt(req);
1147
1148 switch (ret) {
1149 case 0:
1150 break;
1151 case -EINPROGRESS:
1152 case -EBUSY:
1153 wait_for_completion(&result.completion);
1154 reinit_completion(&result.completion);
1155 ret = result.err;
1156 if (!ret)
1157 break;
1158 /* fall through */
1159 default:
1160 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1161 d, e, j, algo, -ret);
1162 goto out;
1163 }
1164
1165 q = data;
1166 if (memcmp(q, template[i].result, template[i].rlen)) {
1167 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1168 d, j, e, algo);
1169 hexdump(q, template[i].rlen);
1170 ret = -EINVAL;
1171 goto out;
1172 }
1173
1174 if (template[i].iv_out &&
1175 memcmp(iv, template[i].iv_out,
1176 crypto_skcipher_ivsize(tfm))) {
1177 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1178 d, j, e, algo);
1179 hexdump(iv, crypto_skcipher_ivsize(tfm));
1180 ret = -EINVAL;
1181 goto out;
1182 }
1183 }
1184
1185 j = 0;
1186 for (i = 0; i < tcount; i++) {
1187 /* alignment tests are only done with contiguous buffers */
1188 if (align_offset != 0)
1189 break;
1190
1191 if (!template[i].np)
1192 continue;
1193
1194 if (template[i].iv)
1195 memcpy(iv, template[i].iv, ivsize);
1196 else
1197 memset(iv, 0, MAX_IVLEN);
1198
1199 j++;
1200 crypto_skcipher_clear_flags(tfm, ~0);
1201 if (template[i].wk)
1202 crypto_skcipher_set_flags(tfm,
1203 CRYPTO_TFM_REQ_WEAK_KEY);
1204
1205 ret = crypto_skcipher_setkey(tfm, template[i].key,
1206 template[i].klen);
1207 if (!ret == template[i].fail) {
1208 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1209 d, j, algo, crypto_skcipher_get_flags(tfm));
1210 goto out;
1211 } else if (ret)
1212 continue;
1213
1214 temp = 0;
1215 ret = -EINVAL;
1216 sg_init_table(sg, template[i].np);
1217 if (diff_dst)
1218 sg_init_table(sgout, template[i].np);
1219 for (k = 0; k < template[i].np; k++) {
1220 if (WARN_ON(offset_in_page(IDX[k]) +
1221 template[i].tap[k] > PAGE_SIZE))
1222 goto out;
1223
1224 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1225
1226 memcpy(q, template[i].input + temp, template[i].tap[k]);
1227
1228 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1229 q[template[i].tap[k]] = 0;
1230
1231 sg_set_buf(&sg[k], q, template[i].tap[k]);
1232 if (diff_dst) {
1233 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1234 offset_in_page(IDX[k]);
1235
1236 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1237
1238 memset(q, 0, template[i].tap[k]);
1239 if (offset_in_page(q) +
1240 template[i].tap[k] < PAGE_SIZE)
1241 q[template[i].tap[k]] = 0;
1242 }
1243
1244 temp += template[i].tap[k];
1245 }
1246
1247 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1248 template[i].ilen, iv);
1249
1250 ret = enc ? crypto_skcipher_encrypt(req) :
1251 crypto_skcipher_decrypt(req);
1252
1253 switch (ret) {
1254 case 0:
1255 break;
1256 case -EINPROGRESS:
1257 case -EBUSY:
1258 wait_for_completion(&result.completion);
1259 reinit_completion(&result.completion);
1260 ret = result.err;
1261 if (!ret)
1262 break;
1263 /* fall through */
1264 default:
1265 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1266 d, e, j, algo, -ret);
1267 goto out;
1268 }
1269
1270 temp = 0;
1271 ret = -EINVAL;
1272 for (k = 0; k < template[i].np; k++) {
1273 if (diff_dst)
1274 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1275 offset_in_page(IDX[k]);
1276 else
1277 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1278 offset_in_page(IDX[k]);
1279
1280 if (memcmp(q, template[i].result + temp,
1281 template[i].tap[k])) {
1282 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1283 d, j, e, k, algo);
1284 hexdump(q, template[i].tap[k]);
1285 goto out;
1286 }
1287
1288 q += template[i].tap[k];
1289 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1290 ;
1291 if (n) {
1292 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1293 d, j, e, k, algo, n);
1294 hexdump(q, n);
1295 goto out;
1296 }
1297 temp += template[i].tap[k];
1298 }
1299 }
1300
1301 ret = 0;
1302
1303 out:
1304 skcipher_request_free(req);
1305 if (diff_dst)
1306 testmgr_free_buf(xoutbuf);
1307 out_nooutbuf:
1308 testmgr_free_buf(xbuf);
1309 out_nobuf:
1310 return ret;
1311 }
1312
1313 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1314 struct cipher_testvec *template, unsigned int tcount)
1315 {
1316 unsigned int alignmask;
1317 int ret;
1318
1319 /* test 'dst == src' case */
1320 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1321 if (ret)
1322 return ret;
1323
1324 /* test 'dst != src' case */
1325 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1326 if (ret)
1327 return ret;
1328
1329 /* test unaligned buffers, check with one byte offset */
1330 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1331 if (ret)
1332 return ret;
1333
1334 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1335 if (alignmask) {
1336 /* Check if alignment mask for tfm is correctly set. */
1337 ret = __test_skcipher(tfm, enc, template, tcount, true,
1338 alignmask + 1);
1339 if (ret)
1340 return ret;
1341 }
1342
1343 return 0;
1344 }
1345
1346 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1347 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1348 {
1349 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1350 unsigned int i;
1351 char result[COMP_BUF_SIZE];
1352 int ret;
1353
1354 for (i = 0; i < ctcount; i++) {
1355 int ilen;
1356 unsigned int dlen = COMP_BUF_SIZE;
1357
1358 memset(result, 0, sizeof (result));
1359
1360 ilen = ctemplate[i].inlen;
1361 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1362 ilen, result, &dlen);
1363 if (ret) {
1364 printk(KERN_ERR "alg: comp: compression failed "
1365 "on test %d for %s: ret=%d\n", i + 1, algo,
1366 -ret);
1367 goto out;
1368 }
1369
1370 if (dlen != ctemplate[i].outlen) {
1371 printk(KERN_ERR "alg: comp: Compression test %d "
1372 "failed for %s: output len = %d\n", i + 1, algo,
1373 dlen);
1374 ret = -EINVAL;
1375 goto out;
1376 }
1377
1378 if (memcmp(result, ctemplate[i].output, dlen)) {
1379 printk(KERN_ERR "alg: comp: Compression test %d "
1380 "failed for %s\n", i + 1, algo);
1381 hexdump(result, dlen);
1382 ret = -EINVAL;
1383 goto out;
1384 }
1385 }
1386
1387 for (i = 0; i < dtcount; i++) {
1388 int ilen;
1389 unsigned int dlen = COMP_BUF_SIZE;
1390
1391 memset(result, 0, sizeof (result));
1392
1393 ilen = dtemplate[i].inlen;
1394 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1395 ilen, result, &dlen);
1396 if (ret) {
1397 printk(KERN_ERR "alg: comp: decompression failed "
1398 "on test %d for %s: ret=%d\n", i + 1, algo,
1399 -ret);
1400 goto out;
1401 }
1402
1403 if (dlen != dtemplate[i].outlen) {
1404 printk(KERN_ERR "alg: comp: Decompression test %d "
1405 "failed for %s: output len = %d\n", i + 1, algo,
1406 dlen);
1407 ret = -EINVAL;
1408 goto out;
1409 }
1410
1411 if (memcmp(result, dtemplate[i].output, dlen)) {
1412 printk(KERN_ERR "alg: comp: Decompression test %d "
1413 "failed for %s\n", i + 1, algo);
1414 hexdump(result, dlen);
1415 ret = -EINVAL;
1416 goto out;
1417 }
1418 }
1419
1420 ret = 0;
1421
1422 out:
1423 return ret;
1424 }
1425
1426 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1427 unsigned int tcount)
1428 {
1429 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1430 int err = 0, i, j, seedsize;
1431 u8 *seed;
1432 char result[32];
1433
1434 seedsize = crypto_rng_seedsize(tfm);
1435
1436 seed = kmalloc(seedsize, GFP_KERNEL);
1437 if (!seed) {
1438 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1439 "for %s\n", algo);
1440 return -ENOMEM;
1441 }
1442
1443 for (i = 0; i < tcount; i++) {
1444 memset(result, 0, 32);
1445
1446 memcpy(seed, template[i].v, template[i].vlen);
1447 memcpy(seed + template[i].vlen, template[i].key,
1448 template[i].klen);
1449 memcpy(seed + template[i].vlen + template[i].klen,
1450 template[i].dt, template[i].dtlen);
1451
1452 err = crypto_rng_reset(tfm, seed, seedsize);
1453 if (err) {
1454 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1455 "for %s\n", algo);
1456 goto out;
1457 }
1458
1459 for (j = 0; j < template[i].loops; j++) {
1460 err = crypto_rng_get_bytes(tfm, result,
1461 template[i].rlen);
1462 if (err < 0) {
1463 printk(KERN_ERR "alg: cprng: Failed to obtain "
1464 "the correct amount of random data for "
1465 "%s (requested %d)\n", algo,
1466 template[i].rlen);
1467 goto out;
1468 }
1469 }
1470
1471 err = memcmp(result, template[i].result,
1472 template[i].rlen);
1473 if (err) {
1474 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1475 i, algo);
1476 hexdump(result, template[i].rlen);
1477 err = -EINVAL;
1478 goto out;
1479 }
1480 }
1481
1482 out:
1483 kfree(seed);
1484 return err;
1485 }
1486
1487 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1488 u32 type, u32 mask)
1489 {
1490 struct crypto_aead *tfm;
1491 int err = 0;
1492
1493 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1494 if (IS_ERR(tfm)) {
1495 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1496 "%ld\n", driver, PTR_ERR(tfm));
1497 return PTR_ERR(tfm);
1498 }
1499
1500 if (desc->suite.aead.enc.vecs) {
1501 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1502 desc->suite.aead.enc.count);
1503 if (err)
1504 goto out;
1505 }
1506
1507 if (!err && desc->suite.aead.dec.vecs)
1508 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1509 desc->suite.aead.dec.count);
1510
1511 out:
1512 crypto_free_aead(tfm);
1513 return err;
1514 }
1515
1516 static int alg_test_cipher(const struct alg_test_desc *desc,
1517 const char *driver, u32 type, u32 mask)
1518 {
1519 struct crypto_cipher *tfm;
1520 int err = 0;
1521
1522 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1523 if (IS_ERR(tfm)) {
1524 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1525 "%s: %ld\n", driver, PTR_ERR(tfm));
1526 return PTR_ERR(tfm);
1527 }
1528
1529 if (desc->suite.cipher.enc.vecs) {
1530 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1531 desc->suite.cipher.enc.count);
1532 if (err)
1533 goto out;
1534 }
1535
1536 if (desc->suite.cipher.dec.vecs)
1537 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1538 desc->suite.cipher.dec.count);
1539
1540 out:
1541 crypto_free_cipher(tfm);
1542 return err;
1543 }
1544
1545 static int alg_test_skcipher(const struct alg_test_desc *desc,
1546 const char *driver, u32 type, u32 mask)
1547 {
1548 struct crypto_skcipher *tfm;
1549 int err = 0;
1550
1551 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1552 if (IS_ERR(tfm)) {
1553 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1554 "%s: %ld\n", driver, PTR_ERR(tfm));
1555 return PTR_ERR(tfm);
1556 }
1557
1558 if (desc->suite.cipher.enc.vecs) {
1559 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1560 desc->suite.cipher.enc.count);
1561 if (err)
1562 goto out;
1563 }
1564
1565 if (desc->suite.cipher.dec.vecs)
1566 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1567 desc->suite.cipher.dec.count);
1568
1569 out:
1570 crypto_free_skcipher(tfm);
1571 return err;
1572 }
1573
1574 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1575 u32 type, u32 mask)
1576 {
1577 struct crypto_comp *tfm;
1578 int err;
1579
1580 tfm = crypto_alloc_comp(driver, type, mask);
1581 if (IS_ERR(tfm)) {
1582 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1583 "%ld\n", driver, PTR_ERR(tfm));
1584 return PTR_ERR(tfm);
1585 }
1586
1587 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1588 desc->suite.comp.decomp.vecs,
1589 desc->suite.comp.comp.count,
1590 desc->suite.comp.decomp.count);
1591
1592 crypto_free_comp(tfm);
1593 return err;
1594 }
1595
1596 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1597 u32 type, u32 mask)
1598 {
1599 struct crypto_ahash *tfm;
1600 int err;
1601
1602 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1603 if (IS_ERR(tfm)) {
1604 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1605 "%ld\n", driver, PTR_ERR(tfm));
1606 return PTR_ERR(tfm);
1607 }
1608
1609 err = test_hash(tfm, desc->suite.hash.vecs,
1610 desc->suite.hash.count, true);
1611 if (!err)
1612 err = test_hash(tfm, desc->suite.hash.vecs,
1613 desc->suite.hash.count, false);
1614
1615 crypto_free_ahash(tfm);
1616 return err;
1617 }
1618
1619 static int alg_test_crc32c(const struct alg_test_desc *desc,
1620 const char *driver, u32 type, u32 mask)
1621 {
1622 struct crypto_shash *tfm;
1623 u32 val;
1624 int err;
1625
1626 err = alg_test_hash(desc, driver, type, mask);
1627 if (err)
1628 goto out;
1629
1630 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1631 if (IS_ERR(tfm)) {
1632 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1633 "%ld\n", driver, PTR_ERR(tfm));
1634 err = PTR_ERR(tfm);
1635 goto out;
1636 }
1637
1638 do {
1639 SHASH_DESC_ON_STACK(shash, tfm);
1640 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1641
1642 shash->tfm = tfm;
1643 shash->flags = 0;
1644
1645 *ctx = le32_to_cpu(420553207);
1646 err = crypto_shash_final(shash, (u8 *)&val);
1647 if (err) {
1648 printk(KERN_ERR "alg: crc32c: Operation failed for "
1649 "%s: %d\n", driver, err);
1650 break;
1651 }
1652
1653 if (val != ~420553207) {
1654 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1655 "%d\n", driver, val);
1656 err = -EINVAL;
1657 }
1658 } while (0);
1659
1660 crypto_free_shash(tfm);
1661
1662 out:
1663 return err;
1664 }
1665
1666 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1667 u32 type, u32 mask)
1668 {
1669 struct crypto_rng *rng;
1670 int err;
1671
1672 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1673 if (IS_ERR(rng)) {
1674 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1675 "%ld\n", driver, PTR_ERR(rng));
1676 return PTR_ERR(rng);
1677 }
1678
1679 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1680
1681 crypto_free_rng(rng);
1682
1683 return err;
1684 }
1685
1686
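/*
 * Run one CAVS-style DRBG test vector: seed the DRNG with the test
 * entropy, generate twice (with prediction resistance if pr is set) and
 * compare the final output block against the expected data.
 */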
1687 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1688 const char *driver, u32 type, u32 mask)
1689 {
1690 int ret = -EAGAIN;
1691 struct crypto_rng *drng;
1692 struct drbg_test_data test_data;
1693 struct drbg_string addtl, pers, testentropy;
1694 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1695
1696 if (!buf)
1697 return -ENOMEM;
1698
1699 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1700 if (IS_ERR(drng)) {
1701 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1702 "%s\n", driver);
1703 kzfree(buf);
1704 return -ENOMEM;
1705 }
1706
1707 test_data.testentropy = &testentropy;
1708 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1709 drbg_string_fill(&pers, test->pers, test->perslen);
1710 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1711 if (ret) {
1712 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1713 goto outbuf;
1714 }
1715
1716 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1717 if (pr) {
1718 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1719 ret = crypto_drbg_get_bytes_addtl_test(drng,
1720 buf, test->expectedlen, &addtl, &test_data);
1721 } else {
1722 ret = crypto_drbg_get_bytes_addtl(drng,
1723 buf, test->expectedlen, &addtl);
1724 }
1725 if (ret < 0) {
1726 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1727 "driver %s\n", driver);
1728 goto outbuf;
1729 }
1730
1731 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1732 if (pr) {
1733 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1734 ret = crypto_drbg_get_bytes_addtl_test(drng,
1735 buf, test->expectedlen, &addtl, &test_data);
1736 } else {
1737 ret = crypto_drbg_get_bytes_addtl(drng,
1738 buf, test->expectedlen, &addtl);
1739 }
1740 if (ret < 0) {
1741 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1742 "driver %s\n", driver);
1743 goto outbuf;
1744 }
1745
1746 ret = memcmp(test->expected, buf, test->expectedlen);
1747
1748 outbuf:
1749 crypto_free_rng(drng);
1750 kzfree(buf);
1751 return ret;
1752 }
1753
1754
1755 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1756 u32 type, u32 mask)
1757 {
1758 int err = 0;
1759 int pr = 0;
1760 int i = 0;
1761 struct drbg_testvec *template = desc->suite.drbg.vecs;
1762 unsigned int tcount = desc->suite.drbg.count;
1763
1764 if (0 == memcmp(driver, "drbg_pr_", 8))
1765 pr = 1;
1766
1767 for (i = 0; i < tcount; i++) {
1768 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1769 if (err) {
1770 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1771 i, driver);
1772 err = -EINVAL;
1773 break;
1774 }
1775 }
1776 return err;
1777
1778 }
1779
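/*
 * Encrypt the test message and compare against the expected ciphertext;
 * for private-key vectors, also decrypt the ciphertext and compare with
 * the original message.
 */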
1780 static int do_test_rsa(struct crypto_akcipher *tfm,
1781 struct akcipher_testvec *vecs)
1782 {
1783 struct akcipher_request *req;
1784 void *outbuf_enc = NULL;
1785 void *outbuf_dec = NULL;
1786 struct tcrypt_result result;
1787 unsigned int out_len_max, out_len = 0;
1788 int err = -ENOMEM;
1789 struct scatterlist src, dst, src_tab[2];
1790
1791 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1792 if (!req)
1793 return err;
1794
1795 init_completion(&result.completion);
1796
1797 if (vecs->public_key_vec)
1798 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1799 vecs->key_len);
1800 else
1801 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1802 vecs->key_len);
1803 if (err)
1804 goto free_req;
1805
1806 out_len_max = crypto_akcipher_maxsize(tfm);
1807 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1808 if (!outbuf_enc)
1809 goto free_req;
1810
1811 sg_init_table(src_tab, 2);
1812 sg_set_buf(&src_tab[0], vecs->m, 8);
1813 sg_set_buf(&src_tab[1], vecs->m + 8, vecs->m_size - 8);
1814 sg_init_one(&dst, outbuf_enc, out_len_max);
1815 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1816 out_len_max);
1817 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1818 tcrypt_complete, &result);
1819
1820 /* Run RSA encrypt - c = m^e mod n;*/
1821 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1822 if (err) {
1823 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1824 goto free_all;
1825 }
1826 if (req->dst_len != vecs->c_size) {
1827 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1828 err = -EINVAL;
1829 goto free_all;
1830 }
1831 /* verify that encrypted message is equal to expected */
1832 if (memcmp(vecs->c, sg_virt(req->dst), vecs->c_size)) {
1833 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1834 err = -EINVAL;
1835 goto free_all;
1836 }
1837 /* Don't invoke decrypt for vectors with public key */
1838 if (vecs->public_key_vec) {
1839 err = 0;
1840 goto free_all;
1841 }
1842 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1843 if (!outbuf_dec) {
1844 err = -ENOMEM;
1845 goto free_all;
1846 }
1847 sg_init_one(&src, vecs->c, vecs->c_size);
1848 sg_init_one(&dst, outbuf_dec, out_len_max);
1849 init_completion(&result.completion);
1850 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
1851
1852 /* Run RSA decrypt - m = c^d mod n;*/
1853 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1854 if (err) {
1855 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1856 goto free_all;
1857 }
1858 out_len = req->dst_len;
1859 if (out_len != vecs->m_size) {
1860 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1861 err = -EINVAL;
1862 goto free_all;
1863 }
1864 /* verify that decrypted message is equal to the original msg */
1865 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1866 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1867 err = -EINVAL;
1868 }
1869 free_all:
1870 kfree(outbuf_dec);
1871 kfree(outbuf_enc);
1872 free_req:
1873 akcipher_request_free(req);
1874 return err;
1875 }
1876
1877 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1878 unsigned int tcount)
1879 {
1880 int ret, i;
1881
1882 for (i = 0; i < tcount; i++) {
1883 ret = do_test_rsa(tfm, vecs++);
1884 if (ret) {
1885 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1886 i + 1, ret);
1887 return ret;
1888 }
1889 }
1890 return 0;
1891 }
1892
1893 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1894 struct akcipher_testvec *vecs, unsigned int tcount)
1895 {
1896 if (strncmp(alg, "rsa", 3) == 0)
1897 return test_rsa(tfm, vecs, tcount);
1898
1899 return 0;
1900 }
1901
1902 static int alg_test_akcipher(const struct alg_test_desc *desc,
1903 const char *driver, u32 type, u32 mask)
1904 {
1905 struct crypto_akcipher *tfm;
1906 int err = 0;
1907
1908 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1909 if (IS_ERR(tfm)) {
1910 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1911 driver, PTR_ERR(tfm));
1912 return PTR_ERR(tfm);
1913 }
1914 if (desc->suite.akcipher.vecs)
1915 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
1916 desc->suite.akcipher.count);
1917
1918 crypto_free_akcipher(tfm);
1919 return err;
1920 }
1921
1922 static int alg_test_null(const struct alg_test_desc *desc,
1923 const char *driver, u32 type, u32 mask)
1924 {
1925 return 0;
1926 }
1927
1928 /* Please keep this list sorted by algorithm name. */
1929 static const struct alg_test_desc alg_test_descs[] = {
1930 {
1931 .alg = "__cbc-cast5-avx",
1932 .test = alg_test_null,
1933 }, {
1934 .alg = "__cbc-cast6-avx",
1935 .test = alg_test_null,
1936 }, {
1937 .alg = "__cbc-serpent-avx",
1938 .test = alg_test_null,
1939 }, {
1940 .alg = "__cbc-serpent-avx2",
1941 .test = alg_test_null,
1942 }, {
1943 .alg = "__cbc-serpent-sse2",
1944 .test = alg_test_null,
1945 }, {
1946 .alg = "__cbc-twofish-avx",
1947 .test = alg_test_null,
1948 }, {
1949 .alg = "__driver-cbc-aes-aesni",
1950 .test = alg_test_null,
1951 .fips_allowed = 1,
1952 }, {
1953 .alg = "__driver-cbc-camellia-aesni",
1954 .test = alg_test_null,
1955 }, {
1956 .alg = "__driver-cbc-camellia-aesni-avx2",
1957 .test = alg_test_null,
1958 }, {
1959 .alg = "__driver-cbc-cast5-avx",
1960 .test = alg_test_null,
1961 }, {
1962 .alg = "__driver-cbc-cast6-avx",
1963 .test = alg_test_null,
1964 }, {
1965 .alg = "__driver-cbc-serpent-avx",
1966 .test = alg_test_null,
1967 }, {
1968 .alg = "__driver-cbc-serpent-avx2",
1969 .test = alg_test_null,
1970 }, {
1971 .alg = "__driver-cbc-serpent-sse2",
1972 .test = alg_test_null,
1973 }, {
1974 .alg = "__driver-cbc-twofish-avx",
1975 .test = alg_test_null,
1976 }, {
1977 .alg = "__driver-ecb-aes-aesni",
1978 .test = alg_test_null,
1979 .fips_allowed = 1,
1980 }, {
1981 .alg = "__driver-ecb-camellia-aesni",
1982 .test = alg_test_null,
1983 }, {
1984 .alg = "__driver-ecb-camellia-aesni-avx2",
1985 .test = alg_test_null,
1986 }, {
1987 .alg = "__driver-ecb-cast5-avx",
1988 .test = alg_test_null,
1989 }, {
1990 .alg = "__driver-ecb-cast6-avx",
1991 .test = alg_test_null,
1992 }, {
1993 .alg = "__driver-ecb-serpent-avx",
1994 .test = alg_test_null,
1995 }, {
1996 .alg = "__driver-ecb-serpent-avx2",
1997 .test = alg_test_null,
1998 }, {
1999 .alg = "__driver-ecb-serpent-sse2",
2000 .test = alg_test_null,
2001 }, {
2002 .alg = "__driver-ecb-twofish-avx",
2003 .test = alg_test_null,
2004 }, {
2005 .alg = "__driver-gcm-aes-aesni",
2006 .test = alg_test_null,
2007 .fips_allowed = 1,
2008 }, {
2009 .alg = "__ghash-pclmulqdqni",
2010 .test = alg_test_null,
2011 .fips_allowed = 1,
2012 }, {
2013 .alg = "ansi_cprng",
2014 .test = alg_test_cprng,
2015 .suite = {
2016 .cprng = {
2017 .vecs = ansi_cprng_aes_tv_template,
2018 .count = ANSI_CPRNG_AES_TEST_VECTORS
2019 }
2020 }
2021 }, {
2022 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2023 .test = alg_test_aead,
2024 .suite = {
2025 .aead = {
2026 .enc = {
2027 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2028 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2029 },
2030 .dec = {
2031 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2032 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2033 }
2034 }
2035 }
2036 }, {
2037 .alg = "authenc(hmac(sha1),cbc(aes))",
2038 .test = alg_test_aead,
2039 .suite = {
2040 .aead = {
2041 .enc = {
2042 .vecs =
2043 hmac_sha1_aes_cbc_enc_tv_temp,
2044 .count =
2045 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2046 }
2047 }
2048 }
2049 }, {
2050 .alg = "authenc(hmac(sha1),cbc(des))",
2051 .test = alg_test_aead,
2052 .suite = {
2053 .aead = {
2054 .enc = {
2055 .vecs =
2056 hmac_sha1_des_cbc_enc_tv_temp,
2057 .count =
2058 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2059 }
2060 }
2061 }
2062 }, {
2063 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2064 .test = alg_test_aead,
2065 .fips_allowed = 1,
2066 .suite = {
2067 .aead = {
2068 .enc = {
2069 .vecs =
2070 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2071 .count =
2072 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2073 }
2074 }
2075 }
2076 }, {
2077 .alg = "authenc(hmac(sha1),ctr(aes))",
2078 .test = alg_test_null,
2079 .fips_allowed = 1,
2080 }, {
2081 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2082 .test = alg_test_aead,
2083 .suite = {
2084 .aead = {
2085 .enc = {
2086 .vecs =
2087 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2088 .count =
2089 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2090 },
2091 .dec = {
2092 .vecs =
2093 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2094 .count =
2095 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2096 }
2097 }
2098 }
2099 }, {
2100 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2101 .test = alg_test_null,
2102 .fips_allowed = 1,
2103 }, {
2104 .alg = "authenc(hmac(sha224),cbc(des))",
2105 .test = alg_test_aead,
2106 .suite = {
2107 .aead = {
2108 .enc = {
2109 .vecs =
2110 hmac_sha224_des_cbc_enc_tv_temp,
2111 .count =
2112 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2113 }
2114 }
2115 }
2116 }, {
2117 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2118 .test = alg_test_aead,
2119 .fips_allowed = 1,
2120 .suite = {
2121 .aead = {
2122 .enc = {
2123 .vecs =
2124 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2125 .count =
2126 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2127 }
2128 }
2129 }
2130 }, {
2131 .alg = "authenc(hmac(sha256),cbc(aes))",
2132 .test = alg_test_aead,
2133 .fips_allowed = 1,
2134 .suite = {
2135 .aead = {
2136 .enc = {
2137 .vecs =
2138 hmac_sha256_aes_cbc_enc_tv_temp,
2139 .count =
2140 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2141 }
2142 }
2143 }
2144 }, {
2145 .alg = "authenc(hmac(sha256),cbc(des))",
2146 .test = alg_test_aead,
2147 .suite = {
2148 .aead = {
2149 .enc = {
2150 .vecs =
2151 hmac_sha256_des_cbc_enc_tv_temp,
2152 .count =
2153 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2154 }
2155 }
2156 }
2157 }, {
2158 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2159 .test = alg_test_aead,
2160 .fips_allowed = 1,
2161 .suite = {
2162 .aead = {
2163 .enc = {
2164 .vecs =
2165 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2166 .count =
2167 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2168 }
2169 }
2170 }
2171 }, {
2172 .alg = "authenc(hmac(sha256),ctr(aes))",
2173 .test = alg_test_null,
2174 .fips_allowed = 1,
2175 }, {
2176 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2177 .test = alg_test_null,
2178 .fips_allowed = 1,
2179 }, {
2180 .alg = "authenc(hmac(sha384),cbc(des))",
2181 .test = alg_test_aead,
2182 .suite = {
2183 .aead = {
2184 .enc = {
2185 .vecs =
2186 hmac_sha384_des_cbc_enc_tv_temp,
2187 .count =
2188 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2189 }
2190 }
2191 }
2192 }, {
2193 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2194 .test = alg_test_aead,
2195 .fips_allowed = 1,
2196 .suite = {
2197 .aead = {
2198 .enc = {
2199 .vecs =
2200 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2201 .count =
2202 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2203 }
2204 }
2205 }
2206 }, {
2207 .alg = "authenc(hmac(sha384),ctr(aes))",
2208 .test = alg_test_null,
2209 .fips_allowed = 1,
2210 }, {
2211 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2212 .test = alg_test_null,
2213 .fips_allowed = 1,
2214 }, {
2215 .alg = "authenc(hmac(sha512),cbc(aes))",
2216 .fips_allowed = 1,
2217 .test = alg_test_aead,
2218 .suite = {
2219 .aead = {
2220 .enc = {
2221 .vecs =
2222 hmac_sha512_aes_cbc_enc_tv_temp,
2223 .count =
2224 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2225 }
2226 }
2227 }
2228 }, {
2229 .alg = "authenc(hmac(sha512),cbc(des))",
2230 .test = alg_test_aead,
2231 .suite = {
2232 .aead = {
2233 .enc = {
2234 .vecs =
2235 hmac_sha512_des_cbc_enc_tv_temp,
2236 .count =
2237 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2238 }
2239 }
2240 }
2241 }, {
2242 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2243 .test = alg_test_aead,
2244 .fips_allowed = 1,
2245 .suite = {
2246 .aead = {
2247 .enc = {
2248 .vecs =
2249 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2250 .count =
2251 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2252 }
2253 }
2254 }
2255 }, {
2256 .alg = "authenc(hmac(sha512),ctr(aes))",
2257 .test = alg_test_null,
2258 .fips_allowed = 1,
2259 }, {
2260 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2261 .test = alg_test_null,
2262 .fips_allowed = 1,
2263 }, {
2264 .alg = "cbc(aes)",
2265 .test = alg_test_skcipher,
2266 .fips_allowed = 1,
2267 .suite = {
2268 .cipher = {
2269 .enc = {
2270 .vecs = aes_cbc_enc_tv_template,
2271 .count = AES_CBC_ENC_TEST_VECTORS
2272 },
2273 .dec = {
2274 .vecs = aes_cbc_dec_tv_template,
2275 .count = AES_CBC_DEC_TEST_VECTORS
2276 }
2277 }
2278 }
2279 }, {
2280 .alg = "cbc(anubis)",
2281 .test = alg_test_skcipher,
2282 .suite = {
2283 .cipher = {
2284 .enc = {
2285 .vecs = anubis_cbc_enc_tv_template,
2286 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2287 },
2288 .dec = {
2289 .vecs = anubis_cbc_dec_tv_template,
2290 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2291 }
2292 }
2293 }
2294 }, {
2295 .alg = "cbc(blowfish)",
2296 .test = alg_test_skcipher,
2297 .suite = {
2298 .cipher = {
2299 .enc = {
2300 .vecs = bf_cbc_enc_tv_template,
2301 .count = BF_CBC_ENC_TEST_VECTORS
2302 },
2303 .dec = {
2304 .vecs = bf_cbc_dec_tv_template,
2305 .count = BF_CBC_DEC_TEST_VECTORS
2306 }
2307 }
2308 }
2309 }, {
2310 .alg = "cbc(camellia)",
2311 .test = alg_test_skcipher,
2312 .suite = {
2313 .cipher = {
2314 .enc = {
2315 .vecs = camellia_cbc_enc_tv_template,
2316 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2317 },
2318 .dec = {
2319 .vecs = camellia_cbc_dec_tv_template,
2320 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2321 }
2322 }
2323 }
2324 }, {
2325 .alg = "cbc(cast5)",
2326 .test = alg_test_skcipher,
2327 .suite = {
2328 .cipher = {
2329 .enc = {
2330 .vecs = cast5_cbc_enc_tv_template,
2331 .count = CAST5_CBC_ENC_TEST_VECTORS
2332 },
2333 .dec = {
2334 .vecs = cast5_cbc_dec_tv_template,
2335 .count = CAST5_CBC_DEC_TEST_VECTORS
2336 }
2337 }
2338 }
2339 }, {
2340 .alg = "cbc(cast6)",
2341 .test = alg_test_skcipher,
2342 .suite = {
2343 .cipher = {
2344 .enc = {
2345 .vecs = cast6_cbc_enc_tv_template,
2346 .count = CAST6_CBC_ENC_TEST_VECTORS
2347 },
2348 .dec = {
2349 .vecs = cast6_cbc_dec_tv_template,
2350 .count = CAST6_CBC_DEC_TEST_VECTORS
2351 }
2352 }
2353 }
2354 }, {
2355 .alg = "cbc(des)",
2356 .test = alg_test_skcipher,
2357 .suite = {
2358 .cipher = {
2359 .enc = {
2360 .vecs = des_cbc_enc_tv_template,
2361 .count = DES_CBC_ENC_TEST_VECTORS
2362 },
2363 .dec = {
2364 .vecs = des_cbc_dec_tv_template,
2365 .count = DES_CBC_DEC_TEST_VECTORS
2366 }
2367 }
2368 }
2369 }, {
2370 .alg = "cbc(des3_ede)",
2371 .test = alg_test_skcipher,
2372 .fips_allowed = 1,
2373 .suite = {
2374 .cipher = {
2375 .enc = {
2376 .vecs = des3_ede_cbc_enc_tv_template,
2377 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2378 },
2379 .dec = {
2380 .vecs = des3_ede_cbc_dec_tv_template,
2381 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2382 }
2383 }
2384 }
2385 }, {
2386 .alg = "cbc(serpent)",
2387 .test = alg_test_skcipher,
2388 .suite = {
2389 .cipher = {
2390 .enc = {
2391 .vecs = serpent_cbc_enc_tv_template,
2392 .count = SERPENT_CBC_ENC_TEST_VECTORS
2393 },
2394 .dec = {
2395 .vecs = serpent_cbc_dec_tv_template,
2396 .count = SERPENT_CBC_DEC_TEST_VECTORS
2397 }
2398 }
2399 }
2400 }, {
2401 .alg = "cbc(twofish)",
2402 .test = alg_test_skcipher,
2403 .suite = {
2404 .cipher = {
2405 .enc = {
2406 .vecs = tf_cbc_enc_tv_template,
2407 .count = TF_CBC_ENC_TEST_VECTORS
2408 },
2409 .dec = {
2410 .vecs = tf_cbc_dec_tv_template,
2411 .count = TF_CBC_DEC_TEST_VECTORS
2412 }
2413 }
2414 }
2415 }, {
2416 .alg = "ccm(aes)",
2417 .test = alg_test_aead,
2418 .fips_allowed = 1,
2419 .suite = {
2420 .aead = {
2421 .enc = {
2422 .vecs = aes_ccm_enc_tv_template,
2423 .count = AES_CCM_ENC_TEST_VECTORS
2424 },
2425 .dec = {
2426 .vecs = aes_ccm_dec_tv_template,
2427 .count = AES_CCM_DEC_TEST_VECTORS
2428 }
2429 }
2430 }
2431 }, {
2432 .alg = "chacha20",
2433 .test = alg_test_skcipher,
2434 .suite = {
2435 .cipher = {
2436 .enc = {
2437 .vecs = chacha20_enc_tv_template,
2438 .count = CHACHA20_ENC_TEST_VECTORS
2439 },
2440 .dec = {
2441 .vecs = chacha20_enc_tv_template,
2442 .count = CHACHA20_ENC_TEST_VECTORS
2443 },
2444 }
2445 }
2446 }, {
2447 .alg = "cmac(aes)",
2448 .fips_allowed = 1,
2449 .test = alg_test_hash,
2450 .suite = {
2451 .hash = {
2452 .vecs = aes_cmac128_tv_template,
2453 .count = CMAC_AES_TEST_VECTORS
2454 }
2455 }
2456 }, {
2457 .alg = "cmac(des3_ede)",
2458 .fips_allowed = 1,
2459 .test = alg_test_hash,
2460 .suite = {
2461 .hash = {
2462 .vecs = des3_ede_cmac64_tv_template,
2463 .count = CMAC_DES3_EDE_TEST_VECTORS
2464 }
2465 }
2466 }, {
2467 .alg = "compress_null",
2468 .test = alg_test_null,
2469 }, {
2470 .alg = "crc32",
2471 .test = alg_test_hash,
2472 .suite = {
2473 .hash = {
2474 .vecs = crc32_tv_template,
2475 .count = CRC32_TEST_VECTORS
2476 }
2477 }
2478 }, {
2479 .alg = "crc32c",
2480 .test = alg_test_crc32c,
2481 .fips_allowed = 1,
2482 .suite = {
2483 .hash = {
2484 .vecs = crc32c_tv_template,
2485 .count = CRC32C_TEST_VECTORS
2486 }
2487 }
2488 }, {
2489 .alg = "crct10dif",
2490 .test = alg_test_hash,
2491 .fips_allowed = 1,
2492 .suite = {
2493 .hash = {
2494 .vecs = crct10dif_tv_template,
2495 .count = CRCT10DIF_TEST_VECTORS
2496 }
2497 }
2498 }, {
2499 .alg = "cryptd(__driver-cbc-aes-aesni)",
2500 .test = alg_test_null,
2501 .fips_allowed = 1,
2502 }, {
2503 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2504 .test = alg_test_null,
2505 }, {
2506 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2507 .test = alg_test_null,
2508 }, {
2509 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2510 .test = alg_test_null,
2511 }, {
2512 .alg = "cryptd(__driver-ecb-aes-aesni)",
2513 .test = alg_test_null,
2514 .fips_allowed = 1,
2515 }, {
2516 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2517 .test = alg_test_null,
2518 }, {
2519 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2520 .test = alg_test_null,
2521 }, {
2522 .alg = "cryptd(__driver-ecb-cast5-avx)",
2523 .test = alg_test_null,
2524 }, {
2525 .alg = "cryptd(__driver-ecb-cast6-avx)",
2526 .test = alg_test_null,
2527 }, {
2528 .alg = "cryptd(__driver-ecb-serpent-avx)",
2529 .test = alg_test_null,
2530 }, {
2531 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2532 .test = alg_test_null,
2533 }, {
2534 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2535 .test = alg_test_null,
2536 }, {
2537 .alg = "cryptd(__driver-ecb-twofish-avx)",
2538 .test = alg_test_null,
2539 }, {
2540 .alg = "cryptd(__driver-gcm-aes-aesni)",
2541 .test = alg_test_null,
2542 .fips_allowed = 1,
2543 }, {
2544 .alg = "cryptd(__ghash-pclmulqdqni)",
2545 .test = alg_test_null,
2546 .fips_allowed = 1,
2547 }, {
2548 .alg = "ctr(aes)",
2549 .test = alg_test_skcipher,
2550 .fips_allowed = 1,
2551 .suite = {
2552 .cipher = {
2553 .enc = {
2554 .vecs = aes_ctr_enc_tv_template,
2555 .count = AES_CTR_ENC_TEST_VECTORS
2556 },
2557 .dec = {
2558 .vecs = aes_ctr_dec_tv_template,
2559 .count = AES_CTR_DEC_TEST_VECTORS
2560 }
2561 }
2562 }
2563 }, {
2564 .alg = "ctr(blowfish)",
2565 .test = alg_test_skcipher,
2566 .suite = {
2567 .cipher = {
2568 .enc = {
2569 .vecs = bf_ctr_enc_tv_template,
2570 .count = BF_CTR_ENC_TEST_VECTORS
2571 },
2572 .dec = {
2573 .vecs = bf_ctr_dec_tv_template,
2574 .count = BF_CTR_DEC_TEST_VECTORS
2575 }
2576 }
2577 }
2578 }, {
2579 .alg = "ctr(camellia)",
2580 .test = alg_test_skcipher,
2581 .suite = {
2582 .cipher = {
2583 .enc = {
2584 .vecs = camellia_ctr_enc_tv_template,
2585 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2586 },
2587 .dec = {
2588 .vecs = camellia_ctr_dec_tv_template,
2589 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2590 }
2591 }
2592 }
2593 }, {
2594 .alg = "ctr(cast5)",
2595 .test = alg_test_skcipher,
2596 .suite = {
2597 .cipher = {
2598 .enc = {
2599 .vecs = cast5_ctr_enc_tv_template,
2600 .count = CAST5_CTR_ENC_TEST_VECTORS
2601 },
2602 .dec = {
2603 .vecs = cast5_ctr_dec_tv_template,
2604 .count = CAST5_CTR_DEC_TEST_VECTORS
2605 }
2606 }
2607 }
2608 }, {
2609 .alg = "ctr(cast6)",
2610 .test = alg_test_skcipher,
2611 .suite = {
2612 .cipher = {
2613 .enc = {
2614 .vecs = cast6_ctr_enc_tv_template,
2615 .count = CAST6_CTR_ENC_TEST_VECTORS
2616 },
2617 .dec = {
2618 .vecs = cast6_ctr_dec_tv_template,
2619 .count = CAST6_CTR_DEC_TEST_VECTORS
2620 }
2621 }
2622 }
2623 }, {
2624 .alg = "ctr(des)",
2625 .test = alg_test_skcipher,
2626 .suite = {
2627 .cipher = {
2628 .enc = {
2629 .vecs = des_ctr_enc_tv_template,
2630 .count = DES_CTR_ENC_TEST_VECTORS
2631 },
2632 .dec = {
2633 .vecs = des_ctr_dec_tv_template,
2634 .count = DES_CTR_DEC_TEST_VECTORS
2635 }
2636 }
2637 }
2638 }, {
2639 .alg = "ctr(des3_ede)",
2640 .test = alg_test_skcipher,
2641 .suite = {
2642 .cipher = {
2643 .enc = {
2644 .vecs = des3_ede_ctr_enc_tv_template,
2645 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2646 },
2647 .dec = {
2648 .vecs = des3_ede_ctr_dec_tv_template,
2649 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2650 }
2651 }
2652 }
2653 }, {
2654 .alg = "ctr(serpent)",
2655 .test = alg_test_skcipher,
2656 .suite = {
2657 .cipher = {
2658 .enc = {
2659 .vecs = serpent_ctr_enc_tv_template,
2660 .count = SERPENT_CTR_ENC_TEST_VECTORS
2661 },
2662 .dec = {
2663 .vecs = serpent_ctr_dec_tv_template,
2664 .count = SERPENT_CTR_DEC_TEST_VECTORS
2665 }
2666 }
2667 }
2668 }, {
2669 .alg = "ctr(twofish)",
2670 .test = alg_test_skcipher,
2671 .suite = {
2672 .cipher = {
2673 .enc = {
2674 .vecs = tf_ctr_enc_tv_template,
2675 .count = TF_CTR_ENC_TEST_VECTORS
2676 },
2677 .dec = {
2678 .vecs = tf_ctr_dec_tv_template,
2679 .count = TF_CTR_DEC_TEST_VECTORS
2680 }
2681 }
2682 }
2683 }, {
2684 .alg = "cts(cbc(aes))",
2685 .test = alg_test_skcipher,
2686 .suite = {
2687 .cipher = {
2688 .enc = {
2689 .vecs = cts_mode_enc_tv_template,
2690 .count = CTS_MODE_ENC_TEST_VECTORS
2691 },
2692 .dec = {
2693 .vecs = cts_mode_dec_tv_template,
2694 .count = CTS_MODE_DEC_TEST_VECTORS
2695 }
2696 }
2697 }
2698 }, {
2699 .alg = "deflate",
2700 .test = alg_test_comp,
2701 .fips_allowed = 1,
2702 .suite = {
2703 .comp = {
2704 .comp = {
2705 .vecs = deflate_comp_tv_template,
2706 .count = DEFLATE_COMP_TEST_VECTORS
2707 },
2708 .decomp = {
2709 .vecs = deflate_decomp_tv_template,
2710 .count = DEFLATE_DECOMP_TEST_VECTORS
2711 }
2712 }
2713 }
2714 }, {
2715 .alg = "digest_null",
2716 .test = alg_test_null,
2717 }, {
2718 .alg = "drbg_nopr_ctr_aes128",
2719 .test = alg_test_drbg,
2720 .fips_allowed = 1,
2721 .suite = {
2722 .drbg = {
2723 .vecs = drbg_nopr_ctr_aes128_tv_template,
2724 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2725 }
2726 }
2727 }, {
2728 .alg = "drbg_nopr_ctr_aes192",
2729 .test = alg_test_drbg,
2730 .fips_allowed = 1,
2731 .suite = {
2732 .drbg = {
2733 .vecs = drbg_nopr_ctr_aes192_tv_template,
2734 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2735 }
2736 }
2737 }, {
2738 .alg = "drbg_nopr_ctr_aes256",
2739 .test = alg_test_drbg,
2740 .fips_allowed = 1,
2741 .suite = {
2742 .drbg = {
2743 .vecs = drbg_nopr_ctr_aes256_tv_template,
2744 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2745 }
2746 }
2747 }, {
2748 /*
2749 * There is no need to specifically test the DRBG with every
2750 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2751 */
2752 .alg = "drbg_nopr_hmac_sha1",
2753 .fips_allowed = 1,
2754 .test = alg_test_null,
2755 }, {
2756 .alg = "drbg_nopr_hmac_sha256",
2757 .test = alg_test_drbg,
2758 .fips_allowed = 1,
2759 .suite = {
2760 .drbg = {
2761 .vecs = drbg_nopr_hmac_sha256_tv_template,
2762 .count =
2763 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2764 }
2765 }
2766 }, {
2767 /* covered by drbg_nopr_hmac_sha256 test */
2768 .alg = "drbg_nopr_hmac_sha384",
2769 .fips_allowed = 1,
2770 .test = alg_test_null,
2771 }, {
2772 .alg = "drbg_nopr_hmac_sha512",
2773 .test = alg_test_null,
2774 .fips_allowed = 1,
2775 }, {
2776 .alg = "drbg_nopr_sha1",
2777 .fips_allowed = 1,
2778 .test = alg_test_null,
2779 }, {
2780 .alg = "drbg_nopr_sha256",
2781 .test = alg_test_drbg,
2782 .fips_allowed = 1,
2783 .suite = {
2784 .drbg = {
2785 .vecs = drbg_nopr_sha256_tv_template,
2786 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2787 }
2788 }
2789 }, {
2790 /* covered by drbg_nopr_sha256 test */
2791 .alg = "drbg_nopr_sha384",
2792 .fips_allowed = 1,
2793 .test = alg_test_null,
2794 }, {
2795 .alg = "drbg_nopr_sha512",
2796 .fips_allowed = 1,
2797 .test = alg_test_null,
2798 }, {
2799 .alg = "drbg_pr_ctr_aes128",
2800 .test = alg_test_drbg,
2801 .fips_allowed = 1,
2802 .suite = {
2803 .drbg = {
2804 .vecs = drbg_pr_ctr_aes128_tv_template,
2805 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2806 }
2807 }
2808 }, {
2809 /* covered by drbg_pr_ctr_aes128 test */
2810 .alg = "drbg_pr_ctr_aes192",
2811 .fips_allowed = 1,
2812 .test = alg_test_null,
2813 }, {
2814 .alg = "drbg_pr_ctr_aes256",
2815 .fips_allowed = 1,
2816 .test = alg_test_null,
2817 }, {
2818 .alg = "drbg_pr_hmac_sha1",
2819 .fips_allowed = 1,
2820 .test = alg_test_null,
2821 }, {
2822 .alg = "drbg_pr_hmac_sha256",
2823 .test = alg_test_drbg,
2824 .fips_allowed = 1,
2825 .suite = {
2826 .drbg = {
2827 .vecs = drbg_pr_hmac_sha256_tv_template,
2828 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2829 }
2830 }
2831 }, {
2832 /* covered by drbg_pr_hmac_sha256 test */
2833 .alg = "drbg_pr_hmac_sha384",
2834 .fips_allowed = 1,
2835 .test = alg_test_null,
2836 }, {
2837 .alg = "drbg_pr_hmac_sha512",
2838 .test = alg_test_null,
2839 .fips_allowed = 1,
2840 }, {
2841 .alg = "drbg_pr_sha1",
2842 .fips_allowed = 1,
2843 .test = alg_test_null,
2844 }, {
2845 .alg = "drbg_pr_sha256",
2846 .test = alg_test_drbg,
2847 .fips_allowed = 1,
2848 .suite = {
2849 .drbg = {
2850 .vecs = drbg_pr_sha256_tv_template,
2851 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2852 }
2853 }
2854 }, {
2855 /* covered by drbg_pr_sha256 test */
2856 .alg = "drbg_pr_sha384",
2857 .fips_allowed = 1,
2858 .test = alg_test_null,
2859 }, {
2860 .alg = "drbg_pr_sha512",
2861 .fips_allowed = 1,
2862 .test = alg_test_null,
2863 }, {
2864 .alg = "ecb(__aes-aesni)",
2865 .test = alg_test_null,
2866 .fips_allowed = 1,
2867 }, {
2868 .alg = "ecb(aes)",
2869 .test = alg_test_skcipher,
2870 .fips_allowed = 1,
2871 .suite = {
2872 .cipher = {
2873 .enc = {
2874 .vecs = aes_enc_tv_template,
2875 .count = AES_ENC_TEST_VECTORS
2876 },
2877 .dec = {
2878 .vecs = aes_dec_tv_template,
2879 .count = AES_DEC_TEST_VECTORS
2880 }
2881 }
2882 }
2883 }, {
2884 .alg = "ecb(anubis)",
2885 .test = alg_test_skcipher,
2886 .suite = {
2887 .cipher = {
2888 .enc = {
2889 .vecs = anubis_enc_tv_template,
2890 .count = ANUBIS_ENC_TEST_VECTORS
2891 },
2892 .dec = {
2893 .vecs = anubis_dec_tv_template,
2894 .count = ANUBIS_DEC_TEST_VECTORS
2895 }
2896 }
2897 }
2898 }, {
2899 .alg = "ecb(arc4)",
2900 .test = alg_test_skcipher,
2901 .suite = {
2902 .cipher = {
2903 .enc = {
2904 .vecs = arc4_enc_tv_template,
2905 .count = ARC4_ENC_TEST_VECTORS
2906 },
2907 .dec = {
2908 .vecs = arc4_dec_tv_template,
2909 .count = ARC4_DEC_TEST_VECTORS
2910 }
2911 }
2912 }
2913 }, {
2914 .alg = "ecb(blowfish)",
2915 .test = alg_test_skcipher,
2916 .suite = {
2917 .cipher = {
2918 .enc = {
2919 .vecs = bf_enc_tv_template,
2920 .count = BF_ENC_TEST_VECTORS
2921 },
2922 .dec = {
2923 .vecs = bf_dec_tv_template,
2924 .count = BF_DEC_TEST_VECTORS
2925 }
2926 }
2927 }
2928 }, {
2929 .alg = "ecb(camellia)",
2930 .test = alg_test_skcipher,
2931 .suite = {
2932 .cipher = {
2933 .enc = {
2934 .vecs = camellia_enc_tv_template,
2935 .count = CAMELLIA_ENC_TEST_VECTORS
2936 },
2937 .dec = {
2938 .vecs = camellia_dec_tv_template,
2939 .count = CAMELLIA_DEC_TEST_VECTORS
2940 }
2941 }
2942 }
2943 }, {
2944 .alg = "ecb(cast5)",
2945 .test = alg_test_skcipher,
2946 .suite = {
2947 .cipher = {
2948 .enc = {
2949 .vecs = cast5_enc_tv_template,
2950 .count = CAST5_ENC_TEST_VECTORS
2951 },
2952 .dec = {
2953 .vecs = cast5_dec_tv_template,
2954 .count = CAST5_DEC_TEST_VECTORS
2955 }
2956 }
2957 }
2958 }, {
2959 .alg = "ecb(cast6)",
2960 .test = alg_test_skcipher,
2961 .suite = {
2962 .cipher = {
2963 .enc = {
2964 .vecs = cast6_enc_tv_template,
2965 .count = CAST6_ENC_TEST_VECTORS
2966 },
2967 .dec = {
2968 .vecs = cast6_dec_tv_template,
2969 .count = CAST6_DEC_TEST_VECTORS
2970 }
2971 }
2972 }
2973 }, {
2974 .alg = "ecb(cipher_null)",
2975 .test = alg_test_null,
2976 }, {
2977 .alg = "ecb(des)",
2978 .test = alg_test_skcipher,
2979 .suite = {
2980 .cipher = {
2981 .enc = {
2982 .vecs = des_enc_tv_template,
2983 .count = DES_ENC_TEST_VECTORS
2984 },
2985 .dec = {
2986 .vecs = des_dec_tv_template,
2987 .count = DES_DEC_TEST_VECTORS
2988 }
2989 }
2990 }
2991 }, {
2992 .alg = "ecb(des3_ede)",
2993 .test = alg_test_skcipher,
2994 .fips_allowed = 1,
2995 .suite = {
2996 .cipher = {
2997 .enc = {
2998 .vecs = des3_ede_enc_tv_template,
2999 .count = DES3_EDE_ENC_TEST_VECTORS
3000 },
3001 .dec = {
3002 .vecs = des3_ede_dec_tv_template,
3003 .count = DES3_EDE_DEC_TEST_VECTORS
3004 }
3005 }
3006 }
3007 }, {
3008 .alg = "ecb(fcrypt)",
3009 .test = alg_test_skcipher,
3010 .suite = {
3011 .cipher = {
3012 .enc = {
3013 .vecs = fcrypt_pcbc_enc_tv_template,
3014 .count = 1
3015 },
3016 .dec = {
3017 .vecs = fcrypt_pcbc_dec_tv_template,
3018 .count = 1
3019 }
3020 }
3021 }
3022 }, {
3023 .alg = "ecb(khazad)",
3024 .test = alg_test_skcipher,
3025 .suite = {
3026 .cipher = {
3027 .enc = {
3028 .vecs = khazad_enc_tv_template,
3029 .count = KHAZAD_ENC_TEST_VECTORS
3030 },
3031 .dec = {
3032 .vecs = khazad_dec_tv_template,
3033 .count = KHAZAD_DEC_TEST_VECTORS
3034 }
3035 }
3036 }
3037 }, {
3038 .alg = "ecb(seed)",
3039 .test = alg_test_skcipher,
3040 .suite = {
3041 .cipher = {
3042 .enc = {
3043 .vecs = seed_enc_tv_template,
3044 .count = SEED_ENC_TEST_VECTORS
3045 },
3046 .dec = {
3047 .vecs = seed_dec_tv_template,
3048 .count = SEED_DEC_TEST_VECTORS
3049 }
3050 }
3051 }
3052 }, {
3053 .alg = "ecb(serpent)",
3054 .test = alg_test_skcipher,
3055 .suite = {
3056 .cipher = {
3057 .enc = {
3058 .vecs = serpent_enc_tv_template,
3059 .count = SERPENT_ENC_TEST_VECTORS
3060 },
3061 .dec = {
3062 .vecs = serpent_dec_tv_template,
3063 .count = SERPENT_DEC_TEST_VECTORS
3064 }
3065 }
3066 }
3067 }, {
3068 .alg = "ecb(tea)",
3069 .test = alg_test_skcipher,
3070 .suite = {
3071 .cipher = {
3072 .enc = {
3073 .vecs = tea_enc_tv_template,
3074 .count = TEA_ENC_TEST_VECTORS
3075 },
3076 .dec = {
3077 .vecs = tea_dec_tv_template,
3078 .count = TEA_DEC_TEST_VECTORS
3079 }
3080 }
3081 }
3082 }, {
3083 .alg = "ecb(tnepres)",
3084 .test = alg_test_skcipher,
3085 .suite = {
3086 .cipher = {
3087 .enc = {
3088 .vecs = tnepres_enc_tv_template,
3089 .count = TNEPRES_ENC_TEST_VECTORS
3090 },
3091 .dec = {
3092 .vecs = tnepres_dec_tv_template,
3093 .count = TNEPRES_DEC_TEST_VECTORS
3094 }
3095 }
3096 }
3097 }, {
3098 .alg = "ecb(twofish)",
3099 .test = alg_test_skcipher,
3100 .suite = {
3101 .cipher = {
3102 .enc = {
3103 .vecs = tf_enc_tv_template,
3104 .count = TF_ENC_TEST_VECTORS
3105 },
3106 .dec = {
3107 .vecs = tf_dec_tv_template,
3108 .count = TF_DEC_TEST_VECTORS
3109 }
3110 }
3111 }
3112 }, {
3113 .alg = "ecb(xeta)",
3114 .test = alg_test_skcipher,
3115 .suite = {
3116 .cipher = {
3117 .enc = {
3118 .vecs = xeta_enc_tv_template,
3119 .count = XETA_ENC_TEST_VECTORS
3120 },
3121 .dec = {
3122 .vecs = xeta_dec_tv_template,
3123 .count = XETA_DEC_TEST_VECTORS
3124 }
3125 }
3126 }
3127 }, {
3128 .alg = "ecb(xtea)",
3129 .test = alg_test_skcipher,
3130 .suite = {
3131 .cipher = {
3132 .enc = {
3133 .vecs = xtea_enc_tv_template,
3134 .count = XTEA_ENC_TEST_VECTORS
3135 },
3136 .dec = {
3137 .vecs = xtea_dec_tv_template,
3138 .count = XTEA_DEC_TEST_VECTORS
3139 }
3140 }
3141 }
3142 }, {
3143 .alg = "gcm(aes)",
3144 .test = alg_test_aead,
3145 .fips_allowed = 1,
3146 .suite = {
3147 .aead = {
3148 .enc = {
3149 .vecs = aes_gcm_enc_tv_template,
3150 .count = AES_GCM_ENC_TEST_VECTORS
3151 },
3152 .dec = {
3153 .vecs = aes_gcm_dec_tv_template,
3154 .count = AES_GCM_DEC_TEST_VECTORS
3155 }
3156 }
3157 }
3158 }, {
3159 .alg = "ghash",
3160 .test = alg_test_hash,
3161 .fips_allowed = 1,
3162 .suite = {
3163 .hash = {
3164 .vecs = ghash_tv_template,
3165 .count = GHASH_TEST_VECTORS
3166 }
3167 }
3168 }, {
3169 .alg = "hmac(crc32)",
3170 .test = alg_test_hash,
3171 .suite = {
3172 .hash = {
3173 .vecs = bfin_crc_tv_template,
3174 .count = BFIN_CRC_TEST_VECTORS
3175 }
3176 }
3177 }, {
3178 .alg = "hmac(md5)",
3179 .test = alg_test_hash,
3180 .suite = {
3181 .hash = {
3182 .vecs = hmac_md5_tv_template,
3183 .count = HMAC_MD5_TEST_VECTORS
3184 }
3185 }
3186 }, {
3187 .alg = "hmac(rmd128)",
3188 .test = alg_test_hash,
3189 .suite = {
3190 .hash = {
3191 .vecs = hmac_rmd128_tv_template,
3192 .count = HMAC_RMD128_TEST_VECTORS
3193 }
3194 }
3195 }, {
3196 .alg = "hmac(rmd160)",
3197 .test = alg_test_hash,
3198 .suite = {
3199 .hash = {
3200 .vecs = hmac_rmd160_tv_template,
3201 .count = HMAC_RMD160_TEST_VECTORS
3202 }
3203 }
3204 }, {
3205 .alg = "hmac(sha1)",
3206 .test = alg_test_hash,
3207 .fips_allowed = 1,
3208 .suite = {
3209 .hash = {
3210 .vecs = hmac_sha1_tv_template,
3211 .count = HMAC_SHA1_TEST_VECTORS
3212 }
3213 }
3214 }, {
3215 .alg = "hmac(sha224)",
3216 .test = alg_test_hash,
3217 .fips_allowed = 1,
3218 .suite = {
3219 .hash = {
3220 .vecs = hmac_sha224_tv_template,
3221 .count = HMAC_SHA224_TEST_VECTORS
3222 }
3223 }
3224 }, {
3225 .alg = "hmac(sha256)",
3226 .test = alg_test_hash,
3227 .fips_allowed = 1,
3228 .suite = {
3229 .hash = {
3230 .vecs = hmac_sha256_tv_template,
3231 .count = HMAC_SHA256_TEST_VECTORS
3232 }
3233 }
3234 }, {
3235 .alg = "hmac(sha384)",
3236 .test = alg_test_hash,
3237 .fips_allowed = 1,
3238 .suite = {
3239 .hash = {
3240 .vecs = hmac_sha384_tv_template,
3241 .count = HMAC_SHA384_TEST_VECTORS
3242 }
3243 }
3244 }, {
3245 .alg = "hmac(sha512)",
3246 .test = alg_test_hash,
3247 .fips_allowed = 1,
3248 .suite = {
3249 .hash = {
3250 .vecs = hmac_sha512_tv_template,
3251 .count = HMAC_SHA512_TEST_VECTORS
3252 }
3253 }
3254 }, {
3255 .alg = "jitterentropy_rng",
3256 .fips_allowed = 1,
3257 .test = alg_test_null,
3258 }, {
3259 .alg = "kw(aes)",
3260 .test = alg_test_skcipher,
3261 .fips_allowed = 1,
3262 .suite = {
3263 .cipher = {
3264 .enc = {
3265 .vecs = aes_kw_enc_tv_template,
3266 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3267 },
3268 .dec = {
3269 .vecs = aes_kw_dec_tv_template,
3270 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
3271 }
3272 }
3273 }
3274 }, {
3275 .alg = "lrw(aes)",
3276 .test = alg_test_skcipher,
3277 .suite = {
3278 .cipher = {
3279 .enc = {
3280 .vecs = aes_lrw_enc_tv_template,
3281 .count = AES_LRW_ENC_TEST_VECTORS
3282 },
3283 .dec = {
3284 .vecs = aes_lrw_dec_tv_template,
3285 .count = AES_LRW_DEC_TEST_VECTORS
3286 }
3287 }
3288 }
3289 }, {
3290 .alg = "lrw(camellia)",
3291 .test = alg_test_skcipher,
3292 .suite = {
3293 .cipher = {
3294 .enc = {
3295 .vecs = camellia_lrw_enc_tv_template,
3296 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3297 },
3298 .dec = {
3299 .vecs = camellia_lrw_dec_tv_template,
3300 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3301 }
3302 }
3303 }
3304 }, {
3305 .alg = "lrw(cast6)",
3306 .test = alg_test_skcipher,
3307 .suite = {
3308 .cipher = {
3309 .enc = {
3310 .vecs = cast6_lrw_enc_tv_template,
3311 .count = CAST6_LRW_ENC_TEST_VECTORS
3312 },
3313 .dec = {
3314 .vecs = cast6_lrw_dec_tv_template,
3315 .count = CAST6_LRW_DEC_TEST_VECTORS
3316 }
3317 }
3318 }
3319 }, {
3320 .alg = "lrw(serpent)",
3321 .test = alg_test_skcipher,
3322 .suite = {
3323 .cipher = {
3324 .enc = {
3325 .vecs = serpent_lrw_enc_tv_template,
3326 .count = SERPENT_LRW_ENC_TEST_VECTORS
3327 },
3328 .dec = {
3329 .vecs = serpent_lrw_dec_tv_template,
3330 .count = SERPENT_LRW_DEC_TEST_VECTORS
3331 }
3332 }
3333 }
3334 }, {
3335 .alg = "lrw(twofish)",
3336 .test = alg_test_skcipher,
3337 .suite = {
3338 .cipher = {
3339 .enc = {
3340 .vecs = tf_lrw_enc_tv_template,
3341 .count = TF_LRW_ENC_TEST_VECTORS
3342 },
3343 .dec = {
3344 .vecs = tf_lrw_dec_tv_template,
3345 .count = TF_LRW_DEC_TEST_VECTORS
3346 }
3347 }
3348 }
3349 }, {
3350 .alg = "lz4",
3351 .test = alg_test_comp,
3352 .fips_allowed = 1,
3353 .suite = {
3354 .comp = {
3355 .comp = {
3356 .vecs = lz4_comp_tv_template,
3357 .count = LZ4_COMP_TEST_VECTORS
3358 },
3359 .decomp = {
3360 .vecs = lz4_decomp_tv_template,
3361 .count = LZ4_DECOMP_TEST_VECTORS
3362 }
3363 }
3364 }
3365 }, {
3366 .alg = "lz4hc",
3367 .test = alg_test_comp,
3368 .fips_allowed = 1,
3369 .suite = {
3370 .comp = {
3371 .comp = {
3372 .vecs = lz4hc_comp_tv_template,
3373 .count = LZ4HC_COMP_TEST_VECTORS
3374 },
3375 .decomp = {
3376 .vecs = lz4hc_decomp_tv_template,
3377 .count = LZ4HC_DECOMP_TEST_VECTORS
3378 }
3379 }
3380 }
3381 }, {
3382 .alg = "lzo",
3383 .test = alg_test_comp,
3384 .fips_allowed = 1,
3385 .suite = {
3386 .comp = {
3387 .comp = {
3388 .vecs = lzo_comp_tv_template,
3389 .count = LZO_COMP_TEST_VECTORS
3390 },
3391 .decomp = {
3392 .vecs = lzo_decomp_tv_template,
3393 .count = LZO_DECOMP_TEST_VECTORS
3394 }
3395 }
3396 }
3397 }, {
3398 .alg = "md4",
3399 .test = alg_test_hash,
3400 .suite = {
3401 .hash = {
3402 .vecs = md4_tv_template,
3403 .count = MD4_TEST_VECTORS
3404 }
3405 }
3406 }, {
3407 .alg = "md5",
3408 .test = alg_test_hash,
3409 .suite = {
3410 .hash = {
3411 .vecs = md5_tv_template,
3412 .count = MD5_TEST_VECTORS
3413 }
3414 }
3415 }, {
3416 .alg = "michael_mic",
3417 .test = alg_test_hash,
3418 .suite = {
3419 .hash = {
3420 .vecs = michael_mic_tv_template,
3421 .count = MICHAEL_MIC_TEST_VECTORS
3422 }
3423 }
3424 }, {
3425 .alg = "ofb(aes)",
3426 .test = alg_test_skcipher,
3427 .fips_allowed = 1,
3428 .suite = {
3429 .cipher = {
3430 .enc = {
3431 .vecs = aes_ofb_enc_tv_template,
3432 .count = AES_OFB_ENC_TEST_VECTORS
3433 },
3434 .dec = {
3435 .vecs = aes_ofb_dec_tv_template,
3436 .count = AES_OFB_DEC_TEST_VECTORS
3437 }
3438 }
3439 }
3440 }, {
3441 .alg = "pcbc(fcrypt)",
3442 .test = alg_test_skcipher,
3443 .suite = {
3444 .cipher = {
3445 .enc = {
3446 .vecs = fcrypt_pcbc_enc_tv_template,
3447 .count = FCRYPT_ENC_TEST_VECTORS
3448 },
3449 .dec = {
3450 .vecs = fcrypt_pcbc_dec_tv_template,
3451 .count = FCRYPT_DEC_TEST_VECTORS
3452 }
3453 }
3454 }
3455 }, {
3456 .alg = "poly1305",
3457 .test = alg_test_hash,
3458 .suite = {
3459 .hash = {
3460 .vecs = poly1305_tv_template,
3461 .count = POLY1305_TEST_VECTORS
3462 }
3463 }
3464 }, {
3465 .alg = "rfc3686(ctr(aes))",
3466 .test = alg_test_skcipher,
3467 .fips_allowed = 1,
3468 .suite = {
3469 .cipher = {
3470 .enc = {
3471 .vecs = aes_ctr_rfc3686_enc_tv_template,
3472 .count = AES_CTR_3686_ENC_TEST_VECTORS
3473 },
3474 .dec = {
3475 .vecs = aes_ctr_rfc3686_dec_tv_template,
3476 .count = AES_CTR_3686_DEC_TEST_VECTORS
3477 }
3478 }
3479 }
3480 }, {
3481 .alg = "rfc4106(gcm(aes))",
3482 .test = alg_test_aead,
3483 .fips_allowed = 1,
3484 .suite = {
3485 .aead = {
3486 .enc = {
3487 .vecs = aes_gcm_rfc4106_enc_tv_template,
3488 .count = AES_GCM_4106_ENC_TEST_VECTORS
3489 },
3490 .dec = {
3491 .vecs = aes_gcm_rfc4106_dec_tv_template,
3492 .count = AES_GCM_4106_DEC_TEST_VECTORS
3493 }
3494 }
3495 }
3496 }, {
3497 .alg = "rfc4309(ccm(aes))",
3498 .test = alg_test_aead,
3499 .fips_allowed = 1,
3500 .suite = {
3501 .aead = {
3502 .enc = {
3503 .vecs = aes_ccm_rfc4309_enc_tv_template,
3504 .count = AES_CCM_4309_ENC_TEST_VECTORS
3505 },
3506 .dec = {
3507 .vecs = aes_ccm_rfc4309_dec_tv_template,
3508 .count = AES_CCM_4309_DEC_TEST_VECTORS
3509 }
3510 }
3511 }
3512 }, {
3513 .alg = "rfc4543(gcm(aes))",
3514 .test = alg_test_aead,
3515 .suite = {
3516 .aead = {
3517 .enc = {
3518 .vecs = aes_gcm_rfc4543_enc_tv_template,
3519 .count = AES_GCM_4543_ENC_TEST_VECTORS
3520 },
3521 .dec = {
3522 .vecs = aes_gcm_rfc4543_dec_tv_template,
3523 .count = AES_GCM_4543_DEC_TEST_VECTORS
3524 },
3525 }
3526 }
3527 }, {
3528 .alg = "rfc7539(chacha20,poly1305)",
3529 .test = alg_test_aead,
3530 .suite = {
3531 .aead = {
3532 .enc = {
3533 .vecs = rfc7539_enc_tv_template,
3534 .count = RFC7539_ENC_TEST_VECTORS
3535 },
3536 .dec = {
3537 .vecs = rfc7539_dec_tv_template,
3538 .count = RFC7539_DEC_TEST_VECTORS
3539 },
3540 }
3541 }
3542 }, {
3543 .alg = "rfc7539esp(chacha20,poly1305)",
3544 .test = alg_test_aead,
3545 .suite = {
3546 .aead = {
3547 .enc = {
3548 .vecs = rfc7539esp_enc_tv_template,
3549 .count = RFC7539ESP_ENC_TEST_VECTORS
3550 },
3551 .dec = {
3552 .vecs = rfc7539esp_dec_tv_template,
3553 .count = RFC7539ESP_DEC_TEST_VECTORS
3554 },
3555 }
3556 }
3557 }, {
3558 .alg = "rmd128",
3559 .test = alg_test_hash,
3560 .suite = {
3561 .hash = {
3562 .vecs = rmd128_tv_template,
3563 .count = RMD128_TEST_VECTORS
3564 }
3565 }
3566 }, {
3567 .alg = "rmd160",
3568 .test = alg_test_hash,
3569 .suite = {
3570 .hash = {
3571 .vecs = rmd160_tv_template,
3572 .count = RMD160_TEST_VECTORS
3573 }
3574 }
3575 }, {
3576 .alg = "rmd256",
3577 .test = alg_test_hash,
3578 .suite = {
3579 .hash = {
3580 .vecs = rmd256_tv_template,
3581 .count = RMD256_TEST_VECTORS
3582 }
3583 }
3584 }, {
3585 .alg = "rmd320",
3586 .test = alg_test_hash,
3587 .suite = {
3588 .hash = {
3589 .vecs = rmd320_tv_template,
3590 .count = RMD320_TEST_VECTORS
3591 }
3592 }
3593 }, {
3594 .alg = "rsa",
3595 .test = alg_test_akcipher,
3596 .fips_allowed = 1,
3597 .suite = {
3598 .akcipher = {
3599 .vecs = rsa_tv_template,
3600 .count = RSA_TEST_VECTORS
3601 }
3602 }
3603 }, {
3604 .alg = "salsa20",
3605 .test = alg_test_skcipher,
3606 .suite = {
3607 .cipher = {
3608 .enc = {
3609 .vecs = salsa20_stream_enc_tv_template,
3610 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3611 }
3612 }
3613 }
3614 }, {
3615 .alg = "sha1",
3616 .test = alg_test_hash,
3617 .fips_allowed = 1,
3618 .suite = {
3619 .hash = {
3620 .vecs = sha1_tv_template,
3621 .count = SHA1_TEST_VECTORS
3622 }
3623 }
3624 }, {
3625 .alg = "sha224",
3626 .test = alg_test_hash,
3627 .fips_allowed = 1,
3628 .suite = {
3629 .hash = {
3630 .vecs = sha224_tv_template,
3631 .count = SHA224_TEST_VECTORS
3632 }
3633 }
3634 }, {
3635 .alg = "sha256",
3636 .test = alg_test_hash,
3637 .fips_allowed = 1,
3638 .suite = {
3639 .hash = {
3640 .vecs = sha256_tv_template,
3641 .count = SHA256_TEST_VECTORS
3642 }
3643 }
3644 }, {
3645 .alg = "sha384",
3646 .test = alg_test_hash,
3647 .fips_allowed = 1,
3648 .suite = {
3649 .hash = {
3650 .vecs = sha384_tv_template,
3651 .count = SHA384_TEST_VECTORS
3652 }
3653 }
3654 }, {
3655 .alg = "sha512",
3656 .test = alg_test_hash,
3657 .fips_allowed = 1,
3658 .suite = {
3659 .hash = {
3660 .vecs = sha512_tv_template,
3661 .count = SHA512_TEST_VECTORS
3662 }
3663 }
3664 }, {
3665 .alg = "tgr128",
3666 .test = alg_test_hash,
3667 .suite = {
3668 .hash = {
3669 .vecs = tgr128_tv_template,
3670 .count = TGR128_TEST_VECTORS
3671 }
3672 }
3673 }, {
3674 .alg = "tgr160",
3675 .test = alg_test_hash,
3676 .suite = {
3677 .hash = {
3678 .vecs = tgr160_tv_template,
3679 .count = TGR160_TEST_VECTORS
3680 }
3681 }
3682 }, {
3683 .alg = "tgr192",
3684 .test = alg_test_hash,
3685 .suite = {
3686 .hash = {
3687 .vecs = tgr192_tv_template,
3688 .count = TGR192_TEST_VECTORS
3689 }
3690 }
3691 }, {
3692 .alg = "vmac(aes)",
3693 .test = alg_test_hash,
3694 .suite = {
3695 .hash = {
3696 .vecs = aes_vmac128_tv_template,
3697 .count = VMAC_AES_TEST_VECTORS
3698 }
3699 }
3700 }, {
3701 .alg = "wp256",
3702 .test = alg_test_hash,
3703 .suite = {
3704 .hash = {
3705 .vecs = wp256_tv_template,
3706 .count = WP256_TEST_VECTORS
3707 }
3708 }
3709 }, {
3710 .alg = "wp384",
3711 .test = alg_test_hash,
3712 .suite = {
3713 .hash = {
3714 .vecs = wp384_tv_template,
3715 .count = WP384_TEST_VECTORS
3716 }
3717 }
3718 }, {
3719 .alg = "wp512",
3720 .test = alg_test_hash,
3721 .suite = {
3722 .hash = {
3723 .vecs = wp512_tv_template,
3724 .count = WP512_TEST_VECTORS
3725 }
3726 }
3727 }, {
3728 .alg = "xcbc(aes)",
3729 .test = alg_test_hash,
3730 .suite = {
3731 .hash = {
3732 .vecs = aes_xcbc128_tv_template,
3733 .count = XCBC_AES_TEST_VECTORS
3734 }
3735 }
3736 }, {
3737 .alg = "xts(aes)",
3738 .test = alg_test_skcipher,
3739 .fips_allowed = 1,
3740 .suite = {
3741 .cipher = {
3742 .enc = {
3743 .vecs = aes_xts_enc_tv_template,
3744 .count = AES_XTS_ENC_TEST_VECTORS
3745 },
3746 .dec = {
3747 .vecs = aes_xts_dec_tv_template,
3748 .count = AES_XTS_DEC_TEST_VECTORS
3749 }
3750 }
3751 }
3752 }, {
3753 .alg = "xts(camellia)",
3754 .test = alg_test_skcipher,
3755 .suite = {
3756 .cipher = {
3757 .enc = {
3758 .vecs = camellia_xts_enc_tv_template,
3759 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3760 },
3761 .dec = {
3762 .vecs = camellia_xts_dec_tv_template,
3763 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3764 }
3765 }
3766 }
3767 }, {
3768 .alg = "xts(cast6)",
3769 .test = alg_test_skcipher,
3770 .suite = {
3771 .cipher = {
3772 .enc = {
3773 .vecs = cast6_xts_enc_tv_template,
3774 .count = CAST6_XTS_ENC_TEST_VECTORS
3775 },
3776 .dec = {
3777 .vecs = cast6_xts_dec_tv_template,
3778 .count = CAST6_XTS_DEC_TEST_VECTORS
3779 }
3780 }
3781 }
3782 }, {
3783 .alg = "xts(serpent)",
3784 .test = alg_test_skcipher,
3785 .suite = {
3786 .cipher = {
3787 .enc = {
3788 .vecs = serpent_xts_enc_tv_template,
3789 .count = SERPENT_XTS_ENC_TEST_VECTORS
3790 },
3791 .dec = {
3792 .vecs = serpent_xts_dec_tv_template,
3793 .count = SERPENT_XTS_DEC_TEST_VECTORS
3794 }
3795 }
3796 }
3797 }, {
3798 .alg = "xts(twofish)",
3799 .test = alg_test_skcipher,
3800 .suite = {
3801 .cipher = {
3802 .enc = {
3803 .vecs = tf_xts_enc_tv_template,
3804 .count = TF_XTS_ENC_TEST_VECTORS
3805 },
3806 .dec = {
3807 .vecs = tf_xts_dec_tv_template,
3808 .count = TF_XTS_DEC_TEST_VECTORS
3809 }
3810 }
3811 }
3812 }
3813 };
3814
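/*
 * alg_test_descs[] must be kept sorted by .alg name: alg_find_test()
 * relies on the ordering for its binary search, and the one-time check
 * below warns about out-of-order or duplicate entries.
 */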
3815 static bool alg_test_descs_checked;
3816
3817 static void alg_test_descs_check_order(void)
3818 {
3819 int i;
3820
3821 /* only check once */
3822 if (alg_test_descs_checked)
3823 return;
3824
3825 alg_test_descs_checked = true;
3826
3827 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3828 int diff = strcmp(alg_test_descs[i - 1].alg,
3829 alg_test_descs[i].alg);
3830
3831 if (WARN_ON(diff > 0)) {
3832 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3833 alg_test_descs[i - 1].alg,
3834 alg_test_descs[i].alg);
3835 }
3836
3837 if (WARN_ON(diff == 0)) {
3838 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3839 alg_test_descs[i].alg);
3840 }
3841 }
3842 }
3843
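/*
 * Binary search the alphabetically sorted alg_test_descs[] table for
 * @alg.  Returns the index of the matching entry, or -1 if there are
 * no test vectors registered under that name.
 */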
3844 static int alg_find_test(const char *alg)
3845 {
3846 int start = 0;
3847 int end = ARRAY_SIZE(alg_test_descs);
3848
3849 while (start < end) {
3850 int i = (start + end) / 2;
3851 int diff = strcmp(alg_test_descs[i].alg, alg);
3852
3853 if (diff > 0) {
3854 end = i;
3855 continue;
3856 }
3857
3858 if (diff < 0) {
3859 start = i + 1;
3860 continue;
3861 }
3862
3863 return i;
3864 }
3865
3866 return -1;
3867 }
3868
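/*
 * Entry point used by the crypto manager when an algorithm instance is
 * registered.  Unless the self-tests are disabled via the "notests"
 * module parameter (which is ignored when fips_enabled is set), look up
 * test vectors under both the algorithm and the driver name; bare
 * single-block ciphers are exercised through their "ecb(...)" wrapper.
 * In FIPS mode a failing test causes a panic(), and a passing one is
 * logged.
 */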
3869 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3870 {
3871 int i;
3872 int j;
3873 int rc;
3874
3875 if (!fips_enabled && notests) {
3876 printk_once(KERN_INFO "alg: self-tests disabled\n");
3877 return 0;
3878 }
3879
3880 alg_test_descs_check_order();
3881
3882 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3883 char nalg[CRYPTO_MAX_ALG_NAME];
3884
3885 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3886 sizeof(nalg))
3887 return -ENAMETOOLONG;
3888
3889 i = alg_find_test(nalg);
3890 if (i < 0)
3891 goto notest;
3892
3893 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3894 goto non_fips_alg;
3895
3896 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3897 goto test_done;
3898 }
3899
3900 i = alg_find_test(alg);
3901 j = alg_find_test(driver);
3902 if (i < 0 && j < 0)
3903 goto notest;
3904
3905 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3906 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3907 goto non_fips_alg;
3908
3909 rc = 0;
3910 if (i >= 0)
3911 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3912 type, mask);
3913 if (j >= 0 && j != i)
3914 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3915 type, mask);
3916
3917 test_done:
3918 if (fips_enabled && rc)
3919 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3920
3921 if (fips_enabled && !rc)
3922 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3923
3924 return rc;
3925
3926 notest:
3927 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3928 return 0;
3929 non_fips_alg:
3930 return -EINVAL;
3931 }
3932
3933 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3934
3935 EXPORT_SYMBOL_GPL(alg_test);