[CRYPTO] api: Add aead crypto type
[deliverable/linux.git] / include / linux / crypto.h
1 /*
2 * Scatterlist Cryptographic API.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
7 *
8 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
9 * and Nettle, by Niels Möller.
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17 #ifndef _LINUX_CRYPTO_H
18 #define _LINUX_CRYPTO_H
19
20 #include <asm/atomic.h>
21 #include <linux/module.h>
22 #include <linux/kernel.h>
23 #include <linux/list.h>
24 #include <linux/slab.h>
25 #include <linux/string.h>
26 #include <linux/uaccess.h>
27
28 /*
29 * Algorithm masks and types.
30 */
/* The low nibble of cra_flags encodes the algorithm type. */
31 #define CRYPTO_ALG_TYPE_MASK 0x0000000f
32 #define CRYPTO_ALG_TYPE_CIPHER 0x00000001 /* single-block cipher */
33 #define CRYPTO_ALG_TYPE_DIGEST 0x00000002 /* legacy digest interface */
34 #define CRYPTO_ALG_TYPE_HASH 0x00000003 /* scatterlist-based hash */
35 #define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000004 /* scatterlist-based block cipher */
36 #define CRYPTO_ALG_TYPE_COMPRESS 0x00000005 /* compression/decompression */
37 #define CRYPTO_ALG_TYPE_AEAD 0x00000006 /* authenticated encryption w/ assoc. data */
38
/*
 * Matches both DIGEST (0x2) and HASH (0x3): they differ only in bit 0,
 * which this mask clears (see crypto_hash_cast()).
 */
39 #define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e
40
/* Algorithm lifecycle / behaviour flags, outside the type nibble. */
41 #define CRYPTO_ALG_LARVAL 0x00000010 /* registration still in progress */
42 #define CRYPTO_ALG_DEAD 0x00000020
43 #define CRYPTO_ALG_DYING 0x00000040
44 #define CRYPTO_ALG_ASYNC 0x00000080 /* NOTE(review): presumably "may complete asynchronously"; sync-only lookups mask it out (see crypto_alloc_blkcipher()) */
45
46 /*
47 * Set this bit if and only if the algorithm requires another algorithm of
48 * the same type to handle corner cases.
49 */
50 #define CRYPTO_ALG_NEED_FALLBACK 0x00000100
51
52 /*
53 * Transform masks and values (for crt_flags).
54 */
/* REQ_* bits are set by the caller; RES_* bits are reported back. */
55 #define CRYPTO_TFM_REQ_MASK 0x000fff00
56 #define CRYPTO_TFM_RES_MASK 0xfff00000
57
58 #define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100 /* accept keys classed as weak */
59 #define CRYPTO_TFM_REQ_MAY_SLEEP 0x00000200 /* operation may sleep */
60 #define CRYPTO_TFM_REQ_MAY_BACKLOG 0x00000400 /* request may be queued/backlogged */
61 #define CRYPTO_TFM_RES_WEAK_KEY 0x00100000 /* key was rejected/flagged as weak */
62 #define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
63 #define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000
64 #define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000
65 #define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000
66
67 /*
68 * Miscellaneous stuff.
69 */
70 #define CRYPTO_MAX_ALG_NAME 64
71
72 /*
73 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
74 * declaration) is used to ensure that the crypto_tfm context structure is
75 * aligned correctly for the given architecture so that there are no alignment
76 * faults for C data types. In particular, this is required on platforms such
77 * as arm where pointers are 32-bit aligned but there are data types such as
78 * u64 which require 64-bit alignment.
79 */
80 #if defined(ARCH_KMALLOC_MINALIGN)
81 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
82 #elif defined(ARCH_SLAB_MINALIGN)
83 #define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
84 #endif
85
86 #ifdef CRYPTO_MINALIGN
87 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
88 #else
89 #define CRYPTO_MINALIGN_ATTR
90 #endif
91
92 struct scatterlist;
93 struct crypto_ablkcipher;
94 struct crypto_async_request;
95 struct crypto_aead;
96 struct crypto_blkcipher;
97 struct crypto_hash;
98 struct crypto_queue;
99 struct crypto_tfm;
100 struct crypto_type;
101
102 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
103
/**
 * struct crypto_async_request - common front of every async crypto request
 * @list: list linkage for request queueing (NOTE(review): presumably used
 *        with struct crypto_queue — confirm in queue implementation)
 * @complete: completion callback invoked when the request finishes
 * @data: opaque caller context handed back to @complete
 * @tfm: transform this request was prepared for
 * @flags: per-request flags as passed to *_request_set_callback()
 *
 * Embedded as the first member of the type-specific request structures
 * (struct ablkcipher_request, struct aead_request) so that the outer
 * request can be recovered with container_of().
 */
104 struct crypto_async_request {
105 struct list_head list;
106 crypto_completion_t complete;
107 void *data;
108 struct crypto_tfm *tfm;
109
110 u32 flags;
111 };
112
/**
 * struct ablkcipher_request - asynchronous block cipher request
 * @base: Common attributes for async crypto requests
 * @nbytes: Number of bytes to process from @src
 * @info: IV for this request (set from the iv argument of
 *        ablkcipher_request_set_crypt())
 * @src: Source scatterlist
 * @dst: Destination scatterlist
 * @__ctx: Start of private context data (reqsize bytes, allocated by
 *         ablkcipher_request_alloc())
 */
113 struct ablkcipher_request {
114 struct crypto_async_request base;
115
116 unsigned int nbytes;
117
118 void *info;
119
120 struct scatterlist *src;
121 struct scatterlist *dst;
122
123 void *__ctx[] CRYPTO_MINALIGN_ATTR;
124 };
125
126 /**
127 * struct aead_request - AEAD request
128 * @base: Common attributes for async crypto requests
129 * @assoclen: Length in bytes of associated data for authentication
130 * @cryptlen: Length of data to be encrypted or decrypted
131 * @iv: Initialisation vector
132 * @assoc: Associated data
133 * @src: Source data
134 * @dst: Destination data
135 * @__ctx: Start of private context data
136 */
137 struct aead_request {
138 struct crypto_async_request base;
139
140 unsigned int assoclen;
141 unsigned int cryptlen;
142
143 u8 *iv;
144
145 struct scatterlist *assoc;
146 struct scatterlist *src;
147 struct scatterlist *dst;
148
149 void *__ctx[] CRYPTO_MINALIGN_ATTR;
150 };
151
152 struct blkcipher_desc {
153 struct crypto_blkcipher *tfm;
154 void *info;
155 u32 flags;
156 };
157
158 struct cipher_desc {
159 struct crypto_tfm *tfm;
160 void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
161 unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
162 const u8 *src, unsigned int nbytes);
163 void *info;
164 };
165
166 struct hash_desc {
167 struct crypto_hash *tfm;
168 u32 flags;
169 };
170
171 /*
172 * Algorithms: modular crypto algorithm implementations, managed
173 * via crypto_register_alg() and crypto_unregister_alg().
174 */
175 struct ablkcipher_alg {
176 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
177 unsigned int keylen);
178 int (*encrypt)(struct ablkcipher_request *req);
179 int (*decrypt)(struct ablkcipher_request *req);
180
181 struct crypto_queue *queue;
182
183 unsigned int min_keysize;
184 unsigned int max_keysize;
185 unsigned int ivsize;
186 };
187
/**
 * struct aead_alg - AEAD algorithm implementation hooks
 * @setkey: set the encryption key; returns 0 or a negative errno
 * @encrypt: perform the AEAD encrypt operation on a request
 * @decrypt: perform the AEAD decrypt operation on a request
 * @ivsize: size of the initialisation vector
 * @authsize: size of the authentication value — NOTE(review): presumably
 *            the ICV/tag length in bytes; confirm against implementations
 */
188 struct aead_alg {
189 int (*setkey)(struct crypto_aead *tfm, const u8 *key,
190 unsigned int keylen);
191 int (*encrypt)(struct aead_request *req);
192 int (*decrypt)(struct aead_request *req);
193
194 unsigned int ivsize;
195 unsigned int authsize;
196 };
197
198 struct blkcipher_alg {
199 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
200 unsigned int keylen);
201 int (*encrypt)(struct blkcipher_desc *desc,
202 struct scatterlist *dst, struct scatterlist *src,
203 unsigned int nbytes);
204 int (*decrypt)(struct blkcipher_desc *desc,
205 struct scatterlist *dst, struct scatterlist *src,
206 unsigned int nbytes);
207
208 unsigned int min_keysize;
209 unsigned int max_keysize;
210 unsigned int ivsize;
211 };
212
213 struct cipher_alg {
214 unsigned int cia_min_keysize;
215 unsigned int cia_max_keysize;
216 int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
217 unsigned int keylen);
218 void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
219 void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
220 };
221
222 struct digest_alg {
223 unsigned int dia_digestsize;
224 void (*dia_init)(struct crypto_tfm *tfm);
225 void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
226 unsigned int len);
227 void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
228 int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
229 unsigned int keylen);
230 };
231
232 struct hash_alg {
233 int (*init)(struct hash_desc *desc);
234 int (*update)(struct hash_desc *desc, struct scatterlist *sg,
235 unsigned int nbytes);
236 int (*final)(struct hash_desc *desc, u8 *out);
237 int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
238 unsigned int nbytes, u8 *out);
239 int (*setkey)(struct crypto_hash *tfm, const u8 *key,
240 unsigned int keylen);
241
242 unsigned int digestsize;
243 };
244
245 struct compress_alg {
246 int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
247 unsigned int slen, u8 *dst, unsigned int *dlen);
248 int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
249 unsigned int slen, u8 *dst, unsigned int *dlen);
250 };
251
252 #define cra_ablkcipher cra_u.ablkcipher
253 #define cra_aead cra_u.aead
254 #define cra_blkcipher cra_u.blkcipher
255 #define cra_cipher cra_u.cipher
256 #define cra_digest cra_u.digest
257 #define cra_hash cra_u.hash
258 #define cra_compress cra_u.compress
259
/**
 * struct crypto_alg - registered algorithm descriptor
 * @cra_list: global algorithm list linkage
 * @cra_users: list of users of this algorithm
 * @cra_flags: CRYPTO_ALG_TYPE_* in the low nibble (see
 *             crypto_tfm_alg_type()) plus CRYPTO_ALG_* behaviour flags
 * @cra_blocksize: block size of the algorithm
 * @cra_ctxsize: size of the per-transform private context
 * @cra_alignmask: alignment mask required for input/output data
 * @cra_priority: NOTE(review): presumably used to pick between competing
 *                implementations of the same @cra_name — confirm in core
 * @cra_refcnt: reference count
 * @cra_name: generic algorithm name (what users request)
 * @cra_driver_name: implementation-specific name
 * @cra_type: frontend type operations for this algorithm class
 * @cra_u: per-type hooks; exactly one member is valid, selected by the
 *         type bits in @cra_flags (accessed via the cra_* macros above)
 * @cra_init: per-transform constructor
 * @cra_exit: per-transform destructor
 * @cra_destroy: destroy this crypto_alg object itself
 * @cra_module: owning module, used for module_name()/refcounting
 */
260 struct crypto_alg {
261 struct list_head cra_list;
262 struct list_head cra_users;
263
264 u32 cra_flags;
265 unsigned int cra_blocksize;
266 unsigned int cra_ctxsize;
267 unsigned int cra_alignmask;
268
269 int cra_priority;
270 atomic_t cra_refcnt;
271
272 char cra_name[CRYPTO_MAX_ALG_NAME];
273 char cra_driver_name[CRYPTO_MAX_ALG_NAME];
274
275 const struct crypto_type *cra_type;
276
277 union {
278 struct ablkcipher_alg ablkcipher;
279 struct aead_alg aead;
280 struct blkcipher_alg blkcipher;
281 struct cipher_alg cipher;
282 struct digest_alg digest;
283 struct hash_alg hash;
284 struct compress_alg compress;
285 } cra_u;
286
287 int (*cra_init)(struct crypto_tfm *tfm);
288 void (*cra_exit)(struct crypto_tfm *tfm);
289 void (*cra_destroy)(struct crypto_alg *alg);
290
291 struct module *cra_module;
292 };
293
294 /*
295 * Algorithm registration interface.
296 */
297 int crypto_register_alg(struct crypto_alg *alg);
298 int crypto_unregister_alg(struct crypto_alg *alg);
299
300 /*
301 * Algorithm query interface.
302 */
303 #ifdef CONFIG_CRYPTO
304 int crypto_has_alg(const char *name, u32 type, u32 mask);
305 #else
306 static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
307 {
308 return 0;
309 }
310 #endif
311
312 /*
313 * Transforms: user-instantiated objects which encapsulate algorithms
314 * and core processing logic. Managed via crypto_alloc_*() and
315 * crypto_free_*(), as well as the various helpers below.
316 */
317
318 struct ablkcipher_tfm {
319 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
320 unsigned int keylen);
321 int (*encrypt)(struct ablkcipher_request *req);
322 int (*decrypt)(struct ablkcipher_request *req);
323 unsigned int ivsize;
324 unsigned int reqsize;
325 };
326
327 struct aead_tfm {
328 int (*setkey)(struct crypto_aead *tfm, const u8 *key,
329 unsigned int keylen);
330 int (*encrypt)(struct aead_request *req);
331 int (*decrypt)(struct aead_request *req);
332 unsigned int ivsize;
333 unsigned int authsize;
334 unsigned int reqsize;
335 };
336
337 struct blkcipher_tfm {
338 void *iv;
339 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
340 unsigned int keylen);
341 int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
342 struct scatterlist *src, unsigned int nbytes);
343 int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
344 struct scatterlist *src, unsigned int nbytes);
345 };
346
347 struct cipher_tfm {
348 int (*cit_setkey)(struct crypto_tfm *tfm,
349 const u8 *key, unsigned int keylen);
350 void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
351 void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
352 };
353
354 struct hash_tfm {
355 int (*init)(struct hash_desc *desc);
356 int (*update)(struct hash_desc *desc,
357 struct scatterlist *sg, unsigned int nsg);
358 int (*final)(struct hash_desc *desc, u8 *out);
359 int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
360 unsigned int nsg, u8 *out);
361 int (*setkey)(struct crypto_hash *tfm, const u8 *key,
362 unsigned int keylen);
363 unsigned int digestsize;
364 };
365
366 struct compress_tfm {
367 int (*cot_compress)(struct crypto_tfm *tfm,
368 const u8 *src, unsigned int slen,
369 u8 *dst, unsigned int *dlen);
370 int (*cot_decompress)(struct crypto_tfm *tfm,
371 const u8 *src, unsigned int slen,
372 u8 *dst, unsigned int *dlen);
373 };
374
375 #define crt_ablkcipher crt_u.ablkcipher
376 #define crt_aead crt_u.aead
377 #define crt_blkcipher crt_u.blkcipher
378 #define crt_cipher crt_u.cipher
379 #define crt_hash crt_u.hash
380 #define crt_compress crt_u.compress
381
382 struct crypto_tfm {
383
384 u32 crt_flags;
385
386 union {
387 struct ablkcipher_tfm ablkcipher;
388 struct aead_tfm aead;
389 struct blkcipher_tfm blkcipher;
390 struct cipher_tfm cipher;
391 struct hash_tfm hash;
392 struct compress_tfm compress;
393 } crt_u;
394
395 struct crypto_alg *__crt_alg;
396
397 void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
398 };
399
400 struct crypto_ablkcipher {
401 struct crypto_tfm base;
402 };
403
404 struct crypto_aead {
405 struct crypto_tfm base;
406 };
407
408 struct crypto_blkcipher {
409 struct crypto_tfm base;
410 };
411
412 struct crypto_cipher {
413 struct crypto_tfm base;
414 };
415
416 struct crypto_comp {
417 struct crypto_tfm base;
418 };
419
420 struct crypto_hash {
421 struct crypto_tfm base;
422 };
423
424 enum {
425 CRYPTOA_UNSPEC,
426 CRYPTOA_ALG,
427 CRYPTOA_TYPE,
428 __CRYPTOA_MAX,
429 };
430
431 #define CRYPTOA_MAX (__CRYPTOA_MAX - 1)
432
433 struct crypto_attr_alg {
434 char name[CRYPTO_MAX_ALG_NAME];
435 };
436
437 struct crypto_attr_type {
438 u32 type;
439 u32 mask;
440 };
441
442 /*
443 * Transform user interface.
444 */
445
446 struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
447 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
448 void crypto_free_tfm(struct crypto_tfm *tfm);
449
450 /*
451 * Transform helpers which query the underlying algorithm.
452 */
453 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
454 {
455 return tfm->__crt_alg->cra_name;
456 }
457
458 static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
459 {
460 return tfm->__crt_alg->cra_driver_name;
461 }
462
463 static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
464 {
465 return tfm->__crt_alg->cra_priority;
466 }
467
468 static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
469 {
470 return module_name(tfm->__crt_alg->cra_module);
471 }
472
473 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
474 {
475 return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
476 }
477
478 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
479 {
480 return tfm->__crt_alg->cra_blocksize;
481 }
482
483 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
484 {
485 return tfm->__crt_alg->cra_alignmask;
486 }
487
488 static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
489 {
490 return tfm->crt_flags;
491 }
492
493 static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
494 {
495 tfm->crt_flags |= flags;
496 }
497
498 static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
499 {
500 tfm->crt_flags &= ~flags;
501 }
502
/* Return the transform's private context area (the trailing __crt_ctx). */
503 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
504 {
505 return tfm->__crt_ctx;
506 }
507
/*
 * Alignment guaranteed for the start of a transform's context area.
 * The local pointer is never dereferenced: __alignof__ is a compile-time
 * operator applied to the type of the member, so the uninitialized
 * variable is harmless and generates no code.
 */
508 static inline unsigned int crypto_tfm_ctx_alignment(void)
509 {
510 struct crypto_tfm *tfm;
511 return __alignof__(tfm->__crt_ctx);
512 }
513
514 /*
515 * API wrappers.
516 */
517 static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
518 struct crypto_tfm *tfm)
519 {
520 return (struct crypto_ablkcipher *)tfm;
521 }
522
/*
 * Allocate an asynchronous block cipher handle. Forces the type nibble
 * to BLKCIPHER and requires an exact type match via the mask; unlike
 * crypto_alloc_blkcipher(), CRYPTO_ALG_ASYNC is left under the caller's
 * control, so async implementations are not excluded.
 * On error the crypto_alloc_base() result (an ERR_PTR) is passed through.
 */
523 static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
524 const char *alg_name, u32 type, u32 mask)
525 {
526 type &= ~CRYPTO_ALG_TYPE_MASK;
527 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
528 mask |= CRYPTO_ALG_TYPE_MASK;
529
530 return __crypto_ablkcipher_cast(
531 crypto_alloc_base(alg_name, type, mask));
532 }
533
534 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
535 struct crypto_ablkcipher *tfm)
536 {
537 return &tfm->base;
538 }
539
540 static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
541 {
542 crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
543 }
544
545 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
546 u32 mask)
547 {
548 type &= ~CRYPTO_ALG_TYPE_MASK;
549 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
550 mask |= CRYPTO_ALG_TYPE_MASK;
551
552 return crypto_has_alg(alg_name, type, mask);
553 }
554
555 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
556 struct crypto_ablkcipher *tfm)
557 {
558 return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
559 }
560
561 static inline unsigned int crypto_ablkcipher_ivsize(
562 struct crypto_ablkcipher *tfm)
563 {
564 return crypto_ablkcipher_crt(tfm)->ivsize;
565 }
566
567 static inline unsigned int crypto_ablkcipher_blocksize(
568 struct crypto_ablkcipher *tfm)
569 {
570 return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
571 }
572
573 static inline unsigned int crypto_ablkcipher_alignmask(
574 struct crypto_ablkcipher *tfm)
575 {
576 return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
577 }
578
579 static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
580 {
581 return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
582 }
583
584 static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
585 u32 flags)
586 {
587 crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
588 }
589
590 static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
591 u32 flags)
592 {
593 crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
594 }
595
596 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
597 const u8 *key, unsigned int keylen)
598 {
599 return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
600 }
601
602 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
603 struct ablkcipher_request *req)
604 {
605 return __crypto_ablkcipher_cast(req->base.tfm);
606 }
607
608 static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
609 {
610 struct ablkcipher_tfm *crt =
611 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
612 return crt->encrypt(req);
613 }
614
615 static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
616 {
617 struct ablkcipher_tfm *crt =
618 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
619 return crt->decrypt(req);
620 }
621
622 static inline int crypto_ablkcipher_reqsize(struct crypto_ablkcipher *tfm)
623 {
624 return crypto_ablkcipher_crt(tfm)->reqsize;
625 }
626
627 static inline void ablkcipher_request_set_tfm(
628 struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
629 {
630 req->base.tfm = crypto_ablkcipher_tfm(tfm);
631 }
632
633 static inline struct ablkcipher_request *ablkcipher_request_cast(
634 struct crypto_async_request *req)
635 {
636 return container_of(req, struct ablkcipher_request, base);
637 }
638
/*
 * Allocate an ablkcipher request for @tfm, including the trailing
 * reqsize bytes of implementation-private context, and bind it to the
 * transform. Returns NULL on allocation failure.
 */
639 static inline struct ablkcipher_request *ablkcipher_request_alloc(
640 struct crypto_ablkcipher *tfm, gfp_t gfp)
641 {
642 struct ablkcipher_request *req;
643
644 req = kmalloc(sizeof(struct ablkcipher_request) +
645 crypto_ablkcipher_reqsize(tfm), gfp);
646
647 if (likely(req))
648 ablkcipher_request_set_tfm(req, tfm);
649
650 return req;
651 }
652
653 static inline void ablkcipher_request_free(struct ablkcipher_request *req)
654 {
655 kfree(req);
656 }
657
658 static inline void ablkcipher_request_set_callback(
659 struct ablkcipher_request *req,
660 u32 flags, crypto_completion_t complete, void *data)
661 {
662 req->base.complete = complete;
663 req->base.data = data;
664 req->base.flags = flags;
665 }
666
667 static inline void ablkcipher_request_set_crypt(
668 struct ablkcipher_request *req,
669 struct scatterlist *src, struct scatterlist *dst,
670 unsigned int nbytes, void *iv)
671 {
672 req->src = src;
673 req->dst = dst;
674 req->nbytes = nbytes;
675 req->info = iv;
676 }
677
678 static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
679 {
680 return (struct crypto_aead *)tfm;
681 }
682
/*
 * Allocate an AEAD transform handle. The type nibble is forced to
 * CRYPTO_ALG_TYPE_AEAD and the mask requires an exact type match.
 * The crypto_alloc_base() result (an ERR_PTR on failure) is passed
 * through unchanged.
 */
683 static inline struct crypto_aead *crypto_alloc_aead(const char *alg_name,
684 u32 type, u32 mask)
685 {
686 type &= ~CRYPTO_ALG_TYPE_MASK;
687 type |= CRYPTO_ALG_TYPE_AEAD;
688 mask |= CRYPTO_ALG_TYPE_MASK;
689
690 return __crypto_aead_cast(crypto_alloc_base(alg_name, type, mask));
691 }
692
693 static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
694 {
695 return &tfm->base;
696 }
697
698 static inline void crypto_free_aead(struct crypto_aead *tfm)
699 {
700 crypto_free_tfm(crypto_aead_tfm(tfm));
701 }
702
703 static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
704 {
705 return &crypto_aead_tfm(tfm)->crt_aead;
706 }
707
708 static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
709 {
710 return crypto_aead_crt(tfm)->ivsize;
711 }
712
713 static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
714 {
715 return crypto_aead_crt(tfm)->authsize;
716 }
717
718 static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
719 {
720 return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
721 }
722
723 static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
724 {
725 return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
726 }
727
728 static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
729 {
730 return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
731 }
732
733 static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
734 {
735 crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
736 }
737
738 static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
739 {
740 crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
741 }
742
743 static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
744 unsigned int keylen)
745 {
746 return crypto_aead_crt(tfm)->setkey(tfm, key, keylen);
747 }
748
749 static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
750 {
751 return __crypto_aead_cast(req->base.tfm);
752 }
753
754 static inline int crypto_aead_encrypt(struct aead_request *req)
755 {
756 return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
757 }
758
759 static inline int crypto_aead_decrypt(struct aead_request *req)
760 {
761 return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
762 }
763
764 static inline int crypto_aead_reqsize(struct crypto_aead *tfm)
765 {
766 return crypto_aead_crt(tfm)->reqsize;
767 }
768
769 static inline void aead_request_set_tfm(struct aead_request *req,
770 struct crypto_aead *tfm)
771 {
772 req->base.tfm = crypto_aead_tfm(tfm);
773 }
774
/*
 * Allocate an AEAD request for @tfm, including the trailing reqsize
 * bytes of implementation-private context, and bind it to the
 * transform. Returns NULL on allocation failure.
 */
775 static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
776 gfp_t gfp)
777 {
778 struct aead_request *req;
779
780 req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);
781
782 if (likely(req))
783 aead_request_set_tfm(req, tfm);
784
785 return req;
786 }
787
788 static inline void aead_request_free(struct aead_request *req)
789 {
790 kfree(req);
791 }
792
793 static inline void aead_request_set_callback(struct aead_request *req,
794 u32 flags,
795 crypto_completion_t complete,
796 void *data)
797 {
798 req->base.complete = complete;
799 req->base.data = data;
800 req->base.flags = flags;
801 }
802
/*
 * Set the data to be operated on: source/destination scatterlists,
 * the number of bytes to encrypt or decrypt, and the IV. Associated
 * data is set separately via aead_request_set_assoc().
 */
803 static inline void aead_request_set_crypt(struct aead_request *req,
804 struct scatterlist *src,
805 struct scatterlist *dst,
806 unsigned int cryptlen, u8 *iv)
807 {
808 req->src = src;
809 req->dst = dst;
810 req->cryptlen = cryptlen;
811 req->iv = iv;
812 }
813
/*
 * Attach the associated (authenticated-only) data to the request:
 * scatterlist plus its length in bytes.
 */
814 static inline void aead_request_set_assoc(struct aead_request *req,
815 struct scatterlist *assoc,
816 unsigned int assoclen)
817 {
818 req->assoc = assoc;
819 req->assoclen = assoclen;
820 }
821
822 static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
823 struct crypto_tfm *tfm)
824 {
825 return (struct crypto_blkcipher *)tfm;
826 }
827
828 static inline struct crypto_blkcipher *crypto_blkcipher_cast(
829 struct crypto_tfm *tfm)
830 {
831 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
832 return __crypto_blkcipher_cast(tfm);
833 }
834
835 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
836 const char *alg_name, u32 type, u32 mask)
837 {
838 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
839 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
840 mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
841
842 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
843 }
844
845 static inline struct crypto_tfm *crypto_blkcipher_tfm(
846 struct crypto_blkcipher *tfm)
847 {
848 return &tfm->base;
849 }
850
851 static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
852 {
853 crypto_free_tfm(crypto_blkcipher_tfm(tfm));
854 }
855
856 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
857 {
858 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
859 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
860 mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
861
862 return crypto_has_alg(alg_name, type, mask);
863 }
864
865 static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
866 {
867 return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
868 }
869
870 static inline struct blkcipher_tfm *crypto_blkcipher_crt(
871 struct crypto_blkcipher *tfm)
872 {
873 return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
874 }
875
876 static inline struct blkcipher_alg *crypto_blkcipher_alg(
877 struct crypto_blkcipher *tfm)
878 {
879 return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
880 }
881
882 static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
883 {
884 return crypto_blkcipher_alg(tfm)->ivsize;
885 }
886
887 static inline unsigned int crypto_blkcipher_blocksize(
888 struct crypto_blkcipher *tfm)
889 {
890 return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
891 }
892
893 static inline unsigned int crypto_blkcipher_alignmask(
894 struct crypto_blkcipher *tfm)
895 {
896 return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
897 }
898
899 static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
900 {
901 return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
902 }
903
904 static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
905 u32 flags)
906 {
907 crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
908 }
909
910 static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
911 u32 flags)
912 {
913 crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
914 }
915
916 static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
917 const u8 *key, unsigned int keylen)
918 {
919 return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
920 key, keylen);
921 }
922
923 static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
924 struct scatterlist *dst,
925 struct scatterlist *src,
926 unsigned int nbytes)
927 {
928 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
929 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
930 }
931
932 static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
933 struct scatterlist *dst,
934 struct scatterlist *src,
935 unsigned int nbytes)
936 {
937 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
938 }
939
940 static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
941 struct scatterlist *dst,
942 struct scatterlist *src,
943 unsigned int nbytes)
944 {
945 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
946 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
947 }
948
949 static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
950 struct scatterlist *dst,
951 struct scatterlist *src,
952 unsigned int nbytes)
953 {
954 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
955 }
956
957 static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
958 const u8 *src, unsigned int len)
959 {
960 memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
961 }
962
963 static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
964 u8 *dst, unsigned int len)
965 {
966 memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
967 }
968
969 static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
970 {
971 return (struct crypto_cipher *)tfm;
972 }
973
974 static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
975 {
976 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
977 return __crypto_cipher_cast(tfm);
978 }
979
980 static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
981 u32 type, u32 mask)
982 {
983 type &= ~CRYPTO_ALG_TYPE_MASK;
984 type |= CRYPTO_ALG_TYPE_CIPHER;
985 mask |= CRYPTO_ALG_TYPE_MASK;
986
987 return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
988 }
989
990 static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
991 {
992 return &tfm->base;
993 }
994
995 static inline void crypto_free_cipher(struct crypto_cipher *tfm)
996 {
997 crypto_free_tfm(crypto_cipher_tfm(tfm));
998 }
999
1000 static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
1001 {
1002 type &= ~CRYPTO_ALG_TYPE_MASK;
1003 type |= CRYPTO_ALG_TYPE_CIPHER;
1004 mask |= CRYPTO_ALG_TYPE_MASK;
1005
1006 return crypto_has_alg(alg_name, type, mask);
1007 }
1008
1009 static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
1010 {
1011 return &crypto_cipher_tfm(tfm)->crt_cipher;
1012 }
1013
1014 static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
1015 {
1016 return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
1017 }
1018
1019 static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
1020 {
1021 return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
1022 }
1023
1024 static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
1025 {
1026 return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
1027 }
1028
1029 static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
1030 u32 flags)
1031 {
1032 crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
1033 }
1034
1035 static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
1036 u32 flags)
1037 {
1038 crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
1039 }
1040
1041 static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
1042 const u8 *key, unsigned int keylen)
1043 {
1044 return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
1045 key, keylen);
1046 }
1047
1048 static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
1049 u8 *dst, const u8 *src)
1050 {
1051 crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
1052 dst, src);
1053 }
1054
1055 static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
1056 u8 *dst, const u8 *src)
1057 {
1058 crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
1059 dst, src);
1060 }
1061
1062 static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
1063 {
1064 return (struct crypto_hash *)tfm;
1065 }
1066
/*
 * Downcast a generic transform to a hash transform, asserting the type.
 * DIGEST (0x2) and HASH (0x3) differ only in bit 0, which
 * CRYPTO_ALG_TYPE_HASH_MASK (0xe) clears — so the BUG_ON accepts both.
 */
1067 static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
1068 {
1069 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
1070 CRYPTO_ALG_TYPE_HASH_MASK);
1071 return __crypto_hash_cast(tfm);
1072 }
1073
1074 static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
1075 u32 type, u32 mask)
1076 {
1077 type &= ~CRYPTO_ALG_TYPE_MASK;
1078 type |= CRYPTO_ALG_TYPE_HASH;
1079 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1080
1081 return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
1082 }
1083
1084 static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
1085 {
1086 return &tfm->base;
1087 }
1088
1089 static inline void crypto_free_hash(struct crypto_hash *tfm)
1090 {
1091 crypto_free_tfm(crypto_hash_tfm(tfm));
1092 }
1093
1094 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
1095 {
1096 type &= ~CRYPTO_ALG_TYPE_MASK;
1097 type |= CRYPTO_ALG_TYPE_HASH;
1098 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1099
1100 return crypto_has_alg(alg_name, type, mask);
1101 }
1102
1103 static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
1104 {
1105 return &crypto_hash_tfm(tfm)->crt_hash;
1106 }
1107
1108 static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
1109 {
1110 return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
1111 }
1112
1113 static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
1114 {
1115 return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
1116 }
1117
1118 static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
1119 {
1120 return crypto_hash_crt(tfm)->digestsize;
1121 }
1122
1123 static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
1124 {
1125 return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
1126 }
1127
1128 static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
1129 {
1130 crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
1131 }
1132
1133 static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
1134 {
1135 crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
1136 }
1137
1138 static inline int crypto_hash_init(struct hash_desc *desc)
1139 {
1140 return crypto_hash_crt(desc->tfm)->init(desc);
1141 }
1142
1143 static inline int crypto_hash_update(struct hash_desc *desc,
1144 struct scatterlist *sg,
1145 unsigned int nbytes)
1146 {
1147 return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
1148 }
1149
1150 static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
1151 {
1152 return crypto_hash_crt(desc->tfm)->final(desc, out);
1153 }
1154
1155 static inline int crypto_hash_digest(struct hash_desc *desc,
1156 struct scatterlist *sg,
1157 unsigned int nbytes, u8 *out)
1158 {
1159 return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
1160 }
1161
1162 static inline int crypto_hash_setkey(struct crypto_hash *hash,
1163 const u8 *key, unsigned int keylen)
1164 {
1165 return crypto_hash_crt(hash)->setkey(hash, key, keylen);
1166 }
1167
1168 static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
1169 {
1170 return (struct crypto_comp *)tfm;
1171 }
1172
1173 static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
1174 {
1175 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
1176 CRYPTO_ALG_TYPE_MASK);
1177 return __crypto_comp_cast(tfm);
1178 }
1179
1180 static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
1181 u32 type, u32 mask)
1182 {
1183 type &= ~CRYPTO_ALG_TYPE_MASK;
1184 type |= CRYPTO_ALG_TYPE_COMPRESS;
1185 mask |= CRYPTO_ALG_TYPE_MASK;
1186
1187 return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
1188 }
1189
1190 static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
1191 {
1192 return &tfm->base;
1193 }
1194
1195 static inline void crypto_free_comp(struct crypto_comp *tfm)
1196 {
1197 crypto_free_tfm(crypto_comp_tfm(tfm));
1198 }
1199
1200 static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
1201 {
1202 type &= ~CRYPTO_ALG_TYPE_MASK;
1203 type |= CRYPTO_ALG_TYPE_COMPRESS;
1204 mask |= CRYPTO_ALG_TYPE_MASK;
1205
1206 return crypto_has_alg(alg_name, type, mask);
1207 }
1208
1209 static inline const char *crypto_comp_name(struct crypto_comp *tfm)
1210 {
1211 return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
1212 }
1213
1214 static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
1215 {
1216 return &crypto_comp_tfm(tfm)->crt_compress;
1217 }
1218
1219 static inline int crypto_comp_compress(struct crypto_comp *tfm,
1220 const u8 *src, unsigned int slen,
1221 u8 *dst, unsigned int *dlen)
1222 {
1223 return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
1224 src, slen, dst, dlen);
1225 }
1226
1227 static inline int crypto_comp_decompress(struct crypto_comp *tfm,
1228 const u8 *src, unsigned int slen,
1229 u8 *dst, unsigned int *dlen)
1230 {
1231 return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
1232 src, slen, dst, dlen);
1233 }
1234
1235 #endif /* _LINUX_CRYPTO_H */
1236
This page took 0.079452 seconds and 5 git commands to generate.