[CRYPTO] cipher: Remove obsolete fields from cipher_tfm
include/linux/crypto.h
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <asm/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK 0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER 0x00000001
#define CRYPTO_ALG_TYPE_DIGEST 0x00000002
#define CRYPTO_ALG_TYPE_HASH 0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000004
#define CRYPTO_ALG_TYPE_COMPRESS 0x00000005

#define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e

#define CRYPTO_ALG_LARVAL 0x00000010
#define CRYPTO_ALG_DEAD 0x00000020
#define CRYPTO_ALG_DYING 0x00000040
#define CRYPTO_ALG_ASYNC 0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK 0x00000100

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK 0x000fff00
#define CRYPTO_TFM_RES_MASK 0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP 0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG 0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY 0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME 64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types. In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#endif

#ifdef CRYPTO_MINALIGN
#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
#else
#define CRYPTO_MINALIGN_ATTR
#endif

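/*
 * Example (illustrative sketch, not part of the original header): a
 * per-transform context containing a u64 member.  Because such a context is
 * carved out of crypto_tfm::__crt_ctx, which is tagged CRYPTO_MINALIGN_ATTR,
 * the u64 below can be accessed directly even on 32-bit platforms such as
 * arm.  The structure name and fields are hypothetical; a driver would set
 * cra_ctxsize to sizeof(struct example_hash_ctx).
 */
struct example_hash_ctx {
        u64 byte_count;         /* needs 64-bit alignment on some 32-bit arches */
        u8 partial_block[64];
};
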
struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_queue;
struct crypto_tfm;
struct crypto_type;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
        struct list_head list;
        crypto_completion_t complete;
        void *data;
        struct crypto_tfm *tfm;

        u32 flags;
};

struct ablkcipher_request {
        struct crypto_async_request base;

        unsigned int nbytes;

        void *info;

        struct scatterlist *src;
        struct scatterlist *dst;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
        struct crypto_blkcipher *tfm;
        void *info;
        u32 flags;
};

struct cipher_desc {
        struct crypto_tfm *tfm;
        void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
                const u8 *src, unsigned int nbytes);
        void *info;
};

struct hash_desc {
        struct crypto_hash *tfm;
        u32 flags;
};

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);

        struct crypto_queue *queue;

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};

struct blkcipher_alg {
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes);

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};

struct cipher_alg {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                unsigned int keylen);
        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct digest_alg {
        unsigned int dia_digestsize;
        void (*dia_init)(struct crypto_tfm *tfm);
        void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
                unsigned int len);
        void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
        int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                unsigned int keylen);
};

struct hash_alg {
        int (*init)(struct hash_desc *desc);
        int (*update)(struct hash_desc *desc, struct scatterlist *sg,
                unsigned int nbytes);
        int (*final)(struct hash_desc *desc, u8 *out);
        int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
                unsigned int nbytes, u8 *out);
        int (*setkey)(struct crypto_hash *tfm, const u8 *key,
                unsigned int keylen);

        unsigned int digestsize;
};

struct compress_alg {
        int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
                unsigned int slen, u8 *dst, unsigned int *dlen);
        int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
                unsigned int slen, u8 *dst, unsigned int *dlen);
};

#define cra_ablkcipher cra_u.ablkcipher
#define cra_blkcipher cra_u.blkcipher
#define cra_cipher cra_u.cipher
#define cra_digest cra_u.digest
#define cra_hash cra_u.hash
#define cra_compress cra_u.compress

struct crypto_alg {
        struct list_head cra_list;
        struct list_head cra_users;

        u32 cra_flags;
        unsigned int cra_blocksize;
        unsigned int cra_ctxsize;
        unsigned int cra_alignmask;

        int cra_priority;
        atomic_t cra_refcnt;

        char cra_name[CRYPTO_MAX_ALG_NAME];
        char cra_driver_name[CRYPTO_MAX_ALG_NAME];

        const struct crypto_type *cra_type;

        union {
                struct ablkcipher_alg ablkcipher;
                struct blkcipher_alg blkcipher;
                struct cipher_alg cipher;
                struct digest_alg digest;
                struct hash_alg hash;
                struct compress_alg compress;
        } cra_u;

        int (*cra_init)(struct crypto_tfm *tfm);
        void (*cra_exit)(struct crypto_tfm *tfm);
        void (*cra_destroy)(struct crypto_alg *alg);

        struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);

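/*
 * Example (illustrative sketch, not part of the original header): filling in
 * struct crypto_alg for a toy single-block cipher and registering it with
 * crypto_register_alg().  The "example-not" algorithm, its callbacks and the
 * module boilerplate are hypothetical names invented for this sketch;
 * __init/__exit and module_init()/module_exit() are assumed to come from
 * <linux/init.h>.  A real cipher would keep its key schedule in
 * crypto_tfm_ctx(tfm) (declared later in this header) and size cra_ctxsize
 * accordingly.
 */
static int example_not_setkey(struct crypto_tfm *tfm, const u8 *key,
                unsigned int keylen)
{
        return 0;       /* toy cipher: no key material to schedule */
}

static void example_not_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        *dst = ~*src;   /* one block (here: one byte); the operation is its own inverse */
}

static struct crypto_alg example_not_alg = {
        .cra_name        = "example-not",
        .cra_driver_name = "example-not-generic",
        .cra_priority    = 100,
        .cra_flags       = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize   = 1,
        .cra_ctxsize     = 0,
        .cra_module      = THIS_MODULE,
        .cra_list        = LIST_HEAD_INIT(example_not_alg.cra_list),
        .cra_u           = { .cipher = {
                .cia_min_keysize = 0,
                .cia_max_keysize = 0,
                .cia_setkey      = example_not_setkey,
                .cia_encrypt     = example_not_crypt,
                .cia_decrypt     = example_not_crypt } }
};

static int __init example_not_init(void)
{
        return crypto_register_alg(&example_not_alg);
}

static void __exit example_not_exit(void)
{
        crypto_unregister_alg(&example_not_alg);
}
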
/*
 * Algorithm query interface.
 */
#ifdef CONFIG_CRYPTO
int crypto_has_alg(const char *name, u32 type, u32 mask);
#else
static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        return 0;
}
#endif

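/*
 * Example (illustrative, not part of the original header): probing for an
 * algorithm before committing to a code path.  Passing a zero type and mask
 * is assumed here to match any implementation of the name that can be found
 * or autoloaded; typed callers normally use the crypto_has_*() wrappers
 * further down instead.
 */
static inline int example_have_aes(void)
{
        return crypto_has_alg("aes", 0, 0);
}
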
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic. Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);
        unsigned int ivsize;
        unsigned int reqsize;
};

struct blkcipher_tfm {
        void *iv;
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                struct scatterlist *src, unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
        int (*cit_setkey)(struct crypto_tfm *tfm,
                const u8 *key, unsigned int keylen);
        void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
        int (*init)(struct hash_desc *desc);
        int (*update)(struct hash_desc *desc,
                struct scatterlist *sg, unsigned int nsg);
        int (*final)(struct hash_desc *desc, u8 *out);
        int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
                unsigned int nsg, u8 *out);
        int (*setkey)(struct crypto_hash *tfm, const u8 *key,
                unsigned int keylen);
        unsigned int digestsize;
};

struct compress_tfm {
        int (*cot_compress)(struct crypto_tfm *tfm,
                const u8 *src, unsigned int slen,
                u8 *dst, unsigned int *dlen);
        int (*cot_decompress)(struct crypto_tfm *tfm,
                const u8 *src, unsigned int slen,
                u8 *dst, unsigned int *dlen);
};

#define crt_ablkcipher crt_u.ablkcipher
#define crt_blkcipher crt_u.blkcipher
#define crt_cipher crt_u.cipher
#define crt_hash crt_u.hash
#define crt_compress crt_u.compress

struct crypto_tfm {

        u32 crt_flags;

        union {
                struct ablkcipher_tfm ablkcipher;
                struct blkcipher_tfm blkcipher;
                struct cipher_tfm cipher;
                struct hash_tfm hash;
                struct compress_tfm compress;
        } crt_u;

        struct crypto_alg *__crt_alg;

        void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
        struct crypto_tfm base;
};

struct crypto_blkcipher {
        struct crypto_tfm base;
};

struct crypto_cipher {
        struct crypto_tfm base;
};

struct crypto_comp {
        struct crypto_tfm base;
};

struct crypto_hash {
        struct crypto_tfm base;
};

enum {
        CRYPTOA_UNSPEC,
        CRYPTOA_ALG,
        CRYPTOA_TYPE,
        __CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

struct crypto_attr_alg {
        char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
        u32 type;
        u32 mask;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_free_tfm(struct crypto_tfm *tfm);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_priority;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
        return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
        return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
        return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
        struct crypto_tfm *tfm;
        return __alignof__(tfm->__crt_ctx);
}

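/*
 * Example (illustrative, not part of the original header): a cra_init hook
 * reaching the per-transform context that the core allocated according to
 * cra_ctxsize, via crypto_tfm_ctx().  The context layout and the names below
 * are hypothetical; whatever is set up here lives until cra_exit() runs.
 */
struct example_cipher_ctx {
        u32 round_keys[60];
        unsigned int rounds;
};

static int example_cipher_init_tfm(struct crypto_tfm *tfm)
{
        struct example_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        /* No key yet; the key schedule is filled in by the setkey callback. */
        ctx->rounds = 0;
        return 0;
}
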
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_ablkcipher *)tfm;
}

static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
        const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_ablkcipher_cast(
                crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
        struct crypto_ablkcipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
        crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
        u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
        struct crypto_ablkcipher *tfm)
{
        return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
        u32 flags)
{
        crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
        u32 flags)
{
        crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
        const u8 *key, unsigned int keylen)
{
        return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
        struct ablkcipher_request *req)
{
        return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
        return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
        return crt->decrypt(req);
}

static inline int crypto_ablkcipher_reqsize(struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
        struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
        req->base.tfm = crypto_ablkcipher_tfm(tfm);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
        struct crypto_async_request *req)
{
        return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
        struct crypto_ablkcipher *tfm, gfp_t gfp)
{
        struct ablkcipher_request *req;

        req = kmalloc(sizeof(struct ablkcipher_request) +
                crypto_ablkcipher_reqsize(tfm), gfp);

        if (likely(req))
                ablkcipher_request_set_tfm(req, tfm);

        return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
        kfree(req);
}

static inline void ablkcipher_request_set_callback(
        struct ablkcipher_request *req,
        u32 flags, crypto_completion_t complete, void *data)
{
        req->base.complete = complete;
        req->base.data = data;
        req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
        struct ablkcipher_request *req,
        struct scatterlist *src, struct scatterlist *dst,
        unsigned int nbytes, void *iv)
{
        req->src = src;
        req->dst = dst;
        req->nbytes = nbytes;
        req->info = iv;
}

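/*
 * Example (illustrative sketch, not part of the original header): one
 * asynchronous encryption pass over a linear buffer with the ablkcipher
 * interface, assuming an asynchronous implementation of "cbc(aes)" is
 * available.  sg_init_one() (<linux/scatterlist.h>), IS_ERR()/PTR_ERR()
 * (<linux/err.h>) and the error constants (<linux/errno.h>) are assumptions
 * of this sketch; by this sketch's convention the caller's done() callback
 * frees the request and the transform once the operation completes.
 */
static int example_ablkcipher_encrypt(crypto_completion_t done, void *done_data,
        const u8 *key, unsigned int keylen,
        u8 *iv, u8 *buf, unsigned int len)
{
        struct crypto_ablkcipher *tfm;
        struct ablkcipher_request *req;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_ablkcipher_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, buf, len);
        ablkcipher_request_set_callback(req,
                CRYPTO_TFM_REQ_MAY_SLEEP | CRYPTO_TFM_REQ_MAY_BACKLOG,
                done, done_data);
        ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

        err = crypto_ablkcipher_encrypt(req);
        if (err == -EINPROGRESS || err == -EBUSY)
                return 0;       /* in flight: done() reports completion and cleans up */

        ablkcipher_request_free(req);
out_free_tfm:
        crypto_free_ablkcipher(tfm);
        return err;
}
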
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
        return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
        const char *alg_name, u32 type, u32 mask)
{
        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;

        return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
        struct crypto_blkcipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
        crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;

        return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
        return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
        u32 flags)
{
        crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
        u32 flags)
{
        crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
        const u8 *key, unsigned int keylen)
{
        return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
                key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
        struct scatterlist *dst,
        struct scatterlist *src,
        unsigned int nbytes)
{
        desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
        return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
        struct scatterlist *dst,
        struct scatterlist *src,
        unsigned int nbytes)
{
        return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
        struct scatterlist *dst,
        struct scatterlist *src,
        unsigned int nbytes)
{
        desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
        return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
        struct scatterlist *dst,
        struct scatterlist *src,
        unsigned int nbytes)
{
        return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
        const u8 *src, unsigned int len)
{
        memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
        u8 *dst, unsigned int len)
{
        memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}

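/*
 * Example (illustrative, not part of the original header): synchronous CBC
 * encryption in place with the blkcipher interface.  Passing CRYPTO_ALG_ASYNC
 * in the mask restricts the lookup to synchronous implementations (the
 * wrapper above forces this as well).  "cbc(aes)", sg_init_one()
 * (<linux/scatterlist.h>) and IS_ERR()/PTR_ERR() (<linux/err.h>) are
 * assumptions of this sketch; len must be a multiple of the cipher block
 * size for CBC.
 */
static int example_blkcipher_encrypt(const u8 *key, unsigned int keylen,
        const u8 *iv, u8 *buf, unsigned int len)
{
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_blkcipher_setkey(tfm, key, keylen);
        if (err)
                goto out;

        crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));

        desc.tfm = tfm;
        desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        sg_init_one(&sg, buf, len);
        err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
out:
        crypto_free_blkcipher(tfm);
        return err;
}
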
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
        u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_CIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
        crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_CIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
        return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
        return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
        u32 flags)
{
        crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
        u32 flags)
{
        crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
        const u8 *key, unsigned int keylen)
{
        return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
                key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
        u8 *dst, const u8 *src)
{
        crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
                dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
        u8 *dst, const u8 *src)
{
        crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
                dst, src);
}

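/*
 * Example (illustrative, not part of the original header): the single-block
 * cipher interface processes exactly one block per call and leaves chaining
 * modes to the caller.  dst and src must each hold
 * crypto_cipher_blocksize(tfm) bytes; "aes" and IS_ERR()/PTR_ERR()
 * (<linux/err.h>) are assumptions of this sketch.
 */
static int example_cipher_one_block(const u8 *key, unsigned int keylen,
        u8 *dst, const u8 *src)
{
        struct crypto_cipher *tfm;
        int err;

        tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_cipher_setkey(tfm, key, keylen);
        if (!err)
                crypto_cipher_encrypt_one(tfm, dst, src);       /* one block only */

        crypto_free_cipher(tfm);
        return err;
}
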
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
        BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
                CRYPTO_ALG_TYPE_HASH_MASK);
        return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
        u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;

        return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
        crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
        return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
        return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
        return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
        crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
        crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
        return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
        struct scatterlist *sg,
        unsigned int nbytes)
{
        return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
        return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
        struct scatterlist *sg,
        unsigned int nbytes, u8 *out)
{
        return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

static inline int crypto_hash_setkey(struct crypto_hash *hash,
        const u8 *key, unsigned int keylen)
{
        return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}

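/*
 * Example (illustrative, not part of the original header): a one-shot digest
 * over a linear buffer.  "sha1" and sg_init_one() (<linux/scatterlist.h>)
 * are assumptions of this sketch; out must hold crypto_hash_digestsize(tfm)
 * bytes, and CRYPTO_ALG_ASYNC in the mask restricts the lookup to
 * synchronous hash implementations.
 */
static int example_hash_buffer(u8 *data, unsigned int len, u8 *out)
{
        struct crypto_hash *tfm;
        struct hash_desc desc;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        desc.tfm = tfm;
        desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

        sg_init_one(&sg, data, len);
        err = crypto_hash_digest(&desc, &sg, len, out);

        crypto_free_hash(tfm);
        return err;
}
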
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
        BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
                CRYPTO_ALG_TYPE_MASK);
        return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
        u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
        crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
        return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
        return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
        const u8 *src, unsigned int slen,
        u8 *dst, unsigned int *dlen)
{
        return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
                src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
        const u8 *src, unsigned int slen,
        u8 *dst, unsigned int *dlen)
{
        return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
                src, slen, dst, dlen);
}

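/*
 * Example (illustrative, not part of the original header): compressing a
 * buffer with the "deflate" algorithm.  *dlen carries the size of the
 * destination buffer on entry and the compressed length on return;
 * IS_ERR()/PTR_ERR() come from <linux/err.h>.  The algorithm name is an
 * assumption of this sketch.
 */
static int example_compress_buffer(const u8 *src, unsigned int slen,
        u8 *dst, unsigned int *dlen)
{
        struct crypto_comp *tfm;
        int err;

        tfm = crypto_alloc_comp("deflate", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_comp_compress(tfm, src, slen, dst, dlen);

        crypto_free_comp(tfm);
        return err;
}
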
#endif /* _LINUX_CRYPTO_H */