[CRYPTO] templates: Pass type/mask when creating instances
[deliverable/linux.git] / include / linux / crypto.h
1 /*
2 * Scatterlist Cryptographic API.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
7 *
8 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
9 * and Nettle, by Niels Möller.
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17 #ifndef _LINUX_CRYPTO_H
18 #define _LINUX_CRYPTO_H
19
20 #include <asm/atomic.h>
21 #include <linux/module.h>
22 #include <linux/kernel.h>
23 #include <linux/list.h>
24 #include <linux/slab.h>
25 #include <linux/string.h>
26 #include <linux/uaccess.h>
27
/*
 * Algorithm masks and types.
 *
 * The low nibble of cra_flags identifies the algorithm type; the
 * remaining bits are status/behaviour flags.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_DIGEST		0x00000002
#define CRYPTO_ALG_TYPE_HASH		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000005

/* Matches both the DIGEST (0x2) and HASH (0x3) type values above. */
#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00	/* request flags, set by users */
#define CRYPTO_TFM_RES_MASK		0xfff00000	/* result flags, reported back */

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000
65
/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types. In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#endif

#ifdef CRYPTO_MINALIGN
#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
#else
/* No arch-specific minimum alignment known: add no attribute. */
#define CRYPTO_MINALIGN_ATTR
#endif
90
struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_tfm;
struct crypto_type;

/* Completion callback invoked when an asynchronous request finishes. */
typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/*
 * Common bookkeeping shared by all asynchronous crypto requests.
 */
struct crypto_async_request {
	struct list_head list;		/* list linkage for queueing */
	crypto_completion_t complete;	/* user completion callback */
	void *data;			/* opaque argument for the callback */
	struct crypto_tfm *tfm;		/* transform this request runs on */

	u32 flags;			/* request flags, see CRYPTO_TFM_REQ_* */
};

/*
 * Asynchronous block cipher request.
 */
struct ablkcipher_request {
	struct crypto_async_request base;

	unsigned int nbytes;		/* number of bytes to process */

	void *info;			/* IV, see ablkcipher_request_set_crypt() */

	struct scatterlist *src;	/* input data */
	struct scatterlist *dst;	/* output data */

	/* Per-request context; its size is crypto_ablkcipher_reqsize(). */
	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/*
 * State handed to the synchronous blkcipher operations.
 */
struct blkcipher_desc {
	struct crypto_blkcipher *tfm;
	void *info;			/* IV for this operation */
	u32 flags;
};

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};

/*
 * State handed to the hash operations.
 */
struct hash_desc {
	struct crypto_hash *tfm;
	u32 flags;
};
141
/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */

/* Operations and limits of a synchronous block cipher algorithm. */
struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	unsigned int min_keysize;	/* smallest accepted key size, bytes */
	unsigned int max_keysize;	/* largest accepted key size, bytes */
	unsigned int ivsize;		/* IV size in bytes */
};

/* Operations and limits of a single-block cipher algorithm. */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

/* Operations of a digest algorithm (contiguous-buffer interface). */
struct digest_alg {
	unsigned int dia_digestsize;
	void (*dia_init)(struct crypto_tfm *tfm);
	void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
			   unsigned int len);
	void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
	int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
};

/* Operations of a hash algorithm (scatterlist interface). */
struct hash_alg {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nbytes);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nbytes, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);

	unsigned int digestsize;	/* output size in bytes */
};

/* Operations of a compression algorithm. */
struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};
199
/* Shorthands for the members of the cra_u union below. */
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_digest	cra_u.digest
#define cra_hash	cra_u.hash
#define cra_compress	cra_u.compress

/*
 * Description of one registered algorithm implementation.
 */
struct crypto_alg {
	struct list_head cra_list;	/* list linkage for registration */
	struct list_head cra_users;	/* list of users of this algorithm */

	u32 cra_flags;			/* CRYPTO_ALG_* type and status bits */
	unsigned int cra_blocksize;	/* block size in bytes */
	unsigned int cra_ctxsize;	/* size of the per-tfm context */
	unsigned int cra_alignmask;	/* required data alignment mask */

	int cra_priority;		/* used to choose among implementations */
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];		/* generic algorithm name */
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];	/* implementation name */

	const struct crypto_type *cra_type;

	/* Type-specific operations, selected by the type bits in cra_flags. */
	union {
		struct blkcipher_alg blkcipher;
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct hash_alg hash;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);	/* per-tfm constructor */
	void (*cra_exit)(struct crypto_tfm *tfm);	/* per-tfm destructor */
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;	/* module providing this algorithm */
};
237
/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);

/*
 * Algorithm query interface.
 */
#ifdef CONFIG_CRYPTO
/* Nonzero if an algorithm matching (name, type, mask) is available. */
int crypto_has_alg(const char *name, u32 type, u32 mask);
#else
/* Crypto API disabled: no algorithm is ever available. */
static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	return 0;
}
#endif
255
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic. Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

/* Runtime operations of an asynchronous block cipher transform. */
struct ablkcipher_tfm {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	unsigned int ivsize;	/* IV size in bytes */
	unsigned int reqsize;	/* per-request context size */
};

/* Runtime operations of a synchronous block cipher transform. */
struct blkcipher_tfm {
	void *iv;		/* internal IV, used when the caller sets none */
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};

/* Runtime operations of a simple cipher transform. */
struct cipher_tfm {
	void *cit_iv;
	unsigned int cit_ivsize;
	u32 cit_mode;
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	int (*cit_encrypt)(struct crypto_tfm *tfm,
			   struct scatterlist *dst,
			   struct scatterlist *src,
			   unsigned int nbytes);
	int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes, u8 *iv);
	int (*cit_decrypt)(struct crypto_tfm *tfm,
			   struct scatterlist *dst,
			   struct scatterlist *src,
			   unsigned int nbytes);
	int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
			      struct scatterlist *dst,
			      struct scatterlist *src,
			      unsigned int nbytes, u8 *iv);
	void (*cit_xor_block)(u8 *dst, const u8 *src);
	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

/* Runtime operations of a hash transform. */
struct hash_tfm {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc,
		      struct scatterlist *sg, unsigned int nsg);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nsg, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);
	unsigned int digestsize;	/* output size in bytes */
};

/* Runtime operations of a compression transform. */
struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

/* Shorthands for the members of the crt_u union below. */
#define crt_ablkcipher	crt_u.ablkcipher
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_hash	crt_u.hash
#define crt_compress	crt_u.compress
334
/*
 * One allocated transform: flags, the type-specific operations and the
 * backing algorithm, followed by the per-transform context.
 */
struct crypto_tfm {

	u32 crt_flags;		/* CRYPTO_TFM_* request/result flags */

	/* Type-specific runtime operations. */
	union {
		struct ablkcipher_tfm ablkcipher;
		struct blkcipher_tfm blkcipher;
		struct cipher_tfm cipher;
		struct hash_tfm hash;
		struct compress_tfm compress;
	} crt_u;

	struct crypto_alg *__crt_alg;	/* algorithm backing this transform */

	/* Per-transform context, aligned for any C data type. */
	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

/* Type-safe wrappers; each embeds a crypto_tfm as its only member. */
struct crypto_ablkcipher {
	struct crypto_tfm base;
};

struct crypto_blkcipher {
	struct crypto_tfm base;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

struct crypto_hash {
	struct crypto_tfm base;
};

/* Attribute types used when instantiating template algorithms. */
enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Payload of a CRYPTOA_ALG attribute: an algorithm name. */
struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

/* Payload of a CRYPTOA_TYPE attribute: a type/mask pair for lookups. */
struct crypto_attr_type {
	u32 type;
	u32 mask;
};
389
/*
 * Transform user interface.
 */

/* Older allocation interface, taking only tfm flags. */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
/* Allocate a transform matching (alg_name, type, mask). */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_free_tfm(struct crypto_tfm *tfm);
397
/*
 * Transform helpers which query the underlying algorithm.
 */

/* Generic algorithm name (cra_name). */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

/* Name of the specific implementation backing this transform. */
static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

/* Name of the module providing the algorithm. */
static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
	return module_name(tfm->__crt_alg->cra_module);
}

/* Algorithm type (CRYPTO_ALG_TYPE_*), extracted from the flags. */
static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

/* Set the given CRYPTO_TFM_* flag bits; other bits are preserved. */
static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

/* Clear the given CRYPTO_TFM_* flag bits; other bits are preserved. */
static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

/* Pointer to the per-transform private context. */
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

/* Alignment guaranteed for the per-transform context area. */
static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	/* __alignof__ does not evaluate its operand, so tfm is never read. */
	return __alignof__(tfm->__crt_ctx);
}
461
/*
 * API wrappers.
 */

/* Cast a generic tfm to an ablkcipher tfm; no type checking is done. */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_ablkcipher *)tfm;
}
470
471 static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
472 const char *alg_name, u32 type, u32 mask)
473 {
474 type &= ~CRYPTO_ALG_TYPE_MASK;
475 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
476 mask |= CRYPTO_ALG_TYPE_MASK;
477
478 return __crypto_ablkcipher_cast(
479 crypto_alloc_base(alg_name, type, mask));
480 }
481
/* Underlying crypto_tfm of an ablkcipher transform. */
static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
{
	return &tfm->base;
}

/* Free an ablkcipher transform. */
static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}
492
493 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
494 u32 mask)
495 {
496 type &= ~CRYPTO_ALG_TYPE_MASK;
497 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
498 mask |= CRYPTO_ALG_TYPE_MASK;
499
500 return crypto_has_alg(alg_name, type, mask);
501 }
502
/* Runtime operations of an asynchronous block cipher transform. */
static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

/* IV size in bytes. */
static inline unsigned int crypto_ablkcipher_ivsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
					       u32 flags)
{
	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
						 u32 flags)
{
	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

/* Set the cipher key. */
static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					   const u8 *key, unsigned int keylen)
{
	return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
}

/* Transform a request belongs to. */
static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
	struct ablkcipher_request *req)
{
	return __crypto_ablkcipher_cast(req->base.tfm);
}

/* Start encryption of the request. */
static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->encrypt(req);
}

/* Start decryption of the request. */
static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->decrypt(req);
}

/* Size of the per-request context this transform needs. */
static inline int crypto_ablkcipher_reqsize(struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->reqsize;
}

/* Allocate a request, including room for the per-request context. */
static inline struct ablkcipher_request *ablkcipher_request_alloc(
	struct crypto_ablkcipher *tfm, gfp_t gfp)
{
	struct ablkcipher_request *req;

	req = kmalloc(sizeof(struct ablkcipher_request) +
		      crypto_ablkcipher_reqsize(tfm), gfp);

	if (likely(req))
		req->base.tfm = crypto_ablkcipher_tfm(tfm);

	return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
	kfree(req);
}

/* Install the completion callback, its argument and the request flags. */
static inline void ablkcipher_request_set_callback(
	struct ablkcipher_request *req,
	u32 flags, crypto_completion_t complete, void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

/* Describe the data to process: source, destination, length and IV. */
static inline void ablkcipher_request_set_crypt(
	struct ablkcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int nbytes, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->nbytes = nbytes;
	req->info = iv;
}
613
/* Cast a generic tfm to a blkcipher tfm; no type checking is done. */
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

/* Checked cast: BUGs if the tfm is not a synchronous block cipher. */
static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}
626
627 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
628 const char *alg_name, u32 type, u32 mask)
629 {
630 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
631 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
632 mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
633
634 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
635 }
636
/* Underlying crypto_tfm of a blkcipher transform. */
static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

/* Free a blkcipher transform. */
static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}
647
648 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
649 {
650 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
651 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
652 mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;
653
654 return crypto_has_alg(alg_name, type, mask);
655 }
656
/* Generic algorithm name of a blkcipher transform. */
static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

/* Runtime operations of a blkcipher transform. */
static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

/* Algorithm description backing this transform. */
static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

/* IV size in bytes. */
static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

/* Set the cipher key. */
static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}

/* Encrypt using the transform's internal IV (placed into desc->info). */
static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

/* Encrypt using the IV the caller already placed in desc->info. */
static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

/* Decrypt using the transform's internal IV (placed into desc->info). */
static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

/* Decrypt using the IV the caller already placed in desc->info. */
static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

/* Copy len bytes from src into the transform's internal IV. */
static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

/* Copy len bytes of the transform's internal IV into dst. */
static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
760
/* Cast a generic tfm to a cipher tfm; no type checking is done. */
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

/* Checked cast: BUGs if the tfm is not a simple cipher. */
static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}
771
772 static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
773 u32 type, u32 mask)
774 {
775 type &= ~CRYPTO_ALG_TYPE_MASK;
776 type |= CRYPTO_ALG_TYPE_CIPHER;
777 mask |= CRYPTO_ALG_TYPE_MASK;
778
779 return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
780 }
781
/* Underlying crypto_tfm of a cipher transform. */
static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

/* Free a cipher transform. */
static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

/* Nonzero if a simple cipher matching (alg_name, type, mask) is available. */
static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

/* Runtime operations of a cipher transform. */
static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

/* Set the cipher key. */
static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}

/* Encrypt one block from src into dst. */
static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

/* Decrypt one block from src into dst. */
static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}
853
/* Cast a generic tfm to a hash tfm; no type checking is done. */
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_hash *)tfm;
}

/* Checked cast: BUGs unless the type is DIGEST or HASH. */
static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
	       CRYPTO_ALG_TYPE_HASH_MASK);
	return __crypto_hash_cast(tfm);
}
865
866 static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
867 u32 type, u32 mask)
868 {
869 type &= ~CRYPTO_ALG_TYPE_MASK;
870 type |= CRYPTO_ALG_TYPE_HASH;
871 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
872
873 return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
874 }
875
/* Underlying crypto_tfm of a hash transform. */
static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
	return &tfm->base;
}

/* Free a hash transform. */
static inline void crypto_free_hash(struct crypto_hash *tfm)
{
	crypto_free_tfm(crypto_hash_tfm(tfm));
}

/* Nonzero if a hash/digest matching (alg_name, type, mask) is available. */
static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

/* Runtime operations of a hash transform. */
static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
	return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

/* Size of the final digest in bytes. */
static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
	return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
	return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

/* Initialise the hash state for a new message. */
static inline int crypto_hash_init(struct hash_desc *desc)
{
	return crypto_hash_crt(desc->tfm)->init(desc);
}

/* Feed nbytes from the scatterlist into the hash state. */
static inline int crypto_hash_update(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

/* Write the final digest to out. */
static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->final(desc, out);
}

/* One-shot digest of the scatterlist, written to out. */
static inline int crypto_hash_digest(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

/* Set the key for keyed hashes. */
static inline int crypto_hash_setkey(struct crypto_hash *hash,
				     const u8 *key, unsigned int keylen)
{
	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}
959
/* Cast a generic tfm to a compression tfm; no type checking is done. */
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

/* Checked cast: BUGs if the tfm is not a compression transform. */
static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}
971
972 static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
973 u32 type, u32 mask)
974 {
975 type &= ~CRYPTO_ALG_TYPE_MASK;
976 type |= CRYPTO_ALG_TYPE_COMPRESS;
977 mask |= CRYPTO_ALG_TYPE_MASK;
978
979 return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
980 }
981
/* Underlying crypto_tfm of a compression transform. */
static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

/* Free a compression transform. */
static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

/* Nonzero if a compression algorithm matching the triple is available. */
static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

/* Generic algorithm name of a compression transform. */
static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

/* Runtime operations of a compression transform. */
static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

/* Compress slen bytes at src into dst; result length returned via *dlen. */
static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

/* Decompress slen bytes at src into dst; result length returned via *dlen. */
static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}
1026
1027 #endif /* _LINUX_CRYPTO_H */
1028
This page took 0.062932 seconds and 6 git commands to generate.