/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <asm/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK            0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER          0x00000001
#define CRYPTO_ALG_TYPE_DIGEST          0x00000002
#define CRYPTO_ALG_TYPE_HASH            0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER       0x00000004
#define CRYPTO_ALG_TYPE_COMPRESS        0x00000005

#define CRYPTO_ALG_TYPE_HASH_MASK       0x0000000e

#define CRYPTO_ALG_LARVAL               0x00000010
#define CRYPTO_ALG_DEAD                 0x00000020
#define CRYPTO_ALG_DYING                0x00000040
#define CRYPTO_ALG_ASYNC                0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK        0x00000100

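/*
 * Example (illustrative only, not part of this header): a driver whose
 * hardware cannot handle every corner case would set
 * CRYPTO_ALG_NEED_FALLBACK in its cra_flags and allocate a software
 * fallback for the same algorithm.  The names "my_ctx", "my_init" and
 * the use of "cbc(aes)" are hypothetical; the sketch assumes the usual
 * pattern of passing CRYPTO_ALG_NEED_FALLBACK in the mask so that
 * implementations which themselves need a fallback are not selected:
 *
 *      struct my_ctx {
 *              struct crypto_blkcipher *fallback;
 *      };
 *
 *      static int my_init(struct crypto_tfm *tfm)
 *      {
 *              struct my_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *              ctx->fallback = crypto_alloc_blkcipher("cbc(aes)", 0,
 *                                                     CRYPTO_ALG_NEED_FALLBACK);
 *              return IS_ERR(ctx->fallback) ? PTR_ERR(ctx->fallback) : 0;
 *      }
 */
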
/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK             0x000fff00
#define CRYPTO_TFM_RES_MASK             0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY         0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP        0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG      0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY         0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN      0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED    0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN    0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS        0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME             64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#endif

#ifdef CRYPTO_MINALIGN
#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
#else
#define CRYPTO_MINALIGN_ATTR
#endif

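/*
 * Example (illustrative only): because the per-transform context is
 * CRYPTO_MINALIGN_ATTR aligned, an algorithm can keep types such as u64
 * directly in its private context and fetch it with crypto_tfm_ctx()
 * (defined further down).  "my_aes_ctx" and "my_setkey" are hypothetical:
 *
 *      struct my_aes_ctx {
 *              u64 byte_count;
 *              u32 key_enc[60];
 *      };
 *
 *      static int my_setkey(struct crypto_tfm *tfm, const u8 *key,
 *                           unsigned int keylen)
 *      {
 *              struct my_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 *              ...
 *      }
 *
 * The matching crypto_alg would set .cra_ctxsize = sizeof(struct my_aes_ctx).
 */
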
struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_queue;
struct crypto_tfm;
struct crypto_type;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
        struct list_head list;
        crypto_completion_t complete;
        void *data;
        struct crypto_tfm *tfm;

        u32 flags;
};

struct ablkcipher_request {
        struct crypto_async_request base;

        unsigned int nbytes;

        void *info;

        struct scatterlist *src;
        struct scatterlist *dst;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
        struct crypto_blkcipher *tfm;
        void *info;
        u32 flags;
};

struct cipher_desc {
        struct crypto_tfm *tfm;
        void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
                             const u8 *src, unsigned int nbytes);
        void *info;
};

struct hash_desc {
        struct crypto_hash *tfm;
        u32 flags;
};

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);

        struct crypto_queue *queue;

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};

struct blkcipher_alg {
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes);

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};

struct cipher_alg {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct digest_alg {
        unsigned int dia_digestsize;
        void (*dia_init)(struct crypto_tfm *tfm);
        void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
                           unsigned int len);
        void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
        int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
};

struct hash_alg {
        int (*init)(struct hash_desc *desc);
        int (*update)(struct hash_desc *desc, struct scatterlist *sg,
                      unsigned int nbytes);
        int (*final)(struct hash_desc *desc, u8 *out);
        int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
                      unsigned int nbytes, u8 *out);
        int (*setkey)(struct crypto_hash *tfm, const u8 *key,
                      unsigned int keylen);

        unsigned int digestsize;
};

struct compress_alg {
        int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
                            unsigned int slen, u8 *dst, unsigned int *dlen);
        int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
                              unsigned int slen, u8 *dst, unsigned int *dlen);
};

#define cra_ablkcipher  cra_u.ablkcipher
#define cra_blkcipher   cra_u.blkcipher
#define cra_cipher      cra_u.cipher
#define cra_digest      cra_u.digest
#define cra_hash        cra_u.hash
#define cra_compress    cra_u.compress

struct crypto_alg {
        struct list_head cra_list;
        struct list_head cra_users;

        u32 cra_flags;
        unsigned int cra_blocksize;
        unsigned int cra_ctxsize;
        unsigned int cra_alignmask;

        int cra_priority;
        atomic_t cra_refcnt;

        char cra_name[CRYPTO_MAX_ALG_NAME];
        char cra_driver_name[CRYPTO_MAX_ALG_NAME];

        const struct crypto_type *cra_type;

        union {
                struct ablkcipher_alg ablkcipher;
                struct blkcipher_alg blkcipher;
                struct cipher_alg cipher;
                struct digest_alg digest;
                struct hash_alg hash;
                struct compress_alg compress;
        } cra_u;

        int (*cra_init)(struct crypto_tfm *tfm);
        void (*cra_exit)(struct crypto_tfm *tfm);
        void (*cra_destroy)(struct crypto_alg *alg);

        struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);

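/*
 * Example (illustrative only): a module typically fills in a static
 * struct crypto_alg and registers it from its init hook.  The names
 * below ("noop", noop_alg, noop_crypt, ...) are hypothetical; a real
 * single-block cipher would also fill in cia_setkey and real key sizes:
 *
 *      static struct crypto_alg noop_alg = {
 *              .cra_name        = "noop",
 *              .cra_driver_name = "noop-generic",
 *              .cra_priority    = 100,
 *              .cra_flags       = CRYPTO_ALG_TYPE_CIPHER,
 *              .cra_blocksize   = 1,
 *              .cra_ctxsize     = 0,
 *              .cra_module      = THIS_MODULE,
 *              .cra_list        = LIST_HEAD_INIT(noop_alg.cra_list),
 *              .cra_u           = { .cipher = {
 *                      .cia_min_keysize = 0,
 *                      .cia_max_keysize = 0,
 *                      .cia_encrypt     = noop_crypt,
 *                      .cia_decrypt     = noop_crypt } }
 *      };
 *
 *      static int __init noop_init(void)
 *      {
 *              return crypto_register_alg(&noop_alg);
 *      }
 *
 *      static void __exit noop_exit(void)
 *      {
 *              crypto_unregister_alg(&noop_alg);
 *      }
 */
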
/*
 * Algorithm query interface.
 */
#ifdef CONFIG_CRYPTO
int crypto_has_alg(const char *name, u32 type, u32 mask);
#else
static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        return 0;
}
#endif

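/*
 * Example (illustrative only): callers can probe for an implementation
 * before committing to a code path, e.g.
 *
 *      if (!crypto_has_alg("sha256", 0, 0))
 *              return -ENOENT;
 *
 * A type/mask of 0/0 accepts any implementation of the named algorithm.
 */
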
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);
        unsigned int ivsize;
        unsigned int reqsize;
};

struct blkcipher_tfm {
        void *iv;
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
        void *cit_iv;
        unsigned int cit_ivsize;
        u32 cit_mode;
        int (*cit_setkey)(struct crypto_tfm *tfm,
                          const u8 *key, unsigned int keylen);
        int (*cit_encrypt)(struct crypto_tfm *tfm,
                           struct scatterlist *dst,
                           struct scatterlist *src,
                           unsigned int nbytes);
        int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes, u8 *iv);
        int (*cit_decrypt)(struct crypto_tfm *tfm,
                           struct scatterlist *dst,
                           struct scatterlist *src,
                           unsigned int nbytes);
        int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes, u8 *iv);
        void (*cit_xor_block)(u8 *dst, const u8 *src);
        void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
        int (*init)(struct hash_desc *desc);
        int (*update)(struct hash_desc *desc,
                      struct scatterlist *sg, unsigned int nsg);
        int (*final)(struct hash_desc *desc, u8 *out);
        int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
                      unsigned int nsg, u8 *out);
        int (*setkey)(struct crypto_hash *tfm, const u8 *key,
                      unsigned int keylen);
        unsigned int digestsize;
};

struct compress_tfm {
        int (*cot_compress)(struct crypto_tfm *tfm,
                            const u8 *src, unsigned int slen,
                            u8 *dst, unsigned int *dlen);
        int (*cot_decompress)(struct crypto_tfm *tfm,
                              const u8 *src, unsigned int slen,
                              u8 *dst, unsigned int *dlen);
};

#define crt_ablkcipher  crt_u.ablkcipher
#define crt_blkcipher   crt_u.blkcipher
#define crt_cipher      crt_u.cipher
#define crt_hash        crt_u.hash
#define crt_compress    crt_u.compress

struct crypto_tfm {

        u32 crt_flags;

        union {
                struct ablkcipher_tfm ablkcipher;
                struct blkcipher_tfm blkcipher;
                struct cipher_tfm cipher;
                struct hash_tfm hash;
                struct compress_tfm compress;
        } crt_u;

        struct crypto_alg *__crt_alg;

        void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
        struct crypto_tfm base;
};

struct crypto_blkcipher {
        struct crypto_tfm base;
};

struct crypto_cipher {
        struct crypto_tfm base;
};

struct crypto_comp {
        struct crypto_tfm base;
};

struct crypto_hash {
        struct crypto_tfm base;
};

enum {
        CRYPTOA_UNSPEC,
        CRYPTOA_ALG,
        CRYPTOA_TYPE,
        __CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

struct crypto_attr_alg {
        char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
        u32 type;
        u32 mask;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_free_tfm(struct crypto_tfm *tfm);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_priority;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
        return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
        return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
        return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
        struct crypto_tfm *tfm;
        return __alignof__(tfm->__crt_ctx);
}

/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_ablkcipher *)tfm;
}

static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
        const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_ablkcipher_cast(
                crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
        struct crypto_ablkcipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
        crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
                                        u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
        struct crypto_ablkcipher *tfm)
{
        return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
                                               u32 flags)
{
        crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
                                                 u32 flags)
{
        crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
                                           const u8 *key, unsigned int keylen)
{
        return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
        struct ablkcipher_request *req)
{
        return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
        return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
        return crt->decrypt(req);
}

static inline int crypto_ablkcipher_reqsize(struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
        struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
        req->base.tfm = crypto_ablkcipher_tfm(tfm);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
        struct crypto_async_request *req)
{
        return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
        struct crypto_ablkcipher *tfm, gfp_t gfp)
{
        struct ablkcipher_request *req;

        req = kmalloc(sizeof(struct ablkcipher_request) +
                      crypto_ablkcipher_reqsize(tfm), gfp);

        if (likely(req))
                ablkcipher_request_set_tfm(req, tfm);

        return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
        kfree(req);
}

static inline void ablkcipher_request_set_callback(
        struct ablkcipher_request *req,
        u32 flags, crypto_completion_t complete, void *data)
{
        req->base.complete = complete;
        req->base.data = data;
        req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
        struct ablkcipher_request *req,
        struct scatterlist *src, struct scatterlist *dst,
        unsigned int nbytes, void *iv)
{
        req->src = src;
        req->dst = dst;
        req->nbytes = nbytes;
        req->info = iv;
}

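/*
 * Example (illustrative only): typical asynchronous block cipher usage.
 * "my_complete", "my_data", "sg", "key" and "iv" are hypothetical and
 * error handling is abbreviated:
 *
 *      struct crypto_ablkcipher *tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *      struct ablkcipher_request *req;
 *      int err;
 *
 *      crypto_ablkcipher_setkey(tfm, key, 16);
 *
 *      req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *      ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                      my_complete, my_data);
 *      ablkcipher_request_set_crypt(req, sg, sg, nbytes, iv);
 *
 *      err = crypto_ablkcipher_encrypt(req);
 *      if (err == -EINPROGRESS || err == -EBUSY)
 *              return;                 // my_complete() runs later; free there
 *      ablkcipher_request_free(req);   // completed (or failed) synchronously
 */
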
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
        return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
        const char *alg_name, u32 type, u32 mask)
{
        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;

        return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
        struct crypto_blkcipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
        crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC;

        return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
        return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
                                              u32 flags)
{
        crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
                                                u32 flags)
{
        crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
                                          const u8 *key, unsigned int keylen)
{
        return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
                                                 key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes)
{
        desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
        return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
                                              struct scatterlist *dst,
                                              struct scatterlist *src,
                                              unsigned int nbytes)
{
        return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes)
{
        desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
        return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
                                              struct scatterlist *dst,
                                              struct scatterlist *src,
                                              unsigned int nbytes)
{
        return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
                                           const u8 *src, unsigned int len)
{
        memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
                                           u8 *dst, unsigned int len)
{
        memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}

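/*
 * Example (illustrative only): synchronous block cipher usage over a
 * single linear buffer.  "key", "iv", "buf" and "len" are hypothetical
 * and error handling is omitted:
 *
 *      struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *      struct blkcipher_desc desc = { .tfm = tfm, .flags = 0 };
 *      struct scatterlist sg;
 *
 *      crypto_blkcipher_setkey(tfm, key, 16);
 *      crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *      sg_init_one(&sg, buf, len);
 *      crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 *
 *      crypto_free_blkcipher(tfm);
 */
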
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
                                                        u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_CIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
        crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_CIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
        return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
        return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
                                           u32 flags)
{
        crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
                                             u32 flags)
{
        crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
                                       const u8 *key, unsigned int keylen)
{
        return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
                                                  key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
                                             u8 *dst, const u8 *src)
{
        crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
                                                dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
                                             u8 *dst, const u8 *src)
{
        crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
                                                dst, src);
}

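/*
 * Example (illustrative only): the single-block cipher interface works
 * on exactly one block of crypto_cipher_blocksize() bytes at a time;
 * chaining modes are layered on top via templates such as "cbc(...)".
 * "key", "in" and "out" are hypothetical:
 *
 *      struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *      crypto_cipher_setkey(tfm, key, 16);
 *      crypto_cipher_encrypt_one(tfm, out, in);  // one 16-byte block
 *      crypto_free_cipher(tfm);
 */
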
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
        BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
               CRYPTO_ALG_TYPE_HASH_MASK);
        return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
                                                    u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;

        return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
        crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_HASH;
        mask |= CRYPTO_ALG_TYPE_HASH_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
        return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
        return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
        return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
        crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
        crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
        return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
                                     struct scatterlist *sg,
                                     unsigned int nbytes)
{
        return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
        return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
                                     struct scatterlist *sg,
                                     unsigned int nbytes, u8 *out)
{
        return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

static inline int crypto_hash_setkey(struct crypto_hash *hash,
                                     const u8 *key, unsigned int keylen)
{
        return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}

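/*
 * Example (illustrative only): computing a digest over a linear buffer.
 * A keyed hash such as "hmac(sha256)" would additionally call
 * crypto_hash_setkey() before crypto_hash_digest().  "buf" and "len"
 * are hypothetical and error handling is omitted:
 *
 *      struct crypto_hash *tfm = crypto_alloc_hash("sha256", 0, CRYPTO_ALG_ASYNC);
 *      struct hash_desc desc = { .tfm = tfm, .flags = CRYPTO_TFM_REQ_MAY_SLEEP };
 *      struct scatterlist sg;
 *      u8 out[32];     // crypto_hash_digestsize(tfm) bytes for sha256
 *
 *      sg_init_one(&sg, buf, len);
 *      crypto_hash_digest(&desc, &sg, len, out);
 *      crypto_free_hash(tfm);
 */
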
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
        BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
               CRYPTO_ALG_TYPE_MASK);
        return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
                                                    u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
        crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
        return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
        return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
                                       const u8 *src, unsigned int slen,
                                       u8 *dst, unsigned int *dlen)
{
        return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
                                                  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
                                         const u8 *src, unsigned int slen,
                                         u8 *dst, unsigned int *dlen)
{
        return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
                                                    src, slen, dst, dlen);
}

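/*
 * Example (illustrative only): compressing a buffer with the "deflate"
 * algorithm.  *dlen must be initialised to the size of the output
 * buffer and is updated to the number of bytes actually produced.
 * "in", "inlen" and "out" are hypothetical:
 *
 *      struct crypto_comp *tfm = crypto_alloc_comp("deflate", 0, 0);
 *      unsigned int dlen = sizeof(out);
 *
 *      crypto_comp_compress(tfm, in, inlen, out, &dlen);
 *      crypto_free_comp(tfm);
 */
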
#endif  /* _LINUX_CRYPTO_H */