[PATCH] spinlock consolidation
[deliverable/linux.git] / include / linux / crypto.h
1 /*
2 * Scatterlist Cryptographic API.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6 *
7 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
8 * and Nettle, by Niels Möller.
9 *
10 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version.
14 *
15 */
16 #ifndef _LINUX_CRYPTO_H
17 #define _LINUX_CRYPTO_H
18
19 #include <linux/config.h>
20 #include <linux/module.h>
21 #include <linux/kernel.h>
22 #include <linux/types.h>
23 #include <linux/list.h>
24 #include <linux/string.h>
25 #include <asm/page.h>
26
27 /*
28 * Algorithm masks and types.
29 */
30 #define CRYPTO_ALG_TYPE_MASK 0x000000ff
31 #define CRYPTO_ALG_TYPE_CIPHER 0x00000001
32 #define CRYPTO_ALG_TYPE_DIGEST 0x00000002
33 #define CRYPTO_ALG_TYPE_COMPRESS 0x00000004
34
35 /*
36 * Transform masks and values (for crt_flags).
37 */
38 #define CRYPTO_TFM_MODE_MASK 0x000000ff
39 #define CRYPTO_TFM_REQ_MASK 0x000fff00
40 #define CRYPTO_TFM_RES_MASK 0xfff00000
41
42 #define CRYPTO_TFM_MODE_ECB 0x00000001
43 #define CRYPTO_TFM_MODE_CBC 0x00000002
44 #define CRYPTO_TFM_MODE_CFB 0x00000004
45 #define CRYPTO_TFM_MODE_CTR 0x00000008
46
47 #define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100
48 #define CRYPTO_TFM_REQ_MAY_SLEEP 0x00000200
49 #define CRYPTO_TFM_RES_WEAK_KEY 0x00100000
50 #define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
51 #define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000
52 #define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000
53 #define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000
54
55 /*
56 * Miscellaneous stuff.
57 */
58 #define CRYPTO_UNSPEC 0
59 #define CRYPTO_MAX_ALG_NAME 64
60
61 #define CRYPTO_DIR_ENCRYPT 1
62 #define CRYPTO_DIR_DECRYPT 0
63
64 struct scatterlist;
65 struct crypto_tfm;
66
67 struct cipher_desc {
68 struct crypto_tfm *tfm;
69 void (*crfn)(void *ctx, u8 *dst, const u8 *src);
70 unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
71 const u8 *src, unsigned int nbytes);
72 void *info;
73 };
74
75 /*
76 * Algorithms: modular crypto algorithm implementations, managed
77 * via crypto_register_alg() and crypto_unregister_alg().
78 */
79 struct cipher_alg {
80 unsigned int cia_min_keysize;
81 unsigned int cia_max_keysize;
82 int (*cia_setkey)(void *ctx, const u8 *key,
83 unsigned int keylen, u32 *flags);
84 void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
85 void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
86
87 unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
88 u8 *dst, const u8 *src,
89 unsigned int nbytes);
90 unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
91 u8 *dst, const u8 *src,
92 unsigned int nbytes);
93 unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
94 u8 *dst, const u8 *src,
95 unsigned int nbytes);
96 unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
97 u8 *dst, const u8 *src,
98 unsigned int nbytes);
99 };
100
101 struct digest_alg {
102 unsigned int dia_digestsize;
103 void (*dia_init)(void *ctx);
104 void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
105 void (*dia_final)(void *ctx, u8 *out);
106 int (*dia_setkey)(void *ctx, const u8 *key,
107 unsigned int keylen, u32 *flags);
108 };
109
110 struct compress_alg {
111 int (*coa_init)(void *ctx);
112 void (*coa_exit)(void *ctx);
113 int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen,
114 u8 *dst, unsigned int *dlen);
115 int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen,
116 u8 *dst, unsigned int *dlen);
117 };
118
119 #define cra_cipher cra_u.cipher
120 #define cra_digest cra_u.digest
121 #define cra_compress cra_u.compress
122
123 struct crypto_alg {
124 struct list_head cra_list;
125 u32 cra_flags;
126 unsigned int cra_blocksize;
127 unsigned int cra_ctxsize;
128 unsigned int cra_alignmask;
129 const char cra_name[CRYPTO_MAX_ALG_NAME];
130
131 union {
132 struct cipher_alg cipher;
133 struct digest_alg digest;
134 struct compress_alg compress;
135 } cra_u;
136
137 struct module *cra_module;
138 };
139
140 /*
141 * Algorithm registration interface.
142 */
143 int crypto_register_alg(struct crypto_alg *alg);
144 int crypto_unregister_alg(struct crypto_alg *alg);
145
146 /*
147 * Algorithm query interface.
148 */
149 #ifdef CONFIG_CRYPTO
150 int crypto_alg_available(const char *name, u32 flags);
151 #else
152 static inline int crypto_alg_available(const char *name, u32 flags)
153 {
154 return 0;
155 }
156 #endif
157
158 /*
159 * Transforms: user-instantiated objects which encapsulate algorithms
160 * and core processing logic. Managed via crypto_alloc_tfm() and
161 * crypto_free_tfm(), as well as the various helpers below.
162 */
163
164 struct cipher_tfm {
165 void *cit_iv;
166 unsigned int cit_ivsize;
167 u32 cit_mode;
168 int (*cit_setkey)(struct crypto_tfm *tfm,
169 const u8 *key, unsigned int keylen);
170 int (*cit_encrypt)(struct crypto_tfm *tfm,
171 struct scatterlist *dst,
172 struct scatterlist *src,
173 unsigned int nbytes);
174 int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
175 struct scatterlist *dst,
176 struct scatterlist *src,
177 unsigned int nbytes, u8 *iv);
178 int (*cit_decrypt)(struct crypto_tfm *tfm,
179 struct scatterlist *dst,
180 struct scatterlist *src,
181 unsigned int nbytes);
182 int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
183 struct scatterlist *dst,
184 struct scatterlist *src,
185 unsigned int nbytes, u8 *iv);
186 void (*cit_xor_block)(u8 *dst, const u8 *src);
187 };
188
189 struct digest_tfm {
190 void (*dit_init)(struct crypto_tfm *tfm);
191 void (*dit_update)(struct crypto_tfm *tfm,
192 struct scatterlist *sg, unsigned int nsg);
193 void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
194 void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
195 unsigned int nsg, u8 *out);
196 int (*dit_setkey)(struct crypto_tfm *tfm,
197 const u8 *key, unsigned int keylen);
198 #ifdef CONFIG_CRYPTO_HMAC
199 void *dit_hmac_block;
200 #endif
201 };
202
203 struct compress_tfm {
204 int (*cot_compress)(struct crypto_tfm *tfm,
205 const u8 *src, unsigned int slen,
206 u8 *dst, unsigned int *dlen);
207 int (*cot_decompress)(struct crypto_tfm *tfm,
208 const u8 *src, unsigned int slen,
209 u8 *dst, unsigned int *dlen);
210 };
211
212 #define crt_cipher crt_u.cipher
213 #define crt_digest crt_u.digest
214 #define crt_compress crt_u.compress
215
216 struct crypto_tfm {
217
218 u32 crt_flags;
219
220 union {
221 struct cipher_tfm cipher;
222 struct digest_tfm digest;
223 struct compress_tfm compress;
224 } crt_u;
225
226 struct crypto_alg *__crt_alg;
227 };
228
229 /*
230 * Transform user interface.
231 */
232
233 /*
234 * crypto_alloc_tfm() will first attempt to locate an already loaded algorithm.
235 * If that fails and the kernel supports dynamically loadable modules, it
236 * will then attempt to load a module of the same name or alias. A refcount
237 * is grabbed on the algorithm which is then associated with the new transform.
238 *
239 * crypto_free_tfm() frees up the transform and any associated resources,
240 * then drops the refcount on the associated algorithm.
241 */
242 struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
243 void crypto_free_tfm(struct crypto_tfm *tfm);
244
245 /*
246 * Transform helpers which query the underlying algorithm.
247 */
248 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
249 {
250 return tfm->__crt_alg->cra_name;
251 }
252
253 static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
254 {
255 return module_name(tfm->__crt_alg->cra_module);
256 }
257
258 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
259 {
260 return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
261 }
262
263 static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
264 {
265 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
266 return tfm->__crt_alg->cra_cipher.cia_min_keysize;
267 }
268
269 static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
270 {
271 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
272 return tfm->__crt_alg->cra_cipher.cia_max_keysize;
273 }
274
275 static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
276 {
277 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
278 return tfm->crt_cipher.cit_ivsize;
279 }
280
281 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
282 {
283 return tfm->__crt_alg->cra_blocksize;
284 }
285
286 static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
287 {
288 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
289 return tfm->__crt_alg->cra_digest.dia_digestsize;
290 }
291
292 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
293 {
294 return tfm->__crt_alg->cra_alignmask;
295 }
296
297 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
298 {
299 return (void *)&tfm[1];
300 }
301
302 /*
303 * API wrappers.
304 */
305 static inline void crypto_digest_init(struct crypto_tfm *tfm)
306 {
307 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
308 tfm->crt_digest.dit_init(tfm);
309 }
310
311 static inline void crypto_digest_update(struct crypto_tfm *tfm,
312 struct scatterlist *sg,
313 unsigned int nsg)
314 {
315 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
316 tfm->crt_digest.dit_update(tfm, sg, nsg);
317 }
318
319 static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
320 {
321 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
322 tfm->crt_digest.dit_final(tfm, out);
323 }
324
325 static inline void crypto_digest_digest(struct crypto_tfm *tfm,
326 struct scatterlist *sg,
327 unsigned int nsg, u8 *out)
328 {
329 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
330 tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
331 }
332
333 static inline int crypto_digest_setkey(struct crypto_tfm *tfm,
334 const u8 *key, unsigned int keylen)
335 {
336 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
337 if (tfm->crt_digest.dit_setkey == NULL)
338 return -ENOSYS;
339 return tfm->crt_digest.dit_setkey(tfm, key, keylen);
340 }
341
342 static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
343 const u8 *key, unsigned int keylen)
344 {
345 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
346 return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
347 }
348
349 static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
350 struct scatterlist *dst,
351 struct scatterlist *src,
352 unsigned int nbytes)
353 {
354 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
355 return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
356 }
357
358 static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
359 struct scatterlist *dst,
360 struct scatterlist *src,
361 unsigned int nbytes, u8 *iv)
362 {
363 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
364 BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
365 return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
366 }
367
368 static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
369 struct scatterlist *dst,
370 struct scatterlist *src,
371 unsigned int nbytes)
372 {
373 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
374 return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
375 }
376
377 static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
378 struct scatterlist *dst,
379 struct scatterlist *src,
380 unsigned int nbytes, u8 *iv)
381 {
382 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
383 BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
384 return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
385 }
386
387 static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
388 const u8 *src, unsigned int len)
389 {
390 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
391 memcpy(tfm->crt_cipher.cit_iv, src, len);
392 }
393
394 static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
395 u8 *dst, unsigned int len)
396 {
397 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
398 memcpy(dst, tfm->crt_cipher.cit_iv, len);
399 }
400
401 static inline int crypto_comp_compress(struct crypto_tfm *tfm,
402 const u8 *src, unsigned int slen,
403 u8 *dst, unsigned int *dlen)
404 {
405 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
406 return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
407 }
408
409 static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
410 const u8 *src, unsigned int slen,
411 u8 *dst, unsigned int *dlen)
412 {
413 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
414 return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
415 }
416
417 /*
418 * HMAC support.
419 */
420 #ifdef CONFIG_CRYPTO_HMAC
421 void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen);
422 void crypto_hmac_update(struct crypto_tfm *tfm,
423 struct scatterlist *sg, unsigned int nsg);
424 void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key,
425 unsigned int *keylen, u8 *out);
426 void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen,
427 struct scatterlist *sg, unsigned int nsg, u8 *out);
428 #endif /* CONFIG_CRYPTO_HMAC */
429
430 #endif /* _LINUX_CRYPTO_H */
431
This page took 0.042007 seconds and 5 git commands to generate.