/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

struct module;
struct rtattr;
struct seq_file;

/*
 * Backend operations for one transform type (ablkcipher, aead,
 * blkcipher, hash): context sizing, transform construction and
 * teardown, and /proc/crypto display.
 */
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	void (*exit)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
};

/*
 * An algorithm instantiated from a template.  The instance embeds the
 * resulting crypto_alg and records which template created it.
 */
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/*
 * A template (e.g. "cbc" or "hmac") that builds algorithm instances
 * around an underlying algorithm via its alloc() callback.
 */
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);

	char name[CRYPTO_MAX_ALG_NAME];
};

/*
 * A reference held by an instance on the underlying algorithm it was
 * built from, keeping that algorithm available for the instance's
 * lifetime.
 */
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	u32 mask;
};

/*
 * A FIFO of asynchronous requests with a soft maximum length; requests
 * queued beyond max_qlen are tracked via the backlog pointer.
 */
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

/* Current position within a scatterlist. */
struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

/*
 * State for walking the source and destination scatterlists of a block
 * cipher request, with the current chunk mapped either physically or
 * virtually.
 */
struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;
extern const struct crypto_type crypto_hash_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);
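
/*
 * Example: a minimal sketch of template registration, not taken from
 * this file; the "mytmpl" name and callbacks are hypothetical.  A
 * template is normally registered from module init and unregistered
 * on exit:
 *
 *	static struct crypto_template mytmpl_tmpl = {
 *		.name = "mytmpl",
 *		.alloc = mytmpl_alloc,
 *		.free = mytmpl_free,
 *		.module = THIS_MODULE,
 *	};
 *
 *	static int __init mytmpl_module_init(void)
 *	{
 *		return crypto_register_template(&mytmpl_tmpl);
 *	}
 *
 *	static void __exit mytmpl_module_exit(void)
 *	{
 *		crypto_unregister_template(&mytmpl_tmpl);
 *	}
 */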

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
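
/*
 * Example: a sketch of how an instance pins its underlying algorithm
 * (names hypothetical).  The spawn typically lives in the instance
 * context, is set up in the template's alloc():
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *	int err = crypto_init_spawn(spawn, alg, inst,
 *				    CRYPTO_ALG_TYPE_MASK);
 *
 * and is released again in the template's free() callback:
 *
 *	crypto_drop_spawn(crypto_instance_ctx(inst));
 */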

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
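
/*
 * Example: a minimal sketch of queue usage by an asynchronous driver
 * (the "myqueue" variable is hypothetical).  Enqueueing returns
 * -EINPROGRESS on success; once max_qlen is reached it returns -EBUSY,
 * and the request is still queued on the backlog if the caller set
 * CRYPTO_TFM_REQ_MAY_BACKLOG:
 *
 *	static struct crypto_queue myqueue;
 *
 *	crypto_init_queue(&myqueue, 50);
 *	err = crypto_enqueue_request(&myqueue, &req->base);
 */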

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
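
/*
 * Example: a sketch of CTR-style processing with these helpers, where
 * "keystream" and "ctrblk" are hypothetical u32-aligned buffers of
 * bsize bytes.  crypto_xor() XORs src into dst; crypto_inc() treats
 * its buffer as a big-endian counter:
 *
 *	crypto_xor(dst, keystream, bsize);
 *	crypto_inc(ctrblk, bsize);
 */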

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
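
/*
 * Example: a minimal sketch of the walk loop used by block cipher
 * templates, modelled on in-tree users; "encrypt_segment" is a
 * hypothetical helper that processes full blocks and returns the
 * number of bytes left unprocessed:
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *
 *	while (walk.nbytes) {
 *		unsigned int left = encrypt_segment(desc, &walk);
 *		err = blkcipher_walk_done(desc, &walk, left);
 *	}
 *	return err;
 */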

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm);

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (void *)ALIGN(addr, align);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_ablkcipher *crypto_spawn_ablkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_BLKCIPHER_MASK;

	return __crypto_ablkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
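
/*
 * Example: a sketch of queue draining, modelled on asynchronous
 * drivers of this era ("myqueue" is hypothetical).  Any backlogged
 * request must be notified before the dequeued request is processed:
 *
 *	backlog = crypto_get_backlog(&myqueue);
 *	req = crypto_dequeue_request(&myqueue);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */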

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}
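
/*
 * Example: a sketch of a template alloc() callback resolving its
 * underlying algorithm from the attribute list, modelled on simple
 * in-tree templates (the "mytmpl" name is hypothetical):
 *
 *	struct crypto_alg *alg;
 *	struct crypto_instance *inst;
 *
 *	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *				  CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(alg))
 *		return ERR_PTR(PTR_ERR(alg));
 *
 *	inst = crypto_alloc_instance("mytmpl", alg);
 *	crypto_mod_put(alg);
 *	return inst;
 */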

#endif /* _CRYPTO_ALGAPI_H */