/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct crypto_aead;
struct module;
struct rtattr;
struct seq_file;

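/*
 * The operations a frontend (e.g. ablkcipher, shash) supplies for sizing,
 * initialising and reporting the transforms built on a &struct crypto_alg.
 */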
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

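/*
 * An algorithm instantiated from a template, e.g. "hmac(sha256)".  The
 * embedded &struct crypto_alg is registered like any other algorithm;
 * __ctx is the instance's private context.
 */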
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

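/*
 * A template such as "hmac" or "cbc" from which instances are built.
 * ->create() is the newer constructor interface; ->alloc()/->free() are
 * the older pair it gradually replaces.
 */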
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

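/*
 * A reference an instance holds on the underlying algorithm it was built
 * from, keeping that algorithm pinned for the instance's lifetime.
 */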
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

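/*
 * A simple request queue with backlog support, used by drivers and
 * templates such as cryptd to serialise asynchronous requests.
 */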
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

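/* A cursor into a scatterlist: the current entry plus a byte offset. */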
struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

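/*
 * State for walking a blkcipher request's source and destination
 * scatterlists chunk by chunk, mapping pages as it goes; see the usage
 * sketch further down, next to the blkcipher_walk_*() declarations.
 */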
struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};

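/*
 * Asynchronous counterpart of &struct blkcipher_walk; buffers holds
 * temporary allocations made for misaligned data, released via
 * ablkcipher_walk_complete().
 */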
struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

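/*
 * Illustrative only (names hypothetical): the usual module boilerplate
 * for registering a template, modelled on templates like crypto/hmac.c:
 *
 *	static struct crypto_template my_tmpl = {
 *		.name	= "mytmpl",
 *		.create	= my_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	static int __init my_init(void)
 *	{
 *		return crypto_register_template(&my_tmpl);
 *	}
 *
 *	static void __exit my_exit(void)
 *	{
 *		crypto_unregister_template(&my_tmpl);
 *	}
 */
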
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

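/*
 * Typical spawn life cycle (a sketch, not a complete recipe): the
 * template's ->create()/->alloc() pins the underlying algorithm with
 * crypto_init_spawn() or crypto_grab_spawn(), the instance's cra_init()
 * materialises a transform with crypto_spawn_tfm()/crypto_spawn_tfm2(),
 * and the instance's free path drops the reference with
 * crypto_drop_spawn().
 */
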
static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

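/*
 * Sketch of the usual queue pattern (illustrative, modelled on
 * crypto/cryptd.c): enqueue under a lock, then have a worker pull
 * requests off, notifying any backlogged request first:
 *
 *	spin_lock(&lock);
 *	err = crypto_enqueue_request(&queue, req);
 *	spin_unlock(&lock);
 *	...
 *	backlog = crypto_get_backlog(&queue);
 *	req = crypto_dequeue_request(&queue);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */
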
/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);

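/*
 * Example (illustrative): big-endian counter increment and whitening,
 * with both buffers meeting the u32 alignment requirement above.
 *
 *	crypto_inc(ctrblk, bsize);	// treat ctrblk as a BE counter
 *	crypto_xor(dst, src, bsize);	// dst ^= src
 */
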
int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);

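/*
 * The canonical walk loop of a mode implementation, modelled on
 * crypto/cbc.c (a sketch with hypothetical names, not a drop-in): each
 * step maps a chunk of input and output, and blkcipher_walk_done() is
 * passed the number of bytes left unprocessed in that chunk (0 here).
 *
 *	static int my_encrypt(struct blkcipher_desc *desc,
 *			      struct scatterlist *dst,
 *			      struct scatterlist *src,
 *			      unsigned int nbytes)
 *	{
 *		struct blkcipher_walk walk;
 *		int err;
 *
 *		blkcipher_walk_init(&walk, dst, src, nbytes);
 *		err = blkcipher_walk_virt(desc, &walk);
 *		while ((nbytes = walk.nbytes)) {
 *			// encrypt nbytes bytes from walk.src.virt.addr
 *			// into walk.dst.virt.addr
 *			err = blkcipher_walk_done(desc, &walk, 0);
 *		}
 *		return err;
 *	}
 */
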
int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

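/*
 * crypto_tfm_ctx_aligned() returns the transform context rounded up to
 * the algorithm's alignment mask, for implementations that keep aligned
 * key material or state there.
 */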
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC (nonzero) if the type/mask pair demands a
 * synchronous algorithm, i.e. the caller cleared CRYPTO_ALG_ASYNC in
 * type while setting it in mask.  Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}

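/*
 * Example (illustrative): verifying an authentication tag without a
 * data-dependent early exit.
 *
 *	if (crypto_memneq(computed_tag, received_tag, taglen))
 *		return -EBADMSG;
 */
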
static inline void crypto_yield(u32 flags)
{
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
}

#endif	/* _CRYPTO_ALGAPI_H */