/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}

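/*
 * Worked example of the alignment arithmetic above, assuming
 * CRYPTO_MINALIGN == 8 and an algorithm with alignmask == 15:
 *
 *	absize      = keylen + (15 & ~7);	   - keylen + 8 bytes
 *	buffer      = kmalloc(absize, GFP_KERNEL); - 8-byte aligned, e.g. 0x1008
 *	alignbuffer = ALIGN(0x1008, 16);	   - rounds up to 0x1010
 *
 * kmalloc() already guarantees CRYPTO_MINALIGN alignment, so only the part
 * of the alignmask beyond that minimum needs to be covered by padding.
 */
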
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (!shash->setkey)
		return -ENOSYS;

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

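/*
 * Usage sketch (illustrative; assumes a registered "hmac(sha256)" shash
 * driver and abbreviates error handling):
 *
 *	struct crypto_shash *tfm;
 *	u8 key[32];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_setkey(tfm, key, sizeof(key));
 */
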
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

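/*
 * Worked example of the head split above: with alignmask == 3 and data at
 * address 0x1001, unaligned_len = 4 - (0x1001 & 3) = 3.  The first three
 * bytes are copied into the aligned stack buffer and hashed separately;
 * data + 3 is then aligned, so the remaining len - 3 bytes can be passed
 * to ->update() in place.
 */
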
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

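/*
 * Incremental hashing sketch (illustrative; assumes a registered "sha256"
 * shash driver, a tfm from crypto_alloc_shash(), caller-supplied buf1/buf2,
 * and abbreviated error handling).  The descriptor must be followed by
 * descsize bytes of per-request state:
 *
 *	struct shash_desc *desc;
 *	u8 out[32];
 *	int err;
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	if (!desc)
 *		return -ENOMEM;
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *
 *	err = crypto_shash_init(desc) ?:
 *	      crypto_shash_update(desc, buf1, len1) ?:
 *	      crypto_shash_update(desc, buf2, len2) ?:
 *	      crypto_shash_final(desc, out);
 *	kfree(desc);
 */
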
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

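/*
 * One-shot digest sketch (illustrative; assumes a registered "sha256"
 * shash driver, caller-supplied data/len, and abbreviated error handling):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *	u8 digest[32];
 *	int err;
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	err = crypto_shash_digest(desc, data, len, digest);
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */
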
static int shash_no_export(struct shash_desc *desc, void *out)
{
	return -ENOSYS;
}

static int shash_no_import(struct shash_desc *desc, const void *in)
{
	return -ENOSYS;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}

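/*
 * The fast path above only applies when the whole input sits inside the
 * first scatterlist entry and does not cross a page boundary, so a single
 * kmap suffices.  For example, with offset == 4000 on a 4096-byte page,
 * only requests of fewer than 96 bytes qualify; anything longer falls
 * through to the init/update/final walk, which maps each page in turn.
 */
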
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

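/*
 * With these ops installed, an ahash user drives the shash algorithm
 * transparently: reqsize above covers the shash_desc plus the algorithm's
 * descsize of per-request state, so the memory returned by
 * ahash_request_ctx() serves directly as the shash descriptor and no
 * further per-request allocation is needed.
 */
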
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
	seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->import)
		alg->import = shash_no_import;
	if (!alg->export)
		alg->export = shash_no_export;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

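/*
 * Registration sketch (illustrative; the example_* names and callbacks are
 * placeholders for a real driver's state and handlers):
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 32,
 *		.descsize	= sizeof(struct example_desc_ctx),
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.base		= {
 *			.cra_name	= "example",
 *			.cra_blocksize	= 64,
 *			.cra_module	= THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&example_alg);
 */
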
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");