/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

static const struct crypto_type crypto_shash_type;

static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_shash, base);
}

#include "internal.h"

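/*
 * Bounce an unaligned key through a temporary buffer aligned to the
 * algorithm's alignment mask before handing it to ->setkey().
 */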
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (!shash->setkey)
		return -ENOSYS;

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

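/*
 * Size an on-stack buffer so that, after alignment to the algorithm's
 * alignment mask, it can still hold len bytes of data.
 */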
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

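/*
 * Process the leading unaligned bytes from an aligned bounce buffer, then
 * pass the now-aligned remainder straight through.  Note that this version
 * assumes len covers at least the unaligned prefix; later versions of this
 * code clamp unaligned_len to len.
 */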
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

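/*
 * Write the digest into an aligned stack buffer first, then copy it out to
 * the caller's (unaligned) result pointer.
 */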
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

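/*
 * Fallback finup: compose update + final via the alignment-aware wrappers
 * above.  Also used when the algorithm provides no ->finup() hook.
 */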
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

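/*
 * Fallback digest: init + update + final via the alignment-aware wrappers.
 * Also used when the algorithm provides no ->digest() hook.
 */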
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

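/*
 * Restore previously exported state: copy it into the descriptor context and
 * give the algorithm a chance to reinitialise derived state via ->reinit(),
 * if provided.
 */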
int crypto_shash_import(struct shash_desc *desc, const u8 *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *alg = crypto_shash_alg(tfm);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));

	if (alg->reinit)
		alg->reinit(desc);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);

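/*
 * ahash adaptor: these callbacks expose a shash algorithm through the
 * asynchronous hash (ahash) interface.  The ahash tfm context holds a
 * pointer to the underlying crypto_shash, and the request context holds
 * the shash_desc.
 */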
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

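/*
 * Walk the request's scatterlist and feed each mapped chunk to
 * crypto_shash_update().  crypto_hash_walk_done() returns the size of the
 * next chunk, zero on completion, or a negative error code.
 */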
static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

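/*
 * Fast path for a digest request whose data fits within a single scatterlist
 * entry and does not cross a page boundary: map the page and hash it in one
 * call.  Anything larger falls back to init/update/final.
 */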
static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

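/*
 * Instantiate the underlying shash transform and wire the ahash entry points
 * in crt_ahash to the adaptor callbacks above.
 */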
static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = __crypto_shash_cast(crypto_create_tfm(
		calg, &crypto_shash_type));
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

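/*
 * Legacy hash adaptor: these callbacks expose a shash algorithm through the
 * old crypto_hash interface.  Here the tfm context is the shash_desc itself.
 */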
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

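/*
 * Same single-page fast path as shash_async_digest(), with a fallback to
 * init/update/final for data that spans scatterlist entries or pages.
 */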
static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = __crypto_shash_cast(crypto_create_tfm(
		calg, &crypto_shash_type));
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

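/*
 * Dispatch construction of the compatibility wrappers according to the
 * frontend requested through the caller's type/mask pair.
 */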
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}

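/*
 * Context size needed by each legacy frontend: the old hash interface embeds
 * the shash_desc directly, while ahash stores only a pointer to the shash.
 */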
static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
		return -EINVAL;
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
	seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return __crypto_shash_cast(
		crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

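/*
 * Register a synchronous hash algorithm.  The digest and descriptor sizes
 * are sanity-checked against PAGE_SIZE / 8 and the algorithm is marked with
 * the shash type before being handed to the generic registration code.
 */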
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
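
/*
 * Example (illustrative only, not part of this file): a minimal sketch of how
 * a caller might compute a one-shot digest through the shash API.  The
 * function name and the choice of "sha1" are hypothetical, and error handling
 * is abbreviated.
 */
static int example_shash_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* The descriptor carries the per-request state after the header. */
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}

	desc->tfm = tfm;
	desc->flags = 0;

	/* One-shot digest; init/update/final could be used instead. */
	err = crypto_shash_digest(desc, data, len, out);

	kfree(desc);
	crypto_free_shash(tfm);
	return err;
}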