[CRYPTO] skcipher: Add givcrypt operations and givcipher type
crypto/gcm.c
/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <crypto/algapi.h>
#include <crypto/gf128mul.h>
#include <crypto/scatterwalk.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

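/*
 * Context layout: the "gcm" template keeps a reference to a CTR mode
 * ablkcipher via the spawn below; each keyed transform additionally
 * caches the 4k GHASH multiplication table derived from the hash
 * subkey H.
 */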
struct gcm_instance_ctx {
        struct crypto_spawn ctr;
};

struct crypto_gcm_ctx {
        struct crypto_ablkcipher *ctr;
        struct gf128mul_4k *gf128;
};

struct crypto_gcm_ghash_ctx {
        u32 bytes;
        u32 flags;
        struct gf128mul_4k *gf128;
        u8 buffer[16];
};

struct crypto_gcm_req_priv_ctx {
        u8 auth_tag[16];
        u8 iauth_tag[16];
        struct scatterlist src[2];
        struct scatterlist dst[2];
        struct crypto_gcm_ghash_ctx ghash;
        struct ablkcipher_request abreq;
};

struct crypto_gcm_setkey_result {
        int err;
        struct completion completion;
};

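/*
 * The private request context lives in the tail of the aead_request
 * and has to be realigned to the underlying cipher's alignment mask,
 * hence the PTR_ALIGN below.
 */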
static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
        struct aead_request *req)
{
        unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

        return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

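/*
 * Software GHASH.  Each full 16-byte block is XORed into ctx->buffer
 * and the buffer is then multiplied by H in GF(2^128) via the
 * precomputed 4k table; a trailing partial block is buffered, with
 * ctx->bytes recording how many bytes are still missing from it.
 */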
static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
                                  struct gf128mul_4k *gf128)
{
        ctx->bytes = 0;
        ctx->flags = flags;
        ctx->gf128 = gf128;
        memset(ctx->buffer, 0, 16);
}

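/*
 * Absorb srclen bytes: complete any partially filled block first,
 * then consume whole blocks, then stash the remainder.
 */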
static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
                                    const u8 *src, unsigned int srclen)
{
        u8 *dst = ctx->buffer;

        if (ctx->bytes) {
                int n = min(srclen, ctx->bytes);
                u8 *pos = dst + (16 - ctx->bytes);

                ctx->bytes -= n;
                srclen -= n;

                while (n--)
                        *pos++ ^= *src++;

                if (!ctx->bytes)
                        gf128mul_4k_lle((be128 *)dst, ctx->gf128);
        }

        while (srclen >= 16) {
                crypto_xor(dst, src, 16);
                gf128mul_4k_lle((be128 *)dst, ctx->gf128);
                src += 16;
                srclen -= 16;
        }

        if (srclen) {
                ctx->bytes = 16 - srclen;
                while (srclen--)
                        *dst++ ^= *src++;
        }
}

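/*
 * Same as crypto_gcm_ghash_update() but sourced from a scatterlist,
 * using scatterwalk so the data never has to be linearised.
 */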
static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
                                       struct scatterlist *sg, int len)
{
        struct scatter_walk walk;
        u8 *src;
        int n;

        if (!len)
                return;

        scatterwalk_start(&walk, sg);

        while (len) {
                n = scatterwalk_clamp(&walk, len);

                if (!n) {
                        scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }

                src = scatterwalk_map(&walk, 0);

                crypto_gcm_ghash_update(ctx, src, n);
                len -= n;

                scatterwalk_unmap(src, 0);
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);
                if (len)
                        crypto_yield(ctx->flags);
        }
}

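/*
 * Zero-pad and multiply in any buffered partial block, leaving the
 * GHASH state on a block boundary.
 */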
static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
{
        u8 *dst = ctx->buffer;

        if (ctx->bytes) {
                u8 *tmp = dst + (16 - ctx->bytes);

                while (ctx->bytes--)
                        *tmp++ ^= 0;

                gf128mul_4k_lle((be128 *)dst, ctx->gf128);
        }

        ctx->bytes = 0;
}

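/*
 * Finish GHASH by folding in the bit lengths of the associated data
 * and the ciphertext, then XOR the result into dst (the running
 * authentication tag).
 */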
static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
                                       unsigned int authlen,
                                       unsigned int cryptlen, u8 *dst)
{
        u8 *buf = ctx->buffer;
        u128 lengths;

        lengths.a = cpu_to_be64(authlen * 8);
        lengths.b = cpu_to_be64(cryptlen * 8);

        crypto_gcm_ghash_flush(ctx);
        crypto_xor(buf, (u8 *)&lengths, 16);
        gf128mul_4k_lle((be128 *)buf, ctx->gf128);
        crypto_xor(dst, buf, 16);
}

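/* Completion callback used to wait synchronously for the H derivation. */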
static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
{
        struct crypto_gcm_setkey_result *result = req->data;

        if (err == -EINPROGRESS)
                return;

        result->err = err;
        complete(&result->completion);
}

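/*
 * Key the CTR cipher, then derive the hash subkey H = E(K, 0^128):
 * the kzalloc()ed buffer provides both the all-zero plaintext block
 * and the all-zero counter, so encrypting it yields H directly.
 */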
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ablkcipher *ctr = ctx->ctr;
        struct {
                be128 hash;
                u8 iv[16];

                struct crypto_gcm_setkey_result result;

                struct scatterlist sg[1];
                struct ablkcipher_request req;
        } *data;
        int err;

        crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
        crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
                                    CRYPTO_TFM_REQ_MASK);

        err = crypto_ablkcipher_setkey(ctr, key, keylen);
        if (err)
                return err;

        crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
                              CRYPTO_TFM_RES_MASK);

        data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
                       GFP_KERNEL);
        if (!data)
                return -ENOMEM;

        init_completion(&data->result.completion);
        sg_init_one(data->sg, &data->hash, sizeof(data->hash));
        ablkcipher_request_set_tfm(&data->req, ctr);
        ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
                                                    CRYPTO_TFM_REQ_MAY_BACKLOG,
                                        crypto_gcm_setkey_done,
                                        &data->result);
        ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
                                     sizeof(data->hash), data->iv);

        err = crypto_ablkcipher_encrypt(&data->req);
        if (err == -EINPROGRESS || err == -EBUSY) {
                /* An interruptible wait could free data while the request
                 * is still in flight, so wait unconditionally. */
                wait_for_completion(&data->result.completion);
                err = data->result.err;
        }

        if (err)
                goto out;

        if (ctx->gf128 != NULL)
                gf128mul_free_4k(ctx->gf128);

        ctx->gf128 = gf128mul_init_4k_lle(&data->hash);

        if (ctx->gf128 == NULL)
                err = -ENOMEM;

out:
        kfree(data);
        return err;
}

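/*
 * Common encrypt/decrypt setup.  The counter starts at 1; a zeroed
 * auth_tag block is chained in front of the payload so that the
 * first keystream block, E(K, Y0), lands in auth_tag for the final
 * tag computation.  The associated data is hashed here as well.
 */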
static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
                                  struct aead_request *req,
                                  unsigned int cryptlen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
        u32 flags = req->base.tfm->crt_flags;
        struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
        struct scatterlist *dst;
        __be32 counter = cpu_to_be32(1);

        memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
        memcpy(req->iv + 12, &counter, 4);

        sg_init_table(pctx->src, 2);
        sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
        scatterwalk_sg_chain(pctx->src, 2, req->src);

        dst = pctx->src;
        if (req->src != req->dst) {
                sg_init_table(pctx->dst, 2);
                sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
                scatterwalk_sg_chain(pctx->dst, 2, req->dst);
                dst = pctx->dst;
        }

        ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
        ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
                                     cryptlen + sizeof(pctx->auth_tag),
                                     req->iv);

        crypto_gcm_ghash_init(ghash, flags, ctx->gf128);

        crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
        crypto_gcm_ghash_flush(ghash);
}

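/*
 * Hash the ciphertext, fold in the lengths and copy the resulting
 * tag (GHASH output XOR E(K, Y0)) to the end of the destination.
 */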
static int crypto_gcm_hash(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
        u8 *auth_tag = pctx->auth_tag;
        struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;

        crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
        crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
                                   auth_tag);

        scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
                                 crypto_aead_authsize(aead), 1);
        return 0;
}

static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;

        if (!err)
                err = crypto_gcm_hash(req);

        aead_request_complete(req, err);
}

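/* Encrypt-then-hash: run CTR over the plaintext, then compute the tag. */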
static int crypto_gcm_encrypt(struct aead_request *req)
{
        struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
        struct ablkcipher_request *abreq = &pctx->abreq;
        int err;

        crypto_gcm_init_crypt(abreq, req, req->cryptlen);
        ablkcipher_request_set_callback(abreq, aead_request_flags(req),
                                        crypto_gcm_encrypt_done, req);

        err = crypto_ablkcipher_encrypt(abreq);
        if (err)
                return err;

        return crypto_gcm_hash(req);
}

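/*
 * Recompute the tag and compare it with the one stored at the end of
 * the source buffer; returns -EBADMSG on mismatch.
 */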
static int crypto_gcm_verify(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
        struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
        u8 *auth_tag = pctx->auth_tag;
        u8 *iauth_tag = pctx->iauth_tag;
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen - authsize;

        crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);

        scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
        return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;

        if (!err)
                err = crypto_gcm_verify(req);

        aead_request_complete(req, err);
}

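/*
 * Hash-then-decrypt: the ciphertext (minus the trailing tag) is
 * hashed before CTR decryption, and the tag is verified last.
 */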
static int crypto_gcm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
        struct ablkcipher_request *abreq = &pctx->abreq;
        struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
        unsigned int cryptlen = req->cryptlen;
        unsigned int authsize = crypto_aead_authsize(aead);
        int err;

        if (cryptlen < authsize)
                return -EINVAL;
        cryptlen -= authsize;

        crypto_gcm_init_crypt(abreq, req, cryptlen);
        ablkcipher_request_set_callback(abreq, aead_request_flags(req),
                                        crypto_gcm_decrypt_done, req);

        crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);

        err = crypto_ablkcipher_decrypt(abreq);
        if (err)
                return err;

        return crypto_gcm_verify(req);
}

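/*
 * Instance init/teardown: instantiate the CTR transform from the
 * spawn and size the request context, leaving room to realign it to
 * the cipher's alignment mask.
 */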
static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
        struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_ablkcipher *ctr;
        unsigned long align;
        int err;

        ctr = crypto_spawn_ablkcipher(&ictx->ctr);
        err = PTR_ERR(ctr);
        if (IS_ERR(ctr))
                return err;

        ctx->ctr = ctr;
        ctx->gf128 = NULL;

        align = crypto_tfm_alg_alignmask(tfm);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        tfm->crt_aead.reqsize = align +
                                sizeof(struct crypto_gcm_req_priv_ctx) +
                                crypto_ablkcipher_reqsize(ctr);

        return 0;
}

static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);

        if (ctx->gf128 != NULL)
                gf128mul_free_4k(ctx->gf128);

        crypto_free_ablkcipher(ctx->ctr);
}

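/*
 * Template constructor: "gcm(X)" is built on "ctr(X)" for any cipher
 * X with a 16-byte block size, inheriting its priority and alignment.
 */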
static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
{
        struct crypto_instance *inst;
        struct crypto_alg *ctr;
        struct crypto_alg *cipher;
        struct gcm_instance_ctx *ctx;
        int err;
        char ctr_name[CRYPTO_MAX_ALG_NAME];

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD);
        if (err)
                return ERR_PTR(err);

        cipher = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
                                 CRYPTO_ALG_TYPE_MASK);

        inst = ERR_PTR(PTR_ERR(cipher));
        if (IS_ERR(cipher))
                return inst;

        inst = ERR_PTR(-ENAMETOOLONG);
        if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
                     cipher->cra_name) >= CRYPTO_MAX_ALG_NAME)
                return inst;

        ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER,
                                    CRYPTO_ALG_TYPE_MASK);

        if (IS_ERR(ctr))
                return ERR_PTR(PTR_ERR(ctr));

        err = -EINVAL;
        if (cipher->cra_blocksize != 16)
                goto out_put_ctr;

        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        err = -ENOMEM;
        if (!inst)
                goto out_put_ctr;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
                     "gcm(%s)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME ||
            snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "gcm(%s)", cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_free_inst;

        ctx = crypto_instance_ctx(inst);
        err = crypto_init_spawn(&ctx->ctr, ctr, inst, CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto err_free_inst;

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
        inst->alg.cra_priority = ctr->cra_priority;
        inst->alg.cra_blocksize = 16;
        inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
        inst->alg.cra_type = &crypto_aead_type;
        inst->alg.cra_aead.ivsize = 16;
        inst->alg.cra_aead.maxauthsize = 16;
        inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
        inst->alg.cra_init = crypto_gcm_init_tfm;
        inst->alg.cra_exit = crypto_gcm_exit_tfm;
        inst->alg.cra_aead.setkey = crypto_gcm_setkey;
        inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
        inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;

out:
        crypto_mod_put(ctr);
        return inst;

err_free_inst:
        kfree(inst);
out_put_ctr:
        inst = ERR_PTR(err);
        goto out;
}

static void crypto_gcm_free(struct crypto_instance *inst)
{
        struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);

        crypto_drop_spawn(&ctx->ctr);
        kfree(inst);
}

static struct crypto_template crypto_gcm_tmpl = {
        .name = "gcm",
        .alloc = crypto_gcm_alloc,
        .free = crypto_gcm_free,
        .module = THIS_MODULE,
};

static int __init crypto_gcm_module_init(void)
{
        return crypto_register_template(&crypto_gcm_tmpl);
}

static void __exit crypto_gcm_module_exit(void)
{
        crypto_unregister_template(&crypto_gcm_tmpl);
}

module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
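
/*
 * Usage sketch, assuming the contemporaneous AEAD API in
 * <linux/crypto.h> (crypto_alloc_aead() and friends); illustrative
 * only, the key/request handling is elided:
 *
 *      struct crypto_aead *tfm;
 *      int err;
 *
 *      tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *
 *      err = crypto_aead_setkey(tfm, key, keylen);
 *      if (!err)
 *              err = crypto_aead_setauthsize(tfm, 16);
 *
 *      ... set up an aead_request and call crypto_aead_encrypt() ...
 *
 *      crypto_free_aead(tfm);
 */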