crypto: pcrypt - Make tfm_count an atomic_t
crypto/pcrypt.c
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>

struct padata_pcrypt {
	struct padata_instance *pinst;
	struct workqueue_struct *wq;

	/*
	 * Cpumask for callback CPUs. It should be
	 * equal to the serial cpumask of the corresponding padata instance,
	 * so it is updated when padata notifies us about a serial
	 * cpumask change.
	 *
	 * cb_cpumask is protected by RCU. This fact prevents us from
	 * using cpumask_var_t directly because the actual type of
	 * cpumask_var_t depends on the kernel configuration (particularly on
	 * the CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration
	 * cpumask_var_t may be either a pointer to the struct cpumask
	 * or a variable allocated on the stack. Thus we can not safely use
	 * cpumask_var_t with RCU operations such as rcu_assign_pointer or
	 * rcu_dereference. So cpumask_var_t is wrapped with struct
	 * pcrypt_cpumask, which makes it possible to use it with RCU.
	 */
	struct pcrypt_cpumask {
		cpumask_var_t mask;
	} *cb_cpumask;
	struct notifier_block nblock;
};

static struct padata_pcrypt pencrypt;
static struct padata_pcrypt pdecrypt;
static struct kset *pcrypt_kset;

struct pcrypt_instance_ctx {
	struct crypto_aead_spawn spawn;
	atomic_t tfm_count;
};

struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};

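/*
 * Pick the callback CPU for a request: keep the caller's choice if it is
 * still in the current serial cpumask, otherwise map it onto one of the
 * CPUs in that mask. The cpumask is read under the BH RCU read lock
 * because it may be swapped by the padata cpumask notifier.
 */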
static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
			      struct padata_pcrypt *pcrypt)
{
	unsigned int cpu_index, cpu, i;
	struct pcrypt_cpumask *cpumask;

	cpu = *cb_cpu;

	rcu_read_lock_bh();
	cpumask = rcu_dereference_bh(pcrypt->cb_cpumask);
	if (cpumask_test_cpu(cpu, cpumask->mask))
		goto out;

	if (!cpumask_weight(cpumask->mask))
		goto out;

	cpu_index = cpu % cpumask_weight(cpumask->mask);

	cpu = cpumask_first(cpumask->mask);
	for (i = 0; i < cpu_index; i++)
		cpu = cpumask_next(cpu, cpumask->mask);

	*cb_cpu = cpu;

out:
	rcu_read_unlock_bh();
	return padata_do_parallel(pcrypt->pinst, padata, cpu);
}

static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

static void pcrypt_aead_giv_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->areq.base.data, padata->info);
}

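/*
 * Completion callback of the underlying (child) AEAD request. The result is
 * stored in padata->info and the request is handed back to padata for
 * serialization, so completions are delivered in submission order.
 */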
static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	padata_do_serial(padata);
}

static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_encrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

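/*
 * Set up the child AEAD request inside the pcrypt request context and
 * dispatch it to the "pencrypt" padata instance. Returning -EINPROGRESS
 * tells the caller that the request completes asynchronously.
 */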
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_assoc(creq, req->assoc, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}

static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_decrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_assoc(creq, req->assoc, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}

static void pcrypt_aead_givenc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_givencrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

static int pcrypt_aead_givencrypt(struct aead_givcrypt_request *req)
{
	int err;
	struct aead_request *areq = &req->areq;
	struct pcrypt_request *preq = aead_request_ctx(areq);
	struct aead_givcrypt_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = aead_givcrypt_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(areq);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_givenc;
	padata->serial = pcrypt_aead_giv_serial;

	aead_givcrypt_set_tfm(creq, ctx->child);
	aead_givcrypt_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				   pcrypt_aead_done, areq);
	aead_givcrypt_set_crypt(creq, areq->src, areq->dst,
				areq->cryptlen, areq->iv);
	aead_givcrypt_set_assoc(creq, areq->assoc, areq->assoclen);
	aead_givcrypt_set_giv(creq, req->giv, req->seq);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}

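/*
 * Spread transforms over the online CPUs: tfm_count is an atomic_t that is
 * incremented for every new tfm, and the result is used to pick a default
 * callback CPU in round-robin fashion.
 */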
static int pcrypt_aead_init_tfm(struct crypto_tfm *tfm)
{
	int cpu, cpu_index;
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *cipher;

	cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
		    cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(crypto_instance_ctx(inst));

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct pcrypt_request) +
				sizeof(struct aead_givcrypt_request) +
				crypto_aead_reqsize(cipher));

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_tfm *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int pcrypt_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

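/*
 * Build a "pcrypt(...)" instance around the requested AEAD algorithm:
 * grab a spawn of the underlying algorithm and wire the pcrypt entry
 * points into the new instance.
 */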
static struct crypto_instance *pcrypt_alloc_aead(struct rtattr **tb,
						 u32 type, u32 mask)
{
	struct pcrypt_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	const char *name;
	int err;

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	ctx = crypto_instance_ctx(inst);
	crypto_set_aead_spawn(&ctx->spawn, inst);

	err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
	if (err)
		goto out_free_inst;

	alg = crypto_aead_spawn_alg(&ctx->spawn);
	err = pcrypt_init_instance(inst, alg);
	if (err)
		goto out_drop_aead;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_aead_type;

	inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
	inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;

	inst->alg.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.cra_init = pcrypt_aead_init_tfm;
	inst->alg.cra_exit = pcrypt_aead_exit_tfm;

	inst->alg.cra_aead.setkey = pcrypt_aead_setkey;
	inst->alg.cra_aead.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.cra_aead.encrypt = pcrypt_aead_encrypt;
	inst->alg.cra_aead.decrypt = pcrypt_aead_decrypt;
	inst->alg.cra_aead.givencrypt = pcrypt_aead_givencrypt;

out:
	return inst;

out_drop_aead:
	crypto_drop_aead(&ctx->spawn);
out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *pcrypt_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_alloc_aead(tb, algt->type, algt->mask);
	}

	return ERR_PTR(-EINVAL);
}

static void pcrypt_free(struct crypto_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_aead(&ctx->spawn);
	kfree(inst);
}

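/*
 * padata cpumask notifier: when the serial cpumask of the padata instance
 * changes, publish a fresh copy of it via rcu_assign_pointer() and free the
 * old mask after synchronize_rcu_bh().
 */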
static int pcrypt_cpumask_change_notify(struct notifier_block *self,
					unsigned long val, void *data)
{
	struct padata_pcrypt *pcrypt;
	struct pcrypt_cpumask *new_mask, *old_mask;
	struct padata_cpumask *cpumask = (struct padata_cpumask *)data;

	if (!(val & PADATA_CPU_SERIAL))
		return 0;

	pcrypt = container_of(self, struct padata_pcrypt, nblock);
	new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
	if (!new_mask)
		return -ENOMEM;
	if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
		kfree(new_mask);
		return -ENOMEM;
	}

	old_mask = pcrypt->cb_cpumask;

	cpumask_copy(new_mask->mask, cpumask->cbcpu);
	rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
	synchronize_rcu_bh();

	free_cpumask_var(old_mask->mask);
	kfree(old_mask);
	return 0;
}

static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}

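/*
 * Set up one padata instance ("pencrypt" or "pdecrypt"): allocate its
 * workqueue and padata instance, seed the callback cpumask with the
 * online CPUs, register the cpumask notifier and add the sysfs kobject.
 */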
static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
			      const char *name)
{
	int ret = -ENOMEM;
	struct pcrypt_cpumask *mask;

	get_online_cpus();

	pcrypt->wq = alloc_workqueue("%s", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				     1, name);
	if (!pcrypt->wq)
		goto err;

	pcrypt->pinst = padata_alloc_possible(pcrypt->wq);
	if (!pcrypt->pinst)
		goto err_destroy_workqueue;

	mask = kmalloc(sizeof(*mask), GFP_KERNEL);
	if (!mask)
		goto err_free_padata;
	if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
		kfree(mask);
		goto err_free_padata;
	}

	cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
	rcu_assign_pointer(pcrypt->cb_cpumask, mask);

	pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
	ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	if (ret)
		goto err_free_cpumask;

	ret = pcrypt_sysfs_add(pcrypt->pinst, name);
	if (ret)
		goto err_unregister_notifier;

	put_online_cpus();

	return ret;

err_unregister_notifier:
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
err_free_cpumask:
	free_cpumask_var(mask->mask);
	kfree(mask);
err_free_padata:
	padata_free(pcrypt->pinst);
err_destroy_workqueue:
	destroy_workqueue(pcrypt->wq);
err:
	put_online_cpus();

	return ret;
}

static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
{
	free_cpumask_var(pcrypt->cb_cpumask->mask);
	kfree(pcrypt->cb_cpumask);

	padata_stop(pcrypt->pinst);
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	destroy_workqueue(pcrypt->wq);
	padata_free(pcrypt->pinst);
}

static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.alloc = pcrypt_alloc,
	.free = pcrypt_free,
	.module = THIS_MODULE,
};

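/*
 * Module init: create the pcrypt kset, bring up the pencrypt and pdecrypt
 * padata instances, start them and register the "pcrypt" template.
 */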
static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	padata_start(pencrypt.pinst);
	padata_start(pdecrypt.pinst);

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	pcrypt_fini_padata(&pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}

static void __exit pcrypt_exit(void)
{
	pcrypt_fini_padata(&pencrypt);
	pcrypt_fini_padata(&pdecrypt);

	kset_unregister(pcrypt_kset);
	crypto_unregister_template(&pcrypt_tmpl);
}

module_init(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");