/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

#define CRYPTD_MAX_QLEN 100

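/*
 * All cryptd instances share a single daemon: a spinlock-protected
 * crypto_queue drained by one kernel thread.  The mutex serialises
 * request processing against transform teardown.
 */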
struct cryptd_state {
	spinlock_t lock;
	struct mutex mutex;
	struct crypto_queue queue;
	struct task_struct *task;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_state *state;
};

struct cryptd_blkcipher_ctx {
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	struct crypto_hash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
};

static inline struct cryptd_state *cryptd_get_state(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->state;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

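/*
 * Runs in the cryptd thread: perform the operation with the underlying
 * synchronous blkcipher, restore the caller's completion handler and
 * invoke it with softirqs disabled.
 */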
static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct blkcipher_desc desc;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

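/*
 * Queueing path: stash the caller's completion in the request context,
 * substitute cryptd's own callback, enqueue the request and wake the
 * daemon thread.
 */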
static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t complete)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_state *state =
		cryptd_get_state(crypto_ablkcipher_tfm(tfm));
	int err;

	rctx->complete = req->base.complete;
	req->base.complete = complete;

	spin_lock_bh(&state->lock);
	err = ablkcipher_enqueue_request(&state->queue, req);
	spin_unlock_bh(&state->lock);

	wake_up_process(state->task);

	return err;
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

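/* Grab the wrapped synchronous blkcipher and size the request context. */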
static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct cryptd_state *state = cryptd_get_state(tfm);
	int active;

	mutex_lock(&state->mutex);
	active = ablkcipher_tfm_in_queue(&state->queue,
					 __crypto_ablkcipher_cast(tfm));
	mutex_unlock(&state->mutex);

	BUG_ON(active);

	crypto_free_blkcipher(ctx->child);
}

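/*
 * Common instance setup: the instance is named "cryptd(<driver>)" and
 * given a priority 50 above the algorithm it wraps.
 */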
static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
						     struct cryptd_state *state)
{
	struct crypto_instance *inst;
	struct cryptd_instance_ctx *ctx;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst) {
		inst = ERR_PTR(-ENOMEM);
		goto out;
	}

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_inst;

	ctx = crypto_instance_ctx(inst);
	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	ctx->state = state;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

out:
	return inst;

out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *cryptd_alloc_blkcipher(
	struct rtattr **tb, struct cryptd_state *state)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = cryptd_alloc_instance(alg, state);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

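/*
 * The hash path mirrors the blkcipher path above: a synchronous
 * crypto_hash is wrapped and exposed as an asynchronous ahash.
 */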
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_hash *cipher;

	cipher = crypto_spawn_hash(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ahash.reqsize =
		sizeof(struct cryptd_hash_request_ctx);
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct cryptd_state *state = cryptd_get_state(tfm);
	int active;

	mutex_lock(&state->mutex);
	active = ahash_tfm_in_queue(&state->queue,
				    __crypto_ahash_cast(tfm));
	mutex_unlock(&state->mutex);

	BUG_ON(active);

	crypto_free_hash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_hash *child = ctx->child;
	int err;

	crypto_hash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_hash_set_flags(child, crypto_ahash_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_hash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_hash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);

	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t complete)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_state *state =
		cryptd_get_state(crypto_ahash_tfm(tfm));
	int err;

	rctx->complete = req->base.complete;
	req->base.complete = complete;

	spin_lock_bh(&state->lock);
	err = ahash_enqueue_request(&state->queue, req);
	spin_unlock_bh(&state->lock);

	wake_up_process(state->task);

	return err;
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->init(&desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->update(&desc, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->final(&desc, req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_hash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;
	struct hash_desc desc;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_hash_crt(child)->digest(&desc, req->src, req->nbytes,
					     req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static struct crypto_instance *cryptd_alloc_hash(
	struct rtattr **tb, struct cryptd_state *state)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_HASH_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = cryptd_alloc_instance(alg, state);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_ahash_type;

	inst->alg.cra_ahash.digestsize = alg->cra_hash.digestsize;
	inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.cra_init = cryptd_hash_init_tfm;
	inst->alg.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.cra_ahash.init = cryptd_hash_init_enqueue;
	inst->alg.cra_ahash.update = cryptd_hash_update_enqueue;
	inst->alg.cra_ahash.final = cryptd_hash_final_enqueue;
	inst->alg.cra_ahash.setkey = cryptd_hash_setkey;
	inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

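/* A single global daemon state backs every cryptd template instance. */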
static struct cryptd_state state;

static struct crypto_instance *cryptd_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_alloc_blkcipher(tb, &state);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_alloc_hash(tb, &state);
	}

	return ERR_PTR(-EINVAL);
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->spawn);
	kfree(inst);
}

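/*
 * The "cryptd" template; instantiated as, for example, "cryptd(cbc(aes))".
 */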
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.alloc = cryptd_alloc,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

static inline int cryptd_create_thread(struct cryptd_state *state,
				       int (*fn)(void *data), const char *name)
{
	spin_lock_init(&state->lock);
	mutex_init(&state->mutex);
	crypto_init_queue(&state->queue, CRYPTD_MAX_QLEN);

	state->task = kthread_run(fn, state, name);
	if (IS_ERR(state->task))
		return PTR_ERR(state->task);

	return 0;
}

static inline void cryptd_stop_thread(struct cryptd_state *state)
{
	BUG_ON(state->queue.qlen);
	kthread_stop(state->task);
}

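/*
 * Daemon main loop: dequeue one request at a time under the mutex, tell
 * any backlogged request it is now in progress, then run the dequeued
 * request's completion callback, which performs the actual crypto.
 * Sleeps when the queue is empty.
 */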
static int cryptd_thread(void *data)
{
	struct cryptd_state *state = data;
	int stop;

	current->flags |= PF_NOFREEZE;

	do {
		struct crypto_async_request *req, *backlog;

		mutex_lock(&state->mutex);
		__set_current_state(TASK_INTERRUPTIBLE);

		spin_lock_bh(&state->lock);
		backlog = crypto_get_backlog(&state->queue);
		req = crypto_dequeue_request(&state->queue);
		spin_unlock_bh(&state->lock);

		stop = kthread_should_stop();

		if (stop || req) {
			__set_current_state(TASK_RUNNING);
			if (req) {
				if (backlog)
					backlog->complete(backlog,
							  -EINPROGRESS);
				req->complete(req, 0);
			}
		}

		mutex_unlock(&state->mutex);

		schedule();
	} while (!stop);

	return 0;
}

static int __init cryptd_init(void)
{
	int err;

	err = cryptd_create_thread(&state, cryptd_thread, "cryptd");
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		kthread_stop(state.task);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_stop_thread(&state);
	crypto_unregister_template(&cryptd_tmpl);
}

module_init(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");