/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
15 #include <crypto/internal/geniv.h>
16 #include <crypto/internal/rng.h>
17 #include <crypto/null.h>
18 #include <crypto/scatterwalk.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/kernel.h>
22 #include <linux/module.h>
23 #include <linux/rtnetlink.h>
24 #include <linux/sched.h>
25 #include <linux/slab.h>
26 #include <linux/seq_file.h>
27 #include <linux/cryptouser.h>
28 #include <net/netlink.h>
32 struct compat_request_ctx
{
33 struct scatterlist src
[2];
34 struct scatterlist dst
[2];
35 struct scatterlist ivbuf
[2];
36 struct scatterlist
*ivsg
;
37 struct aead_givcrypt_request subreq
;
/* Forward declarations: no-op IV generators used for ivsize == 0. */
static int aead_null_givencrypt(struct aead_givcrypt_request *req);
static int aead_null_givdecrypt(struct aead_givcrypt_request *req);
43 static int setkey_unaligned(struct crypto_aead
*tfm
, const u8
*key
,
46 unsigned long alignmask
= crypto_aead_alignmask(tfm
);
48 u8
*buffer
, *alignbuffer
;
51 absize
= keylen
+ alignmask
;
52 buffer
= kmalloc(absize
, GFP_ATOMIC
);
56 alignbuffer
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
57 memcpy(alignbuffer
, key
, keylen
);
58 ret
= tfm
->setkey(tfm
, alignbuffer
, keylen
);
59 memset(alignbuffer
, 0, keylen
);
64 int crypto_aead_setkey(struct crypto_aead
*tfm
,
65 const u8
*key
, unsigned int keylen
)
67 unsigned long alignmask
= crypto_aead_alignmask(tfm
);
71 if ((unsigned long)key
& alignmask
)
72 return setkey_unaligned(tfm
, key
, keylen
);
74 return tfm
->setkey(tfm
, key
, keylen
);
76 EXPORT_SYMBOL_GPL(crypto_aead_setkey
);
78 int crypto_aead_setauthsize(struct crypto_aead
*tfm
, unsigned int authsize
)
82 if (authsize
> crypto_aead_maxauthsize(tfm
))
85 if (tfm
->setauthsize
) {
86 err
= tfm
->setauthsize(tfm
->child
, authsize
);
91 tfm
->child
->authsize
= authsize
;
92 tfm
->authsize
= authsize
;
95 EXPORT_SYMBOL_GPL(crypto_aead_setauthsize
);
97 struct aead_old_request
{
98 struct scatterlist srcbuf
[2];
99 struct scatterlist dstbuf
[2];
100 struct aead_request subreq
;
103 unsigned int crypto_aead_reqsize(struct crypto_aead
*tfm
)
105 return tfm
->reqsize
+ sizeof(struct aead_old_request
);
107 EXPORT_SYMBOL_GPL(crypto_aead_reqsize
);
109 static int old_crypt(struct aead_request
*req
,
110 int (*crypt
)(struct aead_request
*req
))
112 struct aead_old_request
*nreq
= aead_request_ctx(req
);
113 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
114 struct scatterlist
*src
, *dst
;
119 src
= scatterwalk_ffwd(nreq
->srcbuf
, req
->src
, req
->assoclen
);
120 dst
= req
->src
== req
->dst
?
121 src
: scatterwalk_ffwd(nreq
->dstbuf
, req
->dst
, req
->assoclen
);
123 aead_request_set_tfm(&nreq
->subreq
, aead
);
124 aead_request_set_callback(&nreq
->subreq
, aead_request_flags(req
),
125 req
->base
.complete
, req
->base
.data
);
126 aead_request_set_crypt(&nreq
->subreq
, src
, dst
, req
->cryptlen
,
128 aead_request_set_assoc(&nreq
->subreq
, req
->src
, req
->assoclen
);
130 return crypt(&nreq
->subreq
);
133 static int old_encrypt(struct aead_request
*req
)
135 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
136 struct old_aead_alg
*alg
= crypto_old_aead_alg(aead
);
138 return old_crypt(req
, alg
->encrypt
);
141 static int old_decrypt(struct aead_request
*req
)
143 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
144 struct old_aead_alg
*alg
= crypto_old_aead_alg(aead
);
146 return old_crypt(req
, alg
->decrypt
);
149 static int no_givcrypt(struct aead_givcrypt_request
*req
)
154 static int crypto_old_aead_init_tfm(struct crypto_tfm
*tfm
)
156 struct old_aead_alg
*alg
= &tfm
->__crt_alg
->cra_aead
;
157 struct crypto_aead
*crt
= __crypto_aead_cast(tfm
);
159 if (max(alg
->maxauthsize
, alg
->ivsize
) > PAGE_SIZE
/ 8)
162 crt
->setkey
= alg
->setkey
;
163 crt
->setauthsize
= alg
->setauthsize
;
164 crt
->encrypt
= old_encrypt
;
165 crt
->decrypt
= old_decrypt
;
167 crt
->givencrypt
= alg
->givencrypt
?: no_givcrypt
;
168 crt
->givdecrypt
= alg
->givdecrypt
?: no_givcrypt
;
170 crt
->givencrypt
= aead_null_givencrypt
;
171 crt
->givdecrypt
= aead_null_givdecrypt
;
173 crt
->child
= __crypto_aead_cast(tfm
);
174 crt
->authsize
= alg
->maxauthsize
;
179 static void crypto_aead_exit_tfm(struct crypto_tfm
*tfm
)
181 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
182 struct aead_alg
*alg
= crypto_aead_alg(aead
);
187 static int crypto_aead_init_tfm(struct crypto_tfm
*tfm
)
189 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
190 struct aead_alg
*alg
= crypto_aead_alg(aead
);
192 if (crypto_old_aead_alg(aead
)->encrypt
)
193 return crypto_old_aead_init_tfm(tfm
);
195 aead
->setkey
= alg
->setkey
;
196 aead
->setauthsize
= alg
->setauthsize
;
197 aead
->encrypt
= alg
->encrypt
;
198 aead
->decrypt
= alg
->decrypt
;
199 aead
->child
= __crypto_aead_cast(tfm
);
200 aead
->authsize
= alg
->maxauthsize
;
203 aead
->base
.exit
= crypto_aead_exit_tfm
;
206 return alg
->init(aead
);
212 static int crypto_old_aead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
214 struct crypto_report_aead raead
;
215 struct old_aead_alg
*aead
= &alg
->cra_aead
;
217 strncpy(raead
.type
, "aead", sizeof(raead
.type
));
218 strncpy(raead
.geniv
, aead
->geniv
?: "<built-in>", sizeof(raead
.geniv
));
220 raead
.blocksize
= alg
->cra_blocksize
;
221 raead
.maxauthsize
= aead
->maxauthsize
;
222 raead
.ivsize
= aead
->ivsize
;
224 if (nla_put(skb
, CRYPTOCFGA_REPORT_AEAD
,
225 sizeof(struct crypto_report_aead
), &raead
))
226 goto nla_put_failure
;
233 static int crypto_old_aead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
239 static void crypto_old_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
240 __attribute__ ((unused
));
241 static void crypto_old_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
243 struct old_aead_alg
*aead
= &alg
->cra_aead
;
245 seq_printf(m
, "type : aead\n");
246 seq_printf(m
, "async : %s\n", alg
->cra_flags
& CRYPTO_ALG_ASYNC
?
248 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
249 seq_printf(m
, "ivsize : %u\n", aead
->ivsize
);
250 seq_printf(m
, "maxauthsize : %u\n", aead
->maxauthsize
);
251 seq_printf(m
, "geniv : %s\n", aead
->geniv
?: "<built-in>");
254 const struct crypto_type crypto_aead_type
= {
255 .extsize
= crypto_alg_extsize
,
256 .init_tfm
= crypto_aead_init_tfm
,
257 #ifdef CONFIG_PROC_FS
258 .show
= crypto_old_aead_show
,
260 .report
= crypto_old_aead_report
,
261 .lookup
= crypto_lookup_aead
,
262 .maskclear
= ~(CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_GENIV
),
263 .maskset
= CRYPTO_ALG_TYPE_MASK
,
264 .type
= CRYPTO_ALG_TYPE_AEAD
,
265 .tfmsize
= offsetof(struct crypto_aead
, base
),
267 EXPORT_SYMBOL_GPL(crypto_aead_type
);
270 static int crypto_aead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
272 struct crypto_report_aead raead
;
273 struct aead_alg
*aead
= container_of(alg
, struct aead_alg
, base
);
275 strncpy(raead
.type
, "aead", sizeof(raead
.type
));
276 strncpy(raead
.geniv
, "<none>", sizeof(raead
.geniv
));
278 raead
.blocksize
= alg
->cra_blocksize
;
279 raead
.maxauthsize
= aead
->maxauthsize
;
280 raead
.ivsize
= aead
->ivsize
;
282 if (nla_put(skb
, CRYPTOCFGA_REPORT_AEAD
,
283 sizeof(struct crypto_report_aead
), &raead
))
284 goto nla_put_failure
;
291 static int crypto_aead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
297 static void crypto_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
298 __attribute__ ((unused
));
299 static void crypto_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
301 struct aead_alg
*aead
= container_of(alg
, struct aead_alg
, base
);
303 seq_printf(m
, "type : aead\n");
304 seq_printf(m
, "async : %s\n", alg
->cra_flags
& CRYPTO_ALG_ASYNC
?
306 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
307 seq_printf(m
, "ivsize : %u\n", aead
->ivsize
);
308 seq_printf(m
, "maxauthsize : %u\n", aead
->maxauthsize
);
309 seq_printf(m
, "geniv : <none>\n");
312 static void crypto_aead_free_instance(struct crypto_instance
*inst
)
314 struct aead_instance
*aead
= aead_instance(inst
);
317 inst
->tmpl
->free(inst
);
324 static const struct crypto_type crypto_new_aead_type
= {
325 .extsize
= crypto_alg_extsize
,
326 .init_tfm
= crypto_aead_init_tfm
,
327 .free
= crypto_aead_free_instance
,
328 #ifdef CONFIG_PROC_FS
329 .show
= crypto_aead_show
,
331 .report
= crypto_aead_report
,
332 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
333 .maskset
= CRYPTO_ALG_TYPE_MASK
,
334 .type
= CRYPTO_ALG_TYPE_AEAD
,
335 .tfmsize
= offsetof(struct crypto_aead
, base
),
338 static int aead_null_givencrypt(struct aead_givcrypt_request
*req
)
340 return crypto_aead_encrypt(&req
->areq
);
343 static int aead_null_givdecrypt(struct aead_givcrypt_request
*req
)
345 return crypto_aead_decrypt(&req
->areq
);
349 static int crypto_nivaead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
351 struct crypto_report_aead raead
;
352 struct old_aead_alg
*aead
= &alg
->cra_aead
;
354 strncpy(raead
.type
, "nivaead", sizeof(raead
.type
));
355 strncpy(raead
.geniv
, aead
->geniv
, sizeof(raead
.geniv
));
357 raead
.blocksize
= alg
->cra_blocksize
;
358 raead
.maxauthsize
= aead
->maxauthsize
;
359 raead
.ivsize
= aead
->ivsize
;
361 if (nla_put(skb
, CRYPTOCFGA_REPORT_AEAD
,
362 sizeof(struct crypto_report_aead
), &raead
))
363 goto nla_put_failure
;
370 static int crypto_nivaead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
377 static void crypto_nivaead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
378 __attribute__ ((unused
));
379 static void crypto_nivaead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
381 struct old_aead_alg
*aead
= &alg
->cra_aead
;
383 seq_printf(m
, "type : nivaead\n");
384 seq_printf(m
, "async : %s\n", alg
->cra_flags
& CRYPTO_ALG_ASYNC
?
386 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
387 seq_printf(m
, "ivsize : %u\n", aead
->ivsize
);
388 seq_printf(m
, "maxauthsize : %u\n", aead
->maxauthsize
);
389 seq_printf(m
, "geniv : %s\n", aead
->geniv
);
392 const struct crypto_type crypto_nivaead_type
= {
393 .extsize
= crypto_alg_extsize
,
394 .init_tfm
= crypto_aead_init_tfm
,
395 #ifdef CONFIG_PROC_FS
396 .show
= crypto_nivaead_show
,
398 .report
= crypto_nivaead_report
,
399 .maskclear
= ~(CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_GENIV
),
400 .maskset
= CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_GENIV
,
401 .type
= CRYPTO_ALG_TYPE_AEAD
,
402 .tfmsize
= offsetof(struct crypto_aead
, base
),
404 EXPORT_SYMBOL_GPL(crypto_nivaead_type
);
406 static int crypto_grab_nivaead(struct crypto_aead_spawn
*spawn
,
407 const char *name
, u32 type
, u32 mask
)
409 spawn
->base
.frontend
= &crypto_nivaead_type
;
410 return crypto_grab_spawn(&spawn
->base
, name
, type
, mask
);
413 static int aead_geniv_setkey(struct crypto_aead
*tfm
,
414 const u8
*key
, unsigned int keylen
)
416 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(tfm
);
418 return crypto_aead_setkey(ctx
->child
, key
, keylen
);
421 static int aead_geniv_setauthsize(struct crypto_aead
*tfm
,
422 unsigned int authsize
)
424 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(tfm
);
426 return crypto_aead_setauthsize(ctx
->child
, authsize
);
429 static void compat_encrypt_complete2(struct aead_request
*req
, int err
)
431 struct compat_request_ctx
*rctx
= aead_request_ctx(req
);
432 struct aead_givcrypt_request
*subreq
= &rctx
->subreq
;
433 struct crypto_aead
*geniv
;
435 if (err
== -EINPROGRESS
)
441 geniv
= crypto_aead_reqtfm(req
);
442 scatterwalk_map_and_copy(subreq
->giv
, rctx
->ivsg
, 0,
443 crypto_aead_ivsize(geniv
), 1);
449 static void compat_encrypt_complete(struct crypto_async_request
*base
, int err
)
451 struct aead_request
*req
= base
->data
;
453 compat_encrypt_complete2(req
, err
);
454 aead_request_complete(req
, err
);
457 static int compat_encrypt(struct aead_request
*req
)
459 struct crypto_aead
*geniv
= crypto_aead_reqtfm(req
);
460 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(geniv
);
461 struct compat_request_ctx
*rctx
= aead_request_ctx(req
);
462 struct aead_givcrypt_request
*subreq
= &rctx
->subreq
;
463 unsigned int ivsize
= crypto_aead_ivsize(geniv
);
464 struct scatterlist
*src
, *dst
;
465 crypto_completion_t
compl;
471 if (req
->cryptlen
< ivsize
)
474 compl = req
->base
.complete
;
475 data
= req
->base
.data
;
477 rctx
->ivsg
= scatterwalk_ffwd(rctx
->ivbuf
, req
->dst
, req
->assoclen
);
478 info
= PageHighMem(sg_page(rctx
->ivsg
)) ? NULL
: sg_virt(rctx
->ivsg
);
481 info
= kmalloc(ivsize
, req
->base
.flags
&
482 CRYPTO_TFM_REQ_MAY_SLEEP
? GFP_KERNEL
:
487 compl = compat_encrypt_complete
;
491 memcpy(&seq
, req
->iv
+ ivsize
- sizeof(seq
), sizeof(seq
));
493 src
= scatterwalk_ffwd(rctx
->src
, req
->src
, req
->assoclen
+ ivsize
);
494 dst
= req
->src
== req
->dst
?
495 src
: scatterwalk_ffwd(rctx
->dst
, rctx
->ivsg
, ivsize
);
497 aead_givcrypt_set_tfm(subreq
, ctx
->child
);
498 aead_givcrypt_set_callback(subreq
, req
->base
.flags
,
499 req
->base
.complete
, req
->base
.data
);
500 aead_givcrypt_set_crypt(subreq
, src
, dst
,
501 req
->cryptlen
- ivsize
, req
->iv
);
502 aead_givcrypt_set_assoc(subreq
, req
->src
, req
->assoclen
);
503 aead_givcrypt_set_giv(subreq
, info
, be64_to_cpu(seq
));
505 err
= crypto_aead_givencrypt(subreq
);
506 if (unlikely(PageHighMem(sg_page(rctx
->ivsg
))))
507 compat_encrypt_complete2(req
, err
);
511 static int compat_decrypt(struct aead_request
*req
)
513 struct crypto_aead
*geniv
= crypto_aead_reqtfm(req
);
514 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(geniv
);
515 struct compat_request_ctx
*rctx
= aead_request_ctx(req
);
516 struct aead_request
*subreq
= &rctx
->subreq
.areq
;
517 unsigned int ivsize
= crypto_aead_ivsize(geniv
);
518 struct scatterlist
*src
, *dst
;
519 crypto_completion_t
compl;
522 if (req
->cryptlen
< ivsize
)
525 aead_request_set_tfm(subreq
, ctx
->child
);
527 compl = req
->base
.complete
;
528 data
= req
->base
.data
;
530 src
= scatterwalk_ffwd(rctx
->src
, req
->src
, req
->assoclen
+ ivsize
);
531 dst
= req
->src
== req
->dst
?
532 src
: scatterwalk_ffwd(rctx
->dst
, req
->dst
,
533 req
->assoclen
+ ivsize
);
535 aead_request_set_callback(subreq
, req
->base
.flags
, compl, data
);
536 aead_request_set_crypt(subreq
, src
, dst
,
537 req
->cryptlen
- ivsize
, req
->iv
);
538 aead_request_set_assoc(subreq
, req
->src
, req
->assoclen
);
540 scatterwalk_map_and_copy(req
->iv
, req
->src
, req
->assoclen
, ivsize
, 0);
542 return crypto_aead_decrypt(subreq
);
545 static int compat_encrypt_first(struct aead_request
*req
)
547 struct crypto_aead
*geniv
= crypto_aead_reqtfm(req
);
548 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(geniv
);
551 spin_lock_bh(&ctx
->lock
);
552 if (geniv
->encrypt
!= compat_encrypt_first
)
555 geniv
->encrypt
= compat_encrypt
;
558 spin_unlock_bh(&ctx
->lock
);
563 return compat_encrypt(req
);
566 static int aead_geniv_init_compat(struct crypto_tfm
*tfm
)
568 struct crypto_aead
*geniv
= __crypto_aead_cast(tfm
);
569 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(geniv
);
572 spin_lock_init(&ctx
->lock
);
574 crypto_aead_set_reqsize(geniv
, sizeof(struct compat_request_ctx
));
576 err
= aead_geniv_init(tfm
);
578 ctx
->child
= geniv
->child
;
579 geniv
->child
= geniv
;
584 static void aead_geniv_exit_compat(struct crypto_tfm
*tfm
)
586 struct crypto_aead
*geniv
= __crypto_aead_cast(tfm
);
587 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(geniv
);
589 crypto_free_aead(ctx
->child
);
592 struct aead_instance
*aead_geniv_alloc(struct crypto_template
*tmpl
,
593 struct rtattr
**tb
, u32 type
, u32 mask
)
596 struct crypto_aead_spawn
*spawn
;
597 struct crypto_attr_type
*algt
;
598 struct aead_instance
*inst
;
599 struct aead_alg
*alg
;
601 unsigned int maxauthsize
;
604 algt
= crypto_get_attr_type(tb
);
606 return ERR_CAST(algt
);
608 if ((algt
->type
^ (CRYPTO_ALG_TYPE_AEAD
| CRYPTO_ALG_GENIV
)) &
609 algt
->mask
& ~CRYPTO_ALG_AEAD_NEW
)
610 return ERR_PTR(-EINVAL
);
612 name
= crypto_attr_alg_name(tb
[1]);
614 return ERR_CAST(name
);
616 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
618 return ERR_PTR(-ENOMEM
);
620 spawn
= aead_instance_ctx(inst
);
622 /* Ignore async algorithms if necessary. */
623 mask
|= crypto_requires_sync(algt
->type
, algt
->mask
);
625 crypto_set_aead_spawn(spawn
, aead_crypto_instance(inst
));
626 err
= (algt
->mask
& CRYPTO_ALG_GENIV
) ?
627 crypto_grab_nivaead(spawn
, name
, type
, mask
) :
628 crypto_grab_aead(spawn
, name
, type
, mask
);
632 alg
= crypto_spawn_aead_alg(spawn
);
634 ivsize
= crypto_aead_alg_ivsize(alg
);
635 maxauthsize
= crypto_aead_alg_maxauthsize(alg
);
638 if (ivsize
< sizeof(u64
))
642 * This is only true if we're constructing an algorithm with its
643 * default IV generator. For the default generator we elide the
644 * template name and double-check the IV generator.
646 if (algt
->mask
& CRYPTO_ALG_GENIV
) {
647 if (!alg
->base
.cra_aead
.encrypt
)
649 if (strcmp(tmpl
->name
, alg
->base
.cra_aead
.geniv
))
652 memcpy(inst
->alg
.base
.cra_name
, alg
->base
.cra_name
,
653 CRYPTO_MAX_ALG_NAME
);
654 memcpy(inst
->alg
.base
.cra_driver_name
,
655 alg
->base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
);
657 inst
->alg
.base
.cra_flags
= CRYPTO_ALG_TYPE_AEAD
|
659 inst
->alg
.base
.cra_flags
|= alg
->base
.cra_flags
&
661 inst
->alg
.base
.cra_priority
= alg
->base
.cra_priority
;
662 inst
->alg
.base
.cra_blocksize
= alg
->base
.cra_blocksize
;
663 inst
->alg
.base
.cra_alignmask
= alg
->base
.cra_alignmask
;
664 inst
->alg
.base
.cra_type
= &crypto_aead_type
;
666 inst
->alg
.base
.cra_aead
.ivsize
= ivsize
;
667 inst
->alg
.base
.cra_aead
.maxauthsize
= maxauthsize
;
669 inst
->alg
.base
.cra_aead
.setkey
= alg
->base
.cra_aead
.setkey
;
670 inst
->alg
.base
.cra_aead
.setauthsize
=
671 alg
->base
.cra_aead
.setauthsize
;
672 inst
->alg
.base
.cra_aead
.encrypt
= alg
->base
.cra_aead
.encrypt
;
673 inst
->alg
.base
.cra_aead
.decrypt
= alg
->base
.cra_aead
.decrypt
;
679 if (snprintf(inst
->alg
.base
.cra_name
, CRYPTO_MAX_ALG_NAME
,
680 "%s(%s)", tmpl
->name
, alg
->base
.cra_name
) >=
683 if (snprintf(inst
->alg
.base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
684 "%s(%s)", tmpl
->name
, alg
->base
.cra_driver_name
) >=
688 inst
->alg
.base
.cra_flags
= alg
->base
.cra_flags
&
689 (CRYPTO_ALG_ASYNC
| CRYPTO_ALG_AEAD_NEW
);
690 inst
->alg
.base
.cra_priority
= alg
->base
.cra_priority
;
691 inst
->alg
.base
.cra_blocksize
= alg
->base
.cra_blocksize
;
692 inst
->alg
.base
.cra_alignmask
= alg
->base
.cra_alignmask
;
693 inst
->alg
.base
.cra_ctxsize
= sizeof(struct aead_geniv_ctx
);
695 inst
->alg
.setkey
= aead_geniv_setkey
;
696 inst
->alg
.setauthsize
= aead_geniv_setauthsize
;
698 inst
->alg
.ivsize
= ivsize
;
699 inst
->alg
.maxauthsize
= maxauthsize
;
701 inst
->alg
.encrypt
= compat_encrypt_first
;
702 inst
->alg
.decrypt
= compat_decrypt
;
704 inst
->alg
.base
.cra_init
= aead_geniv_init_compat
;
705 inst
->alg
.base
.cra_exit
= aead_geniv_exit_compat
;
711 crypto_drop_aead(spawn
);
717 EXPORT_SYMBOL_GPL(aead_geniv_alloc
);
/* Release a geniv instance: drop the inner spawn and free the memory. */
void aead_geniv_free(struct aead_instance *inst)
{
        crypto_drop_aead(aead_instance_ctx(inst));
        kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);
726 int aead_geniv_init(struct crypto_tfm
*tfm
)
728 struct crypto_instance
*inst
= (void *)tfm
->__crt_alg
;
729 struct crypto_aead
*child
;
730 struct crypto_aead
*aead
;
732 aead
= __crypto_aead_cast(tfm
);
734 child
= crypto_spawn_aead(crypto_instance_ctx(inst
));
736 return PTR_ERR(child
);
739 aead
->reqsize
+= crypto_aead_reqsize(child
);
743 EXPORT_SYMBOL_GPL(aead_geniv_init
);
745 void aead_geniv_exit(struct crypto_tfm
*tfm
)
747 crypto_free_aead(__crypto_aead_cast(tfm
)->child
);
749 EXPORT_SYMBOL_GPL(aead_geniv_exit
);
751 int aead_init_geniv(struct crypto_aead
*aead
)
753 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(aead
);
754 struct aead_instance
*inst
= aead_alg_instance(aead
);
755 struct crypto_aead
*child
;
758 spin_lock_init(&ctx
->lock
);
760 err
= crypto_get_default_rng();
764 err
= crypto_rng_get_bytes(crypto_default_rng
, ctx
->salt
,
765 crypto_aead_ivsize(aead
));
766 crypto_put_default_rng();
770 ctx
->null
= crypto_get_default_null_skcipher();
771 err
= PTR_ERR(ctx
->null
);
772 if (IS_ERR(ctx
->null
))
775 child
= crypto_spawn_aead(aead_instance_ctx(inst
));
776 err
= PTR_ERR(child
);
781 crypto_aead_set_reqsize(aead
, crypto_aead_reqsize(child
) +
782 sizeof(struct aead_request
));
790 crypto_put_default_null_skcipher();
793 EXPORT_SYMBOL_GPL(aead_init_geniv
);
795 void aead_exit_geniv(struct crypto_aead
*tfm
)
797 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(tfm
);
799 crypto_free_aead(ctx
->child
);
800 crypto_put_default_null_skcipher();
802 EXPORT_SYMBOL_GPL(aead_exit_geniv
);
804 static int crypto_nivaead_default(struct crypto_alg
*alg
, u32 type
, u32 mask
)
806 struct rtattr
*tb
[3];
809 struct crypto_attr_type data
;
813 struct crypto_attr_alg data
;
815 struct crypto_template
*tmpl
;
816 struct crypto_instance
*inst
;
817 struct crypto_alg
*larval
;
821 larval
= crypto_larval_lookup(alg
->cra_driver_name
,
822 CRYPTO_ALG_TYPE_AEAD
| CRYPTO_ALG_GENIV
,
823 CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_GENIV
);
824 err
= PTR_ERR(larval
);
829 if (!crypto_is_larval(larval
))
832 ptype
.attr
.rta_len
= sizeof(ptype
);
833 ptype
.attr
.rta_type
= CRYPTOA_TYPE
;
834 ptype
.data
.type
= type
| CRYPTO_ALG_GENIV
;
835 /* GENIV tells the template that we're making a default geniv. */
836 ptype
.data
.mask
= mask
| CRYPTO_ALG_GENIV
;
839 palg
.attr
.rta_len
= sizeof(palg
);
840 palg
.attr
.rta_type
= CRYPTOA_ALG
;
841 /* Must use the exact name to locate ourselves. */
842 memcpy(palg
.data
.name
, alg
->cra_driver_name
, CRYPTO_MAX_ALG_NAME
);
847 geniv
= alg
->cra_aead
.geniv
;
849 tmpl
= crypto_lookup_template(geniv
);
855 err
= tmpl
->create(tmpl
, tb
);
861 inst
= tmpl
->alloc(tb
);
866 err
= crypto_register_instance(tmpl
, inst
);
873 /* Redo the lookup to use the instance we just registered. */
877 crypto_tmpl_put(tmpl
);
879 crypto_larval_kill(larval
);
881 crypto_mod_put(larval
);
887 struct crypto_alg
*crypto_lookup_aead(const char *name
, u32 type
, u32 mask
)
889 struct crypto_alg
*alg
;
891 alg
= crypto_alg_mod_lookup(name
, type
, mask
);
895 if (alg
->cra_type
== &crypto_aead_type
)
898 if (!alg
->cra_aead
.ivsize
)
902 alg
= crypto_alg_mod_lookup(name
, type
| CRYPTO_ALG_TESTED
,
903 mask
& ~CRYPTO_ALG_TESTED
);
907 if (alg
->cra_type
== &crypto_aead_type
) {
908 if (~alg
->cra_flags
& (type
^ ~mask
) & CRYPTO_ALG_TESTED
) {
910 alg
= ERR_PTR(-ENOENT
);
915 BUG_ON(!alg
->cra_aead
.ivsize
);
917 return ERR_PTR(crypto_nivaead_default(alg
, type
, mask
));
919 EXPORT_SYMBOL_GPL(crypto_lookup_aead
);
921 int crypto_grab_aead(struct crypto_aead_spawn
*spawn
, const char *name
,
924 spawn
->base
.frontend
= &crypto_aead_type
;
925 return crypto_grab_spawn(&spawn
->base
, name
, type
, mask
);
927 EXPORT_SYMBOL_GPL(crypto_grab_aead
);
929 struct crypto_aead
*crypto_alloc_aead(const char *alg_name
, u32 type
, u32 mask
)
931 return crypto_alloc_tfm(alg_name
, &crypto_aead_type
, type
, mask
);
933 EXPORT_SYMBOL_GPL(crypto_alloc_aead
);
935 static int aead_prepare_alg(struct aead_alg
*alg
)
937 struct crypto_alg
*base
= &alg
->base
;
939 if (max(alg
->maxauthsize
, alg
->ivsize
) > PAGE_SIZE
/ 8)
942 base
->cra_type
= &crypto_new_aead_type
;
943 base
->cra_flags
&= ~CRYPTO_ALG_TYPE_MASK
;
944 base
->cra_flags
|= CRYPTO_ALG_TYPE_AEAD
;
949 int crypto_register_aead(struct aead_alg
*alg
)
951 struct crypto_alg
*base
= &alg
->base
;
954 err
= aead_prepare_alg(alg
);
958 return crypto_register_alg(base
);
960 EXPORT_SYMBOL_GPL(crypto_register_aead
);
962 void crypto_unregister_aead(struct aead_alg
*alg
)
964 crypto_unregister_alg(&alg
->base
);
966 EXPORT_SYMBOL_GPL(crypto_unregister_aead
);
968 int crypto_register_aeads(struct aead_alg
*algs
, int count
)
972 for (i
= 0; i
< count
; i
++) {
973 ret
= crypto_register_aead(&algs
[i
]);
981 for (--i
; i
>= 0; --i
)
982 crypto_unregister_aead(&algs
[i
]);
986 EXPORT_SYMBOL_GPL(crypto_register_aeads
);
988 void crypto_unregister_aeads(struct aead_alg
*algs
, int count
)
992 for (i
= count
- 1; i
>= 0; --i
)
993 crypto_unregister_aead(&algs
[i
]);
995 EXPORT_SYMBOL_GPL(crypto_unregister_aeads
);
997 int aead_register_instance(struct crypto_template
*tmpl
,
998 struct aead_instance
*inst
)
1002 err
= aead_prepare_alg(&inst
->alg
);
1006 return crypto_register_instance(tmpl
, aead_crypto_instance(inst
));
1008 EXPORT_SYMBOL_GPL(aead_register_instance
);
1010 MODULE_LICENSE("GPL");
1011 MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");