/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 * Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <asm/i387.h>
#include <asm/aes.h>

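/*
 * The LRW/PCBC/XTS and RFC3686 wrappers further down are only built when
 * the corresponding generic template is available, either built in or as
 * a module.
 */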
#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

struct async_aes_ctx {
        struct cryptd_ablkcipher *cryptd_tfm;
};

#define AESNI_ALIGN     16
#define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE-1))

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

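/*
 * The AES-NI routines load the expanded key with 16-byte aligned SSE moves,
 * but the crypto API only guarantees crypto_tfm_ctx_alignment().  The
 * context is therefore over-allocated by AESNI_ALIGN - 1 bytes (see the
 * cra_ctxsize fields below) and re-aligned by hand here.
 */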
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

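/*
 * AES-NI instructions use SSE registers, so they can only run where the FPU
 * is usable.  Whenever irq_fpu_usable() says it is not (e.g. in an interrupt
 * that arrived while user space owned the FPU state), fall back to the
 * generic x86 software implementation instead.
 */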
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (!irq_fpu_usable())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}

static struct crypto_alg aesni_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-aesni",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(aesni_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt
                }
        }
};

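/*
 * "__aes-aesni" is an internal helper cipher: priority 0 keeps it from being
 * selected directly, and it omits the irq_fpu_usable() check and the
 * kernel_fpu_begin()/end() bracketing because its only users are wrapper
 * templates that manage the FPU state themselves.
 */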
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
        .cra_name               = "__aes-aesni",
        .cra_driver_name        = "__driver-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(__aesni_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = __aes_encrypt,
                        .cia_decrypt            = __aes_decrypt
                }
        }
};

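/*
 * The blkcipher handlers below perform the whole scatterlist walk inside a
 * single kernel_fpu_begin()/end() section.  CRYPTO_TFM_REQ_MAY_SLEEP is
 * cleared up front because blkcipher_walk_done() is then called with the
 * FPU context held, where sleeping is not allowed.
 */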
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_ecb_alg = {
        .cra_name               = "__ecb-aes-aesni",
        .cra_driver_name        = "__driver-ecb-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
};

static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_cbc_alg = {
        .cra_name               = "__cbc-aes-aesni",
        .cra_driver_name        = "__driver-cbc-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
};

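/*
 * Handle the final, partial block of a CTR request: encrypt the counter
 * block into a keystream buffer, XOR only the remaining nbytes of source
 * into the destination, and advance the counter.
 */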
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
                            struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        aesni_enc(ctx, keystream, ctrblk);
        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct blkcipher_desc *desc,
                     struct scatterlist *dst, struct scatterlist *src,
                     unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        kernel_fpu_end();

        return err;
}

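/* CTR mode turns AES into a stream cipher, hence cra_blocksize = 1. */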
static struct crypto_alg blk_ctr_alg = {
        .cra_name               = "__ctr-aes-aesni",
        .cra_driver_name        = "__driver-ctr-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                },
        },
};

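/*
 * The ablk_* handlers expose the block modes as asynchronous ciphers.  When
 * the FPU is usable, a request is served synchronously through the inner
 * blkcipher; otherwise it is copied and deferred to the cryptd workqueue,
 * which runs in process context where AES-NI may be used.
 *
 * A minimal usage sketch (not part of this file, error handling omitted):
 * callers normally reach these implementations through the generic API,
 *
 *      tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *
 * which resolves to cbc-aes-aesni below as long as its priority (400)
 * exceeds that of the competing implementations.
 */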
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
                        unsigned int key_len)
{
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
                                    & CRYPTO_TFM_REQ_MASK);
        err = crypto_ablkcipher_setkey(child, key, key_len);
        crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
                                    & CRYPTO_TFM_RES_MASK);
        return err;
}

static int ablk_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_encrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->encrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_decrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->decrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static void ablk_exit(struct crypto_tfm *tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

static void ablk_init_common(struct crypto_tfm *tfm,
                             struct cryptd_ablkcipher *cryptd_tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->cryptd_tfm = cryptd_tfm;
        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
                crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ecb_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
        .cra_init               = ablk_ecb_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_cbc_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
        .cra_init               = ablk_cbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};

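/*
 * CTR decryption is the same operation as CTR encryption, which is why
 * .decrypt in the following definition points at ablk_encrypt as well.
 */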
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ctr_alg = {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
        .cra_init               = ablk_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_encrypt,
                        .geniv          = "chainiv",
                },
        },
};

#ifdef HAS_CTR
static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher(
                "rfc3686(__driver-ctr-aes-aesni)", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_rfc3686_ctr_alg = {
        .cra_name               = "rfc3686(ctr(aes))",
        .cra_driver_name        = "rfc3686-ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
        .cra_init               = ablk_rfc3686_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .ivsize      = CTR_RFC3686_IV_SIZE,
                        .setkey      = ablk_set_key,
                        .encrypt     = ablk_encrypt,
                        .decrypt     = ablk_decrypt,
                        .geniv       = "seqiv",
                },
        },
};
#endif

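/*
 * LRW, PCBC and XTS reuse the generic templates around the internal
 * "__driver-aes-aesni" cipher.  The fpu() template is expected to wrap the
 * whole walk in kernel_fpu_begin()/end(), so the inner cipher can use
 * AES-NI without per-block FPU checks.
 */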
#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_lrw_alg = {
        .cra_name               = "lrw(aes)",
        .cra_driver_name        = "lrw-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
        .cra_init               = ablk_lrw_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
        .cra_name               = "pcbc(aes)",
        .cra_driver_name        = "pcbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
        .cra_init               = ablk_pcbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_xts_alg = {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
        .cra_init               = ablk_xts_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

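/*
 * Register the algorithms in dependency order; on failure, unwind the
 * registrations done so far in reverse order via the goto ladder.
 */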
static int __init aesni_init(void)
{
        int err;

        if (!cpu_has_aes) {
                printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
                return -ENODEV;
        }
        if ((err = crypto_register_alg(&aesni_alg)))
                goto aes_err;
        if ((err = crypto_register_alg(&__aesni_alg)))
                goto __aes_err;
        if ((err = crypto_register_alg(&blk_ecb_alg)))
                goto blk_ecb_err;
        if ((err = crypto_register_alg(&blk_cbc_alg)))
                goto blk_cbc_err;
        if ((err = crypto_register_alg(&blk_ctr_alg)))
                goto blk_ctr_err;
        if ((err = crypto_register_alg(&ablk_ecb_alg)))
                goto ablk_ecb_err;
        if ((err = crypto_register_alg(&ablk_cbc_alg)))
                goto ablk_cbc_err;
        if ((err = crypto_register_alg(&ablk_ctr_alg)))
                goto ablk_ctr_err;
#ifdef HAS_CTR
        if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
                goto ablk_rfc3686_ctr_err;
#endif
#ifdef HAS_LRW
        if ((err = crypto_register_alg(&ablk_lrw_alg)))
                goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
        if ((err = crypto_register_alg(&ablk_pcbc_alg)))
                goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
        if ((err = crypto_register_alg(&ablk_xts_alg)))
                goto ablk_xts_err;
#endif

        return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
ablk_rfc3686_ctr_err:
#endif
        crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
        crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
        crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
        crypto_unregister_alg(&blk_ctr_alg);
blk_ctr_err:
        crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
        crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
        crypto_unregister_alg(&__aesni_alg);
__aes_err:
        crypto_unregister_alg(&aesni_alg);
aes_err:
        return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
        crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
#endif
        crypto_unregister_alg(&ablk_ctr_alg);
        crypto_unregister_alg(&ablk_cbc_alg);
        crypto_unregister_alg(&ablk_ecb_alg);
        crypto_unregister_alg(&blk_ctr_alg);
        crypto_unregister_alg(&blk_cbc_alg);
        crypto_unregister_alg(&blk_ecb_alg);
        crypto_unregister_alg(&__aesni_alg);
        crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

836
837MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
838MODULE_LICENSE("GPL");
839MODULE_ALIAS("aes");