/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005, 2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *	      Sebastian Siewior (sebastian@breakpoint.cc) SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#define AES_KEYLEN_128	1
#define AES_KEYLEN_192	2
#define AES_KEYLEN_256	4
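/*
 * ctrblk is a page of pre-computed counter blocks shared by all CTR mode
 * users and protected by ctrblk_lock; keylen_flag caches which AES key
 * lengths the CPACF instructions on this machine support (set at init).
 */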
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;
struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};
/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback
 * is required, or a negative number in case the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}
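/*
 * Propagate the CRYPTO_TFM_REQ_* flags to the fallback tfm before setkey
 * and copy the CRYPTO_TFM_RES_* flags back on failure, so the caller sees
 * the result flags of the software implementation.
 */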
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		cpacf_km(CPACF_KM_AES_128_ENC, &sctx->key, out, in,
			 AES_BLOCK_SIZE);
		break;
	case 24:
		cpacf_km(CPACF_KM_AES_192_ENC, &sctx->key, out, in,
			 AES_BLOCK_SIZE);
		break;
	case 32:
		cpacf_km(CPACF_KM_AES_256_ENC, &sctx->key, out, in,
			 AES_BLOCK_SIZE);
		break;
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		cpacf_km(CPACF_KM_AES_128_DEC, &sctx->key, out, in,
			 AES_BLOCK_SIZE);
		break;
	case 24:
		cpacf_km(CPACF_KM_AES_192_DEC, &sctx->key, out, in,
			 AES_BLOCK_SIZE);
		break;
	case 32:
		cpacf_km(CPACF_KM_AES_256_DEC, &sctx->key, out, in,
			 AES_BLOCK_SIZE);
		break;
	}
}
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};
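/*
 * The blkcipher fallback below mirrors the plain cipher fallback above:
 * request flags are forwarded to the software blkcipher and its result
 * flags are reported back on failure.
 */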
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = CPACF_KM_AES_128_ENC;
		sctx->dec = CPACF_KM_AES_128_DEC;
		break;
	case 24:
		sctx->enc = CPACF_KM_AES_192_ENC;
		sctx->dec = CPACF_KM_AES_192_DEC;
		break;
	case 32:
		sctx->enc = CPACF_KM_AES_256_ENC;
		sctx->dec = CPACF_KM_AES_256_DEC;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
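/*
 * Process the data returned by the blkcipher walk in multiples of
 * AES_BLOCK_SIZE; any remainder smaller than one block is handed back to
 * blkcipher_walk_done() and shows up again in the next iteration.
 */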
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = cpacf_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}
static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}
static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= 400,	/* combo: aes + ecb */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = CPACF_KMC_AES_128_ENC;
		sctx->dec = CPACF_KMC_AES_128_DEC;
		break;
	case 24:
		sctx->enc = CPACF_KMC_AES_192_ENC;
		sctx->dec = CPACF_KMC_AES_192_DEC;
		break;
	case 32:
		sctx->enc = CPACF_KMC_AES_256_ENC;
		sctx->dec = CPACF_KMC_AES_256_DEC;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
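/*
 * The KMC instruction takes a parameter block containing the chaining
 * value (IV) followed by the key. KMC updates the chaining value in
 * place, so it is copied back to walk->iv once all blocks are done.
 */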
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = cpacf_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
	return ret;
}
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);
}
static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);
}
static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= 400,	/* combo: aes + cbc */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};
static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	err = xts_check_key(tfm, in_key, key_len);
	if (err)
		return err;

	switch (key_len) {
	case 32:
		xts_ctx->enc = CPACF_KM_XTS_128_ENC;
		xts_ctx->dec = CPACF_KM_XTS_128_DEC;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		xts_fallback_setkey(tfm, in_key, key_len);
		break;
	case 64:
		xts_ctx->enc = CPACF_KM_XTS_256_ENC;
		xts_ctx->dec = CPACF_KM_XTS_256_DEC;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc_key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}
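/*
 * PCC computes the XTS tweak from the second half of the double-length
 * key before KM processes the data with the first half. The offset
 * (key_len >> 1) & 0x10 is 0 for AES-256-XTS and 16 for AES-128-XTS,
 * where xts_aes_set_key stored the short keys at offset 16 of the
 * 32-byte key buffers.
 */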
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	struct pcc_param pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	if (!nbytes)
		goto out;

	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
	/* remove decipher modifier bit from 'func' and call PCC */
	ret = cpacf_pcc(func & 0x7f, &pcc_param.key[offset]);
	if (ret < 0)
		return -EIO;

	memcpy(xts_param.key, xts_ctx->key, 32);
	memcpy(xts_param.init, pcc_param.xts, 16);
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = cpacf_km(func, &xts_param.key[offset], out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}
static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}
static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}
static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}

	return 0;
}
static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}
static struct crypto_alg xts_aes_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-s390",
	.cra_priority		= 400,	/* combo: aes + xts */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= xts_fallback_init,
	.cra_exit		= xts_fallback_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aes_set_key,
			.encrypt	= xts_aes_encrypt,
			.decrypt	= xts_aes_decrypt,
		}
	}
};
static int xts_aes_alg_reg;
static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = CPACF_KMCTR_AES_128_ENC;
		sctx->dec = CPACF_KMCTR_AES_128_DEC;
		break;
	case 24:
		sctx->enc = CPACF_KMCTR_AES_192_ENC;
		sctx->dec = CPACF_KMCTR_AES_192_DEC;
		break;
	case 32:
		sctx->enc = CPACF_KMCTR_AES_256_ENC;
		sctx->dec = CPACF_KMCTR_AES_256_DEC;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
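/*
 * Fill the shared counter page with consecutive counter values, so a
 * single KMCTR call can process up to PAGE_SIZE bytes at once. Returns
 * the number of bytes covered by the prepared counter blocks.
 */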
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
		       AES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
	}
	return n;
}
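/*
 * Try to grab the shared counter page; if it is contended, fall back to
 * a single counter block on the stack and process one block per KMCTR
 * call instead of blocking on the lock.
 */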
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;

	if (!walk->nbytes)
		return ret;

	if (spin_trylock(&ctrblk_lock))
		ctrptr = ctrblk;

	memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			else
				n = AES_BLOCK_SIZE;
			ret = cpacf_kmctr(func, sctx->key, out, in, n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
				return -EIO;
			}
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrptr, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	if (ctrptr == ctrblk) {
		if (nbytes)
			memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
		else
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	} else {
		if (!nbytes)
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = cpacf_kmctr(func, sctx->key, buf, in,
				  AES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
	}

	return ret;
}
static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}
static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}
static struct crypto_alg ctr_aes_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-s390",
	.cra_priority		= 400,	/* combo: aes + ctr */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_aes_set_key,
			.encrypt	= ctr_aes_encrypt,
			.decrypt	= ctr_aes_decrypt,
		}
	}
};
static int ctr_aes_alg_reg;
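/*
 * Probe the CPACF facilities with cpacf_query() and register the XTS and
 * CTR algorithms only when the machine supports them; the plain cipher
 * and the ECB/CBC modes rely on the software fallback for key lengths
 * the hardware lacks.
 */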
static int __init aes_s390_init(void)
{
	int ret;

	if (cpacf_query(CPACF_KM, CPACF_KM_AES_128_ENC))
		keylen_flag |= AES_KEYLEN_128;
	if (cpacf_query(CPACF_KM, CPACF_KM_AES_192_ENC))
		keylen_flag |= AES_KEYLEN_192;
	if (cpacf_query(CPACF_KM, CPACF_KM_AES_256_ENC))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (cpacf_query(CPACF_KM, CPACF_KM_XTS_128_ENC) &&
	    cpacf_query(CPACF_KM, CPACF_KM_XTS_256_ENC)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		xts_aes_alg_reg = 1;
	}

	if (cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_128_ENC) &&
	    cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_192_ENC) &&
	    cpacf_query(CPACF_KMCTR, CPACF_KMCTR_AES_256_ENC)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
static void __exit aes_s390_fini(void)
{
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	}
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}
module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");