/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright (C) 2005 IBM Deutschland GmbH, IBM Corporation
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
19 #include <crypto/algapi.h>
20 #include <linux/module.h>
21 #include <linux/init.h>
22 #include "crypt_s390.h"
24 #define AES_MIN_KEY_SIZE 16
25 #define AES_MAX_KEY_SIZE 32
27 /* data block size for all key lengths */
28 #define AES_BLOCK_SIZE 16
35 u8 iv
[AES_BLOCK_SIZE
];
36 u8 key
[AES_MAX_KEY_SIZE
];
42 static int aes_set_key(struct crypto_tfm
*tfm
, const u8
*in_key
,
45 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
46 u32
*flags
= &tfm
->crt_flags
;
63 /* invalid key length */
68 sctx
->key_len
= key_len
;
69 memcpy(sctx
->key
, in_key
, key_len
);
72 *flags
|= CRYPTO_TFM_RES_BAD_KEY_LEN
;
76 static void aes_encrypt(struct crypto_tfm
*tfm
, u8
*out
, const u8
*in
)
78 const struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
80 switch (sctx
->key_len
) {
82 crypt_s390_km(KM_AES_128_ENCRYPT
, &sctx
->key
, out
, in
,
86 crypt_s390_km(KM_AES_192_ENCRYPT
, &sctx
->key
, out
, in
,
90 crypt_s390_km(KM_AES_256_ENCRYPT
, &sctx
->key
, out
, in
,
96 static void aes_decrypt(struct crypto_tfm
*tfm
, u8
*out
, const u8
*in
)
98 const struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
100 switch (sctx
->key_len
) {
102 crypt_s390_km(KM_AES_128_DECRYPT
, &sctx
->key
, out
, in
,
106 crypt_s390_km(KM_AES_192_DECRYPT
, &sctx
->key
, out
, in
,
110 crypt_s390_km(KM_AES_256_DECRYPT
, &sctx
->key
, out
, in
,
116 static unsigned int aes_encrypt_ecb(const struct cipher_desc
*desc
, u8
*out
,
117 const u8
*in
, unsigned int nbytes
)
119 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(desc
->tfm
);
122 /* only use complete blocks */
123 nbytes
&= ~(AES_BLOCK_SIZE
- 1);
125 switch (sctx
->key_len
) {
127 ret
= crypt_s390_km(KM_AES_128_ENCRYPT
, &sctx
->key
, out
, in
, nbytes
);
128 BUG_ON((ret
< 0) || (ret
!= nbytes
));
131 ret
= crypt_s390_km(KM_AES_192_ENCRYPT
, &sctx
->key
, out
, in
, nbytes
);
132 BUG_ON((ret
< 0) || (ret
!= nbytes
));
135 ret
= crypt_s390_km(KM_AES_256_ENCRYPT
, &sctx
->key
, out
, in
, nbytes
);
136 BUG_ON((ret
< 0) || (ret
!= nbytes
));
142 static unsigned int aes_decrypt_ecb(const struct cipher_desc
*desc
, u8
*out
,
143 const u8
*in
, unsigned int nbytes
)
145 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(desc
->tfm
);
148 /* only use complete blocks */
149 nbytes
&= ~(AES_BLOCK_SIZE
- 1);
151 switch (sctx
->key_len
) {
153 ret
= crypt_s390_km(KM_AES_128_DECRYPT
, &sctx
->key
, out
, in
, nbytes
);
154 BUG_ON((ret
< 0) || (ret
!= nbytes
));
157 ret
= crypt_s390_km(KM_AES_192_DECRYPT
, &sctx
->key
, out
, in
, nbytes
);
158 BUG_ON((ret
< 0) || (ret
!= nbytes
));
161 ret
= crypt_s390_km(KM_AES_256_DECRYPT
, &sctx
->key
, out
, in
, nbytes
);
162 BUG_ON((ret
< 0) || (ret
!= nbytes
));
168 static unsigned int aes_encrypt_cbc(const struct cipher_desc
*desc
, u8
*out
,
169 const u8
*in
, unsigned int nbytes
)
171 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(desc
->tfm
);
174 /* only use complete blocks */
175 nbytes
&= ~(AES_BLOCK_SIZE
- 1);
177 memcpy(&sctx
->iv
, desc
->info
, AES_BLOCK_SIZE
);
178 switch (sctx
->key_len
) {
180 ret
= crypt_s390_kmc(KMC_AES_128_ENCRYPT
, &sctx
->iv
, out
, in
, nbytes
);
181 BUG_ON((ret
< 0) || (ret
!= nbytes
));
184 ret
= crypt_s390_kmc(KMC_AES_192_ENCRYPT
, &sctx
->iv
, out
, in
, nbytes
);
185 BUG_ON((ret
< 0) || (ret
!= nbytes
));
188 ret
= crypt_s390_kmc(KMC_AES_256_ENCRYPT
, &sctx
->iv
, out
, in
, nbytes
);
189 BUG_ON((ret
< 0) || (ret
!= nbytes
));
192 memcpy(desc
->info
, &sctx
->iv
, AES_BLOCK_SIZE
);
197 static unsigned int aes_decrypt_cbc(const struct cipher_desc
*desc
, u8
*out
,
198 const u8
*in
, unsigned int nbytes
)
200 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(desc
->tfm
);
203 /* only use complete blocks */
204 nbytes
&= ~(AES_BLOCK_SIZE
- 1);
206 memcpy(&sctx
->iv
, desc
->info
, AES_BLOCK_SIZE
);
207 switch (sctx
->key_len
) {
209 ret
= crypt_s390_kmc(KMC_AES_128_DECRYPT
, &sctx
->iv
, out
, in
, nbytes
);
210 BUG_ON((ret
< 0) || (ret
!= nbytes
));
213 ret
= crypt_s390_kmc(KMC_AES_192_DECRYPT
, &sctx
->iv
, out
, in
, nbytes
);
214 BUG_ON((ret
< 0) || (ret
!= nbytes
));
217 ret
= crypt_s390_kmc(KMC_AES_256_DECRYPT
, &sctx
->iv
, out
, in
, nbytes
);
218 BUG_ON((ret
< 0) || (ret
!= nbytes
));
225 static struct crypto_alg aes_alg
= {
227 .cra_driver_name
= "aes-s390",
228 .cra_priority
= CRYPT_S390_PRIORITY
,
229 .cra_flags
= CRYPTO_ALG_TYPE_CIPHER
,
230 .cra_blocksize
= AES_BLOCK_SIZE
,
231 .cra_ctxsize
= sizeof(struct s390_aes_ctx
),
232 .cra_module
= THIS_MODULE
,
233 .cra_list
= LIST_HEAD_INIT(aes_alg
.cra_list
),
236 .cia_min_keysize
= AES_MIN_KEY_SIZE
,
237 .cia_max_keysize
= AES_MAX_KEY_SIZE
,
238 .cia_setkey
= aes_set_key
,
239 .cia_encrypt
= aes_encrypt
,
240 .cia_decrypt
= aes_decrypt
,
241 .cia_encrypt_ecb
= aes_encrypt_ecb
,
242 .cia_decrypt_ecb
= aes_decrypt_ecb
,
243 .cia_encrypt_cbc
= aes_encrypt_cbc
,
244 .cia_decrypt_cbc
= aes_decrypt_cbc
,
249 static int ecb_aes_set_key(struct crypto_tfm
*tfm
, const u8
*in_key
,
250 unsigned int key_len
)
252 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
256 sctx
->enc
= KM_AES_128_ENCRYPT
;
257 sctx
->dec
= KM_AES_128_DECRYPT
;
260 sctx
->enc
= KM_AES_192_ENCRYPT
;
261 sctx
->dec
= KM_AES_192_DECRYPT
;
264 sctx
->enc
= KM_AES_256_ENCRYPT
;
265 sctx
->dec
= KM_AES_256_DECRYPT
;
269 return aes_set_key(tfm
, in_key
, key_len
);
272 static int ecb_aes_crypt(struct blkcipher_desc
*desc
, long func
, void *param
,
273 struct blkcipher_walk
*walk
)
275 int ret
= blkcipher_walk_virt(desc
, walk
);
278 while ((nbytes
= walk
->nbytes
)) {
279 /* only use complete blocks */
280 unsigned int n
= nbytes
& ~(AES_BLOCK_SIZE
- 1);
281 u8
*out
= walk
->dst
.virt
.addr
;
282 u8
*in
= walk
->src
.virt
.addr
;
284 ret
= crypt_s390_km(func
, param
, out
, in
, n
);
285 BUG_ON((ret
< 0) || (ret
!= n
));
287 nbytes
&= AES_BLOCK_SIZE
- 1;
288 ret
= blkcipher_walk_done(desc
, walk
, nbytes
);
294 static int ecb_aes_encrypt(struct blkcipher_desc
*desc
,
295 struct scatterlist
*dst
, struct scatterlist
*src
,
298 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
299 struct blkcipher_walk walk
;
301 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
302 return ecb_aes_crypt(desc
, sctx
->enc
, sctx
->key
, &walk
);
305 static int ecb_aes_decrypt(struct blkcipher_desc
*desc
,
306 struct scatterlist
*dst
, struct scatterlist
*src
,
309 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
310 struct blkcipher_walk walk
;
312 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
313 return ecb_aes_crypt(desc
, sctx
->dec
, sctx
->key
, &walk
);
316 static struct crypto_alg ecb_aes_alg
= {
317 .cra_name
= "ecb(aes)",
318 .cra_driver_name
= "ecb-aes-s390",
319 .cra_priority
= CRYPT_S390_COMPOSITE_PRIORITY
,
320 .cra_flags
= CRYPTO_ALG_TYPE_BLKCIPHER
,
321 .cra_blocksize
= AES_BLOCK_SIZE
,
322 .cra_ctxsize
= sizeof(struct s390_aes_ctx
),
323 .cra_type
= &crypto_blkcipher_type
,
324 .cra_module
= THIS_MODULE
,
325 .cra_list
= LIST_HEAD_INIT(ecb_aes_alg
.cra_list
),
328 .min_keysize
= AES_MIN_KEY_SIZE
,
329 .max_keysize
= AES_MAX_KEY_SIZE
,
330 .setkey
= ecb_aes_set_key
,
331 .encrypt
= ecb_aes_encrypt
,
332 .decrypt
= ecb_aes_decrypt
,
337 static int cbc_aes_set_key(struct crypto_tfm
*tfm
, const u8
*in_key
,
338 unsigned int key_len
)
340 struct s390_aes_ctx
*sctx
= crypto_tfm_ctx(tfm
);
344 sctx
->enc
= KMC_AES_128_ENCRYPT
;
345 sctx
->dec
= KMC_AES_128_DECRYPT
;
348 sctx
->enc
= KMC_AES_192_ENCRYPT
;
349 sctx
->dec
= KMC_AES_192_DECRYPT
;
352 sctx
->enc
= KMC_AES_256_ENCRYPT
;
353 sctx
->dec
= KMC_AES_256_DECRYPT
;
357 return aes_set_key(tfm
, in_key
, key_len
);
360 static int cbc_aes_crypt(struct blkcipher_desc
*desc
, long func
, void *param
,
361 struct blkcipher_walk
*walk
)
363 int ret
= blkcipher_walk_virt(desc
, walk
);
364 unsigned int nbytes
= walk
->nbytes
;
369 memcpy(param
, walk
->iv
, AES_BLOCK_SIZE
);
371 /* only use complete blocks */
372 unsigned int n
= nbytes
& ~(AES_BLOCK_SIZE
- 1);
373 u8
*out
= walk
->dst
.virt
.addr
;
374 u8
*in
= walk
->src
.virt
.addr
;
376 ret
= crypt_s390_kmc(func
, param
, out
, in
, n
);
377 BUG_ON((ret
< 0) || (ret
!= n
));
379 nbytes
&= AES_BLOCK_SIZE
- 1;
380 ret
= blkcipher_walk_done(desc
, walk
, nbytes
);
381 } while ((nbytes
= walk
->nbytes
));
382 memcpy(walk
->iv
, param
, AES_BLOCK_SIZE
);
388 static int cbc_aes_encrypt(struct blkcipher_desc
*desc
,
389 struct scatterlist
*dst
, struct scatterlist
*src
,
392 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
393 struct blkcipher_walk walk
;
395 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
396 return cbc_aes_crypt(desc
, sctx
->enc
, sctx
->iv
, &walk
);
399 static int cbc_aes_decrypt(struct blkcipher_desc
*desc
,
400 struct scatterlist
*dst
, struct scatterlist
*src
,
403 struct s390_aes_ctx
*sctx
= crypto_blkcipher_ctx(desc
->tfm
);
404 struct blkcipher_walk walk
;
406 blkcipher_walk_init(&walk
, dst
, src
, nbytes
);
407 return cbc_aes_crypt(desc
, sctx
->dec
, sctx
->iv
, &walk
);
410 static struct crypto_alg cbc_aes_alg
= {
411 .cra_name
= "cbc(aes)",
412 .cra_driver_name
= "cbc-aes-s390",
413 .cra_priority
= CRYPT_S390_COMPOSITE_PRIORITY
,
414 .cra_flags
= CRYPTO_ALG_TYPE_BLKCIPHER
,
415 .cra_blocksize
= AES_BLOCK_SIZE
,
416 .cra_ctxsize
= sizeof(struct s390_aes_ctx
),
417 .cra_type
= &crypto_blkcipher_type
,
418 .cra_module
= THIS_MODULE
,
419 .cra_list
= LIST_HEAD_INIT(cbc_aes_alg
.cra_list
),
422 .min_keysize
= AES_MIN_KEY_SIZE
,
423 .max_keysize
= AES_MAX_KEY_SIZE
,
424 .ivsize
= AES_BLOCK_SIZE
,
425 .setkey
= cbc_aes_set_key
,
426 .encrypt
= cbc_aes_encrypt
,
427 .decrypt
= cbc_aes_decrypt
,
432 static int __init
aes_init(void)
436 if (crypt_s390_func_available(KM_AES_128_ENCRYPT
))
438 if (crypt_s390_func_available(KM_AES_192_ENCRYPT
))
440 if (crypt_s390_func_available(KM_AES_256_ENCRYPT
))
443 if (!has_aes_128
&& !has_aes_192
&& !has_aes_256
)
446 ret
= crypto_register_alg(&aes_alg
);
448 printk(KERN_INFO
"crypt_s390: aes-s390 couldn't be loaded.\n");
452 ret
= crypto_register_alg(&ecb_aes_alg
);
455 "crypt_s390: ecb-aes-s390 couldn't be loaded.\n");
459 ret
= crypto_register_alg(&cbc_aes_alg
);
462 "crypt_s390: cbc-aes-s390 couldn't be loaded.\n");
470 crypto_unregister_alg(&ecb_aes_alg
);
472 crypto_unregister_alg(&aes_alg
);
477 static void __exit
aes_fini(void)
479 crypto_unregister_alg(&cbc_aes_alg
);
480 crypto_unregister_alg(&ecb_aes_alg
);
481 crypto_unregister_alg(&aes_alg
);
484 module_init(aes_init
);
485 module_exit(aes_fini
);
489 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
490 MODULE_LICENSE("GPL");