arch/s390/crypto/aes_s390.c
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright (C) 2005 IBM Deutschland GmbH, IBM Corporation
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_MIN_KEY_SIZE	16
#define AES_MAX_KEY_SIZE	32

/* data block size for all key lengths */
#define AES_BLOCK_SIZE		16

/* which AES key sizes the hardware supports, probed once at init */
static int has_aes_128;
static int has_aes_192;
static int has_aes_256;

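/*
 * Per-transform context.  No software key schedule is kept: the CPACF
 * instructions expand the raw key themselves.  The layout of iv directly
 * followed by key is deliberate; together they form the KMC parameter
 * block (chaining value, then cryptographic key) that the CBC paths
 * below hand to the hardware.
 */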
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
};

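/*
 * The machine may implement only a subset of the AES key sizes (some
 * CPACF levels provide AES-128 only), so reject keys the detected
 * hardware cannot handle.
 */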
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 16:
		if (!has_aes_128)
			goto fail;
		break;
	case 24:
		if (!has_aes_192)
			goto fail;
		break;
	case 32:
		if (!has_aes_256)
			goto fail;
		break;
	default:
		/* invalid key length */
		goto fail;
	}

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
fail:
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

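/*
 * Single-block operations use the CPACF KM (cipher message) instruction;
 * the raw key is the entire parameter block and the hardware performs
 * the key expansion internally.
 */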
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

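/*
 * Multi-block fast paths for the legacy cipher interface: the old ECB
 * and CBC template code calls cia_encrypt_ecb and friends with runs of
 * blocks, so a single hardware invocation replaces a per-block software
 * loop.
 */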
static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
				    const u8 *in, unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
	int ret;

	/* only use complete blocks */
	nbytes &= ~(AES_BLOCK_SIZE - 1);

	switch (sctx->key_len) {
	case 16:
		ret = crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
				    nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 24:
		ret = crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
				    nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 32:
		ret = crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
				    nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	}
	return nbytes;
}

static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out,
				    const u8 *in, unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
	int ret;

	/* only use complete blocks */
	nbytes &= ~(AES_BLOCK_SIZE - 1);

	switch (sctx->key_len) {
	case 16:
		ret = crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
				    nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 24:
		ret = crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
				    nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 32:
		ret = crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
				    nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	}
	return nbytes;
}

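/*
 * The CBC variants use KMC (cipher message with chaining), which updates
 * the chaining value in its parameter block as it runs.  The IV is
 * copied in from desc->info before the call; on encryption the updated
 * chaining value is copied back to desc->info so consecutive calls chain
 * correctly, while the decrypt path leaves desc->info untouched.
 */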
static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out,
				    const u8 *in, unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
	int ret;

	/* only use complete blocks */
	nbytes &= ~(AES_BLOCK_SIZE - 1);

	memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
	switch (sctx->key_len) {
	case 16:
		ret = crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in,
				     nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 24:
		ret = crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in,
				     nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 32:
		ret = crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in,
				     nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	}
	memcpy(desc->info, &sctx->iv, AES_BLOCK_SIZE);

	return nbytes;
}

static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out,
				    const u8 *in, unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
	int ret;

	/* only use complete blocks */
	nbytes &= ~(AES_BLOCK_SIZE - 1);

	memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
	switch (sctx->key_len) {
	case 16:
		ret = crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in,
				     nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 24:
		ret = crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in,
				     nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	case 32:
		ret = crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in,
				     nbytes);
		BUG_ON((ret < 0) || (ret != nbytes));
		break;
	}
	return nbytes;
}

static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
			.cia_encrypt_ecb	= aes_encrypt_ecb,
			.cia_decrypt_ecb	= aes_decrypt_ecb,
			.cia_encrypt_cbc	= aes_encrypt_cbc,
			.cia_decrypt_cbc	= aes_decrypt_cbc,
		}
	}
};

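/*
 * "ecb(aes)" and "cbc(aes)" below are native blkcipher implementations.
 * Their setkey routines cache the KM/KMC function code for the selected
 * key size in the context, so the crypt paths need no per-call switch on
 * key_len.
 */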
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	/* aes_set_key() rejects unsupported and invalid key lengths */
	return aes_set_key(tfm, in_key, key_len);
}

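/*
 * Process the request in virtually mapped chunks: each iteration hands
 * every complete block of the current chunk to the hardware, and
 * blkcipher_walk_done() carries any sub-block remainder over to the
 * next iteration.
 */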
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

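/*
 * Note: the param pointer handed to KMC below is sctx->iv.  This works
 * because iv is immediately followed by key in struct s390_aes_ctx, so
 * &sctx->iv is also the start of a complete KMC parameter block
 * (chaining value, then key).
 */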
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};

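/*
 * Probe the available CPACF functions and register the algorithms.  At
 * least one AES key size must be supported; registration is unwound in
 * reverse order on failure.
 */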
static int __init aes_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		has_aes_128 = 1;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		has_aes_192 = 1;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		has_aes_256 = 1;

	if (!has_aes_128 && !has_aes_192 && !has_aes_256)
		return -ENOSYS;

	ret = crypto_register_alg(&aes_alg);
	if (ret != 0) {
		printk(KERN_INFO "crypt_s390: aes-s390 couldn't be loaded.\n");
		goto aes_err;
	}

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret != 0) {
		printk(KERN_INFO
		       "crypt_s390: ecb-aes-s390 couldn't be loaded.\n");
		goto ecb_aes_err;
	}

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret != 0) {
		printk(KERN_INFO
		       "crypt_s390: cbc-aes-s390 couldn't be loaded.\n");
		goto cbc_aes_err;
	}

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");