/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *              Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

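/*
 * Masks of installed CPACF function codes, one per instruction,
 * filled once at module init time via cpacf_query(). Each setkey
 * function below tests the relevant mask with cpacf_test_func()
 * before committing to a hardware function code, and otherwise
 * routes the request to a software fallback.
 */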
static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct s390_aes_ctx {
        u8 key[AES_MAX_KEY_SIZE];
        int key_len;
        unsigned long fc;
        union {
                struct crypto_skcipher *blk;
                struct crypto_cipher *cip;
        } fallback;
};

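/*
 * Parameter block for the PCC instruction as used by the XTS code
 * below: PCC is run once per request over the tweak to precompute
 * the XTS starting value, which the subsequent KM invocation then
 * chains from.
 */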
struct pcc_param {
        u8 key[32];
        u8 tweak[16];
        u8 block[16];
        u8 bit[16];
        u8 xts[16];
};

struct s390_xts_ctx {
        u8 key[32];
        u8 pcc_key[32];
        int key_len;
        unsigned long fc;
        struct crypto_skcipher *fallback;
};

static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
                unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned long fc;

        /* Pick the correct function code based on the key length */
        fc = (key_len == 16) ? CPACF_KM_AES_128 :
             (key_len == 24) ? CPACF_KM_AES_192 :
             (key_len == 32) ? CPACF_KM_AES_256 : 0;

        /* Check if the function code is available */
        sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
        if (!sctx->fc)
                return setkey_fallback_cip(tfm, in_key, key_len);

        sctx->key_len = key_len;
        memcpy(sctx->key, in_key, key_len);
        return 0;
}

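/*
 * Note that an unavailable function code does not fail the setkey:
 * the key is handed to the software fallback tfm instead, so a single
 * algorithm instance transparently covers key sizes the hardware does
 * not implement (e.g. AES-192/256 on machines that only provide
 * KM-AES-128).
 */
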
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(!sctx->fc)) {
                crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
                return;
        }
        cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
}
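
/*
 * KM uses the same function code for both directions; or-ing in the
 * CPACF_DECRYPT modifier bit selects decipher instead of encipher.
 */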
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(!sctx->fc)) {
                crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
                return;
        }
        cpacf_km(sctx->fc | CPACF_DECRYPT,
                 &sctx->key, out, in, AES_BLOCK_SIZE);
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.cip = crypto_alloc_cipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.cip)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(sctx->fallback.cip);
        sctx->fallback.cip = NULL;
}

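/*
 * CRYPTO_ALG_NEED_FALLBACK in cra_flags marks this implementation as
 * one that may delegate to a fallback; passing the same flag in the
 * mask of crypto_alloc_cipher() above ensures that the allocated
 * fallback is never itself an algorithm that needs one, ruling out
 * recursive fallback chains.
 */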
static struct crypto_alg aes_alg = {
        .cra_name = "aes",
        .cra_driver_name = "aes-s390",
        .cra_priority = 300,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_module = THIS_MODULE,
        .cra_init = fallback_init_cip,
        .cra_exit = fallback_exit_cip,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = AES_MIN_KEY_SIZE,
                        .cia_max_keysize = AES_MAX_KEY_SIZE,
                        .cia_setkey = aes_set_key,
                        .cia_encrypt = aes_encrypt,
                        .cia_decrypt = aes_decrypt,
                }
        }
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
                unsigned int len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
                                                      CRYPTO_TFM_REQ_MASK);

        ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);

        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
                          CRYPTO_TFM_RES_MASK;

        return ret;
}

static int fallback_blk_dec(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

        skcipher_request_set_tfm(req, sctx->fallback.blk);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_decrypt(req);

        skcipher_request_zero(req);
        return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

        skcipher_request_set_tfm(req, sctx->fallback.blk);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_encrypt(req);

        /* zero the on-stack request, matching the decrypt path */
        skcipher_request_zero(req);
        return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned long fc;

        /* Pick the correct function code based on the key length */
        fc = (key_len == 16) ? CPACF_KM_AES_128 :
             (key_len == 24) ? CPACF_KM_AES_192 :
             (key_len == 32) ? CPACF_KM_AES_256 : 0;

        /* Check if the function code is available */
        sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
        if (!sctx->fc)
                return setkey_fallback_blk(tfm, in_key, key_len);

        sctx->key_len = key_len;
        memcpy(sctx->key, in_key, key_len);
        return 0;
}

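/*
 * The blkcipher walk hands back virtually mapped chunks of the
 * scatterlists; each cpacf_km() call processes as many complete
 * blocks as the current chunk holds, and a partial remainder is
 * handed back to the walker.
 */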
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                cpacf_km(func, param, out, in, n);

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!sctx->fc))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->fc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!sctx->fc))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->fc | CPACF_DECRYPT, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
                                                   CRYPTO_ALG_ASYNC |
                                                   CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.blk)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.blk);
        }

        return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_skcipher(sctx->fallback.blk);
}

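/*
 * The mode combos below register with a higher cra_priority (400)
 * than the plain cipher (300), so the crypto core prefers these
 * single-pass hardware implementations over a generic mode template
 * wrapped around aes-s390.
 */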
static struct crypto_alg ecb_aes_alg = {
        .cra_name = "ecb(aes)",
        .cra_driver_name = "ecb-aes-s390",
        .cra_priority = 400,    /* combo: aes + ecb */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = fallback_init_blk,
        .cra_exit = fallback_exit_blk,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .setkey = ecb_aes_set_key,
                        .encrypt = ecb_aes_encrypt,
                        .decrypt = ecb_aes_decrypt,
                }
        }
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned long fc;

        /* Pick the correct function code based on the key length */
        fc = (key_len == 16) ? CPACF_KMC_AES_128 :
             (key_len == 24) ? CPACF_KMC_AES_192 :
             (key_len == 32) ? CPACF_KMC_AES_256 : 0;

        /* Check if the function code is available */
        sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
        if (!sctx->fc)
                return setkey_fallback_blk(tfm, in_key, key_len);

        sctx->key_len = key_len;
        memcpy(sctx->key, in_key, key_len);
        return 0;
}

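/*
 * KMC keeps the CBC chaining value in its parameter block, so the IV
 * is copied in once before the walk and the updated value is copied
 * back to walk->iv after all blocks have been processed.
 */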
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct blkcipher_walk *walk)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        struct {
                u8 iv[AES_BLOCK_SIZE];
                u8 key[AES_MAX_KEY_SIZE];
        } param;

        if (!nbytes)
                goto out;

        memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
        memcpy(param.key, sctx->key, sctx->key_len);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                cpacf_kmc(func, &param, out, in, n);

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
        return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!sctx->fc))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->fc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!sctx->fc))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->fc | CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_aes_alg = {
        .cra_name = "cbc(aes)",
        .cra_driver_name = "cbc-aes-s390",
        .cra_priority = 400,    /* combo: aes + cbc */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = fallback_init_blk,
        .cra_exit = fallback_exit_blk,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = cbc_aes_set_key,
                        .encrypt = cbc_aes_encrypt,
                        .decrypt = cbc_aes_decrypt,
                }
        }
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
                               unsigned int len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
                                                     CRYPTO_TFM_REQ_MASK);

        ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);

        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
                          CRYPTO_TFM_RES_MASK;

        return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
        unsigned int ret;

        skcipher_request_set_tfm(req, xts_ctx->fallback);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_decrypt(req);

        skcipher_request_zero(req);
        return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct crypto_blkcipher *tfm = desc->tfm;
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
        SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
        unsigned int ret;

        skcipher_request_set_tfm(req, xts_ctx->fallback);
        skcipher_request_set_callback(req, desc->flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

        ret = crypto_skcipher_encrypt(req);

        skcipher_request_zero(req);
        return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        unsigned long fc;
        int err;

        err = xts_check_key(tfm, in_key, key_len);
        if (err)
                return err;

        /* Pick the correct function code based on the key length */
        fc = (key_len == 32) ? CPACF_KM_XTS_128 :
             (key_len == 64) ? CPACF_KM_XTS_256 : 0;

        /* Check if the function code is available */
        xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
        if (!xts_ctx->fc)
                return xts_fallback_setkey(tfm, in_key, key_len);

        /* Split the XTS key into the two subkeys */
        key_len = key_len / 2;
        xts_ctx->key_len = key_len;
        memcpy(xts_ctx->key, in_key, key_len);
        memcpy(xts_ctx->pcc_key, in_key + key_len, key_len);
        return 0;
}

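/*
 * The combined XTS key is twice the AES key size: the first half is
 * the data key used by KM, the second half is the tweak key fed to
 * PCC. In the parameter blocks a 16-byte key is placed in the upper
 * half of the 32-byte key field (offset = key_len & 0x10) so that
 * the block passed to the instruction keeps key and starting value
 * contiguous for both XTS-128 and XTS-256.
 */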
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_xts_ctx *xts_ctx,
                         struct blkcipher_walk *walk)
{
        unsigned int offset = xts_ctx->key_len & 0x10;
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        unsigned int n;
        u8 *in, *out;
        struct pcc_param pcc_param;
        struct {
                u8 key[32];
                u8 init[16];
        } xts_param;

        if (!nbytes)
                goto out;

        memset(pcc_param.block, 0, sizeof(pcc_param.block));
        memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
        memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
        memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
        memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
        /* remove decipher modifier bit from 'func' and call PCC */
        cpacf_pcc(func & 0x7f, &pcc_param.key[offset]);

        memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
        memcpy(xts_param.init, pcc_param.xts, 16);
        do {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;

                cpacf_km(func, &xts_param.key[offset], out, in, n);

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
out:
        return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!xts_ctx->fc))
                return xts_fallback_encrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->fc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!xts_ctx->fc))
                return xts_fallback_decrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->fc | CPACF_DECRYPT, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
                                                  CRYPTO_ALG_ASYNC |
                                                  CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(xts_ctx->fallback)) {
                pr_err("Allocating XTS fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(xts_ctx->fallback);
        }
        return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        crypto_free_skcipher(xts_ctx->fallback);
}

static struct crypto_alg xts_aes_alg = {
        .cra_name = "xts(aes)",
        .cra_driver_name = "xts-aes-s390",
        .cra_priority = 400,    /* combo: aes + xts */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct s390_xts_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = xts_fallback_init,
        .cra_exit = xts_fallback_exit,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize = 2 * AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = xts_aes_set_key,
                        .encrypt = xts_aes_encrypt,
                        .decrypt = xts_aes_decrypt,
                }
        }
};

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned long fc;

        /* Pick the correct function code based on the key length */
        fc = (key_len == 16) ? CPACF_KMCTR_AES_128 :
             (key_len == 24) ? CPACF_KMCTR_AES_192 :
             (key_len == 32) ? CPACF_KMCTR_AES_256 : 0;

        /* Check if the function code is available */
        sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
        if (!sctx->fc)
                return setkey_fallback_blk(tfm, in_key, key_len);

        sctx->key_len = key_len;
        memcpy(sctx->key, in_key, key_len);
        return 0;
}

static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
        unsigned int i, n;

        /* only use complete blocks, max. PAGE_SIZE */
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
                memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
                       AES_BLOCK_SIZE);
                crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
        }
        return n;
}

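/*
 * KMCTR consumes one counter value per block. __ctrblk_init()
 * pre-generates up to a page of consecutive counter blocks so that a
 * single instruction invocation can process many blocks at once. The
 * shared counter page is taken with spin_trylock(): if another CPU
 * holds it, ctr_aes_crypt() falls back to a single on-stack counter
 * block (one block per KMCTR call) instead of spinning.
 */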
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
        unsigned int n, nbytes;
        u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
        u8 *out, *in, *ctrptr = ctrbuf;

        if (!walk->nbytes)
                return ret;

        if (spin_trylock(&ctrblk_lock))
                ctrptr = ctrblk;

        memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                while (nbytes >= AES_BLOCK_SIZE) {
                        if (ctrptr == ctrblk)
                                n = __ctrblk_init(ctrptr, nbytes);
                        else
                                n = AES_BLOCK_SIZE;
                        cpacf_kmctr(func, sctx->key, out, in, n, ctrptr);
                        if (n > AES_BLOCK_SIZE)
                                memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                        crypto_inc(ctrptr, AES_BLOCK_SIZE);
                        out += n;
                        in += n;
                        nbytes -= n;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }
        if (ctrptr == ctrblk) {
                if (nbytes)
                        memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
                else
                        memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
                spin_unlock(&ctrblk_lock);
        } else {
                if (!nbytes)
                        memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
        }
        /*
         * final block may be < AES_BLOCK_SIZE, copy only nbytes
         */
        if (nbytes) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                cpacf_kmctr(func, sctx->key, buf, in, AES_BLOCK_SIZE, ctrbuf);
                memcpy(out, buf, nbytes);
                crypto_inc(ctrbuf, AES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
                memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
        }

        return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!sctx->fc))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->fc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(!sctx->fc))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->fc | CPACF_DECRYPT, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
        .cra_name = "ctr(aes)",
        .cra_driver_name = "ctr-aes-s390",
        .cra_priority = 400,    /* combo: aes + ctr */
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct s390_aes_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = fallback_init_blk,
        .cra_exit = fallback_exit_blk,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = ctr_aes_set_key,
                        .encrypt = ctr_aes_encrypt,
                        .decrypt = ctr_aes_decrypt,
                }
        }
};

static struct crypto_alg *aes_s390_algs_ptr[5];
static int aes_s390_algs_num;

static int aes_s390_register_alg(struct crypto_alg *alg)
{
        int ret;

        ret = crypto_register_alg(alg);
        if (!ret)
                aes_s390_algs_ptr[aes_s390_algs_num++] = alg;
        return ret;
}

static void aes_s390_fini(void)
{
        while (aes_s390_algs_num--)
                crypto_unregister_alg(aes_s390_algs_ptr[aes_s390_algs_num]);
        if (ctrblk)
                free_page((unsigned long) ctrblk);
}

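/*
 * An algorithm is registered as soon as any one of its key sizes is
 * available in hardware; the per-key-size checks in the setkey
 * functions then route unsupported sizes to the software fallback.
 */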
static int __init aes_s390_init(void)
{
        int ret;

        /* Query available functions for KM, KMC and KMCTR */
        cpacf_query(CPACF_KM, &km_functions);
        cpacf_query(CPACF_KMC, &kmc_functions);
        cpacf_query(CPACF_KMCTR, &kmctr_functions);

        if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
            cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
            cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
                ret = aes_s390_register_alg(&aes_alg);
                if (ret)
                        goto out_err;
                ret = aes_s390_register_alg(&ecb_aes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
            cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
            cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
                ret = aes_s390_register_alg(&cbc_aes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
            cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
                ret = aes_s390_register_alg(&xts_aes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_128) ||
            cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_192) ||
            cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_256)) {
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto out_err;
                }
                ret = aes_s390_register_alg(&ctr_aes_alg);
                if (ret)
                        goto out_err;
        }

        return 0;
out_err:
        aes_s390_fini();
        return ret;
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");