[CRYPTO] s390: Added block cipher versions of CBC/ECB
crypto/cipher.c
/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"

static inline void xor_64(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
        ((u32 *)a)[2] ^= ((u32 *)b)[2];
        ((u32 *)a)[3] ^= ((u32 *)b)[3];
}
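
/*
 * A note on the casts above: both helpers access the block buffers as
 * u32 words. That is only safe when the pointers are 32-bit aligned,
 * which the callers arrange via the algorithm's alignmask (see the
 * bounce-buffer handling in crypt() below); it assumes ciphers using
 * these helpers declare a suitable alignmask, or run on an architecture
 * that tolerates unaligned word access.
 */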

static unsigned int crypt_slow(const struct cipher_desc *desc,
                               struct scatter_walk *in,
                               struct scatter_walk *out, unsigned int bsize)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
        u8 buffer[bsize * 2 + alignmask];
        u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        u8 *dst = src + bsize;

        scatterwalk_copychunks(src, in, bsize, 0);
        desc->prfn(desc, dst, src, bsize);
        scatterwalk_copychunks(dst, out, bsize, 1);

        return bsize;
}
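
/*
 * crypt_slow() is the fallback for a block that straddles a page or
 * scatterlist-entry boundary. For example, with an 8-byte block size
 * the walk may expose only 5 bytes at the end of one page and 3 at the
 * start of the next; scatterwalk_copychunks() gathers the pieces into
 * the aligned stack buffer, the block is processed there, and the
 * result is scattered back out the same way.
 */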

static inline unsigned int crypt_fast(const struct cipher_desc *desc,
                                      struct scatter_walk *in,
                                      struct scatter_walk *out,
                                      unsigned int nbytes, u8 *tmp)
{
        u8 *src, *dst;
        u8 *real_src, *real_dst;

        real_src = scatterwalk_map(in, 0);
        real_dst = scatterwalk_map(out, 1);

        src = real_src;
        dst = scatterwalk_samebuf(in, out) ? src : real_dst;

        if (tmp) {
                memcpy(tmp, src, nbytes);
                src = tmp;
                dst = tmp;
        }

        nbytes = desc->prfn(desc, dst, src, nbytes);

        if (tmp)
                memcpy(real_dst, tmp, nbytes);

        scatterwalk_unmap(real_src, 0);
        scatterwalk_unmap(real_dst, 1);

        scatterwalk_advance(in, nbytes);
        scatterwalk_advance(out, nbytes);

        return nbytes;
}
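
/*
 * crypt_fast() handles the common case: the current scatterlist entries
 * hold at least one whole block, so the data can be processed directly
 * through the mapped pages. When the caller supplies a bounce page in
 * `tmp` (source or destination misaligned for this algorithm), the data
 * takes one round trip through it instead. The byte count is taken from
 * prfn's return value, so the processing function may legitimately
 * consume fewer bytes than it was offered.
 */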

/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks. In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
                 struct scatterlist *dst,
                 struct scatterlist *src,
                 unsigned int nbytes)
{
        struct scatter_walk walk_in, walk_out;
        struct crypto_tfm *tfm = desc->tfm;
        const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
        unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
        unsigned long buffer = 0;

        if (!nbytes)
                return 0;

        if (nbytes % bsize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                return -EINVAL;
        }

        scatterwalk_start(&walk_in, src);
        scatterwalk_start(&walk_out, dst);

        for (;;) {
                unsigned int n = nbytes;
                u8 *tmp = NULL;

                if (!scatterwalk_aligned(&walk_in, alignmask) ||
                    !scatterwalk_aligned(&walk_out, alignmask)) {
                        if (!buffer) {
                                buffer = __get_free_page(GFP_ATOMIC);
                                if (!buffer)
                                        n = 0;  /* force the slow path below */
                        }
                        tmp = (u8 *)buffer;
                }

                n = scatterwalk_clamp(&walk_in, n);
                n = scatterwalk_clamp(&walk_out, n);

                if (likely(n >= bsize))
                        n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
                else
                        n = crypt_slow(desc, &walk_in, &walk_out, bsize);

                nbytes -= n;

                scatterwalk_done(&walk_in, 0, nbytes);
                scatterwalk_done(&walk_out, 1, nbytes);

                if (!nbytes)
                        break;

                crypto_yield(tfm->crt_flags);
        }

        if (buffer)
                free_page(buffer);

        return 0;
}
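
/*
 * Usage sketch (illustrative only, not part of this file): callers
 * reach crypt() through the cit_encrypt/cit_decrypt hooks installed in
 * crypto_init_cipher_ops() below. On this era of the API that looks
 * roughly like the following; the "aes" name, key size and buffer size
 * are assumptions made for the example:
 *
 *      struct crypto_tfm *tfm;
 *      struct scatterlist sg;
 *      u8 key[16], buf[32];
 *
 *      tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_ECB);
 *      if (!tfm)
 *              return -ENOMEM;
 *      crypto_cipher_setkey(tfm, key, sizeof(key));
 *
 *      sg.page   = virt_to_page(buf);          // old-style scatterlist
 *      sg.offset = offset_in_page(buf);
 *      sg.length = sizeof(buf);
 *
 *      crypto_cipher_encrypt(tfm, &sg, &sg, sizeof(buf));  // in place
 *      crypto_free_tfm(tfm);
 */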

static int crypt_iv_unaligned(struct cipher_desc *desc,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        u8 *iv = desc->info;

        if (unlikely(((unsigned long)iv & alignmask))) {
                unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
                u8 buffer[ivsize + alignmask];
                u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
                int err;

                desc->info = memcpy(tmp, iv, ivsize);
                err = crypt(desc, dst, src, nbytes);
                memcpy(iv, tmp, ivsize);

                return err;
        }

        return crypt(desc, dst, src, nbytes);
}
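
/*
 * The bounce above exists because the _iv entry points accept a
 * caller-owned IV buffer of unknown alignment. The IV is copied to an
 * aligned stack slot for the duration of the call and copied back
 * afterwards, so the updated chaining value still reaches the caller.
 */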

static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);

        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        nbytes -= bsize;

        do {
                xor(iv, src);
                fn(tfm, dst, iv);
                memcpy(iv, dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes);

        return done;
}
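
/*
 * The loop above is the textbook CBC recurrence,
 *
 *      C[0] = E_K(P[0] ^ IV)
 *      C[i] = E_K(P[i] ^ C[i-1])
 *
 * with `iv` doubling as the running C[i-1]: it is XORed with the next
 * plaintext block in place, encrypted into dst, then overwritten with
 * the fresh ciphertext block.
 */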

static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);

        u8 stack[src == dst ? bsize + alignmask : 0];
        u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
        u8 **dst_p = src == dst ? &buf : &dst;

        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        nbytes -= bsize;

        do {
                u8 *tmp_dst = *dst_p;

                fn(tfm, tmp_dst, src);
                xor(tmp_dst, iv);
                memcpy(iv, src, bsize);
                if (tmp_dst != dst)
                        memcpy(dst, tmp_dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes);

        return done;
}
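
/*
 * Decryption inverts the recurrence: P[i] = D_K(C[i]) ^ C[i-1]. The
 * stack buffer covers the in-place (src == dst) case, where writing the
 * plaintext directly would destroy the ciphertext block that is still
 * needed as the next chaining value; the result therefore takes a
 * detour through `buf`, and the old ciphertext is saved into `iv`
 * before dst is written.
 */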

static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
                                const u8 *src, unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        int bsize = crypto_tfm_alg_blocksize(tfm);
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
        unsigned int done = 0;

        nbytes -= bsize;

        do {
                fn(tfm, dst, src);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes);

        return done;
}
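
/*
 * ECB has no chaining: each block is simply C[i] = E_K(P[i]), which is
 * also why identical plaintext blocks produce identical ciphertext
 * blocks in this mode.
 */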

static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
        struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        } else
                return cia->cia_setkey(tfm, key, keylen);
}
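
/*
 * setkey() clears any stale CRYPTO_TFM_RES_* bits up front so that a
 * failing cia_setkey() can set its own result flags; on -EINVAL the
 * caller is expected to inspect crt_flags for the reason.
 */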

static int ecb_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int ecb_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = iv;

        return crypt_iv_unaligned(&desc, dst, src, nbytes);
}

static int cbc_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = iv;

        return crypt_iv_unaligned(&desc, dst, src, nbytes);
}
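
/*
 * Usage sketch for the _iv variants (illustrative; names and sizes are
 * assumptions): unlike cbc_encrypt(), which chains through the
 * tfm-private cit_iv, the _iv entry points let the caller own the
 * chaining state:
 *
 *      u8 iv[8];       // hypothetical 64-bit-block cipher
 *
 *      get_random_bytes(iv, sizeof(iv));
 *      crypto_cipher_encrypt_iv(tfm, &sg_out, &sg_in, nbytes, iv);
 *      // `iv` now holds the last ciphertext block and can be carried
 *      // into a follow-up call to continue the chain.
 */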

static int nocrypt(struct crypto_tfm *tfm,
                   struct scatterlist *dst,
                   struct scatterlist *src,
                   unsigned int nbytes)
{
        return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
                      struct scatterlist *dst,
                      struct scatterlist *src,
                      unsigned int nbytes, u8 *iv)
{
        return -ENOSYS;
}

int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
        u32 mode = flags & CRYPTO_TFM_MODE_MASK;
        tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
        return 0;
}

static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
                                              const u8 *),
                                   struct crypto_tfm *tfm,
                                   u8 *dst, const u8 *src)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        unsigned int size = crypto_tfm_alg_blocksize(tfm);
        u8 buffer[size + alignmask];
        u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

        memcpy(tmp, src, size);
        fn(tfm, tmp, tmp);
        memcpy(dst, tmp, size);
}

static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
                                     u8 *dst, const u8 *src)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
                cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
                return;
        }

        cipher->cia_encrypt(tfm, dst, src);
}

static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
                                     u8 *dst, const u8 *src)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
                cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
                return;
        }

        cipher->cia_decrypt(tfm, dst, src);
}
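
/*
 * The unaligned wrappers above back the cit_encrypt_one and
 * cit_decrypt_one hooks installed below: they give users a single-block
 * primitive that is always safe to call, bouncing misaligned buffers
 * through an aligned stack copy only when the algorithm declares an
 * alignmask.
 */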

int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
        int ret = 0;
        struct cipher_tfm *ops = &tfm->crt_cipher;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        ops->cit_setkey = setkey;
        ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
                cipher_encrypt_unaligned : cipher->cia_encrypt;
        ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
                cipher_decrypt_unaligned : cipher->cia_decrypt;

        switch (tfm->crt_cipher.cit_mode) {
        case CRYPTO_TFM_MODE_ECB:
                ops->cit_encrypt = ecb_encrypt;
                ops->cit_decrypt = ecb_decrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CBC:
                ops->cit_encrypt = cbc_encrypt;
                ops->cit_decrypt = cbc_decrypt;
                ops->cit_encrypt_iv = cbc_encrypt_iv;
                ops->cit_decrypt_iv = cbc_decrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CFB:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CTR:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        default:
                BUG();
        }

        if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
                unsigned long align;
                unsigned long addr;

                switch (crypto_tfm_alg_blocksize(tfm)) {
                case 8:
                        ops->cit_xor_block = xor_64;
                        break;

                case 16:
                        ops->cit_xor_block = xor_128;
                        break;

                default:
                        printk(KERN_WARNING "%s: block size %u not supported\n",
                               crypto_tfm_alg_name(tfm),
                               crypto_tfm_alg_blocksize(tfm));
                        ret = -EINVAL;
                        goto out;
                }

                ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
                align = crypto_tfm_alg_alignmask(tfm) + 1;
                addr = (unsigned long)crypto_tfm_ctx(tfm);
                addr = ALIGN(addr, align);
                addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
                ops->cit_iv = (void *)addr;
        }

out:
        return ret;
}
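
/*
 * A worked example of the cit_iv placement above: the IV is carved out
 * of the same allocation as the transform context rather than being
 * allocated separately. With, say, a cra_ctxsize of 20 and an alignmask
 * of 3 (align = 4), the context base is rounded up to a 4-byte boundary
 * and the IV lands at ctx + ALIGN(20, 4) = ctx + 20; with an alignmask
 * of 15 it would land at ctx + 32. Either way the IV itself ends up
 * alignmask-aligned, as the xor helpers require.
 */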

void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}