Linux 3.2-rc3
arch/x86/crypto/blowfish_glue.c
/*
 * Glue Code for assembler optimized version of Blowfish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 * USA
 *
 */

#include <crypto/blowfish.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/algapi.h>

/* regular block cipher functions */
asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src,
				   bool xor);
asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);

/* 4-way parallel cipher functions */
asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					const u8 *src, bool xor);
asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst,
				      const u8 *src);

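/*
 * The 'xor' flag selects between the two behaviours of the assembler
 * encryption routines, as seen from their use in this file: when false the
 * encryption of *src is written to *dst, when true it is XORed into the
 * existing contents of *dst.  The XOR variants are used by the CTR code
 * below.
 */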
static inline void blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src)
{
	__blowfish_enc_blk(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor(struct bf_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__blowfish_enc_blk(ctx, dst, src, true);
}

static inline void blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	__blowfish_enc_blk_4way(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor_4way(struct bf_ctx *ctx, u8 *dst,
					     const u8 *src)
{
	__blowfish_enc_blk_4way(ctx, dst, src, true);
}

static void blowfish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

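/*
 * Single-block cipher: registered with priority 200 so the crypto API
 * prefers this assembler version over the lower-priority generic C
 * implementation of "blowfish" when both are available.
 */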
static struct crypto_alg bf_alg = {
	.cra_name		= "blowfish",
	.cra_driver_name	= "blowfish-asm",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(bf_alg.cra_list),
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= BF_MIN_KEY_SIZE,
			.cia_max_keysize	= BF_MAX_KEY_SIZE,
			.cia_setkey		= blowfish_setkey,
			.cia_encrypt		= blowfish_encrypt,
			.cia_decrypt		= blowfish_decrypt,
		}
	}
};

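/*
 * ECB helper: consume each blkcipher walk chunk in 4-block batches using
 * the 4-way assembler routine, then fall back to the single-block routine
 * for whatever remains of that chunk.
 */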
static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
		     void (*fn)(struct bf_ctx *, u8 *, const u8 *),
		     void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *))
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes;
	int err;

	err = blkcipher_walk_virt(desc, walk);

	while ((nbytes = walk->nbytes)) {
		u8 *wsrc = walk->src.virt.addr;
		u8 *wdst = walk->dst.virt.addr;

		/* Process four block batch */
		if (nbytes >= bsize * 4) {
			do {
				fn_4way(ctx, wdst, wsrc);

				wsrc += bsize * 4;
				wdst += bsize * 4;
				nbytes -= bsize * 4;
			} while (nbytes >= bsize * 4);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			fn(ctx, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		err = blkcipher_walk_done(desc, walk, nbytes);
	}

	return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, blowfish_enc_blk, blowfish_enc_blk_4way);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, blowfish_dec_blk, blowfish_dec_blk_4way);
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "ecb(blowfish)",
	.cra_driver_name	= "ecb-blowfish-asm",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= BF_MIN_KEY_SIZE,
			.max_keysize	= BF_MAX_KEY_SIZE,
			.setkey		= blowfish_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};

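/*
 * CBC encryption is inherently serial: each ciphertext block is needed as
 * the IV for the next block, so there is no 4-way batched version here.
 */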
static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 *iv = (u64 *)walk->iv;

	do {
		*dst = *src ^ *iv;
		blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	*(u64 *)walk->iv = *iv;
	return nbytes;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_encrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

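/*
 * CBC decryption can be parallelized because each plaintext block depends
 * only on two ciphertext blocks.  The walk runs backwards from the last
 * block so that, for in-place operation, the preceding ciphertext blocks
 * are still intact when they are needed for the XOR step; ivs[] saves the
 * ciphertexts that the 4-way decryption is about to overwrite.
 */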
static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ivs[4 - 1];
	u64 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process four block batch */
	if (nbytes >= bsize * 4) {
		do {
			nbytes -= bsize * 4 - bsize;
			src -= 4 - 1;
			dst -= 4 - 1;

			ivs[0] = src[0];
			ivs[1] = src[1];
			ivs[2] = src[2];

			blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src);

			dst[1] ^= ivs[0];
			dst[2] ^= ivs[1];
			dst[3] ^= ivs[2];

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			*dst ^= *(src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 4);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	for (;;) {
		blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		*dst ^= *(src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	*dst ^= *(u64 *)walk->iv;
	*(u64 *)walk->iv = last_iv;

	return nbytes;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_decrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "cbc(blowfish)",
	.cra_driver_name	= "cbc-blowfish-asm",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= BF_MIN_KEY_SIZE,
			.max_keysize	= BF_MAX_KEY_SIZE,
			.ivsize		= BF_BLOCK_SIZE,
			.setkey		= blowfish_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};

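/*
 * Final CTR step for a request whose length is not a multiple of the block
 * size: encrypt the counter block into a local keystream buffer and XOR
 * only the remaining bytes into the output.
 */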
static void ctr_crypt_final(struct bf_ctx *ctx, struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[BF_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	blowfish_enc_blk(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	crypto_inc(ctrblk, BF_BLOCK_SIZE);
}

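/*
 * CTR main loop: the 8-byte IV is interpreted as a single 64-bit big-endian
 * counter.  Plaintext is first copied to the destination, then the XOR
 * variants of the encryption routines encrypt the counter blocks and XOR
 * the resulting keystream into that copy, so the same path serves both
 * encryption and decryption.
 */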
static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk)
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
	__be64 ctrblocks[4];

	/* Process four block batch */
	if (nbytes >= bsize * 4) {
		do {
			if (dst != src) {
				dst[0] = src[0];
				dst[1] = src[1];
				dst[2] = src[2];
				dst[3] = src[3];
			}

			/* create ctrblks for parallel encrypt */
			ctrblocks[0] = cpu_to_be64(ctrblk++);
			ctrblocks[1] = cpu_to_be64(ctrblk++);
			ctrblocks[2] = cpu_to_be64(ctrblk++);
			ctrblocks[3] = cpu_to_be64(ctrblk++);

			blowfish_enc_blk_xor_4way(ctx, (u8 *)dst,
						  (u8 *)ctrblocks);

			src += 4;
			dst += 4;
		} while ((nbytes -= bsize * 4) >= bsize * 4);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	do {
		if (dst != src)
			*dst = *src;

		ctrblocks[0] = cpu_to_be64(ctrblk++);

		blowfish_enc_blk_xor(ctx, (u8 *)dst, (u8 *)ctrblocks);

		src += 1;
		dst += 1;
	} while ((nbytes -= bsize) >= bsize);

done:
	*(__be64 *)walk->iv = cpu_to_be64(ctrblk);
	return nbytes;
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, BF_BLOCK_SIZE);

	while ((nbytes = walk.nbytes) >= BF_BLOCK_SIZE) {
		nbytes = __ctr_crypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		ctr_crypt_final(crypto_blkcipher_ctx(desc->tfm), &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

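/*
 * CTR turns the block cipher into a stream cipher, so cra_blocksize is 1
 * and requests of any byte length are accepted; the 8-byte counter block
 * is carried in the IV (ivsize = BF_BLOCK_SIZE).
 */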
static struct crypto_alg blk_ctr_alg = {
	.cra_name		= "ctr(blowfish)",
	.cra_driver_name	= "ctr-blowfish-asm",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ctr_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= BF_MIN_KEY_SIZE,
			.max_keysize	= BF_MAX_KEY_SIZE,
			.ivsize		= BF_BLOCK_SIZE,
			.setkey		= blowfish_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
};

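/*
 * Register the four algorithms in order; on failure, unregister the ones
 * already registered before returning the error.
 */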
static int __init init(void)
{
	int err;

	err = crypto_register_alg(&bf_alg);
	if (err)
		goto bf_err;
	err = crypto_register_alg(&blk_ecb_alg);
	if (err)
		goto ecb_err;
	err = crypto_register_alg(&blk_cbc_alg);
	if (err)
		goto cbc_err;
	err = crypto_register_alg(&blk_ctr_alg);
	if (err)
		goto ctr_err;

	return 0;

ctr_err:
	crypto_unregister_alg(&blk_cbc_alg);
cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
ecb_err:
	crypto_unregister_alg(&bf_alg);
bf_err:
	return err;
}

static void __exit fini(void)
{
	crypto_unregister_alg(&blk_ctr_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&bf_alg);
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Blowfish Cipher Algorithm, asm optimized");
MODULE_ALIAS("blowfish");
MODULE_ALIAS("blowfish-asm");