/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8

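/* 8-way parallel cipher functions, implemented in AVX assembly */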
asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
			       le128 *iv);

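/*
 * Single-block CTR helper: convert the little-endian counter to a
 * big-endian block, bump the counter, encrypt the block and XOR the
 * resulting keystream into the source.
 */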
static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}

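/*
 * Dispatch tables for the glue helper: use the 8-way AVX routines whenever a
 * full batch of CAST6_PARALLEL_BLOCKS blocks is available, and fall back to
 * the generic one-block C implementation for the remainder.
 */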
static const struct common_glue_ctx cast6_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
	} }
};

static const struct common_glue_ctx cast6_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
	} }
};

static const struct common_glue_ctx cast6_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
	} }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);
}

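/*
 * Only enable the FPU when at least CAST6_PARALLEL_BLOCKS blocks remain, so
 * short requests stay on the scalar path and avoid kernel_fpu_begin()/end()
 * overhead.
 */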
static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void cast6_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

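/* Per-request state handed to the LRW/XTS walkers' callbacks */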
struct crypt_priv {
	struct cast6_ctx *ctx;
	bool fpu_enabled;
};

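/*
 * Encrypt/decrypt a chunk in place: take the 8-way AVX path for a full
 * parallel batch, otherwise process one block at a time.
 */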
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_decrypt(ctx->ctx, srcdst, srcdst);
}

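/*
 * For LRW the supplied key is a regular CAST6 key followed by one extra
 * block (CAST6_BLOCK_SIZE bytes) of tweak key material.
 */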
struct cast6_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct cast6_ctx cast6_ctx;
};

static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE,
			     &tfm->crt_flags);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE);
}

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}

struct cast6_xts_ctx {
	struct cast6_ctx tweak_ctx;
	struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
			      flags);
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(__cast6_encrypt),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(__cast6_encrypt),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

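/*
 * The "__"-prefixed blkcipher entries (priority 0) are internal synchronous
 * helpers; the ablkcipher entries (priority 200) are the algorithms exposed
 * to users, backed by the internal ones through the ablk helper.
 */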
static struct crypto_alg cast6_algs[10] = { {
	.cra_name = "__ecb-cast6-avx",
	.cra_driver_name = "__driver-ecb-cast6-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct cast6_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE,
			.setkey = cast6_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	.cra_name = "__cbc-cast6-avx",
	.cra_driver_name = "__driver-cbc-cast6-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct cast6_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE,
			.setkey = cast6_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	.cra_name = "__ctr-cast6-avx",
	.cra_driver_name = "__driver-ctr-cast6-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct cast6_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE,
			.ivsize = CAST6_BLOCK_SIZE,
			.setkey = cast6_setkey,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
}, {
	.cra_name = "__lrw-cast6-avx",
	.cra_driver_name = "__driver-lrw-cast6-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct cast6_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_exit = lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE +
				       CAST6_BLOCK_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE +
				       CAST6_BLOCK_SIZE,
			.ivsize = CAST6_BLOCK_SIZE,
			.setkey = lrw_cast6_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
}, {
	.cra_name = "__xts-cast6-avx",
	.cra_driver_name = "__driver-xts-cast6-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct cast6_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE * 2,
			.max_keysize = CAST6_MAX_KEY_SIZE * 2,
			.ivsize = CAST6_BLOCK_SIZE,
			.setkey = xts_cast6_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
}, {
	.cra_name = "ecb(cast6)",
	.cra_driver_name = "ecb-cast6-avx",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "cbc(cast6)",
	.cra_driver_name = "cbc-cast6-avx",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE,
			.ivsize = CAST6_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = __ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "ctr(cast6)",
	.cra_driver_name = "ctr-cast6-avx",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE,
			.ivsize = CAST6_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_encrypt,
			.geniv = "chainiv",
		},
	},
}, {
	.cra_name = "lrw(cast6)",
	.cra_driver_name = "lrw-cast6-avx",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE +
				       CAST6_BLOCK_SIZE,
			.max_keysize = CAST6_MAX_KEY_SIZE +
				       CAST6_BLOCK_SIZE,
			.ivsize = CAST6_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "xts(cast6)",
	.cra_driver_name = "xts-cast6-avx",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAST6_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAST6_MIN_KEY_SIZE * 2,
			.max_keysize = CAST6_MAX_KEY_SIZE * 2,
			.ivsize = CAST6_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
} };

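/*
 * Register the algorithms only if the CPU supports AVX and the OS has
 * enabled SSE and YMM state saving (checked via XCR0).
 */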
static int __init cast6_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		pr_info("AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

static void __exit cast6_exit(void)
{
	crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("cast6");