/*
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
15 #include <linux/compiler.h>
16 #include <linux/kernel.h>
17 #include <linux/crypto.h>
18 #include <linux/errno.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <asm/scatterlist.h>
24 #include "scatterwalk.h"
26 static inline void xor_64(u8
*a
, const u8
*b
)
28 ((u32
*)a
)[0] ^= ((u32
*)b
)[0];
29 ((u32
*)a
)[1] ^= ((u32
*)b
)[1];
32 static inline void xor_128(u8
*a
, const u8
*b
)
34 ((u32
*)a
)[0] ^= ((u32
*)b
)[0];
35 ((u32
*)a
)[1] ^= ((u32
*)b
)[1];
36 ((u32
*)a
)[2] ^= ((u32
*)b
)[2];
37 ((u32
*)a
)[3] ^= ((u32
*)b
)[3];
40 static unsigned int crypt_slow(const struct cipher_desc
*desc
,
41 struct scatter_walk
*in
,
42 struct scatter_walk
*out
, unsigned int bsize
)
44 unsigned long alignmask
= crypto_tfm_alg_alignmask(desc
->tfm
);
45 u8 buffer
[bsize
* 2 + alignmask
];
46 u8
*src
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
47 u8
*dst
= src
+ bsize
;
49 scatterwalk_copychunks(src
, in
, bsize
, 0);
50 desc
->prfn(desc
, dst
, src
, bsize
);
51 scatterwalk_copychunks(dst
, out
, bsize
, 1);
56 static inline unsigned int crypt_fast(const struct cipher_desc
*desc
,
57 struct scatter_walk
*in
,
58 struct scatter_walk
*out
,
59 unsigned int nbytes
, u8
*tmp
)
62 u8
*real_src
, *real_dst
;
64 real_src
= scatterwalk_map(in
, 0);
65 real_dst
= scatterwalk_map(out
, 1);
68 dst
= scatterwalk_samebuf(in
, out
) ? src
: real_dst
;
71 memcpy(tmp
, src
, nbytes
);
76 nbytes
= desc
->prfn(desc
, dst
, src
, nbytes
);
79 memcpy(real_dst
, tmp
, nbytes
);
81 scatterwalk_unmap(real_src
, 0);
82 scatterwalk_unmap(real_dst
, 1);
84 scatterwalk_advance(in
, nbytes
);
85 scatterwalk_advance(out
, nbytes
);
/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
95 static int crypt(const struct cipher_desc
*desc
,
96 struct scatterlist
*dst
,
97 struct scatterlist
*src
,
100 struct scatter_walk walk_in
, walk_out
;
101 struct crypto_tfm
*tfm
= desc
->tfm
;
102 const unsigned int bsize
= crypto_tfm_alg_blocksize(tfm
);
103 unsigned int alignmask
= crypto_tfm_alg_alignmask(tfm
);
104 unsigned long buffer
= 0;
109 if (nbytes
% bsize
) {
110 tfm
->crt_flags
|= CRYPTO_TFM_RES_BAD_BLOCK_LEN
;
114 scatterwalk_start(&walk_in
, src
);
115 scatterwalk_start(&walk_out
, dst
);
118 unsigned int n
= nbytes
;
121 if (!scatterwalk_aligned(&walk_in
, alignmask
) ||
122 !scatterwalk_aligned(&walk_out
, alignmask
)) {
124 buffer
= __get_free_page(GFP_ATOMIC
);
131 n
= scatterwalk_clamp(&walk_in
, n
);
132 n
= scatterwalk_clamp(&walk_out
, n
);
134 if (likely(n
>= bsize
))
135 n
= crypt_fast(desc
, &walk_in
, &walk_out
, n
, tmp
);
137 n
= crypt_slow(desc
, &walk_in
, &walk_out
, bsize
);
141 scatterwalk_done(&walk_in
, 0, nbytes
);
142 scatterwalk_done(&walk_out
, 1, nbytes
);
147 crypto_yield(tfm
->crt_flags
);
156 static int crypt_iv_unaligned(struct cipher_desc
*desc
,
157 struct scatterlist
*dst
,
158 struct scatterlist
*src
,
161 struct crypto_tfm
*tfm
= desc
->tfm
;
162 unsigned long alignmask
= crypto_tfm_alg_alignmask(tfm
);
165 if (unlikely(((unsigned long)iv
& alignmask
))) {
166 unsigned int ivsize
= tfm
->crt_cipher
.cit_ivsize
;
167 u8 buffer
[ivsize
+ alignmask
];
168 u8
*tmp
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
171 desc
->info
= memcpy(tmp
, iv
, ivsize
);
172 err
= crypt(desc
, dst
, src
, nbytes
);
173 memcpy(iv
, tmp
, ivsize
);
178 return crypt(desc
, dst
, src
, nbytes
);
181 static unsigned int cbc_process_encrypt(const struct cipher_desc
*desc
,
182 u8
*dst
, const u8
*src
,
185 struct crypto_tfm
*tfm
= desc
->tfm
;
186 void (*xor)(u8
*, const u8
*) = tfm
->crt_u
.cipher
.cit_xor_block
;
187 int bsize
= crypto_tfm_alg_blocksize(tfm
);
189 void (*fn
)(struct crypto_tfm
*, u8
*, const u8
*) = desc
->crfn
;
191 unsigned int done
= 0;
198 memcpy(iv
, dst
, bsize
);
202 } while ((done
+= bsize
) <= nbytes
);
207 static unsigned int cbc_process_decrypt(const struct cipher_desc
*desc
,
208 u8
*dst
, const u8
*src
,
211 struct crypto_tfm
*tfm
= desc
->tfm
;
212 void (*xor)(u8
*, const u8
*) = tfm
->crt_u
.cipher
.cit_xor_block
;
213 int bsize
= crypto_tfm_alg_blocksize(tfm
);
214 unsigned long alignmask
= crypto_tfm_alg_alignmask(desc
->tfm
);
216 u8 stack
[src
== dst
? bsize
+ alignmask
: 0];
217 u8
*buf
= (u8
*)ALIGN((unsigned long)stack
, alignmask
+ 1);
218 u8
**dst_p
= src
== dst
? &buf
: &dst
;
220 void (*fn
)(struct crypto_tfm
*, u8
*, const u8
*) = desc
->crfn
;
222 unsigned int done
= 0;
227 u8
*tmp_dst
= *dst_p
;
229 fn(tfm
, tmp_dst
, src
);
231 memcpy(iv
, src
, bsize
);
233 memcpy(dst
, tmp_dst
, bsize
);
237 } while ((done
+= bsize
) <= nbytes
);
242 static unsigned int ecb_process(const struct cipher_desc
*desc
, u8
*dst
,
243 const u8
*src
, unsigned int nbytes
)
245 struct crypto_tfm
*tfm
= desc
->tfm
;
246 int bsize
= crypto_tfm_alg_blocksize(tfm
);
247 void (*fn
)(struct crypto_tfm
*, u8
*, const u8
*) = desc
->crfn
;
248 unsigned int done
= 0;
257 } while ((done
+= bsize
) <= nbytes
);
262 static int setkey(struct crypto_tfm
*tfm
, const u8
*key
, unsigned int keylen
)
264 struct cipher_alg
*cia
= &tfm
->__crt_alg
->cra_cipher
;
266 tfm
->crt_flags
&= ~CRYPTO_TFM_RES_MASK
;
267 if (keylen
< cia
->cia_min_keysize
|| keylen
> cia
->cia_max_keysize
) {
268 tfm
->crt_flags
|= CRYPTO_TFM_RES_BAD_KEY_LEN
;
271 return cia
->cia_setkey(tfm
, key
, keylen
);
274 static int ecb_encrypt(struct crypto_tfm
*tfm
,
275 struct scatterlist
*dst
,
276 struct scatterlist
*src
, unsigned int nbytes
)
278 struct cipher_desc desc
;
279 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
282 desc
.crfn
= cipher
->cia_encrypt
;
283 desc
.prfn
= cipher
->cia_encrypt_ecb
?: ecb_process
;
285 return crypt(&desc
, dst
, src
, nbytes
);
288 static int ecb_decrypt(struct crypto_tfm
*tfm
,
289 struct scatterlist
*dst
,
290 struct scatterlist
*src
,
293 struct cipher_desc desc
;
294 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
297 desc
.crfn
= cipher
->cia_decrypt
;
298 desc
.prfn
= cipher
->cia_decrypt_ecb
?: ecb_process
;
300 return crypt(&desc
, dst
, src
, nbytes
);
303 static int cbc_encrypt(struct crypto_tfm
*tfm
,
304 struct scatterlist
*dst
,
305 struct scatterlist
*src
,
308 struct cipher_desc desc
;
309 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
312 desc
.crfn
= cipher
->cia_encrypt
;
313 desc
.prfn
= cipher
->cia_encrypt_cbc
?: cbc_process_encrypt
;
314 desc
.info
= tfm
->crt_cipher
.cit_iv
;
316 return crypt(&desc
, dst
, src
, nbytes
);
319 static int cbc_encrypt_iv(struct crypto_tfm
*tfm
,
320 struct scatterlist
*dst
,
321 struct scatterlist
*src
,
322 unsigned int nbytes
, u8
*iv
)
324 struct cipher_desc desc
;
325 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
328 desc
.crfn
= cipher
->cia_encrypt
;
329 desc
.prfn
= cipher
->cia_encrypt_cbc
?: cbc_process_encrypt
;
332 return crypt_iv_unaligned(&desc
, dst
, src
, nbytes
);
335 static int cbc_decrypt(struct crypto_tfm
*tfm
,
336 struct scatterlist
*dst
,
337 struct scatterlist
*src
,
340 struct cipher_desc desc
;
341 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
344 desc
.crfn
= cipher
->cia_decrypt
;
345 desc
.prfn
= cipher
->cia_decrypt_cbc
?: cbc_process_decrypt
;
346 desc
.info
= tfm
->crt_cipher
.cit_iv
;
348 return crypt(&desc
, dst
, src
, nbytes
);
351 static int cbc_decrypt_iv(struct crypto_tfm
*tfm
,
352 struct scatterlist
*dst
,
353 struct scatterlist
*src
,
354 unsigned int nbytes
, u8
*iv
)
356 struct cipher_desc desc
;
357 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
360 desc
.crfn
= cipher
->cia_decrypt
;
361 desc
.prfn
= cipher
->cia_decrypt_cbc
?: cbc_process_decrypt
;
364 return crypt_iv_unaligned(&desc
, dst
, src
, nbytes
);
367 static int nocrypt(struct crypto_tfm
*tfm
,
368 struct scatterlist
*dst
,
369 struct scatterlist
*src
,
375 static int nocrypt_iv(struct crypto_tfm
*tfm
,
376 struct scatterlist
*dst
,
377 struct scatterlist
*src
,
378 unsigned int nbytes
, u8
*iv
)
383 int crypto_init_cipher_flags(struct crypto_tfm
*tfm
, u32 flags
)
385 u32 mode
= flags
& CRYPTO_TFM_MODE_MASK
;
386 tfm
->crt_cipher
.cit_mode
= mode
? mode
: CRYPTO_TFM_MODE_ECB
;
390 static void cipher_crypt_unaligned(void (*fn
)(struct crypto_tfm
*, u8
*,
392 struct crypto_tfm
*tfm
,
393 u8
*dst
, const u8
*src
)
395 unsigned long alignmask
= crypto_tfm_alg_alignmask(tfm
);
396 unsigned int size
= crypto_tfm_alg_blocksize(tfm
);
397 u8 buffer
[size
+ alignmask
];
398 u8
*tmp
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
400 memcpy(tmp
, src
, size
);
402 memcpy(dst
, tmp
, size
);
405 static void cipher_encrypt_unaligned(struct crypto_tfm
*tfm
,
406 u8
*dst
, const u8
*src
)
408 unsigned long alignmask
= crypto_tfm_alg_alignmask(tfm
);
409 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
411 if (unlikely(((unsigned long)dst
| (unsigned long)src
) & alignmask
)) {
412 cipher_crypt_unaligned(cipher
->cia_encrypt
, tfm
, dst
, src
);
416 cipher
->cia_encrypt(tfm
, dst
, src
);
419 static void cipher_decrypt_unaligned(struct crypto_tfm
*tfm
,
420 u8
*dst
, const u8
*src
)
422 unsigned long alignmask
= crypto_tfm_alg_alignmask(tfm
);
423 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
425 if (unlikely(((unsigned long)dst
| (unsigned long)src
) & alignmask
)) {
426 cipher_crypt_unaligned(cipher
->cia_decrypt
, tfm
, dst
, src
);
430 cipher
->cia_decrypt(tfm
, dst
, src
);
433 int crypto_init_cipher_ops(struct crypto_tfm
*tfm
)
436 struct cipher_tfm
*ops
= &tfm
->crt_cipher
;
437 struct cipher_alg
*cipher
= &tfm
->__crt_alg
->cra_cipher
;
439 ops
->cit_setkey
= setkey
;
440 ops
->cit_encrypt_one
= crypto_tfm_alg_alignmask(tfm
) ?
441 cipher_encrypt_unaligned
: cipher
->cia_encrypt
;
442 ops
->cit_decrypt_one
= crypto_tfm_alg_alignmask(tfm
) ?
443 cipher_decrypt_unaligned
: cipher
->cia_decrypt
;
445 switch (tfm
->crt_cipher
.cit_mode
) {
446 case CRYPTO_TFM_MODE_ECB
:
447 ops
->cit_encrypt
= ecb_encrypt
;
448 ops
->cit_decrypt
= ecb_decrypt
;
449 ops
->cit_encrypt_iv
= nocrypt_iv
;
450 ops
->cit_decrypt_iv
= nocrypt_iv
;
453 case CRYPTO_TFM_MODE_CBC
:
454 ops
->cit_encrypt
= cbc_encrypt
;
455 ops
->cit_decrypt
= cbc_decrypt
;
456 ops
->cit_encrypt_iv
= cbc_encrypt_iv
;
457 ops
->cit_decrypt_iv
= cbc_decrypt_iv
;
460 case CRYPTO_TFM_MODE_CFB
:
461 ops
->cit_encrypt
= nocrypt
;
462 ops
->cit_decrypt
= nocrypt
;
463 ops
->cit_encrypt_iv
= nocrypt_iv
;
464 ops
->cit_decrypt_iv
= nocrypt_iv
;
467 case CRYPTO_TFM_MODE_CTR
:
468 ops
->cit_encrypt
= nocrypt
;
469 ops
->cit_decrypt
= nocrypt
;
470 ops
->cit_encrypt_iv
= nocrypt_iv
;
471 ops
->cit_decrypt_iv
= nocrypt_iv
;
478 if (ops
->cit_mode
== CRYPTO_TFM_MODE_CBC
) {
482 switch (crypto_tfm_alg_blocksize(tfm
)) {
484 ops
->cit_xor_block
= xor_64
;
488 ops
->cit_xor_block
= xor_128
;
492 printk(KERN_WARNING
"%s: block size %u not supported\n",
493 crypto_tfm_alg_name(tfm
),
494 crypto_tfm_alg_blocksize(tfm
));
499 ops
->cit_ivsize
= crypto_tfm_alg_blocksize(tfm
);
500 align
= crypto_tfm_alg_alignmask(tfm
) + 1;
501 addr
= (unsigned long)crypto_tfm_ctx(tfm
);
502 addr
= ALIGN(addr
, align
);
503 addr
+= ALIGN(tfm
->__crt_alg
->cra_ctxsize
, align
);
504 ops
->cit_iv
= (void *)addr
;
511 void crypto_exit_cipher_ops(struct crypto_tfm
*tfm
)