crypto/ctr.c
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypto_ctr_ctx {
	struct crypto_cipher *child;
};

struct crypto_rfc3686_ctx {
	struct crypto_blkcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

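/*
 * Forward the key to the underlying block cipher, mirroring the request
 * flags into the child and reporting the child's result flags back to
 * the parent transform.
 */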
static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);

	return err;
}

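/*
 * Handle the trailing partial block: encrypt the counter into an aligned
 * keystream buffer on the stack, XOR only the remaining nbytes with the
 * source and copy the result to the destination.
 */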
static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	crypto_inc(ctrblk, bsize);
}

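/*
 * Out-of-place processing of full blocks: the counter is encrypted
 * directly into the destination buffer and then XORed with the source,
 * incrementing the counter block after each block.
 */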
static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

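/*
 * In-place processing of full blocks: the keystream is generated in an
 * aligned stack buffer and XORed into the data, since source and
 * destination share the same memory.
 */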
static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

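/*
 * Top-level encrypt/decrypt routine (identical in CTR mode): walk the
 * scatterlists block by block, dispatching to the in-place or
 * out-of-place helper, and finish any partial block at the end.
 */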
static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

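/*
 * Instantiate "ctr(cipher)": the result is presented as a stream cipher
 * (block size 1) whose IV is the underlying cipher's block-sized counter
 * block. The underlying block size must be at least four bytes and a
 * multiple of four so the alignment requirements of crypto_inc() hold.
 */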
static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
			      CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_PTR(PTR_ERR(alg));

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_put_alg;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_put_alg;

	inst = crypto_alloc_instance("ctr", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

out:
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}

static void crypto_ctr_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

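/*
 * RFC 3686 keys carry a 4-byte nonce appended to the cipher key proper.
 * Strip the nonce off, store it in the context and hand the remaining
 * key bytes to the underlying CTR transform.
 */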
static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
				   CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);

	return err;
}

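/*
 * Build the RFC 3686 counter block on the stack (4-byte nonce, 8-byte
 * per-request IV, 32-bit big-endian block counter starting at 1) and
 * run the underlying CTR transform with it as the IV.
 */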
static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_blkcipher *child = ctx->child;
	unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
	u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
	u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
	u8 *info = desc->info;
	int err;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	desc->tfm = child;
	desc->info = iv;
	err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
	desc->tfm = tfm;
	desc->info = info;

	return err;
}

static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

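/*
 * Instantiate "rfc3686(ctr(cipher))": the wrapped algorithm must look
 * like a stream cipher (block size 1) with a 16-byte IV, and the
 * resulting transform exposes an 8-byte IV plus a key that is four
 * bytes longer to accommodate the nonce.
 */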
static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
			      CRYPTO_ALG_TYPE_MASK);
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		return ERR_PTR(err);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
		goto out_put_alg;

	/* Not a stream cipher? */
	if (alg->cra_blocksize != 1)
		goto out_put_alg;

	inst = crypto_alloc_instance("rfc3686", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
					      + CTR_RFC3686_NONCE_SIZE;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
					      + CTR_RFC3686_NONCE_SIZE;

	inst->alg.cra_blkcipher.geniv = "seqiv";

	inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.cra_init = crypto_rfc3686_init_tfm;
	inst->alg.cra_exit = crypto_rfc3686_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;

out:
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_template crypto_rfc3686_tmpl = {
	.name = "rfc3686",
	.alloc = crypto_rfc3686_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

static int __init crypto_ctr_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_ctr_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_rfc3686_tmpl);
	if (err)
		goto out_drop_ctr;

out:
	return err;

out_drop_ctr:
	crypto_unregister_template(&crypto_ctr_tmpl);
	goto out;
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");
MODULE_ALIAS("rfc3686");