/*
 * Shared glue code for 128-bit block ciphers
 */

#ifndef _CRYPTO_GLUE_HELPER_H
#define _CRYPTO_GLUE_HELPER_H

#include <linux/kernel.h>
#include <linux/crypto.h>
#include <asm/i387.h>
#include <crypto/b128ops.h>

typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
				       u128 *iv);

#define GLUE_FUNC_CAST(fn) ((common_glue_func_t)(fn))
#define GLUE_CBC_FUNC_CAST(fn) ((common_glue_cbc_func_t)(fn))
#define GLUE_CTR_FUNC_CAST(fn) ((common_glue_ctr_func_t)(fn))

struct common_glue_func_entry {
	unsigned int num_blocks; /* number of blocks that @fn will process */
	union {
		common_glue_func_t ecb;
		common_glue_cbc_func_t cbc;
		common_glue_ctr_func_t ctr;
	} fn_u;
};

struct common_glue_ctx {
	unsigned int num_funcs;
	int fpu_blocks_limit; /* -1 means fpu not needed at all */

	/*
	 * The first funcs entry must have the largest num_blocks and the
	 * last funcs entry must have num_blocks == 1!
	 */
	struct common_glue_func_entry funcs[];
};
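
/*
 * Illustrative sketch, not part of this header: a cipher driver would
 * typically describe its implementations with a static table like the
 * one below, widest (multi-block) variant first and the plain one-block
 * fallback last, as required by the comment above.  The function names
 * (mycipher_ecb_enc_8way, mycipher_encrypt) and the 8-block FPU limit
 * are hypothetical.
 *
 *	static const struct common_glue_ctx mycipher_enc = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,
 *
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .ecb = GLUE_FUNC_CAST(mycipher_ecb_enc_8way) }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .ecb = GLUE_FUNC_CAST(mycipher_encrypt) }
 *		} }
 *	};
 */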

static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
				  struct blkcipher_desc *desc,
				  bool fpu_enabled, unsigned int nbytes)
{
	if (likely(fpu_blocks_limit < 0))
		return false;

	if (fpu_enabled)
		return true;

	/*
	 * Vector registers are only used when the chunk to be processed is
	 * large enough, so do not enable the FPU until it is necessary.
	 */
	if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
		return false;

	if (desc) {
		/* prevent sleeping if FPU is in use */
		desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	}

	kernel_fpu_begin();
	return true;
}

static inline void glue_fpu_end(bool fpu_enabled)
{
	if (fpu_enabled)
		kernel_fpu_end();
}
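
/*
 * Illustrative sketch of the assumed usage pattern (this mirrors how the
 * glue_*_128bit helpers declared below drive these wrappers): the FPU is
 * enabled lazily, only once the remaining data is large enough, and is
 * released a single time after the walk.  'gctx', 'desc', 'bsize' and
 * 'nbytes' are assumed to come from the caller.
 *
 *	bool fpu_enabled = false;
 *
 *	while (nbytes) {
 *		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
 *					     desc, fpu_enabled, nbytes);
 *		// process this chunk with the widest suitable fn_u entry
 *		// then update nbytes to the size of the next chunk
 *	}
 *
 *	glue_fpu_end(fpu_enabled);
 */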

/* convert a u128 in CPU byte order to big-endian representation */
static inline void u128_to_be128(be128 *dst, const u128 *src)
{
	dst->a = cpu_to_be64(src->a);
	dst->b = cpu_to_be64(src->b);
}

/* convert a big-endian 128-bit value to CPU byte order */
static inline void be128_to_u128(u128 *dst, const be128 *src)
{
	dst->a = be64_to_cpu(src->a);
	dst->b = be64_to_cpu(src->b);
}

/* increment a 128-bit value, carrying from the low word into the high word */
static inline void u128_inc(u128 *i)
{
	i->b++;
	if (!i->b)
		i->a++;
}

extern int glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
				 struct blkcipher_desc *desc,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes);

extern int glue_cbc_encrypt_128bit(const common_glue_func_t fn,
				   struct blkcipher_desc *desc,
				   struct scatterlist *dst,
				   struct scatterlist *src,
				   unsigned int nbytes);

extern int glue_cbc_decrypt_128bit(const struct common_glue_ctx *gctx,
				   struct blkcipher_desc *desc,
				   struct scatterlist *dst,
				   struct scatterlist *src,
				   unsigned int nbytes);

extern int glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
				 struct blkcipher_desc *desc,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes);
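
/*
 * Illustrative sketch of the assumed wiring, not part of this header: a
 * driver's blkcipher ecb_encrypt() callback would normally be a thin
 * wrapper around glue_ecb_crypt_128bit(), passing a table like the
 * hypothetical 'mycipher_enc' sketched above.
 *
 *	static int ecb_encrypt(struct blkcipher_desc *desc,
 *			       struct scatterlist *dst,
 *			       struct scatterlist *src, unsigned int nbytes)
 *	{
 *		return glue_ecb_crypt_128bit(&mycipher_enc, desc, dst, src,
 *					     nbytes);
 *	}
 */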

#endif /* _CRYPTO_GLUE_HELPER_H */