/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *	Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */


#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/fpu/api.h>
#include <linux/string.h>

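/*
 * The three transform routines below are implemented in assembler
 * (sha256-ssse3-asm.S, sha256-avx-asm.S and sha256-avx2-asm.S in this
 * directory); each consumes 'rounds' complete 64-byte input blocks and
 * updates the eight-word digest state in place.
 */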
asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
				       u64 rounds);
#ifdef CONFIG_AS_AVX
asmlinkage void sha256_transform_avx(u32 *digest, const char *data,
				     u64 rounds);
#endif
#ifdef CONFIG_AS_AVX2
asmlinkage void sha256_transform_rorx(u32 *digest, const char *data,
				      u64 rounds);
#endif

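/*
 * Pointer to the fastest usable transform, chosen once at module init
 * based on the detected CPU features (SSSE3, then AVX, then AVX2).
 */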
static void (*sha256_transform_asm)(u32 *, const char *, u64);

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

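	/*
	 * Fall back to the generic C implementation when the FPU is not
	 * usable in this context, or when the buffered bytes plus the
	 * new data still do not fill a complete 64-byte block: the
	 * assembler routines only operate on whole blocks.
	 */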
	if (!irq_fpu_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/*
	 * Make sure casting to sha256_block_fn() is safe: the assembler
	 * routines take a u32 digest pointer where sha256_block_fn takes
	 * a struct sha256_state pointer, so 'state' must be the first
	 * member of the struct.
	 */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len,
			      (sha256_block_fn *)sha256_transform_asm);
	kernel_fpu_end();

	return 0;
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	if (!irq_fpu_usable())
		return crypto_sha256_finup(desc, data, len, out);

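	/* Hash any trailing data and the final padding in one FPU section. */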
	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len,
				      (sha256_block_fn *)sha256_transform_asm);
	sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_transform_asm);
	kernel_fpu_end();

	return sha256_base_finish(desc, out);
}

/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ssse3",
		.cra_priority	 = 150,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ssse3",
		.cra_priority	 = 150,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };
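/*
 * Usage sketch (illustrative only, not part of this module, error
 * handling omitted): kernel consumers reach these implementations by
 * name through the generic shash API, e.g.:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, digest);
 *	crypto_free_shash(tfm);
 *
 * The crypto core selects the highest-priority "sha256" registration,
 * so this driver (cra_priority 150) is preferred over the generic C
 * implementation once loaded.
 */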

#ifdef CONFIG_AS_AVX
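/*
 * A CPU can report AVX support while the OS has not enabled saving of
 * the YMM state, in which case the AVX registers must not be used.
 * Check the enabled xfeatures rather than the AVX CPUID bit alone.
 */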
static bool __init avx_usable(void)
{
	if (!cpu_has_xfeatures(XSTATE_SSE | XSTATE_YMM, NULL)) {
		if (cpu_has_avx)
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}
#endif

static int __init sha256_ssse3_mod_init(void)
{
	/* test for SSSE3 first */
	if (cpu_has_ssse3)
		sha256_transform_asm = sha256_transform_ssse3;

#ifdef CONFIG_AS_AVX
	/* allow AVX to override SSSE3, it's a little faster */
	if (avx_usable()) {
#ifdef CONFIG_AS_AVX2
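		/*
		 * The RORX variant also uses the BMI2 rorx instruction,
		 * so both feature flags must be present.
		 */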
		if (boot_cpu_has(X86_FEATURE_AVX2) &&
		    boot_cpu_has(X86_FEATURE_BMI2))
			sha256_transform_asm = sha256_transform_rorx;
		else
#endif
			sha256_transform_asm = sha256_transform_avx;
	}
#endif

	if (sha256_transform_asm) {
#ifdef CONFIG_AS_AVX
		if (sha256_transform_asm == sha256_transform_avx)
			pr_info("Using AVX optimized SHA-256 implementation\n");
#ifdef CONFIG_AS_AVX2
		else if (sha256_transform_asm == sha256_transform_rorx)
			pr_info("Using AVX2 optimized SHA-256 implementation\n");
#endif
		else
#endif
			pr_info("Using SSSE3 optimized SHA-256 implementation\n");
		return crypto_register_shashes(algs, ARRAY_SIZE(algs));
	}
	pr_info("Neither AVX nor SSSE3 is available/usable.\n");

	return -ENODEV;
}

static void __exit sha256_ssse3_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha224");