/*
 * Cryptographic API.
 *
 * Glue code for the SHA512 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha512_generic.c
 *
 * Copyright (C) 2013 Intel Corporation
 * Author: Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/string.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha512_base.h>
#include <asm/simd.h>

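/*
 * The assembler routines take the eight 64-bit SHA-512 hash words, the
 * input data, and the number of 128-byte blocks to process.  The first
 * argument is u64 * rather than struct sha512_state *, so the casts to
 * sha512_block_fn() below are only safe because the hash words sit at
 * offset 0 of the state structure (checked by the BUILD_BUG_ON() in
 * sha512_update()).
 */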
asmlinkage void sha512_transform_ssse3(u64 *digest, const char *data,
                                       u64 rounds);

typedef void (sha512_transform_fn)(u64 *digest, const char *data, u64 rounds);

static int sha512_update(struct shash_desc *desc, const u8 *data,
                         unsigned int len, sha512_transform_fn *sha512_xform)
{
        struct sha512_state *sctx = shash_desc_ctx(desc);

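        /*
         * Fall back to the generic implementation when the FPU cannot be
         * used in this context, or when the new data does not complete a
         * full block and can simply be buffered.
         */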
        if (!crypto_simd_usable() ||
            (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE)
                return crypto_sha512_update(desc, data, len);

        /* make sure casting to sha512_block_fn() is safe */
        BUILD_BUG_ON(offsetof(struct sha512_state, state) != 0);

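        /*
         * SIMD registers may only be touched between kernel_fpu_begin()
         * and kernel_fpu_end(), which also disables preemption.
         */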
        kernel_fpu_begin();
        sha512_base_do_update(desc, data, len,
                              (sha512_block_fn *)sha512_xform);
        kernel_fpu_end();

        return 0;
}

static int sha512_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out,
                        sha512_transform_fn *sha512_xform)
{
        if (!crypto_simd_usable())
                return crypto_sha512_finup(desc, data, len, out);

        kernel_fpu_begin();
        if (len)
                sha512_base_do_update(desc, data, len,
                                      (sha512_block_fn *)sha512_xform);
        sha512_base_do_finalize(desc, (sha512_block_fn *)sha512_xform);
        kernel_fpu_end();

        return sha512_base_finish(desc, out);
}

static int sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
                               unsigned int len)
{
        return sha512_update(desc, data, len, sha512_transform_ssse3);
}

static int sha512_ssse3_finup(struct shash_desc *desc, const u8 *data,
                              unsigned int len, u8 *out)
{
        return sha512_finup(desc, data, len, out, sha512_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha512_ssse3_final(struct shash_desc *desc, u8 *out)
{
        return sha512_ssse3_finup(desc, NULL, 0, out);
}

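/*
 * The crypto core selects the registered implementation with the highest
 * cra_priority for a given algorithm name, so the SSSE3 (150), AVX (160)
 * and AVX2 (170) variants take precedence over the generic C
 * implementation while still yielding to any faster driver registered
 * with a higher priority.
 */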
static struct shash_alg sha512_ssse3_algs[] = { {
        .digestsize     = SHA512_DIGEST_SIZE,
        .init           = sha512_base_init,
        .update         = sha512_ssse3_update,
        .final          = sha512_ssse3_final,
        .finup          = sha512_ssse3_finup,
        .descsize       = sizeof(struct sha512_state),
        .base           = {
                .cra_name        = "sha512",
                .cra_driver_name = "sha512-ssse3",
                .cra_priority    = 150,
                .cra_blocksize   = SHA512_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize     = SHA384_DIGEST_SIZE,
        .init           = sha384_base_init,
        .update         = sha512_ssse3_update,
        .final          = sha512_ssse3_final,
        .finup          = sha512_ssse3_finup,
        .descsize       = sizeof(struct sha512_state),
        .base           = {
                .cra_name        = "sha384",
                .cra_driver_name = "sha384-ssse3",
                .cra_priority    = 150,
                .cra_blocksize   = SHA384_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };

static int register_sha512_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                return crypto_register_shashes(sha512_ssse3_algs,
                        ARRAY_SIZE(sha512_ssse3_algs));
        return 0;
}

static void unregister_sha512_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shashes(sha512_ssse3_algs,
                        ARRAY_SIZE(sha512_ssse3_algs));
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha512_transform_avx(u64 *digest, const char *data,
                                     u64 rounds);
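
/*
 * AVX can only be used when the kernel has XSAVE support enabled for
 * both the SSE and YMM register state; the CPUID AVX feature bit alone
 * does not guarantee that the OS will save the YMM registers.
 */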
static bool avx_usable(void)
{
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
                if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }

        return true;
}

static int sha512_avx_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha512_update(desc, data, len, sha512_transform_avx);
}

static int sha512_avx_finup(struct shash_desc *desc, const u8 *data,
                            unsigned int len, u8 *out)
{
        return sha512_finup(desc, data, len, out, sha512_transform_avx);
}

/* Add padding and return the message digest. */
static int sha512_avx_final(struct shash_desc *desc, u8 *out)
{
        return sha512_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx_algs[] = { {
        .digestsize     = SHA512_DIGEST_SIZE,
        .init           = sha512_base_init,
        .update         = sha512_avx_update,
        .final          = sha512_avx_final,
        .finup          = sha512_avx_finup,
        .descsize       = sizeof(struct sha512_state),
        .base           = {
                .cra_name        = "sha512",
                .cra_driver_name = "sha512-avx",
                .cra_priority    = 160,
                .cra_blocksize   = SHA512_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize     = SHA384_DIGEST_SIZE,
        .init           = sha384_base_init,
        .update         = sha512_avx_update,
        .final          = sha512_avx_final,
        .finup          = sha512_avx_finup,
        .descsize       = sizeof(struct sha512_state),
        .base           = {
                .cra_name        = "sha384",
                .cra_driver_name = "sha384-avx",
                .cra_priority    = 160,
                .cra_blocksize   = SHA384_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };

static int register_sha512_avx(void)
{
        if (avx_usable())
                return crypto_register_shashes(sha512_avx_algs,
                        ARRAY_SIZE(sha512_avx_algs));
        return 0;
}

static void unregister_sha512_avx(void)
{
        if (avx_usable())
                crypto_unregister_shashes(sha512_avx_algs,
                        ARRAY_SIZE(sha512_avx_algs));
}
#else
static inline int register_sha512_avx(void) { return 0; }
static inline void unregister_sha512_avx(void) { }
#endif

#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
asmlinkage void sha512_transform_rorx(u64 *digest, const char *data,
                                      u64 rounds);

static int sha512_avx2_update(struct shash_desc *desc, const u8 *data,
                              unsigned int len)
{
        return sha512_update(desc, data, len, sha512_transform_rorx);
}

static int sha512_avx2_finup(struct shash_desc *desc, const u8 *data,
                             unsigned int len, u8 *out)
{
        return sha512_finup(desc, data, len, out, sha512_transform_rorx);
}

/* Add padding and return the message digest. */
static int sha512_avx2_final(struct shash_desc *desc, u8 *out)
{
        return sha512_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx2_algs[] = { {
        .digestsize     = SHA512_DIGEST_SIZE,
        .init           = sha512_base_init,
        .update         = sha512_avx2_update,
        .final          = sha512_avx2_final,
        .finup          = sha512_avx2_finup,
        .descsize       = sizeof(struct sha512_state),
        .base           = {
                .cra_name        = "sha512",
                .cra_driver_name = "sha512-avx2",
                .cra_priority    = 170,
                .cra_blocksize   = SHA512_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize     = SHA384_DIGEST_SIZE,
        .init           = sha384_base_init,
        .update         = sha512_avx2_update,
        .final           = sha512_avx2_final,
        .finup          = sha512_avx2_finup,
        .descsize       = sizeof(struct sha512_state),
        .base           = {
                .cra_name        = "sha384",
                .cra_driver_name = "sha384-avx2",
                .cra_priority    = 170,
                .cra_blocksize   = SHA384_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };

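/*
 * The AVX2 code path also relies on the BMI2 RORX instruction (hence the
 * name sha512_transform_rorx), so check for BMI2 in addition to AVX2 and
 * the AVX/XSAVE prerequisites.
 */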
static bool avx2_usable(void)
{
        if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
            boot_cpu_has(X86_FEATURE_BMI2))
                return true;

        return false;
}

static int register_sha512_avx2(void)
{
        if (avx2_usable())
                return crypto_register_shashes(sha512_avx2_algs,
                        ARRAY_SIZE(sha512_avx2_algs));
        return 0;
}

static void unregister_sha512_avx2(void)
{
        if (avx2_usable())
                crypto_unregister_shashes(sha512_avx2_algs,
                        ARRAY_SIZE(sha512_avx2_algs));
}
#else
static inline int register_sha512_avx2(void) { return 0; }
static inline void unregister_sha512_avx2(void) { }
#endif

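/*
 * Register the SSSE3, AVX and AVX2 variants in turn.  If a later
 * registration fails, unregister the variants that were already
 * registered before failing the module load.
 */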
static int __init sha512_ssse3_mod_init(void)
{
        if (register_sha512_ssse3())
                goto fail;

        if (register_sha512_avx()) {
                unregister_sha512_ssse3();
                goto fail;
        }

        if (register_sha512_avx2()) {
                unregister_sha512_avx();
                unregister_sha512_ssse3();
                goto fail;
        }

        return 0;
fail:
        return -ENODEV;
}

static void __exit sha512_ssse3_mod_fini(void)
{
        unregister_sha512_avx2();
        unregister_sha512_avx();
        unregister_sha512_ssse3();
}

module_init(sha512_ssse3_mod_init);
module_exit(sha512_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA512 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha512");
MODULE_ALIAS_CRYPTO("sha512-ssse3");
MODULE_ALIAS_CRYPTO("sha512-avx");
MODULE_ALIAS_CRYPTO("sha512-avx2");
MODULE_ALIAS_CRYPTO("sha384");
MODULE_ALIAS_CRYPTO("sha384-ssse3");
MODULE_ALIAS_CRYPTO("sha384-avx");
MODULE_ALIAS_CRYPTO("sha384-avx2");