sha256_ssse3_glue.c

/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *     Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/fpu/api.h>
#include <linux/string.h>
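
/*
 * All of the assembler transforms share one signature: the SHA-256 state
 * words, a pointer to the input, and a count of 64-byte blocks to process
 * (the parameter is named "rounds" but receives the block count from the
 * sha256_base_do_update()/do_finalize() helpers).  A single typedef lets
 * the common update/finup helpers below dispatch to whichever variant the
 * CPU supports.
 */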
asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
                                       u64 rounds);

typedef void (sha256_transform_fn)(u32 *digest, const char *data, u64 rounds);
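
/*
 * Fall back to the generic, FPU-free C implementation when the FPU state
 * cannot be touched (e.g. in a hard interrupt), or when the buffered
 * partial block plus the new data still does not fill a complete 64-byte
 * block, so the kernel_fpu_begin()/kernel_fpu_end() cost is not paid for
 * tiny updates.
 */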
static int sha256_update(struct shash_desc *desc, const u8 *data,
                         unsigned int len, sha256_transform_fn *sha256_xform)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        if (!irq_fpu_usable() ||
            (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
                return crypto_sha256_update(desc, data, len);

        /* make sure casting to sha256_block_fn() is safe */
        BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

        kernel_fpu_begin();
        sha256_base_do_update(desc, data, len,
                              (sha256_block_fn *)sha256_xform);
        kernel_fpu_end();

        return 0;
}
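
/*
 * Finalization runs the trailing update and the padding/length block under
 * a single kernel_fpu_begin()/kernel_fpu_end() pair; if the FPU is not
 * usable it defers to the generic implementation entirely.
 */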
static int sha256_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out,
                        sha256_transform_fn *sha256_xform)
{
        if (!irq_fpu_usable())
                return crypto_sha256_finup(desc, data, len, out);

        kernel_fpu_begin();
        if (len)
                sha256_base_do_update(desc, data, len,
                                      (sha256_block_fn *)sha256_xform);
        sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_xform);
        kernel_fpu_end();

        return sha256_base_finish(desc, out);
}

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
                               unsigned int len)
{
        return sha256_update(desc, data, len, sha256_transform_ssse3);
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
                              unsigned int len, u8 *out)
{
        return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
        return sha256_ssse3_finup(desc, NULL, 0, out);
}
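
/*
 * Each variant registers both sha256 and sha224 (same compression
 * function, different initial state and digest size).  cra_priority ranks
 * the variants -- SSSE3 (150) < AVX (160) < AVX2 (170) < SHA-NI (250) --
 * so the crypto core picks the fastest implementation available.
 */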
static struct shash_alg sha256_ssse3_algs[] = { {
        .digestsize = SHA256_DIGEST_SIZE,
        .init       = sha256_base_init,
        .update     = sha256_ssse3_update,
        .final      = sha256_ssse3_final,
        .finup      = sha256_ssse3_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha256",
                .cra_driver_name = "sha256-ssse3",
                .cra_priority    = 150,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA256_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize = SHA224_DIGEST_SIZE,
        .init       = sha224_base_init,
        .update     = sha256_ssse3_update,
        .final      = sha256_ssse3_final,
        .finup      = sha256_ssse3_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha224",
                .cra_driver_name = "sha224-ssse3",
                .cra_priority    = 150,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA224_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };

static int register_sha256_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                return crypto_register_shashes(sha256_ssse3_algs,
                                ARRAY_SIZE(sha256_ssse3_algs));
        return 0;
}

static void unregister_sha256_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shashes(sha256_ssse3_algs,
                                ARRAY_SIZE(sha256_ssse3_algs));
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha256_transform_avx(u32 *digest, const char *data,
                                     u64 rounds);

static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha256_update(desc, data, len, sha256_transform_avx);
}

static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
                            unsigned int len, u8 *out)
{
        return sha256_finup(desc, data, len, out, sha256_transform_avx);
}

static int sha256_avx_final(struct shash_desc *desc, u8 *out)
{
        return sha256_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx_algs[] = { {
        .digestsize = SHA256_DIGEST_SIZE,
        .init       = sha256_base_init,
        .update     = sha256_avx_update,
        .final      = sha256_avx_final,
        .finup      = sha256_avx_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha256",
                .cra_driver_name = "sha256-avx",
                .cra_priority    = 160,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA256_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize = SHA224_DIGEST_SIZE,
        .init       = sha224_base_init,
        .update     = sha256_avx_update,
        .final      = sha256_avx_final,
        .finup      = sha256_avx_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha224",
                .cra_driver_name = "sha224-avx",
                .cra_priority    = 160,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA224_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };
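
/*
 * The CPUID AVX bit alone is not enough: the OS must also have enabled
 * saving of the SSE and YMM register state via XSAVE, otherwise executing
 * AVX instructions would clobber state the kernel does not preserve.
 */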
static bool avx_usable(void)
{
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
                if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }

        return true;
}

static int register_sha256_avx(void)
{
        if (avx_usable())
                return crypto_register_shashes(sha256_avx_algs,
                                ARRAY_SIZE(sha256_avx_algs));
        return 0;
}

static void unregister_sha256_avx(void)
{
        if (avx_usable())
                crypto_unregister_shashes(sha256_avx_algs,
                                ARRAY_SIZE(sha256_avx_algs));
}
#else
static inline int register_sha256_avx(void) { return 0; }
static inline void unregister_sha256_avx(void) { }
#endif

#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
asmlinkage void sha256_transform_rorx(u32 *digest, const char *data,
                                      u64 rounds);

static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
                              unsigned int len)
{
        return sha256_update(desc, data, len, sha256_transform_rorx);
}

static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
                             unsigned int len, u8 *out)
{
        return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}

static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
{
        return sha256_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx2_algs[] = { {
        .digestsize = SHA256_DIGEST_SIZE,
        .init       = sha256_base_init,
        .update     = sha256_avx2_update,
        .final      = sha256_avx2_final,
        .finup      = sha256_avx2_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha256",
                .cra_driver_name = "sha256-avx2",
                .cra_priority    = 170,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA256_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize = SHA224_DIGEST_SIZE,
        .init       = sha224_base_init,
        .update     = sha256_avx2_update,
        .final      = sha256_avx2_final,
        .finup      = sha256_avx2_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha224",
                .cra_driver_name = "sha224-avx2",
                .cra_priority    = 170,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA224_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };
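
/*
 * The AVX2 code path is named after the BMI2 rorx rotate instruction it
 * relies on, so BMI2 support is required in addition to AVX2 itself.
 */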
static bool avx2_usable(void)
{
        if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
            boot_cpu_has(X86_FEATURE_BMI2))
                return true;

        return false;
}

static int register_sha256_avx2(void)
{
        if (avx2_usable())
                return crypto_register_shashes(sha256_avx2_algs,
                                ARRAY_SIZE(sha256_avx2_algs));
        return 0;
}

static void unregister_sha256_avx2(void)
{
        if (avx2_usable())
                crypto_unregister_shashes(sha256_avx2_algs,
                                ARRAY_SIZE(sha256_avx2_algs));
}
#else
static inline int register_sha256_avx2(void) { return 0; }
static inline void unregister_sha256_avx2(void) { }
#endif
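
/*
 * Intel SHA extensions (SHA-NI) provide dedicated SHA-256 instructions
 * (sha256rnds2, sha256msg1, sha256msg2), hence the highest priority of
 * the four variants.
 */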
#ifdef CONFIG_AS_SHA256_NI
asmlinkage void sha256_ni_transform(u32 *digest, const char *data,
                                    u64 rounds);

static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
                            unsigned int len)
{
        return sha256_update(desc, data, len, sha256_ni_transform);
}

static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
                           unsigned int len, u8 *out)
{
        return sha256_finup(desc, data, len, out, sha256_ni_transform);
}

static int sha256_ni_final(struct shash_desc *desc, u8 *out)
{
        return sha256_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ni_algs[] = { {
        .digestsize = SHA256_DIGEST_SIZE,
        .init       = sha256_base_init,
        .update     = sha256_ni_update,
        .final      = sha256_ni_final,
        .finup      = sha256_ni_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha256",
                .cra_driver_name = "sha256-ni",
                .cra_priority    = 250,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA256_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize = SHA224_DIGEST_SIZE,
        .init       = sha224_base_init,
        .update     = sha256_ni_update,
        .final      = sha256_ni_final,
        .finup      = sha256_ni_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha224",
                .cra_driver_name = "sha224-ni",
                .cra_priority    = 250,
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA224_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };

static int register_sha256_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                return crypto_register_shashes(sha256_ni_algs,
                                ARRAY_SIZE(sha256_ni_algs));
        return 0;
}

static void unregister_sha256_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                crypto_unregister_shashes(sha256_ni_algs,
                                ARRAY_SIZE(sha256_ni_algs));
}
#else
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
#endif
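
/*
 * Register every variant the CPU supports; the crypto core then selects
 * among them by cra_priority.  If any registration fails, unwind the
 * variants already registered and fail the module load with -ENODEV.
 * Note that each register_*() helper returns 0 when its CPU feature is
 * absent, so the module still loads on CPUs without SSSE3.
 */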
static int __init sha256_ssse3_mod_init(void)
{
        if (register_sha256_ssse3())
                goto fail;

        if (register_sha256_avx()) {
                unregister_sha256_ssse3();
                goto fail;
        }

        if (register_sha256_avx2()) {
                unregister_sha256_avx();
                unregister_sha256_ssse3();
                goto fail;
        }

        if (register_sha256_ni()) {
                unregister_sha256_avx2();
                unregister_sha256_avx();
                unregister_sha256_ssse3();
                goto fail;
        }

        return 0;
fail:
        return -ENODEV;
}

static void __exit sha256_ssse3_mod_fini(void)
{
        unregister_sha256_ni();
        unregister_sha256_avx2();
        unregister_sha256_avx();
        unregister_sha256_ssse3();
}

module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha224");