sha1_ssse3_glue.c

/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha1_base.h>
#include <asm/fpu/api.h>

typedef void (sha1_transform_fn)(u32 *digest, const char *data,
                                 unsigned int rounds);
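
/*
 * Shared update path for all the transform variants below.  It falls back
 * to the generic C implementation when the FPU is not usable (e.g. when
 * called from interrupt context) or when the buffered bytes plus the new
 * data do not complete a full 64-byte block, so the cost of
 * kernel_fpu_begin()/kernel_fpu_end() is only paid when at least one
 * whole block can be hashed.
 */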
static int sha1_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len, sha1_transform_fn *sha1_xform)
{
        struct sha1_state *sctx = shash_desc_ctx(desc);

        if (!irq_fpu_usable() ||
            (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
                return crypto_sha1_update(desc, data, len);

        /* make sure casting to sha1_block_fn() is safe */
        BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

        kernel_fpu_begin();
        sha1_base_do_update(desc, data, len,
                            (sha1_block_fn *)sha1_xform);
        kernel_fpu_end();

        return 0;
}
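
/*
 * Shared finup/final path: hash any trailing data, then emit the padded,
 * finalized digest.  As above, a non-usable FPU routes everything to the
 * generic implementation.
 */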
static int sha1_finup(struct shash_desc *desc, const u8 *data,
                      unsigned int len, u8 *out, sha1_transform_fn *sha1_xform)
{
        if (!irq_fpu_usable())
                return crypto_sha1_finup(desc, data, len, out);

        kernel_fpu_begin();
        if (len)
                sha1_base_do_update(desc, data, len,
                                    (sha1_block_fn *)sha1_xform);
        sha1_base_do_finalize(desc, (sha1_block_fn *)sha1_xform);
        kernel_fpu_end();

        return sha1_base_finish(desc, out);
}

asmlinkage void sha1_transform_ssse3(u32 *digest, const char *data,
                                     unsigned int rounds);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
                            unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
        return sha1_ssse3_finup(desc, NULL, 0, out);
}
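
/*
 * cra_priority 150 ranks this driver above the generic C "sha1"
 * implementation; the AVX, AVX2 and SHA-NI variants below register with
 * successively higher priorities (160, 170, 250) so the crypto API picks
 * the best implementation the CPU supports.
 */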
static struct shash_alg sha1_ssse3_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_ssse3_update,
        .final          = sha1_ssse3_final,
        .finup          = sha1_ssse3_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name        = "sha1",
                .cra_driver_name = "sha1-ssse3",
                .cra_priority    = 150,
                .cra_blocksize   = SHA1_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
};

static int register_sha1_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                return crypto_register_shash(&sha1_ssse3_alg);
        return 0;
}

static void unregister_sha1_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shash(&sha1_ssse3_alg);
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha1_transform_avx(u32 *digest, const char *data,
                                   unsigned int rounds);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
                           unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
                          unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
        return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_avx_update,
        .final          = sha1_avx_final,
        .finup          = sha1_avx_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name        = "sha1",
                .cra_driver_name = "sha1-avx",
                .cra_priority    = 160,
                .cra_blocksize   = SHA1_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
};
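
/*
 * The CPU advertising the AVX feature bit is not enough: the kernel must
 * also be managing the SSE and YMM extended register states via XSAVE.
 * A CPU can expose AVX while the OS has not enabled YMM state handling,
 * in which case the AVX code paths would corrupt FPU state.
 */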
static bool avx_usable(void)
{
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
                if (boot_cpu_has(X86_FEATURE_AVX))
                        pr_info("AVX detected but unusable.\n");
                return false;
        }

        return true;
}

static int register_sha1_avx(void)
{
        if (avx_usable())
                return crypto_register_shash(&sha1_avx_alg);
        return 0;
}

static void unregister_sha1_avx(void)
{
        if (avx_usable())
                crypto_unregister_shash(&sha1_avx_alg);
}

#else /* CONFIG_AS_AVX */
static inline int register_sha1_avx(void) { return 0; }
static inline void unregister_sha1_avx(void) { }
#endif /* CONFIG_AS_AVX */

#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
#define SHA1_AVX2_BLOCK_OPTSIZE 4       /* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(u32 *digest, const char *data,
                                    unsigned int rounds);

static bool avx2_usable(void)
{
        if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
                && boot_cpu_has(X86_FEATURE_BMI1)
                && boot_cpu_has(X86_FEATURE_BMI2))
                return true;

        return false;
}
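
/*
 * The AVX2 routine processes multiple blocks in parallel and only pays off
 * once at least SHA1_AVX2_BLOCK_OPTSIZE (4) 64-byte blocks are available;
 * for shorter inputs the plain AVX routine is faster, so dispatch per call.
 */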
static void sha1_apply_transform_avx2(u32 *digest, const char *data,
                                      unsigned int rounds)
{
        /* Select the optimal transform based on data block size */
        if (rounds >= SHA1_AVX2_BLOCK_OPTSIZE)
                sha1_transform_avx2(digest, data, rounds);
        else
                sha1_transform_avx(digest, data, rounds);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
                            unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
                           unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
        return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_avx2_update,
        .final          = sha1_avx2_final,
        .finup          = sha1_avx2_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name        = "sha1",
                .cra_driver_name = "sha1-avx2",
                .cra_priority    = 170,
                .cra_blocksize   = SHA1_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
};

static int register_sha1_avx2(void)
{
        if (avx2_usable())
                return crypto_register_shash(&sha1_avx2_alg);
        return 0;
}

static void unregister_sha1_avx2(void)
{
        if (avx2_usable())
                crypto_unregister_shash(&sha1_avx2_alg);
}

#else
static inline int register_sha1_avx2(void) { return 0; }
static inline void unregister_sha1_avx2(void) { }
#endif
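
/*
 * SHA-NI: the dedicated SHA1 instruction set extensions (SHA1RNDS4,
 * SHA1NEXTE, SHA1MSG1/SHA1MSG2).  Registered with the highest priority
 * (250), as the fixed-function instructions are generally faster than the
 * vectorized implementations above.
 */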
#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(u32 *digest, const char *data,
                                  unsigned int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
                          unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
                         unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
        return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_ni_update,
        .final          = sha1_ni_final,
        .finup          = sha1_ni_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name        = "sha1",
                .cra_driver_name = "sha1-ni",
                .cra_priority    = 250,
                .cra_blocksize   = SHA1_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
};

static int register_sha1_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                return crypto_register_shash(&sha1_ni_alg);
        return 0;
}

static void unregister_sha1_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif
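
/*
 * Register every variant the CPU and toolchain support; on any failure,
 * unwind the registrations already made and fail the module load.
 */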
static int __init sha1_ssse3_mod_init(void)
{
        if (register_sha1_ssse3())
                goto fail;

        if (register_sha1_avx()) {
                unregister_sha1_ssse3();
                goto fail;
        }

        if (register_sha1_avx2()) {
                unregister_sha1_avx();
                unregister_sha1_ssse3();
                goto fail;
        }

        if (register_sha1_ni()) {
                unregister_sha1_avx2();
                unregister_sha1_avx();
                unregister_sha1_ssse3();
                goto fail;
        }

        return 0;
fail:
        return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
        unregister_sha1_ni();
        unregister_sha1_avx2();
        unregister_sha1_avx();
        unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif
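
/*
 * Illustrative sketch, not part of the original file: how kernel code
 * might exercise this driver through the generic shash API.  The "sha1"
 * name resolves to the highest-priority registered implementation, so on
 * capable CPUs this picks sha1-ni or sha1-avx2 automatically.  The helper
 * name sha1_example() is hypothetical; the crypto API calls are real.
 */
static int __maybe_unused sha1_example(const u8 *buf, unsigned int len,
                                       u8 digest[SHA1_DIGEST_SIZE])
{
        struct crypto_shash *tfm;
        int err;

        /* Look up the best "sha1" implementation by priority. */
        tfm = crypto_alloc_shash("sha1", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        {
                /* One-shot digest: init + update + final in a single call. */
                SHASH_DESC_ON_STACK(desc, tfm);

                desc->tfm = tfm;
                desc->flags = 0; /* needed on the v4.x kernels this file targets */
                err = crypto_shash_digest(desc, buf, len, digest);
        }

        crypto_free_shash(tfm);
        return err;
}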