/* sha256-glue.c */
  1. // SPDX-License-Identifier: GPL-2.0-or-later
  2. /*
  3. * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
  4. *
  5. * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
  6. */
  7. #include <asm/hwcap.h>
  8. #include <asm/neon.h>
  9. #include <asm/simd.h>
  10. #include <crypto/internal/hash.h>
  11. #include <crypto/internal/simd.h>
  12. #include <crypto/sha2.h>
  13. #include <crypto/sha256_base.h>
  14. #include <linux/module.h>
  15. #include <linux/string.h>
  16. #include <linux/types.h>
/* Module metadata exposed via modinfo. */
MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
/* Allow module auto-load when userspace requests the generic algorithm names. */
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");
/*
 * Scalar SHA-256 block routine from the OpenSSL AArch64 assembly port:
 * consumes @num_blks 64-byte blocks from @data into the eight-word @digest
 * state.  Safe in any context (no NEON registers touched).  Exported so
 * other kernel code can call the assembly implementation directly.
 */
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);
  26. static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
  27. int blocks)
  28. {
  29. sha256_block_data_order(sst->state, src, blocks);
  30. }
/*
 * NEON-accelerated block routine (same contract as sha256_block_data_order);
 * callers must bracket it with kernel_neon_begin()/kernel_neon_end().
 */
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);
  33. static void sha256_neon_transform(struct sha256_state *sst, u8 const *src,
  34. int blocks)
  35. {
  36. sha256_block_neon(sst->state, src, blocks);
  37. }
  38. static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
  39. unsigned int len)
  40. {
  41. return sha256_base_do_update(desc, data, len, sha256_arm64_transform);
  42. }
  43. static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
  44. unsigned int len, u8 *out)
  45. {
  46. if (len)
  47. sha256_base_do_update(desc, data, len, sha256_arm64_transform);
  48. sha256_base_do_finalize(desc, sha256_arm64_transform);
  49. return sha256_base_finish(desc, out);
  50. }
/* shash .final: finalize with no trailing data — delegates to .finup. */
static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}
/*
 * Scalar (always-usable) SHA-256/SHA-224 algorithm descriptors.
 * Priority 125 places them above the generic C implementation while
 * leaving room for the NEON variants (150) registered below.
 */
static struct shash_alg algs[] = { {
	.digestsize = SHA256_DIGEST_SIZE,
	.init = sha256_base_init,
	.update = crypto_sha256_arm64_update,
	.final = crypto_sha256_arm64_final,
	.finup = crypto_sha256_arm64_finup,
	.descsize = sizeof(struct sha256_state),
	.base.cra_name = "sha256",
	.base.cra_driver_name = "sha256-arm64",
	.base.cra_priority = 125,
	.base.cra_blocksize = SHA256_BLOCK_SIZE,
	.base.cra_module = THIS_MODULE,
}, {
	/* SHA-224 shares the update/final paths; only init and sizes differ. */
	.digestsize = SHA224_DIGEST_SIZE,
	.init = sha224_base_init,
	.update = crypto_sha256_arm64_update,
	.final = crypto_sha256_arm64_final,
	.finup = crypto_sha256_arm64_finup,
	.descsize = sizeof(struct sha256_state),
	.base.cra_name = "sha224",
	.base.cra_driver_name = "sha224-arm64",
	.base.cra_priority = 125,
	.base.cra_blocksize = SHA224_BLOCK_SIZE,
	.base.cra_module = THIS_MODULE,
} };
/*
 * shash .update for the NEON driver.  When the NEON unit may not be used
 * (e.g. hardirq context), fall back to the scalar assembly core.
 * Otherwise run the data through the NEON core inside
 * kernel_neon_begin()/kernel_neon_end() sections, limited to one block at
 * a time on preemptible kernels so preemption latency stays bounded.
 */
static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				sha256_arm64_transform);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, sha256_neon_transform);
		kernel_neon_end();

		data += chunk;
		len -= chunk;
	}
	return 0;
}
  106. static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
  107. unsigned int len, u8 *out)
  108. {
  109. if (!crypto_simd_usable()) {
  110. if (len)
  111. sha256_base_do_update(desc, data, len,
  112. sha256_arm64_transform);
  113. sha256_base_do_finalize(desc, sha256_arm64_transform);
  114. } else {
  115. if (len)
  116. sha256_update_neon(desc, data, len);
  117. kernel_neon_begin();
  118. sha256_base_do_finalize(desc, sha256_neon_transform);
  119. kernel_neon_end();
  120. }
  121. return sha256_base_finish(desc, out);
  122. }
/* shash .final for the NEON driver: finalize with no trailing data. */
static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}
/*
 * NEON-accelerated descriptors.  Priority 150 outranks the scalar drivers
 * above (125), so these are preferred when registered.
 */
static struct shash_alg neon_algs[] = { {
	.digestsize = SHA256_DIGEST_SIZE,
	.init = sha256_base_init,
	.update = sha256_update_neon,
	.final = sha256_final_neon,
	.finup = sha256_finup_neon,
	.descsize = sizeof(struct sha256_state),
	.base.cra_name = "sha256",
	.base.cra_driver_name = "sha256-arm64-neon",
	.base.cra_priority = 150,
	.base.cra_blocksize = SHA256_BLOCK_SIZE,
	.base.cra_module = THIS_MODULE,
}, {
	/* SHA-224 variant: same handlers, SHA-224 init and sizes. */
	.digestsize = SHA224_DIGEST_SIZE,
	.init = sha224_base_init,
	.update = sha256_update_neon,
	.final = sha256_final_neon,
	.finup = sha256_finup_neon,
	.descsize = sizeof(struct sha256_state),
	.base.cra_name = "sha224",
	.base.cra_driver_name = "sha224-arm64-neon",
	.base.cra_priority = 150,
	.base.cra_blocksize = SHA224_BLOCK_SIZE,
	.base.cra_module = THIS_MODULE,
} };
  152. static int __init sha256_mod_init(void)
  153. {
  154. int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));
  155. if (ret)
  156. return ret;
  157. if (cpu_have_named_feature(ASIMD)) {
  158. ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
  159. if (ret)
  160. crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
  161. }
  162. return ret;
  163. }
/*
 * Unregister everything that init registered; the NEON variants only
 * exist when ASIMD was detected, so gate their removal the same way.
 */
static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}
/* Standard module entry/exit hookup. */
module_init(sha256_mod_init);
module_exit(sha256_mod_fini);