  1. /* SPDX-License-Identifier: GPL-2.0-only */
  2. /*
  3. * sha256_base.h - core logic for SHA-256 implementations
  4. *
  5. * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
  6. */
  7. #ifndef _CRYPTO_SHA256_BASE_H
  8. #define _CRYPTO_SHA256_BASE_H
  9. #include <asm/byteorder.h>
  10. #include <linux/unaligned.h>
  11. #include <crypto/internal/hash.h>
  12. #include <crypto/sha2.h>
  13. #include <linux/string.h>
  14. #include <linux/types.h>
/*
 * Prototype of an (arch-specific) SHA-256 block transform: consume
 * @blocks whole SHA256_BLOCK_SIZE-byte blocks from @src and fold them
 * into the intermediate state @sst.
 */
typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
			       int blocks);
  17. static inline int sha224_base_init(struct shash_desc *desc)
  18. {
  19. struct sha256_state *sctx = shash_desc_ctx(desc);
  20. sha224_init(sctx);
  21. return 0;
  22. }
  23. static inline int sha256_base_init(struct shash_desc *desc)
  24. {
  25. struct sha256_state *sctx = shash_desc_ctx(desc);
  26. sha256_init(sctx);
  27. return 0;
  28. }
  29. static inline int lib_sha256_base_do_update(struct sha256_state *sctx,
  30. const u8 *data,
  31. unsigned int len,
  32. sha256_block_fn *block_fn)
  33. {
  34. unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
  35. sctx->count += len;
  36. if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
  37. unsigned int blocks;
  38. if (partial) {
  39. int p = SHA256_BLOCK_SIZE - partial;
  40. memcpy(sctx->buf + partial, data, p);
  41. data += p;
  42. len -= p;
  43. block_fn(sctx, sctx->buf, 1);
  44. }
  45. blocks = len / SHA256_BLOCK_SIZE;
  46. len %= SHA256_BLOCK_SIZE;
  47. if (blocks) {
  48. block_fn(sctx, data, blocks);
  49. data += blocks * SHA256_BLOCK_SIZE;
  50. }
  51. partial = 0;
  52. }
  53. if (len)
  54. memcpy(sctx->buf + partial, data, len);
  55. return 0;
  56. }
  57. static inline int sha256_base_do_update(struct shash_desc *desc,
  58. const u8 *data,
  59. unsigned int len,
  60. sha256_block_fn *block_fn)
  61. {
  62. struct sha256_state *sctx = shash_desc_ctx(desc);
  63. return lib_sha256_base_do_update(sctx, data, len, block_fn);
  64. }
  65. static inline int lib_sha256_base_do_finalize(struct sha256_state *sctx,
  66. sha256_block_fn *block_fn)
  67. {
  68. const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
  69. __be64 *bits = (__be64 *)(sctx->buf + bit_offset);
  70. unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
  71. sctx->buf[partial++] = 0x80;
  72. if (partial > bit_offset) {
  73. memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
  74. partial = 0;
  75. block_fn(sctx, sctx->buf, 1);
  76. }
  77. memset(sctx->buf + partial, 0x0, bit_offset - partial);
  78. *bits = cpu_to_be64(sctx->count << 3);
  79. block_fn(sctx, sctx->buf, 1);
  80. return 0;
  81. }
  82. static inline int sha256_base_do_finalize(struct shash_desc *desc,
  83. sha256_block_fn *block_fn)
  84. {
  85. struct sha256_state *sctx = shash_desc_ctx(desc);
  86. return lib_sha256_base_do_finalize(sctx, block_fn);
  87. }
  88. static inline int lib_sha256_base_finish(struct sha256_state *sctx, u8 *out,
  89. unsigned int digest_size)
  90. {
  91. __be32 *digest = (__be32 *)out;
  92. int i;
  93. for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be32))
  94. put_unaligned_be32(sctx->state[i], digest++);
  95. memzero_explicit(sctx, sizeof(*sctx));
  96. return 0;
  97. }
  98. static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
  99. {
  100. unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
  101. struct sha256_state *sctx = shash_desc_ctx(desc);
  102. return lib_sha256_base_finish(sctx, out, digest_size);
  103. }
  104. #endif /* _CRYPTO_SHA256_BASE_H */