/* ghash-ce-glue.c */
/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */
#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

/* GHASH processes 16-byte (128-bit) blocks and yields a 16-byte digest. */
#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16
/*
 * Hash key in the form expected by the NEON core code: the 128-bit key,
 * premultiplied by 'x' in GF(2^128) (see ghash_setkey()), stored as two
 * 64-bit halves.
 */
struct ghash_key {
	u64	a;	/* high 64 bits of the premultiplied key */
	u64	b;	/* low 64 bits of the premultiplied key */
};

/* Per-request state for the synchronous (shash) GHASH implementation. */
struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];	/* running GHASH state */
	u8 buf[GHASH_BLOCK_SIZE];			/* buffered partial block */
	u32 count;					/* total bytes fed in */
};

/* Per-tfm context of the async wrapper: handle to the cryptd fallback. */
struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

/*
 * NEON core routines (assembly): a vmull.p64 version for CPUs with the
 * PMULL crypto extension, and a vmull.p8 based fallback for those without.
 * 'head' optionally points at one extra block to process before 'src'.
 */
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       struct ghash_key const *k,
				       const char *head);
asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      struct ghash_key const *k,
				      const char *head);

/* Selected once at module init based on HWCAP2_PMULL. */
static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
				  struct ghash_key const *k,
				  const char *head);
  47. static int ghash_init(struct shash_desc *desc)
  48. {
  49. struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
  50. *ctx = (struct ghash_desc_ctx){};
  51. return 0;
  52. }
/*
 * shash .update: accumulate input, handing complete 16-byte blocks to the
 * NEON core in a single call and buffering any trailing partial block.
 */
static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;
	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			/* complete the buffered block; the core consumes it
			 * first via the 'head' argument */
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		/* NEON registers may only be touched between begin/end */
		kernel_neon_begin();
		pmull_ghash_update(blocks, ctx->digest, src, key,
				   partial ? ctx->buf : NULL);
		kernel_neon_end();

		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		/* stash the remainder for the next update/final call */
		memcpy(ctx->buf + partial, src, len);
	return 0;
}
/*
 * shash .final: zero-pad and absorb any buffered partial block, write out
 * the 16-byte digest, and wipe the request state.
 */
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		kernel_neon_begin();
		pmull_ghash_update(1, ctx->digest, ctx->buf, key, NULL);
		kernel_neon_end();
	}
	/* the running state keeps the 64-bit halves swapped; undo on output */
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
/*
 * shash .setkey: validate the 16-byte key and store it premultiplied by
 * 'x' in GF(2^128), which is the form the NEON core code expects.
 */
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);
	u64 a, b;

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* perform multiplication by 'x' in GF(2^128) */
	b = get_unaligned_be64(inkey);
	a = get_unaligned_be64(inkey + 8);

	/* shift the 128-bit value left by one ... */
	key->a = (a << 1) | (b >> 63);
	key->b = (b << 1) | (a >> 63);

	/* ... and reduce if a bit was shifted out of the top */
	if (b >> 63)
		key->b ^= 0xc200000000000000UL;

	return 0;
}
/*
 * Synchronous shash algorithm. Marked CRYPTO_ALG_INTERNAL (and priority 0)
 * because it may only run where NEON use is allowed; external users go
 * through the async wrapper below, which falls back to cryptd when needed.
 */
static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
	.base			= {
		.cra_name	= "__ghash",
		.cra_driver_name = "__driver-ghash-ce",
		.cra_priority	= 0,
		.cra_flags	= CRYPTO_ALG_INTERNAL,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_key),
		.cra_module	= THIS_MODULE,
	},
};
  132. static int ghash_async_init(struct ahash_request *req)
  133. {
  134. struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
  135. struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
  136. struct ahash_request *cryptd_req = ahash_request_ctx(req);
  137. struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
  138. struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
  139. struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
  140. desc->tfm = child;
  141. desc->flags = req->base.flags;
  142. return crypto_shash_init(desc);
  143. }
  144. static int ghash_async_update(struct ahash_request *req)
  145. {
  146. struct ahash_request *cryptd_req = ahash_request_ctx(req);
  147. struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
  148. struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
  149. struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
  150. if (!may_use_simd() ||
  151. (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
  152. memcpy(cryptd_req, req, sizeof(*req));
  153. ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
  154. return crypto_ahash_update(cryptd_req);
  155. } else {
  156. struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
  157. return shash_ahash_update(req, desc);
  158. }
  159. }
  160. static int ghash_async_final(struct ahash_request *req)
  161. {
  162. struct ahash_request *cryptd_req = ahash_request_ctx(req);
  163. struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
  164. struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
  165. struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
  166. if (!may_use_simd() ||
  167. (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
  168. memcpy(cryptd_req, req, sizeof(*req));
  169. ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
  170. return crypto_ahash_final(cryptd_req);
  171. } else {
  172. struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
  173. return crypto_shash_final(desc, req->result);
  174. }
  175. }
/*
 * ahash .digest: one-shot hash. Runs the shash directly when SIMD is
 * usable and the cryptd queue is not draining; otherwise defers to cryptd.
 */
static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!may_use_simd() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		/* digest starts from scratch, so (re)initialise the desc */
		desc->tfm = child;
		desc->flags = req->base.flags;
		return shash_ahash_digest(req, desc);
	}
}
/*
 * ahash .import: load previously exported state. The desc may not have
 * been set up yet in this request, so point it at the child shash first.
 */
static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}
  205. static int ghash_async_export(struct ahash_request *req, void *out)
  206. {
  207. struct ahash_request *cryptd_req = ahash_request_ctx(req);
  208. struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
  209. return crypto_shash_export(desc, out);
  210. }
/*
 * ahash .setkey: forward the key to the cryptd-wrapped shash, mirroring
 * the request flags down and the result flags back up as the crypto API
 * setkey protocol requires.
 */
static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(child, key, keylen);
	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
			       & CRYPTO_TFM_RES_MASK);

	return err;
}
/*
 * tfm constructor: allocate the cryptd wrapper around the internal shash
 * and size the request context to hold the nested cryptd request.
 */
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	/* bind to the internal-only driver registered above */
	cryptd_tfm = cryptd_alloc_ahash("__driver-ghash-ce",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}
  240. static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
  241. {
  242. struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
  243. cryptd_free_ahash(ctx->cryptd_tfm);
  244. }
/*
 * Public-facing async algorithm: wraps the internal shash and falls back
 * to cryptd when NEON cannot be used in the calling context.
 */
static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};
/*
 * Module init: require NEON, pick the p64 core when the CPU has PMULL
 * (falling back to the p8 version otherwise), then register the internal
 * shash followed by the public async wrapper.
 */
static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL)
		pmull_ghash_update = pmull_ghash_update_p64;
	else
		pmull_ghash_update = pmull_ghash_update_p8;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		return err;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	/* unwind the shash registration on ahash failure */
	crypto_unregister_shash(&ghash_alg);
	return err;
}
/* Module exit: unregister in reverse order of registration. */
static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);