sm4-neon-glue.c

/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4 Cipher Algorithm, using ARMv8 NEON
 * as specified in
 * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
 *
 * Copyright (C) 2022, Alibaba Group.
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
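
/*
 * Bulk routines implemented in NEON assembly (in the companion file,
 * sm4-neon-core.S upstream). Each call processes nblocks full SM4
 * blocks; the CBC and CTR variants also consume and update the
 * IV/counter buffer in place.
 */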
asmlinkage void sm4_neon_crypt(const u32 *rkey, u8 *dst, const u8 *src,
			       unsigned int nblocks);
asmlinkage void sm4_neon_cbc_dec(const u32 *rkey_dec, u8 *dst, const u8 *src,
				 u8 *iv, unsigned int nblocks);
asmlinkage void sm4_neon_ctr_crypt(const u32 *rkey_enc, u8 *dst, const u8 *src,
				   u8 *iv, unsigned int nblocks);
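
/* Expand the user key into the encrypt/decrypt round-key schedules in ctx. */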
static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_expandkey(ctx, key, key_len);
}
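
/*
 * Walk the request and hand as many full blocks as each walk step
 * yields to the NEON routine; any partial tail (< SM4_BLOCK_SIZE) is
 * returned to the walk. NEON registers are only claimed around the
 * bulk call to keep the non-preemptible region short.
 */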
static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			kernel_neon_begin();

			sm4_neon_crypt(rkey, dst, src, nblocks);

			kernel_neon_end();
		}

		err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
	}

	return err;
}
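
/* ECB encryption and decryption differ only in which round-key schedule is passed. */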
static int sm4_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_enc);
}

static int sm4_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_dec);
}
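
/*
 * CBC encryption is inherently serial: each plaintext block is XORed
 * with the previous ciphertext block before encryption, so there is
 * nothing to vectorize. The scalar library sm4_crypt_block() is used,
 * and the last ciphertext block is copied back as the next IV.
 */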
static int sm4_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *iv = walk.iv;
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		while (nbytes >= SM4_BLOCK_SIZE) {
			crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE);
			sm4_crypt_block(ctx->rkey_enc, dst, dst);
			iv = dst;
			src += SM4_BLOCK_SIZE;
			dst += SM4_BLOCK_SIZE;
			nbytes -= SM4_BLOCK_SIZE;
		}
		if (iv != walk.iv)
			memcpy(walk.iv, iv, SM4_BLOCK_SIZE);

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
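
/*
 * CBC decryption, by contrast, parallelizes well: every ciphertext
 * block can be decrypted independently and then XORed with the
 * preceding ciphertext block, so the NEON bulk routine is used here.
 */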
static int sm4_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			kernel_neon_begin();

			sm4_neon_cbc_dec(ctx->rkey_dec, dst, src,
					 walk.iv, nblocks);

			kernel_neon_end();
		}

		err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
	}

	return err;
}
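
/*
 * CTR mode: full blocks go through the NEON routine, which also
 * advances the counter in walk.iv. A trailing partial block (only
 * possible at the end of the request) is handled by encrypting a
 * single counter block and XORing just nbytes of keystream out.
 */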
static int sm4_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			kernel_neon_begin();

			sm4_neon_ctr_crypt(ctx->rkey_enc, dst, src,
					   walk.iv, nblocks);

			kernel_neon_end();

			dst += nblocks * SM4_BLOCK_SIZE;
			src += nblocks * SM4_BLOCK_SIZE;
			nbytes -= nblocks * SM4_BLOCK_SIZE;
		}

		/* tail */
		if (walk.nbytes == walk.total && nbytes > 0) {
			u8 keystream[SM4_BLOCK_SIZE];

			sm4_crypt_block(ctx->rkey_enc, keystream, walk.iv);
			crypto_inc(walk.iv, SM4_BLOCK_SIZE);
			crypto_xor_cpy(dst, src, keystream, nbytes);
			nbytes = 0;
		}

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
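
/*
 * cra_priority 200 ranks these drivers above the generic C
 * implementation (priority 100); the ARMv8 Crypto Extensions drivers
 * register with a higher priority still and win on hardware that
 * supports them. CTR is a stream mode, hence cra_blocksize = 1 with a
 * 16-byte chunksize.
 */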
static struct skcipher_alg sm4_algs[] = {
	{
		.base = {
			.cra_name		= "ecb(sm4)",
			.cra_driver_name	= "ecb-sm4-neon",
			.cra_priority		= 200,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ecb_encrypt,
		.decrypt	= sm4_ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "cbc(sm4)",
			.cra_driver_name	= "cbc-sm4-neon",
			.cra_priority		= 200,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cbc_encrypt,
		.decrypt	= sm4_cbc_decrypt,
	}, {
		.base = {
			.cra_name		= "ctr(sm4)",
			.cra_driver_name	= "ctr-sm4-neon",
			.cra_priority		= 200,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.chunksize	= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ctr_crypt,
		.decrypt	= sm4_ctr_crypt,
	}
};

static int __init sm4_init(void)
{
	return crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

static void __exit sm4_exit(void)
{
	crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

module_init(sm4_init);
module_exit(sm4_exit);

MODULE_DESCRIPTION("SM4 ECB/CBC/CTR using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("sm4-neon");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("ecb(sm4)");
MODULE_ALIAS_CRYPTO("cbc(sm4)");
MODULE_ALIAS_CRYPTO("ctr(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");
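
/*
 * Example usage (illustrative sketch): kernel code reaches this driver
 * through the generic crypto API rather than by calling it directly,
 * e.g. for the CTR variant:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("ctr(sm4)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_skcipher_setkey(tfm, key, SM4_KEY_SIZE);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * "ctr(sm4)" resolves to ctr-sm4-neon when this module provides the
 * highest-priority matching implementation.
 */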