// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_DESCRIPTION("Bit sliced AES using NEON instructions");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");

asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]);

/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int blocks, u8 iv[]);
asmlinkage void neon_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                     int rounds, int bytes, u8 ctr[]);
asmlinkage void neon_aes_xts_encrypt(u8 out[], u8 const in[],
                                     u32 const rk1[], int rounds, int bytes,
                                     u32 const rk2[], u8 iv[], int first);
asmlinkage void neon_aes_xts_decrypt(u8 out[], u8 const in[],
                                     u32 const rk1[], int rounds, int bytes,
                                     u32 const rk2[], u8 iv[], int first);

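/*
 * Per-transform key material: aesbs_ctx holds the bit sliced representation
 * of the expanded key. The CBC/CTR and XTS contexts additionally keep a
 * regular AES key schedule for the paths that fall back to the plain NEON
 * routines above.
 */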
struct aesbs_ctx {
        u8      rk[13 * (8 * AES_BLOCK_SIZE) + 32];
        int     rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctr_ctx {
        struct aesbs_ctx        key;
        u32                     enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_xts_ctx {
        struct aesbs_ctx        key;
        u32                     twkey[AES_MAX_KEYLENGTH_U32];
        struct crypto_aes_ctx   cts;
};

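/*
 * Expand the key with the generic helper and convert it into the bit sliced
 * format expected by the NEON core. 6 + key_len / 4 yields the usual
 * 10/12/14 rounds for 128/192/256 bit keys.
 */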
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                        unsigned int key_len)
{
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

        ctx->rounds = 6 + key_len / 4;

        kernel_neon_begin();
        aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
        kernel_neon_end();

        return 0;
}

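/*
 * ECB helper: process as many blocks as the walk provides, rounding down to
 * whole 8-block strides except on the final step, and hand any remainder
 * back to the walk.
 */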
static int __ecb_crypt(struct skcipher_request *req,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
                   ctx->rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, aesbs_ecb_decrypt);
}

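/*
 * CBC and CTR share a context: the bit sliced key is used for the
 * parallelisable work (CBC decryption, 8-block CTR batches), while the
 * regular encryption round keys kept in ->enc are used wherever this code
 * falls back to the plain NEON routines.
 */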
static int aesbs_cbc_ctr_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                                unsigned int key_len)
{
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = aes_expandkey(&rk, in_key, key_len);
        if (err)
                return err;

        ctx->key.rounds = 6 + key_len / 4;

        memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

        kernel_neon_begin();
        aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
        kernel_neon_end();
        memzero_explicit(&rk, sizeof(rk));

        return 0;
}

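/*
 * CBC encryption is inherently sequential (each ciphertext block is chained
 * into the next), so there is nothing to gain from the 8-way bit sliced code
 * here.
 */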
static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                /* fall back to the non-bitsliced NEON implementation */
                kernel_neon_begin();
                neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                     ctx->enc, ctx->key.rounds, blocks,
                                     walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

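/*
 * CBC decryption has no such dependency between output blocks, so it can use
 * the bit sliced NEON code 8 blocks at a time.
 */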
static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

                if (walk.nbytes < walk.total)
                        blocks = round_down(blocks,
                                            walk.stride / AES_BLOCK_SIZE);

                kernel_neon_begin();
                aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                  ctx->key.rk, ctx->key.rounds, blocks,
                                  walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk,
                                         walk.nbytes - blocks * AES_BLOCK_SIZE);
        }

        return err;
}

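/*
 * CTR: full batches of 8 blocks go through the bit sliced code; whatever
 * remains at the end of the request (including a partial final block) is
 * handled by the plain NEON helper. A final block shorter than
 * AES_BLOCK_SIZE is bounced through a stack buffer, with the data placed at
 * the end of the buffer, so that the fallback never reads or writes outside
 * the request.
 */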
static int ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
                int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE);
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                kernel_neon_begin();
                if (blocks >= 8) {
                        aesbs_ctr_encrypt(dst, src, ctx->key.rk, ctx->key.rounds,
                                          blocks, walk.iv);
                        dst += blocks * AES_BLOCK_SIZE;
                        src += blocks * AES_BLOCK_SIZE;
                }
                if (nbytes && walk.nbytes == walk.total) {
                        u8 buf[AES_BLOCK_SIZE];
                        u8 *d = dst;

                        if (unlikely(nbytes < AES_BLOCK_SIZE))
                                src = dst = memcpy(buf + sizeof(buf) - nbytes,
                                                   src, nbytes);

                        neon_aes_ctr_encrypt(dst, src, ctx->enc, ctx->key.rounds,
                                             nbytes, walk.iv);

                        if (unlikely(nbytes < AES_BLOCK_SIZE))
                                memcpy(d, dst, nbytes);

                        nbytes = 0;
                }
                kernel_neon_end();
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}

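/*
 * XTS keys are twice the AES key size: the first half is the data key, kept
 * both bit sliced and as a regular key schedule for the ciphertext stealing
 * tail; the second half is the tweak key used to encrypt the IV.
 */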
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_aes_ctx rk;
        int err;

        err = xts_verify_key(tfm, in_key, key_len);
        if (err)
                return err;

        key_len /= 2;
        err = aes_expandkey(&ctx->cts, in_key, key_len);
        if (err)
                return err;

        err = aes_expandkey(&rk, in_key + key_len, key_len);
        if (err)
                return err;

        memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

        return aesbs_setkey(tfm, in_key, key_len);
}

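/*
 * XTS: encrypt the tweak once using the tweak key, push full 8-block batches
 * through the bit sliced code, and let the plain NEON XTS helpers deal with
 * the remainder and with ciphertext stealing. If the request is not a
 * multiple of the block size, the bulk is processed via a subrequest so that
 * the CTS tail (one full block plus the partial block) is left for a single
 * final step.
 */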
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
                       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
                                  int rounds, int blocks, u8 iv[]))
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
        struct scatterlist sg_src[2], sg_dst[2];
        struct skcipher_request subreq;
        struct scatterlist *src, *dst;
        struct skcipher_walk walk;
        int nbytes, err;
        int first = 1;
        u8 *out, *in;

        if (req->cryptlen < AES_BLOCK_SIZE)
                return -EINVAL;

        /* ensure that the cts tail is covered by a single step */
        if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {
                int xts_blocks = DIV_ROUND_UP(req->cryptlen,
                                              AES_BLOCK_SIZE) - 2;

                skcipher_request_set_tfm(&subreq, tfm);
                skcipher_request_set_callback(&subreq,
                                              skcipher_request_flags(req),
                                              NULL, NULL);
                skcipher_request_set_crypt(&subreq, req->src, req->dst,
                                           xts_blocks * AES_BLOCK_SIZE,
                                           req->iv);
                req = &subreq;
        } else {
                tail = 0;
        }

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
                out = walk.dst.virt.addr;
                in = walk.src.virt.addr;
                nbytes = walk.nbytes;

                kernel_neon_begin();
                if (blocks >= 8) {
                        if (first == 1)
                                neon_aes_ecb_encrypt(walk.iv, walk.iv,
                                                     ctx->twkey,
                                                     ctx->key.rounds, 1);
                        first = 2;

                        fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
                           walk.iv);

                        out += blocks * AES_BLOCK_SIZE;
                        in += blocks * AES_BLOCK_SIZE;
                        nbytes -= blocks * AES_BLOCK_SIZE;
                }
                if (walk.nbytes == walk.total && nbytes > 0) {
                        if (encrypt)
                                neon_aes_xts_encrypt(out, in, ctx->cts.key_enc,
                                                     ctx->key.rounds, nbytes,
                                                     ctx->twkey, walk.iv, first);
                        else
                                neon_aes_xts_decrypt(out, in, ctx->cts.key_dec,
                                                     ctx->key.rounds, nbytes,
                                                     ctx->twkey, walk.iv, first);
                        nbytes = first = 0;
                }
                kernel_neon_end();
                err = skcipher_walk_done(&walk, nbytes);
        }

        if (err || likely(!tail))
                return err;

        /* handle ciphertext stealing */
        dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
        if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

        skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                   req->iv);

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;

        out = walk.dst.virt.addr;
        in = walk.src.virt.addr;
        nbytes = walk.nbytes;

        kernel_neon_begin();
        if (encrypt)
                neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
                                     nbytes, ctx->twkey, walk.iv, first);
        else
                neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
                                     nbytes, ctx->twkey, walk.iv, first);
        kernel_neon_end();

        return skcipher_walk_done(&walk, 0);
}

static int xts_encrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
        return __xts_crypt(req, false, aesbs_xts_decrypt);
}

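/*
 * All algorithms advertise a walksize of 8 blocks so that the skcipher walk
 * hands over multiples of 8 blocks whenever it can. CTR is registered with a
 * block size of 1, as it behaves as a stream cipher at the API level, with
 * chunksize describing the 16 byte keystream granularity.
 */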
static struct skcipher_alg aes_algs[] = { {
        .base.cra_name          = "ecb(aes)",
        .base.cra_driver_name   = "ecb-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .setkey                 = aesbs_setkey,
        .encrypt                = ecb_encrypt,
        .decrypt                = ecb_decrypt,
}, {
        .base.cra_name          = "cbc(aes)",
        .base.cra_driver_name   = "cbc-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_cbc_ctr_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_cbc_ctr_setkey,
        .encrypt                = cbc_encrypt,
        .decrypt                = cbc_decrypt,
}, {
        .base.cra_name          = "ctr(aes)",
        .base.cra_driver_name   = "ctr-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct aesbs_cbc_ctr_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_cbc_ctr_setkey,
        .encrypt                = ctr_encrypt,
        .decrypt                = ctr_encrypt,
}, {
        .base.cra_name          = "xts(aes)",
        .base.cra_driver_name   = "xts-aes-neonbs",
        .base.cra_priority      = 250,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct aesbs_xts_ctx),
        .base.cra_module        = THIS_MODULE,

        .min_keysize            = 2 * AES_MIN_KEY_SIZE,
        .max_keysize            = 2 * AES_MAX_KEY_SIZE,
        .walksize               = 8 * AES_BLOCK_SIZE,
        .ivsize                 = AES_BLOCK_SIZE,
        .setkey                 = aesbs_xts_setkey,
        .encrypt                = xts_encrypt,
        .decrypt                = xts_decrypt,
} };

static void aes_exit(void)
{
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

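/*
 * The bit sliced routines rely on NEON, so refuse to load on CPUs that lack
 * Advanced SIMD support.
 */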
static int __init aes_init(void)
{
        if (!cpu_have_named_feature(ASIMD))
                return -ENODEV;

        return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);