aes-ce-ccm-glue.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd.
 * Copyright (C) 2024 Google LLC
 *
 * Author: Ard Biesheuvel <ardb@kernel.org>
 */

#include <asm/neon.h>
#include <linux/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

MODULE_IMPORT_NS(CRYPTO_INTERNAL);

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

asmlinkage u32 ce_aes_mac_update(u8 const in[], u32 const rk[], int rounds,
				 int blocks, u8 dg[], int enc_before,
				 int enc_after);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[], u8 const final_iv[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[], u8 const final_iv[]);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

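/*
 * Note: together with .maxauthsize == AES_BLOCK_SIZE in the aead_alg below,
 * the check above restricts the tag length to the even values 4, 6, 8, ...,
 * 16 bytes permitted for CCM (RFC 3610 / NIST SP 800-38C).
 */
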
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

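/*
 * Worked example for the flags byte built above: with req->iv[0] == 1
 * (i.e. L == 2), a 16-byte tag and associated data present, B_0 starts with
 * 0x40 | ((16 - 2) << 2) | 0x01 == 0x79. Shifting (t - 2) left by 2 matches
 * the spec's ((t - 2) / 2) << 3 because the tag length t is always even here.
 */
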
static u32 ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				u32 macp, u32 const rk[], u32 rounds)
{
	int enc_after = (macp + abytes) % AES_BLOCK_SIZE;

	do {
		u32 blocks = abytes / AES_BLOCK_SIZE;

		if (macp == AES_BLOCK_SIZE || (!macp && blocks > 0)) {
			u32 rem = ce_aes_mac_update(in, rk, rounds, blocks, mac,
						    macp, enc_after);
			u32 adv = (blocks - rem) * AES_BLOCK_SIZE;

			macp = enc_after ? 0 : AES_BLOCK_SIZE;
			in += adv;
			abytes -= adv;

			if (unlikely(rem)) {
				kernel_neon_end();
				kernel_neon_begin();
				macp = 0;
			}
		} else {
			u32 l = min(AES_BLOCK_SIZE - macp, abytes);

			crypto_xor(&mac[macp], in, l);

			in += l;
			macp += l;
			abytes -= l;
		}
	} while (abytes > 0);

	return macp;
}

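/*
 * Note on the helper above: the returned value is the offset into the
 * in-progress MAC block, so the caller can resume authentication in the
 * middle of a block on the next call. When the assembler routine returns
 * early (rem != 0), the NEON section is briefly closed and reopened so the
 * scheduler gets a chance to preempt between chunks.
 */
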
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = AES_BLOCK_SIZE;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	macp = ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, macp,
				    ctx->key_enc, num_rounds(ctx));
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);

		macp = ce_aes_ccm_auth_data(mac, p, n, macp, ctx->key_enc,
					    num_rounds(ctx));

		len -= n;
		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

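/*
 * Example of the AAD length prefix fed into the MAC above: for
 * req->assoclen == 24 the AAD is preceded by the two bytes 0x00 0x18,
 * while lengths of 0xff00 and above are encoded as 0xff 0xfe followed by
 * the length as a 32-bit big-endian value (RFC 3610, section 2.2).
 */
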
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 orig_iv[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(orig_iv, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		u8 buf[AES_BLOCK_SIZE];
		u8 *final_iv = NULL;

		if (walk.nbytes == walk.total) {
			tail = 0;
			final_iv = orig_iv;
		}

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
					   src, walk.nbytes);

		ce_aes_ccm_encrypt(dst, src, walk.nbytes - tail,
				   ctx->key_enc, num_rounds(ctx),
				   mac, walk.iv, final_iv);

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			memcpy(walk.dst.virt.addr, dst, walk.nbytes);

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
		}
	} while (walk.nbytes);

	kernel_neon_end();

	if (unlikely(err))
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 orig_iv[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(orig_iv, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		u8 buf[AES_BLOCK_SIZE];
		u8 *final_iv = NULL;

		if (walk.nbytes == walk.total) {
			tail = 0;
			final_iv = orig_iv;
		}

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
					   src, walk.nbytes);

		ce_aes_ccm_decrypt(dst, src, walk.nbytes - tail,
				   ctx->key_enc, num_rounds(ctx),
				   mac, walk.iv, final_iv);

		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
			memcpy(walk.dst.virt.addr, dst, walk.nbytes);

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
		}
	} while (walk.nbytes);

	kernel_neon_end();

	if (unlikely(err))
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(orig_iv, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, orig_iv, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ardb@kernel.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");
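
/*
 * Usage sketch (not part of this driver): other kernel code reaches this
 * transform by name through the generic AEAD API. The calls below are the
 * standard crypto API entry points; buffer setup, error handling and the
 * scatterlists (sg_src, sg_dst) are assumed to be prepared by the caller,
 * and iv[] is the 16-byte CCM control block with iv[0] holding L - 1
 * followed by the nonce.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	struct aead_request *req;
 *
 *	crypto_aead_setkey(tfm, key, 16);		// AES-128 key
 *	crypto_aead_setauthsize(tfm, 16);		// 16-byte tag
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, sg_src, sg_dst, cryptlen, iv);
 *	crypto_aead_encrypt(req);			// appends the tag to dst
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */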