jh7110-aes.c 31 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * StarFive AES acceleration driver
  4. *
  5. * Copyright (c) 2022 StarFive Technology
  6. */
  7. #include <crypto/engine.h>
  8. #include <crypto/gcm.h>
  9. #include <crypto/internal/aead.h>
  10. #include <crypto/internal/skcipher.h>
  11. #include <crypto/scatterwalk.h>
  12. #include "jh7110-cryp.h"
  13. #include <linux/err.h>
  14. #include <linux/iopoll.h>
  15. #include <linux/kernel.h>
  16. #include <linux/slab.h>
  17. #include <linux/string.h>
/* AES register block sits at a fixed offset inside the crypto IP. */
#define STARFIVE_AES_REGS_OFFSET	0x100
/* Data FIFO: plaintext/ciphertext and CCM AAD/tag pass through here. */
#define STARFIVE_AES_AESDIO0R		(STARFIVE_AES_REGS_OFFSET + 0x0)
/* Key registers, 32 bits each (up to 256-bit keys). */
#define STARFIVE_AES_KEY0		(STARFIVE_AES_REGS_OFFSET + 0x4)
#define STARFIVE_AES_KEY1		(STARFIVE_AES_REGS_OFFSET + 0x8)
#define STARFIVE_AES_KEY2		(STARFIVE_AES_REGS_OFFSET + 0xC)
#define STARFIVE_AES_KEY3		(STARFIVE_AES_REGS_OFFSET + 0x10)
#define STARFIVE_AES_KEY4		(STARFIVE_AES_REGS_OFFSET + 0x14)
#define STARFIVE_AES_KEY5		(STARFIVE_AES_REGS_OFFSET + 0x18)
#define STARFIVE_AES_KEY6		(STARFIVE_AES_REGS_OFFSET + 0x1C)
#define STARFIVE_AES_KEY7		(STARFIVE_AES_REGS_OFFSET + 0x20)
/* Control/status register. */
#define STARFIVE_AES_CSR		(STARFIVE_AES_REGS_OFFSET + 0x24)
/* IV registers (4 contiguous words). */
#define STARFIVE_AES_IV0		(STARFIVE_AES_REGS_OFFSET + 0x28)
#define STARFIVE_AES_IV1		(STARFIVE_AES_REGS_OFFSET + 0x2C)
#define STARFIVE_AES_IV2		(STARFIVE_AES_REGS_OFFSET + 0x30)
#define STARFIVE_AES_IV3		(STARFIVE_AES_REGS_OFFSET + 0x34)
/* Nonce registers (4 contiguous words); also carry GCM AAD and tag. */
#define STARFIVE_AES_NONCE0		(STARFIVE_AES_REGS_OFFSET + 0x3C)
#define STARFIVE_AES_NONCE1		(STARFIVE_AES_REGS_OFFSET + 0x40)
#define STARFIVE_AES_NONCE2		(STARFIVE_AES_REGS_OFFSET + 0x44)
#define STARFIVE_AES_NONCE3		(STARFIVE_AES_REGS_OFFSET + 0x48)
/* Associated-data length (64 bits, high word first). */
#define STARFIVE_AES_ALEN0		(STARFIVE_AES_REGS_OFFSET + 0x4C)
#define STARFIVE_AES_ALEN1		(STARFIVE_AES_REGS_OFFSET + 0x50)
/* Message length (64 bits, high word first). */
#define STARFIVE_AES_MLEN0		(STARFIVE_AES_REGS_OFFSET + 0x54)
#define STARFIVE_AES_MLEN1		(STARFIVE_AES_REGS_OFFSET + 0x58)
/* IV length register (used for GCM). */
#define STARFIVE_AES_IVLEN		(STARFIVE_AES_REGS_OFFSET + 0x5C)

/* cryp->flags layout: low bits select the mode, bit 4 the direction. */
#define FLG_MODE_MASK			GENMASK(2, 0)
#define FLG_ENCRYPT			BIT(4)

/* Misc */
/* CCM B0 flags byte: associated data present. */
#define CCM_B0_ADATA			0x40
/* AES block size expressed in 32-bit words. */
#define AES_BLOCK_32			(AES_BLOCK_SIZE / sizeof(u32))
/*
 * Poll the CSR until the AES busy bit clears.
 * Returns 0 on success or -ETIMEDOUT after 100 ms (10 us poll period).
 */
static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  !(status & STARFIVE_AES_BUSY), 10, 100000);
}
/*
 * Poll the CSR until hardware key expansion completes.
 * Returns 0 on success or -ETIMEDOUT after 100 ms.
 */
static inline int starfive_aes_wait_keydone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_KEY_DONE), 10, 100000);
}
/*
 * Poll the CSR until the GCM phase-done bit is set.
 * Returns 0 on success or -ETIMEDOUT after 100 ms.
 */
static inline int starfive_aes_wait_gcmdone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_GCM_DONE), 10, 100000);
}
  65. static inline int is_gcm(struct starfive_cryp_dev *cryp)
  66. {
  67. return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
  68. }
  69. static inline bool is_encrypt(struct starfive_cryp_dev *cryp)
  70. {
  71. return cryp->flags & FLG_ENCRYPT;
  72. }
/*
 * Kick the AEAD state machine for @hw_mode by setting the matching
 * start bit in the CSR (read-modify-write).
 * For GCM this also waits for the pre-computation phase to finish;
 * NOTE(review): the wait's timeout result is discarded here — consider
 * propagating it to the caller.
 */
static void starfive_aes_aead_hw_start(struct starfive_cryp_ctx *ctx, u32 hw_mode)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int value;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_GCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		starfive_aes_wait_gcmdone(cryp);
		break;
	case STARFIVE_AES_MODE_CCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_CCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		break;
	}
}
  91. static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx)
  92. {
  93. struct starfive_cryp_dev *cryp = ctx->cryp;
  94. writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
  95. writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
  96. }
  97. static inline void starfive_aes_set_mlen(struct starfive_cryp_ctx *ctx)
  98. {
  99. struct starfive_cryp_dev *cryp = ctx->cryp;
  100. writel(upper_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN0);
  101. writel(lower_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN1);
  102. }
  103. static inline int starfive_aes_ccm_check_iv(const u8 *iv)
  104. {
  105. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  106. if (iv[0] < 1 || iv[0] > 7)
  107. return -EINVAL;
  108. return 0;
  109. }
/*
 * Load the IV registers.  GCM takes only the first three words (the
 * counter word is managed by the engine) and is polled for GCM-done
 * before returning; other modes take all four words.
 * Returns 0 on success or -ETIMEDOUT if the GCM poll expires.
 */
static int starfive_aes_write_iv(struct starfive_cryp_ctx *ctx, u32 *iv)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(iv[0], cryp->base + STARFIVE_AES_IV0);
	writel(iv[1], cryp->base + STARFIVE_AES_IV1);
	writel(iv[2], cryp->base + STARFIVE_AES_IV2);

	if (is_gcm(cryp)) {
		if (starfive_aes_wait_gcmdone(cryp))
			return -ETIMEDOUT;

		return 0;
	}

	writel(iv[3], cryp->base + STARFIVE_AES_IV3);

	return 0;
}
  124. static inline void starfive_aes_get_iv(struct starfive_cryp_dev *cryp, u32 *iv)
  125. {
  126. iv[0] = readl(cryp->base + STARFIVE_AES_IV0);
  127. iv[1] = readl(cryp->base + STARFIVE_AES_IV1);
  128. iv[2] = readl(cryp->base + STARFIVE_AES_IV2);
  129. iv[3] = readl(cryp->base + STARFIVE_AES_IV3);
  130. }
  131. static inline void starfive_aes_write_nonce(struct starfive_cryp_ctx *ctx, u32 *nonce)
  132. {
  133. struct starfive_cryp_dev *cryp = ctx->cryp;
  134. writel(nonce[0], cryp->base + STARFIVE_AES_NONCE0);
  135. writel(nonce[1], cryp->base + STARFIVE_AES_NONCE1);
  136. writel(nonce[2], cryp->base + STARFIVE_AES_NONCE2);
  137. writel(nonce[3], cryp->base + STARFIVE_AES_NONCE3);
  138. }
/*
 * Program the key registers according to ctx->keylen (128/192/256 bit)
 * and wait for the hardware key schedule to complete.
 * Returns 0 on success or -ETIMEDOUT.
 */
static int starfive_aes_write_key(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 *key = (u32 *)ctx->key;

	if (ctx->keylen >= AES_KEYSIZE_128) {
		writel(key[0], cryp->base + STARFIVE_AES_KEY0);
		writel(key[1], cryp->base + STARFIVE_AES_KEY1);
		writel(key[2], cryp->base + STARFIVE_AES_KEY2);
		writel(key[3], cryp->base + STARFIVE_AES_KEY3);
	}

	if (ctx->keylen >= AES_KEYSIZE_192) {
		writel(key[4], cryp->base + STARFIVE_AES_KEY4);
		writel(key[5], cryp->base + STARFIVE_AES_KEY5);
	}

	if (ctx->keylen >= AES_KEYSIZE_256) {
		writel(key[6], cryp->base + STARFIVE_AES_KEY6);
		writel(key[7], cryp->base + STARFIVE_AES_KEY7);
	}

	if (starfive_aes_wait_keydone(cryp))
		return -ETIMEDOUT;

	return 0;
}
/*
 * Build the CCM B0 block from the request IV and program it into the
 * NONCE registers.  B0 = flags byte (tag length, adata bit, L'), the
 * nonce from the IV, then the message length in the trailing bytes.
 * NOTE(review): only the last two bytes carry the text length, so this
 * assumes the payload length fits in 16 bits — confirm against the
 * sizes this driver accepts.
 */
static int starfive_aes_ccm_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	unsigned int textlen;

	memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
	/* Zero the trailing counter field (iv[0] + 1 bytes) of the copy. */
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	/* Tag length is encoded as (t - 2) / 2 in bits 3..5 of the flags. */
	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->assoclen)
		b0[0] |= CCM_B0_ADATA;

	textlen = cryp->total_in;

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	starfive_aes_write_nonce(ctx, (u32 *)b0);

	return 0;
}
  179. static int starfive_aes_hw_init(struct starfive_cryp_ctx *ctx)
  180. {
  181. struct starfive_cryp_request_ctx *rctx = ctx->rctx;
  182. struct starfive_cryp_dev *cryp = ctx->cryp;
  183. u32 hw_mode;
  184. /* reset */
  185. rctx->csr.aes.v = 0;
  186. rctx->csr.aes.aesrst = 1;
  187. writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);
  188. /* csr setup */
  189. hw_mode = cryp->flags & FLG_MODE_MASK;
  190. rctx->csr.aes.v = 0;
  191. switch (ctx->keylen) {
  192. case AES_KEYSIZE_128:
  193. rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_128;
  194. break;
  195. case AES_KEYSIZE_192:
  196. rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_192;
  197. break;
  198. case AES_KEYSIZE_256:
  199. rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_256;
  200. break;
  201. }
  202. rctx->csr.aes.mode = hw_mode;
  203. rctx->csr.aes.cmode = !is_encrypt(cryp);
  204. rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1;
  205. if (cryp->side_chan) {
  206. rctx->csr.aes.delay_aes = 1;
  207. rctx->csr.aes.vaes_start = 1;
  208. }
  209. writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);
  210. cryp->err = starfive_aes_write_key(ctx);
  211. if (cryp->err)
  212. return cryp->err;
  213. switch (hw_mode) {
  214. case STARFIVE_AES_MODE_GCM:
  215. starfive_aes_set_alen(ctx);
  216. starfive_aes_set_mlen(ctx);
  217. writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
  218. starfive_aes_aead_hw_start(ctx, hw_mode);
  219. starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
  220. break;
  221. case STARFIVE_AES_MODE_CCM:
  222. starfive_aes_set_alen(ctx);
  223. starfive_aes_set_mlen(ctx);
  224. starfive_aes_ccm_init(ctx);
  225. starfive_aes_aead_hw_start(ctx, hw_mode);
  226. break;
  227. case STARFIVE_AES_MODE_CBC:
  228. case STARFIVE_AES_MODE_CTR:
  229. starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv);
  230. break;
  231. default:
  232. break;
  233. }
  234. return cryp->err;
  235. }
/*
 * Collect the authentication tag once the engine is idle.  GCM exposes
 * the tag through the NONCE registers, CCM through the data FIFO.
 * On encryption the tag is appended to the output scatterlist after
 * the ciphertext; on decryption it is compared in constant time with
 * the tag copied from the input, returning -EBADMSG on mismatch.
 */
static int starfive_aes_read_authtag(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	int i;

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout waiting for tag generation.");

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM) {
		cryp->tag_out[0] = readl(cryp->base + STARFIVE_AES_NONCE0);
		cryp->tag_out[1] = readl(cryp->base + STARFIVE_AES_NONCE1);
		cryp->tag_out[2] = readl(cryp->base + STARFIVE_AES_NONCE2);
		cryp->tag_out[3] = readl(cryp->base + STARFIVE_AES_NONCE3);
	} else {
		for (i = 0; i < AES_BLOCK_32; i++)
			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
	}

	if (is_encrypt(cryp)) {
		scatterwalk_map_and_copy(cryp->tag_out, rctx->out_sg,
					 cryp->total_in, cryp->authsize, 1);
	} else {
		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
			return -EBADMSG;
	}

	return 0;
}
/*
 * Complete the current request on the crypto engine: read the AEAD tag
 * if one is expected, read back the chained IV for CBC/CTR, then
 * finalize the aead or skcipher request (authsize != 0 means AEAD).
 */
static void starfive_aes_finish_req(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int err = cryp->err;

	if (!err && cryp->authsize)
		err = starfive_aes_read_authtag(ctx);

	if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
		     (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
		starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);

	if (cryp->authsize)
		crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
						 err);
}
/*
 * Push the GCM associated data through the NONCE registers, one
 * 16-byte block per iteration, then wait for the hardware to absorb
 * it.  rctx->adata was allocated with AES_BLOCK_SIZE bytes of zeroed
 * slack, so reading the block-aligned length is safe.
 */
static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	int total_len, loop;

	total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);
	buffer = (u32 *)rctx->adata;

	for (loop = 0; loop < total_len; loop += 4) {
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE0);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE1);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE2);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE3);
		buffer++;
	}

	if (starfive_aes_wait_gcmdone(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing gcm aad block");

	return 0;
}
/*
 * Feed the CCM associated data into the data FIFO.  The first two AAD
 * bytes are written byte-wise, then 12 more bytes complete the first
 * block, then whole 16-byte blocks follow word-wise.
 * NOTE(review): the first 14 bytes are pushed unconditionally even for
 * shorter assoclen; this relies on rctx->adata's zeroed AES_BLOCK_SIZE
 * slack — confirm the hardware expects the zero padding.
 */
static int starfive_aes_ccm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	u8 *ci;
	int total_len, loop;

	total_len = cryp->assoclen;
	ci = rctx->adata;

	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	total_len -= 2;
	buffer = (u32 *)ci;

	for (loop = 0; loop < 3; loop++, buffer++)
		writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

	total_len -= 12;

	while (total_len > 0) {
		for (loop = 0; loop < AES_BLOCK_32; loop++, buffer++)
			writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

		total_len -= AES_BLOCK_SIZE;
	}

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing ccm aad block");

	return 0;
}
/* DMA completion callback: signal the waiter in starfive_aes_dma_xfer(). */
static void starfive_aes_dma_done(void *param)
{
	struct starfive_cryp_dev *cryp = param;

	complete(&cryp->dma_done);
}
/*
 * Configure the TX (memory-to-FIFO) and RX (FIFO-to-memory) slave DMA
 * channels for the crypto FIFO and initialize the completion used to
 * wait for the inbound transfer.
 */
static void starfive_aes_dma_init(struct starfive_cryp_dev *cryp)
{
	cryp->cfg_in.direction = DMA_MEM_TO_DEV;
	cryp->cfg_in.src_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_in.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_in.src_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_maxburst = cryp->dma_maxburst;
	cryp->cfg_in.dst_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->tx, &cryp->cfg_in);

	cryp->cfg_out.direction = DMA_DEV_TO_MEM;
	cryp->cfg_out.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cryp->cfg_out.dst_addr_width = DMA_SLAVE_BUSWIDTH_16_BYTES;
	cryp->cfg_out.src_maxburst = 4;
	cryp->cfg_out.dst_maxburst = 4;
	cryp->cfg_out.src_addr = cryp->phys_base + STARFIVE_ALG_FIFO_OFFSET;

	dmaengine_slave_config(cryp->rx, &cryp->cfg_out);

	init_completion(&cryp->dma_done);
}
/*
 * Run one DMA round of @len bytes (rounded up to AES_BLOCK_SIZE) from
 * @src through the AES FIFO and back into @dst.  sg_dma_len() of both
 * entries is temporarily overwritten with the aligned length and
 * restored before returning, so callers see their lists unmodified.
 * Returns 0 on success, -EINVAL if descriptor preparation fails, or
 * -ETIMEDOUT if the outbound DMA does not complete within 1 s.
 */
static int starfive_aes_dma_xfer(struct starfive_cryp_dev *cryp,
				 struct scatterlist *src,
				 struct scatterlist *dst,
				 int len)
{
	struct dma_async_tx_descriptor *in_desc, *out_desc;
	union starfive_alg_cr alg_cr;
	int ret = 0, in_save, out_save;

	/* Enable AES DMA routing in the shared algorithm control reg. */
	alg_cr.v = 0;
	alg_cr.start = 1;
	alg_cr.aes_dma_en = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	in_save = sg_dma_len(src);
	out_save = sg_dma_len(dst);

	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_IN_LEN_OFFSET);
	writel(ALIGN(len, AES_BLOCK_SIZE), cryp->base + STARFIVE_DMA_OUT_LEN_OFFSET);

	sg_dma_len(src) = ALIGN(len, AES_BLOCK_SIZE);
	sg_dma_len(dst) = ALIGN(len, AES_BLOCK_SIZE);

	/* Queue the RX side first so no output data is lost. */
	out_desc = dmaengine_prep_slave_sg(cryp->rx, dst, 1, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!out_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	out_desc->callback = starfive_aes_dma_done;
	out_desc->callback_param = cryp;

	reinit_completion(&cryp->dma_done);
	dmaengine_submit(out_desc);
	dma_async_issue_pending(cryp->rx);

	in_desc = dmaengine_prep_slave_sg(cryp->tx, src, 1, DMA_MEM_TO_DEV,
					  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!in_desc) {
		ret = -EINVAL;
		goto dma_err;
	}

	dmaengine_submit(in_desc);
	dma_async_issue_pending(cryp->tx);

	if (!wait_for_completion_timeout(&cryp->dma_done,
					 msecs_to_jiffies(1000)))
		ret = -ETIMEDOUT;

dma_err:
	/* Restore the caller's scatterlist lengths and quiesce the IP. */
	sg_dma_len(src) = in_save;
	sg_dma_len(dst) = out_save;

	alg_cr.v = 0;
	alg_cr.clear = 1;
	writel(alg_cr.v, cryp->base + STARFIVE_ALG_CR_OFFSET);

	return ret;
}
  399. static int starfive_aes_map_sg(struct starfive_cryp_dev *cryp,
  400. struct scatterlist *src,
  401. struct scatterlist *dst)
  402. {
  403. struct scatterlist *stsg, *dtsg;
  404. struct scatterlist _src[2], _dst[2];
  405. unsigned int remain = cryp->total_in;
  406. unsigned int len, src_nents, dst_nents;
  407. int ret;
  408. if (src == dst) {
  409. for (stsg = src, dtsg = dst; remain > 0;
  410. stsg = sg_next(stsg), dtsg = sg_next(dtsg)) {
  411. src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
  412. if (src_nents == 0)
  413. return dev_err_probe(cryp->dev, -ENOMEM,
  414. "dma_map_sg error\n");
  415. dst_nents = src_nents;
  416. len = min(sg_dma_len(stsg), remain);
  417. ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
  418. dma_unmap_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);
  419. if (ret)
  420. return ret;
  421. remain -= len;
  422. }
  423. } else {
  424. for (stsg = src, dtsg = dst;;) {
  425. src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
  426. if (src_nents == 0)
  427. return dev_err_probe(cryp->dev, -ENOMEM,
  428. "dma_map_sg src error\n");
  429. dst_nents = dma_map_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
  430. if (dst_nents == 0)
  431. return dev_err_probe(cryp->dev, -ENOMEM,
  432. "dma_map_sg dst error\n");
  433. len = min(sg_dma_len(stsg), sg_dma_len(dtsg));
  434. len = min(len, remain);
  435. ret = starfive_aes_dma_xfer(cryp, stsg, dtsg, len);
  436. dma_unmap_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);
  437. dma_unmap_sg(cryp->dev, dtsg, 1, DMA_FROM_DEVICE);
  438. if (ret)
  439. return ret;
  440. remain -= len;
  441. if (remain == 0)
  442. break;
  443. if (sg_dma_len(stsg) - len) {
  444. stsg = scatterwalk_ffwd(_src, stsg, len);
  445. dtsg = sg_next(dtsg);
  446. } else if (sg_dma_len(dtsg) - len) {
  447. dtsg = scatterwalk_ffwd(_dst, dtsg, len);
  448. stsg = sg_next(stsg);
  449. } else {
  450. stsg = sg_next(stsg);
  451. dtsg = sg_next(dtsg);
  452. }
  453. }
  454. }
  455. return 0;
  456. }
/*
 * crypto-engine callback for skcipher requests: record request state,
 * initialize the hardware, and stream the payload through DMA.
 * Zero-length requests skip DMA and complete immediately.
 * NOTE(review): on starfive_aes_map_sg() failure the request is not
 * finalized here — relies on the engine's error path; confirm.
 */
static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct starfive_cryp_request_ctx *rctx = skcipher_request_ctx(req);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	int ret;

	cryp->req.sreq = req;
	cryp->total_in = req->cryptlen;
	cryp->total_out = req->cryptlen;
	cryp->assoclen = 0;
	cryp->authsize = 0;

	rctx->in_sg = req->src;
	rctx->out_sg = req->dst;

	ctx->rctx = rctx;

	ret = starfive_aes_hw_init(ctx);
	if (ret)
		return ret;

	if (!cryp->total_in)
		goto finish_req;

	starfive_aes_dma_init(cryp);

	ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
	if (ret)
		return ret;

finish_req:
	starfive_aes_finish_req(ctx);

	return 0;
}
  487. static int starfive_aes_init_tfm(struct crypto_skcipher *tfm,
  488. const char *alg_name)
  489. {
  490. struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
  491. ctx->cryp = starfive_cryp_find_dev(ctx);
  492. if (!ctx->cryp)
  493. return -ENODEV;
  494. ctx->skcipher_fbk = crypto_alloc_skcipher(alg_name, 0,
  495. CRYPTO_ALG_NEED_FALLBACK);
  496. if (IS_ERR(ctx->skcipher_fbk))
  497. return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->skcipher_fbk),
  498. "%s() failed to allocate fallback for %s\n",
  499. __func__, alg_name);
  500. crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
  501. crypto_skcipher_reqsize(ctx->skcipher_fbk));
  502. return 0;
  503. }
/* Release the software fallback allocated in starfive_aes_init_tfm(). */
static void starfive_aes_exit_tfm(struct crypto_skcipher *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->skcipher_fbk);
}
  509. static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq)
  510. {
  511. struct aead_request *req =
  512. container_of(areq, struct aead_request, base);
  513. struct starfive_cryp_ctx *ctx =
  514. crypto_aead_ctx(crypto_aead_reqtfm(req));
  515. struct starfive_cryp_dev *cryp = ctx->cryp;
  516. struct starfive_cryp_request_ctx *rctx = aead_request_ctx(req);
  517. struct scatterlist _src[2], _dst[2];
  518. int ret;
  519. cryp->req.areq = req;
  520. cryp->assoclen = req->assoclen;
  521. cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));
  522. rctx->in_sg = scatterwalk_ffwd(_src, req->src, cryp->assoclen);
  523. if (req->src == req->dst)
  524. rctx->out_sg = rctx->in_sg;
  525. else
  526. rctx->out_sg = scatterwalk_ffwd(_dst, req->dst, cryp->assoclen);
  527. if (is_encrypt(cryp)) {
  528. cryp->total_in = req->cryptlen;
  529. cryp->total_out = req->cryptlen;
  530. } else {
  531. cryp->total_in = req->cryptlen - cryp->authsize;
  532. cryp->total_out = cryp->total_in;
  533. scatterwalk_map_and_copy(cryp->tag_in, req->src,
  534. cryp->total_in + cryp->assoclen,
  535. cryp->authsize, 0);
  536. }
  537. if (cryp->assoclen) {
  538. rctx->adata = kzalloc(cryp->assoclen + AES_BLOCK_SIZE, GFP_KERNEL);
  539. if (!rctx->adata)
  540. return dev_err_probe(cryp->dev, -ENOMEM,
  541. "Failed to alloc memory for adata");
  542. if (sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, cryp->assoclen),
  543. rctx->adata, cryp->assoclen) != cryp->assoclen)
  544. return -EINVAL;
  545. }
  546. if (cryp->total_in)
  547. sg_zero_buffer(rctx->in_sg, sg_nents(rctx->in_sg),
  548. sg_dma_len(rctx->in_sg) - cryp->total_in,
  549. cryp->total_in);
  550. ctx->rctx = rctx;
  551. ret = starfive_aes_hw_init(ctx);
  552. if (ret)
  553. return ret;
  554. if (!cryp->assoclen)
  555. goto write_text;
  556. if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
  557. ret = starfive_aes_ccm_write_adata(ctx);
  558. else
  559. ret = starfive_aes_gcm_write_adata(ctx);
  560. kfree(rctx->adata);
  561. if (ret)
  562. return ret;
  563. write_text:
  564. if (!cryp->total_in)
  565. goto finish_req;
  566. starfive_aes_dma_init(cryp);
  567. ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg);
  568. if (ret)
  569. return ret;
  570. finish_req:
  571. starfive_aes_finish_req(ctx);
  572. return 0;
  573. }
  574. static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm,
  575. const char *alg_name)
  576. {
  577. struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  578. ctx->cryp = starfive_cryp_find_dev(ctx);
  579. if (!ctx->cryp)
  580. return -ENODEV;
  581. ctx->aead_fbk = crypto_alloc_aead(alg_name, 0,
  582. CRYPTO_ALG_NEED_FALLBACK);
  583. if (IS_ERR(ctx->aead_fbk))
  584. return dev_err_probe(ctx->cryp->dev, PTR_ERR(ctx->aead_fbk),
  585. "%s() failed to allocate fallback for %s\n",
  586. __func__, alg_name);
  587. crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
  588. crypto_aead_reqsize(ctx->aead_fbk));
  589. return 0;
  590. }
/* Release the fallback allocated in starfive_aes_aead_init_tfm(). */
static void starfive_aes_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->aead_fbk);
}
  596. static bool starfive_aes_check_unaligned(struct starfive_cryp_dev *cryp,
  597. struct scatterlist *src,
  598. struct scatterlist *dst)
  599. {
  600. struct scatterlist *tsg;
  601. int i;
  602. for_each_sg(src, tsg, sg_nents(src), i)
  603. if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
  604. (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
  605. !sg_is_last(tsg)))
  606. return true;
  607. if (src != dst)
  608. for_each_sg(dst, tsg, sg_nents(dst), i)
  609. if (!IS_ALIGNED(tsg->offset, sizeof(u32)) ||
  610. (!IS_ALIGNED(tsg->length, AES_BLOCK_SIZE) &&
  611. !sg_is_last(tsg)))
  612. return true;
  613. return false;
  614. }
  615. static int starfive_aes_do_fallback(struct skcipher_request *req, bool enc)
  616. {
  617. struct starfive_cryp_ctx *ctx =
  618. crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
  619. struct skcipher_request *subreq = skcipher_request_ctx(req);
  620. skcipher_request_set_tfm(subreq, ctx->skcipher_fbk);
  621. skcipher_request_set_callback(subreq, req->base.flags,
  622. req->base.complete,
  623. req->base.data);
  624. skcipher_request_set_crypt(subreq, req->src, req->dst,
  625. req->cryptlen, req->iv);
  626. return enc ? crypto_skcipher_encrypt(subreq) :
  627. crypto_skcipher_decrypt(subreq);
  628. }
  629. static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
  630. {
  631. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  632. struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
  633. struct starfive_cryp_dev *cryp = ctx->cryp;
  634. unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;
  635. cryp->flags = flags;
  636. if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
  637. (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
  638. if (req->cryptlen & blocksize_align)
  639. return -EINVAL;
  640. if (starfive_aes_check_unaligned(cryp, req->src, req->dst))
  641. return starfive_aes_do_fallback(req, is_encrypt(cryp));
  642. return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
  643. }
  644. static int starfive_aes_aead_do_fallback(struct aead_request *req, bool enc)
  645. {
  646. struct starfive_cryp_ctx *ctx =
  647. crypto_aead_ctx(crypto_aead_reqtfm(req));
  648. struct aead_request *subreq = aead_request_ctx(req);
  649. aead_request_set_tfm(subreq, ctx->aead_fbk);
  650. aead_request_set_callback(subreq, req->base.flags,
  651. req->base.complete,
  652. req->base.data);
  653. aead_request_set_crypt(subreq, req->src, req->dst,
  654. req->cryptlen, req->iv);
  655. aead_request_set_ad(subreq, req->assoclen);
  656. return enc ? crypto_aead_encrypt(subreq) :
  657. crypto_aead_decrypt(subreq);
  658. }
/*
 * Common AEAD entry: CCM decryption and DMA-unfriendly scatterlists go
 * to the software fallback; everything else is queued on the engine.
 * Alignment is checked on the text portion only (past the AAD).
 */
static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct scatterlist *src, *dst, _src[2], _dst[2];

	cryp->flags = flags;

	/* aes-ccm does not support tag verification for non-aligned text,
	 * use fallback for ccm decryption instead.
	 */
	if (((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM) &&
	    !is_encrypt(cryp))
		return starfive_aes_aead_do_fallback(req, 0);

	src = scatterwalk_ffwd(_src, req->src, req->assoclen);

	if (req->src == req->dst)
		dst = src;
	else
		dst = scatterwalk_ffwd(_dst, req->dst, req->assoclen);

	if (starfive_aes_check_unaligned(cryp, src, dst))
		return starfive_aes_aead_do_fallback(req, is_encrypt(cryp));

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}
  680. static int starfive_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
  681. unsigned int keylen)
  682. {
  683. struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
  684. if (!key || !keylen)
  685. return -EINVAL;
  686. if (keylen != AES_KEYSIZE_128 &&
  687. keylen != AES_KEYSIZE_192 &&
  688. keylen != AES_KEYSIZE_256)
  689. return -EINVAL;
  690. memcpy(ctx->key, key, keylen);
  691. ctx->keylen = keylen;
  692. return crypto_skcipher_setkey(ctx->skcipher_fbk, key, keylen);
  693. }
  694. static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
  695. unsigned int keylen)
  696. {
  697. struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  698. if (!key || !keylen)
  699. return -EINVAL;
  700. if (keylen != AES_KEYSIZE_128 &&
  701. keylen != AES_KEYSIZE_192 &&
  702. keylen != AES_KEYSIZE_256)
  703. return -EINVAL;
  704. memcpy(ctx->key, key, keylen);
  705. ctx->keylen = keylen;
  706. return crypto_aead_setkey(ctx->aead_fbk, key, keylen);
  707. }
  708. static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm,
  709. unsigned int authsize)
  710. {
  711. struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  712. int ret;
  713. ret = crypto_gcm_check_authsize(authsize);
  714. if (ret)
  715. return ret;
  716. return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
  717. }
  718. static int starfive_aes_ccm_setauthsize(struct crypto_aead *tfm,
  719. unsigned int authsize)
  720. {
  721. struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  722. switch (authsize) {
  723. case 4:
  724. case 6:
  725. case 8:
  726. case 10:
  727. case 12:
  728. case 14:
  729. case 16:
  730. break;
  731. default:
  732. return -EINVAL;
  733. }
  734. return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
  735. }
/* ECB-mode encrypt entry point: queue the request in encrypt direction. */
static int starfive_aes_ecb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB | FLG_ENCRYPT);
}
/* ECB-mode decrypt entry point (no FLG_ENCRYPT = decrypt direction). */
static int starfive_aes_ecb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB);
}
/* CBC-mode encrypt entry point: queue the request in encrypt direction. */
static int starfive_aes_cbc_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC | FLG_ENCRYPT);
}
/* CBC-mode decrypt entry point (no FLG_ENCRYPT = decrypt direction). */
static int starfive_aes_cbc_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC);
}
/* CTR-mode encrypt entry point: queue the request in encrypt direction. */
static int starfive_aes_ctr_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR | FLG_ENCRYPT);
}
/* CTR-mode decrypt entry point (no FLG_ENCRYPT = decrypt direction). */
static int starfive_aes_ctr_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR);
}
/* GCM encrypt entry point: queue the AEAD request in encrypt direction. */
static int starfive_aes_gcm_encrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM | FLG_ENCRYPT);
}
/* GCM decrypt entry point (no FLG_ENCRYPT = decrypt direction). */
static int starfive_aes_gcm_decrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM);
}
  768. static int starfive_aes_ccm_encrypt(struct aead_request *req)
  769. {
  770. int ret;
  771. ret = starfive_aes_ccm_check_iv(req->iv);
  772. if (ret)
  773. return ret;
  774. return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM | FLG_ENCRYPT);
  775. }
  776. static int starfive_aes_ccm_decrypt(struct aead_request *req)
  777. {
  778. int ret;
  779. ret = starfive_aes_ccm_check_iv(req->iv);
  780. if (ret)
  781. return ret;
  782. return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM);
  783. }
/* Init ECB transform with the generic software cipher as fallback. */
static int starfive_aes_ecb_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ecb(aes-generic)");
}
/* Init CBC transform with the generic software cipher as fallback. */
static int starfive_aes_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "cbc(aes-generic)");
}
/* Init CTR transform with the generic software cipher as fallback. */
static int starfive_aes_ctr_init_tfm(struct crypto_skcipher *tfm)
{
	return starfive_aes_init_tfm(tfm, "ctr(aes-generic)");
}
/* Init CCM transform; fallback is the generic ccm_base composition. */
static int starfive_aes_ccm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "ccm_base(ctr(aes-generic),cbcmac(aes-generic))");
}
/* Init GCM transform; fallback is the generic gcm_base composition. */
static int starfive_aes_gcm_init_tfm(struct crypto_aead *tfm)
{
	return starfive_aes_aead_init_tfm(tfm, "gcm_base(ctr(aes-generic),ghash-generic)");
}
/*
 * Hardware-backed AES skcipher algorithms (ECB, CBC, CTR) registered
 * with the crypto engine.  All entries set CRYPTO_ALG_NEED_FALLBACK
 * because unaligned requests are redirected to a software fallback,
 * and cra_alignmask = 0xf because the hardware needs 16-byte aligned
 * buffers.
 */
static struct skcipher_engine_alg skcipher_algs[] = {
	{
		.base.init			= starfive_aes_ecb_init_tfm,
		.base.exit			= starfive_aes_exit_tfm,
		.base.setkey			= starfive_aes_setkey,
		.base.encrypt			= starfive_aes_ecb_encrypt,
		.base.decrypt			= starfive_aes_ecb_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE,
		.base.max_keysize		= AES_MAX_KEY_SIZE,
		.base.base = {
			.cra_name		= "ecb(aes)",
			.cra_driver_name	= "starfive-ecb-aes",
			.cra_priority		= 200,
			.cra_flags		= CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
			.cra_alignmask		= 0xf,
			.cra_module		= THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_do_one_req,
		},
	}, {
		.base.init			= starfive_aes_cbc_init_tfm,
		.base.exit			= starfive_aes_exit_tfm,
		.base.setkey			= starfive_aes_setkey,
		.base.encrypt			= starfive_aes_cbc_encrypt,
		.base.decrypt			= starfive_aes_cbc_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE,
		.base.max_keysize		= AES_MAX_KEY_SIZE,
		.base.ivsize			= AES_BLOCK_SIZE,
		.base.base = {
			.cra_name		= "cbc(aes)",
			.cra_driver_name	= "starfive-cbc-aes",
			.cra_priority		= 200,
			.cra_flags		= CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
			.cra_alignmask		= 0xf,
			.cra_module		= THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_do_one_req,
		},
	}, {
		.base.init			= starfive_aes_ctr_init_tfm,
		.base.exit			= starfive_aes_exit_tfm,
		.base.setkey			= starfive_aes_setkey,
		.base.encrypt			= starfive_aes_ctr_encrypt,
		.base.decrypt			= starfive_aes_ctr_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE,
		.base.max_keysize		= AES_MAX_KEY_SIZE,
		.base.ivsize			= AES_BLOCK_SIZE,
		.base.base = {
			.cra_name		= "ctr(aes)",
			.cra_driver_name	= "starfive-ctr-aes",
			.cra_priority		= 200,
			.cra_flags		= CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK,
			/* CTR is a stream mode: blocksize 1. */
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
			.cra_alignmask		= 0xf,
			.cra_module		= THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_do_one_req,
		},
	},
};
/*
 * Hardware-backed AES AEAD algorithms (GCM, CCM) registered with the
 * crypto engine.  Like the skciphers, both entries declare a software
 * fallback for unaligned requests and a 16-byte alignmask.
 */
static struct aead_engine_alg aead_algs[] = {
	{
		.base.setkey			= starfive_aes_aead_setkey,
		.base.setauthsize		= starfive_aes_gcm_setauthsize,
		.base.encrypt			= starfive_aes_gcm_encrypt,
		.base.decrypt			= starfive_aes_gcm_decrypt,
		.base.init			= starfive_aes_gcm_init_tfm,
		.base.exit			= starfive_aes_aead_exit_tfm,
		.base.ivsize			= GCM_AES_IV_SIZE,
		.base.maxauthsize		= AES_BLOCK_SIZE,
		.base.base = {
			.cra_name		= "gcm(aes)",
			.cra_driver_name	= "starfive-gcm-aes",
			.cra_priority		= 200,
			.cra_flags		= CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
			.cra_alignmask		= 0xf,
			.cra_module		= THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_aead_do_one_req,
		},
	}, {
		.base.setkey			= starfive_aes_aead_setkey,
		.base.setauthsize		= starfive_aes_ccm_setauthsize,
		.base.encrypt			= starfive_aes_ccm_encrypt,
		.base.decrypt			= starfive_aes_ccm_decrypt,
		.base.init			= starfive_aes_ccm_init_tfm,
		.base.exit			= starfive_aes_aead_exit_tfm,
		.base.ivsize			= AES_BLOCK_SIZE,
		.base.maxauthsize		= AES_BLOCK_SIZE,
		.base.base = {
			.cra_name		= "ccm(aes)",
			.cra_driver_name	= "starfive-ccm-aes",
			.cra_priority		= 200,
			.cra_flags		= CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
			.cra_alignmask		= 0xf,
			.cra_module		= THIS_MODULE,
		},
		.op = {
			.do_one_request = starfive_aes_aead_do_one_req,
		},
	},
};
  924. int starfive_aes_register_algs(void)
  925. {
  926. int ret;
  927. ret = crypto_engine_register_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
  928. if (ret)
  929. return ret;
  930. ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
  931. if (ret)
  932. crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
  933. return ret;
  934. }
/* Unregister all AES algorithms, AEADs first (reverse of registration). */
void starfive_aes_unregister_algs(void)
{
	crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}