aspeed-hace-crypto.c

// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2021 Aspeed Technology Inc.
 */

#include "aspeed-hace.h"
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO_DEBUG
#define CIPHER_DBG(h, fmt, ...) \
	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#else
#define CIPHER_DBG(h, fmt, ...) \
	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#endif
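
/*
 * Run the request through the software fallback tfm, reusing the
 * original request's callback so completion is transparent to the
 * caller.
 */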
static int aspeed_crypto_do_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	if (rctx->enc_cmd & HACE_CMD_ENCRYPT)
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_decrypt(&rctx->fallback_req);

	return err;
}
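
/*
 * Only used on the AST2500: the engine cannot process zero-length
 * requests or lengths that are not block-aligned, so those are routed
 * to the software fallback.
 */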
static bool aspeed_crypto_need_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);

	if (areq->cryptlen == 0)
		return true;

	if ((rctx->enc_cmd & HACE_CMD_DES_SELECT) &&
	    !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE))
		return true;

	if ((!(rctx->enc_cmd & HACE_CMD_DES_SELECT)) &&
	    !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE))
		return true;

	return false;
}
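
/*
 * Hand the request to the crypto engine queue, or to the software
 * fallback when the AST2500 cannot process it directly.
 */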
static int aspeed_hace_crypto_handle_queue(struct aspeed_hace_dev *hace_dev,
					   struct skcipher_request *req)
{
	if (hace_dev->version == AST2500_VERSION &&
	    aspeed_crypto_need_fallback(req)) {
		CIPHER_DBG(hace_dev, "SW fallback\n");
		return aspeed_crypto_do_fallback(req);
	}

	return crypto_transfer_skcipher_request_to_engine(
			hace_dev->crypt_engine_crypto, req);
}
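
/* crypto_engine worker: mark the engine busy and kick off the transfer */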
static int aspeed_crypto_do_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct aspeed_engine_crypto *crypto_engine;
	int rc;

	crypto_engine = &hace_dev->crypto_engine;
	crypto_engine->req = req;
	crypto_engine->flags |= CRYPTO_FLAGS_BUSY;

	rc = ctx->start(hace_dev);

	if (rc != -EINPROGRESS)
		return -EIO;

	return 0;
}
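
/*
 * Common completion path: copy the updated IV back out of the context
 * buffer (offset 8 for DES/3DES, the first 16 bytes for AES), then
 * finalize the request on the crypto engine.
 */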
static int aspeed_sk_complete(struct aspeed_hace_dev *hace_dev, int err)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(req->iv, crypto_engine->cipher_ctx +
			       DES_KEY_SIZE, DES_KEY_SIZE);
		else
			memcpy(req->iv, crypto_engine->cipher_ctx,
			       AES_BLOCK_SIZE);
	}

	crypto_engine->flags &= ~CRYPTO_FLAGS_BUSY;

	crypto_finalize_skcipher_request(hace_dev->crypt_engine_crypto, req,
					 err);

	return err;
}
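
/* Completion for scatter-gather mode: unmap the DMA buffers first */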
static int aspeed_sk_transfer_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct device *dev = hace_dev->dev;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (req->src == req->dst) {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, req->dst, rctx->dst_nents, DMA_FROM_DEVICE);
	}

	return aspeed_sk_complete(hace_dev, 0);
}
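
/*
 * Completion for buffer mode (AST2500): the engine wrote its result to
 * the contiguous cipher buffer, so copy it back into the destination
 * scatterlist.
 */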
static int aspeed_sk_transfer(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *out_sg;
	int nbytes = 0;
	int rc = 0;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	out_sg = req->dst;

	/* Copy output buffer to dst scatter-gather lists */
	nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents,
				     crypto_engine->cipher_addr, req->cryptlen);
	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		rc = -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_out_sg", rctx->dst_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	return aspeed_sk_complete(hace_dev, rc);
}
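
/*
 * Buffer-mode start (AST2500): gather the source scatterlist into the
 * contiguous cipher buffer and run the operation in place, with source
 * and destination both pointing at that buffer.
 */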
static int aspeed_sk_start(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *in_sg;
	int nbytes;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	in_sg = req->src;

	nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,
				   crypto_engine->cipher_addr, req->cryptlen);

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_in_sg", rctx->src_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		return -EINVAL;
	}

	crypto_engine->resume = aspeed_sk_transfer;

	/* Trigger engines */
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;
}
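
/*
 * Scatter-gather start (AST2600): map the source and destination
 * scatterlists and build the hardware descriptor tables; BIT(31) in a
 * descriptor's length word marks the last entry.
 */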
static int aspeed_sk_start_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_sg_list *src_list, *dst_list;
	dma_addr_t src_dma_addr, dst_dma_addr;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *s;
	int src_sg_len;
	int dst_sg_len;
	int total, i;
	int rc;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	rctx->enc_cmd |= HACE_CMD_DES_SG_CTRL | HACE_CMD_SRC_SG_CTRL |
			 HACE_CMD_AES_KEY_HW_EXP | HACE_CMD_MBUS_REQ_SYNC_EN;

	/* BIDIRECTIONAL */
	if (req->dst == req->src) {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_BIDIRECTIONAL);
		dst_sg_len = src_sg_len;
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}
	} else {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_TO_DEVICE);
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}

		dst_sg_len = dma_map_sg(hace_dev->dev, req->dst,
					rctx->dst_nents, DMA_FROM_DEVICE);
		if (!dst_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() dst error\n");
			rc = -EINVAL;
			goto free_req_src;
		}
	}

	src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr;
	src_dma_addr = crypto_engine->cipher_dma_addr;
	total = req->cryptlen;

	for_each_sg(req->src, s, src_sg_len, i) {
		u32 phy_addr = sg_dma_address(s);
		u32 len = sg_dma_len(s);

		if (total > len) {
			total -= len;
		} else {
			/* last sg list */
			len = total;
			len |= BIT(31);
			total = 0;
		}

		src_list[i].phy_addr = cpu_to_le32(phy_addr);
		src_list[i].len = cpu_to_le32(len);
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	if (req->dst == req->src) {
		dst_list = src_list;
		dst_dma_addr = src_dma_addr;
	} else {
		dst_list = (struct aspeed_sg_list *)crypto_engine->dst_sg_addr;
		dst_dma_addr = crypto_engine->dst_sg_dma_addr;
		total = req->cryptlen;

		for_each_sg(req->dst, s, dst_sg_len, i) {
			u32 phy_addr = sg_dma_address(s);
			u32 len = sg_dma_len(s);

			if (total > len) {
				total -= len;
			} else {
				/* last sg list */
				len = total;
				len |= BIT(31);
				total = 0;
			}

			dst_list[i].phy_addr = cpu_to_le32(phy_addr);
			dst_list[i].len = cpu_to_le32(len);
		}

		dst_list[dst_sg_len].phy_addr = 0;
		dst_list[dst_sg_len].len = 0;
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	crypto_engine->resume = aspeed_sk_transfer_sg;

	/* Memory barrier to ensure all data setup before engine starts */
	mb();

	/* Trigger engines */
	ast_hace_write(hace_dev, src_dma_addr, ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, dst_dma_addr, ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;

free_req:
	if (req->dst == req->src) {
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_BIDIRECTIONAL);
	} else {
		/* Unmap with the same direction each list was mapped with */
		dma_unmap_sg(hace_dev->dev, req->dst, rctx->dst_nents,
			     DMA_FROM_DEVICE);
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_TO_DEVICE);
	}

	return rc;

free_req_src:
	dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);

	return rc;
}
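
/*
 * Program the cipher context buffer (IV at offset 8 for DES/3DES or
 * offset 0 for AES, key material at offset 16) and start the engine:
 * scatter-gather mode on the AST2600, buffer mode otherwise.
 */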
static int aspeed_hace_skcipher_trigger(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct crypto_skcipher *cipher;
	struct aspeed_cipher_ctx *ctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	cipher = crypto_skcipher_reqtfm(req);
	ctx = crypto_skcipher_ctx(cipher);

	/* enable interrupt */
	rctx->enc_cmd |= HACE_CMD_ISR_EN;

	rctx->dst_nents = sg_nents(req->dst);
	rctx->src_nents = sg_nents(req->src);

	ast_hace_write(hace_dev, crypto_engine->cipher_ctx_dma,
		       ASPEED_HACE_CONTEXT);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(crypto_engine->cipher_ctx + DES_BLOCK_SIZE,
			       req->iv, DES_BLOCK_SIZE);
		else
			memcpy(crypto_engine->cipher_ctx, req->iv,
			       AES_BLOCK_SIZE);
	}

	if (hace_dev->version == AST2600_VERSION) {
		memcpy(crypto_engine->cipher_ctx + 16, ctx->key, ctx->key_len);

		return aspeed_sk_start_sg(hace_dev);
	}

	memcpy(crypto_engine->cipher_ctx + 16, ctx->key, AES_MAX_KEYLENGTH);

	return aspeed_sk_start(hace_dev);
}
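
/*
 * Build the command word for a DES/3DES request; ECB and CBC require a
 * block-aligned length.
 */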
static int aspeed_des_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	CIPHER_DBG(hace_dev, "\n");

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE))
			return -EINVAL;
	}

	rctx->enc_cmd = cmd | HACE_CMD_DES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
			HACE_CMD_DES | HACE_CMD_CONTEXT_LOAD_ENABLE |
			HACE_CMD_CONTEXT_SAVE_ENABLE;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}
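
/*
 * Validate and store a DES or 3DES key, and mirror it into the software
 * fallback tfm so either path can serve the request.
 */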
static int aspeed_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	int rc;

	CIPHER_DBG(hace_dev, "keylen: %d bytes\n", keylen);

	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
		dev_warn(hace_dev->dev, "invalid keylen: %d bytes\n", keylen);
		return -EINVAL;
	}

	if (keylen == DES_KEY_SIZE) {
		rc = crypto_des_verify_key(tfm, key);
		if (rc)
			return rc;
	} else if (keylen == DES3_EDE_KEY_SIZE) {
		rc = crypto_des3_ede_verify_key(tfm, key);
		if (rc)
			return rc;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int aspeed_tdes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_des_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}
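
/*
 * Build the command word for an AES request: enforce block alignment
 * for ECB/CBC and select the key size.
 */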
static int aspeed_aes_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
			return -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s\n",
		   (cmd & HACE_CMD_ENCRYPT) ? "encrypt" : "decrypt");

	cmd |= HACE_CMD_AES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
	       HACE_CMD_CONTEXT_LOAD_ENABLE | HACE_CMD_CONTEXT_SAVE_ENABLE;

	switch (ctx->key_len) {
	case AES_KEYSIZE_128:
		cmd |= HACE_CMD_AES128;
		break;
	case AES_KEYSIZE_192:
		cmd |= HACE_CMD_AES192;
		break;
	case AES_KEYSIZE_256:
		cmd |= HACE_CMD_AES256;
		break;
	default:
		return -EINVAL;
	}

	rctx->enc_cmd = cmd;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}
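
/*
 * For the AST2500 the AES key schedule is expanded in software and the
 * expanded schedule is stored; the AST2600 path sets
 * HACE_CMD_AES_KEY_HW_EXP and programs the raw key instead.
 */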
static int aspeed_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct crypto_aes_ctx gen_aes_key;

	CIPHER_DBG(hace_dev, "keylen: %d bits\n", (keylen * 8));

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	if (ctx->hace_dev->version == AST2500_VERSION) {
		aes_expandkey(&gen_aes_key, key, keylen);
		memcpy(ctx->key, gen_aes_key.key_enc, AES_MAX_KEYLENGTH);
	} else {
		memcpy(ctx->key, key, keylen);
	}

	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int aspeed_aes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB);
}

static int aspeed_aes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB);
}
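
/*
 * Allocate the software fallback tfm and size the request context so it
 * can also carry the fallback's request.
 */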
static int aspeed_crypto_cra_init(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct aspeed_hace_alg *crypto_alg;

	crypto_alg = container_of(alg, struct aspeed_hace_alg, alg.skcipher.base);

	ctx->hace_dev = crypto_alg->hace_dev;
	ctx->start = aspeed_hace_skcipher_trigger;

	CIPHER_DBG(ctx->hace_dev, "%s\n", name);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(ctx->hace_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct aspeed_cipher_reqctx) +
				    crypto_skcipher_reqsize(ctx->fallback_tfm));

	return 0;
}

static void aspeed_crypto_cra_exit(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;

	CIPHER_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(&tfm->base));
	crypto_free_skcipher(ctx->fallback_tfm);
}
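
/* Algorithms registered on all supported HACE versions */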
static struct aspeed_hace_alg aspeed_crypto_algs[] = {
	{
		.alg.skcipher.base = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_ecb_encrypt,
			.decrypt = aspeed_aes_ecb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "aspeed-ecb-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize = AES_BLOCK_SIZE,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_cbc_encrypt,
			.decrypt = aspeed_aes_cbc_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "aspeed-cbc-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_ecb_encrypt,
			.decrypt = aspeed_des_ecb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "aspeed-ecb-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_cbc_encrypt,
			.decrypt = aspeed_des_cbc_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "aspeed-cbc-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_ecb_encrypt,
			.decrypt = aspeed_tdes_ecb_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "aspeed-ecb-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_cbc_encrypt,
			.decrypt = aspeed_tdes_cbc_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "aspeed-cbc-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = DES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
};
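
/* CTR-mode algorithms, registered on the AST2600 only */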
static struct aspeed_hace_alg aspeed_crypto_algs_g6[] = {
	{
		.alg.skcipher.base = {
			.ivsize = AES_BLOCK_SIZE,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = aspeed_aes_setkey,
			.encrypt = aspeed_aes_ctr_encrypt,
			.decrypt = aspeed_aes_ctr_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "aspeed-ctr-aes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_des_ctr_encrypt,
			.decrypt = aspeed_des_ctr_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ctr(des)",
				.cra_driver_name = "aspeed-ctr-des",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize = DES_BLOCK_SIZE,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = aspeed_des_setkey,
			.encrypt = aspeed_tdes_ctr_encrypt,
			.decrypt = aspeed_tdes_ctr_decrypt,
			.init = aspeed_crypto_cra_init,
			.exit = aspeed_crypto_cra_exit,
			.base = {
				.cra_name = "ctr(des3_ede)",
				.cra_driver_name = "aspeed-ctr-tdes",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_ASYNC,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask = 0x0f,
				.cra_module = THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
};

void aspeed_unregister_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++)
		crypto_engine_unregister_skcipher(&aspeed_crypto_algs[i].alg.skcipher);

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++)
		crypto_engine_unregister_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
}

void aspeed_register_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int rc, i;

	CIPHER_DBG(hace_dev, "\n");

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++) {
		aspeed_crypto_algs[i].hace_dev = hace_dev;
		rc = crypto_engine_register_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs[i].alg.skcipher.base.base.cra_name);
		}
	}

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++) {
		aspeed_crypto_algs_g6[i].hace_dev = hace_dev;
		rc = crypto_engine_register_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs_g6[i].alg.skcipher.base.base.cra_name);
		}
	}
}