sun8i-ss-cipher.c

// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * keysizes in CBC and ECB mode.
 *
 * A link to the datasheet can be found in Documentation/arch/arm/sunxi.rst
 */
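
/*
 * A minimal sketch of how a kernel-side user could reach this offloader
 * through the generic skcipher API ("cbc(aes)" is the standard algorithm
 * name; key, src_sg, dst_sg and iv are hypothetical, caller-prepared
 * buffers/SG lists, and whether this driver is actually selected depends
 * on its registered priority):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	crypto_skcipher_setkey(tfm, key, 16);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, 16, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
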
#include <linux/bottom_half.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"
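
/*
 * sun8i_ss_need_fallback() - decide whether a request must be handed to
 * the software fallback.
 *
 * Returns true when the request violates one of the constraints checked
 * below: an empty or non-16-byte-multiple cryptlen, more than 8 SG
 * entries on either side, an SG chunk whose usable length is not a
 * multiple of 16 or whose offset is not 16-byte aligned, or src/dst SG
 * lists with mismatched geometry. Each reason bumps its own statistic.
 */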
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;
	unsigned int todo, len;

	if (areq->cryptlen == 0 || areq->cryptlen % 16) {
		algt->stat_fb_len++;
		return true;
	}

	if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||
	    sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {
		algt->stat_fb_sgnum++;
		return true;
	}

	len = areq->cryptlen;
	sg = areq->src;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	len = areq->cryptlen;
	sg = areq->dst;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	/* The SS needs the same number of SGs (each with the same length) for source and destination */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;

	return false;
}
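
/*
 * sun8i_ss_cipher_fallback() - run the request on the software fallback
 * skcipher, preserving the original callback, flags and direction
 * (rctx->op_dir).
 */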
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG)) {
		struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
		struct sun8i_ss_alg_template *algt __maybe_unused;

		algt = container_of(alg, struct sun8i_ss_alg_template,
				    alg.skcipher.base);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
		algt->stat_fb++;
#endif
	}

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}
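
/*
 * sun8i_ss_setup_ivs() - copy and DMA-map one IV per source SG chunk.
 *
 * For decryption the last ciphertext block is saved in sf->biv before
 * the operation, since an in-place run would overwrite it; it is copied
 * back into areq->iv by sun8i_ss_cipher() for CBC chaining. For
 * encryption the loop exits after mapping the first IV only.
 */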
static int sun8i_ss_setup_ivs(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sg = areq->src;
	unsigned int todo, offset;
	unsigned int len = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	int i = 0;
	dma_addr_t a;
	int err;

	rctx->ivlen = ivsize;
	if (rctx->op_dir & SS_DECRYPTION) {
		offset = areq->cryptlen - ivsize;
		scatterwalk_map_and_copy(sf->biv, areq->src, offset,
					 ivsize, 0);
	}

	/* we need to copy all IVs from the source in case DMA is bidirectional */
	while (sg && len) {
		if (sg_dma_len(sg) == 0) {
			sg = sg_next(sg);
			continue;
		}
		if (i == 0)
			memcpy(sf->iv[0], areq->iv, ivsize);
		a = dma_map_single(ss->dev, sf->iv[i], ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, a)) {
			memzero_explicit(sf->iv[i], ivsize);
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			err = -EFAULT;
			goto dma_iv_error;
		}
		rctx->p_iv[i] = a;
		/* we need to set up the other IVs only for decryption */
		if (rctx->op_dir == SS_ENCRYPTION)
			return 0;
		todo = min(len, sg_dma_len(sg));
		len -= todo;
		i++;
		if (i < MAX_SG) {
			offset = sg->length - ivsize;
			scatterwalk_map_and_copy(sf->iv[i], sg, offset, ivsize, 0);
		}
		rctx->niv = i;
		sg = sg_next(sg);
	}

	return 0;
dma_iv_error:
	i--;
	while (i >= 0) {
		dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
		memzero_explicit(sf->iv[i], ivsize);
		i--;
	}
	return err;
}
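
/*
 * sun8i_ss_cipher() - program and run one request on the SS.
 *
 * DMA-maps the key, the IVs and the source/destination SG lists, fills
 * the rctx->t_src/t_dst task entries (lengths are stored in 32-bit
 * words, hence todo / 4) and calls sun8i_ss_run_task(). The unwind
 * labels release every mapping in reverse order and copy the output IV
 * back into areq->iv for CBC chaining.
 */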
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);
	int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		err = sun8i_ss_setup_ivs(areq);
		if (err)
			goto theend_key;
	}
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%u) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%u) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		for (i = 0; i < rctx->niv; i++) {
			dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
			memzero_explicit(sf->iv[i], ivsize);
		}

		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & SS_DECRYPTION) {
			memcpy(areq->iv, sf->biv, ivsize);
			memzero_explicit(sf->biv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}
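
/*
 * sun8i_ss_handle_cipher_request() - crypto_engine callback: run the
 * request, then finalize it with bottom halves disabled so the
 * completion callback runs in the softirq-safe context that
 * crypto_finalize_skcipher_request() expects.
 */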
int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}
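
/*
 * sun8i_ss_skdecrypt() - decrypt entry point: route the request either
 * to the software fallback or to one of the hardware flows via
 * crypto_engine.
 */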
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
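
/* sun8i_ss_skencrypt() - encrypt-side counterpart of sun8i_ss_skdecrypt() */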
int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
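
/*
 * sun8i_ss_cipher_init() - tfm init: allocate the fallback tfm, reserve
 * room for its request in our request context, record its driver name,
 * and take a runtime-PM reference on the device.
 */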
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);
	op->ss = algt->ss;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(sktfm, sizeof(struct sun8i_cipher_req_ctx) +
				    crypto_skcipher_reqsize(op->fallback_tfm));

	memcpy(algt->fbname,
	       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)),
	       CRYPTO_MAX_ALG_NAME);

	err = pm_runtime_resume_and_get(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}
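
/* sun8i_ss_cipher_exit() - free the key and fallback tfm, drop the PM reference */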
void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}
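
/*
 * sun8i_ss_aes_setkey() - accept only 128/192/256-bit keys, keep a
 * kmemdup'ed copy so it can be DMA-mapped per request, and mirror the
 * key into the fallback tfm.
 */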
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
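
/* sun8i_ss_des3_setkey() - as above, but requiring a 3 * DES_KEY_SIZE key */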
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}