safexcel_cipher.c 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (C) 2017 Marvell
  4. *
  5. * Antoine Tenart <antoine.tenart@free-electrons.com>
  6. */
  7. #include <linux/device.h>
  8. #include <linux/dma-mapping.h>
  9. #include <linux/dmapool.h>
  10. #include <crypto/aead.h>
  11. #include <crypto/aes.h>
  12. #include <crypto/authenc.h>
  13. #include <crypto/des.h>
  14. #include <crypto/sha.h>
  15. #include <crypto/skcipher.h>
  16. #include <crypto/internal/aead.h>
  17. #include <crypto/internal/skcipher.h>
  18. #include "safexcel.h"
/* Direction of a cipher operation as programmed into the engine. */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
/* Block cipher algorithms this driver programs into the engine. */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};
/* Per-transform (tfm) driver context. */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;			/* CONTEXT_CONTROL_CRYPTO_MODE_* (ECB/CBC) */
	enum safexcel_cipher_alg alg;
	bool aead;			/* true for authenc() AEAD transforms */

	__le32 key[8];			/* cipher key in engine byte order */
	unsigned int key_len;		/* cipher key length in bytes */

	/* All the below is AEAD specific */
	u32 hash_alg;			/* CONTEXT_CONTROL_CRYPTO_ALG_SHA* */
	u32 state_sz;			/* hash state size in bytes */
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];	/* precomputed HMAC inner digest */
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];	/* precomputed HMAC outer digest */
};
/* Per-request driver state, stored in the crypto request context area. */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;		/* this request is a context invalidation */
};
  48. static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
  49. struct safexcel_command_desc *cdesc,
  50. u32 length)
  51. {
  52. struct safexcel_token *token;
  53. unsigned offset = 0;
  54. if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
  55. switch (ctx->alg) {
  56. case SAFEXCEL_DES:
  57. offset = DES_BLOCK_SIZE / sizeof(u32);
  58. memcpy(cdesc->control_data.token, iv, DES_BLOCK_SIZE);
  59. cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
  60. break;
  61. case SAFEXCEL_3DES:
  62. offset = DES3_EDE_BLOCK_SIZE / sizeof(u32);
  63. memcpy(cdesc->control_data.token, iv, DES3_EDE_BLOCK_SIZE);
  64. cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
  65. break;
  66. case SAFEXCEL_AES:
  67. offset = AES_BLOCK_SIZE / sizeof(u32);
  68. memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
  69. cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
  70. break;
  71. }
  72. }
  73. token = (struct safexcel_token *)(cdesc->control_data.token + offset);
  74. token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
  75. token[0].packet_length = length;
  76. token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
  77. EIP197_TOKEN_STAT_LAST_HASH;
  78. token[0].instructions = EIP197_TOKEN_INS_LAST |
  79. EIP197_TOKEN_INS_TYPE_CRYTO |
  80. EIP197_TOKEN_INS_TYPE_OUTPUT;
  81. }
/*
 * Build the EIP197 token sequence for an AEAD request: optional CBC IV
 * load, a hash-only pass over the associated data, a crypt+hash pass
 * over the payload, then either insertion (encrypt) or retrieval plus
 * verification (decrypt) of the authentication tag.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;
	unsigned offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		/* AEAD here is always AES based: 4-word IV in the token. */
		offset = AES_BLOCK_SIZE / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	/* On decrypt the tag is part of the input, not of the payload. */
	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	/* Associated data: hashed and passed through, never ciphered. */
	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = assoclen;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	/* Payload: both ciphered and hashed. */
	token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[1].packet_length = cryptlen;
	token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
	token[1].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append the computed digest to the output stream. */
		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Pull the expected digest from the input... */
		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		/* ...and have the engine verify it in hardware. */
		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}
  129. static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
  130. const u8 *key, unsigned int len)
  131. {
  132. struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
  133. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  134. struct safexcel_crypto_priv *priv = ctx->priv;
  135. struct crypto_aes_ctx aes;
  136. int ret, i;
  137. ret = crypto_aes_expand_key(&aes, key, len);
  138. if (ret) {
  139. crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
  140. return ret;
  141. }
  142. if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
  143. for (i = 0; i < len / sizeof(u32); i++) {
  144. if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
  145. ctx->base.needs_inv = true;
  146. break;
  147. }
  148. }
  149. }
  150. for (i = 0; i < len / sizeof(u32); i++)
  151. ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
  152. ctx->key_len = len;
  153. memzero_explicit(&aes, sizeof(aes));
  154. return 0;
  155. }
/*
 * Set the combined authenc() key of an AEAD transform.
 *
 * Splits the blob into encryption and authentication keys, precomputes
 * the HMAC ipad/opad digests for the configured hash using the driver's
 * own ahash implementations, and schedules a context invalidation when
 * any cached material changed.
 *
 * Returns 0 on success, -EINVAL for a malformed or unsupported key.
 */
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->key))
		goto badkey;

	/* Encryption key */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
			      CRYPTO_TFM_RES_MASK);

	/* Invalidate if the precomputed HMAC digests changed. */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
  221. static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
  222. struct crypto_async_request *async,
  223. struct safexcel_cipher_req *sreq,
  224. struct safexcel_command_desc *cdesc)
  225. {
  226. struct safexcel_crypto_priv *priv = ctx->priv;
  227. int ctrl_size;
  228. if (ctx->aead) {
  229. if (sreq->direction == SAFEXCEL_ENCRYPT)
  230. cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
  231. else
  232. cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
  233. } else {
  234. cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;
  235. /* The decryption control type is a combination of the
  236. * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
  237. * types.
  238. */
  239. if (sreq->direction == SAFEXCEL_DECRYPT)
  240. cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
  241. }
  242. cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
  243. cdesc->control_data.control1 |= ctx->mode;
  244. if (ctx->aead)
  245. cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
  246. ctx->hash_alg;
  247. if (ctx->alg == SAFEXCEL_DES) {
  248. cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
  249. } else if (ctx->alg == SAFEXCEL_3DES) {
  250. cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
  251. } else if (ctx->alg == SAFEXCEL_AES) {
  252. switch (ctx->key_len) {
  253. case AES_KEYSIZE_128:
  254. cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
  255. break;
  256. case AES_KEYSIZE_192:
  257. cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
  258. break;
  259. case AES_KEYSIZE_256:
  260. cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
  261. break;
  262. default:
  263. dev_err(priv->dev, "aes keysize not supported: %u\n",
  264. ctx->key_len);
  265. return -EINVAL;
  266. }
  267. }
  268. ctrl_size = ctx->key_len / sizeof(u32);
  269. if (ctx->aead)
  270. /* Take in account the ipad+opad digests */
  271. ctrl_size += ctx->state_sz / sizeof(u32) * 2;
  272. cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);
  273. return 0;
  274. }
/*
 * Gather the result descriptors of a completed cipher request: advance
 * the result ring, record the first engine error (if any) in *ret,
 * acknowledge the ring and release the DMA mappings set up at send time.
 *
 * Returns the number of result descriptors consumed; *should_complete
 * is set so the caller completes the crypto request.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported. */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	/* Undo the mapping done in safexcel_send_req(). */
	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, cryptlen),
			     DMA_FROM_DEVICE);
	}

	*should_complete = true;

	return ndesc;
}
/*
 * Map the source/destination scatterlists for DMA, refresh the hardware
 * context record (cipher key and, for AEAD, the ipad/opad digests),
 * then emit one command descriptor per source segment — the first one
 * also carrying the context control words and the token — and one
 * result descriptor per destination segment.
 *
 * On descriptor allocation failure all descriptors already written are
 * rolled back and the scatterlists unmapped. On success *commands and
 * *results hold the number of descriptors queued.
 */
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen = cryptlen + assoclen;
	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
	int i, ret = 0;

	/* In-place operation uses a single bidirectional mapping. */
	if (src == dst) {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_BIDIRECTIONAL);
		nr_dst = nr_src;
		if (!nr_src)
			return -EINVAL;
	} else {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_TO_DEVICE);
		if (!nr_src)
			return -EINVAL;

		nr_dst = dma_map_sg(priv->dev, dst,
				    sg_nents_for_len(dst, totlen),
				    DMA_FROM_DEVICE);
		if (!nr_dst) {
			dma_unmap_sg(priv->dev, src,
				     sg_nents_for_len(src, totlen),
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
	}

	/* Refresh the context record the engine fetches via ctxr_dma. */
	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (ctx->aead) {
		/* AEAD: ipad then opad digests follow the key in the record. */
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
		       ctx->opad, ctx->state_sz);
	}

	/* command descriptors */
	for_each_sg(src, sg, nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		/* Only the first descriptor carries control data + token. */
		if (n_cdesc == 1) {
			safexcel_context_control(ctx, base, sreq, cdesc);
			if (ctx->aead)
				safexcel_aead_token(ctx, iv, cdesc,
						    sreq->direction, cryptlen,
						    assoclen, digestsize);
			else
				safexcel_skcipher_token(ctx, iv, cdesc,
							cryptlen);
		}

		queued -= len;
		if (!queued)
			break;
	}

	/* result descriptors */
	for_each_sg(dst, sg, nr_dst, i) {
		bool first = !i, last = (i == nr_dst - 1);
		u32 len = sg_dma_len(sg);

		rdesc = safexcel_add_rdesc(priv, ring, first, last,
					   sg_dma_address(sg), len);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first)
			first_rdesc = rdesc;
		n_rdesc++;
	}

	/* Associate the request with its first result descriptor. */
	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, totlen),
			     DMA_FROM_DEVICE);
	}

	return ret;
}
/*
 * Gather the result descriptors of a completed context-invalidation
 * request. When the transform is being torn down (exit_inv) the context
 * record is freed and the request completes; otherwise the context is
 * moved to a freshly selected ring and the original request re-queued
 * so it runs against a clean context.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported. */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm teardown: release the context record and finish. */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Restart the original request, possibly on another ring. */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* The request is not done yet: it was only re-queued. */
	*should_complete = false;

	return ndesc;
}
  472. static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
  473. int ring,
  474. struct crypto_async_request *async,
  475. bool *should_complete, int *ret)
  476. {
  477. struct skcipher_request *req = skcipher_request_cast(async);
  478. struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
  479. int err;
  480. if (sreq->needs_inv) {
  481. sreq->needs_inv = false;
  482. err = safexcel_handle_inv_result(priv, ring, async, sreq,
  483. should_complete, ret);
  484. } else {
  485. err = safexcel_handle_req_result(priv, ring, async, req->src,
  486. req->dst, req->cryptlen, sreq,
  487. should_complete, ret);
  488. }
  489. return err;
  490. }
  491. static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
  492. int ring,
  493. struct crypto_async_request *async,
  494. bool *should_complete, int *ret)
  495. {
  496. struct aead_request *req = aead_request_cast(async);
  497. struct crypto_aead *tfm = crypto_aead_reqtfm(req);
  498. struct safexcel_cipher_req *sreq = aead_request_ctx(req);
  499. int err;
  500. if (sreq->needs_inv) {
  501. sreq->needs_inv = false;
  502. err = safexcel_handle_inv_result(priv, ring, async, sreq,
  503. should_complete, ret);
  504. } else {
  505. err = safexcel_handle_req_result(priv, ring, async, req->src,
  506. req->dst,
  507. req->cryptlen + crypto_aead_authsize(tfm),
  508. sreq, should_complete, ret);
  509. }
  510. return err;
  511. }
  512. static int safexcel_cipher_send_inv(struct crypto_async_request *base,
  513. int ring, int *commands, int *results)
  514. {
  515. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
  516. struct safexcel_crypto_priv *priv = ctx->priv;
  517. int ret;
  518. ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
  519. if (unlikely(ret))
  520. return ret;
  521. *commands = 1;
  522. *results = 1;
  523. return 0;
  524. }
  525. static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
  526. int *commands, int *results)
  527. {
  528. struct skcipher_request *req = skcipher_request_cast(async);
  529. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
  530. struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
  531. struct safexcel_crypto_priv *priv = ctx->priv;
  532. int ret;
  533. BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
  534. if (sreq->needs_inv)
  535. ret = safexcel_cipher_send_inv(async, ring, commands, results);
  536. else
  537. ret = safexcel_send_req(async, ring, sreq, req->src,
  538. req->dst, req->cryptlen, 0, 0, req->iv,
  539. commands, results);
  540. sreq->rdescs = *results;
  541. return ret;
  542. }
  543. static int safexcel_aead_send(struct crypto_async_request *async, int ring,
  544. int *commands, int *results)
  545. {
  546. struct aead_request *req = aead_request_cast(async);
  547. struct crypto_aead *tfm = crypto_aead_reqtfm(req);
  548. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
  549. struct safexcel_cipher_req *sreq = aead_request_ctx(req);
  550. struct safexcel_crypto_priv *priv = ctx->priv;
  551. int ret;
  552. BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
  553. if (sreq->needs_inv)
  554. ret = safexcel_cipher_send_inv(async, ring, commands, results);
  555. else
  556. ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
  557. req->cryptlen, req->assoclen,
  558. crypto_aead_authsize(tfm), req->iv,
  559. commands, results);
  560. sreq->rdescs = *results;
  561. return ret;
  562. }
/*
 * Synchronously invalidate a context record at transform teardown time:
 * flag the dummy request as an exit-time invalidation, queue it on the
 * context's current ring, kick the ring worker and block until the
 * invalidation result comes back.
 *
 * Sleeps on a completion; must not be called from atomic context.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* safexcel_inv_complete() fires once the result is processed. */
	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
/* Build an on-stack dummy skcipher request and run a sync invalidation. */
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
/* Build an on-stack dummy AEAD request and run a sync invalidation. */
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
/*
 * Common request enqueue path: record direction/mode/algorithm in the
 * context, allocate a context record on first use — or convert a
 * pending invalidation into this request when the cached record went
 * stale — then queue the request on the context's ring and kick the
 * ring worker.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir, u32 mode,
			      enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->alg = alg;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		/* A stale cached record must be invalidated first. */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
  644. static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
  645. {
  646. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  647. SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
  648. SAFEXCEL_AES);
  649. }
  650. static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
  651. {
  652. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  653. SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
  654. SAFEXCEL_AES);
  655. }
  656. static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
  657. {
  658. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  659. struct safexcel_alg_template *tmpl =
  660. container_of(tfm->__crt_alg, struct safexcel_alg_template,
  661. alg.skcipher.base);
  662. crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
  663. sizeof(struct safexcel_cipher_req));
  664. ctx->priv = tmpl->priv;
  665. ctx->base.send = safexcel_skcipher_send;
  666. ctx->base.handle_result = safexcel_skcipher_handle_result;
  667. return 0;
  668. }
/*
 * Common transform teardown: wipe the key material from the software
 * context and, when a hardware context record exists, from the record
 * itself.
 *
 * Returns -ENOMEM when no context record was ever allocated — callers
 * use this as a "nothing to invalidate or free" signal — and 0 when
 * the record exists and still needs invalidation/freeing.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
  679. static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
  680. {
  681. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  682. struct safexcel_crypto_priv *priv = ctx->priv;
  683. int ret;
  684. if (safexcel_cipher_cra_exit(tfm))
  685. return;
  686. if (priv->flags & EIP197_TRC_CACHE) {
  687. ret = safexcel_skcipher_exit_inv(tfm);
  688. if (ret)
  689. dev_warn(priv->dev, "skcipher: invalidation error %d\n",
  690. ret);
  691. } else {
  692. dma_pool_free(priv->context_pool, ctx->base.ctxr,
  693. ctx->base.ctxr_dma);
  694. }
  695. }
  696. static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
  697. {
  698. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  699. struct safexcel_crypto_priv *priv = ctx->priv;
  700. int ret;
  701. if (safexcel_cipher_cra_exit(tfm))
  702. return;
  703. if (priv->flags & EIP197_TRC_CACHE) {
  704. ret = safexcel_aead_exit_inv(tfm);
  705. if (ret)
  706. dev_warn(priv->dev, "aead: invalidation error %d\n",
  707. ret);
  708. } else {
  709. dma_pool_free(priv->context_pool, ctx->base.ctxr,
  710. ctx->base.ctxr_dma);
  711. }
  712. }
/* ecb(aes) skcipher registration template. */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
  737. static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
  738. {
  739. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  740. SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
  741. SAFEXCEL_AES);
  742. }
  743. static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
  744. {
  745. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  746. SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
  747. SAFEXCEL_AES);
  748. }
/*
 * cbc(aes) skcipher registration template for the EIP97/EIP197 engines.
 */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
  774. static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
  775. {
  776. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  777. SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
  778. SAFEXCEL_DES);
  779. }
  780. static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
  781. {
  782. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  783. SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
  784. SAFEXCEL_DES);
  785. }
  786. static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
  787. unsigned int len)
  788. {
  789. struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
  790. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  791. u32 tmp[DES_EXPKEY_WORDS];
  792. int ret;
  793. if (len != DES_KEY_SIZE) {
  794. crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
  795. return -EINVAL;
  796. }
  797. ret = des_ekey(tmp, key);
  798. if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
  799. tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
  800. return -EINVAL;
  801. }
  802. /* if context exits and key changed, need to invalidate it */
  803. if (ctx->base.ctxr_dma)
  804. if (memcmp(ctx->key, key, len))
  805. ctx->base.needs_inv = true;
  806. memcpy(ctx->key, key, len);
  807. ctx->key_len = len;
  808. return 0;
  809. }
  810. struct safexcel_alg_template safexcel_alg_cbc_des = {
  811. .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
  812. .engines = EIP97IES | EIP197B | EIP197D,
  813. .alg.skcipher = {
  814. .setkey = safexcel_des_setkey,
  815. .encrypt = safexcel_cbc_des_encrypt,
  816. .decrypt = safexcel_cbc_des_decrypt,
  817. .min_keysize = DES_KEY_SIZE,
  818. .max_keysize = DES_KEY_SIZE,
  819. .ivsize = DES_BLOCK_SIZE,
  820. .base = {
  821. .cra_name = "cbc(des)",
  822. .cra_driver_name = "safexcel-cbc-des",
  823. .cra_priority = 300,
  824. .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
  825. CRYPTO_ALG_KERN_DRIVER_ONLY,
  826. .cra_blocksize = DES_BLOCK_SIZE,
  827. .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
  828. .cra_alignmask = 0,
  829. .cra_init = safexcel_skcipher_cra_init,
  830. .cra_exit = safexcel_skcipher_cra_exit,
  831. .cra_module = THIS_MODULE,
  832. },
  833. },
  834. };
  835. static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
  836. {
  837. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  838. SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
  839. SAFEXCEL_DES);
  840. }
  841. static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
  842. {
  843. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  844. SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
  845. SAFEXCEL_DES);
  846. }
  847. struct safexcel_alg_template safexcel_alg_ecb_des = {
  848. .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
  849. .engines = EIP97IES | EIP197B | EIP197D,
  850. .alg.skcipher = {
  851. .setkey = safexcel_des_setkey,
  852. .encrypt = safexcel_ecb_des_encrypt,
  853. .decrypt = safexcel_ecb_des_decrypt,
  854. .min_keysize = DES_KEY_SIZE,
  855. .max_keysize = DES_KEY_SIZE,
  856. .ivsize = DES_BLOCK_SIZE,
  857. .base = {
  858. .cra_name = "ecb(des)",
  859. .cra_driver_name = "safexcel-ecb-des",
  860. .cra_priority = 300,
  861. .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
  862. CRYPTO_ALG_KERN_DRIVER_ONLY,
  863. .cra_blocksize = DES_BLOCK_SIZE,
  864. .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
  865. .cra_alignmask = 0,
  866. .cra_init = safexcel_skcipher_cra_init,
  867. .cra_exit = safexcel_skcipher_cra_exit,
  868. .cra_module = THIS_MODULE,
  869. },
  870. },
  871. };
  872. static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
  873. {
  874. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  875. SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
  876. SAFEXCEL_3DES);
  877. }
  878. static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
  879. {
  880. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  881. SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
  882. SAFEXCEL_3DES);
  883. }
  884. static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
  885. const u8 *key, unsigned int len)
  886. {
  887. struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
  888. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  889. if (len != DES3_EDE_KEY_SIZE) {
  890. crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
  891. return -EINVAL;
  892. }
  893. /* if context exits and key changed, need to invalidate it */
  894. if (ctx->base.ctxr_dma) {
  895. if (memcmp(ctx->key, key, len))
  896. ctx->base.needs_inv = true;
  897. }
  898. memcpy(ctx->key, key, len);
  899. ctx->key_len = len;
  900. return 0;
  901. }
  902. struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
  903. .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
  904. .engines = EIP97IES | EIP197B | EIP197D,
  905. .alg.skcipher = {
  906. .setkey = safexcel_des3_ede_setkey,
  907. .encrypt = safexcel_cbc_des3_ede_encrypt,
  908. .decrypt = safexcel_cbc_des3_ede_decrypt,
  909. .min_keysize = DES3_EDE_KEY_SIZE,
  910. .max_keysize = DES3_EDE_KEY_SIZE,
  911. .ivsize = DES3_EDE_BLOCK_SIZE,
  912. .base = {
  913. .cra_name = "cbc(des3_ede)",
  914. .cra_driver_name = "safexcel-cbc-des3_ede",
  915. .cra_priority = 300,
  916. .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
  917. CRYPTO_ALG_KERN_DRIVER_ONLY,
  918. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  919. .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
  920. .cra_alignmask = 0,
  921. .cra_init = safexcel_skcipher_cra_init,
  922. .cra_exit = safexcel_skcipher_cra_exit,
  923. .cra_module = THIS_MODULE,
  924. },
  925. },
  926. };
  927. static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
  928. {
  929. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  930. SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
  931. SAFEXCEL_3DES);
  932. }
  933. static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
  934. {
  935. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  936. SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
  937. SAFEXCEL_3DES);
  938. }
  939. struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
  940. .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
  941. .engines = EIP97IES | EIP197B | EIP197D,
  942. .alg.skcipher = {
  943. .setkey = safexcel_des3_ede_setkey,
  944. .encrypt = safexcel_ecb_des3_ede_encrypt,
  945. .decrypt = safexcel_ecb_des3_ede_decrypt,
  946. .min_keysize = DES3_EDE_KEY_SIZE,
  947. .max_keysize = DES3_EDE_KEY_SIZE,
  948. .ivsize = DES3_EDE_BLOCK_SIZE,
  949. .base = {
  950. .cra_name = "ecb(des3_ede)",
  951. .cra_driver_name = "safexcel-ecb-des3_ede",
  952. .cra_priority = 300,
  953. .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
  954. CRYPTO_ALG_KERN_DRIVER_ONLY,
  955. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  956. .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
  957. .cra_alignmask = 0,
  958. .cra_init = safexcel_skcipher_cra_init,
  959. .cra_exit = safexcel_skcipher_cra_exit,
  960. .cra_module = THIS_MODULE,
  961. },
  962. },
  963. };
  964. static int safexcel_aead_encrypt(struct aead_request *req)
  965. {
  966. struct safexcel_cipher_req *creq = aead_request_ctx(req);
  967. return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
  968. CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
  969. }
  970. static int safexcel_aead_decrypt(struct aead_request *req)
  971. {
  972. struct safexcel_cipher_req *creq = aead_request_ctx(req);
  973. return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
  974. CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
  975. }
  976. static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
  977. {
  978. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  979. struct safexcel_alg_template *tmpl =
  980. container_of(tfm->__crt_alg, struct safexcel_alg_template,
  981. alg.aead.base);
  982. crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
  983. sizeof(struct safexcel_cipher_req));
  984. ctx->priv = tmpl->priv;
  985. ctx->aead = true;
  986. ctx->base.send = safexcel_aead_send;
  987. ctx->base.handle_result = safexcel_aead_handle_result;
  988. return 0;
  989. }
  990. static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
  991. {
  992. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  993. safexcel_aead_cra_init(tfm);
  994. ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
  995. ctx->state_sz = SHA1_DIGEST_SIZE;
  996. return 0;
  997. }
/*
 * authenc(hmac(sha1),cbc(aes)) AEAD registration template for the
 * EIP97/EIP197 engines.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
  1022. static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
  1023. {
  1024. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  1025. safexcel_aead_cra_init(tfm);
  1026. ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
  1027. ctx->state_sz = SHA256_DIGEST_SIZE;
  1028. return 0;
  1029. }
/*
 * authenc(hmac(sha256),cbc(aes)) AEAD registration template for the
 * EIP97/EIP197 engines.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
  1054. static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
  1055. {
  1056. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  1057. safexcel_aead_cra_init(tfm);
  1058. ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
  1059. ctx->state_sz = SHA256_DIGEST_SIZE;
  1060. return 0;
  1061. }
/*
 * authenc(hmac(sha224),cbc(aes)) AEAD registration template for the
 * EIP97/EIP197 engines.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
  1086. static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
  1087. {
  1088. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  1089. safexcel_aead_cra_init(tfm);
  1090. ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
  1091. ctx->state_sz = SHA512_DIGEST_SIZE;
  1092. return 0;
  1093. }
/*
 * authenc(hmac(sha512),cbc(aes)) AEAD registration template for the
 * EIP97/EIP197 engines.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
  1118. static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
  1119. {
  1120. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  1121. safexcel_aead_cra_init(tfm);
  1122. ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
  1123. ctx->state_sz = SHA512_DIGEST_SIZE;
  1124. return 0;
  1125. }
/*
 * authenc(hmac(sha384),cbc(aes)) AEAD registration template for the
 * EIP97/EIP197 engines.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};