safexcel_cipher.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <linux/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/chacha.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/poly1305.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
        SAFEXCEL_ENCRYPT,
        SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
        SAFEXCEL_DES,
        SAFEXCEL_3DES,
        SAFEXCEL_AES,
        SAFEXCEL_CHACHA20,
        SAFEXCEL_SM4,
};

struct safexcel_cipher_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 mode;
        enum safexcel_cipher_alg alg;
        u8 aead; /* !=0=AEAD, 2=IPsec ESP AEAD, 3=IPsec ESP GMAC */
        u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
        u8 aadskip;
        u8 blocksz;
        u32 ivmask;
        u32 ctrinit;

        __le32 key[16];
        u32 nonce;
        unsigned int key_len, xts;

        /* All the below is AEAD specific */
        u32 hash_alg;
        u32 state_sz;

        struct crypto_aead *fback;
};

struct safexcel_cipher_req {
        enum safexcel_cipher_direction direction;
        /* Number of result descriptors associated to the request */
        unsigned int rdescs;
        bool needs_inv;
        int nr_src, nr_dst;
};

static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                struct safexcel_command_desc *cdesc)
{
        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
                cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
                /* 32 bit nonce */
                cdesc->control_data.token[0] = ctx->nonce;
                /* 64 bit IV part */
                memcpy(&cdesc->control_data.token[1], iv, 8);
                /* 32 bit counter, start at 0 or 1 (big endian!) */
                cdesc->control_data.token[3] =
                        (__force u32)cpu_to_be32(ctx->ctrinit);
                return 4;
        }
        if (ctx->alg == SAFEXCEL_CHACHA20) {
                cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
                /* 96 bit nonce part */
                memcpy(&cdesc->control_data.token[0], &iv[4], 12);
                /* 32 bit counter */
                cdesc->control_data.token[3] = *(u32 *)iv;
                return 4;
        }
        cdesc->control_data.options |= ctx->ivmask;
        memcpy(cdesc->control_data.token, iv, ctx->blocksz);
        return ctx->blocksz / sizeof(u32);
}
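
/*
 * Note: the four control_data.token words double as the per-packet IV
 * field of the command descriptor. The layouts produced above are:
 *
 *   CTR (RFC 3686): token[0]    = 32-bit nonce
 *                   token[1..2] = 64-bit IV from the request
 *                   token[3]    = 32-bit big-endian counter (0 or 1)
 *   ChaCha20:       token[0..2] = 96-bit nonce (iv[4..15])
 *                   token[3]    = 32-bit counter (iv[0..3])
 *   Other modes:    token[0..]  = ctx->blocksz bytes of IV (zero for ECB)
 *
 * This summary is derived from the code above, not from the datasheet.
 */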
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                    struct safexcel_command_desc *cdesc,
                                    struct safexcel_token *atoken,
                                    u32 length)
{
        struct safexcel_token *token;
        int ivlen;

        ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
        if (ivlen == 4) {
                /* No space in cdesc, instruction moves to atoken */
                cdesc->additional_cdata_size = 1;
                token = atoken;
        } else {
                /* Everything fits in cdesc */
                token = (struct safexcel_token *)(cdesc->control_data.token + 2);
                /* Need to pad with NOP */
                eip197_noop_token(&token[1]);
        }

        token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token->packet_length = length;
        token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
                      EIP197_TOKEN_STAT_LAST_HASH;
        token->instructions = EIP197_TOKEN_INS_LAST |
                              EIP197_TOKEN_INS_TYPE_CRYPTO |
                              EIP197_TOKEN_INS_TYPE_OUTPUT;
}

static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
                             struct safexcel_command_desc *cdesc)
{
        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
            ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
                /* 32 bit nonce */
                cdesc->control_data.token[0] = ctx->nonce;
                /* 64 bit IV part */
                memcpy(&cdesc->control_data.token[1], iv, 8);
                /* 32 bit counter, start at 0 or 1 (big endian!) */
                cdesc->control_data.token[3] =
                        (__force u32)cpu_to_be32(ctx->ctrinit);
                return;
        }
        if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
                /* 96 bit IV part */
                memcpy(&cdesc->control_data.token[0], iv, 12);
                /* 32 bit counter, start at 0 or 1 (big endian!) */
                cdesc->control_data.token[3] =
                        (__force u32)cpu_to_be32(ctx->ctrinit);
                return;
        }
        /* CBC */
        memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}

static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                struct safexcel_command_desc *cdesc,
                                struct safexcel_token *atoken,
                                enum safexcel_cipher_direction direction,
                                u32 cryptlen, u32 assoclen, u32 digestsize)
{
        struct safexcel_token *aadref;
        int atoksize = 2; /* Start with minimum size */
        int assocadj = assoclen - ctx->aadskip, aadalign;

        /* Always 4 dwords of embedded IV for AEAD modes */
        cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

        if (direction == SAFEXCEL_DECRYPT)
                cryptlen -= digestsize;
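
        /*
         * Note: for CCM the engine does not build the B0 block itself,
         * so it is constructed here and inserted from the token stream.
         * Per RFC 3610, B0 is one AES block: a flags octet (encoding
         * whether AAD is present, the tag length M and the length-field
         * size L), followed by the nonce and the message length. The
         * shifts below ((assocadj > 0) << 6 and (digestsize - 2) << 2)
         * compute the AAD and tag-length bits of that flags octet.
         */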
        if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
                /* Construct IV block B0 for the CBC-MAC */
                u8 *final_iv = (u8 *)cdesc->control_data.token;
                u8 *cbcmaciv = (u8 *)&atoken[1];
                __le32 *aadlen = (__le32 *)&atoken[5];

                if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
                        /* Length + nonce */
                        cdesc->control_data.token[0] = ctx->nonce;
                        /* Fixup flags byte */
                        *(__le32 *)cbcmaciv =
                                cpu_to_le32(ctx->nonce |
                                            ((assocadj > 0) << 6) |
                                            ((digestsize - 2) << 2));
                        /* 64 bit IV part */
                        memcpy(&cdesc->control_data.token[1], iv, 8);
                        memcpy(cbcmaciv + 4, iv, 8);
                        /* Start counter at 0 */
                        cdesc->control_data.token[3] = 0;
                        /* Message length */
                        *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
                } else {
                        /* Variable length IV part */
                        memcpy(final_iv, iv, 15 - iv[0]);
                        memcpy(cbcmaciv, iv, 15 - iv[0]);
                        /* Start variable length counter at 0 */
                        memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
                        memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
                        /* fixup flags byte */
                        cbcmaciv[0] |= ((assocadj > 0) << 6) |
                                       ((digestsize - 2) << 2);
                        /* insert lower 2 bytes of message length */
                        cbcmaciv[14] = cryptlen >> 8;
                        cbcmaciv[15] = cryptlen & 255;
                }

                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = AES_BLOCK_SIZE +
                                        ((assocadj > 0) << 1);
                atoken->stat = 0;
                atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
                                       EIP197_TOKEN_INS_TYPE_HASH;

                if (likely(assocadj)) {
                        *aadlen = cpu_to_le32((assocadj >> 8) |
                                              (assocadj & 255) << 8);
                        atoken += 6;
                        atoksize += 7;
                } else {
                        atoken += 5;
                        atoksize += 6;
                }

                /* Process AAD data */
                aadref = atoken;
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = assocadj;
                atoken->stat = 0;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
                atoken++;

                /* For CCM only, align AAD data towards hash engine */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                aadalign = (assocadj + 2) & 15;
                atoken->packet_length = assocadj && aadalign ?
                                                16 - aadalign :
                                                0;
                if (likely(cryptlen)) {
                        atoken->stat = 0;
                        atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
                } else {
                        atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                        atoken->instructions = EIP197_TOKEN_INS_LAST |
                                               EIP197_TOKEN_INS_TYPE_HASH;
                }
        } else {
                safexcel_aead_iv(ctx, iv, cdesc);

                /* Process AAD data */
                aadref = atoken;
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = assocadj;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                atoken->instructions = EIP197_TOKEN_INS_LAST |
                                       EIP197_TOKEN_INS_TYPE_HASH;
        }
        atoken++;

        if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
                /* For ESP mode (and not GMAC), skip over the IV */
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
                atoken->stat = 0;
                atoken->instructions = 0;
                atoken++;
                atoksize++;
        } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
                            direction == SAFEXCEL_DECRYPT)) {
                /* Poly-chacha decryption needs a dummy NOP here ... */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = 16; /* According to Op Manual */
                atoken->stat = 0;
                atoken->instructions = 0;
                atoken++;
                atoksize++;
        }

        if (ctx->xcm) {
                /* For GCM and CCM, obtain enc(Y0) */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
                atoken->packet_length = 0;
                atoken->stat = 0;
                atoken->instructions = AES_BLOCK_SIZE;
                atoken++;

                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = AES_BLOCK_SIZE;
                atoken->stat = 0;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                       EIP197_TOKEN_INS_TYPE_CRYPTO;
                atoken++;
                atoksize += 2;
        }

        if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
                /* Fixup stat field for AAD direction instruction */
                aadref->stat = 0;

                /* Process crypto data */
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = cryptlen;

                if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
                        /* Fixup instruction field for AAD dir instruction */
                        aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

                        /* Do not send to crypt engine in case of GMAC */
                        atoken->instructions = EIP197_TOKEN_INS_LAST |
                                               EIP197_TOKEN_INS_TYPE_HASH |
                                               EIP197_TOKEN_INS_TYPE_OUTPUT;
                } else {
                        atoken->instructions = EIP197_TOKEN_INS_LAST |
                                               EIP197_TOKEN_INS_TYPE_CRYPTO |
                                               EIP197_TOKEN_INS_TYPE_HASH |
                                               EIP197_TOKEN_INS_TYPE_OUTPUT;
                }

                cryptlen &= 15;
                if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
                        atoken->stat = 0;
                        /* For CCM only, pad crypto data to the hash engine */
                        atoken++;
                        atoksize++;
                        atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                        atoken->packet_length = 16 - cryptlen;
                        atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                        atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
                } else {
                        atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                }
                atoken++;
                atoksize++;
        }

        if (direction == SAFEXCEL_ENCRYPT) {
                /* Append ICV */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = digestsize;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
                               EIP197_TOKEN_STAT_LAST_PACKET;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
        } else {
                /* Extract ICV */
                atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
                atoken->packet_length = digestsize;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
                               EIP197_TOKEN_STAT_LAST_PACKET;
                atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
                atoken++;
                atoksize++;

                /* Verify ICV */
                atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
                atoken->packet_length = digestsize |
                                        EIP197_TOKEN_HASH_RESULT_VERIFY;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
                               EIP197_TOKEN_STAT_LAST_PACKET;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
        }

        /* Fixup length of the token in the command descriptor */
        cdesc->additional_cdata_size = atoksize;
}
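
/*
 * Note: the token program built above executes in order: (for CCM) the
 * B0 block plus AAD length insert, the AAD passthrough to the hash
 * engine, an IV skip for ESP (or a NOP insert for ChaCha-Poly decrypt),
 * retrieval of enc(Y0) for GCM/CCM, the payload direction instruction,
 * and finally ICV insertion (encrypt) or ICV retrieve-and-verify
 * (decrypt). This ordering is inferred from the code; the EIP-197
 * programming manual remains the authoritative reference for token
 * semantics.
 */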
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
                                        const u8 *key, unsigned int len)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct crypto_aes_ctx aes;
        int ret, i;

        ret = aes_expandkey(&aes, key, len);
        if (ret)
                return ret;

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                for (i = 0; i < len / sizeof(u32); i++) {
                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        for (i = 0; i < len / sizeof(u32); i++)
                ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

        ctx->key_len = len;

        memzero_explicit(&aes, sizeof(aes));
        return 0;
}
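
/*
 * Note: on engines with a transform record cache (EIP197_TRC_CACHE) the
 * old key material may still live in the on-chip cache once a context
 * record (ctxr_dma) exists. The compare-before-copy above therefore
 * flags the context for invalidation whenever the key actually changes,
 * so a stale cached key is never used. The same pattern recurs in every
 * setkey routine in this file.
 */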
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
                                unsigned int len)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct crypto_authenc_keys keys;
        struct crypto_aes_ctx aes;
        int err = -EINVAL, i;
        const char *alg;

        if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
                goto badkey;

        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
                /* Must have at least space for the nonce here */
                if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
                        goto badkey;
                /* last 4 bytes of key are the nonce! */
                ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
                                      CTR_RFC3686_NONCE_SIZE);
                /* exclude the nonce here */
                keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
        }

        /* Encryption key */
        switch (ctx->alg) {
        case SAFEXCEL_DES:
                err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
                if (unlikely(err))
                        goto badkey;
                break;
        case SAFEXCEL_3DES:
                err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
                if (unlikely(err))
                        goto badkey;
                break;
        case SAFEXCEL_AES:
                err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
                if (unlikely(err))
                        goto badkey;
                break;
        case SAFEXCEL_SM4:
                if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
                        goto badkey;
                break;
        default:
                dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
                goto badkey;
        }

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
                        if (le32_to_cpu(ctx->key[i]) !=
                            ((u32 *)keys.enckey)[i]) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        /* Auth key */
        switch (ctx->hash_alg) {
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
                alg = "safexcel-sha1";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
                alg = "safexcel-sha224";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
                alg = "safexcel-sha256";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
                alg = "safexcel-sha384";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
                alg = "safexcel-sha512";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
                alg = "safexcel-sm3";
                break;
        default:
                dev_err(priv->dev, "aead: unsupported hash algorithm\n");
                goto badkey;
        }

        if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
                                 alg, ctx->state_sz))
                goto badkey;

        /* Now copy the keys into the context */
        for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
                ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
        ctx->key_len = keys.enckeylen;

        memzero_explicit(&keys, sizeof(keys));
        return 0;

badkey:
        memzero_explicit(&keys, sizeof(keys));
        return err;
}
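
/*
 * Note: for authenc() algorithms the key blob passed in here is not raw
 * key bytes: crypto_authenc_extractkeys() parses an rtattr-encoded
 * structure that carries the authentication key and the encryption key
 * separately (see crypto/authenc.c). For RFC 3686 style CTR the last 4
 * bytes of the encryption key are the per-SA nonce and are stripped off
 * before the key is stored.
 */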
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
                                    struct crypto_async_request *async,
                                    struct safexcel_cipher_req *sreq,
                                    struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ctrl_size = ctx->key_len / sizeof(u32);

        cdesc->control_data.control1 = ctx->mode;

        if (ctx->aead) {
                /* Take in account the ipad+opad digests */
                if (ctx->xcm) {
                        ctrl_size += ctx->state_sz / sizeof(u32);
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_DIGEST_XCM |
                                ctx->hash_alg |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                } else if (ctx->alg == SAFEXCEL_CHACHA20) {
                        /* Chacha20-Poly1305 */
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
                                (sreq->direction == SAFEXCEL_ENCRYPT ?
                                        CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
                                        CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
                                ctx->hash_alg |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                        return 0;
                } else {
                        ctrl_size += ctx->state_sz / sizeof(u32) * 2;
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_DIGEST_HMAC |
                                ctx->hash_alg |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                }

                if (sreq->direction == SAFEXCEL_ENCRYPT &&
                    (ctx->xcm == EIP197_XCM_MODE_CCM ||
                     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
                else if (sreq->direction == SAFEXCEL_ENCRYPT)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
                else if (ctx->xcm == EIP197_XCM_MODE_CCM)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
                else
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
        } else {
                if (sreq->direction == SAFEXCEL_ENCRYPT)
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                else
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_TYPE_CRYPTO_IN |
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
        }

        if (ctx->alg == SAFEXCEL_DES) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_DES;
        } else if (ctx->alg == SAFEXCEL_3DES) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_3DES;
        } else if (ctx->alg == SAFEXCEL_AES) {
                switch (ctx->key_len >> ctx->xts) {
                case AES_KEYSIZE_128:
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_CRYPTO_ALG_AES128;
                        break;
                case AES_KEYSIZE_192:
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_CRYPTO_ALG_AES192;
                        break;
                case AES_KEYSIZE_256:
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_CRYPTO_ALG_AES256;
                        break;
                default:
                        dev_err(priv->dev, "aes keysize not supported: %u\n",
                                ctx->key_len >> ctx->xts);
                        return -EINVAL;
                }
        } else if (ctx->alg == SAFEXCEL_CHACHA20) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
        } else if (ctx->alg == SAFEXCEL_SM4) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_SM4;
        }

        return 0;
}
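
/*
 * Note: ctrl_size counts the 32-bit context record words the engine
 * must fetch: the cipher key, plus one hash state for XCM (GCM/CCM) or
 * two (ipad + opad) for HMAC-based authenc modes. ctx->xts is used as a
 * shift amount above, suggesting it is 1 for XTS transforms, where
 * key_len covers both halves of the key, hence "key_len >> ctx->xts"
 * when classifying the AES key size.
 */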
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
                                      struct crypto_async_request *async,
                                      struct scatterlist *src,
                                      struct scatterlist *dst,
                                      unsigned int cryptlen,
                                      struct safexcel_cipher_req *sreq,
                                      bool *should_complete, int *ret)
{
        struct skcipher_request *areq = skcipher_request_cast(async);
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
        struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
        struct safexcel_result_desc *rdesc;
        int ndesc = 0;

        *ret = 0;

        if (unlikely(!sreq->rdescs))
                return 0;

        while (sreq->rdescs--) {
                rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
                if (IS_ERR(rdesc)) {
                        dev_err(priv->dev,
                                "cipher: result: could not retrieve the result descriptor\n");
                        *ret = PTR_ERR(rdesc);
                        break;
                }

                if (likely(!*ret))
                        *ret = safexcel_rdesc_check_errors(priv, rdesc);

                ndesc++;
        }

        safexcel_complete(priv, ring);

        if (src == dst) {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_BIDIRECTIONAL);
        } else {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_TO_DEVICE);
                if (sreq->nr_dst > 0)
                        dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
                                     DMA_FROM_DEVICE);
        }

        /*
         * Update IV in req from last crypto output word for CBC modes
         */
        if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
            (sreq->direction == SAFEXCEL_ENCRYPT)) {
                /* For encrypt take the last output word */
                sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
                                   crypto_skcipher_ivsize(skcipher),
                                   (cryptlen -
                                    crypto_skcipher_ivsize(skcipher)));
        }

        *should_complete = true;

        return ndesc;
}
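
/*
 * Note: for CBC encryption the next IV is by definition the last
 * ciphertext block, so it can simply be copied out of the destination
 * buffer here once the engine is done. The decrypt case is handled
 * before the operation starts (see safexcel_send_req() below), because
 * in-place processing would overwrite the last ciphertext block before
 * it could be saved.
 */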
static int safexcel_send_req(struct crypto_async_request *base, int ring,
                             struct safexcel_cipher_req *sreq,
                             struct scatterlist *src, struct scatterlist *dst,
                             unsigned int cryptlen, unsigned int assoclen,
                             unsigned int digestsize, u8 *iv, int *commands,
                             int *results)
{
        struct skcipher_request *areq = skcipher_request_cast(base);
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct safexcel_command_desc *cdesc;
        struct safexcel_command_desc *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
        struct scatterlist *sg;
        unsigned int totlen;
        unsigned int totlen_src = cryptlen + assoclen;
        unsigned int totlen_dst = totlen_src;
        struct safexcel_token *atoken;
        int n_cdesc = 0, n_rdesc = 0;
        int queued, i, ret = 0;
        bool first = true;

        sreq->nr_src = sg_nents_for_len(src, totlen_src);

        if (ctx->aead) {
                /*
                 * AEAD has auth tag appended to output for encrypt and
                 * removed from the output for decrypt!
                 */
                if (sreq->direction == SAFEXCEL_DECRYPT)
                        totlen_dst -= digestsize;
                else
                        totlen_dst += digestsize;

                memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
                       &ctx->base.ipad, ctx->state_sz);
                if (!ctx->xcm)
                        memcpy(ctx->base.ctxr->data + (ctx->key_len +
                               ctx->state_sz) / sizeof(u32), &ctx->base.opad,
                               ctx->state_sz);
        } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
                   (sreq->direction == SAFEXCEL_DECRYPT)) {
                /*
                 * Save IV from last crypto input word for CBC modes in decrypt
                 * direction. Need to do this first in case of inplace operation
                 * as it will be overwritten.
                 */
                sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
                                   crypto_skcipher_ivsize(skcipher),
                                   (totlen_src -
                                    crypto_skcipher_ivsize(skcipher)));
        }

        sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

        /*
         * Remember actual input length, source buffer length may be
         * updated in case of inline operation below.
         */
        totlen = totlen_src;
        queued = totlen_src;

        if (src == dst) {
                sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
                sreq->nr_dst = sreq->nr_src;
                if (unlikely((totlen_src || totlen_dst) &&
                             (sreq->nr_src <= 0))) {
                        dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
                                max(totlen_src, totlen_dst));
                        return -EINVAL;
                }
                if (sreq->nr_src > 0 &&
                    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
                        return -EIO;
        } else {
                if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
                        dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
                                totlen_src);
                        return -EINVAL;
                }

                if (sreq->nr_src > 0 &&
                    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
                        return -EIO;

                if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
                        dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
                                totlen_dst);
                        ret = -EINVAL;
                        goto unmap;
                }

                if (sreq->nr_dst > 0 &&
                    !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
                        ret = -EIO;
                        goto unmap;
                }
        }

        memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

        if (!totlen) {
                /*
                 * The EIP97 cannot deal with zero length input packets!
                 * So stuff a dummy command descriptor indicating a 1 byte
                 * (dummy) input packet, using the context record as source.
                 */
                first_cdesc = safexcel_add_cdesc(priv, ring,
                                                 1, 1, ctx->base.ctxr_dma,
                                                 1, 1, ctx->base.ctxr_dma,
                                                 &atoken);
                if (IS_ERR(first_cdesc)) {
                        /* No space left in the command descriptor ring */
                        ret = PTR_ERR(first_cdesc);
                        goto cdesc_rollback;
                }
                n_cdesc = 1;
                goto skip_cdesc;
        }

        /* command descriptors */
        for_each_sg(src, sg, sreq->nr_src, i) {
                int len = sg_dma_len(sg);

                /* Do not overflow the request */
                if (queued < len)
                        len = queued;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - len),
                                           sg_dma_address(sg), len, totlen,
                                           ctx->base.ctxr_dma, &atoken);
                if (IS_ERR(cdesc)) {
                        /* No space left in the command descriptor ring */
                        ret = PTR_ERR(cdesc);
                        goto cdesc_rollback;
                }

                if (!n_cdesc)
                        first_cdesc = cdesc;

                n_cdesc++;
                queued -= len;
                if (!queued)
                        break;
        }
skip_cdesc:
        /* Add context control words and token to first command descriptor */
        safexcel_context_control(ctx, base, sreq, first_cdesc);
        if (ctx->aead)
                safexcel_aead_token(ctx, iv, first_cdesc, atoken,
                                    sreq->direction, cryptlen,
                                    assoclen, digestsize);
        else
                safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
                                        cryptlen);

        /* result descriptors */
        for_each_sg(dst, sg, sreq->nr_dst, i) {
                bool last = (i == sreq->nr_dst - 1);
                u32 len = sg_dma_len(sg);

                /* only allow the part of the buffer we know we need */
                if (len > totlen_dst)
                        len = totlen_dst;
                if (unlikely(!len))
                        break;
                totlen_dst -= len;

                /* skip over AAD space in buffer - not written */
                if (assoclen) {
                        if (assoclen >= len) {
                                assoclen -= len;
                                continue;
                        }
                        rdesc = safexcel_add_rdesc(priv, ring, first, last,
                                                   sg_dma_address(sg) +
                                                   assoclen,
                                                   len - assoclen);
                        assoclen = 0;
                } else {
                        rdesc = safexcel_add_rdesc(priv, ring, first, last,
                                                   sg_dma_address(sg),
                                                   len);
                }
                if (IS_ERR(rdesc)) {
                        /* No space left in the result descriptor ring */
                        ret = PTR_ERR(rdesc);
                        goto rdesc_rollback;
                }
                if (first) {
                        first_rdesc = rdesc;
                        first = false;
                }
                n_rdesc++;
        }

        if (unlikely(first)) {
                /*
                 * Special case: AEAD decrypt with only AAD data.
                 * In this case there is NO output data from the engine,
                 * but the engine still needs a result descriptor!
                 * Create a dummy one just for catching the result token.
                 */
                rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
                if (IS_ERR(rdesc)) {
                        /* No space left in the result descriptor ring */
                        ret = PTR_ERR(rdesc);
                        goto rdesc_rollback;
                }
                first_rdesc = rdesc;
                n_rdesc = 1;
        }

        safexcel_rdr_req_set(priv, ring, first_rdesc, base);

        *commands = n_cdesc;
        *results = n_rdesc;
        return 0;

rdesc_rollback:
        for (i = 0; i < n_rdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap:
        if (src == dst) {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_BIDIRECTIONAL);
        } else {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_TO_DEVICE);
                if (sreq->nr_dst > 0)
                        dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
                                     DMA_FROM_DEVICE);
        }

        return ret;
}
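
/*
 * Note: the rollback labels above only rewind the software write
 * pointers of the command/result rings; nothing has been handed to the
 * hardware yet at that point, as the engine is only kicked once the
 * descriptor counts are returned to the caller. This is an inference
 * from the ring API usage here, not a documented guarantee.
 */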
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *base,
                                      struct safexcel_cipher_req *sreq,
                                      bool *should_complete, int *ret)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_result_desc *rdesc;
        int ndesc = 0, enq_ret;

        *ret = 0;

        if (unlikely(!sreq->rdescs))
                return 0;

        while (sreq->rdescs--) {
                rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
                if (IS_ERR(rdesc)) {
                        dev_err(priv->dev,
                                "cipher: invalidate: could not retrieve the result descriptor\n");
                        *ret = PTR_ERR(rdesc);
                        break;
                }

                if (likely(!*ret))
                        *ret = safexcel_rdesc_check_errors(priv, rdesc);

                ndesc++;
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;

                return ndesc;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
                                           int ring,
                                           struct crypto_async_request *async,
                                           bool *should_complete, int *ret)
{
        struct skcipher_request *req = skcipher_request_cast(async);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        int err;

        if (sreq->needs_inv) {
                sreq->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async, sreq,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async, req->src,
                                                 req->dst, req->cryptlen, sreq,
                                                 should_complete, ret);
        }

        return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
                                       int ring,
                                       struct crypto_async_request *async,
                                       bool *should_complete, int *ret)
{
        struct aead_request *req = aead_request_cast(async);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        int err;

        if (sreq->needs_inv) {
                sreq->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async, sreq,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async, req->src,
                                                 req->dst,
                                                 req->cryptlen + crypto_aead_authsize(tfm),
                                                 sreq, should_complete, ret);
        }

        return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
                                    int ring, int *commands, int *results)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}
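
/*
 * Note: a cache invalidation is itself a (minimal) engine operation: it
 * occupies exactly one command and one result descriptor, which is why
 * *commands and *results are hardwired to 1 here.
 */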
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
                                  int *commands, int *results)
{
        struct skcipher_request *req = skcipher_request_cast(async);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

        if (sreq->needs_inv) {
                ret = safexcel_cipher_send_inv(async, ring, commands, results);
        } else {
                struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
                u8 input_iv[AES_BLOCK_SIZE];

                /*
                 * Save input IV in case of CBC decrypt mode
                 * Will be overwritten with output IV prior to use!
                 */
                memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

                ret = safexcel_send_req(async, ring, sreq, req->src,
                                        req->dst, req->cryptlen, 0, 0, input_iv,
                                        commands, results);
        }

        sreq->rdescs = *results;
        return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
                              int *commands, int *results)
{
        struct aead_request *req = aead_request_cast(async);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

        if (sreq->needs_inv)
                ret = safexcel_cipher_send_inv(async, ring, commands, results);
        else
                ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
                                        req->cryptlen, req->assoclen,
                                        crypto_aead_authsize(tfm), req->iv,
                                        commands, results);

        sreq->rdescs = *results;
        return ret;
}

static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
                                    struct crypto_async_request *base,
                                    struct safexcel_cipher_req *sreq,
                                    struct crypto_wait *result)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ring = ctx->base.ring;
        int err;

        ctx = crypto_tfm_ctx(base->tfm);
        ctx->base.exit_inv = true;
        sreq->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        err = crypto_wait_req(-EINPROGRESS, result);

        if (err) {
                dev_warn(priv->dev,
                         "cipher: sync: invalidate: completion error %d\n",
                         err);
                return err;
        }

        return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        DECLARE_CRYPTO_WAIT(result);

        memset(req, 0, sizeof(struct skcipher_request));

        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &result);
        skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        DECLARE_CRYPTO_WAIT(result);

        memset(req, 0, sizeof(struct aead_request));

        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  crypto_req_done, &result);
        aead_request_set_tfm(req, __crypto_aead_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
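
/*
 * Note: the two exit_inv helpers above run synchronously: they build a
 * dummy request on the stack, queue it as an invalidation and then
 * block in crypto_wait_req() until the result handler completes it.
 * This is only acceptable because cra_exit runs in process context,
 * where sleeping is allowed.
 */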
static int safexcel_queue_req(struct crypto_async_request *base,
                              struct safexcel_cipher_req *sreq,
                              enum safexcel_cipher_direction dir)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret, ring;

        sreq->needs_inv = false;
        sreq->direction = dir;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
                        sreq->needs_inv = true;
                        ctx->base.needs_inv = false;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(*base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}
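
/*
 * Note: the context record is allocated lazily on the first request and
 * a ring is picked at that time; subsequent requests for the same tfm
 * stick to that ring, which appears to serialize them behind any
 * pending invalidation (the ring is only re-selected from
 * safexcel_handle_inv_result()).
 */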
static int safexcel_encrypt(struct skcipher_request *req)
{
        return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
                                  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
        return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
                                  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(tfm->__crt_alg, struct safexcel_alg_template,
                             alg.skcipher.base);

        crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                                    sizeof(struct safexcel_cipher_req));

        ctx->base.priv = tmpl->priv;

        ctx->base.send = safexcel_skcipher_send;
        ctx->base.handle_result = safexcel_skcipher_handle_result;
        ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
        ctx->ctrinit = 1;
        return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        memzero_explicit(ctx->key, sizeof(ctx->key));

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return -ENOMEM;

        memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
        return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        if (safexcel_cipher_cra_exit(tfm))
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_skcipher_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "skcipher: invalidation error %d\n",
                                 ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        if (safexcel_cipher_cra_exit(tfm))
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_aead_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "aead: invalidation error %d\n",
                                 ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_AES;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
        ctx->blocksz = 0;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(aes)",
                        .cra_driver_name = "safexcel-ecb-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_ecb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

  1145. static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
  1146. {
  1147. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  1148. safexcel_skcipher_cra_init(tfm);
  1149. ctx->alg = SAFEXCEL_AES;
  1150. ctx->blocksz = AES_BLOCK_SIZE;
  1151. ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
  1152. return 0;
  1153. }
  1154. struct safexcel_alg_template safexcel_alg_cbc_aes = {
  1155. .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
  1156. .algo_mask = SAFEXCEL_ALG_AES,
  1157. .alg.skcipher = {
  1158. .setkey = safexcel_skcipher_aes_setkey,
  1159. .encrypt = safexcel_encrypt,
  1160. .decrypt = safexcel_decrypt,
  1161. .min_keysize = AES_MIN_KEY_SIZE,
  1162. .max_keysize = AES_MAX_KEY_SIZE,
  1163. .ivsize = AES_BLOCK_SIZE,
  1164. .base = {
  1165. .cra_name = "cbc(aes)",
  1166. .cra_driver_name = "safexcel-cbc-aes",
  1167. .cra_priority = SAFEXCEL_CRA_PRIORITY,
  1168. .cra_flags = CRYPTO_ALG_ASYNC |
  1169. CRYPTO_ALG_ALLOCATES_MEMORY |
  1170. CRYPTO_ALG_KERN_DRIVER_ONLY,
  1171. .cra_blocksize = AES_BLOCK_SIZE,
  1172. .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
  1173. .cra_alignmask = 0,
  1174. .cra_init = safexcel_skcipher_aes_cbc_cra_init,
  1175. .cra_exit = safexcel_skcipher_cra_exit,
  1176. .cra_module = THIS_MODULE,
  1177. },
  1178. },
  1179. };

static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	keylen = len - CTR_RFC3686_NONCE_SIZE;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
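
/*
 * RFC 3686 keys carry a 4-byte nonce appended to the raw AES key, so
 * the buffer handed to the setkey() above is laid out as:
 *
 *	[ AES key: 16/24/32 bytes | nonce: CTR_RFC3686_NONCE_SIZE (4) ]
 *
 * The per-request IV then supplies the 8 explicit IV bytes, and the
 * engine manages the trailing 32-bit block counter. A minimal,
 * hypothetical caller-side sketch (values illustrative only):
 *
 *	u8 key_and_nonce[AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE];
 *
 *	get_random_bytes(key_and_nonce, sizeof(key_and_nonce));
 *	crypto_skcipher_setkey(tfm, key_and_nonce,
 *			       sizeof(key_and_nonce));
 */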

static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES;
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES;
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->base.priv = tmpl->priv;

	ctx->alg = SAFEXCEL_AES; /* default */
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}
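
/*
 * safexcel_aead_cra_init() programs the common AES-CBC defaults; the
 * per-algorithm cra_init helpers below call it first and then override
 * only the fields that differ: the sha* helpers set hash_alg and
 * state_sz, while the DES, 3DES, CTR, XTS, GCM, CCM and ChaCha20
 * variants further down additionally override alg, blocksz, ivmask
 * and mode.
 */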

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
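
/*
 * XTS consumes a double-length key: the first half is the data (cipher)
 * key, the second half the tweak key. Both halves are stored back to
 * back in ctx->key, hence key_len = keylen << 1 and the 2x AES keysize
 * limits advertised by the template below. Illustrative layout for an
 * xts(aes) key with AES-128 halves:
 *
 *	key[ 0..15] -> cipher key -> ctx->key[0..3]
 *	key[16..31] -> tweak key  -> ctx->key[4..7]
 */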

static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}

static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	memset(hashkey, 0, AES_BLOCK_SIZE);
	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);

	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
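
/*
 * The hash key computed above is the standard GCM H value, i.e. a block
 * of zeroes encrypted under the cipher key: H = E_K(0^128). It is kept
 * big-endian in ctx->base.ipad so the engine can load it as GHASH
 * state. A software cross-check could look like this (sketch only, not
 * driver code):
 *
 *	struct crypto_aes_ctx aes;
 *	u8 h[AES_BLOCK_SIZE] = { 0 };
 *
 *	aes_expandkey(&aes, key, keylen);
 *	aes_encrypt(&aes, h, h);	-- h now equals the GHASH key H
 */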

static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	return 0;
}

static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	safexcel_aead_cra_exit(tfm);
}

static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
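
/*
 * For CCM the AES key does double duty: it keys both the CTR encryption
 * and the CBC-MAC. The loop above therefore mirrors the key words into
 * ctx->base.ipad at an offset of two AES blocks into the hash state,
 * which is also why state_sz = 2 * AES_BLOCK_SIZE + keylen, and why the
 * hash algorithm is selected per key size (XCBC128/192/256) here rather
 * than fixed once at cra_init time.
 */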

static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	ctx->ctrinit = 0;
	return 0;
}

static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
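
/*
 * The iv[0] test in both CCM paths validates the RFC 3610 flags octet:
 * iv[0] holds L' = L - 1, where L is the width of the length field
 * (2..8 bytes), so only 1..7 are legal values. A well-formed CCM IV for
 * L = 4 might be built like this (hypothetical sketch):
 *
 *	iv[0] = 3;			-- L' = L - 1
 *	memcpy(&iv[1], nonce, 11);	-- 15 - L = 11 nonce bytes
 *	memset(&iv[12], 0, 4);		-- L-byte length/counter field
 */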

struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
				     const u8 *key)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
	ctx->key_len = CHACHA_KEY_SIZE;
}

static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
					     const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);

	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->ctrinit = 0;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
	return 0;
}

struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
					   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
		/* ESP variant has nonce appended to key */
		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		ctx->nonce = *(u32 *)(key + len);
	}
	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
						unsigned int authsize)
{
	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;
	return 0;
}

static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
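
/*
 * Fallback rationale for the function above: requests the engine cannot
 * handle -- the zero-length AAD+payload corner cases and ESP requests
 * with less than the ESP IV worth of AAD -- are bounced to the software
 * crypto_aead allocated by safexcel_aead_fallback_cra_init() below. The
 * subrequest reuses this request's context memory, which is why that
 * init routine sizes the reqsize as the max of the driver's own context
 * and the fallback's aead_request plus its reqsize.
 */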

static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}

static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
	ctx->ctrinit = 0;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
	ctx->state_sz = 0; /* Precomputed by HW */
	return 0;
}

static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
  2640. static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
  2641. const u8 *key, unsigned int len)
  2642. {
  2643. struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
  2644. struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
  2645. struct safexcel_crypto_priv *priv = ctx->base.priv;
  2646. if (len != SM4_KEY_SIZE)
  2647. return -EINVAL;
  2648. if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
  2649. if (memcmp(ctx->key, key, SM4_KEY_SIZE))
  2650. ctx->base.needs_inv = true;
  2651. memcpy(ctx->key, key, SM4_KEY_SIZE);
  2652. ctx->key_len = SM4_KEY_SIZE;
  2653. return 0;
  2654. }
  2655. static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
  2656. {
  2657. /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
  2658. if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
  2659. return -EINVAL;
  2660. else
  2661. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  2662. SAFEXCEL_ENCRYPT);
  2663. }
  2664. static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
  2665. {
  2666. /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
  2667. if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
  2668. return -EINVAL;
  2669. else
  2670. return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
  2671. SAFEXCEL_DECRYPT);
  2672. }
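
/*
 * (len & (SM4_BLOCK_SIZE - 1)) above is the usual power-of-two remainder
 * test: with SM4_BLOCK_SIZE == 16 it is nonzero exactly when len % 16 != 0,
 * i.e. whenever a partial final block would otherwise be fed to the engine.
 */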

static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	len -= CTR_RFC3686_NONCE_SIZE;

	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
}
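
/*
 * rfc3686 key layout as consumed above (sketch, caller side): the 4 byte
 * nonce is appended to the raw SM4 key, e.g.
 *
 *	u8 key[SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE];
 *
 *	memcpy(key, sm4_key, SM4_KEY_SIZE);
 *	memcpy(key + SM4_KEY_SIZE, nonce, CTR_RFC3686_NONCE_SIZE);
 *	crypto_skcipher_setkey(tfm, key, sizeof(key));
 *
 * Nonce, the 8 byte per-request IV and a 32 bit block counter together form
 * the 16 byte CTR block (RFC 3686).
 */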

static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}
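
/*
 * Fallback path: the request is mirrored one to one onto a software
 * subrequest running on ctx->fback. The subrequest borrows the caller's
 * completion callback and buffers, and lives inside the request context
 * itself (sized for this in safexcel_aead_fallback_cra_init()), so from the
 * caller's point of view this behaves exactly like the hardware path.
 */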
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}
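
/*
 * Dispatch summary for the two functions above (decrypt lengths exclude the
 * authentication tag):
 *
 *	payload not a whole number of SM4 blocks -> -EINVAL (HW won't flag it)
 *	any payload and/or AAD present           -> hardware queue
 *	zero length payload and AAD              -> software fallback
 */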

static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}
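
/*
 * rfc4106 key layout as consumed above (sketch): the IPsec SA appends a
 * 4 byte salt to the raw AES key, e.g. for AES-128
 *
 *	[ 16 byte AES key | 4 byte salt ]  ->  setkey() len = 20
 *
 * The salt plus the 8 byte per-request IV then form the 12 byte GCM nonce.
 * (CTR_RFC3686_NONCE_SIZE is reused here as it is the same 4 bytes.)
 */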

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First nonce byte = flags = L - 1 = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
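
/*
 * rfc4309 nonce handling above (sketch): the key carries a 3 byte salt, and
 * the CCM counter blocks A_i are then built as
 *
 *	[ flags = L - 1 = 3 | salt (3) | IV (8) | counter (4) ]
 *
 * per RFC 3610 with a 4 byte length/counter field, which is why the first
 * nonce byte is hardcoded to 3.
 */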

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};