testmgr.c 153 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
757785779578057815782578357845785578657875788578957905791579257935794579557965797579857995800580158025803580458055806580758085809581058115812581358145815581658175818581958205821582258235824582558265827582858295830583158325833583458355836583758385839584058415842584358445845584658475848584958505851585258535854585558565857585858595860586158625863586458655866586758685869587058715872587358745875587658775878587958805881588258835884588558865887588858895890589158925893589458955896589758985899590059015902590359045905590659075908590959105911591259135914591559165917591859195920592159225923592459255926592759285929593059315932593359345935593659375938593959405941594259435944594559465947594859495950595159525953595459555956595759585959596059615962596359645965596659675968596959705971
  1. // SPDX-License-Identifier: GPL-2.0-or-later
  2. /*
  3. * Algorithm testing framework and tests.
  4. *
  5. * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  6. * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
  7. * Copyright (c) 2007 Nokia Siemens Networks
  8. * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  9. * Copyright (c) 2019 Google LLC
  10. *
  11. * Updated RFC4106 AES-GCM testing.
  12. * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  13. * Adrian Hoban <adrian.hoban@intel.com>
  14. * Gabriele Paoloni <gabriele.paoloni@intel.com>
  15. * Tadeusz Struk (tadeusz.struk@intel.com)
  16. * Copyright (c) 2010, Intel Corporation.
  17. */
  18. #include <crypto/aead.h>
  19. #include <crypto/hash.h>
  20. #include <crypto/skcipher.h>
  21. #include <linux/err.h>
  22. #include <linux/fips.h>
  23. #include <linux/module.h>
  24. #include <linux/once.h>
  25. #include <linux/random.h>
  26. #include <linux/scatterlist.h>
  27. #include <linux/slab.h>
  28. #include <linux/string.h>
  29. #include <linux/uio.h>
  30. #include <crypto/rng.h>
  31. #include <crypto/drbg.h>
  32. #include <crypto/akcipher.h>
  33. #include <crypto/kpp.h>
  34. #include <crypto/acompress.h>
  35. #include <crypto/internal/cipher.h>
  36. #include <crypto/internal/simd.h>
  37. #include "internal.h"
  38. MODULE_IMPORT_NS(CRYPTO_INTERNAL);
  39. static bool notests;
  40. module_param(notests, bool, 0644);
  41. MODULE_PARM_DESC(notests, "disable crypto self-tests");
  42. static bool panic_on_fail;
  43. module_param(panic_on_fail, bool, 0444);
  44. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  45. static bool noextratests;
  46. module_param(noextratests, bool, 0644);
  47. MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");
  48. static unsigned int fuzz_iterations = 100;
  49. module_param(fuzz_iterations, uint, 0644);
  50. MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");
  51. #endif
  52. #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  53. /* a perfect nop */
  54. int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  55. {
  56. return 0;
  57. }
  58. #else
  59. #include "testmgr.h"
  60. /*
  61. * Need slab memory for testing (size in number of pages).
  62. */
  63. #define XBUFSIZE 8
  64. /*
  65. * Used by test_cipher()
  66. */
  67. #define ENCRYPT 1
  68. #define DECRYPT 0
  69. struct aead_test_suite {
  70. const struct aead_testvec *vecs;
  71. unsigned int count;
  72. /*
  73. * Set if trying to decrypt an inauthentic ciphertext with this
  74. * algorithm might result in EINVAL rather than EBADMSG, due to other
  75. * validation the algorithm does on the inputs such as length checks.
  76. */
  77. unsigned int einval_allowed : 1;
  78. /*
  79. * Set if this algorithm requires that the IV be located at the end of
  80. * the AAD buffer, in addition to being given in the normal way. The
  81. * behavior when the two IV copies differ is implementation-defined.
  82. */
  83. unsigned int aad_iv : 1;
  84. };
  85. struct cipher_test_suite {
  86. const struct cipher_testvec *vecs;
  87. unsigned int count;
  88. };
  89. struct comp_test_suite {
  90. struct {
  91. const struct comp_testvec *vecs;
  92. unsigned int count;
  93. } comp, decomp;
  94. };
  95. struct hash_test_suite {
  96. const struct hash_testvec *vecs;
  97. unsigned int count;
  98. };
  99. struct cprng_test_suite {
  100. const struct cprng_testvec *vecs;
  101. unsigned int count;
  102. };
  103. struct drbg_test_suite {
  104. const struct drbg_testvec *vecs;
  105. unsigned int count;
  106. };
  107. struct akcipher_test_suite {
  108. const struct akcipher_testvec *vecs;
  109. unsigned int count;
  110. };
  111. struct kpp_test_suite {
  112. const struct kpp_testvec *vecs;
  113. unsigned int count;
  114. };
  115. struct alg_test_desc {
  116. const char *alg;
  117. const char *generic_driver;
  118. int (*test)(const struct alg_test_desc *desc, const char *driver,
  119. u32 type, u32 mask);
  120. int fips_allowed; /* set if alg is allowed in fips mode */
  121. union {
  122. struct aead_test_suite aead;
  123. struct cipher_test_suite cipher;
  124. struct comp_test_suite comp;
  125. struct hash_test_suite hash;
  126. struct cprng_test_suite cprng;
  127. struct drbg_test_suite drbg;
  128. struct akcipher_test_suite akcipher;
  129. struct kpp_test_suite kpp;
  130. } suite;
  131. };
  132. static void hexdump(unsigned char *buf, unsigned int len)
  133. {
  134. print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
  135. 16, 1,
  136. buf, len, false);
  137. }
  138. static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
  139. {
  140. int i;
  141. for (i = 0; i < XBUFSIZE; i++) {
  142. buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
  143. if (!buf[i])
  144. goto err_free_buf;
  145. }
  146. return 0;
  147. err_free_buf:
  148. while (i-- > 0)
  149. free_pages((unsigned long)buf[i], order);
  150. return -ENOMEM;
  151. }
  152. static int testmgr_alloc_buf(char *buf[XBUFSIZE])
  153. {
  154. return __testmgr_alloc_buf(buf, 0);
  155. }
  156. static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
  157. {
  158. int i;
  159. for (i = 0; i < XBUFSIZE; i++)
  160. free_pages((unsigned long)buf[i], order);
  161. }
  162. static void testmgr_free_buf(char *buf[XBUFSIZE])
  163. {
  164. __testmgr_free_buf(buf, 0);
  165. }
  166. #define TESTMGR_POISON_BYTE 0xfe
  167. #define TESTMGR_POISON_LEN 16
  168. static inline void testmgr_poison(void *addr, size_t len)
  169. {
  170. memset(addr, TESTMGR_POISON_BYTE, len);
  171. }
  172. /* Is the memory region still fully poisoned? */
  173. static inline bool testmgr_is_poison(const void *addr, size_t len)
  174. {
  175. return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
  176. }
  177. /* flush type for hash algorithms */
  178. enum flush_type {
  179. /* merge with update of previous buffer(s) */
  180. FLUSH_TYPE_NONE = 0,
  181. /* update with previous buffer(s) before doing this one */
  182. FLUSH_TYPE_FLUSH,
  183. /* likewise, but also export and re-import the intermediate state */
  184. FLUSH_TYPE_REIMPORT,
  185. };
  186. /* finalization function for hash algorithms */
  187. enum finalization_type {
  188. FINALIZATION_TYPE_FINAL, /* use final() */
  189. FINALIZATION_TYPE_FINUP, /* use finup() */
  190. FINALIZATION_TYPE_DIGEST, /* use digest() */
  191. };
  192. /*
  193. * Whether the crypto operation will occur in-place, and if so whether the
  194. * source and destination scatterlist pointers will coincide (req->src ==
  195. * req->dst), or whether they'll merely point to two separate scatterlists
  196. * (req->src != req->dst) that reference the same underlying memory.
  197. *
  198. * This is only relevant for algorithm types that support in-place operation.
  199. */
  200. enum inplace_mode {
  201. OUT_OF_PLACE,
  202. INPLACE_ONE_SGLIST,
  203. INPLACE_TWO_SGLISTS,
  204. };
  205. #define TEST_SG_TOTAL 10000
  206. /**
  207. * struct test_sg_division - description of a scatterlist entry
  208. *
  209. * This struct describes one entry of a scatterlist being constructed to check a
  210. * crypto test vector.
  211. *
  212. * @proportion_of_total: length of this chunk relative to the total length,
  213. * given as a proportion out of TEST_SG_TOTAL so that it
  214. * scales to fit any test vector
  215. * @offset: byte offset into a 2-page buffer at which this chunk will start
  216. * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
  217. * @offset
  218. * @flush_type: for hashes, whether an update() should be done now vs.
  219. * continuing to accumulate data
  220. * @nosimd: if doing the pending update(), do it with SIMD disabled?
  221. */
  222. struct test_sg_division {
  223. unsigned int proportion_of_total;
  224. unsigned int offset;
  225. bool offset_relative_to_alignmask;
  226. enum flush_type flush_type;
  227. bool nosimd;
  228. };
/**
 * struct testvec_config - configuration for testing a crypto test vector
 *
 * This struct describes the data layout and other parameters with which each
 * crypto test vector can be tested.
 *
 * @name: name of this config, logged for debugging purposes if a test fails
 * @inplace_mode: whether and how to operate on the data in-place, if applicable
 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 * @src_divs: description of how to arrange the source scatterlist
 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 *	      for the algorithm type.  Defaults to @src_divs if unset.
 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				     the @iv_offset
 * @key_offset: misalignment of the key, where 0 is default alignment
 * @key_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				      the @key_offset
 * @finalization_type: what finalization function to use for hashes
 * @nosimd: execute with SIMD disabled?  Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
 *	    This applies to the parts of the operation that aren't controlled
 *	    individually by @nosimd_setkey or @src_divs[].nosimd.
 * @nosimd_setkey: set the key (if applicable) with SIMD disabled?  Requires
 *		   !CRYPTO_TFM_REQ_MAY_SLEEP.
 */
struct testvec_config {
	const char *name;
	enum inplace_mode inplace_mode;
	u32 req_flags;
	struct test_sg_division src_divs[XBUFSIZE];
	struct test_sg_division dst_divs[XBUFSIZE];
	unsigned int iv_offset;
	unsigned int key_offset;
	bool iv_offset_relative_to_alignmask;
	bool key_offset_relative_to_alignmask;
	enum finalization_type finalization_type;
	bool nosimd;
	bool nosimd_setkey;
};
  269. #define TESTVEC_CONFIG_NAMELEN 192
  270. /*
  271. * The following are the lists of testvec_configs to test for each algorithm
  272. * type when the basic crypto self-tests are enabled, i.e. when
  273. * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset. They aim to provide good test
  274. * coverage, while keeping the test time much shorter than the full fuzz tests
  275. * so that the basic tests can be enabled in a wider range of circumstances.
  276. */
/* Configs for skciphers and aeads */
/* Note: .proportion_of_total values are out of TEST_SG_TOTAL (10000). */
static const struct testvec_config default_cipher_testvec_configs[] = {
	{
		.name = "in-place (one sglist)",
		.inplace_mode = INPLACE_ONE_SGLIST,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "in-place (two sglists)",
		.inplace_mode = INPLACE_TWO_SGLISTS,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "out-of-place",
		.inplace_mode = OUT_OF_PLACE,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "unaligned buffer, offset=1",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.iv_offset = 1,
		.key_offset = 1,
	}, {
		.name = "buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.iv_offset = 1,
		.iv_offset_relative_to_alignmask = true,
		.key_offset = 1,
		.key_offset_relative_to_alignmask = true,
	}, {
		.name = "two even aligned splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{ .proportion_of_total = 5000 },
		},
	}, {
		.name = "one src, two even splits dst",
		.inplace_mode = OUT_OF_PLACE,
		.src_divs = { { .proportion_of_total = 10000 } },
		.dst_divs = {
			{ .proportion_of_total = 5000 },
			{ .proportion_of_total = 5000 },
		},
	}, {
		.name = "uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7 },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.iv_offset = 3,
		.key_offset = 3,
	}, {
		/* offsets near PAGE_SIZE make each chunk span a page boundary */
		.name = "misaligned splits crossing pages, inplace",
		.inplace_mode = INPLACE_ONE_SGLIST,
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7
			},
		},
	}
};
/* Configs for hashes; .proportion_of_total values are out of TEST_SG_TOTAL. */
static const struct testvec_config default_hash_testvec_configs[] = {
	{
		.name = "init+update+final aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "init+finup aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINUP,
	}, {
		.name = "digest aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "init+update+final misaligned buffer",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
		.key_offset = 1,
	}, {
		.name = "digest misaligned buffer",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
		.key_offset = 1,
	}, {
		/* FLUSH_TYPE_FLUSH forces an update() between the two halves */
		.name = "init+update+update+final two even splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{
				.proportion_of_total = 5000,
				.flush_type = FLUSH_TYPE_FLUSH,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "digest uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7 },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "digest misaligned splits crossing pages",
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32,
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		/* FLUSH_TYPE_REIMPORT exercises the export()/import() path */
		.name = "import/export",
		.src_divs = {
			{
				.proportion_of_total = 6500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			}, {
				.proportion_of_total = 3500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}
};
  420. static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
  421. {
  422. unsigned int remaining = TEST_SG_TOTAL;
  423. unsigned int ndivs = 0;
  424. do {
  425. remaining -= divs[ndivs++].proportion_of_total;
  426. } while (remaining);
  427. return ndivs;
  428. }
  429. #define SGDIVS_HAVE_FLUSHES BIT(0)
  430. #define SGDIVS_HAVE_NOSIMD BIT(1)
/*
 * Check that a scatterlist division list is well-formed: the proportions must
 * sum to exactly TEST_SG_TOTAL, and all array entries after the ones in use
 * must be zeroed.  Also reports via *flags_ret whether any used division
 * requests a flush (SGDIVS_HAVE_FLUSHES) or no-SIMD (SGDIVS_HAVE_NOSIMD).
 */
static bool valid_sg_divisions(const struct test_sg_division *divs,
			       unsigned int count, int *flags_ret)
{
	unsigned int total = 0;
	unsigned int i;

	for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
		/* each used division must be nonzero and must not overshoot */
		if (divs[i].proportion_of_total <= 0 ||
		    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
			return false;
		total += divs[i].proportion_of_total;
		if (divs[i].flush_type != FLUSH_TYPE_NONE)
			*flags_ret |= SGDIVS_HAVE_FLUSHES;
		if (divs[i].nosimd)
			*flags_ret |= SGDIVS_HAVE_NOSIMD;
	}
	/* the remaining (count - i) entries must be all-zero */
	return total == TEST_SG_TOTAL &&
		memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
}
/*
 * Check whether the given testvec_config is valid.  This isn't strictly needed
 * since every testvec_config should be valid, but check anyway so that people
 * don't unknowingly add broken configs that don't do what they wanted.
 */
static bool valid_testvec_config(const struct testvec_config *cfg)
{
	int flags = 0;

	if (cfg->name == NULL)
		return false;

	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
				&flags))
		return false;

	if (cfg->dst_divs[0].proportion_of_total) {
		if (!valid_sg_divisions(cfg->dst_divs,
					ARRAY_SIZE(cfg->dst_divs), &flags))
			return false;
	} else {
		/* unset dst_divs must be entirely zero */
		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
			return false;
		/* defaults to dst_divs=src_divs */
	}

	/* IV must stay within its 2*(MAX_ALGAPI_ALIGNMASK+1) buffer */
	/* NOTE(review): key_offset is not range-checked here — presumably
	 * bounded by the config generators; confirm against the callers. */
	if (cfg->iv_offset +
	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
	    MAX_ALGAPI_ALIGNMASK + 1)
		return false;

	/* flushes and per-division nosimd only make sense with update() calls */
	if ((flags & (SGDIVS_HAVE_FLUSHES | SGDIVS_HAVE_NOSIMD)) &&
	    cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
		return false;

	/* no-SIMD execution is incompatible with sleepable requests */
	if ((cfg->nosimd || cfg->nosimd_setkey ||
	     (flags & SGDIVS_HAVE_NOSIMD)) &&
	    (cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP))
		return false;

	return true;
}
struct test_sglist {
	char *bufs[XBUFSIZE];			/* 2-page backing buffers */
	struct scatterlist sgl[XBUFSIZE];	/* the scatterlist under test */
	struct scatterlist sgl_saved[XBUFSIZE];	/* copy for corruption checks */
	struct scatterlist *sgl_ptr;		/* list actually passed to the alg */
	unsigned int nents;			/* number of entries in use */
};
  491. static int init_test_sglist(struct test_sglist *tsgl)
  492. {
  493. return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
  494. }
  495. static void destroy_test_sglist(struct test_sglist *tsgl)
  496. {
  497. return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
  498. }
/**
 * build_test_sglist() - build a scatterlist for a crypto test
 *
 * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
 *	  buffers which the scatterlist @tsgl->sgl[] will be made to point into.
 * @divs: the layout specification on which the scatterlist will be based
 * @alignmask: the algorithm's alignmask
 * @total_len: the total length of the scatterlist to build in bytes
 * @data: if non-NULL, the buffers will be filled with this data until it ends.
 *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
 *	  past the end of each buffer will be poisoned to help detect overruns.
 * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
 *	      corresponds will be returned here.  This will match @divs except
 *	      that divisions resolving to a length of 0 are omitted as they are
 *	      not included in the scatterlist.
 *
 * Return: 0 or a -errno value
 */
static int build_test_sglist(struct test_sglist *tsgl,
			     const struct test_sg_division *divs,
			     const unsigned int alignmask,
			     const unsigned int total_len,
			     struct iov_iter *data,
			     const struct test_sg_division *out_divs[XBUFSIZE])
{
	struct {
		const struct test_sg_division *div;
		size_t length;
	} partitions[XBUFSIZE];
	const unsigned int ndivs = count_test_sg_divisions(divs);
	unsigned int len_remaining = total_len;
	unsigned int i;

	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
		return -EINVAL;

	/* Calculate the (div, length) pairs */
	tsgl->nents = 0;
	for (i = 0; i < ndivs; i++) {
		/* round-to-nearest share of total_len, capped at what's left */
		unsigned int len_this_sg =
			min(len_remaining,
			    (total_len * divs[i].proportion_of_total +
			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);

		if (len_this_sg != 0) {
			partitions[tsgl->nents].div = &divs[i];
			partitions[tsgl->nents].length = len_this_sg;
			tsgl->nents++;
			len_remaining -= len_this_sg;
		}
	}
	/* ensure at least one entry, even for total_len == 0 */
	if (tsgl->nents == 0) {
		partitions[tsgl->nents].div = &divs[0];
		partitions[tsgl->nents].length = 0;
		tsgl->nents++;
	}
	/* rounding leftovers go to the last entry */
	partitions[tsgl->nents - 1].length += len_remaining;

	/* Set up the sgl entries and fill the data or poison */
	sg_init_table(tsgl->sgl, tsgl->nents);
	for (i = 0; i < tsgl->nents; i++) {
		unsigned int offset = partitions[i].div->offset;
		void *addr;

		if (partitions[i].div->offset_relative_to_alignmask)
			offset += alignmask;

		/* halve the offset until chunk + poison fits in the 2 pages */
		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
		       2 * PAGE_SIZE) {
			if (WARN_ON(offset <= 0))
				return -EINVAL;
			offset /= 2;
		}

		addr = &tsgl->bufs[i][offset];
		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);

		if (out_divs)
			out_divs[i] = partitions[i].div;

		if (data) {
			size_t copy_len, copied;

			copy_len = min(partitions[i].length, data->count);
			copied = copy_from_iter(addr, copy_len, data);
			if (WARN_ON(copied != copy_len))
				return -EINVAL;
			/* poison the tail of the chunk plus the overrun zone */
			testmgr_poison(addr + copy_len, partitions[i].length +
				       TESTMGR_POISON_LEN - copy_len);
		} else {
			testmgr_poison(addr, partitions[i].length +
				       TESTMGR_POISON_LEN);
		}
	}

	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
	tsgl->sgl_ptr = tsgl->sgl;
	/* save a copy so is_test_sglist_corrupted() can compare later */
	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
	return 0;
}
/*
 * Verify that a scatterlist crypto operation produced the correct output.
 *
 * @tsgl: scatterlist containing the actual output
 * @expected_output: buffer containing the expected output
 * @len_to_check: length of @expected_output in bytes
 * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
 * @check_poison: verify that the poison bytes after each chunk are intact?
 *
 * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
 */
static int verify_correct_output(const struct test_sglist *tsgl,
				 const char *expected_output,
				 unsigned int len_to_check,
				 unsigned int unchecked_prefix_len,
				 bool check_poison)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		struct scatterlist *sg = &tsgl->sgl_ptr[i];
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;
		const char *actual_output;

		/* skip (part of) this entry if it's inside the ignored prefix */
		if (unchecked_prefix_len) {
			if (unchecked_prefix_len >= len) {
				unchecked_prefix_len -= len;
				continue;
			}
			offset += unchecked_prefix_len;
			len -= unchecked_prefix_len;
			unchecked_prefix_len = 0;
		}
		len = min(len, len_to_check);
		actual_output = page_address(sg_page(sg)) + offset;
		if (memcmp(expected_output, actual_output, len) != 0)
			return -EINVAL;
		if (check_poison &&
		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
			return -EOVERFLOW;
		len_to_check -= len;
		expected_output += len;
	}
	/* the scatterlist must have covered all of @expected_output */
	if (WARN_ON(len_to_check != 0))
		return -EINVAL;
	return 0;
}
  635. static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
  636. {
  637. unsigned int i;
  638. for (i = 0; i < tsgl->nents; i++) {
  639. if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
  640. return true;
  641. if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
  642. return true;
  643. if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
  644. return true;
  645. }
  646. return false;
  647. }
/* Paired source and destination scatterlists for skcipher/AEAD tests. */
struct cipher_test_sglists {
	struct test_sglist src;
	struct test_sglist dst;
};
  652. static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
  653. {
  654. struct cipher_test_sglists *tsgls;
  655. tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
  656. if (!tsgls)
  657. return NULL;
  658. if (init_test_sglist(&tsgls->src) != 0)
  659. goto fail_kfree;
  660. if (init_test_sglist(&tsgls->dst) != 0)
  661. goto fail_destroy_src;
  662. return tsgls;
  663. fail_destroy_src:
  664. destroy_test_sglist(&tsgls->src);
  665. fail_kfree:
  666. kfree(tsgls);
  667. return NULL;
  668. }
  669. static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
  670. {
  671. if (tsgls) {
  672. destroy_test_sglist(&tsgls->src);
  673. destroy_test_sglist(&tsgls->dst);
  674. kfree(tsgls);
  675. }
  676. }
/* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
				     const struct testvec_config *cfg,
				     unsigned int alignmask,
				     unsigned int src_total_len,
				     unsigned int dst_total_len,
				     const struct kvec *inputs,
				     unsigned int nr_inputs)
{
	struct iov_iter input;
	int err;

	iov_iter_kvec(&input, ITER_SOURCE, inputs, nr_inputs, src_total_len);
	/*
	 * For in-place operation the one buffer must be large enough for both
	 * the input and the output, hence max() of the two lengths.
	 */
	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
				cfg->inplace_mode != OUT_OF_PLACE ?
					max(dst_total_len, src_total_len) :
					src_total_len,
				&input, NULL);
	if (err)
		return err;

	/*
	 * In-place crypto operations can use the same scatterlist for both the
	 * source and destination (req->src == req->dst), or can use separate
	 * scatterlists (req->src != req->dst) which point to the same
	 * underlying memory.  Make sure to test both cases.
	 */
	if (cfg->inplace_mode == INPLACE_ONE_SGLIST) {
		/* share the src sglist itself */
		tsgls->dst.sgl_ptr = tsgls->src.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	if (cfg->inplace_mode == INPLACE_TWO_SGLISTS) {
		/*
		 * For now we keep it simple and only test the case where the
		 * two scatterlists have identical entries, rather than
		 * different entries that split up the same memory differently.
		 */
		memcpy(tsgls->dst.sgl, tsgls->src.sgl,
		       tsgls->src.nents * sizeof(tsgls->src.sgl[0]));
		memcpy(tsgls->dst.sgl_saved, tsgls->src.sgl,
		       tsgls->src.nents * sizeof(tsgls->src.sgl[0]));
		tsgls->dst.sgl_ptr = tsgls->dst.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	/* Out of place */
	return build_test_sglist(&tsgls->dst,
				 cfg->dst_divs[0].proportion_of_total ?
					cfg->dst_divs : cfg->src_divs,
				 alignmask, dst_total_len, NULL, NULL);
}
  727. /*
  728. * Support for testing passing a misaligned key to setkey():
  729. *
  730. * If cfg->key_offset is set, copy the key into a new buffer at that offset,
  731. * optionally adding alignmask. Else, just use the key directly.
  732. */
  733. static int prepare_keybuf(const u8 *key, unsigned int ksize,
  734. const struct testvec_config *cfg,
  735. unsigned int alignmask,
  736. const u8 **keybuf_ret, const u8 **keyptr_ret)
  737. {
  738. unsigned int key_offset = cfg->key_offset;
  739. u8 *keybuf = NULL, *keyptr = (u8 *)key;
  740. if (key_offset != 0) {
  741. if (cfg->key_offset_relative_to_alignmask)
  742. key_offset += alignmask;
  743. keybuf = kmalloc(key_offset + ksize, GFP_KERNEL);
  744. if (!keybuf)
  745. return -ENOMEM;
  746. keyptr = keybuf + key_offset;
  747. memcpy(keyptr, key, ksize);
  748. }
  749. *keybuf_ret = keybuf;
  750. *keyptr_ret = keyptr;
  751. return 0;
  752. }
/*
 * Like setkey_f(tfm, key, ksize), but sometimes misalign the key.
 * In addition, run the setkey function in no-SIMD context if requested.
 *
 * This is a statement-expression macro so it can wrap any setkey function
 * (e.g. crypto_shash_setkey) regardless of the tfm type; it evaluates to the
 * setkey error code, or the prepare_keybuf() error if allocation failed.
 */
#define do_setkey(setkey_f, tfm, key, ksize, cfg, alignmask)		\
({									\
	const u8 *keybuf, *keyptr;					\
	int err;							\
									\
	err = prepare_keybuf((key), (ksize), (cfg), (alignmask),	\
			     &keybuf, &keyptr);				\
	if (err == 0) {							\
		if ((cfg)->nosimd_setkey)				\
			crypto_disable_simd_for_test();			\
		err = setkey_f((tfm), keyptr, (ksize));			\
		if ((cfg)->nosimd_setkey)				\
			crypto_reenable_simd_for_test();		\
		kfree(keybuf);						\
	}								\
	err;								\
})
  774. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * The fuzz tests use prandom instead of the normal Linux RNG since they don't
 * need cryptographically secure random numbers.  This greatly improves the
 * performance of these tests, especially if they are run before the Linux RNG
 * has been initialized or if they are run on a lockdep-enabled kernel.
 */

/* Seed the per-test pseudorandom state from the system RNG. */
static inline void init_rnd_state(struct rnd_state *rng)
{
	prandom_seed_state(rng, get_random_u64());
}
  785. static inline u8 prandom_u8(struct rnd_state *rng)
  786. {
  787. return prandom_u32_state(rng);
  788. }
  789. static inline u32 prandom_u32_below(struct rnd_state *rng, u32 ceil)
  790. {
  791. /*
  792. * This is slightly biased for non-power-of-2 values of 'ceil', but this
  793. * isn't important here.
  794. */
  795. return prandom_u32_state(rng) % ceil;
  796. }
  797. static inline bool prandom_bool(struct rnd_state *rng)
  798. {
  799. return prandom_u32_below(rng, 2);
  800. }
  801. static inline u32 prandom_u32_inclusive(struct rnd_state *rng,
  802. u32 floor, u32 ceil)
  803. {
  804. return floor + prandom_u32_below(rng, ceil - floor + 1);
  805. }
  806. /* Generate a random length in range [0, max_len], but prefer smaller values */
  807. static unsigned int generate_random_length(struct rnd_state *rng,
  808. unsigned int max_len)
  809. {
  810. unsigned int len = prandom_u32_below(rng, max_len + 1);
  811. switch (prandom_u32_below(rng, 4)) {
  812. case 0:
  813. len %= 64;
  814. break;
  815. case 1:
  816. len %= 256;
  817. break;
  818. case 2:
  819. len %= 1024;
  820. break;
  821. default:
  822. break;
  823. }
  824. if (len && prandom_u32_below(rng, 4) == 0)
  825. len = rounddown_pow_of_two(len);
  826. return len;
  827. }
  828. /* Flip a random bit in the given nonempty data buffer */
  829. static void flip_random_bit(struct rnd_state *rng, u8 *buf, size_t size)
  830. {
  831. size_t bitpos;
  832. bitpos = prandom_u32_below(rng, size * 8);
  833. buf[bitpos / 8] ^= 1 << (bitpos % 8);
  834. }
  835. /* Flip a random byte in the given nonempty data buffer */
  836. static void flip_random_byte(struct rnd_state *rng, u8 *buf, size_t size)
  837. {
  838. buf[prandom_u32_below(rng, size)] ^= 0xff;
  839. }
  840. /* Sometimes make some random changes to the given nonempty data buffer */
  841. static void mutate_buffer(struct rnd_state *rng, u8 *buf, size_t size)
  842. {
  843. size_t num_flips;
  844. size_t i;
  845. /* Sometimes flip some bits */
  846. if (prandom_u32_below(rng, 4) == 0) {
  847. num_flips = min_t(size_t, 1 << prandom_u32_below(rng, 8),
  848. size * 8);
  849. for (i = 0; i < num_flips; i++)
  850. flip_random_bit(rng, buf, size);
  851. }
  852. /* Sometimes flip some bytes */
  853. if (prandom_u32_below(rng, 4) == 0) {
  854. num_flips = min_t(size_t, 1 << prandom_u32_below(rng, 8), size);
  855. for (i = 0; i < num_flips; i++)
  856. flip_random_byte(rng, buf, size);
  857. }
  858. }
/* Randomly generate 'count' bytes, but sometimes make them "interesting" */
static void generate_random_bytes(struct rnd_state *rng, u8 *buf, size_t count)
{
	u8 b;
	u8 increment;
	size_t i;

	if (count == 0)
		return;

	switch (prandom_u32_below(rng, 8)) { /* Choose a generation strategy */
	case 0:
	case 1:
		/* All the same byte, plus optional mutations */
		switch (prandom_u32_below(rng, 4)) {
		case 0:
			b = 0x00;
			break;
		case 1:
			b = 0xff;
			break;
		default:
			b = prandom_u8(rng);
			break;
		}
		memset(buf, b, count);
		mutate_buffer(rng, buf, count);
		break;
	case 2:
		/* Ascending or descending bytes, plus optional mutations */
		increment = prandom_u8(rng);
		b = prandom_u8(rng);
		for (i = 0; i < count; i++, b += increment)
			buf[i] = b;
		mutate_buffer(rng, buf, count);
		break;
	default:
		/* Fully random bytes (5/8 of the time) */
		prandom_bytes_state(rng, buf, count);
	}
}
/*
 * Randomly partition TEST_SG_TOTAL into up to @max_divs scatterlist divisions,
 * writing them to @divs and appending a human-readable description to the
 * buffer at @p (bounded by @end).  If @gen_flushes, divisions may also request
 * hash flushes/reimports, and — when the request can't sleep — no-SIMD
 * processing.  Returns the updated description pointer.
 */
static char *generate_random_sgl_divisions(struct rnd_state *rng,
					   struct test_sg_division *divs,
					   size_t max_divs, char *p, char *end,
					   bool gen_flushes, u32 req_flags)
{
	struct test_sg_division *div = divs;
	unsigned int remaining = TEST_SG_TOTAL;

	do {
		unsigned int this_len;
		const char *flushtype_str;

		/* last slot must absorb everything that remains */
		if (div == &divs[max_divs - 1] || prandom_bool(rng))
			this_len = remaining;
		else if (prandom_u32_below(rng, 4) == 0)
			this_len = (remaining + 1) / 2;
		else
			this_len = prandom_u32_inclusive(rng, 1, remaining);
		div->proportion_of_total = this_len;

		/* bias offsets toward page-boundary-crossing and small values */
		if (prandom_u32_below(rng, 4) == 0)
			div->offset = prandom_u32_inclusive(rng,
							    PAGE_SIZE - 128,
							    PAGE_SIZE - 1);
		else if (prandom_bool(rng))
			div->offset = prandom_u32_below(rng, 32);
		else
			div->offset = prandom_u32_below(rng, PAGE_SIZE);
		if (prandom_u32_below(rng, 8) == 0)
			div->offset_relative_to_alignmask = true;

		div->flush_type = FLUSH_TYPE_NONE;
		if (gen_flushes) {
			switch (prandom_u32_below(rng, 4)) {
			case 0:
				div->flush_type = FLUSH_TYPE_REIMPORT;
				break;
			case 1:
				div->flush_type = FLUSH_TYPE_FLUSH;
				break;
			}
		}

		/* no-SIMD updates are only valid for non-sleeping requests */
		if (div->flush_type != FLUSH_TYPE_NONE &&
		    !(req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
		    prandom_bool(rng))
			div->nosimd = true;

		switch (div->flush_type) {
		case FLUSH_TYPE_FLUSH:
			if (div->nosimd)
				flushtype_str = "<flush,nosimd>";
			else
				flushtype_str = "<flush>";
			break;
		case FLUSH_TYPE_REIMPORT:
			if (div->nosimd)
				flushtype_str = "<reimport,nosimd>";
			else
				flushtype_str = "<reimport>";
			break;
		default:
			flushtype_str = "";
			break;
		}

		BUILD_BUG_ON(TEST_SG_TOTAL != 10000); /* for "%u.%u%%" */
		p += scnprintf(p, end - p, "%s%u.%u%%@%s+%u%s", flushtype_str,
			       this_len / 100, this_len % 100,
			       div->offset_relative_to_alignmask ?
					"alignmask" : "",
			       div->offset, this_len == remaining ? "" : ", ");
		remaining -= this_len;
		div++;
	} while (remaining);

	return p;
}
/* Generate a random testvec_config for fuzz testing */
static void generate_random_testvec_config(struct rnd_state *rng,
					   struct testvec_config *cfg,
					   char *name, size_t max_namelen)
{
	char *p = name;
	char * const end = name + max_namelen;

	memset(cfg, 0, sizeof(*cfg));

	cfg->name = name;

	p += scnprintf(p, end - p, "random:");

	/* in-place mode: 1/2 out-of-place, 1/4 one-sglist, 1/4 two-sglists */
	switch (prandom_u32_below(rng, 4)) {
	case 0:
	case 1:
		cfg->inplace_mode = OUT_OF_PLACE;
		break;
	case 2:
		cfg->inplace_mode = INPLACE_ONE_SGLIST;
		p += scnprintf(p, end - p, " inplace_one_sglist");
		break;
	default:
		cfg->inplace_mode = INPLACE_TWO_SGLISTS;
		p += scnprintf(p, end - p, " inplace_two_sglists");
		break;
	}

	if (prandom_bool(rng)) {
		cfg->req_flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
		p += scnprintf(p, end - p, " may_sleep");
	}

	/* finalization: 1/4 final, 1/4 finup, 1/2 digest */
	switch (prandom_u32_below(rng, 4)) {
	case 0:
		cfg->finalization_type = FINALIZATION_TYPE_FINAL;
		p += scnprintf(p, end - p, " use_final");
		break;
	case 1:
		cfg->finalization_type = FINALIZATION_TYPE_FINUP;
		p += scnprintf(p, end - p, " use_finup");
		break;
	default:
		cfg->finalization_type = FINALIZATION_TYPE_DIGEST;
		p += scnprintf(p, end - p, " use_digest");
		break;
	}

	/* nosimd options only allowed when the request can't sleep */
	if (!(cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP)) {
		if (prandom_bool(rng)) {
			cfg->nosimd = true;
			p += scnprintf(p, end - p, " nosimd");
		}
		if (prandom_bool(rng)) {
			cfg->nosimd_setkey = true;
			p += scnprintf(p, end - p, " nosimd_setkey");
		}
	}

	p += scnprintf(p, end - p, " src_divs=[");
	p = generate_random_sgl_divisions(rng, cfg->src_divs,
					  ARRAY_SIZE(cfg->src_divs), p, end,
					  (cfg->finalization_type !=
					   FINALIZATION_TYPE_DIGEST),
					  cfg->req_flags);
	p += scnprintf(p, end - p, "]");

	/* separate dst_divs only make sense out-of-place */
	if (cfg->inplace_mode == OUT_OF_PLACE && prandom_bool(rng)) {
		p += scnprintf(p, end - p, " dst_divs=[");
		p = generate_random_sgl_divisions(rng, cfg->dst_divs,
						  ARRAY_SIZE(cfg->dst_divs),
						  p, end, false,
						  cfg->req_flags);
		p += scnprintf(p, end - p, "]");
	}

	if (prandom_bool(rng)) {
		cfg->iv_offset = prandom_u32_inclusive(rng, 1,
						       MAX_ALGAPI_ALIGNMASK);
		p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
	}

	if (prandom_bool(rng)) {
		cfg->key_offset = prandom_u32_inclusive(rng, 1,
							MAX_ALGAPI_ALIGNMASK);
		p += scnprintf(p, end - p, " key_offset=%u", cfg->key_offset);
	}

	WARN_ON_ONCE(!valid_testvec_config(cfg));
}
/*
 * Disable SIMD for the current task via the crypto_simd_disabled_for_test
 * per-CPU flag.  migrate_disable() comes first so the task stays on the CPU
 * whose flag was set until the matching crypto_reenable_simd_for_test().
 */
static void crypto_disable_simd_for_test(void)
{
	migrate_disable();
	__this_cpu_write(crypto_simd_disabled_for_test, true);
}
/* Undo crypto_disable_simd_for_test(): clear the flag, then allow migration. */
static void crypto_reenable_simd_for_test(void)
{
	__this_cpu_write(crypto_simd_disabled_for_test, false);
	migrate_enable();
}
/*
 * Given an algorithm name, build the name of the generic implementation of that
 * algorithm, assuming the usual naming convention.  Specifically, this appends
 * "-generic" to every part of the name that is not a template name.  Examples:
 *
 *	aes => aes-generic
 *	cbc(aes) => cbc(aes-generic)
 *	cts(cbc(aes)) => cts(cbc(aes-generic))
 *	rfc7539(chacha20,poly1305) => rfc7539(chacha20-generic,poly1305-generic)
 *
 * Return: 0 on success, or -ENAMETOOLONG if the generic name would be too long
 */
static int build_generic_driver_name(const char *algname,
				     char driver_name[CRYPTO_MAX_ALG_NAME])
{
	const char *in = algname;
	char *out = driver_name;
	size_t len = strlen(algname);

	if (len >= CRYPTO_MAX_ALG_NAME)
		goto too_long;
	do {
		const char *in_saved = in;

		/* copy one name component, stopping at any delimiter */
		while (*in && *in != '(' && *in != ')' && *in != ',')
			*out++ = *in++;
		/*
		 * A component followed by '(' is a template name and is left
		 * as-is; any other nonempty component gets "-generic".
		 */
		if (*in != '(' && in > in_saved) {
			len += 8;	/* strlen("-generic") */
			if (len >= CRYPTO_MAX_ALG_NAME)
				goto too_long;
			memcpy(out, "-generic", 8);
			out += 8;
		}
	} while ((*out++ = *in++) != '\0');	/* also copies the delimiter */
	return 0;

too_long:
	pr_err("alg: generic driver name for \"%s\" would be too long\n",
	       algname);
	return -ENAMETOOLONG;
}
  1095. #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* No-op stub used when CONFIG_CRYPTO_MANAGER_EXTRA_TESTS is disabled. */
static void crypto_disable_simd_for_test(void)
{
}
/* No-op stub used when CONFIG_CRYPTO_MANAGER_EXTRA_TESTS is disabled. */
static void crypto_reenable_simd_for_test(void)
{
}
  1102. #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  1103. static int build_hash_sglist(struct test_sglist *tsgl,
  1104. const struct hash_testvec *vec,
  1105. const struct testvec_config *cfg,
  1106. unsigned int alignmask,
  1107. const struct test_sg_division *divs[XBUFSIZE])
  1108. {
  1109. struct kvec kv;
  1110. struct iov_iter input;
  1111. kv.iov_base = (void *)vec->plaintext;
  1112. kv.iov_len = vec->psize;
  1113. iov_iter_kvec(&input, ITER_SOURCE, &kv, 1, vec->psize);
  1114. return build_test_sglist(tsgl, cfg->src_divs, alignmask, vec->psize,
  1115. &input, divs);
  1116. }
/*
 * Compare a computed hash against the test vector's expected digest and check
 * the poison bytes just past the digest for buffer overruns.
 *
 * @type: algorithm type string for the log message (e.g. "shash")
 * Return: 0 on match, -EINVAL on mismatch, -EOVERFLOW on overrun.
 */
static int check_hash_result(const char *type,
			     const u8 *result, unsigned int digestsize,
			     const struct hash_testvec *vec,
			     const char *vec_name,
			     const char *driver,
			     const struct testvec_config *cfg)
{
	if (memcmp(result, vec->digest, digestsize) != 0) {
		pr_err("alg: %s: %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       type, driver, vec_name, cfg->name);
		return -EINVAL;
	}
	if (!testmgr_is_poison(&result[digestsize], TESTMGR_POISON_LEN)) {
		pr_err("alg: %s: %s overran result buffer on test vector %s, cfg=\"%s\"\n",
		       type, driver, vec_name, cfg->name);
		return -EOVERFLOW;
	}
	return 0;
}
/*
 * Log a failed shash API call and pass the error through unchanged.
 * @op: name of the operation, used only in the log message.
 */
static inline int check_shash_op(const char *op, int err,
				 const char *driver, const char *vec_name,
				 const struct testvec_config *cfg)
{
	if (err)
		pr_err("alg: shash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
		       driver, op, err, vec_name, cfg->name);
	return err;
}
/* Test one hash test vector in one configuration, using the shash API */
static int test_shash_vec_cfg(const struct hash_testvec *vec,
			      const char *vec_name,
			      const struct testvec_config *cfg,
			      struct shash_desc *desc,
			      struct test_sglist *tsgl,
			      u8 *hashstate)
{
	struct crypto_shash *tfm = desc->tfm;
	const unsigned int digestsize = crypto_shash_digestsize(tfm);
	const unsigned int statesize = crypto_shash_statesize(tfm);
	const char *driver = crypto_shash_driver_name(tfm);
	const struct test_sg_division *divs[XBUFSIZE];
	unsigned int i;
	/* Extra poison bytes after the digest detect result-buffer overruns */
	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
	int err;

	/* Set the key, if specified */
	if (vec->ksize) {
		err = do_setkey(crypto_shash_setkey, tfm, vec->key, vec->ksize,
				cfg, 0);
		if (err) {
			/* An expected setkey failure ends the test early. */
			if (err == vec->setkey_error)
				return 0;
			pr_err("alg: shash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
			       driver, vec_name, vec->setkey_error, err,
			       crypto_shash_get_flags(tfm));
			return err;
		}
		if (vec->setkey_error) {
			pr_err("alg: shash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
			       driver, vec_name, vec->setkey_error);
			return -EINVAL;
		}
	}

	/* Build the scatterlist for the source data */
	err = build_hash_sglist(tsgl, vec, cfg, 0, divs);
	if (err) {
		pr_err("alg: shash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
		       driver, vec_name, cfg->name);
		return err;
	}

	/* Do the actual hashing */

	/* Poison the descriptor context and result so stale data is caught */
	testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);

	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
	    vec->digest_error) {
		/* Just using digest() */
		/*
		 * crypto_shash_digest() takes a single flat buffer, so this
		 * config can only be exercised when the data landed in one
		 * scatterlist entry; otherwise skip it.
		 */
		if (tsgl->nents != 1)
			return 0;
		if (cfg->nosimd)
			crypto_disable_simd_for_test();
		err = crypto_shash_digest(desc, sg_virt(&tsgl->sgl[0]),
					  tsgl->sgl[0].length, result);
		if (cfg->nosimd)
			crypto_reenable_simd_for_test();
		if (err) {
			if (err == vec->digest_error)
				return 0;
			pr_err("alg: shash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, err,
			       cfg->name);
			return err;
		}
		if (vec->digest_error) {
			pr_err("alg: shash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, cfg->name);
			return -EINVAL;
		}
		goto result_ready;
	}

	/* Using init(), zero or more update(), then final() or finup() */

	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = crypto_shash_init(desc);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = check_shash_op("init", err, driver, vec_name, cfg);
	if (err)
		return err;

	for (i = 0; i < tsgl->nents; i++) {
		/* Last division + FINUP config: finish with finup() here */
		if (i + 1 == tsgl->nents &&
		    cfg->finalization_type == FINALIZATION_TYPE_FINUP) {
			if (divs[i]->nosimd)
				crypto_disable_simd_for_test();
			err = crypto_shash_finup(desc, sg_virt(&tsgl->sgl[i]),
						 tsgl->sgl[i].length, result);
			if (divs[i]->nosimd)
				crypto_reenable_simd_for_test();
			err = check_shash_op("finup", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
			goto result_ready;
		}
		if (divs[i]->nosimd)
			crypto_disable_simd_for_test();
		err = crypto_shash_update(desc, sg_virt(&tsgl->sgl[i]),
					  tsgl->sgl[i].length);
		if (divs[i]->nosimd)
			crypto_reenable_simd_for_test();
		err = check_shash_op("update", err, driver, vec_name, cfg);
		if (err)
			return err;
		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
			/* Test ->export() and ->import() */
			/*
			 * Poison the bytes just past the state buffer so an
			 * export() that writes more than statesize bytes is
			 * detected below.
			 */
			testmgr_poison(hashstate + statesize,
				       TESTMGR_POISON_LEN);
			err = crypto_shash_export(desc, hashstate);
			err = check_shash_op("export", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
			if (!testmgr_is_poison(hashstate + statesize,
					       TESTMGR_POISON_LEN)) {
				pr_err("alg: shash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
				       driver, vec_name, cfg->name);
				return -EOVERFLOW;
			}
			/* Wipe the context so import() must fully restore it */
			testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
			err = crypto_shash_import(desc, hashstate);
			err = check_shash_op("import", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
		}
	}

	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = crypto_shash_final(desc, result);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = check_shash_op("final", err, driver, vec_name, cfg);
	if (err)
		return err;
result_ready:
	return check_hash_result("shash", result, digestsize, vec, vec_name,
				 driver, cfg);
}
  1283. static int do_ahash_op(int (*op)(struct ahash_request *req),
  1284. struct ahash_request *req,
  1285. struct crypto_wait *wait, bool nosimd)
  1286. {
  1287. int err;
  1288. if (nosimd)
  1289. crypto_disable_simd_for_test();
  1290. err = op(req);
  1291. if (nosimd)
  1292. crypto_reenable_simd_for_test();
  1293. return crypto_wait_req(err, wait);
  1294. }
/*
 * Check the outcome of a non-final ahash operation @op (init/update/export/
 * import): it must have succeeded, and it must not have touched the result
 * buffer, which should still contain only poison bytes at this point.
 */
static int check_nonfinal_ahash_op(const char *op, int err,
				   u8 *result, unsigned int digestsize,
				   const char *driver, const char *vec_name,
				   const struct testvec_config *cfg)
{
	if (err) {
		pr_err("alg: ahash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
		       driver, op, err, vec_name, cfg->name);
		return err;
	}
	if (!testmgr_is_poison(result, digestsize)) {
		pr_err("alg: ahash: %s %s() used result buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	return 0;
}
/* Test one hash test vector in one configuration, using the ahash API */
static int test_ahash_vec_cfg(const struct hash_testvec *vec,
			      const char *vec_name,
			      const struct testvec_config *cfg,
			      struct ahash_request *req,
			      struct test_sglist *tsgl,
			      u8 *hashstate)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
	const unsigned int statesize = crypto_ahash_statesize(tfm);
	const char *driver = crypto_ahash_driver_name(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const struct test_sg_division *divs[XBUFSIZE];
	DECLARE_CRYPTO_WAIT(wait);
	unsigned int i;
	/* Scatterlist entries accumulated since the last update() call */
	struct scatterlist *pending_sgl;
	unsigned int pending_len;
	/* Extra poison bytes after the digest detect result-buffer overruns */
	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
	int err;

	/* Set the key, if specified */
	if (vec->ksize) {
		err = do_setkey(crypto_ahash_setkey, tfm, vec->key, vec->ksize,
				cfg, 0);
		if (err) {
			/* An expected setkey failure ends the test early. */
			if (err == vec->setkey_error)
				return 0;
			pr_err("alg: ahash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
			       driver, vec_name, vec->setkey_error, err,
			       crypto_ahash_get_flags(tfm));
			return err;
		}
		if (vec->setkey_error) {
			pr_err("alg: ahash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
			       driver, vec_name, vec->setkey_error);
			return -EINVAL;
		}
	}

	/* Build the scatterlist for the source data */
	err = build_hash_sglist(tsgl, vec, cfg, 0, divs);
	if (err) {
		pr_err("alg: ahash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
		       driver, vec_name, cfg->name);
		return err;
	}

	/* Do the actual hashing */

	/* Poison the request context and result so stale data is caught */
	testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);

	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
	    vec->digest_error) {
		/* Just using digest() */
		ahash_request_set_callback(req, req_flags, crypto_req_done,
					   &wait);
		ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize);
		err = do_ahash_op(crypto_ahash_digest, req, &wait, cfg->nosimd);
		if (err) {
			if (err == vec->digest_error)
				return 0;
			pr_err("alg: ahash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, err,
			       cfg->name);
			return err;
		}
		if (vec->digest_error) {
			pr_err("alg: ahash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, cfg->name);
			return -EINVAL;
		}
		goto result_ready;
	}

	/* Using init(), zero or more update(), then final() or finup() */

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, NULL, result, 0);
	err = do_ahash_op(crypto_ahash_init, req, &wait, cfg->nosimd);
	err = check_nonfinal_ahash_op("init", err, result, digestsize,
				      driver, vec_name, cfg);
	if (err)
		return err;

	pending_sgl = NULL;
	pending_len = 0;
	for (i = 0; i < tsgl->nents; i++) {
		/* A flush point: push the accumulated entries via update() */
		if (divs[i]->flush_type != FLUSH_TYPE_NONE &&
		    pending_sgl != NULL) {
			/* update() with the pending data */
			ahash_request_set_callback(req, req_flags,
						   crypto_req_done, &wait);
			ahash_request_set_crypt(req, pending_sgl, result,
						pending_len);
			err = do_ahash_op(crypto_ahash_update, req, &wait,
					  divs[i]->nosimd);
			err = check_nonfinal_ahash_op("update", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
			pending_sgl = NULL;
			pending_len = 0;
		}
		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
			/* Test ->export() and ->import() */
			/*
			 * Poison the bytes just past the state buffer so an
			 * export() that writes more than statesize bytes is
			 * detected below.
			 */
			testmgr_poison(hashstate + statesize,
				       TESTMGR_POISON_LEN);
			err = crypto_ahash_export(req, hashstate);
			err = check_nonfinal_ahash_op("export", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
			if (!testmgr_is_poison(hashstate + statesize,
					       TESTMGR_POISON_LEN)) {
				pr_err("alg: ahash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
				       driver, vec_name, cfg->name);
				return -EOVERFLOW;
			}
			/* Wipe the context so import() must fully restore it */
			testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
			err = crypto_ahash_import(req, hashstate);
			err = check_nonfinal_ahash_op("import", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
		}
		/* Defer this entry until the next flush point or the finish */
		if (pending_sgl == NULL)
			pending_sgl = &tsgl->sgl[i];
		pending_len += tsgl->sgl[i].length;
	}

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, pending_sgl, result, pending_len);
	if (cfg->finalization_type == FINALIZATION_TYPE_FINAL) {
		/* finish with update() and final() */
		err = do_ahash_op(crypto_ahash_update, req, &wait, cfg->nosimd);
		err = check_nonfinal_ahash_op("update", err, result, digestsize,
					      driver, vec_name, cfg);
		if (err)
			return err;
		err = do_ahash_op(crypto_ahash_final, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: ahash: %s final() failed with err %d on test vector %s, cfg=\"%s\"\n",
			       driver, err, vec_name, cfg->name);
			return err;
		}
	} else {
		/* finish with finup() */
		err = do_ahash_op(crypto_ahash_finup, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: ahash: %s finup() failed with err %d on test vector %s, cfg=\"%s\"\n",
			       driver, err, vec_name, cfg->name);
			return err;
		}
	}
result_ready:
	return check_hash_result("ahash", result, digestsize, vec, vec_name,
				 driver, cfg);
}
  1466. static int test_hash_vec_cfg(const struct hash_testvec *vec,
  1467. const char *vec_name,
  1468. const struct testvec_config *cfg,
  1469. struct ahash_request *req,
  1470. struct shash_desc *desc,
  1471. struct test_sglist *tsgl,
  1472. u8 *hashstate)
  1473. {
  1474. int err;
  1475. /*
  1476. * For algorithms implemented as "shash", most bugs will be detected by
  1477. * both the shash and ahash tests. Test the shash API first so that the
  1478. * failures involve less indirection, so are easier to debug.
  1479. */
  1480. if (desc) {
  1481. err = test_shash_vec_cfg(vec, vec_name, cfg, desc, tsgl,
  1482. hashstate);
  1483. if (err)
  1484. return err;
  1485. }
  1486. return test_ahash_vec_cfg(vec, vec_name, cfg, req, tsgl, hashstate);
  1487. }
/*
 * Run one hash test vector through all the default configurations, and, when
 * the extra self-tests are enabled, through fuzz_iterations randomly
 * generated configurations as well.
 */
static int test_hash_vec(const struct hash_testvec *vec, unsigned int vec_num,
			 struct ahash_request *req, struct shash_desc *desc,
			 struct test_sglist *tsgl, u8 *hashstate)
{
	char vec_name[16];
	unsigned int i;
	int err;

	sprintf(vec_name, "%u", vec_num);

	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++) {
		err = test_hash_vec_cfg(vec, vec_name,
					&default_hash_testvec_configs[i],
					req, desc, tsgl, hashstate);
		if (err)
			return err;
	}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	if (!noextratests) {
		struct rnd_state rng;
		struct testvec_config cfg;
		char cfgname[TESTVEC_CONFIG_NAMELEN];

		init_rnd_state(&rng);

		for (i = 0; i < fuzz_iterations; i++) {
			generate_random_testvec_config(&rng, &cfg, cfgname,
						       sizeof(cfgname));
			err = test_hash_vec_cfg(vec, vec_name, &cfg,
						req, desc, tsgl, hashstate);
			if (err)
				return err;
			cond_resched();
		}
	}
#endif
	return 0;
}
  1522. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  1523. /*
  1524. * Generate a hash test vector from the given implementation.
  1525. * Assumes the buffers in 'vec' were already allocated.
  1526. */
  1527. static void generate_random_hash_testvec(struct rnd_state *rng,
  1528. struct shash_desc *desc,
  1529. struct hash_testvec *vec,
  1530. unsigned int maxkeysize,
  1531. unsigned int maxdatasize,
  1532. char *name, size_t max_namelen)
  1533. {
  1534. /* Data */
  1535. vec->psize = generate_random_length(rng, maxdatasize);
  1536. generate_random_bytes(rng, (u8 *)vec->plaintext, vec->psize);
  1537. /*
  1538. * Key: length in range [1, maxkeysize], but usually choose maxkeysize.
  1539. * If algorithm is unkeyed, then maxkeysize == 0 and set ksize = 0.
  1540. */
  1541. vec->setkey_error = 0;
  1542. vec->ksize = 0;
  1543. if (maxkeysize) {
  1544. vec->ksize = maxkeysize;
  1545. if (prandom_u32_below(rng, 4) == 0)
  1546. vec->ksize = prandom_u32_inclusive(rng, 1, maxkeysize);
  1547. generate_random_bytes(rng, (u8 *)vec->key, vec->ksize);
  1548. vec->setkey_error = crypto_shash_setkey(desc->tfm, vec->key,
  1549. vec->ksize);
  1550. /* If the key couldn't be set, no need to continue to digest. */
  1551. if (vec->setkey_error)
  1552. goto done;
  1553. }
  1554. /* Digest */
  1555. vec->digest_error = crypto_shash_digest(desc, vec->plaintext,
  1556. vec->psize, (u8 *)vec->digest);
  1557. done:
  1558. snprintf(name, max_namelen, "\"random: psize=%u ksize=%u\"",
  1559. vec->psize, vec->ksize);
  1560. }
  1561. /*
  1562. * Test the hash algorithm represented by @req against the corresponding generic
  1563. * implementation, if one is available.
  1564. */
  1565. static int test_hash_vs_generic_impl(const char *generic_driver,
  1566. unsigned int maxkeysize,
  1567. struct ahash_request *req,
  1568. struct shash_desc *desc,
  1569. struct test_sglist *tsgl,
  1570. u8 *hashstate)
  1571. {
  1572. struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
  1573. const unsigned int digestsize = crypto_ahash_digestsize(tfm);
  1574. const unsigned int blocksize = crypto_ahash_blocksize(tfm);
  1575. const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
  1576. const char *algname = crypto_hash_alg_common(tfm)->base.cra_name;
  1577. const char *driver = crypto_ahash_driver_name(tfm);
  1578. struct rnd_state rng;
  1579. char _generic_driver[CRYPTO_MAX_ALG_NAME];
  1580. struct crypto_shash *generic_tfm = NULL;
  1581. struct shash_desc *generic_desc = NULL;
  1582. unsigned int i;
  1583. struct hash_testvec vec = { 0 };
  1584. char vec_name[64];
  1585. struct testvec_config *cfg;
  1586. char cfgname[TESTVEC_CONFIG_NAMELEN];
  1587. int err;
  1588. if (noextratests)
  1589. return 0;
  1590. init_rnd_state(&rng);
  1591. if (!generic_driver) { /* Use default naming convention? */
  1592. err = build_generic_driver_name(algname, _generic_driver);
  1593. if (err)
  1594. return err;
  1595. generic_driver = _generic_driver;
  1596. }
  1597. if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
  1598. return 0;
  1599. generic_tfm = crypto_alloc_shash(generic_driver, 0, 0);
  1600. if (IS_ERR(generic_tfm)) {
  1601. err = PTR_ERR(generic_tfm);
  1602. if (err == -ENOENT) {
  1603. pr_warn("alg: hash: skipping comparison tests for %s because %s is unavailable\n",
  1604. driver, generic_driver);
  1605. return 0;
  1606. }
  1607. pr_err("alg: hash: error allocating %s (generic impl of %s): %d\n",
  1608. generic_driver, algname, err);
  1609. return err;
  1610. }
  1611. cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
  1612. if (!cfg) {
  1613. err = -ENOMEM;
  1614. goto out;
  1615. }
  1616. generic_desc = kzalloc(sizeof(*desc) +
  1617. crypto_shash_descsize(generic_tfm), GFP_KERNEL);
  1618. if (!generic_desc) {
  1619. err = -ENOMEM;
  1620. goto out;
  1621. }
  1622. generic_desc->tfm = generic_tfm;
  1623. /* Check the algorithm properties for consistency. */
  1624. if (digestsize != crypto_shash_digestsize(generic_tfm)) {
  1625. pr_err("alg: hash: digestsize for %s (%u) doesn't match generic impl (%u)\n",
  1626. driver, digestsize,
  1627. crypto_shash_digestsize(generic_tfm));
  1628. err = -EINVAL;
  1629. goto out;
  1630. }
  1631. if (blocksize != crypto_shash_blocksize(generic_tfm)) {
  1632. pr_err("alg: hash: blocksize for %s (%u) doesn't match generic impl (%u)\n",
  1633. driver, blocksize, crypto_shash_blocksize(generic_tfm));
  1634. err = -EINVAL;
  1635. goto out;
  1636. }
  1637. /*
  1638. * Now generate test vectors using the generic implementation, and test
  1639. * the other implementation against them.
  1640. */
  1641. vec.key = kmalloc(maxkeysize, GFP_KERNEL);
  1642. vec.plaintext = kmalloc(maxdatasize, GFP_KERNEL);
  1643. vec.digest = kmalloc(digestsize, GFP_KERNEL);
  1644. if (!vec.key || !vec.plaintext || !vec.digest) {
  1645. err = -ENOMEM;
  1646. goto out;
  1647. }
  1648. for (i = 0; i < fuzz_iterations * 8; i++) {
  1649. generate_random_hash_testvec(&rng, generic_desc, &vec,
  1650. maxkeysize, maxdatasize,
  1651. vec_name, sizeof(vec_name));
  1652. generate_random_testvec_config(&rng, cfg, cfgname,
  1653. sizeof(cfgname));
  1654. err = test_hash_vec_cfg(&vec, vec_name, cfg,
  1655. req, desc, tsgl, hashstate);
  1656. if (err)
  1657. goto out;
  1658. cond_resched();
  1659. }
  1660. err = 0;
  1661. out:
  1662. kfree(cfg);
  1663. kfree(vec.key);
  1664. kfree(vec.plaintext);
  1665. kfree(vec.digest);
  1666. crypto_free_shash(generic_tfm);
  1667. kfree_sensitive(generic_desc);
  1668. return err;
  1669. }
#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Comparison fuzz tests are compiled out; report success unconditionally. */
static int test_hash_vs_generic_impl(const char *generic_driver,
				     unsigned int maxkeysize,
				     struct ahash_request *req,
				     struct shash_desc *desc,
				     struct test_sglist *tsgl,
				     u8 *hashstate)
{
	return 0;
}
#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  1681. static int alloc_shash(const char *driver, u32 type, u32 mask,
  1682. struct crypto_shash **tfm_ret,
  1683. struct shash_desc **desc_ret)
  1684. {
  1685. struct crypto_shash *tfm;
  1686. struct shash_desc *desc;
  1687. tfm = crypto_alloc_shash(driver, type, mask);
  1688. if (IS_ERR(tfm)) {
  1689. if (PTR_ERR(tfm) == -ENOENT) {
  1690. /*
  1691. * This algorithm is only available through the ahash
  1692. * API, not the shash API, so skip the shash tests.
  1693. */
  1694. return 0;
  1695. }
  1696. pr_err("alg: hash: failed to allocate shash transform for %s: %ld\n",
  1697. driver, PTR_ERR(tfm));
  1698. return PTR_ERR(tfm);
  1699. }
  1700. desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
  1701. if (!desc) {
  1702. crypto_free_shash(tfm);
  1703. return -ENOMEM;
  1704. }
  1705. desc->tfm = tfm;
  1706. *tfm_ret = tfm;
  1707. *desc_ret = desc;
  1708. return 0;
  1709. }
/*
 * Test @num_vecs hash test vectors against @driver, then run the comparison
 * fuzz tests against the generic implementation.  Allocates everything the
 * per-vector tests need (ahash request, optional shash descriptor, test
 * scatterlists, and a state buffer sized for whichever API needs more room)
 * and frees it all on the way out.
 */
static int __alg_test_hash(const struct hash_testvec *vecs,
			   unsigned int num_vecs, const char *driver,
			   u32 type, u32 mask,
			   const char *generic_driver, unsigned int maxkeysize)
{
	struct crypto_ahash *atfm = NULL;
	struct ahash_request *req = NULL;
	struct crypto_shash *stfm = NULL;
	struct shash_desc *desc = NULL;
	struct test_sglist *tsgl = NULL;
	u8 *hashstate = NULL;
	unsigned int statesize;
	unsigned int i;
	int err;

	/*
	 * Always test the ahash API.  This works regardless of whether the
	 * algorithm is implemented as ahash or shash.
	 */

	atfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(atfm)) {
		/* Algorithm not available at all: nothing to test. */
		if (PTR_ERR(atfm) == -ENOENT)
			return 0;
		pr_err("alg: hash: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(atfm));
		return PTR_ERR(atfm);
	}
	driver = crypto_ahash_driver_name(atfm);

	req = ahash_request_alloc(atfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	/*
	 * If available also test the shash API, to cover corner cases that may
	 * be missed by testing the ahash API only.
	 */
	err = alloc_shash(driver, type, mask, &stfm, &desc);
	if (err)
		goto out;

	tsgl = kmalloc(sizeof(*tsgl), GFP_KERNEL);
	if (!tsgl || init_test_sglist(tsgl) != 0) {
		pr_err("alg: hash: failed to allocate test buffers for %s\n",
		       driver);
		kfree(tsgl);
		tsgl = NULL;
		err = -ENOMEM;
		goto out;
	}

	/* Size the state buffer for the larger of the two APIs' statesizes */
	statesize = crypto_ahash_statesize(atfm);
	if (stfm)
		statesize = max(statesize, crypto_shash_statesize(stfm));
	hashstate = kmalloc(statesize + TESTMGR_POISON_LEN, GFP_KERNEL);
	if (!hashstate) {
		pr_err("alg: hash: failed to allocate hash state buffer for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < num_vecs; i++) {
		if (fips_enabled && vecs[i].fips_skip)
			continue;

		err = test_hash_vec(&vecs[i], i, req, desc, tsgl, hashstate);
		if (err)
			goto out;
		cond_resched();
	}
	err = test_hash_vs_generic_impl(generic_driver, maxkeysize, req,
					desc, tsgl, hashstate);
out:
	kfree(hashstate);
	if (tsgl) {
		destroy_test_sglist(tsgl);
		kfree(tsgl);
	}
	kfree(desc);
	crypto_free_shash(stfm);
	ahash_request_free(req);
	crypto_free_ahash(atfm);
	return err;
}
  1792. static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
  1793. u32 type, u32 mask)
  1794. {
  1795. const struct hash_testvec *template = desc->suite.hash.vecs;
  1796. unsigned int tcount = desc->suite.hash.count;
  1797. unsigned int nr_unkeyed, nr_keyed;
  1798. unsigned int maxkeysize = 0;
  1799. int err;
  1800. /*
  1801. * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
  1802. * first, before setting a key on the tfm. To make this easier, we
  1803. * require that the unkeyed test vectors (if any) are listed first.
  1804. */
  1805. for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
  1806. if (template[nr_unkeyed].ksize)
  1807. break;
  1808. }
  1809. for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
  1810. if (!template[nr_unkeyed + nr_keyed].ksize) {
  1811. pr_err("alg: hash: test vectors for %s out of order, "
  1812. "unkeyed ones must come first\n", desc->alg);
  1813. return -EINVAL;
  1814. }
  1815. maxkeysize = max_t(unsigned int, maxkeysize,
  1816. template[nr_unkeyed + nr_keyed].ksize);
  1817. }
  1818. err = 0;
  1819. if (nr_unkeyed) {
  1820. err = __alg_test_hash(template, nr_unkeyed, driver, type, mask,
  1821. desc->generic_driver, maxkeysize);
  1822. template += nr_unkeyed;
  1823. }
  1824. if (!err && nr_keyed)
  1825. err = __alg_test_hash(template, nr_keyed, driver, type, mask,
  1826. desc->generic_driver, maxkeysize);
  1827. return err;
  1828. }
/*
 * Test one AEAD test vector in one configuration, doing either encryption
 * (@enc != 0) or decryption.  Besides checking the output, this verifies that
 * the implementation did not modify the request struct, corrupt the src/dst
 * scatterlists, or return an unexpected error code.
 */
static int test_aead_vec_cfg(int enc, const struct aead_testvec *vec,
			     const char *vec_name,
			     const struct testvec_config *cfg,
			     struct aead_request *req,
			     struct cipher_test_sglists *tsgls)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int alignmask = crypto_aead_alignmask(tfm);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	/* Authentication tag length is the ciphertext/plaintext difference */
	const unsigned int authsize = vec->clen - vec->plen;
	const char *driver = crypto_aead_driver_name(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* IV placed at a config-controlled offset relative to alignment */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input[2];
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

	err = do_setkey(crypto_aead_setkey, tfm, vec->key, vec->klen,
			cfg, alignmask);
	if (err && err != vec->setkey_error) {
		pr_err("alg: aead: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
		       driver, vec_name, vec->setkey_error, err,
		       crypto_aead_get_flags(tfm));
		return err;
	}
	if (!err && vec->setkey_error) {
		pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setkey_error);
		return -EINVAL;
	}

	/* Set the authentication tag size */
	err = crypto_aead_setauthsize(tfm, authsize);
	if (err && err != vec->setauthsize_error) {
		pr_err("alg: aead: %s setauthsize failed on test vector %s; expected_error=%d, actual_error=%d\n",
		       driver, vec_name, vec->setauthsize_error, err);
		return err;
	}
	if (!err && vec->setauthsize_error) {
		pr_err("alg: aead: %s setauthsize unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setauthsize_error);
		return -EINVAL;
	}

	/* An expected setup failure ends the test here. */
	if (vec->setkey_error || vec->setauthsize_error)
		return 0;

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (WARN_ON(ivsize > MAX_IVLEN))
		return -EINVAL;
	if (vec->iv)
		memcpy(iv, vec->iv, ivsize);
	else
		memset(iv, 0, ivsize);

	/* Build the src/dst scatterlists */
	input[0].iov_base = (void *)vec->assoc;
	input[0].iov_len = vec->alen;
	input[1].iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input[1].iov_len = enc ? vec->plen : vec->clen;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->alen + (enc ? vec->plen :
						     vec->clen),
					vec->alen + (enc ? vec->clen :
						     vec->plen),
					input, 2);
	if (err) {
		pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_aead_reqsize(tfm));
	aead_request_set_callback(req, req_flags, crypto_req_done, &wait);
	aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
			       enc ? vec->plen : vec->clen, iv);
	aead_request_set_ad(req, vec->alen);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != (enc ? vec->plen : vec->clen) ||
	    req->assoclen != vec->alen ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_aead_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: aead: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		if (req->cryptlen != (enc ? vec->plen : vec->clen))
			pr_err("alg: aead: changed 'req->cryptlen'\n");
		if (req->assoclen != vec->alen)
			pr_err("alg: aead: changed 'req->assoclen'\n");
		if (req->iv != iv)
			pr_err("alg: aead: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: aead: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: aead: changed 'req->dst'\n");
		if (crypto_aead_reqtfm(req) != tfm)
			pr_err("alg: aead: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: aead: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: aead: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: aead: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: aead: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: aead: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}

	/* Check for unexpected success or failure, or wrong error code */
	if ((err == 0 && vec->novrfy) ||
	    (err != vec->crypt_error && !(err == -EBADMSG && vec->novrfy))) {
		char expected_error[32];

		/* Describe which error(s) would have been acceptable */
		if (vec->novrfy &&
		    vec->crypt_error != 0 && vec->crypt_error != -EBADMSG)
			sprintf(expected_error, "-EBADMSG or %d",
				vec->crypt_error);
		else if (vec->novrfy)
			sprintf(expected_error, "-EBADMSG");
		else
			sprintf(expected_error, "%d", vec->crypt_error);
		if (err) {
			pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%s, actual_error=%d, cfg=\"%s\"\n",
			       driver, op, vec_name, expected_error, err,
			       cfg->name);
			return err;
		}
		pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%s, cfg=\"%s\"\n",
		       driver, op, vec_name, expected_error, cfg->name);
		return -EINVAL;
	}
	if (err) /* Expectedly failed. */
		return 0;

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    enc ? vec->clen : vec->plen,
				    vec->alen,
				    enc || cfg->inplace_mode == OUT_OF_PLACE);
	if (err == -EOVERFLOW) {
		pr_err("alg: aead: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: aead: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	return 0;
}
/*
 * Test an AEAD test vector in all the default configurations and, if the
 * extra tests are enabled, in a number of randomly generated configurations.
 *
 * @enc: ENCRYPT to test encryption, DECRYPT to test decryption
 * @vec: the test vector to run
 * @vec_num: index of the vector; used only to build its printable name
 * @req: request for the AEAD transform under test
 * @tsgls: scatterlists to use for the source and destination data
 *
 * Returns 0 on success (including expected failures), or a -errno value.
 */
static int test_aead_vec(int enc, const struct aead_testvec *vec,
			 unsigned int vec_num, struct aead_request *req,
			 struct cipher_test_sglists *tsgls)
{
	char vec_name[16];
	unsigned int i;
	int err;

	/* Inauthentic vectors are only meaningful for decryption tests. */
	if (enc && vec->novrfy)
		return 0;

	sprintf(vec_name, "%u", vec_num);

	/* Run the vector in each of the standard configurations. */
	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
		err = test_aead_vec_cfg(enc, vec, vec_name,
					&default_cipher_testvec_configs[i],
					req, tsgls);
		if (err)
			return err;
	}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	/* Also run the vector in randomly generated configurations. */
	if (!noextratests) {
		struct rnd_state rng;
		struct testvec_config cfg;
		char cfgname[TESTVEC_CONFIG_NAMELEN];

		init_rnd_state(&rng);

		for (i = 0; i < fuzz_iterations; i++) {
			generate_random_testvec_config(&rng, &cfg, cfgname,
						       sizeof(cfgname));
			err = test_aead_vec_cfg(enc, vec, vec_name,
						&cfg, req, tsgls);
			if (err)
				return err;
			cond_resched();
		}
	}
#endif
	return 0;
}
  2036. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/* State shared by the extra (comparison fuzzing / inauthentic-input) AEAD tests */
struct aead_extra_tests_ctx {
	struct rnd_state rng;		/* PRNG state used for generation */
	struct aead_request *req;	/* request for the tfm under test */
	struct crypto_aead *tfm;	/* the AEAD transform under test */
	const struct alg_test_desc *test_desc;	/* test descriptor for the algorithm */
	struct cipher_test_sglists *tsgls;	/* src/dst test scatterlists */
	unsigned int maxdatasize;	/* max total data length to generate */
	unsigned int maxkeysize;	/* largest klen among the suite's vectors */
	struct aead_testvec vec;	/* scratch vector; buffers kmalloc'ed */
	char vec_name[64];		/* printable name of the generated vector */
	char cfgname[TESTVEC_CONFIG_NAMELEN];	/* printable name of the config */
	struct testvec_config cfg;	/* scratch randomly generated config */
};
/*
 * Make at least one random change to a (ciphertext, AAD) pair.  "Ciphertext"
 * here means the full ciphertext including the authentication tag.  The
 * authentication tag (and hence also the ciphertext) is assumed to be nonempty.
 */
static void mutate_aead_message(struct rnd_state *rng,
				struct aead_testvec *vec, bool aad_iv,
				unsigned int ivsize)
{
	/* If the IV rides at the end of the AAD, leave that part untouched. */
	const unsigned int aad_tail_size = aad_iv ? ivsize : 0;
	const unsigned int authsize = vec->clen - vec->plen;

	if (prandom_bool(rng) && vec->alen > aad_tail_size) {
		/* Mutate the AAD */
		flip_random_bit(rng, (u8 *)vec->assoc,
				vec->alen - aad_tail_size);
		/* Sometimes stop here, so that only the AAD was mutated. */
		if (prandom_bool(rng))
			return;
	}
	if (prandom_bool(rng)) {
		/* Mutate auth tag (assuming it's at the end of ciphertext) */
		flip_random_bit(rng, (u8 *)vec->ctext + vec->plen, authsize);
	} else {
		/* Mutate any part of the ciphertext */
		flip_random_bit(rng, (u8 *)vec->ctext, vec->clen);
	}
}
  2076. /*
  2077. * Minimum authentication tag size in bytes at which we assume that we can
  2078. * reliably generate inauthentic messages, i.e. not generate an authentic
  2079. * message by chance.
  2080. */
  2081. #define MIN_COLLISION_FREE_AUTHSIZE 8
/*
 * Generate the AAD and ciphertext for an AEAD test vector whose key, IV, and
 * lengths have already been chosen.  An inauthentic (ciphertext, AAD) pair is
 * produced when 'prefer_inauthentic' is set or randomly 1 time in 4, but only
 * if the tag is long enough (MIN_COLLISION_FREE_AUTHSIZE) to make an
 * accidental authentic message unlikely.
 */
static void generate_aead_message(struct rnd_state *rng,
				  struct aead_request *req,
				  const struct aead_test_suite *suite,
				  struct aead_testvec *vec,
				  bool prefer_inauthentic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	const unsigned int authsize = vec->clen - vec->plen;
	const bool inauthentic = (authsize >= MIN_COLLISION_FREE_AUTHSIZE) &&
				 (prefer_inauthentic ||
				  prandom_u32_below(rng, 4) == 0);

	/* Generate the AAD. */
	generate_random_bytes(rng, (u8 *)vec->assoc, vec->alen);
	if (suite->aad_iv && vec->alen >= ivsize)
		/* Avoid implementation-defined behavior. */
		memcpy((u8 *)vec->assoc + vec->alen - ivsize, vec->iv, ivsize);

	if (inauthentic && prandom_bool(rng)) {
		/* Generate a random ciphertext. */
		generate_random_bytes(rng, (u8 *)vec->ctext, vec->clen);
	} else {
		int i = 0;
		struct scatterlist src[2], dst;
		u8 iv[MAX_IVLEN];
		DECLARE_CRYPTO_WAIT(wait);

		/* Generate a random plaintext and encrypt it. */
		sg_init_table(src, 2);
		if (vec->alen)
			sg_set_buf(&src[i++], vec->assoc, vec->alen);
		if (vec->plen) {
			generate_random_bytes(rng, (u8 *)vec->ptext, vec->plen);
			sg_set_buf(&src[i++], vec->ptext, vec->plen);
		}
		/* dst must also cover the AAD region; it is stripped below. */
		sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
		memcpy(iv, vec->iv, ivsize);
		aead_request_set_callback(req, 0, crypto_req_done, &wait);
		aead_request_set_crypt(req, src, &dst, vec->plen, iv);
		aead_request_set_ad(req, vec->alen);
		vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req),
						   &wait);
		/* If encryption failed, we're done. */
		if (vec->crypt_error != 0)
			return;
		/* Drop the pass-through AAD so ctext holds only ciphertext. */
		memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
		if (!inauthentic)
			return;
		/*
		 * Mutate the authentic (ciphertext, AAD) pair to get an
		 * inauthentic one.
		 */
		mutate_aead_message(rng, vec, suite->aad_iv, ivsize);
	}
	vec->novrfy = 1;
	if (suite->einval_allowed)
		vec->crypt_error = -EINVAL;
}
/*
 * Generate an AEAD test vector 'vec' using the implementation specified by
 * 'req'.  The buffers in 'vec' must already be allocated.
 *
 * If 'prefer_inauthentic' is true, then this function will generate inauthentic
 * test vectors (i.e. vectors with 'vec->novrfy=1') more often.
 */
static void generate_random_aead_testvec(struct rnd_state *rng,
					 struct aead_request *req,
					 struct aead_testvec *vec,
					 const struct aead_test_suite *suite,
					 unsigned int maxkeysize,
					 unsigned int maxdatasize,
					 char *name, size_t max_namelen,
					 bool prefer_inauthentic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	const unsigned int maxauthsize = crypto_aead_maxauthsize(tfm);
	unsigned int authsize;
	unsigned int total_len;

	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
	vec->klen = maxkeysize;
	if (prandom_u32_below(rng, 4) == 0)
		vec->klen = prandom_u32_below(rng, maxkeysize + 1);
	generate_random_bytes(rng, (u8 *)vec->key, vec->klen);
	vec->setkey_error = crypto_aead_setkey(tfm, vec->key, vec->klen);

	/* IV */
	generate_random_bytes(rng, (u8 *)vec->iv, ivsize);

	/* Tag length: in [0, maxauthsize], but usually choose maxauthsize */
	authsize = maxauthsize;
	if (prandom_u32_below(rng, 4) == 0)
		authsize = prandom_u32_below(rng, maxauthsize + 1);
	/* A short tag can't reliably yield inauthentic messages; bump it up. */
	if (prefer_inauthentic && authsize < MIN_COLLISION_FREE_AUTHSIZE)
		authsize = MIN_COLLISION_FREE_AUTHSIZE;
	if (WARN_ON(authsize > maxdatasize))
		authsize = maxdatasize;
	/* Reserve space for the tag within the total data size budget. */
	maxdatasize -= authsize;
	vec->setauthsize_error = crypto_aead_setauthsize(tfm, authsize);

	/* AAD, plaintext, and ciphertext lengths */
	total_len = generate_random_length(rng, maxdatasize);
	if (prandom_u32_below(rng, 4) == 0)
		vec->alen = 0;
	else
		vec->alen = generate_random_length(rng, total_len);
	vec->plen = total_len - vec->alen;
	vec->clen = vec->plen + authsize;

	/*
	 * Generate the AAD, plaintext, and ciphertext.  Not applicable if the
	 * key or the authentication tag size couldn't be set.
	 */
	vec->novrfy = 0;
	vec->crypt_error = 0;
	if (vec->setkey_error == 0 && vec->setauthsize_error == 0)
		generate_aead_message(rng, req, suite, vec, prefer_inauthentic);
	snprintf(name, max_namelen,
		 "\"random: alen=%u plen=%u authsize=%u klen=%u novrfy=%d\"",
		 vec->alen, vec->plen, authsize, vec->klen, vec->novrfy);
}
  2197. static void try_to_generate_inauthentic_testvec(
  2198. struct aead_extra_tests_ctx *ctx)
  2199. {
  2200. int i;
  2201. for (i = 0; i < 10; i++) {
  2202. generate_random_aead_testvec(&ctx->rng, ctx->req, &ctx->vec,
  2203. &ctx->test_desc->suite.aead,
  2204. ctx->maxkeysize, ctx->maxdatasize,
  2205. ctx->vec_name,
  2206. sizeof(ctx->vec_name), true);
  2207. if (ctx->vec.novrfy)
  2208. return;
  2209. }
  2210. }
  2211. /*
  2212. * Generate inauthentic test vectors (i.e. ciphertext, AAD pairs that aren't the
  2213. * result of an encryption with the key) and verify that decryption fails.
  2214. */
  2215. static int test_aead_inauthentic_inputs(struct aead_extra_tests_ctx *ctx)
  2216. {
  2217. unsigned int i;
  2218. int err;
  2219. for (i = 0; i < fuzz_iterations * 8; i++) {
  2220. /*
  2221. * Since this part of the tests isn't comparing the
  2222. * implementation to another, there's no point in testing any
  2223. * test vectors other than inauthentic ones (vec.novrfy=1) here.
  2224. *
  2225. * If we're having trouble generating such a test vector, e.g.
  2226. * if the algorithm keeps rejecting the generated keys, don't
  2227. * retry forever; just continue on.
  2228. */
  2229. try_to_generate_inauthentic_testvec(ctx);
  2230. if (ctx->vec.novrfy) {
  2231. generate_random_testvec_config(&ctx->rng, &ctx->cfg,
  2232. ctx->cfgname,
  2233. sizeof(ctx->cfgname));
  2234. err = test_aead_vec_cfg(DECRYPT, &ctx->vec,
  2235. ctx->vec_name, &ctx->cfg,
  2236. ctx->req, ctx->tsgls);
  2237. if (err)
  2238. return err;
  2239. }
  2240. cond_resched();
  2241. }
  2242. return 0;
  2243. }
/*
 * Test the AEAD algorithm against the corresponding generic implementation, if
 * one is available.
 */
static int test_aead_vs_generic_impl(struct aead_extra_tests_ctx *ctx)
{
	struct crypto_aead *tfm = ctx->tfm;
	const char *algname = crypto_aead_alg(tfm)->base.cra_name;
	const char *driver = crypto_aead_driver_name(tfm);
	const char *generic_driver = ctx->test_desc->generic_driver;
	char _generic_driver[CRYPTO_MAX_ALG_NAME];
	struct crypto_aead *generic_tfm = NULL;
	struct aead_request *generic_req = NULL;
	unsigned int i;
	int err;

	if (!generic_driver) { /* Use default naming convention? */
		err = build_generic_driver_name(algname, _generic_driver);
		if (err)
			return err;
		generic_driver = _generic_driver;
	}

	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
		return 0;

	generic_tfm = crypto_alloc_aead(generic_driver, 0, 0);
	if (IS_ERR(generic_tfm)) {
		err = PTR_ERR(generic_tfm);
		/* A missing generic impl is not a failure of the tested driver. */
		if (err == -ENOENT) {
			pr_warn("alg: aead: skipping comparison tests for %s because %s is unavailable\n",
				driver, generic_driver);
			return 0;
		}
		pr_err("alg: aead: error allocating %s (generic impl of %s): %d\n",
		       generic_driver, algname, err);
		return err;
	}

	generic_req = aead_request_alloc(generic_tfm, GFP_KERNEL);
	if (!generic_req) {
		err = -ENOMEM;
		goto out;
	}

	/* Check the algorithm properties for consistency. */

	if (crypto_aead_maxauthsize(tfm) !=
	    crypto_aead_maxauthsize(generic_tfm)) {
		pr_err("alg: aead: maxauthsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_maxauthsize(tfm),
		       crypto_aead_maxauthsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (crypto_aead_ivsize(tfm) != crypto_aead_ivsize(generic_tfm)) {
		pr_err("alg: aead: ivsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_ivsize(tfm),
		       crypto_aead_ivsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (crypto_aead_blocksize(tfm) != crypto_aead_blocksize(generic_tfm)) {
		pr_err("alg: aead: blocksize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_blocksize(tfm),
		       crypto_aead_blocksize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	/*
	 * Now generate test vectors using the generic implementation, and test
	 * the other implementation against them.
	 */
	for (i = 0; i < fuzz_iterations * 8; i++) {
		generate_random_aead_testvec(&ctx->rng, generic_req, &ctx->vec,
					     &ctx->test_desc->suite.aead,
					     ctx->maxkeysize, ctx->maxdatasize,
					     ctx->vec_name,
					     sizeof(ctx->vec_name), false);
		generate_random_testvec_config(&ctx->rng, &ctx->cfg,
					       ctx->cfgname,
					       sizeof(ctx->cfgname));
		/* Inauthentic vectors are only usable for decryption tests. */
		if (!ctx->vec.novrfy) {
			err = test_aead_vec_cfg(ENCRYPT, &ctx->vec,
						ctx->vec_name, &ctx->cfg,
						ctx->req, ctx->tsgls);
			if (err)
				goto out;
		}
		if (ctx->vec.crypt_error == 0 || ctx->vec.novrfy) {
			err = test_aead_vec_cfg(DECRYPT, &ctx->vec,
						ctx->vec_name, &ctx->cfg,
						ctx->req, ctx->tsgls);
			if (err)
				goto out;
		}
		cond_resched();
	}
	err = 0;
out:
	crypto_free_aead(generic_tfm);
	aead_request_free(generic_req);
	return err;
}
/*
 * Run the extra AEAD tests: comparison fuzzing against the generic
 * implementation, followed by the inauthentic-input tests.  Allocates a
 * shared context plus the scratch test-vector buffers and frees them on exit.
 */
static int test_aead_extra(const struct alg_test_desc *test_desc,
			   struct aead_request *req,
			   struct cipher_test_sglists *tsgls)
{
	struct aead_extra_tests_ctx *ctx;
	unsigned int i;
	int err;

	if (noextratests)
		return 0;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;
	init_rnd_state(&ctx->rng);
	ctx->req = req;
	ctx->tfm = crypto_aead_reqtfm(req);
	ctx->test_desc = test_desc;
	ctx->tsgls = tsgls;
	ctx->maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
	/* Use the largest key length found among the suite's test vectors. */
	ctx->maxkeysize = 0;
	for (i = 0; i < test_desc->suite.aead.count; i++)
		ctx->maxkeysize = max_t(unsigned int, ctx->maxkeysize,
					test_desc->suite.aead.vecs[i].klen);

	/* Scratch buffers that the generated test vectors point into. */
	ctx->vec.key = kmalloc(ctx->maxkeysize, GFP_KERNEL);
	ctx->vec.iv = kmalloc(crypto_aead_ivsize(ctx->tfm), GFP_KERNEL);
	ctx->vec.assoc = kmalloc(ctx->maxdatasize, GFP_KERNEL);
	ctx->vec.ptext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
	ctx->vec.ctext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
	if (!ctx->vec.key || !ctx->vec.iv || !ctx->vec.assoc ||
	    !ctx->vec.ptext || !ctx->vec.ctext) {
		err = -ENOMEM;
		goto out;
	}
	err = test_aead_vs_generic_impl(ctx);
	if (err)
		goto out;
	err = test_aead_inauthentic_inputs(ctx);
out:
	/* kfree(NULL) is a no-op, so partial allocation failures are fine. */
	kfree(ctx->vec.key);
	kfree(ctx->vec.iv);
	kfree(ctx->vec.assoc);
	kfree(ctx->vec.ptext);
	kfree(ctx->vec.ctext);
	kfree(ctx);
	return err;
}
  2387. #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Stub: extra tests are compiled out without CONFIG_CRYPTO_MANAGER_EXTRA_TESTS. */
static int test_aead_extra(const struct alg_test_desc *test_desc,
			   struct aead_request *req,
			   struct cipher_test_sglists *tsgls)
{
	return 0;
}
  2394. #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  2395. static int test_aead(int enc, const struct aead_test_suite *suite,
  2396. struct aead_request *req,
  2397. struct cipher_test_sglists *tsgls)
  2398. {
  2399. unsigned int i;
  2400. int err;
  2401. for (i = 0; i < suite->count; i++) {
  2402. err = test_aead_vec(enc, &suite->vecs[i], i, req, tsgls);
  2403. if (err)
  2404. return err;
  2405. cond_resched();
  2406. }
  2407. return 0;
  2408. }
/*
 * alg_test() entry point for AEAD algorithms: allocate the transform, a
 * request, and the test scatterlists, then run the encryption tests, the
 * decryption tests, and (if enabled) the extra tests.
 */
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	const struct aead_test_suite *suite = &desc->suite.aead;
	struct crypto_aead *tfm;
	struct aead_request *req = NULL;
	struct cipher_test_sglists *tsgls = NULL;
	int err;

	if (suite->count <= 0) {
		pr_err("alg: aead: empty test suite for %s\n", driver);
		return -EINVAL;
	}

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		/* An unavailable algorithm is not treated as a test failure. */
		if (PTR_ERR(tfm) == -ENOENT)
			return 0;
		pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	/* Report errors against the driver name actually instantiated. */
	driver = crypto_aead_driver_name(tfm);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	tsgls = alloc_cipher_test_sglists();
	if (!tsgls) {
		pr_err("alg: aead: failed to allocate test buffers for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	err = test_aead(ENCRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_aead(DECRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_aead_extra(desc, req, tsgls);
out:
	free_cipher_test_sglists(tsgls);
	aead_request_free(req);
	crypto_free_aead(tfm);
	return err;
}
/*
 * Test a single-block cipher (crypto_cipher) against the given test vectors.
 * Each vector's data is copied into a page and processed in place, one cipher
 * block at a time, then compared against the expected result.
 *
 * @tfm: the cipher transform to test
 * @enc: ENCRYPT or DECRYPT
 * @template: array of test vectors
 * @tcount: number of entries in @template
 *
 * Returns 0 on success or a -errno value on the first failure.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	/* 'j' counts the vectors actually run, for use in error messages. */
	j = 0;
	for (i = 0; i < tcount; i++) {

		if (fips_enabled && template[i].fips_skip)
			continue;

		input = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		/* The whole vector must fit into the single page used below. */
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (ret) {
			/* Some vectors intentionally use a rejected key. */
			if (ret == template[i].setkey_error)
				continue;
			pr_err("alg: cipher: %s setkey failed on test vector %u; expected_error=%d, actual_error=%d, flags=%#x\n",
			       algo, j, template[i].setkey_error, ret,
			       crypto_cipher_get_flags(tfm));
			goto out;
		}
		if (template[i].setkey_error) {
			pr_err("alg: cipher: %s setkey unexpectedly succeeded on test vector %u; expected_error=%d\n",
			       algo, j, template[i].setkey_error);
			ret = -EINVAL;
			goto out;
		}

		/* Encrypt or decrypt in place, one cipher block at a time. */
		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
/*
 * Test one skcipher test vector in one particular configuration: set the key,
 * build the scatterlists, run the encryption or decryption, then check the
 * error code, the output data, the output IV (if applicable), and that the
 * implementation didn't corrupt the request struct or the scatterlists.
 */
static int test_skcipher_vec_cfg(int enc, const struct cipher_testvec *vec,
				 const char *vec_name,
				 const struct testvec_config *cfg,
				 struct skcipher_request *req,
				 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const char *driver = crypto_skcipher_driver_name(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized IV buffer so the configured iv_offset can be applied. */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input;
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_skcipher_clear_flags(tfm,
					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = do_setkey(crypto_skcipher_setkey, tfm, vec->key, vec->klen,
			cfg, alignmask);
	if (err) {
		/* Failure may be expected for this vector. */
		if (err == vec->setkey_error)
			return 0;
		pr_err("alg: skcipher: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
		       driver, vec_name, vec->setkey_error, err,
		       crypto_skcipher_get_flags(tfm));
		return err;
	}
	if (vec->setkey_error) {
		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setkey_error);
		return -EINVAL;
	}

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (ivsize) {
		if (WARN_ON(ivsize > MAX_IVLEN))
			return -EINVAL;
		if (vec->generates_iv && !enc)
			/* Decryption starts from the IV that encryption produced. */
			memcpy(iv, vec->iv_out, ivsize);
		else if (vec->iv)
			memcpy(iv, vec->iv, ivsize);
		else
			memset(iv, 0, ivsize);
	} else {
		if (vec->generates_iv) {
			pr_err("alg: skcipher: %s has ivsize=0 but test vector %s generates IV!\n",
			       driver, vec_name);
			return -EINVAL;
		}
		iv = NULL;
	}

	/* Build the src/dst scatterlists */
	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input.iov_len = vec->len;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->len, vec->len, &input, 1);
	if (err) {
		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
				   vec->len, iv);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != vec->len ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_skcipher_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: skcipher: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		if (req->cryptlen != vec->len)
			pr_err("alg: skcipher: changed 'req->cryptlen'\n");
		if (req->iv != iv)
			pr_err("alg: skcipher: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->dst'\n");
		if (crypto_skcipher_reqtfm(req) != tfm)
			pr_err("alg: skcipher: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: skcipher: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: skcipher: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: skcipher: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: skcipher: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: skcipher: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}

	/* Check for success or failure */
	if (err) {
		if (err == vec->crypt_error)
			return 0;
		pr_err("alg: skcipher: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, err, cfg->name);
		return err;
	}
	if (vec->crypt_error) {
		pr_err("alg: skcipher: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, cfg->name);
		return -EINVAL;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    vec->len, 0, true);
	if (err == -EOVERFLOW) {
		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* If applicable, check that the algorithm generated the correct IV */
	if (vec->iv_out && memcmp(iv, vec->iv_out, ivsize) != 0) {
		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		hexdump(iv, ivsize);
		return -EINVAL;
	}
	return 0;
}
/*
 * Test an skcipher test vector in all the default configurations and, if the
 * extra tests are enabled, in a number of randomly generated configurations.
 */
static int test_skcipher_vec(int enc, const struct cipher_testvec *vec,
			     unsigned int vec_num,
			     struct skcipher_request *req,
			     struct cipher_test_sglists *tsgls)
{
	char vec_name[16];
	unsigned int i;
	int err;

	/* Skip vectors flagged as not applicable when FIPS mode is enabled. */
	if (fips_enabled && vec->fips_skip)
		return 0;

	sprintf(vec_name, "%u", vec_num);

	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
		err = test_skcipher_vec_cfg(enc, vec, vec_name,
					    &default_cipher_testvec_configs[i],
					    req, tsgls);
		if (err)
			return err;
	}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	/* Also run the vector in randomly generated configurations. */
	if (!noextratests) {
		struct rnd_state rng;
		struct testvec_config cfg;
		char cfgname[TESTVEC_CONFIG_NAMELEN];

		init_rnd_state(&rng);

		for (i = 0; i < fuzz_iterations; i++) {
			generate_random_testvec_config(&rng, &cfg, cfgname,
						       sizeof(cfgname));
			err = test_skcipher_vec_cfg(enc, vec, vec_name,
						    &cfg, req, tsgls);
			if (err)
				return err;
			cond_resched();
		}
	}
#endif
	return 0;
}
  2721. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * Generate a symmetric cipher test vector from the given implementation.
 * Assumes the buffers in 'vec' were already allocated.
 */
static void generate_random_cipher_testvec(struct rnd_state *rng,
					   struct skcipher_request *req,
					   struct cipher_testvec *vec,
					   unsigned int maxdatasize,
					   char *name, size_t max_namelen)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct scatterlist src, dst;
	u8 iv[MAX_IVLEN];
	DECLARE_CRYPTO_WAIT(wait);

	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
	vec->klen = maxkeysize;
	if (prandom_u32_below(rng, 4) == 0)
		vec->klen = prandom_u32_below(rng, maxkeysize + 1);
	generate_random_bytes(rng, (u8 *)vec->key, vec->klen);
	vec->setkey_error = crypto_skcipher_setkey(tfm, vec->key, vec->klen);

	/* IV */
	generate_random_bytes(rng, (u8 *)vec->iv, ivsize);

	/* Plaintext */
	vec->len = generate_random_length(rng, maxdatasize);
	generate_random_bytes(rng, (u8 *)vec->ptext, vec->len);

	/* If the key couldn't be set, no need to continue to encrypt. */
	if (vec->setkey_error)
		goto done;

	/* Ciphertext */
	sg_init_one(&src, vec->ptext, vec->len);
	sg_init_one(&dst, vec->ctext, vec->len);
	/* Copy the IV: the implementation may modify the buffer it's given. */
	memcpy(iv, vec->iv, ivsize);
	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &src, &dst, vec->len, iv);
	vec->crypt_error = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
	if (vec->crypt_error != 0) {
		/*
		 * The only acceptable error here is for an invalid length, so
		 * skcipher decryption should fail with the same error too.
		 * We'll test for this.  But to keep the API usage well-defined,
		 * explicitly initialize the ciphertext buffer too.
		 */
		memset((u8 *)vec->ctext, 0, vec->len);
	}
done:
	snprintf(name, max_namelen, "\"random: len=%u klen=%u\"",
		 vec->len, vec->klen);
}
/*
 * Test the skcipher algorithm represented by @req against the corresponding
 * generic implementation, if one is available.
 *
 * Random test vectors are produced with the generic transform and then
 * replayed through @req's transform under randomized scatterlist/request
 * configurations.  Returns 0 on success (or when the comparison is skipped),
 * a negative errno otherwise.
 */
static int test_skcipher_vs_generic_impl(const char *generic_driver,
					 struct skcipher_request *req,
					 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const unsigned int blocksize = crypto_skcipher_blocksize(tfm);
	/* Leave room for the poison bytes appended to the two-page buffers. */
	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
	const char *algname = crypto_skcipher_alg(tfm)->base.cra_name;
	const char *driver = crypto_skcipher_driver_name(tfm);
	struct rnd_state rng;
	char _generic_driver[CRYPTO_MAX_ALG_NAME];
	struct crypto_skcipher *generic_tfm = NULL;
	struct skcipher_request *generic_req = NULL;
	unsigned int i;
	struct cipher_testvec vec = { 0 };
	char vec_name[64];
	struct testvec_config *cfg;
	char cfgname[TESTVEC_CONFIG_NAMELEN];
	int err;

	if (noextratests)
		return 0;

	/* Keywrap isn't supported here yet as it handles its IV differently. */
	if (strncmp(algname, "kw(", 3) == 0)
		return 0;

	init_rnd_state(&rng);

	if (!generic_driver) { /* Use default naming convention? */
		err = build_generic_driver_name(algname, _generic_driver);
		if (err)
			return err;
		generic_driver = _generic_driver;
	}

	/* Nothing to compare if @tfm already is the generic implementation. */
	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
		return 0;

	generic_tfm = crypto_alloc_skcipher(generic_driver, 0, 0);
	if (IS_ERR(generic_tfm)) {
		err = PTR_ERR(generic_tfm);
		/* An unavailable generic impl only skips the comparison. */
		if (err == -ENOENT) {
			pr_warn("alg: skcipher: skipping comparison tests for %s because %s is unavailable\n",
				driver, generic_driver);
			return 0;
		}
		pr_err("alg: skcipher: error allocating %s (generic impl of %s): %d\n",
		       generic_driver, algname, err);
		return err;
	}

	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
	if (!cfg) {
		err = -ENOMEM;
		goto out;
	}

	generic_req = skcipher_request_alloc(generic_tfm, GFP_KERNEL);
	if (!generic_req) {
		err = -ENOMEM;
		goto out;
	}

	/* Check the algorithm properties for consistency. */
	if (crypto_skcipher_min_keysize(tfm) !=
	    crypto_skcipher_min_keysize(generic_tfm)) {
		pr_err("alg: skcipher: min keysize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_skcipher_min_keysize(tfm),
		       crypto_skcipher_min_keysize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (maxkeysize != crypto_skcipher_max_keysize(generic_tfm)) {
		pr_err("alg: skcipher: max keysize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, maxkeysize,
		       crypto_skcipher_max_keysize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (ivsize != crypto_skcipher_ivsize(generic_tfm)) {
		pr_err("alg: skcipher: ivsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, ivsize, crypto_skcipher_ivsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (blocksize != crypto_skcipher_blocksize(generic_tfm)) {
		pr_err("alg: skcipher: blocksize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, blocksize,
		       crypto_skcipher_blocksize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	/*
	 * Now generate test vectors using the generic implementation, and test
	 * the other implementation against them.
	 */
	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
	vec.iv = kmalloc(ivsize, GFP_KERNEL);
	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
	if (!vec.key || !vec.iv || !vec.ptext || !vec.ctext) {
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < fuzz_iterations * 8; i++) {
		/* Fresh random vector from the generic impl, fresh config. */
		generate_random_cipher_testvec(&rng, generic_req, &vec,
					       maxdatasize,
					       vec_name, sizeof(vec_name));
		generate_random_testvec_config(&rng, cfg, cfgname,
					       sizeof(cfgname));

		err = test_skcipher_vec_cfg(ENCRYPT, &vec, vec_name,
					    cfg, req, tsgls);
		if (err)
			goto out;
		err = test_skcipher_vec_cfg(DECRYPT, &vec, vec_name,
					    cfg, req, tsgls);
		if (err)
			goto out;
		cond_resched();
	}
	err = 0;
out:
	kfree(cfg);
	kfree(vec.key);
	kfree(vec.iv);
	kfree(vec.ptext);
	kfree(vec.ctext);
	crypto_free_skcipher(generic_tfm);
	skcipher_request_free(generic_req);
	return err;
}
  2901. #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Extra tests are compiled out; there is nothing to compare against. */
static int test_skcipher_vs_generic_impl(const char *generic_driver,
					 struct skcipher_request *req,
					 struct cipher_test_sglists *tsgls)
{
	return 0;
}
  2908. #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
  2909. static int test_skcipher(int enc, const struct cipher_test_suite *suite,
  2910. struct skcipher_request *req,
  2911. struct cipher_test_sglists *tsgls)
  2912. {
  2913. unsigned int i;
  2914. int err;
  2915. for (i = 0; i < suite->count; i++) {
  2916. err = test_skcipher_vec(enc, &suite->vecs[i], i, req, tsgls);
  2917. if (err)
  2918. return err;
  2919. cond_resched();
  2920. }
  2921. return 0;
  2922. }
/*
 * Entry point for skcipher self-tests: allocate the transform named @driver,
 * run the suite in both directions, then the generic-impl comparison tests.
 * Returns 0 on success (or if the algorithm is unavailable), negative errno
 * otherwise.
 */
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	const struct cipher_test_suite *suite = &desc->suite.cipher;
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct cipher_test_sglists *tsgls = NULL;
	int err;

	if (suite->count <= 0) {
		pr_err("alg: skcipher: empty test suite for %s\n", driver);
		return -EINVAL;
	}

	tfm = crypto_alloc_skcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		/* -ENOENT: algorithm not available, which is not a failure. */
		if (PTR_ERR(tfm) == -ENOENT)
			return 0;
		pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	/* Report errors against the resolved driver name, not the alias. */
	driver = crypto_skcipher_driver_name(tfm);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	tsgls = alloc_cipher_test_sglists();
	if (!tsgls) {
		pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	err = test_skcipher(ENCRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_skcipher(DECRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_skcipher_vs_generic_impl(desc->generic_driver, req, tsgls);
out:
	/* Safe on the early-error paths: both helpers accept NULL. */
	free_cipher_test_sglists(tsgls);
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return err;
}
  2971. static int test_comp(struct crypto_comp *tfm,
  2972. const struct comp_testvec *ctemplate,
  2973. const struct comp_testvec *dtemplate,
  2974. int ctcount, int dtcount)
  2975. {
  2976. const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
  2977. char *output, *decomp_output;
  2978. unsigned int i;
  2979. int ret;
  2980. output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  2981. if (!output)
  2982. return -ENOMEM;
  2983. decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  2984. if (!decomp_output) {
  2985. kfree(output);
  2986. return -ENOMEM;
  2987. }
  2988. for (i = 0; i < ctcount; i++) {
  2989. int ilen;
  2990. unsigned int dlen = COMP_BUF_SIZE;
  2991. memset(output, 0, COMP_BUF_SIZE);
  2992. memset(decomp_output, 0, COMP_BUF_SIZE);
  2993. ilen = ctemplate[i].inlen;
  2994. ret = crypto_comp_compress(tfm, ctemplate[i].input,
  2995. ilen, output, &dlen);
  2996. if (ret) {
  2997. printk(KERN_ERR "alg: comp: compression failed "
  2998. "on test %d for %s: ret=%d\n", i + 1, algo,
  2999. -ret);
  3000. goto out;
  3001. }
  3002. ilen = dlen;
  3003. dlen = COMP_BUF_SIZE;
  3004. ret = crypto_comp_decompress(tfm, output,
  3005. ilen, decomp_output, &dlen);
  3006. if (ret) {
  3007. pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
  3008. i + 1, algo, -ret);
  3009. goto out;
  3010. }
  3011. if (dlen != ctemplate[i].inlen) {
  3012. printk(KERN_ERR "alg: comp: Compression test %d "
  3013. "failed for %s: output len = %d\n", i + 1, algo,
  3014. dlen);
  3015. ret = -EINVAL;
  3016. goto out;
  3017. }
  3018. if (memcmp(decomp_output, ctemplate[i].input,
  3019. ctemplate[i].inlen)) {
  3020. pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
  3021. i + 1, algo);
  3022. hexdump(decomp_output, dlen);
  3023. ret = -EINVAL;
  3024. goto out;
  3025. }
  3026. }
  3027. for (i = 0; i < dtcount; i++) {
  3028. int ilen;
  3029. unsigned int dlen = COMP_BUF_SIZE;
  3030. memset(decomp_output, 0, COMP_BUF_SIZE);
  3031. ilen = dtemplate[i].inlen;
  3032. ret = crypto_comp_decompress(tfm, dtemplate[i].input,
  3033. ilen, decomp_output, &dlen);
  3034. if (ret) {
  3035. printk(KERN_ERR "alg: comp: decompression failed "
  3036. "on test %d for %s: ret=%d\n", i + 1, algo,
  3037. -ret);
  3038. goto out;
  3039. }
  3040. if (dlen != dtemplate[i].outlen) {
  3041. printk(KERN_ERR "alg: comp: Decompression test %d "
  3042. "failed for %s: output len = %d\n", i + 1, algo,
  3043. dlen);
  3044. ret = -EINVAL;
  3045. goto out;
  3046. }
  3047. if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
  3048. printk(KERN_ERR "alg: comp: Decompression test %d "
  3049. "failed for %s\n", i + 1, algo);
  3050. hexdump(decomp_output, dlen);
  3051. ret = -EINVAL;
  3052. goto out;
  3053. }
  3054. }
  3055. ret = 0;
  3056. out:
  3057. kfree(decomp_output);
  3058. kfree(output);
  3059. return ret;
  3060. }
  3061. static int test_acomp(struct crypto_acomp *tfm,
  3062. const struct comp_testvec *ctemplate,
  3063. const struct comp_testvec *dtemplate,
  3064. int ctcount, int dtcount)
  3065. {
  3066. const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
  3067. unsigned int i;
  3068. char *output, *decomp_out;
  3069. int ret;
  3070. struct scatterlist src, dst;
  3071. struct acomp_req *req;
  3072. struct crypto_wait wait;
  3073. output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  3074. if (!output)
  3075. return -ENOMEM;
  3076. decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  3077. if (!decomp_out) {
  3078. kfree(output);
  3079. return -ENOMEM;
  3080. }
  3081. for (i = 0; i < ctcount; i++) {
  3082. unsigned int dlen = COMP_BUF_SIZE;
  3083. int ilen = ctemplate[i].inlen;
  3084. void *input_vec;
  3085. input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
  3086. if (!input_vec) {
  3087. ret = -ENOMEM;
  3088. goto out;
  3089. }
  3090. memset(output, 0, dlen);
  3091. crypto_init_wait(&wait);
  3092. sg_init_one(&src, input_vec, ilen);
  3093. sg_init_one(&dst, output, dlen);
  3094. req = acomp_request_alloc(tfm);
  3095. if (!req) {
  3096. pr_err("alg: acomp: request alloc failed for %s\n",
  3097. algo);
  3098. kfree(input_vec);
  3099. ret = -ENOMEM;
  3100. goto out;
  3101. }
  3102. acomp_request_set_params(req, &src, &dst, ilen, dlen);
  3103. acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
  3104. crypto_req_done, &wait);
  3105. ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
  3106. if (ret) {
  3107. pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
  3108. i + 1, algo, -ret);
  3109. kfree(input_vec);
  3110. acomp_request_free(req);
  3111. goto out;
  3112. }
  3113. ilen = req->dlen;
  3114. dlen = COMP_BUF_SIZE;
  3115. sg_init_one(&src, output, ilen);
  3116. sg_init_one(&dst, decomp_out, dlen);
  3117. crypto_init_wait(&wait);
  3118. acomp_request_set_params(req, &src, &dst, ilen, dlen);
  3119. ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
  3120. if (ret) {
  3121. pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
  3122. i + 1, algo, -ret);
  3123. kfree(input_vec);
  3124. acomp_request_free(req);
  3125. goto out;
  3126. }
  3127. if (req->dlen != ctemplate[i].inlen) {
  3128. pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
  3129. i + 1, algo, req->dlen);
  3130. ret = -EINVAL;
  3131. kfree(input_vec);
  3132. acomp_request_free(req);
  3133. goto out;
  3134. }
  3135. if (memcmp(input_vec, decomp_out, req->dlen)) {
  3136. pr_err("alg: acomp: Compression test %d failed for %s\n",
  3137. i + 1, algo);
  3138. hexdump(output, req->dlen);
  3139. ret = -EINVAL;
  3140. kfree(input_vec);
  3141. acomp_request_free(req);
  3142. goto out;
  3143. }
  3144. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  3145. crypto_init_wait(&wait);
  3146. sg_init_one(&src, input_vec, ilen);
  3147. acomp_request_set_params(req, &src, NULL, ilen, 0);
  3148. ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
  3149. if (ret) {
  3150. pr_err("alg: acomp: compression failed on NULL dst buffer test %d for %s: ret=%d\n",
  3151. i + 1, algo, -ret);
  3152. kfree(input_vec);
  3153. acomp_request_free(req);
  3154. goto out;
  3155. }
  3156. #endif
  3157. kfree(input_vec);
  3158. acomp_request_free(req);
  3159. }
  3160. for (i = 0; i < dtcount; i++) {
  3161. unsigned int dlen = COMP_BUF_SIZE;
  3162. int ilen = dtemplate[i].inlen;
  3163. void *input_vec;
  3164. input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
  3165. if (!input_vec) {
  3166. ret = -ENOMEM;
  3167. goto out;
  3168. }
  3169. memset(output, 0, dlen);
  3170. crypto_init_wait(&wait);
  3171. sg_init_one(&src, input_vec, ilen);
  3172. sg_init_one(&dst, output, dlen);
  3173. req = acomp_request_alloc(tfm);
  3174. if (!req) {
  3175. pr_err("alg: acomp: request alloc failed for %s\n",
  3176. algo);
  3177. kfree(input_vec);
  3178. ret = -ENOMEM;
  3179. goto out;
  3180. }
  3181. acomp_request_set_params(req, &src, &dst, ilen, dlen);
  3182. acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
  3183. crypto_req_done, &wait);
  3184. ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
  3185. if (ret) {
  3186. pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
  3187. i + 1, algo, -ret);
  3188. kfree(input_vec);
  3189. acomp_request_free(req);
  3190. goto out;
  3191. }
  3192. if (req->dlen != dtemplate[i].outlen) {
  3193. pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
  3194. i + 1, algo, req->dlen);
  3195. ret = -EINVAL;
  3196. kfree(input_vec);
  3197. acomp_request_free(req);
  3198. goto out;
  3199. }
  3200. if (memcmp(output, dtemplate[i].output, req->dlen)) {
  3201. pr_err("alg: acomp: Decompression test %d failed for %s\n",
  3202. i + 1, algo);
  3203. hexdump(output, req->dlen);
  3204. ret = -EINVAL;
  3205. kfree(input_vec);
  3206. acomp_request_free(req);
  3207. goto out;
  3208. }
  3209. #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  3210. crypto_init_wait(&wait);
  3211. acomp_request_set_params(req, &src, NULL, ilen, 0);
  3212. ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
  3213. if (ret) {
  3214. pr_err("alg: acomp: decompression failed on NULL dst buffer test %d for %s: ret=%d\n",
  3215. i + 1, algo, -ret);
  3216. kfree(input_vec);
  3217. acomp_request_free(req);
  3218. goto out;
  3219. }
  3220. #endif
  3221. kfree(input_vec);
  3222. acomp_request_free(req);
  3223. }
  3224. ret = 0;
  3225. out:
  3226. kfree(decomp_out);
  3227. kfree(output);
  3228. return ret;
  3229. }
  3230. static int test_cprng(struct crypto_rng *tfm,
  3231. const struct cprng_testvec *template,
  3232. unsigned int tcount)
  3233. {
  3234. const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
  3235. int err = 0, i, j, seedsize;
  3236. u8 *seed;
  3237. char result[32];
  3238. seedsize = crypto_rng_seedsize(tfm);
  3239. seed = kmalloc(seedsize, GFP_KERNEL);
  3240. if (!seed) {
  3241. printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
  3242. "for %s\n", algo);
  3243. return -ENOMEM;
  3244. }
  3245. for (i = 0; i < tcount; i++) {
  3246. memset(result, 0, 32);
  3247. memcpy(seed, template[i].v, template[i].vlen);
  3248. memcpy(seed + template[i].vlen, template[i].key,
  3249. template[i].klen);
  3250. memcpy(seed + template[i].vlen + template[i].klen,
  3251. template[i].dt, template[i].dtlen);
  3252. err = crypto_rng_reset(tfm, seed, seedsize);
  3253. if (err) {
  3254. printk(KERN_ERR "alg: cprng: Failed to reset rng "
  3255. "for %s\n", algo);
  3256. goto out;
  3257. }
  3258. for (j = 0; j < template[i].loops; j++) {
  3259. err = crypto_rng_get_bytes(tfm, result,
  3260. template[i].rlen);
  3261. if (err < 0) {
  3262. printk(KERN_ERR "alg: cprng: Failed to obtain "
  3263. "the correct amount of random data for "
  3264. "%s (requested %d)\n", algo,
  3265. template[i].rlen);
  3266. goto out;
  3267. }
  3268. }
  3269. err = memcmp(result, template[i].result,
  3270. template[i].rlen);
  3271. if (err) {
  3272. printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
  3273. i, algo);
  3274. hexdump(result, template[i].rlen);
  3275. err = -EINVAL;
  3276. goto out;
  3277. }
  3278. }
  3279. out:
  3280. kfree(seed);
  3281. return err;
  3282. }
  3283. static int alg_test_cipher(const struct alg_test_desc *desc,
  3284. const char *driver, u32 type, u32 mask)
  3285. {
  3286. const struct cipher_test_suite *suite = &desc->suite.cipher;
  3287. struct crypto_cipher *tfm;
  3288. int err;
  3289. tfm = crypto_alloc_cipher(driver, type, mask);
  3290. if (IS_ERR(tfm)) {
  3291. if (PTR_ERR(tfm) == -ENOENT)
  3292. return 0;
  3293. printk(KERN_ERR "alg: cipher: Failed to load transform for "
  3294. "%s: %ld\n", driver, PTR_ERR(tfm));
  3295. return PTR_ERR(tfm);
  3296. }
  3297. err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
  3298. if (!err)
  3299. err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
  3300. crypto_free_cipher(tfm);
  3301. return err;
  3302. }
  3303. static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
  3304. u32 type, u32 mask)
  3305. {
  3306. struct crypto_comp *comp;
  3307. struct crypto_acomp *acomp;
  3308. int err;
  3309. u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
  3310. if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
  3311. acomp = crypto_alloc_acomp(driver, type, mask);
  3312. if (IS_ERR(acomp)) {
  3313. if (PTR_ERR(acomp) == -ENOENT)
  3314. return 0;
  3315. pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
  3316. driver, PTR_ERR(acomp));
  3317. return PTR_ERR(acomp);
  3318. }
  3319. err = test_acomp(acomp, desc->suite.comp.comp.vecs,
  3320. desc->suite.comp.decomp.vecs,
  3321. desc->suite.comp.comp.count,
  3322. desc->suite.comp.decomp.count);
  3323. crypto_free_acomp(acomp);
  3324. } else {
  3325. comp = crypto_alloc_comp(driver, type, mask);
  3326. if (IS_ERR(comp)) {
  3327. if (PTR_ERR(comp) == -ENOENT)
  3328. return 0;
  3329. pr_err("alg: comp: Failed to load transform for %s: %ld\n",
  3330. driver, PTR_ERR(comp));
  3331. return PTR_ERR(comp);
  3332. }
  3333. err = test_comp(comp, desc->suite.comp.comp.vecs,
  3334. desc->suite.comp.decomp.vecs,
  3335. desc->suite.comp.comp.count,
  3336. desc->suite.comp.decomp.count);
  3337. crypto_free_comp(comp);
  3338. }
  3339. return err;
  3340. }
/*
 * crc32c-specific tests: run the generic hash vectors first, then verify
 * that the shash descriptor context directly holds the raw CRC state by
 * seeding it and calling final().
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	__le32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		return err;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		if (PTR_ERR(tfm) == -ENOENT) {
			/*
			 * This crc32c implementation is only available through
			 * the ahash API, not the shash API, so the remaining
			 * part of the test is not applicable to it.
			 */
			return 0;
		}
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	/* Use the resolved driver name in any further messages. */
	driver = crypto_shash_driver_name(tfm);

	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;

		/*
		 * Seed the descriptor's CRC state with an arbitrary value
		 * (420553207) and call final() with no update().  crc32c's
		 * finalization is a bitwise inversion of the state, so the
		 * digest must equal the complement of the seed.
		 */
		*ctx = 420553207;
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != cpu_to_le32(~420553207)) {
			pr_err("alg: crc32c: Test failed for %s: %u\n",
			       driver, le32_to_cpu(val));
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);
	return err;
}
  3385. static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
  3386. u32 type, u32 mask)
  3387. {
  3388. struct crypto_rng *rng;
  3389. int err;
  3390. rng = crypto_alloc_rng(driver, type, mask);
  3391. if (IS_ERR(rng)) {
  3392. if (PTR_ERR(rng) == -ENOENT)
  3393. return 0;
  3394. printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
  3395. "%ld\n", driver, PTR_ERR(rng));
  3396. return PTR_ERR(rng);
  3397. }
  3398. err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
  3399. crypto_free_rng(rng);
  3400. return err;
  3401. }
/*
 * Run one DRBG CAVS known-answer vector against @driver.
 *
 * @pr selects prediction resistance: when set, fresh test entropy is fed
 * in before each generate call via the *_test() helpers.  Two generate
 * calls are made (with additional inputs A then B); only the second
 * output is compared against the expected data.
 *
 * Returns 0 on success (or if the driver is unavailable), a negative
 * errno on setup failure, or the nonzero memcmp() result on mismatch.
 */
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		kfree_sensitive(buf);
		/* An unavailable algorithm is not a test failure. */
		if (PTR_ERR(drng) == -ENOENT)
			return 0;
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		return PTR_ERR(drng);
	}

	/* Instantiate with the vector's entropy and personalization string. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call, with additional input A. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		/* Prediction resistance: supply fresh entropy for this call. */
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Second generate call, with additional input B. */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Only the output of the second generate call is checked. */
	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kfree_sensitive(buf);
	return ret;
}
  3463. static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
  3464. u32 type, u32 mask)
  3465. {
  3466. int err = 0;
  3467. int pr = 0;
  3468. int i = 0;
  3469. const struct drbg_testvec *template = desc->suite.drbg.vecs;
  3470. unsigned int tcount = desc->suite.drbg.count;
  3471. if (0 == memcmp(driver, "drbg_pr_", 8))
  3472. pr = 1;
  3473. for (i = 0; i < tcount; i++) {
  3474. err = drbg_cavs_test(&template[i], pr, driver, type, mask);
  3475. if (err) {
  3476. printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
  3477. i, driver);
  3478. err = -EINVAL;
  3479. break;
  3480. }
  3481. }
  3482. return err;
  3483. }
/*
 * Run one KPP (key agreement) test vector against @tfm.
 *
 * Party A's secret is loaded and its public key generated.  For genkey
 * vectors the public key is saved and later used to compute party B's
 * shared secret (with vec->b_secret), which must match A's; otherwise
 * the public key and shared secret are compared against the expected
 * values in the vector.
 *
 * Returns 0 on success, a negative errno on failure.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;
	void *a_ss = NULL;
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* B's result (in req->dst) must equal A's saved secret. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	/* shared_secret may alias a_ss; it is freed exactly once here. */
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
  3610. static int test_kpp(struct crypto_kpp *tfm, const char *alg,
  3611. const struct kpp_testvec *vecs, unsigned int tcount)
  3612. {
  3613. int ret, i;
  3614. for (i = 0; i < tcount; i++) {
  3615. ret = do_test_kpp(tfm, vecs++, alg);
  3616. if (ret) {
  3617. pr_err("alg: %s: test failed on vector %d, err=%d\n",
  3618. alg, i + 1, ret);
  3619. return ret;
  3620. }
  3621. }
  3622. return 0;
  3623. }
  3624. static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
  3625. u32 type, u32 mask)
  3626. {
  3627. struct crypto_kpp *tfm;
  3628. int err = 0;
  3629. tfm = crypto_alloc_kpp(driver, type, mask);
  3630. if (IS_ERR(tfm)) {
  3631. if (PTR_ERR(tfm) == -ENOENT)
  3632. return 0;
  3633. pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
  3634. driver, PTR_ERR(tfm));
  3635. return PTR_ERR(tfm);
  3636. }
  3637. if (desc->suite.kpp.vecs)
  3638. err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
  3639. desc->suite.kpp.count);
  3640. crypto_free_kpp(tfm);
  3641. return err;
  3642. }
  3643. static u8 *test_pack_u32(u8 *dst, u32 val)
  3644. {
  3645. memcpy(dst, &val, sizeof(val));
  3646. return dst + sizeof(val);
  3647. }
  3648. static int test_akcipher_one(struct crypto_akcipher *tfm,
  3649. const struct akcipher_testvec *vecs)
  3650. {
  3651. char *xbuf[XBUFSIZE];
  3652. struct akcipher_request *req;
  3653. void *outbuf_enc = NULL;
  3654. void *outbuf_dec = NULL;
  3655. struct crypto_wait wait;
  3656. unsigned int out_len_max, out_len = 0;
  3657. int err = -ENOMEM;
  3658. struct scatterlist src, dst, src_tab[3];
  3659. const char *m, *c;
  3660. unsigned int m_size, c_size;
  3661. const char *op;
  3662. u8 *key, *ptr;
  3663. if (testmgr_alloc_buf(xbuf))
  3664. return err;
  3665. req = akcipher_request_alloc(tfm, GFP_KERNEL);
  3666. if (!req)
  3667. goto free_xbuf;
  3668. crypto_init_wait(&wait);
  3669. key = kmalloc(vecs->key_len + sizeof(u32) * 2 + vecs->param_len,
  3670. GFP_KERNEL);
  3671. if (!key)
  3672. goto free_req;
  3673. memcpy(key, vecs->key, vecs->key_len);
  3674. ptr = key + vecs->key_len;
  3675. ptr = test_pack_u32(ptr, vecs->algo);
  3676. ptr = test_pack_u32(ptr, vecs->param_len);
  3677. memcpy(ptr, vecs->params, vecs->param_len);
  3678. if (vecs->public_key_vec)
  3679. err = crypto_akcipher_set_pub_key(tfm, key, vecs->key_len);
  3680. else
  3681. err = crypto_akcipher_set_priv_key(tfm, key, vecs->key_len);
  3682. if (err)
  3683. goto free_key;
  3684. /*
  3685. * First run test which do not require a private key, such as
  3686. * encrypt or verify.
  3687. */
  3688. err = -ENOMEM;
  3689. out_len_max = crypto_akcipher_maxsize(tfm);
  3690. outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
  3691. if (!outbuf_enc)
  3692. goto free_key;
  3693. if (!vecs->siggen_sigver_test) {
  3694. m = vecs->m;
  3695. m_size = vecs->m_size;
  3696. c = vecs->c;
  3697. c_size = vecs->c_size;
  3698. op = "encrypt";
  3699. } else {
  3700. /* Swap args so we could keep plaintext (digest)
  3701. * in vecs->m, and cooked signature in vecs->c.
  3702. */
  3703. m = vecs->c; /* signature */
  3704. m_size = vecs->c_size;
  3705. c = vecs->m; /* digest */
  3706. c_size = vecs->m_size;
  3707. op = "verify";
  3708. }
  3709. err = -E2BIG;
  3710. if (WARN_ON(m_size > PAGE_SIZE))
  3711. goto free_all;
  3712. memcpy(xbuf[0], m, m_size);
  3713. sg_init_table(src_tab, 3);
  3714. sg_set_buf(&src_tab[0], xbuf[0], 8);
  3715. sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
  3716. if (vecs->siggen_sigver_test) {
  3717. if (WARN_ON(c_size > PAGE_SIZE))
  3718. goto free_all;
  3719. memcpy(xbuf[1], c, c_size);
  3720. sg_set_buf(&src_tab[2], xbuf[1], c_size);
  3721. akcipher_request_set_crypt(req, src_tab, NULL, m_size, c_size);
  3722. } else {
  3723. sg_init_one(&dst, outbuf_enc, out_len_max);
  3724. akcipher_request_set_crypt(req, src_tab, &dst, m_size,
  3725. out_len_max);
  3726. }
  3727. akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
  3728. crypto_req_done, &wait);
  3729. err = crypto_wait_req(vecs->siggen_sigver_test ?
  3730. /* Run asymmetric signature verification */
  3731. crypto_akcipher_verify(req) :
  3732. /* Run asymmetric encrypt */
  3733. crypto_akcipher_encrypt(req), &wait);
  3734. if (err) {
  3735. pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
  3736. goto free_all;
  3737. }
  3738. if (!vecs->siggen_sigver_test && c) {
  3739. if (req->dst_len != c_size) {
  3740. pr_err("alg: akcipher: %s test failed. Invalid output len\n",
  3741. op);
  3742. err = -EINVAL;
  3743. goto free_all;
  3744. }
  3745. /* verify that encrypted message is equal to expected */
  3746. if (memcmp(c, outbuf_enc, c_size) != 0) {
  3747. pr_err("alg: akcipher: %s test failed. Invalid output\n",
  3748. op);
  3749. hexdump(outbuf_enc, c_size);
  3750. err = -EINVAL;
  3751. goto free_all;
  3752. }
  3753. }
  3754. /*
  3755. * Don't invoke (decrypt or sign) test which require a private key
  3756. * for vectors with only a public key.
  3757. */
  3758. if (vecs->public_key_vec) {
  3759. err = 0;
  3760. goto free_all;
  3761. }
  3762. outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
  3763. if (!outbuf_dec) {
  3764. err = -ENOMEM;
  3765. goto free_all;
  3766. }
  3767. if (!vecs->siggen_sigver_test && !c) {
  3768. c = outbuf_enc;
  3769. c_size = req->dst_len;
  3770. }
  3771. err = -E2BIG;
  3772. op = vecs->siggen_sigver_test ? "sign" : "decrypt";
  3773. if (WARN_ON(c_size > PAGE_SIZE))
  3774. goto free_all;
  3775. memcpy(xbuf[0], c, c_size);
  3776. sg_init_one(&src, xbuf[0], c_size);
  3777. sg_init_one(&dst, outbuf_dec, out_len_max);
  3778. crypto_init_wait(&wait);
  3779. akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);
  3780. err = crypto_wait_req(vecs->siggen_sigver_test ?
  3781. /* Run asymmetric signature generation */
  3782. crypto_akcipher_sign(req) :
  3783. /* Run asymmetric decrypt */
  3784. crypto_akcipher_decrypt(req), &wait);
  3785. if (err) {
  3786. pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
  3787. goto free_all;
  3788. }
  3789. out_len = req->dst_len;
  3790. if (out_len < m_size) {
  3791. pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
  3792. op, out_len);
  3793. err = -EINVAL;
  3794. goto free_all;
  3795. }
  3796. /* verify that decrypted message is equal to the original msg */
  3797. if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
  3798. memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
  3799. pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
  3800. hexdump(outbuf_dec, out_len);
  3801. err = -EINVAL;
  3802. }
  3803. free_all:
  3804. kfree(outbuf_dec);
  3805. kfree(outbuf_enc);
  3806. free_key:
  3807. kfree(key);
  3808. free_req:
  3809. akcipher_request_free(req);
  3810. free_xbuf:
  3811. testmgr_free_buf(xbuf);
  3812. return err;
  3813. }
  3814. static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
  3815. const struct akcipher_testvec *vecs,
  3816. unsigned int tcount)
  3817. {
  3818. const char *algo =
  3819. crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
  3820. int ret, i;
  3821. for (i = 0; i < tcount; i++) {
  3822. ret = test_akcipher_one(tfm, vecs++);
  3823. if (!ret)
  3824. continue;
  3825. pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
  3826. i + 1, algo, ret);
  3827. return ret;
  3828. }
  3829. return 0;
  3830. }
  3831. static int alg_test_akcipher(const struct alg_test_desc *desc,
  3832. const char *driver, u32 type, u32 mask)
  3833. {
  3834. struct crypto_akcipher *tfm;
  3835. int err = 0;
  3836. tfm = crypto_alloc_akcipher(driver, type, mask);
  3837. if (IS_ERR(tfm)) {
  3838. if (PTR_ERR(tfm) == -ENOENT)
  3839. return 0;
  3840. pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
  3841. driver, PTR_ERR(tfm));
  3842. return PTR_ERR(tfm);
  3843. }
  3844. if (desc->suite.akcipher.vecs)
  3845. err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
  3846. desc->suite.akcipher.count);
  3847. crypto_free_akcipher(tfm);
  3848. return err;
  3849. }
/*
 * No-op test handler: always reports success.  Used by descriptor entries
 * below that need a testmgr record (e.g. to set fips_allowed, or because
 * the algorithm is covered by another entry's vectors) but run no tests.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}
/* Expand a test-vector array @tv into .vecs/.count designated initializers. */
#define ____VECS(tv) .vecs = tv, .count = ARRAY_SIZE(tv)
/* Same as ____VECS() but braced, for initializing a whole suite member. */
#define __VECS(tv) { ____VECS(tv) }
  3857. /* Please keep this list sorted by algorithm name. */
  3858. static const struct alg_test_desc alg_test_descs[] = {
  3859. {
  3860. .alg = "adiantum(xchacha12,aes)",
  3861. .generic_driver = "adiantum(xchacha12-generic,aes-generic,nhpoly1305-generic)",
  3862. .test = alg_test_skcipher,
  3863. .suite = {
  3864. .cipher = __VECS(adiantum_xchacha12_aes_tv_template)
  3865. },
  3866. }, {
  3867. .alg = "adiantum(xchacha20,aes)",
  3868. .generic_driver = "adiantum(xchacha20-generic,aes-generic,nhpoly1305-generic)",
  3869. .test = alg_test_skcipher,
  3870. .suite = {
  3871. .cipher = __VECS(adiantum_xchacha20_aes_tv_template)
  3872. },
  3873. }, {
  3874. .alg = "aegis128",
  3875. .test = alg_test_aead,
  3876. .suite = {
  3877. .aead = __VECS(aegis128_tv_template)
  3878. }
  3879. }, {
  3880. .alg = "ansi_cprng",
  3881. .test = alg_test_cprng,
  3882. .suite = {
  3883. .cprng = __VECS(ansi_cprng_aes_tv_template)
  3884. }
  3885. }, {
  3886. .alg = "authenc(hmac(md5),ecb(cipher_null))",
  3887. .test = alg_test_aead,
  3888. .suite = {
  3889. .aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
  3890. }
  3891. }, {
  3892. .alg = "authenc(hmac(sha1),cbc(aes))",
  3893. .test = alg_test_aead,
  3894. .fips_allowed = 1,
  3895. .suite = {
  3896. .aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
  3897. }
  3898. }, {
  3899. .alg = "authenc(hmac(sha1),cbc(des))",
  3900. .test = alg_test_aead,
  3901. .suite = {
  3902. .aead = __VECS(hmac_sha1_des_cbc_tv_temp)
  3903. }
  3904. }, {
  3905. .alg = "authenc(hmac(sha1),cbc(des3_ede))",
  3906. .test = alg_test_aead,
  3907. .suite = {
  3908. .aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
  3909. }
  3910. }, {
  3911. .alg = "authenc(hmac(sha1),ctr(aes))",
  3912. .test = alg_test_null,
  3913. .fips_allowed = 1,
  3914. }, {
  3915. .alg = "authenc(hmac(sha1),ecb(cipher_null))",
  3916. .test = alg_test_aead,
  3917. .suite = {
  3918. .aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
  3919. }
  3920. }, {
  3921. .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
  3922. .test = alg_test_null,
  3923. .fips_allowed = 1,
  3924. }, {
  3925. .alg = "authenc(hmac(sha224),cbc(des))",
  3926. .test = alg_test_aead,
  3927. .suite = {
  3928. .aead = __VECS(hmac_sha224_des_cbc_tv_temp)
  3929. }
  3930. }, {
  3931. .alg = "authenc(hmac(sha224),cbc(des3_ede))",
  3932. .test = alg_test_aead,
  3933. .suite = {
  3934. .aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
  3935. }
  3936. }, {
  3937. .alg = "authenc(hmac(sha256),cbc(aes))",
  3938. .test = alg_test_aead,
  3939. .fips_allowed = 1,
  3940. .suite = {
  3941. .aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
  3942. }
  3943. }, {
  3944. .alg = "authenc(hmac(sha256),cbc(des))",
  3945. .test = alg_test_aead,
  3946. .suite = {
  3947. .aead = __VECS(hmac_sha256_des_cbc_tv_temp)
  3948. }
  3949. }, {
  3950. .alg = "authenc(hmac(sha256),cbc(des3_ede))",
  3951. .test = alg_test_aead,
  3952. .suite = {
  3953. .aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
  3954. }
  3955. }, {
  3956. .alg = "authenc(hmac(sha256),ctr(aes))",
  3957. .test = alg_test_null,
  3958. .fips_allowed = 1,
  3959. }, {
  3960. .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
  3961. .test = alg_test_null,
  3962. .fips_allowed = 1,
  3963. }, {
  3964. .alg = "authenc(hmac(sha384),cbc(des))",
  3965. .test = alg_test_aead,
  3966. .suite = {
  3967. .aead = __VECS(hmac_sha384_des_cbc_tv_temp)
  3968. }
  3969. }, {
  3970. .alg = "authenc(hmac(sha384),cbc(des3_ede))",
  3971. .test = alg_test_aead,
  3972. .suite = {
  3973. .aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
  3974. }
  3975. }, {
  3976. .alg = "authenc(hmac(sha384),ctr(aes))",
  3977. .test = alg_test_null,
  3978. .fips_allowed = 1,
  3979. }, {
  3980. .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
  3981. .test = alg_test_null,
  3982. .fips_allowed = 1,
  3983. }, {
  3984. .alg = "authenc(hmac(sha512),cbc(aes))",
  3985. .fips_allowed = 1,
  3986. .test = alg_test_aead,
  3987. .suite = {
  3988. .aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
  3989. }
  3990. }, {
  3991. .alg = "authenc(hmac(sha512),cbc(des))",
  3992. .test = alg_test_aead,
  3993. .suite = {
  3994. .aead = __VECS(hmac_sha512_des_cbc_tv_temp)
  3995. }
  3996. }, {
  3997. .alg = "authenc(hmac(sha512),cbc(des3_ede))",
  3998. .test = alg_test_aead,
  3999. .suite = {
  4000. .aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
  4001. }
  4002. }, {
  4003. .alg = "authenc(hmac(sha512),ctr(aes))",
  4004. .test = alg_test_null,
  4005. .fips_allowed = 1,
  4006. }, {
  4007. .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
  4008. .test = alg_test_null,
  4009. .fips_allowed = 1,
  4010. }, {
  4011. .alg = "blake2b-160",
  4012. .test = alg_test_hash,
  4013. .fips_allowed = 0,
  4014. .suite = {
  4015. .hash = __VECS(blake2b_160_tv_template)
  4016. }
  4017. }, {
  4018. .alg = "blake2b-256",
  4019. .test = alg_test_hash,
  4020. .fips_allowed = 0,
  4021. .suite = {
  4022. .hash = __VECS(blake2b_256_tv_template)
  4023. }
  4024. }, {
  4025. .alg = "blake2b-384",
  4026. .test = alg_test_hash,
  4027. .fips_allowed = 0,
  4028. .suite = {
  4029. .hash = __VECS(blake2b_384_tv_template)
  4030. }
  4031. }, {
  4032. .alg = "blake2b-512",
  4033. .test = alg_test_hash,
  4034. .fips_allowed = 0,
  4035. .suite = {
  4036. .hash = __VECS(blake2b_512_tv_template)
  4037. }
  4038. }, {
  4039. .alg = "cbc(aes)",
  4040. .test = alg_test_skcipher,
  4041. .fips_allowed = 1,
  4042. .suite = {
  4043. .cipher = __VECS(aes_cbc_tv_template)
  4044. },
  4045. }, {
  4046. .alg = "cbc(anubis)",
  4047. .test = alg_test_skcipher,
  4048. .suite = {
  4049. .cipher = __VECS(anubis_cbc_tv_template)
  4050. },
  4051. }, {
  4052. .alg = "cbc(aria)",
  4053. .test = alg_test_skcipher,
  4054. .suite = {
  4055. .cipher = __VECS(aria_cbc_tv_template)
  4056. },
  4057. }, {
  4058. .alg = "cbc(blowfish)",
  4059. .test = alg_test_skcipher,
  4060. .suite = {
  4061. .cipher = __VECS(bf_cbc_tv_template)
  4062. },
  4063. }, {
  4064. .alg = "cbc(camellia)",
  4065. .test = alg_test_skcipher,
  4066. .suite = {
  4067. .cipher = __VECS(camellia_cbc_tv_template)
  4068. },
  4069. }, {
  4070. .alg = "cbc(cast5)",
  4071. .test = alg_test_skcipher,
  4072. .suite = {
  4073. .cipher = __VECS(cast5_cbc_tv_template)
  4074. },
  4075. }, {
  4076. .alg = "cbc(cast6)",
  4077. .test = alg_test_skcipher,
  4078. .suite = {
  4079. .cipher = __VECS(cast6_cbc_tv_template)
  4080. },
  4081. }, {
  4082. .alg = "cbc(des)",
  4083. .test = alg_test_skcipher,
  4084. .suite = {
  4085. .cipher = __VECS(des_cbc_tv_template)
  4086. },
  4087. }, {
  4088. .alg = "cbc(des3_ede)",
  4089. .test = alg_test_skcipher,
  4090. .suite = {
  4091. .cipher = __VECS(des3_ede_cbc_tv_template)
  4092. },
  4093. }, {
  4094. /* Same as cbc(aes) except the key is stored in
  4095. * hardware secure memory which we reference by index
  4096. */
  4097. .alg = "cbc(paes)",
  4098. .test = alg_test_null,
  4099. .fips_allowed = 1,
  4100. }, {
  4101. /* Same as cbc(sm4) except the key is stored in
  4102. * hardware secure memory which we reference by index
  4103. */
  4104. .alg = "cbc(psm4)",
  4105. .test = alg_test_null,
  4106. }, {
  4107. .alg = "cbc(serpent)",
  4108. .test = alg_test_skcipher,
  4109. .suite = {
  4110. .cipher = __VECS(serpent_cbc_tv_template)
  4111. },
  4112. }, {
  4113. .alg = "cbc(sm4)",
  4114. .test = alg_test_skcipher,
  4115. .suite = {
  4116. .cipher = __VECS(sm4_cbc_tv_template)
  4117. }
  4118. }, {
  4119. .alg = "cbc(twofish)",
  4120. .test = alg_test_skcipher,
  4121. .suite = {
  4122. .cipher = __VECS(tf_cbc_tv_template)
  4123. },
  4124. }, {
  4125. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  4126. .alg = "cbc-paes-s390",
  4127. .fips_allowed = 1,
  4128. .test = alg_test_skcipher,
  4129. .suite = {
  4130. .cipher = __VECS(aes_cbc_tv_template)
  4131. }
  4132. }, {
  4133. #endif
  4134. .alg = "cbcmac(aes)",
  4135. .test = alg_test_hash,
  4136. .suite = {
  4137. .hash = __VECS(aes_cbcmac_tv_template)
  4138. }
  4139. }, {
  4140. .alg = "cbcmac(sm4)",
  4141. .test = alg_test_hash,
  4142. .suite = {
  4143. .hash = __VECS(sm4_cbcmac_tv_template)
  4144. }
  4145. }, {
  4146. .alg = "ccm(aes)",
  4147. .generic_driver = "ccm_base(ctr(aes-generic),cbcmac(aes-generic))",
  4148. .test = alg_test_aead,
  4149. .fips_allowed = 1,
  4150. .suite = {
  4151. .aead = {
  4152. ____VECS(aes_ccm_tv_template),
  4153. .einval_allowed = 1,
  4154. }
  4155. }
  4156. }, {
  4157. .alg = "ccm(sm4)",
  4158. .generic_driver = "ccm_base(ctr(sm4-generic),cbcmac(sm4-generic))",
  4159. .test = alg_test_aead,
  4160. .suite = {
  4161. .aead = {
  4162. ____VECS(sm4_ccm_tv_template),
  4163. .einval_allowed = 1,
  4164. }
  4165. }
  4166. }, {
  4167. .alg = "chacha20",
  4168. .test = alg_test_skcipher,
  4169. .suite = {
  4170. .cipher = __VECS(chacha20_tv_template)
  4171. },
  4172. }, {
  4173. .alg = "cmac(aes)",
  4174. .fips_allowed = 1,
  4175. .test = alg_test_hash,
  4176. .suite = {
  4177. .hash = __VECS(aes_cmac128_tv_template)
  4178. }
  4179. }, {
  4180. .alg = "cmac(camellia)",
  4181. .test = alg_test_hash,
  4182. .suite = {
  4183. .hash = __VECS(camellia_cmac128_tv_template)
  4184. }
  4185. }, {
  4186. .alg = "cmac(des3_ede)",
  4187. .test = alg_test_hash,
  4188. .suite = {
  4189. .hash = __VECS(des3_ede_cmac64_tv_template)
  4190. }
  4191. }, {
  4192. .alg = "cmac(sm4)",
  4193. .test = alg_test_hash,
  4194. .suite = {
  4195. .hash = __VECS(sm4_cmac128_tv_template)
  4196. }
  4197. }, {
  4198. .alg = "compress_null",
  4199. .test = alg_test_null,
  4200. }, {
  4201. .alg = "crc32",
  4202. .test = alg_test_hash,
  4203. .fips_allowed = 1,
  4204. .suite = {
  4205. .hash = __VECS(crc32_tv_template)
  4206. }
  4207. }, {
  4208. .alg = "crc32c",
  4209. .test = alg_test_crc32c,
  4210. .fips_allowed = 1,
  4211. .suite = {
  4212. .hash = __VECS(crc32c_tv_template)
  4213. }
  4214. }, {
  4215. .alg = "crc64-rocksoft",
  4216. .test = alg_test_hash,
  4217. .fips_allowed = 1,
  4218. .suite = {
  4219. .hash = __VECS(crc64_rocksoft_tv_template)
  4220. }
  4221. }, {
  4222. .alg = "crct10dif",
  4223. .test = alg_test_hash,
  4224. .fips_allowed = 1,
  4225. .suite = {
  4226. .hash = __VECS(crct10dif_tv_template)
  4227. }
  4228. }, {
  4229. .alg = "ctr(aes)",
  4230. .test = alg_test_skcipher,
  4231. .fips_allowed = 1,
  4232. .suite = {
  4233. .cipher = __VECS(aes_ctr_tv_template)
  4234. }
  4235. }, {
  4236. .alg = "ctr(aria)",
  4237. .test = alg_test_skcipher,
  4238. .suite = {
  4239. .cipher = __VECS(aria_ctr_tv_template)
  4240. }
  4241. }, {
  4242. .alg = "ctr(blowfish)",
  4243. .test = alg_test_skcipher,
  4244. .suite = {
  4245. .cipher = __VECS(bf_ctr_tv_template)
  4246. }
  4247. }, {
  4248. .alg = "ctr(camellia)",
  4249. .test = alg_test_skcipher,
  4250. .suite = {
  4251. .cipher = __VECS(camellia_ctr_tv_template)
  4252. }
  4253. }, {
  4254. .alg = "ctr(cast5)",
  4255. .test = alg_test_skcipher,
  4256. .suite = {
  4257. .cipher = __VECS(cast5_ctr_tv_template)
  4258. }
  4259. }, {
  4260. .alg = "ctr(cast6)",
  4261. .test = alg_test_skcipher,
  4262. .suite = {
  4263. .cipher = __VECS(cast6_ctr_tv_template)
  4264. }
  4265. }, {
  4266. .alg = "ctr(des)",
  4267. .test = alg_test_skcipher,
  4268. .suite = {
  4269. .cipher = __VECS(des_ctr_tv_template)
  4270. }
  4271. }, {
  4272. .alg = "ctr(des3_ede)",
  4273. .test = alg_test_skcipher,
  4274. .suite = {
  4275. .cipher = __VECS(des3_ede_ctr_tv_template)
  4276. }
  4277. }, {
  4278. /* Same as ctr(aes) except the key is stored in
  4279. * hardware secure memory which we reference by index
  4280. */
  4281. .alg = "ctr(paes)",
  4282. .test = alg_test_null,
  4283. .fips_allowed = 1,
  4284. }, {
  4285. /* Same as ctr(sm4) except the key is stored in
  4286. * hardware secure memory which we reference by index
  4287. */
  4288. .alg = "ctr(psm4)",
  4289. .test = alg_test_null,
  4290. }, {
  4291. .alg = "ctr(serpent)",
  4292. .test = alg_test_skcipher,
  4293. .suite = {
  4294. .cipher = __VECS(serpent_ctr_tv_template)
  4295. }
  4296. }, {
  4297. .alg = "ctr(sm4)",
  4298. .test = alg_test_skcipher,
  4299. .suite = {
  4300. .cipher = __VECS(sm4_ctr_tv_template)
  4301. }
  4302. }, {
  4303. .alg = "ctr(twofish)",
  4304. .test = alg_test_skcipher,
  4305. .suite = {
  4306. .cipher = __VECS(tf_ctr_tv_template)
  4307. }
  4308. }, {
  4309. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  4310. .alg = "ctr-paes-s390",
  4311. .fips_allowed = 1,
  4312. .test = alg_test_skcipher,
  4313. .suite = {
  4314. .cipher = __VECS(aes_ctr_tv_template)
  4315. }
  4316. }, {
  4317. #endif
  4318. .alg = "cts(cbc(aes))",
  4319. .test = alg_test_skcipher,
  4320. .fips_allowed = 1,
  4321. .suite = {
  4322. .cipher = __VECS(cts_mode_tv_template)
  4323. }
  4324. }, {
  4325. /* Same as cts(cbc((aes)) except the key is stored in
  4326. * hardware secure memory which we reference by index
  4327. */
  4328. .alg = "cts(cbc(paes))",
  4329. .test = alg_test_null,
  4330. .fips_allowed = 1,
  4331. }, {
  4332. .alg = "cts(cbc(sm4))",
  4333. .test = alg_test_skcipher,
  4334. .suite = {
  4335. .cipher = __VECS(sm4_cts_tv_template)
  4336. }
  4337. }, {
  4338. .alg = "curve25519",
  4339. .test = alg_test_kpp,
  4340. .suite = {
  4341. .kpp = __VECS(curve25519_tv_template)
  4342. }
  4343. }, {
  4344. .alg = "deflate",
  4345. .test = alg_test_comp,
  4346. .fips_allowed = 1,
  4347. .suite = {
  4348. .comp = {
  4349. .comp = __VECS(deflate_comp_tv_template),
  4350. .decomp = __VECS(deflate_decomp_tv_template)
  4351. }
  4352. }
  4353. }, {
  4354. .alg = "deflate-iaa",
  4355. .test = alg_test_comp,
  4356. .fips_allowed = 1,
  4357. .suite = {
  4358. .comp = {
  4359. .comp = __VECS(deflate_comp_tv_template),
  4360. .decomp = __VECS(deflate_decomp_tv_template)
  4361. }
  4362. }
  4363. }, {
  4364. .alg = "dh",
  4365. .test = alg_test_kpp,
  4366. .suite = {
  4367. .kpp = __VECS(dh_tv_template)
  4368. }
  4369. }, {
  4370. .alg = "digest_null",
  4371. .test = alg_test_null,
  4372. }, {
  4373. .alg = "drbg_nopr_ctr_aes128",
  4374. .test = alg_test_drbg,
  4375. .fips_allowed = 1,
  4376. .suite = {
  4377. .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
  4378. }
  4379. }, {
  4380. .alg = "drbg_nopr_ctr_aes192",
  4381. .test = alg_test_drbg,
  4382. .fips_allowed = 1,
  4383. .suite = {
  4384. .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
  4385. }
  4386. }, {
  4387. .alg = "drbg_nopr_ctr_aes256",
  4388. .test = alg_test_drbg,
  4389. .fips_allowed = 1,
  4390. .suite = {
  4391. .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
  4392. }
  4393. }, {
  4394. .alg = "drbg_nopr_hmac_sha256",
  4395. .test = alg_test_drbg,
  4396. .fips_allowed = 1,
  4397. .suite = {
  4398. .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
  4399. }
  4400. }, {
  4401. /*
  4402. * There is no need to specifically test the DRBG with every
  4403. * backend cipher -- covered by drbg_nopr_hmac_sha512 test
  4404. */
  4405. .alg = "drbg_nopr_hmac_sha384",
  4406. .test = alg_test_null,
  4407. }, {
  4408. .alg = "drbg_nopr_hmac_sha512",
  4409. .test = alg_test_drbg,
  4410. .fips_allowed = 1,
  4411. .suite = {
  4412. .drbg = __VECS(drbg_nopr_hmac_sha512_tv_template)
  4413. }
  4414. }, {
  4415. .alg = "drbg_nopr_sha256",
  4416. .test = alg_test_drbg,
  4417. .fips_allowed = 1,
  4418. .suite = {
  4419. .drbg = __VECS(drbg_nopr_sha256_tv_template)
  4420. }
  4421. }, {
  4422. /* covered by drbg_nopr_sha256 test */
  4423. .alg = "drbg_nopr_sha384",
  4424. .test = alg_test_null,
  4425. }, {
  4426. .alg = "drbg_nopr_sha512",
  4427. .fips_allowed = 1,
  4428. .test = alg_test_null,
  4429. }, {
  4430. .alg = "drbg_pr_ctr_aes128",
  4431. .test = alg_test_drbg,
  4432. .fips_allowed = 1,
  4433. .suite = {
  4434. .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
  4435. }
  4436. }, {
  4437. /* covered by drbg_pr_ctr_aes128 test */
  4438. .alg = "drbg_pr_ctr_aes192",
  4439. .fips_allowed = 1,
  4440. .test = alg_test_null,
  4441. }, {
  4442. .alg = "drbg_pr_ctr_aes256",
  4443. .fips_allowed = 1,
  4444. .test = alg_test_null,
  4445. }, {
  4446. .alg = "drbg_pr_hmac_sha256",
  4447. .test = alg_test_drbg,
  4448. .fips_allowed = 1,
  4449. .suite = {
  4450. .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
  4451. }
  4452. }, {
  4453. /* covered by drbg_pr_hmac_sha256 test */
  4454. .alg = "drbg_pr_hmac_sha384",
  4455. .test = alg_test_null,
  4456. }, {
  4457. .alg = "drbg_pr_hmac_sha512",
  4458. .test = alg_test_null,
  4459. .fips_allowed = 1,
  4460. }, {
  4461. .alg = "drbg_pr_sha256",
  4462. .test = alg_test_drbg,
  4463. .fips_allowed = 1,
  4464. .suite = {
  4465. .drbg = __VECS(drbg_pr_sha256_tv_template)
  4466. }
  4467. }, {
  4468. /* covered by drbg_pr_sha256 test */
  4469. .alg = "drbg_pr_sha384",
  4470. .test = alg_test_null,
  4471. }, {
  4472. .alg = "drbg_pr_sha512",
  4473. .fips_allowed = 1,
  4474. .test = alg_test_null,
  4475. }, {
  4476. .alg = "ecb(aes)",
  4477. .test = alg_test_skcipher,
  4478. .fips_allowed = 1,
  4479. .suite = {
  4480. .cipher = __VECS(aes_tv_template)
  4481. }
  4482. }, {
  4483. .alg = "ecb(anubis)",
  4484. .test = alg_test_skcipher,
  4485. .suite = {
  4486. .cipher = __VECS(anubis_tv_template)
  4487. }
  4488. }, {
  4489. .alg = "ecb(arc4)",
  4490. .generic_driver = "arc4-generic",
  4491. .test = alg_test_skcipher,
  4492. .suite = {
  4493. .cipher = __VECS(arc4_tv_template)
  4494. }
  4495. }, {
  4496. .alg = "ecb(aria)",
  4497. .test = alg_test_skcipher,
  4498. .suite = {
  4499. .cipher = __VECS(aria_tv_template)
  4500. }
  4501. }, {
  4502. .alg = "ecb(blowfish)",
  4503. .test = alg_test_skcipher,
  4504. .suite = {
  4505. .cipher = __VECS(bf_tv_template)
  4506. }
  4507. }, {
  4508. .alg = "ecb(camellia)",
  4509. .test = alg_test_skcipher,
  4510. .suite = {
  4511. .cipher = __VECS(camellia_tv_template)
  4512. }
  4513. }, {
  4514. .alg = "ecb(cast5)",
  4515. .test = alg_test_skcipher,
  4516. .suite = {
  4517. .cipher = __VECS(cast5_tv_template)
  4518. }
  4519. }, {
  4520. .alg = "ecb(cast6)",
  4521. .test = alg_test_skcipher,
  4522. .suite = {
  4523. .cipher = __VECS(cast6_tv_template)
  4524. }
  4525. }, {
  4526. .alg = "ecb(cipher_null)",
  4527. .test = alg_test_null,
  4528. .fips_allowed = 1,
  4529. }, {
  4530. .alg = "ecb(des)",
  4531. .test = alg_test_skcipher,
  4532. .suite = {
  4533. .cipher = __VECS(des_tv_template)
  4534. }
  4535. }, {
  4536. .alg = "ecb(des3_ede)",
  4537. .test = alg_test_skcipher,
  4538. .suite = {
  4539. .cipher = __VECS(des3_ede_tv_template)
  4540. }
  4541. }, {
  4542. .alg = "ecb(fcrypt)",
  4543. .test = alg_test_skcipher,
  4544. .suite = {
  4545. .cipher = {
  4546. .vecs = fcrypt_pcbc_tv_template,
  4547. .count = 1
  4548. }
  4549. }
  4550. }, {
  4551. .alg = "ecb(khazad)",
  4552. .test = alg_test_skcipher,
  4553. .suite = {
  4554. .cipher = __VECS(khazad_tv_template)
  4555. }
  4556. }, {
  4557. /* Same as ecb(aes) except the key is stored in
  4558. * hardware secure memory which we reference by index
  4559. */
  4560. .alg = "ecb(paes)",
  4561. .test = alg_test_null,
  4562. .fips_allowed = 1,
  4563. }, {
  4564. .alg = "ecb(seed)",
  4565. .test = alg_test_skcipher,
  4566. .suite = {
  4567. .cipher = __VECS(seed_tv_template)
  4568. }
  4569. }, {
  4570. .alg = "ecb(serpent)",
  4571. .test = alg_test_skcipher,
  4572. .suite = {
  4573. .cipher = __VECS(serpent_tv_template)
  4574. }
  4575. }, {
  4576. .alg = "ecb(sm4)",
  4577. .test = alg_test_skcipher,
  4578. .suite = {
  4579. .cipher = __VECS(sm4_tv_template)
  4580. }
  4581. }, {
  4582. .alg = "ecb(tea)",
  4583. .test = alg_test_skcipher,
  4584. .suite = {
  4585. .cipher = __VECS(tea_tv_template)
  4586. }
  4587. }, {
  4588. .alg = "ecb(twofish)",
  4589. .test = alg_test_skcipher,
  4590. .suite = {
  4591. .cipher = __VECS(tf_tv_template)
  4592. }
  4593. }, {
  4594. .alg = "ecb(xeta)",
  4595. .test = alg_test_skcipher,
  4596. .suite = {
  4597. .cipher = __VECS(xeta_tv_template)
  4598. }
  4599. }, {
  4600. .alg = "ecb(xtea)",
  4601. .test = alg_test_skcipher,
  4602. .suite = {
  4603. .cipher = __VECS(xtea_tv_template)
  4604. }
  4605. }, {
  4606. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  4607. .alg = "ecb-paes-s390",
  4608. .fips_allowed = 1,
  4609. .test = alg_test_skcipher,
  4610. .suite = {
  4611. .cipher = __VECS(aes_tv_template)
  4612. }
  4613. }, {
  4614. #endif
  4615. .alg = "ecdh-nist-p192",
  4616. .test = alg_test_kpp,
  4617. .suite = {
  4618. .kpp = __VECS(ecdh_p192_tv_template)
  4619. }
  4620. }, {
  4621. .alg = "ecdh-nist-p256",
  4622. .test = alg_test_kpp,
  4623. .fips_allowed = 1,
  4624. .suite = {
  4625. .kpp = __VECS(ecdh_p256_tv_template)
  4626. }
  4627. }, {
  4628. .alg = "ecdh-nist-p384",
  4629. .test = alg_test_kpp,
  4630. .fips_allowed = 1,
  4631. .suite = {
  4632. .kpp = __VECS(ecdh_p384_tv_template)
  4633. }
  4634. }, {
  4635. .alg = "ecdsa-nist-p192",
  4636. .test = alg_test_akcipher,
  4637. .suite = {
  4638. .akcipher = __VECS(ecdsa_nist_p192_tv_template)
  4639. }
  4640. }, {
  4641. .alg = "ecdsa-nist-p256",
  4642. .test = alg_test_akcipher,
  4643. .fips_allowed = 1,
  4644. .suite = {
  4645. .akcipher = __VECS(ecdsa_nist_p256_tv_template)
  4646. }
  4647. }, {
  4648. .alg = "ecdsa-nist-p384",
  4649. .test = alg_test_akcipher,
  4650. .fips_allowed = 1,
  4651. .suite = {
  4652. .akcipher = __VECS(ecdsa_nist_p384_tv_template)
  4653. }
  4654. }, {
  4655. .alg = "ecdsa-nist-p521",
  4656. .test = alg_test_akcipher,
  4657. .fips_allowed = 1,
  4658. .suite = {
  4659. .akcipher = __VECS(ecdsa_nist_p521_tv_template)
  4660. }
  4661. }, {
  4662. .alg = "ecrdsa",
  4663. .test = alg_test_akcipher,
  4664. .suite = {
  4665. .akcipher = __VECS(ecrdsa_tv_template)
  4666. }
  4667. }, {
  4668. .alg = "essiv(authenc(hmac(sha256),cbc(aes)),sha256)",
  4669. .test = alg_test_aead,
  4670. .fips_allowed = 1,
  4671. .suite = {
  4672. .aead = __VECS(essiv_hmac_sha256_aes_cbc_tv_temp)
  4673. }
  4674. }, {
  4675. .alg = "essiv(cbc(aes),sha256)",
  4676. .test = alg_test_skcipher,
  4677. .fips_allowed = 1,
  4678. .suite = {
  4679. .cipher = __VECS(essiv_aes_cbc_tv_template)
  4680. }
  4681. }, {
  4682. #if IS_ENABLED(CONFIG_CRYPTO_DH_RFC7919_GROUPS)
  4683. .alg = "ffdhe2048(dh)",
  4684. .test = alg_test_kpp,
  4685. .fips_allowed = 1,
  4686. .suite = {
  4687. .kpp = __VECS(ffdhe2048_dh_tv_template)
  4688. }
  4689. }, {
  4690. .alg = "ffdhe3072(dh)",
  4691. .test = alg_test_kpp,
  4692. .fips_allowed = 1,
  4693. .suite = {
  4694. .kpp = __VECS(ffdhe3072_dh_tv_template)
  4695. }
  4696. }, {
  4697. .alg = "ffdhe4096(dh)",
  4698. .test = alg_test_kpp,
  4699. .fips_allowed = 1,
  4700. .suite = {
  4701. .kpp = __VECS(ffdhe4096_dh_tv_template)
  4702. }
  4703. }, {
  4704. .alg = "ffdhe6144(dh)",
  4705. .test = alg_test_kpp,
  4706. .fips_allowed = 1,
  4707. .suite = {
  4708. .kpp = __VECS(ffdhe6144_dh_tv_template)
  4709. }
  4710. }, {
  4711. .alg = "ffdhe8192(dh)",
  4712. .test = alg_test_kpp,
  4713. .fips_allowed = 1,
  4714. .suite = {
  4715. .kpp = __VECS(ffdhe8192_dh_tv_template)
  4716. }
  4717. }, {
  4718. #endif /* CONFIG_CRYPTO_DH_RFC7919_GROUPS */
  4719. .alg = "gcm(aes)",
  4720. .generic_driver = "gcm_base(ctr(aes-generic),ghash-generic)",
  4721. .test = alg_test_aead,
  4722. .fips_allowed = 1,
  4723. .suite = {
  4724. .aead = __VECS(aes_gcm_tv_template)
  4725. }
  4726. }, {
  4727. .alg = "gcm(aria)",
  4728. .generic_driver = "gcm_base(ctr(aria-generic),ghash-generic)",
  4729. .test = alg_test_aead,
  4730. .suite = {
  4731. .aead = __VECS(aria_gcm_tv_template)
  4732. }
  4733. }, {
  4734. .alg = "gcm(sm4)",
  4735. .generic_driver = "gcm_base(ctr(sm4-generic),ghash-generic)",
  4736. .test = alg_test_aead,
  4737. .suite = {
  4738. .aead = __VECS(sm4_gcm_tv_template)
  4739. }
  4740. }, {
  4741. .alg = "ghash",
  4742. .test = alg_test_hash,
  4743. .suite = {
  4744. .hash = __VECS(ghash_tv_template)
  4745. }
  4746. }, {
  4747. .alg = "hctr2(aes)",
  4748. .generic_driver =
  4749. "hctr2_base(xctr(aes-generic),polyval-generic)",
  4750. .test = alg_test_skcipher,
  4751. .suite = {
  4752. .cipher = __VECS(aes_hctr2_tv_template)
  4753. }
  4754. }, {
  4755. .alg = "hmac(md5)",
  4756. .test = alg_test_hash,
  4757. .suite = {
  4758. .hash = __VECS(hmac_md5_tv_template)
  4759. }
  4760. }, {
  4761. .alg = "hmac(rmd160)",
  4762. .test = alg_test_hash,
  4763. .suite = {
  4764. .hash = __VECS(hmac_rmd160_tv_template)
  4765. }
  4766. }, {
  4767. .alg = "hmac(sha1)",
  4768. .test = alg_test_hash,
  4769. .fips_allowed = 1,
  4770. .suite = {
  4771. .hash = __VECS(hmac_sha1_tv_template)
  4772. }
  4773. }, {
  4774. .alg = "hmac(sha224)",
  4775. .test = alg_test_hash,
  4776. .fips_allowed = 1,
  4777. .suite = {
  4778. .hash = __VECS(hmac_sha224_tv_template)
  4779. }
  4780. }, {
  4781. .alg = "hmac(sha256)",
  4782. .test = alg_test_hash,
  4783. .fips_allowed = 1,
  4784. .suite = {
  4785. .hash = __VECS(hmac_sha256_tv_template)
  4786. }
  4787. }, {
  4788. .alg = "hmac(sha3-224)",
  4789. .test = alg_test_hash,
  4790. .fips_allowed = 1,
  4791. .suite = {
  4792. .hash = __VECS(hmac_sha3_224_tv_template)
  4793. }
  4794. }, {
  4795. .alg = "hmac(sha3-256)",
  4796. .test = alg_test_hash,
  4797. .fips_allowed = 1,
  4798. .suite = {
  4799. .hash = __VECS(hmac_sha3_256_tv_template)
  4800. }
  4801. }, {
  4802. .alg = "hmac(sha3-384)",
  4803. .test = alg_test_hash,
  4804. .fips_allowed = 1,
  4805. .suite = {
  4806. .hash = __VECS(hmac_sha3_384_tv_template)
  4807. }
  4808. }, {
  4809. .alg = "hmac(sha3-512)",
  4810. .test = alg_test_hash,
  4811. .fips_allowed = 1,
  4812. .suite = {
  4813. .hash = __VECS(hmac_sha3_512_tv_template)
  4814. }
  4815. }, {
  4816. .alg = "hmac(sha384)",
  4817. .test = alg_test_hash,
  4818. .fips_allowed = 1,
  4819. .suite = {
  4820. .hash = __VECS(hmac_sha384_tv_template)
  4821. }
  4822. }, {
  4823. .alg = "hmac(sha512)",
  4824. .test = alg_test_hash,
  4825. .fips_allowed = 1,
  4826. .suite = {
  4827. .hash = __VECS(hmac_sha512_tv_template)
  4828. }
  4829. }, {
  4830. .alg = "hmac(sm3)",
  4831. .test = alg_test_hash,
  4832. .suite = {
  4833. .hash = __VECS(hmac_sm3_tv_template)
  4834. }
  4835. }, {
  4836. .alg = "hmac(streebog256)",
  4837. .test = alg_test_hash,
  4838. .suite = {
  4839. .hash = __VECS(hmac_streebog256_tv_template)
  4840. }
  4841. }, {
  4842. .alg = "hmac(streebog512)",
  4843. .test = alg_test_hash,
  4844. .suite = {
  4845. .hash = __VECS(hmac_streebog512_tv_template)
  4846. }
  4847. }, {
  4848. .alg = "jitterentropy_rng",
  4849. .fips_allowed = 1,
  4850. .test = alg_test_null,
  4851. }, {
  4852. .alg = "kw(aes)",
  4853. .test = alg_test_skcipher,
  4854. .fips_allowed = 1,
  4855. .suite = {
  4856. .cipher = __VECS(aes_kw_tv_template)
  4857. }
  4858. }, {
  4859. .alg = "lrw(aes)",
  4860. .generic_driver = "lrw(ecb(aes-generic))",
  4861. .test = alg_test_skcipher,
  4862. .suite = {
  4863. .cipher = __VECS(aes_lrw_tv_template)
  4864. }
  4865. }, {
  4866. .alg = "lrw(camellia)",
  4867. .generic_driver = "lrw(ecb(camellia-generic))",
  4868. .test = alg_test_skcipher,
  4869. .suite = {
  4870. .cipher = __VECS(camellia_lrw_tv_template)
  4871. }
  4872. }, {
  4873. .alg = "lrw(cast6)",
  4874. .generic_driver = "lrw(ecb(cast6-generic))",
  4875. .test = alg_test_skcipher,
  4876. .suite = {
  4877. .cipher = __VECS(cast6_lrw_tv_template)
  4878. }
  4879. }, {
  4880. .alg = "lrw(serpent)",
  4881. .generic_driver = "lrw(ecb(serpent-generic))",
  4882. .test = alg_test_skcipher,
  4883. .suite = {
  4884. .cipher = __VECS(serpent_lrw_tv_template)
  4885. }
  4886. }, {
  4887. .alg = "lrw(twofish)",
  4888. .generic_driver = "lrw(ecb(twofish-generic))",
  4889. .test = alg_test_skcipher,
  4890. .suite = {
  4891. .cipher = __VECS(tf_lrw_tv_template)
  4892. }
  4893. }, {
  4894. .alg = "lz4",
  4895. .test = alg_test_comp,
  4896. .fips_allowed = 1,
  4897. .suite = {
  4898. .comp = {
  4899. .comp = __VECS(lz4_comp_tv_template),
  4900. .decomp = __VECS(lz4_decomp_tv_template)
  4901. }
  4902. }
  4903. }, {
  4904. .alg = "lz4hc",
  4905. .test = alg_test_comp,
  4906. .fips_allowed = 1,
  4907. .suite = {
  4908. .comp = {
  4909. .comp = __VECS(lz4hc_comp_tv_template),
  4910. .decomp = __VECS(lz4hc_decomp_tv_template)
  4911. }
  4912. }
  4913. }, {
  4914. .alg = "lzo",
  4915. .test = alg_test_comp,
  4916. .fips_allowed = 1,
  4917. .suite = {
  4918. .comp = {
  4919. .comp = __VECS(lzo_comp_tv_template),
  4920. .decomp = __VECS(lzo_decomp_tv_template)
  4921. }
  4922. }
  4923. }, {
  4924. .alg = "lzo-rle",
  4925. .test = alg_test_comp,
  4926. .fips_allowed = 1,
  4927. .suite = {
  4928. .comp = {
  4929. .comp = __VECS(lzorle_comp_tv_template),
  4930. .decomp = __VECS(lzorle_decomp_tv_template)
  4931. }
  4932. }
  4933. }, {
  4934. .alg = "md4",
  4935. .test = alg_test_hash,
  4936. .suite = {
  4937. .hash = __VECS(md4_tv_template)
  4938. }
  4939. }, {
  4940. .alg = "md5",
  4941. .test = alg_test_hash,
  4942. .suite = {
  4943. .hash = __VECS(md5_tv_template)
  4944. }
  4945. }, {
  4946. .alg = "michael_mic",
  4947. .test = alg_test_hash,
  4948. .suite = {
  4949. .hash = __VECS(michael_mic_tv_template)
  4950. }
  4951. }, {
  4952. .alg = "nhpoly1305",
  4953. .test = alg_test_hash,
  4954. .suite = {
  4955. .hash = __VECS(nhpoly1305_tv_template)
  4956. }
  4957. }, {
  4958. .alg = "pcbc(fcrypt)",
  4959. .test = alg_test_skcipher,
  4960. .suite = {
  4961. .cipher = __VECS(fcrypt_pcbc_tv_template)
  4962. }
  4963. }, {
  4964. .alg = "pkcs1pad(rsa,sha224)",
  4965. .test = alg_test_null,
  4966. .fips_allowed = 1,
  4967. }, {
  4968. .alg = "pkcs1pad(rsa,sha256)",
  4969. .test = alg_test_akcipher,
  4970. .fips_allowed = 1,
  4971. .suite = {
  4972. .akcipher = __VECS(pkcs1pad_rsa_tv_template)
  4973. }
  4974. }, {
  4975. .alg = "pkcs1pad(rsa,sha3-256)",
  4976. .test = alg_test_null,
  4977. .fips_allowed = 1,
  4978. }, {
  4979. .alg = "pkcs1pad(rsa,sha3-384)",
  4980. .test = alg_test_null,
  4981. .fips_allowed = 1,
  4982. }, {
  4983. .alg = "pkcs1pad(rsa,sha3-512)",
  4984. .test = alg_test_null,
  4985. .fips_allowed = 1,
  4986. }, {
  4987. .alg = "pkcs1pad(rsa,sha384)",
  4988. .test = alg_test_null,
  4989. .fips_allowed = 1,
  4990. }, {
  4991. .alg = "pkcs1pad(rsa,sha512)",
  4992. .test = alg_test_null,
  4993. .fips_allowed = 1,
  4994. }, {
  4995. .alg = "poly1305",
  4996. .test = alg_test_hash,
  4997. .suite = {
  4998. .hash = __VECS(poly1305_tv_template)
  4999. }
  5000. }, {
  5001. .alg = "polyval",
  5002. .test = alg_test_hash,
  5003. .suite = {
  5004. .hash = __VECS(polyval_tv_template)
  5005. }
  5006. }, {
  5007. .alg = "rfc3686(ctr(aes))",
  5008. .test = alg_test_skcipher,
  5009. .fips_allowed = 1,
  5010. .suite = {
  5011. .cipher = __VECS(aes_ctr_rfc3686_tv_template)
  5012. }
  5013. }, {
  5014. .alg = "rfc3686(ctr(sm4))",
  5015. .test = alg_test_skcipher,
  5016. .suite = {
  5017. .cipher = __VECS(sm4_ctr_rfc3686_tv_template)
  5018. }
  5019. }, {
  5020. .alg = "rfc4106(gcm(aes))",
  5021. .generic_driver = "rfc4106(gcm_base(ctr(aes-generic),ghash-generic))",
  5022. .test = alg_test_aead,
  5023. .fips_allowed = 1,
  5024. .suite = {
  5025. .aead = {
  5026. ____VECS(aes_gcm_rfc4106_tv_template),
  5027. .einval_allowed = 1,
  5028. .aad_iv = 1,
  5029. }
  5030. }
  5031. }, {
  5032. .alg = "rfc4309(ccm(aes))",
  5033. .generic_driver = "rfc4309(ccm_base(ctr(aes-generic),cbcmac(aes-generic)))",
  5034. .test = alg_test_aead,
  5035. .fips_allowed = 1,
  5036. .suite = {
  5037. .aead = {
  5038. ____VECS(aes_ccm_rfc4309_tv_template),
  5039. .einval_allowed = 1,
  5040. .aad_iv = 1,
  5041. }
  5042. }
  5043. }, {
  5044. .alg = "rfc4543(gcm(aes))",
  5045. .generic_driver = "rfc4543(gcm_base(ctr(aes-generic),ghash-generic))",
  5046. .test = alg_test_aead,
  5047. .suite = {
  5048. .aead = {
  5049. ____VECS(aes_gcm_rfc4543_tv_template),
  5050. .einval_allowed = 1,
  5051. .aad_iv = 1,
  5052. }
  5053. }
  5054. }, {
  5055. .alg = "rfc7539(chacha20,poly1305)",
  5056. .test = alg_test_aead,
  5057. .suite = {
  5058. .aead = __VECS(rfc7539_tv_template)
  5059. }
  5060. }, {
  5061. .alg = "rfc7539esp(chacha20,poly1305)",
  5062. .test = alg_test_aead,
  5063. .suite = {
  5064. .aead = {
  5065. ____VECS(rfc7539esp_tv_template),
  5066. .einval_allowed = 1,
  5067. .aad_iv = 1,
  5068. }
  5069. }
  5070. }, {
  5071. .alg = "rmd160",
  5072. .test = alg_test_hash,
  5073. .suite = {
  5074. .hash = __VECS(rmd160_tv_template)
  5075. }
  5076. }, {
  5077. .alg = "rsa",
  5078. .test = alg_test_akcipher,
  5079. .fips_allowed = 1,
  5080. .suite = {
  5081. .akcipher = __VECS(rsa_tv_template)
  5082. }
  5083. }, {
  5084. .alg = "sha1",
  5085. .test = alg_test_hash,
  5086. .fips_allowed = 1,
  5087. .suite = {
  5088. .hash = __VECS(sha1_tv_template)
  5089. }
  5090. }, {
  5091. .alg = "sha224",
  5092. .test = alg_test_hash,
  5093. .fips_allowed = 1,
  5094. .suite = {
  5095. .hash = __VECS(sha224_tv_template)
  5096. }
  5097. }, {
  5098. .alg = "sha256",
  5099. .test = alg_test_hash,
  5100. .fips_allowed = 1,
  5101. .suite = {
  5102. .hash = __VECS(sha256_tv_template)
  5103. }
  5104. }, {
  5105. .alg = "sha3-224",
  5106. .test = alg_test_hash,
  5107. .fips_allowed = 1,
  5108. .suite = {
  5109. .hash = __VECS(sha3_224_tv_template)
  5110. }
  5111. }, {
  5112. .alg = "sha3-256",
  5113. .test = alg_test_hash,
  5114. .fips_allowed = 1,
  5115. .suite = {
  5116. .hash = __VECS(sha3_256_tv_template)
  5117. }
  5118. }, {
  5119. .alg = "sha3-384",
  5120. .test = alg_test_hash,
  5121. .fips_allowed = 1,
  5122. .suite = {
  5123. .hash = __VECS(sha3_384_tv_template)
  5124. }
  5125. }, {
  5126. .alg = "sha3-512",
  5127. .test = alg_test_hash,
  5128. .fips_allowed = 1,
  5129. .suite = {
  5130. .hash = __VECS(sha3_512_tv_template)
  5131. }
  5132. }, {
  5133. .alg = "sha384",
  5134. .test = alg_test_hash,
  5135. .fips_allowed = 1,
  5136. .suite = {
  5137. .hash = __VECS(sha384_tv_template)
  5138. }
  5139. }, {
  5140. .alg = "sha512",
  5141. .test = alg_test_hash,
  5142. .fips_allowed = 1,
  5143. .suite = {
  5144. .hash = __VECS(sha512_tv_template)
  5145. }
  5146. }, {
  5147. .alg = "sm3",
  5148. .test = alg_test_hash,
  5149. .suite = {
  5150. .hash = __VECS(sm3_tv_template)
  5151. }
  5152. }, {
  5153. .alg = "streebog256",
  5154. .test = alg_test_hash,
  5155. .suite = {
  5156. .hash = __VECS(streebog256_tv_template)
  5157. }
  5158. }, {
  5159. .alg = "streebog512",
  5160. .test = alg_test_hash,
  5161. .suite = {
  5162. .hash = __VECS(streebog512_tv_template)
  5163. }
  5164. }, {
  5165. .alg = "vmac64(aes)",
  5166. .test = alg_test_hash,
  5167. .suite = {
  5168. .hash = __VECS(vmac64_aes_tv_template)
  5169. }
  5170. }, {
  5171. .alg = "wp256",
  5172. .test = alg_test_hash,
  5173. .suite = {
  5174. .hash = __VECS(wp256_tv_template)
  5175. }
  5176. }, {
  5177. .alg = "wp384",
  5178. .test = alg_test_hash,
  5179. .suite = {
  5180. .hash = __VECS(wp384_tv_template)
  5181. }
  5182. }, {
  5183. .alg = "wp512",
  5184. .test = alg_test_hash,
  5185. .suite = {
  5186. .hash = __VECS(wp512_tv_template)
  5187. }
  5188. }, {
  5189. .alg = "xcbc(aes)",
  5190. .test = alg_test_hash,
  5191. .suite = {
  5192. .hash = __VECS(aes_xcbc128_tv_template)
  5193. }
  5194. }, {
  5195. .alg = "xcbc(sm4)",
  5196. .test = alg_test_hash,
  5197. .suite = {
  5198. .hash = __VECS(sm4_xcbc128_tv_template)
  5199. }
  5200. }, {
  5201. .alg = "xchacha12",
  5202. .test = alg_test_skcipher,
  5203. .suite = {
  5204. .cipher = __VECS(xchacha12_tv_template)
  5205. },
  5206. }, {
  5207. .alg = "xchacha20",
  5208. .test = alg_test_skcipher,
  5209. .suite = {
  5210. .cipher = __VECS(xchacha20_tv_template)
  5211. },
  5212. }, {
  5213. .alg = "xctr(aes)",
  5214. .test = alg_test_skcipher,
  5215. .suite = {
  5216. .cipher = __VECS(aes_xctr_tv_template)
  5217. }
  5218. }, {
  5219. .alg = "xts(aes)",
  5220. .generic_driver = "xts(ecb(aes-generic))",
  5221. .test = alg_test_skcipher,
  5222. .fips_allowed = 1,
  5223. .suite = {
  5224. .cipher = __VECS(aes_xts_tv_template)
  5225. }
  5226. }, {
  5227. .alg = "xts(camellia)",
  5228. .generic_driver = "xts(ecb(camellia-generic))",
  5229. .test = alg_test_skcipher,
  5230. .suite = {
  5231. .cipher = __VECS(camellia_xts_tv_template)
  5232. }
  5233. }, {
  5234. .alg = "xts(cast6)",
  5235. .generic_driver = "xts(ecb(cast6-generic))",
  5236. .test = alg_test_skcipher,
  5237. .suite = {
  5238. .cipher = __VECS(cast6_xts_tv_template)
  5239. }
  5240. }, {
  5241. /* Same as xts(aes) except the key is stored in
  5242. * hardware secure memory which we reference by index
  5243. */
  5244. .alg = "xts(paes)",
  5245. .test = alg_test_null,
  5246. .fips_allowed = 1,
  5247. }, {
  5248. .alg = "xts(serpent)",
  5249. .generic_driver = "xts(ecb(serpent-generic))",
  5250. .test = alg_test_skcipher,
  5251. .suite = {
  5252. .cipher = __VECS(serpent_xts_tv_template)
  5253. }
  5254. }, {
  5255. .alg = "xts(sm4)",
  5256. .generic_driver = "xts(ecb(sm4-generic))",
  5257. .test = alg_test_skcipher,
  5258. .suite = {
  5259. .cipher = __VECS(sm4_xts_tv_template)
  5260. }
  5261. }, {
  5262. .alg = "xts(twofish)",
  5263. .generic_driver = "xts(ecb(twofish-generic))",
  5264. .test = alg_test_skcipher,
  5265. .suite = {
  5266. .cipher = __VECS(tf_xts_tv_template)
  5267. }
  5268. }, {
  5269. #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
  5270. .alg = "xts-paes-s390",
  5271. .fips_allowed = 1,
  5272. .test = alg_test_skcipher,
  5273. .suite = {
  5274. .cipher = __VECS(aes_xts_tv_template)
  5275. }
  5276. }, {
  5277. #endif
  5278. .alg = "xxhash64",
  5279. .test = alg_test_hash,
  5280. .fips_allowed = 1,
  5281. .suite = {
  5282. .hash = __VECS(xxhash64_tv_template)
  5283. }
  5284. }, {
  5285. .alg = "zstd",
  5286. .test = alg_test_comp,
  5287. .fips_allowed = 1,
  5288. .suite = {
  5289. .comp = {
  5290. .comp = __VECS(zstd_comp_tv_template),
  5291. .decomp = __VECS(zstd_decomp_tv_template)
  5292. }
  5293. }
  5294. }
  5295. };
  5296. static void alg_check_test_descs_order(void)
  5297. {
  5298. int i;
  5299. for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
  5300. int diff = strcmp(alg_test_descs[i - 1].alg,
  5301. alg_test_descs[i].alg);
  5302. if (WARN_ON(diff > 0)) {
  5303. pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
  5304. alg_test_descs[i - 1].alg,
  5305. alg_test_descs[i].alg);
  5306. }
  5307. if (WARN_ON(diff == 0)) {
  5308. pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
  5309. alg_test_descs[i].alg);
  5310. }
  5311. }
  5312. }
  5313. static void alg_check_testvec_configs(void)
  5314. {
  5315. int i;
  5316. for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
  5317. WARN_ON(!valid_testvec_config(
  5318. &default_cipher_testvec_configs[i]));
  5319. for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
  5320. WARN_ON(!valid_testvec_config(
  5321. &default_hash_testvec_configs[i]));
  5322. }
/*
 * One-time consistency checks of the test manager's static tables.
 * Invoked via DO_ONCE() from alg_test() on the first self-test run.
 */
static void testmgr_onetime_init(void)
{
	alg_check_test_descs_order();
	alg_check_testvec_configs();
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	pr_warn("alg: extra crypto tests enabled. This is intended for developer use only.\n");
#endif
}
  5331. static int alg_find_test(const char *alg)
  5332. {
  5333. int start = 0;
  5334. int end = ARRAY_SIZE(alg_test_descs);
  5335. while (start < end) {
  5336. int i = (start + end) / 2;
  5337. int diff = strcmp(alg_test_descs[i].alg, alg);
  5338. if (diff > 0) {
  5339. end = i;
  5340. continue;
  5341. }
  5342. if (diff < 0) {
  5343. start = i + 1;
  5344. continue;
  5345. }
  5346. return i;
  5347. }
  5348. return -1;
  5349. }
/*
 * Log that @alg (as implemented by @driver) is unavailable because FIPS
 * mode is enabled and the algorithm is not FIPS-approved, and return
 * -ECANCELED so registration of the algorithm is rejected.
 */
static int alg_fips_disabled(const char *driver, const char *alg)
{
	pr_info("alg: %s (%s) is disabled due to FIPS\n", alg, driver);
	return -ECANCELED;
}
  5355. int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  5356. {
  5357. int i;
  5358. int j;
  5359. int rc;
  5360. if (!fips_enabled && notests) {
  5361. printk_once(KERN_INFO "alg: self-tests disabled\n");
  5362. return 0;
  5363. }
  5364. DO_ONCE(testmgr_onetime_init);
  5365. if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
  5366. char nalg[CRYPTO_MAX_ALG_NAME];
  5367. if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
  5368. sizeof(nalg))
  5369. return -ENAMETOOLONG;
  5370. i = alg_find_test(nalg);
  5371. if (i < 0)
  5372. goto notest;
  5373. if (fips_enabled && !alg_test_descs[i].fips_allowed)
  5374. goto non_fips_alg;
  5375. rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
  5376. goto test_done;
  5377. }
  5378. i = alg_find_test(alg);
  5379. j = alg_find_test(driver);
  5380. if (i < 0 && j < 0)
  5381. goto notest;
  5382. if (fips_enabled) {
  5383. if (j >= 0 && !alg_test_descs[j].fips_allowed)
  5384. return -EINVAL;
  5385. if (i >= 0 && !alg_test_descs[i].fips_allowed)
  5386. goto non_fips_alg;
  5387. }
  5388. rc = 0;
  5389. if (i >= 0)
  5390. rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
  5391. type, mask);
  5392. if (j >= 0 && j != i)
  5393. rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
  5394. type, mask);
  5395. test_done:
  5396. if (rc) {
  5397. if (fips_enabled || panic_on_fail) {
  5398. fips_fail_notify();
  5399. panic("alg: self-tests for %s (%s) failed in %s mode!\n",
  5400. driver, alg,
  5401. fips_enabled ? "fips" : "panic_on_fail");
  5402. }
  5403. pr_warn("alg: self-tests for %s using %s failed (rc=%d)",
  5404. alg, driver, rc);
  5405. WARN(rc != -ENOENT,
  5406. "alg: self-tests for %s using %s failed (rc=%d)",
  5407. alg, driver, rc);
  5408. } else {
  5409. if (fips_enabled)
  5410. pr_info("alg: self-tests for %s (%s) passed\n",
  5411. driver, alg);
  5412. }
  5413. return rc;
  5414. notest:
  5415. if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_LSKCIPHER) {
  5416. char nalg[CRYPTO_MAX_ALG_NAME];
  5417. if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
  5418. sizeof(nalg))
  5419. goto notest2;
  5420. i = alg_find_test(nalg);
  5421. if (i < 0)
  5422. goto notest2;
  5423. if (fips_enabled && !alg_test_descs[i].fips_allowed)
  5424. goto non_fips_alg;
  5425. rc = alg_test_skcipher(alg_test_descs + i, driver, type, mask);
  5426. goto test_done;
  5427. }
  5428. notest2:
  5429. printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
  5430. if (type & CRYPTO_ALG_FIPS_INTERNAL)
  5431. return alg_fips_disabled(driver, alg);
  5432. return 0;
  5433. non_fips_alg:
  5434. return alg_fips_disabled(driver, alg);
  5435. }
  5436. #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
  5437. EXPORT_SYMBOL_GPL(alg_test);