/* crypto/algapi.c */
  1. // SPDX-License-Identifier: GPL-2.0-or-later
  2. /*
  3. * Cryptographic API for algorithms (i.e., low-level API).
  4. *
  5. * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  6. */
  7. #include <crypto/algapi.h>
  8. #include <crypto/internal/simd.h>
  9. #include <linux/err.h>
  10. #include <linux/errno.h>
  11. #include <linux/fips.h>
  12. #include <linux/init.h>
  13. #include <linux/kernel.h>
  14. #include <linux/list.h>
  15. #include <linux/module.h>
  16. #include <linux/rtnetlink.h>
  17. #include <linux/slab.h>
  18. #include <linux/string.h>
  19. #include <linux/workqueue.h>
  20. #include "internal.h"
/* Registered templates (struct crypto_template), protected by crypto_alg_sem. */
static LIST_HEAD(crypto_template_list);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * Per-CPU flag the extra self-tests use to force the generic (non-SIMD)
 * code paths via crypto_simd_usable().
 */
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif
  26. static inline void crypto_check_module_sig(struct module *mod)
  27. {
  28. if (fips_enabled && mod && !module_sig_ok(mod))
  29. panic("Module %s signature verification failed in FIPS mode\n",
  30. module_name(mod));
  31. }
/*
 * Sanity-check an algorithm before registration.  Returns 0 and
 * initializes the refcount to 1 if @alg looks valid, -EINVAL otherwise.
 */
static int crypto_check_alg(struct crypto_alg *alg)
{
	/* In FIPS mode, the providing module must be validly signed. */
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	/* The alignmask must be of the form 2^n - 1. */
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
  57. static void crypto_free_instance(struct crypto_instance *inst)
  58. {
  59. inst->alg.cra_type->free(inst);
  60. }
/*
 * Deferred destructor for template instances; runs in process context.
 * The template reference is dropped only after the instance has been
 * freed, keeping the template (and its module) alive until then.
 */
static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_instance *inst = container_of(w, struct crypto_instance,
						    free_work);
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}
  69. static void crypto_destroy_instance(struct crypto_alg *alg)
  70. {
  71. struct crypto_instance *inst = container_of(alg,
  72. struct crypto_instance,
  73. alg);
  74. INIT_WORK(&inst->free_work, crypto_destroy_instance_workfn);
  75. schedule_work(&inst->free_work);
  76. }
/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	/* Pop the most recently visited spawn off the DFS stack. */
	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	/* Stack exhausted: resume the walk from the top-level list. */
	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	/* A surviving spawn on the path keeps its dependency alive too. */
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}
/*
 * Mark a template instance dead and queue it on @list for final
 * release by crypto_remove_final().  Caller must hold crypto_alg_sem
 * for writing.
 */
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	/*
	 * Take a template reference for crypto_destroy_instance() to
	 * release; bail out if the template is already going away.
	 */
	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}
/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that is depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	/* Collect the direct users whose type constraints still match alg. */
	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			/* Only the branch ending in nalg survives. */
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
/*
 * Final step of making @alg usable: retire same-name algorithms that it
 * supersedes (their doomed instances are queued on @algs_to_put) and
 * notify listeners that the algorithm has been loaded.  Caller must
 * hold crypto_alg_sem for writing.
 */
static void crypto_alg_finish_registration(struct crypto_alg *alg,
					   struct list_head *algs_to_put)
{
	struct crypto_alg *q;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		/* Leave a distinct, higher-priority implementation alone. */
		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, algs_to_put, alg);
	}

	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
}
/*
 * Allocate the larval that tracks the self-test of @alg.  Returns NULL
 * when no self-test is needed (manager absent, tests disabled, or the
 * algorithm is internal-only), or an ERR_PTR() on failure.
 */
static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
	    IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
		return NULL; /* No self-test needed */

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	/* Pin the adult algorithm for the duration of the test. */
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}
/*
 * Insert @alg into crypto_alg_list, rejecting name collisions.  Returns
 * the test larval that will track the self-test, NULL when no test is
 * needed, or an ERR_PTR() on failure.  Algorithms displaced by @alg are
 * queued on @algs_to_put.  Caller must hold crypto_alg_sem for writing.
 */
static struct crypto_larval *
__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			/* Same driver name already being registered. */
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		/* Reject any cra_name/cra_driver_name collision. */
		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval) {
		/* No cheating! */
		alg->cra_flags &= ~CRYPTO_ALG_TESTED;

		list_add(&larval->alg.cra_list, &crypto_alg_list);
	} else {
		alg->cra_flags |= CRYPTO_ALG_TESTED;
		crypto_alg_finish_registration(alg, algs_to_put);
	}

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}
/*
 * Called when the self-test for driver @name finishes with status @err.
 * On success the adult algorithm is marked tested and activated; on
 * failure it stays unusable.  -ECANCELED marks the algorithm as
 * FIPS-internal instead of failing it.  The test larval is completed so
 * waiters wake up, and displaced algorithms are released.
 */
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	up_write(&crypto_alg_sem);
	return;

found:
	/* Retire the larval now that the test is done. */
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (crypto_is_dead(alg))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_alg_finish_registration(alg, &list);

complete:
	list_del_init(&test->alg.cra_list);
	complete_all(&test->completion);

	up_write(&crypto_alg_sem);

	crypto_alg_put(&test->alg);
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
  315. void crypto_remove_final(struct list_head *list)
  316. {
  317. struct crypto_alg *alg;
  318. struct crypto_alg *n;
  319. list_for_each_entry_safe(alg, n, list, cra_list) {
  320. list_del_init(&alg->cra_list);
  321. crypto_alg_put(alg);
  322. }
  323. }
  324. EXPORT_SYMBOL_GPL(crypto_remove_final);
/**
 * crypto_register_alg - register a low-level algorithm
 * @alg: the algorithm descriptor to register
 *
 * Validates @alg, links it into the global algorithm list and, when
 * self-tests are enabled, schedules its test larval.
 *
 * Return: 0 on success; -errno on failure.
 */
int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	LIST_HEAD(algs_to_put);
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg, &algs_to_put);
	if (!IS_ERR_OR_NULL(larval)) {
		/* During early boot, defer the test until the manager is up. */
		bool test_started = crypto_boot_test_finished();

		larval->test_started = test_started;
		if (test_started)
			crypto_schedule_test(larval);
	}
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
  349. static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
  350. {
  351. if (unlikely(list_empty(&alg->cra_list)))
  352. return -ENOENT;
  353. alg->cra_flags |= CRYPTO_ALG_DEAD;
  354. list_del_init(&alg->cra_list);
  355. crypto_remove_spawns(alg, list, NULL);
  356. return 0;
  357. }
/**
 * crypto_unregister_alg - remove a previously registered algorithm
 * @alg: the algorithm to remove
 *
 * Warns and bails out if @alg was never registered or still has
 * outstanding references; otherwise runs its destructor (if any) and
 * releases dependent instances.
 */
void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	/* Unregistering with live users is a caller bug. */
	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
		return;

	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
  374. int crypto_register_algs(struct crypto_alg *algs, int count)
  375. {
  376. int i, ret;
  377. for (i = 0; i < count; i++) {
  378. ret = crypto_register_alg(&algs[i]);
  379. if (ret)
  380. goto err;
  381. }
  382. return 0;
  383. err:
  384. for (--i; i >= 0; --i)
  385. crypto_unregister_alg(&algs[i]);
  386. return ret;
  387. }
  388. EXPORT_SYMBOL_GPL(crypto_register_algs);
  389. void crypto_unregister_algs(struct crypto_alg *algs, int count)
  390. {
  391. int i;
  392. for (i = 0; i < count; i++)
  393. crypto_unregister_alg(&algs[i]);
  394. }
  395. EXPORT_SYMBOL_GPL(crypto_unregister_algs);
  396. int crypto_register_template(struct crypto_template *tmpl)
  397. {
  398. struct crypto_template *q;
  399. int err = -EEXIST;
  400. down_write(&crypto_alg_sem);
  401. crypto_check_module_sig(tmpl->module);
  402. list_for_each_entry(q, &crypto_template_list, list) {
  403. if (q == tmpl)
  404. goto out;
  405. }
  406. list_add(&tmpl->list, &crypto_template_list);
  407. err = 0;
  408. out:
  409. up_write(&crypto_alg_sem);
  410. return err;
  411. }
  412. EXPORT_SYMBOL_GPL(crypto_register_template);
  413. int crypto_register_templates(struct crypto_template *tmpls, int count)
  414. {
  415. int i, err;
  416. for (i = 0; i < count; i++) {
  417. err = crypto_register_template(&tmpls[i]);
  418. if (err)
  419. goto out;
  420. }
  421. return 0;
  422. out:
  423. for (--i; i >= 0; --i)
  424. crypto_unregister_template(&tmpls[i]);
  425. return err;
  426. }
  427. EXPORT_SYMBOL_GPL(crypto_register_templates);
/**
 * crypto_unregister_template - remove a template and all its instances
 * @tmpl: the template to remove
 *
 * Every instance created from @tmpl is unlinked under the lock, then
 * freed outside it; each must be unreferenced at this point.
 */
void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	/* Free the instances outside the lock; they must be unused now. */
	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);
  450. void crypto_unregister_templates(struct crypto_template *tmpls, int count)
  451. {
  452. int i;
  453. for (i = count - 1; i >= 0; --i)
  454. crypto_unregister_template(&tmpls[i]);
  455. }
  456. EXPORT_SYMBOL_GPL(crypto_unregister_templates);
  457. static struct crypto_template *__crypto_lookup_template(const char *name)
  458. {
  459. struct crypto_template *q, *tmpl = NULL;
  460. down_read(&crypto_alg_sem);
  461. list_for_each_entry(q, &crypto_template_list, list) {
  462. if (strcmp(q->name, name))
  463. continue;
  464. if (unlikely(!crypto_tmpl_get(q)))
  465. continue;
  466. tmpl = q;
  467. break;
  468. }
  469. up_read(&crypto_alg_sem);
  470. return tmpl;
  471. }
/**
 * crypto_lookup_template - find a template, loading its module if needed
 * @name: name of the template
 *
 * Tries the registered list first; on a miss, requests the
 * "crypto-%name" module and retries.  Returns a referenced template or
 * NULL.
 */
struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
/**
 * crypto_register_instance - register a template instance
 * @tmpl: the template the instance was created from
 * @inst: the instance to register
 *
 * Finalizes the instance's spawns, registers the embedded algorithm and
 * links the instance into @tmpl's instance list.
 *
 * Return: 0 on success; -errno on failure.
 */
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	LIST_HEAD(algs_to_put);
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		/* A dead spawn means the underlying algorithm went away. */
		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		/* The instance now pins the alg; drop the grab-time ref. */
		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	/* An instance built on FIPS-internal parts is itself internal. */
	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval) {
		larval->test_started = true;
		crypto_schedule_test(larval);
	}

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);
/**
 * crypto_unregister_instance - remove a template instance
 * @inst: the instance to remove
 *
 * Tears down all users of the instance and then the instance itself.
 */
void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
/**
 * crypto_grab_spawn - look up an algorithm and link @inst to it
 * @spawn: the spawn (embedded in @inst) that will hold the reference
 * @inst: the instance that will use the algorithm
 * @name: algorithm name; may be an ERR_PTR from crypto_attr_alg_name(),
 *	  whose error is then returned directly
 * @type: required algorithm type flags
 * @mask: mask selecting which type flags are significant
 *
 * Return: 0 on success; -EAGAIN if the algorithm is moribund; other
 * -errno if the lookup fails.
 */
int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	/* Include FIPS-internal algorithms in the lookup. */
	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		/* Propagate inheritable flags to the instance. */
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);

	if (err)
		crypto_mod_put(alg);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
/**
 * crypto_drop_spawn - release a spawn set up by crypto_grab_spawn()
 * @spawn: the spawn to release
 *
 * Safe to call on a zero-initialized spawn that was never grabbed.
 */
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	/*
	 * After registration the instance owns the algorithm reference
	 * (see crypto_register_instance()); only drop it here otherwise.
	 */
	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);
/*
 * Take a usage reference on the algorithm behind @spawn.  Returns
 * ERR_PTR(-EAGAIN) when the spawn is dead or the algorithm's module
 * cannot be pinned; in the latter case the algorithm is also "shot"
 * (after dropping crypto_alg_sem) so it gets retired.
 */
static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			/* Module going away: keep an alg ref for the shoot. */
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}
  596. struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
  597. u32 mask)
  598. {
  599. struct crypto_alg *alg;
  600. struct crypto_tfm *tfm;
  601. alg = crypto_spawn_alg(spawn);
  602. if (IS_ERR(alg))
  603. return ERR_CAST(alg);
  604. tfm = ERR_PTR(-EINVAL);
  605. if (unlikely((alg->cra_flags ^ type) & mask))
  606. goto out_put_alg;
  607. tfm = __crypto_alloc_tfm(alg, type, mask);
  608. if (IS_ERR(tfm))
  609. goto out_put_alg;
  610. return tfm;
  611. out_put_alg:
  612. crypto_mod_put(alg);
  613. return tfm;
  614. }
  615. EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
  616. void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
  617. {
  618. struct crypto_alg *alg;
  619. struct crypto_tfm *tfm;
  620. alg = crypto_spawn_alg(spawn);
  621. if (IS_ERR(alg))
  622. return ERR_CAST(alg);
  623. tfm = crypto_create_tfm(alg, spawn->frontend);
  624. if (IS_ERR(tfm))
  625. goto out_put_alg;
  626. return tfm;
  627. out_put_alg:
  628. crypto_mod_put(alg);
  629. return tfm;
  630. }
  631. EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
/**
 * crypto_register_notifier - subscribe to crypto_chain notifications
 * @nb: the notifier block to add
 */
int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);
/**
 * crypto_unregister_notifier - unsubscribe from crypto_chain notifications
 * @nb: the notifier block to remove
 */
int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
  642. struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
  643. {
  644. struct rtattr *rta = tb[0];
  645. struct crypto_attr_type *algt;
  646. if (!rta)
  647. return ERR_PTR(-ENOENT);
  648. if (RTA_PAYLOAD(rta) < sizeof(*algt))
  649. return ERR_PTR(-EINVAL);
  650. if (rta->rta_type != CRYPTOA_TYPE)
  651. return ERR_PTR(-EINVAL);
  652. algt = RTA_DATA(rta);
  653. return algt;
  654. }
  655. EXPORT_SYMBOL_GPL(crypto_get_attr_type);
  656. /**
  657. * crypto_check_attr_type() - check algorithm type and compute inherited mask
  658. * @tb: the template parameters
  659. * @type: the algorithm type the template would be instantiated as
  660. * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
  661. * to restrict the flags of any inner algorithms
  662. *
  663. * Validate that the algorithm type the user requested is compatible with the
  664. * one the template would actually be instantiated as. E.g., if the user is
  665. * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
  666. * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
  667. *
  668. * Also compute the mask to use to restrict the flags of any inner algorithms.
  669. *
  670. * Return: 0 on success; -errno on failure
  671. */
  672. int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
  673. {
  674. struct crypto_attr_type *algt;
  675. algt = crypto_get_attr_type(tb);
  676. if (IS_ERR(algt))
  677. return PTR_ERR(algt);
  678. if ((algt->type ^ type) & algt->mask)
  679. return -EINVAL;
  680. *mask_ret = crypto_algt_inherited_mask(algt);
  681. return 0;
  682. }
  683. EXPORT_SYMBOL_GPL(crypto_check_attr_type);
  684. const char *crypto_attr_alg_name(struct rtattr *rta)
  685. {
  686. struct crypto_attr_alg *alga;
  687. if (!rta)
  688. return ERR_PTR(-ENOENT);
  689. if (RTA_PAYLOAD(rta) < sizeof(*alga))
  690. return ERR_PTR(-EINVAL);
  691. if (rta->rta_type != CRYPTOA_ALG)
  692. return ERR_PTR(-EINVAL);
  693. alga = RTA_DATA(rta);
  694. alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
  695. return alga->name;
  696. }
  697. EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
  698. int crypto_inst_setname(struct crypto_instance *inst, const char *name,
  699. struct crypto_alg *alg)
  700. {
  701. if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
  702. alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
  703. return -ENAMETOOLONG;
  704. if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
  705. name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
  706. return -ENAMETOOLONG;
  707. return 0;
  708. }
  709. EXPORT_SYMBOL_GPL(crypto_inst_setname);
  710. void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
  711. {
  712. INIT_LIST_HEAD(&queue->list);
  713. queue->backlog = &queue->list;
  714. queue->qlen = 0;
  715. queue->max_qlen = max_qlen;
  716. }
  717. EXPORT_SYMBOL_GPL(crypto_init_queue);
/**
 * crypto_enqueue_request - add a request to a crypto queue
 * @queue: the queue to add to
 * @request: the request to enqueue
 *
 * Return: -EINPROGRESS normally; -EBUSY if the queue is full but the
 * request allows backlogging (it is queued anyway); -ENOSPC if the
 * queue is full and backlogging is not allowed (request is dropped).
 */
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		/* First backlogged request: record where the backlog starts. */
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);
/**
 * crypto_enqueue_request_head - insert a request at the head of a queue
 * @queue: the queue to add to
 * @request: the request to insert
 *
 * If the queue is already at capacity, the backlog boundary is moved
 * back one entry so it still points at the oldest backlogged request.
 */
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	if (unlikely(queue->qlen >= queue->max_qlen))
		queue->backlog = queue->backlog->prev;

	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
/**
 * crypto_dequeue_request - take the oldest request off a crypto queue
 * @queue: the queue to dequeue from
 *
 * Return: the removed request, or NULL if the queue is empty.
 */
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	/* Removing an entry promotes the next backlogged request. */
	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
  759. static inline void crypto_inc_byte(u8 *a, unsigned int size)
  760. {
  761. u8 *b = (a + size);
  762. u8 c;
  763. for (; size; size--) {
  764. c = *--b + 1;
  765. *b = c;
  766. if (c)
  767. break;
  768. }
  769. }
/*
 * Increment a big-endian counter of @size bytes at @a by one.  Works a
 * 32-bit word at a time when the buffer is suitably aligned (or the CPU
 * handles unaligned accesses efficiently), falling back to the
 * byte-wise loop for the remaining bytes.
 */
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			/* No carry out of this word: done. */
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
/*
 * Context size for a tfm of @alg, padded with the extra bytes needed to
 * realign the context to cra_alignmask beyond the default context
 * alignment.
 */
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
  791. int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
  792. u32 type, u32 mask)
  793. {
  794. int ret = 0;
  795. struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
  796. if (!IS_ERR(alg)) {
  797. crypto_mod_put(alg);
  798. ret = 1;
  799. }
  800. return ret;
  801. }
  802. EXPORT_SYMBOL_GPL(crypto_type_has_alg);
/*
 * Start the self-tests that were deferred during early boot.
 * Algorithms registered before this point have test larvals with
 * test_started == false; schedule them one at a time (rescanning the
 * list each round, since it may change) until none remain.
 */
static void __init crypto_start_tests(void)
{
	if (!IS_BUILTIN(CONFIG_CRYPTO_ALGAPI))
		return;

	if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
		return;

	/* From now on, crypto_register_alg() schedules tests directly. */
	set_crypto_boot_test_finished();

	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			crypto_schedule_test(larval);
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;
	}
}
/* Module init: set up the proc interface and run deferred self-tests. */
static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}
/* Module exit: tear down the proc interface. */
static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}
/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");