algapi.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
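
/*
 * Illustrative example (an assumption for exposition, not taken from this
 * file): spawns are the dependency edges between a template instance and
 * the algorithms it was built on.  If "cbc(aes)" was instantiated on top
 * of the "aes-generic" driver, then aes-generic's cra_users list holds the
 * cbc instance's spawn, so unregistering aes-generic walks that list and
 * tears down the "cbc(aes)" instance as well, since it can no longer
 * service requests.
 */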

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);
	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
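
/*
 * Flow summary (editorial gloss on the two functions above, not original
 * kernel text): a "larval" is a temporary placeholder that
 * __crypto_register_alg() publishes under the algorithm's generic name
 * while the self-tests run; concurrent lookups block on the larval's
 * completion.  crypto_alg_tested() then locates the test larval by driver
 * name, marks it dead, hands the tested adult algorithm to any waiting
 * request larvals, and completes the test larval so that
 * crypto_wait_for_test() below can return.
 */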

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
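
/*
 * Usage sketch (illustrative; "example_alg" and its fields are hypothetical
 * names, not part of this file): a driver typically fills in a static
 * struct crypto_alg and registers it from module_init, e.g.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 1,
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 * Note that crypto_register_alg() blocks in crypto_wait_for_test() until
 * the self-test result arrives, so it must be called from sleepable
 * context.
 */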

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
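
/*
 * Lifecycle note (a summary of the code above, not original kernel text):
 * a successful crypto_grab_spawn() leaves the spawn holding a module
 * reference on the target algorithm and linked into its cra_users list.
 * That reference is dropped by crypto_register_instance() once it sets
 * spawn->registered; until then, crypto_drop_spawn() below is responsible
 * for releasing it.
 */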

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
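
/*
 * Usage sketch (illustrative, not part of this file): a template's
 * ->create() callback typically begins with
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *
 * and then passes the computed mask to the appropriate crypto_grab_*()
 * helper so the inner algorithm's inherited flags stay compatible with
 * what the user requested.
 */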

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
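
/*
 * Note (illustrative): because crypto_grab_spawn() above accepts an
 * ERR_PTR in place of the name, template code can chain the two calls
 * without a separate error check, e.g.
 *
 *	err = crypto_grab_skcipher(&ctx->spawn,
 *				   skcipher_crypto_instance(inst),
 *				   crypto_attr_alg_name(tb[1]), 0, mask);
 *
 * where "ctx" and "inst" stand in for the template's own instance
 * context (hypothetical names here).
 */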

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
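
/*
 * Example (derived from the format strings above): for the template name
 * "cbc" and an inner algorithm with cra_name "aes" and cra_driver_name
 * "aes-generic", this sets the instance's cra_name to "cbc(aes)" and its
 * cra_driver_name to "cbc(aes-generic)".
 */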

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
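
/*
 * Return-value summary for crypto_enqueue_request(), as implemented
 * above: -EINPROGRESS means the request was queued normally; -EBUSY
 * means the queue was full but the request was still accepted, onto the
 * backlog, because CRYPTO_TFM_REQ_MAY_BACKLOG was set; -ENOSPC means
 * the queue was full and the request was rejected outright.
 */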

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
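
/*
 * Example (derived from the code above): crypto_inc() treats the buffer
 * as one big-endian integer, as used for CTR-mode counter blocks.  A
 * 16-byte counter of all 0xff bytes wraps to all zeroes; otherwise the
 * carry propagation stops at the first 32-bit word that does not
 * overflow, so the common case touches only the last word.
 */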

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment.  This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
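
/*
 * Note (assumption about the companion header, not this file): most
 * callers use the crypto_xor() and crypto_xor_cpy() inline helpers from
 * <crypto/algapi.h>, which take a fast path for suitably aligned,
 * constant-size buffers and fall back to __crypto_xor() here otherwise.
 */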

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");