// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

/*
 * Check whether an shash algorithm has a setkey function.
 *
 * For CFI compatibility, this must not be an inline function.  This is because
 * when CFI is enabled, modules won't get the same address for shash_no_setkey
 * (if it were exported, which inlining would require) as the core kernel will.
 */
bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
{
	return alg->setkey != shash_no_setkey;
}
EXPORT_SYMBOL_GPL(crypto_shash_alg_has_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
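
/*
 * Example (illustrative sketch, not part of this file): keying a transform
 * for a keyed hash such as "hmac(sha256)".  The key below is a hypothetical
 * placeholder.  Callers must check the return value before hashing: a failed
 * setkey leaves CRYPTO_TFM_NEED_KEY set, so later digest calls fail with
 * -ENOKEY.
 *
 *	struct crypto_shash *tfm;
 *	static const u8 key[32] = { 0x01, 0x02 };	// example key only
 *	int err;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_setkey(tfm, key, sizeof(key));
 *	if (err) {
 *		crypto_free_shash(tfm);
 *		return err;
 *	}
 */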

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
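
/*
 * Example (illustrative sketch, not part of this file): hashing data in
 * pieces with init/update/final.  SHASH_DESC_ON_STACK sizes the descriptor
 * for the given tfm; the buffers, lengths, and digest size here are
 * hypothetical (SHA256_DIGEST_SIZE comes from <crypto/sha2.h>).
 *
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	desc->tfm = tfm;
 *	err = crypto_shash_init(desc);
 *	if (!err)
 *		err = crypto_shash_update(desc, buf1, len1);
 *	if (!err)
 *		err = crypto_shash_update(desc, buf2, len2);
 *	if (!err)
 *		err = crypto_shash_final(desc, digest);
 *	shash_desc_zero(desc);
 */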

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
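
/*
 * Example (illustrative sketch, not part of this file): one-shot hashing of
 * a contiguous buffer.  crypto_shash_tfm_digest() manages the on-stack
 * descriptor internally, so callers only need the tfm; "data" and "len" are
 * hypothetical.
 *
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *	if (err)
 *		return err;
 */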

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;

	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
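
/*
 * Example (illustrative sketch, not part of this file): allocating and
 * releasing an unkeyed transform.  "sha256" must be provided by a registered
 * algorithm; crypto_alloc_shash() returns an ERR_PTR on failure.
 *
 *	struct crypto_shash *tfm;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	... hash with the helpers above ...
 *	crypto_free_shash(tfm);
 */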

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
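
/*
 * Example (illustrative sketch, not part of this file): the general shape of
 * a driver-side registration.  Every op, size, and name below is a
 * hypothetical placeholder, not a real algorithm; shash_prepare_alg() fills
 * in default finup/digest/export/import/setkey ops that are left NULL.
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 32,
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.descsize	= sizeof(struct example_desc_ctx),
 *		.base		= {
 *			.cra_name	 = "example",
 *			.cra_driver_name = "example-generic",
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&example_alg);
 */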

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");