authencesn.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
 *                derived from authenc.c
 *
 * Copyright (C) 2010 secunet Security Networks AG
 * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct authenc_esn_instance_ctx {
	struct crypto_ahash_spawn auth;
	struct crypto_skcipher_spawn enc;
};

struct crypto_authenc_esn_ctx {
	unsigned int reqoff;
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
	struct crypto_sync_skcipher *null;
};

struct authenc_esn_request_ctx {
	struct scatterlist src[2];
	struct scatterlist dst[2];
	char tail[];
};
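
/*
 * Complete the parent AEAD request unless the sub-request is still in
 * flight (-EINPROGRESS), in which case a later callback reports it.
 */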
static void authenc_esn_request_complete(struct aead_request *req, int err)
{
	if (err != -EINPROGRESS)
		aead_request_complete(req, err);
}
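
/*
 * Only an ICV of at least 4 bytes (or 0, i.e. authentication disabled)
 * is accepted.
 */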
static int crypto_authenc_esn_setauthsize(struct crypto_aead *authenc_esn,
					  unsigned int authsize)
{
	if (authsize > 0 && authsize < 4)
		return -EINVAL;

	return 0;
}
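
/*
 * Split the combined authenc key blob into its authentication and
 * encryption halves and program the underlying ahash and skcipher
 * transforms, propagating the AEAD's request flags to both.
 */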
static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
				     unsigned int keylen)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	struct crypto_skcipher *enc = ctx->enc;
	struct crypto_authenc_keys keys;
	int err = -EINVAL;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto out;

	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
	if (err)
		goto out;

	crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}
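
/*
 * Undo the temporary swap of the sequence-number high bits done by
 * crypto_authenc_esn_genicv() and write the computed ICV after the
 * ciphertext in the destination buffer.
 */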
static int crypto_authenc_esn_genicv_tail(struct aead_request *req,
					  unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_ahash *auth = ctx->auth;
	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
			     crypto_ahash_alignmask(auth) + 1);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);

	scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
	return 0;
}

static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq,
					 int err)
{
	struct aead_request *req = areq->data;

	err = err ?: crypto_authenc_esn_genicv_tail(req, 0);
	aead_request_complete(req, err);
}
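
/*
 * Generate the ICV over the associated data and ciphertext.  For ESN
 * the 32 high-order sequence-number bits carried in the associated
 * data are hashed after the ciphertext, so they are swapped to the end
 * before digesting and moved back in the _tail helper.  A zero
 * authsize skips authentication entirely.
 */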
static int crypto_authenc_esn_genicv(struct aead_request *req,
				     unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
			     crypto_ahash_alignmask(auth) + 1);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	if (!authsize)
		return 0;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, flags,
				   authenc_esn_geniv_ahash_done, req);

	return crypto_ahash_digest(ahreq) ?:
	       crypto_authenc_esn_genicv_tail(req, aead_request_flags(req));
}

static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req,
					    int err)
{
	struct aead_request *areq = req->data;

	if (!err)
		err = crypto_authenc_esn_genicv(areq, 0);

	authenc_esn_request_complete(areq, err);
}
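
/*
 * Copy @len bytes from req->src to req->dst via the null skcipher;
 * used when the request operates out of place.
 */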
static int crypto_authenc_esn_copy(struct aead_request *req, unsigned int len)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);

	skcipher_request_set_sync_tfm(skreq, ctx->null);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      NULL, NULL);
	skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL);

	return crypto_skcipher_encrypt(skreq);
}
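
/*
 * Encrypt the plaintext with the inner skcipher and then generate the
 * ICV over the result.  For out-of-place requests the associated data
 * is first copied to the destination buffer.
 */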
static int crypto_authenc_esn_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_skcipher *enc = ctx->enc;
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *src, *dst;
	int err;

	sg_init_table(areq_ctx->src, 2);
	src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
	dst = src;

	if (req->src != req->dst) {
		err = crypto_authenc_esn_copy(req, assoclen);
		if (err)
			return err;

		sg_init_table(areq_ctx->dst, 2);
		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
	}

	skcipher_request_set_tfm(skreq, enc);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      crypto_authenc_esn_encrypt_done, req);
	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);

	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	return crypto_authenc_esn_genicv(req, aead_request_flags(req));
}
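
/*
 * Restore the original sequence-number layout, compare the computed
 * ICV against the received one (unless authentication is disabled)
 * and, on success, decrypt the ciphertext in place in the destination
 * buffer.
 */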
static int crypto_authenc_esn_decrypt_tail(struct aead_request *req,
					   unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_ahash *auth = ctx->auth;
	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
			      crypto_ahash_alignmask(auth) + 1);
	unsigned int cryptlen = req->cryptlen - authsize;
	unsigned int assoclen = req->assoclen;
	struct scatterlist *dst = req->dst;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	u32 tmp[2];

	if (!authsize)
		goto decrypt;

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);

	if (crypto_memneq(ihash, ohash, authsize))
		return -EBADMSG;

decrypt:

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);

	skcipher_request_set_tfm(skreq, ctx->enc);
	skcipher_request_set_callback(skreq, flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);

	return crypto_skcipher_decrypt(skreq);
}

static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
					  int err)
{
	struct aead_request *req = areq->data;

	err = err ?: crypto_authenc_esn_decrypt_tail(req, 0);
	authenc_esn_request_complete(req, err);
}
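
/*
 * Verify-then-decrypt: copy the received ICV aside, swap the sequence
 * number high bits to the end, digest the associated data plus
 * ciphertext and let the _tail helper compare the tags and run the
 * skcipher.
 */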
static int crypto_authenc_esn_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
			      crypto_ahash_alignmask(auth) + 1);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	struct scatterlist *dst = req->dst;
	u32 tmp[2];
	int err;

	cryptlen -= authsize;

	if (req->src != dst) {
		err = crypto_authenc_esn_copy(req, assoclen + cryptlen);
		if (err)
			return err;
	}

	scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
				 authsize, 0);

	if (!authsize)
		goto tail;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   authenc_esn_verify_ahash_done, req);

	err = crypto_ahash_digest(ahreq);
	if (err)
		return err;

tail:
	return crypto_authenc_esn_decrypt_tail(req, aead_request_flags(req));
}
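
/*
 * Instantiate the spawned ahash and skcipher plus the default null
 * cipher, and size the per-request context so that the two hash
 * buffers and the larger of the two sub-requests fit behind it.
 */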
static int crypto_authenc_esn_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct authenc_esn_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
	struct crypto_sync_skcipher *null;
	int err;

	auth = crypto_spawn_ahash(&ictx->auth);
	if (IS_ERR(auth))
		return PTR_ERR(auth);

	enc = crypto_spawn_skcipher(&ictx->enc);
	err = PTR_ERR(enc);
	if (IS_ERR(enc))
		goto err_free_ahash;

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_skcipher;

	ctx->auth = auth;
	ctx->enc = enc;
	ctx->null = null;

	ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth),
			    crypto_ahash_alignmask(auth) + 1);

	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct authenc_esn_request_ctx) +
		ctx->reqoff +
		max_t(unsigned int,
		      crypto_ahash_reqsize(auth) +
		      sizeof(struct ahash_request),
		      sizeof(struct skcipher_request) +
		      crypto_skcipher_reqsize(enc)));

	return 0;

err_free_skcipher:
	crypto_free_skcipher(enc);
err_free_ahash:
	crypto_free_ahash(auth);
	return err;
}

static void crypto_authenc_esn_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->auth);
	crypto_free_skcipher(ctx->enc);
	crypto_put_default_null_skcipher();
}

static void crypto_authenc_esn_free(struct aead_instance *inst)
{
	struct authenc_esn_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->enc);
	crypto_drop_ahash(&ctx->auth);
	kfree(inst);
}
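
/*
 * Parse the "authencesn(auth,enc)" template parameters, grab the named
 * ahash and skcipher algorithms and register the resulting AEAD
 * instance.
 */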
static int crypto_authenc_esn_create(struct crypto_template *tmpl,
				     struct rtattr **tb)
{
	u32 mask;
	struct aead_instance *inst;
	struct authenc_esn_instance_ctx *ctx;
	struct hash_alg_common *auth;
	struct crypto_alg *auth_base;
	struct skcipher_alg *enc;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ctx = aead_instance_ctx(inst);

	err = crypto_grab_ahash(&ctx->auth, aead_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	auth = crypto_spawn_ahash_alg(&ctx->auth);
	auth_base = &auth->base;

	err = crypto_grab_skcipher(&ctx->enc, aead_crypto_instance(inst),
				   crypto_attr_alg_name(tb[2]), 0, mask);
	if (err)
		goto err_free_inst;
	enc = crypto_spawn_skcipher_alg(&ctx->enc);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_name,
		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_driver_name,
		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
				      auth_base->cra_priority;
	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
	inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
				       enc->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
	inst->alg.maxauthsize = auth->digestsize;

	inst->alg.init = crypto_authenc_esn_init_tfm;
	inst->alg.exit = crypto_authenc_esn_exit_tfm;

	inst->alg.setkey = crypto_authenc_esn_setkey;
	inst->alg.setauthsize = crypto_authenc_esn_setauthsize;
	inst->alg.encrypt = crypto_authenc_esn_encrypt;
	inst->alg.decrypt = crypto_authenc_esn_decrypt;

	inst->free = crypto_authenc_esn_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_authenc_esn_free(inst);
	}
	return err;
}

static struct crypto_template crypto_authenc_esn_tmpl = {
	.name = "authencesn",
	.create = crypto_authenc_esn_create,
	.module = THIS_MODULE,
};

static int __init crypto_authenc_esn_module_init(void)
{
	return crypto_register_template(&crypto_authenc_esn_tmpl);
}

static void __exit crypto_authenc_esn_module_exit(void)
{
	crypto_unregister_template(&crypto_authenc_esn_tmpl);
}

subsys_initcall(crypto_authenc_esn_module_init);
module_exit(crypto_authenc_esn_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
MODULE_ALIAS_CRYPTO("authencesn");