// SPDX-License-Identifier: GPL-2.0-only
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/sha.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	ce_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	ce_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	ce_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	ce_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	neon_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	neon_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	neon_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	neon_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
#endif

#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif
MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);

asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);
asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				    int rounds, int bytes, u8 const iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_essiv_cbc_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);
asmlinkage void aes_essiv_cbc_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				      int rounds, int blocks, u8 iv[],
				      u32 const rk2[]);

asmlinkage int aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			      int blocks, u8 dg[], int enc_before,
			      int enc_after);
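
/*
 * Tfm contexts. For XTS, key1 encrypts the data and key2 generates the
 * tweak; for ESSIV, key2 is expanded from a SHA-256 digest of the user
 * key at setkey time. The MAC context carries the expanded key followed
 * by the derived constants (CMAC/XCBC subkeys) in a flexible array.
 */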

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct crypto_aes_essiv_cbc_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
	struct crypto_shash *hash;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};
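
/*
 * Key expansion resolves to ce_aes_expandkey() when built against the
 * Crypto Extensions (see the #defines above); the NEON build uses the
 * generic aes_expandkey() from the AES library instead.
 */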

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	return aes_expandkey(ctx, in_key, key_len);
}

static int __maybe_unused xts_set_key(struct crypto_skcipher *tfm,
				      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	return ret;
}

static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
					    const u8 *in_key,
					    unsigned int key_len)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 digest[SHA256_DIGEST_SIZE];
	int ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len);
	if (ret)
		return ret;

	crypto_shash_tfm_digest(ctx->hash, in_key, key_len, digest);

	return aes_expandkey(&ctx->key2, digest, sizeof(digest));
}
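
/*
 * Block mode wrappers. The round count follows from the key length:
 * 6 + keylen / 4 gives 10, 12 or 14 rounds for 128-, 192- and 256-bit
 * keys. Each walk chunk is processed with the NEON unit claimed via
 * kernel_neon_begin()/kernel_neon_end() around the assembly call.
 */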

static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_enc, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_dec, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}
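
/*
 * CBC with ciphertext stealing: everything up to the last two blocks is
 * handled as plain CBC via a subrequest, and the remaining full block
 * plus partial tail is passed to the aes_cbc_cts_* assembly routines,
 * which perform the stealing step on the final two blocks.
 */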

static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
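
/*
 * ESSIV-CBC: the first assembly call uses key2 (derived from the
 * SHA-256 of the user key at setkey time) to turn the sector IV into
 * the actual CBC IV; any data beyond the first walk chunk is then
 * processed as regular CBC.
 */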

static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->hash = crypto_alloc_shash("sha256", 0, 0);

	return PTR_ERR_OR_ZERO(ctx->hash);
}

static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_shash(ctx->hash);
}

static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_enc, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_encrypt_walk(req, &walk);
}

static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_dec, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_decrypt_walk(req, &walk);
}
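
/*
 * CTR mode: full blocks are encrypted directly; a trailing partial
 * block is handled by asking the assembly for one keystream block in a
 * stack buffer (signalled by a negative block count) and XORing only
 * the remaining bytes into the destination.
 */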

static int __maybe_unused ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
				blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
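
/*
 * XTS: when the request length is not a multiple of the block size,
 * the bulk of the data (all but the last two blocks' worth) is
 * processed first through a subrequest, and the final full block plus
 * the partial tail is then run through the assembly once more, which
 * performs the ciphertext stealing.
 */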

static int __maybe_unused xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_enc, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int __maybe_unused xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_dec, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_dec, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
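
/*
 * The plain ECB/CBC/CTR/XTS entries are only registered from this
 * module when it is built for the Crypto Extensions, or when the
 * bit-sliced NEON implementation (CONFIG_CRYPTO_AES_ARM64_BS) is not
 * available to provide them instead; cts(cbc), essiv(cbc) and the MACs
 * are always registered here.
 */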

static struct skcipher_alg aes_algs[] = { {
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !IS_ENABLED(CONFIG_CRYPTO_AES_ARM64_BS)
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "ecb-aes-" MODE,
		.cra_priority = PRIO,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.setkey = skcipher_aes_setkey,
	.encrypt = ecb_encrypt,
	.decrypt = ecb_decrypt,
}, {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "cbc-aes-" MODE,
		.cra_priority = PRIO,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = skcipher_aes_setkey,
	.encrypt = cbc_encrypt,
	.decrypt = cbc_decrypt,
}, {
	.base = {
		.cra_name = "ctr(aes)",
		.cra_driver_name = "ctr-aes-" MODE,
		.cra_priority = PRIO,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.chunksize = AES_BLOCK_SIZE,
	.setkey = skcipher_aes_setkey,
	.encrypt = ctr_encrypt,
	.decrypt = ctr_encrypt,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "xts-aes-" MODE,
		.cra_priority = PRIO,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct crypto_aes_xts_ctx),
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.walksize = 2 * AES_BLOCK_SIZE,
	.setkey = xts_set_key,
	.encrypt = xts_encrypt,
	.decrypt = xts_decrypt,
}, {
#endif
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "cts-cbc-aes-" MODE,
		.cra_priority = PRIO,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct crypto_aes_ctx),
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.walksize = 2 * AES_BLOCK_SIZE,
	.setkey = skcipher_aes_setkey,
	.encrypt = cts_cbc_encrypt,
	.decrypt = cts_cbc_decrypt,
}, {
	.base = {
		.cra_name = "essiv(cbc(aes),sha256)",
		.cra_driver_name = "essiv-cbc-aes-sha256-" MODE,
		.cra_priority = PRIO + 1,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct crypto_aes_essiv_cbc_ctx),
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = essiv_cbc_set_key,
	.encrypt = essiv_cbc_encrypt,
	.decrypt = essiv_cbc_decrypt,
	.init = essiv_cbc_init_tfm,
	.exit = essiv_cbc_exit_tfm,
} };
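
/*
 * CMAC/XCBC/CBCMAC. For CMAC, the two subkeys K1 and K2 stored in
 * ctx->consts are derived by encrypting the all-zero block and doubling
 * it twice in GF(2^128); 0x87 below is the reduction constant for the
 * field polynomial x^128 + x^7 + x^2 + x + 1.
 */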

static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);

	return aes_expandkey(&ctx->key, in_key, key_len);
}

static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
			rounds, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}
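
/*
 * XCBC (RFC 3566) derives three keys by encrypting the constant blocks
 * 0x01..01, 0x02..02 and 0x03..03 under the user key: K1 replaces the
 * cipher key used for the CBC-MAC itself, while K2 and K3 are kept in
 * ctx->consts for the final block.
 */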

static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}
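
/*
 * mac_do_update() feeds full blocks to the NEON routine while SIMD may
 * be used; aes_mac_update() returns the number of blocks it did not
 * consume (so the NEON unit can be yielded between chunks) and the loop
 * resubmits the remainder. In non-SIMD contexts it falls back to the
 * scalar AES library, XORing each block into the digest before
 * encrypting it.
 */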

static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (crypto_simd_usable()) {
		int rem;

		do {
			kernel_neon_begin();
			rem = aes_mac_update(in, ctx->key_enc, rounds, blocks,
					     dg, enc_before, enc_after);
			kernel_neon_end();
			in += (blocks - rem) * AES_BLOCK_SIZE;
			blocks = rem;
			enc_before = 0;
		} while (blocks);
	} else {
		if (enc_before)
			aes_encrypt(ctx, dg, dg);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				aes_encrypt(ctx, dg, dg);
		}
	}
}

static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static struct shash_alg mac_algs[] = { {
	.base.cra_name = "cmac(aes)",
	.base.cra_driver_name = "cmac-aes-" MODE,
	.base.cra_priority = PRIO,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct mac_tfm_ctx) +
			    2 * AES_BLOCK_SIZE,
	.base.cra_module = THIS_MODULE,

	.digestsize = AES_BLOCK_SIZE,
	.init = mac_init,
	.update = mac_update,
	.final = cmac_final,
	.setkey = cmac_setkey,
	.descsize = sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name = "xcbc(aes)",
	.base.cra_driver_name = "xcbc-aes-" MODE,
	.base.cra_priority = PRIO,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct mac_tfm_ctx) +
			    2 * AES_BLOCK_SIZE,
	.base.cra_module = THIS_MODULE,

	.digestsize = AES_BLOCK_SIZE,
	.init = mac_init,
	.update = mac_update,
	.final = cmac_final,
	.setkey = xcbc_setkey,
	.descsize = sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name = "cbcmac(aes)",
	.base.cra_driver_name = "cbcmac-aes-" MODE,
	.base.cra_priority = PRIO,
	.base.cra_blocksize = 1,
	.base.cra_ctxsize = sizeof(struct mac_tfm_ctx),
	.base.cra_module = THIS_MODULE,

	.digestsize = AES_BLOCK_SIZE,
	.init = mac_init,
	.update = mac_update,
	.final = cbcmac_final,
	.setkey = cbcmac_setkey,
	.descsize = sizeof(struct mac_desc_ctx),
} };

static void aes_exit(void)
{
	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	int err;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	return 0;

unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
EXPORT_SYMBOL(neon_aes_xts_encrypt);
EXPORT_SYMBOL(neon_aes_xts_decrypt);
#endif
module_exit(aes_exit);