// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/module.h>
#include <crypto/xts.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);
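/*
 * As used by the key schedule below: ce_aes_sub() applies the AES S-box to
 * each byte of its argument (SubWord()), and ce_aes_invert() applies the
 * Inverse MixColumns transform to a single round key when building the
 * decryption key schedule.
 */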
asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				       int rounds, int bytes, u8 const iv[]);
asmlinkage void ce_aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				       int rounds, int bytes, u8 const iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
			    unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->key_length = key_len;
	for (i = 0; i < kwords; i++)
		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));

	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;
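
		/*
		 * Standard key schedule core: apply the S-box to each byte of
		 * the previous round key's last word (SubWord()), rotate by
		 * one byte (RotWord()) and XOR in the round constant.
		 */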
		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ rcon[i];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];
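
		/*
		 * 192-bit keys expand six words per iteration and need 52
		 * round key words in total; 256-bit keys expand eight words
		 * per iteration and need 60, with an extra SubWord() on the
		 * fifth word. Their final iteration only produces the first
		 * four words, hence the early break.
		 */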
		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}

static int ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				       key_len / 2);
	return ret;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}

static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);
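
	/*
	 * CTS needs at least one full block. A request of exactly one block
	 * degenerates to plain CBC; everything up to the last two blocks is
	 * handled as bulk CBC, and the final (possibly partial) pair goes
	 * through the ciphertext stealing path below.
	 */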
	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_enc, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);
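
	/*
	 * Mirrors cts_cbc_encrypt(): bulk CBC for all but the last two
	 * blocks, then the ciphertext stealing step using the decryption
	 * round keys.
	 */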
	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_dec, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err, blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
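		/*
		 * A negative block count requests a single keystream block
		 * written to 'tail'; the partial plaintext is then XORed
		 * with it via crypto_xor_cpy() below.
		 */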
		blocks = -1;

		kernel_neon_begin();
		ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx),
				   blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}

static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}
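
/*
 * Synchronous fallback: when the NEON unit cannot be used in the current
 * context, encrypt with the generic AES library code one block at a time
 * via crypto_ctr_encrypt_walk().
 */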
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);
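
	/*
	 * If the length is not a multiple of the block size, split off the
	 * last two blocks: everything before them is processed as regular
	 * XTS in the loop below, and the trailing partial pair is handled
	 * with ciphertext stealing at the end.
	 */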
	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_enc, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);
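
	/*
	 * Same splitting logic as xts_encrypt(): full blocks first, then
	 * ciphertext stealing for the trailing partial block. Note that the
	 * tweak is always computed with the second *encryption* key.
	 */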
	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_dec, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_dec, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
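
/*
 * The "__"-prefixed algorithms are marked CRYPTO_ALG_INTERNAL: they must only
 * run where kernel_neon_begin() is permitted, so aes_init() wraps each of
 * them in a SIMD helper that defers to an asynchronous worker when NEON is
 * not usable.
 */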
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "__ecb(aes)",
	.base.cra_driver_name	= "__ecb-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "__cbc(aes)",
	.base.cra_driver_name	= "__cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "__cts(cbc(aes))",
	.base.cra_driver_name	= "__cts-cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cts_cbc_encrypt,
	.decrypt		= cts_cbc_decrypt,
}, {
	.base.cra_name		= "__ctr(aes)",
	.base.cra_driver_name	= "__ctr-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
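	/*
	 * Synchronous ctr(aes) fallback, registered directly without a SIMD
	 * wrapper and at slightly lower priority than the wrapped
	 * __ctr-aes-ce instance above.
	 */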
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt_sync,
	.decrypt		= ctr_encrypt_sync,
}, {
	.base.cra_name		= "__xts(aes)",
	.base.cra_driver_name	= "__xts-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= xts_set_key,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs) && aes_simd_algs[i]; i++)
		simd_skcipher_free(aes_simd_algs[i]);
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;
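
		/* strip the "__" prefix to derive the public names for the SIMD wrapper */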
		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
}

module_cpu_feature_match(AES, aes_init);
module_exit(aes_exit);