// SPDX-License-Identifier: GPL-2.0
/*
 * ARM NEON accelerated ChaCha and XChaCha stream ciphers,
 * including ChaCha20 (RFC7539)
 *
 * Copyright (C) 2016-2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 * Copyright (C) 2015 Martin Willi
 */

#include <crypto/algapi.h>
#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>

#include <asm/cputype.h>
#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>

asmlinkage void chacha_block_xor_neon(const u32 *state, u8 *dst, const u8 *src,
				      int nrounds);
asmlinkage void chacha_4block_xor_neon(const u32 *state, u8 *dst, const u8 *src,
				       int nrounds);
asmlinkage void hchacha_block_arm(const u32 *state, u32 *out, int nrounds);
asmlinkage void hchacha_block_neon(const u32 *state, u32 *out, int nrounds);

asmlinkage void chacha_doarm(u8 *dst, const u8 *src, unsigned int bytes,
			     const u32 *state, int nrounds);

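/*
 * Enabled once at init time if this CPU is expected to benefit from the
 * NEON code; even then, NEON is only used when the SIMD unit is usable
 * in the calling context.
 */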
static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_neon);

static inline bool neon_usable(void)
{
	return static_branch_likely(&use_neon) && crypto_simd_usable();
}

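/*
 * Bulk NEON path: process 4-block (256-byte) chunks with the wide kernel
 * first, then whole single blocks, and finally bounce any partial tail
 * block through a stack buffer so the NEON code only ever sees full
 * blocks. state[12] is the block counter and is advanced accordingly.
 */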
static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds)
{
	u8 buf[CHACHA_BLOCK_SIZE];

	while (bytes >= CHACHA_BLOCK_SIZE * 4) {
		chacha_4block_xor_neon(state, dst, src, nrounds);
		bytes -= CHACHA_BLOCK_SIZE * 4;
		src += CHACHA_BLOCK_SIZE * 4;
		dst += CHACHA_BLOCK_SIZE * 4;
		state[12] += 4;
	}
	while (bytes >= CHACHA_BLOCK_SIZE) {
		chacha_block_xor_neon(state, dst, src, nrounds);
		bytes -= CHACHA_BLOCK_SIZE;
		src += CHACHA_BLOCK_SIZE;
		dst += CHACHA_BLOCK_SIZE;
		state[12]++;
	}
	if (bytes) {
		memcpy(buf, src, bytes);
		chacha_block_xor_neon(state, buf, buf, nrounds);
		memcpy(dst, buf, bytes);
	}
}

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable()) {
		hchacha_block_arm(state, stream, nrounds);
	} else {
		kernel_neon_begin();
		hchacha_block_neon(state, stream, nrounds);
		kernel_neon_end();
	}
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
{
	chacha_init_generic(state, key, iv);
}
EXPORT_SYMBOL(chacha_init_arch);

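/*
 * Library interface: fall back to the scalar code for short inputs or
 * when NEON cannot be used, and otherwise process at most 4 KiB per
 * kernel_neon_begin()/kernel_neon_end() section to keep scheduling
 * latency bounded.
 */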
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
		       int nrounds)
{
	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable() ||
	    bytes <= CHACHA_BLOCK_SIZE) {
		chacha_doarm(dst, src, bytes, state, nrounds);
		state[12] += DIV_ROUND_UP(bytes, CHACHA_BLOCK_SIZE);
		return;
	}

	do {
		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);

		kernel_neon_begin();
		chacha_doneon(state, dst, src, todo, nrounds);
		kernel_neon_end();

		bytes -= todo;
		src += todo;
		dst += todo;
	} while (bytes);
}
EXPORT_SYMBOL(chacha_crypt_arch);

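/*
 * skcipher backend: walk the request and encrypt each contiguous span,
 * rounding intermediate steps down to the walk stride so that the block
 * counter in state[12] stays aligned with the data already processed.
 */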
static int chacha_stream_xor(struct skcipher_request *req,
			     const struct chacha_ctx *ctx, const u8 *iv,
			     bool neon)
{
	struct skcipher_walk walk;
	u32 state[16];
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	chacha_init_generic(state, ctx->key, iv);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
			chacha_doarm(walk.dst.virt.addr, walk.src.virt.addr,
				     nbytes, state, ctx->nrounds);
			state[12] += DIV_ROUND_UP(nbytes, CHACHA_BLOCK_SIZE);
		} else {
			kernel_neon_begin();
			chacha_doneon(state, walk.dst.virt.addr,
				      walk.src.virt.addr, nbytes, ctx->nrounds);
			kernel_neon_end();
		}
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}

static int do_chacha(struct skcipher_request *req, bool neon)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);

	return chacha_stream_xor(req, ctx, req->iv, neon);
}

static int chacha_arm(struct skcipher_request *req)
{
	return do_chacha(req, false);
}

static int chacha_neon(struct skcipher_request *req)
{
	return do_chacha(req, neon_usable());
}

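/*
 * XChaCha: run HChaCha over the key and the first 16 bytes of the IV to
 * derive a one-time subkey, then run plain ChaCha with an IV made of the
 * 64-bit counter (IV bytes 24-31) followed by the remaining nonce bytes
 * (IV bytes 16-23).
 */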
static int do_xchacha(struct skcipher_request *req, bool neon)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct chacha_ctx subctx;
	u32 state[16];
	u8 real_iv[16];

	chacha_init_generic(state, ctx->key, req->iv);

	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
		hchacha_block_arm(state, subctx.key, ctx->nrounds);
	} else {
		kernel_neon_begin();
		hchacha_block_neon(state, subctx.key, ctx->nrounds);
		kernel_neon_end();
	}
	subctx.nrounds = ctx->nrounds;

	memcpy(&real_iv[0], req->iv + 24, 8);
	memcpy(&real_iv[8], req->iv + 16, 8);

	return chacha_stream_xor(req, &subctx, real_iv, neon);
}

static int xchacha_arm(struct skcipher_request *req)
{
	return do_xchacha(req, false);
}

static int xchacha_neon(struct skcipher_request *req)
{
	return do_xchacha(req, neon_usable());
}

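/*
 * Scalar algorithms: always registered, at a priority below the NEON
 * variants so the latter are preferred whenever they are available.
 */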
static struct skcipher_alg arm_algs[] = {
	{
		.base.cra_name = "chacha20",
		.base.cra_driver_name = "chacha20-arm",
		.base.cra_priority = 200,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct chacha_ctx),
		.base.cra_module = THIS_MODULE,

		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.chunksize = CHACHA_BLOCK_SIZE,
		.setkey = chacha20_setkey,
		.encrypt = chacha_arm,
		.decrypt = chacha_arm,
	}, {
		.base.cra_name = "xchacha20",
		.base.cra_driver_name = "xchacha20-arm",
		.base.cra_priority = 200,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct chacha_ctx),
		.base.cra_module = THIS_MODULE,

		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = XCHACHA_IV_SIZE,
		.chunksize = CHACHA_BLOCK_SIZE,
		.setkey = chacha20_setkey,
		.encrypt = xchacha_arm,
		.decrypt = xchacha_arm,
	}, {
		.base.cra_name = "xchacha12",
		.base.cra_driver_name = "xchacha12-arm",
		.base.cra_priority = 200,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct chacha_ctx),
		.base.cra_module = THIS_MODULE,

		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = XCHACHA_IV_SIZE,
		.chunksize = CHACHA_BLOCK_SIZE,
		.setkey = chacha12_setkey,
		.encrypt = xchacha_arm,
		.decrypt = xchacha_arm,
	},
};

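/*
 * NEON algorithms: higher priority than the scalar ones, and a walksize
 * of four blocks so the skcipher walk hands chacha_doneon() spans that
 * the 4-way NEON kernel can consume in full.
 */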
static struct skcipher_alg neon_algs[] = {
	{
		.base.cra_name = "chacha20",
		.base.cra_driver_name = "chacha20-neon",
		.base.cra_priority = 300,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct chacha_ctx),
		.base.cra_module = THIS_MODULE,

		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.chunksize = CHACHA_BLOCK_SIZE,
		.walksize = 4 * CHACHA_BLOCK_SIZE,
		.setkey = chacha20_setkey,
		.encrypt = chacha_neon,
		.decrypt = chacha_neon,
	}, {
		.base.cra_name = "xchacha20",
		.base.cra_driver_name = "xchacha20-neon",
		.base.cra_priority = 300,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct chacha_ctx),
		.base.cra_module = THIS_MODULE,

		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = XCHACHA_IV_SIZE,
		.chunksize = CHACHA_BLOCK_SIZE,
		.walksize = 4 * CHACHA_BLOCK_SIZE,
		.setkey = chacha20_setkey,
		.encrypt = xchacha_neon,
		.decrypt = xchacha_neon,
	}, {
		.base.cra_name = "xchacha12",
		.base.cra_driver_name = "xchacha12-neon",
		.base.cra_priority = 300,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct chacha_ctx),
		.base.cra_module = THIS_MODULE,

		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = XCHACHA_IV_SIZE,
		.chunksize = CHACHA_BLOCK_SIZE,
		.walksize = 4 * CHACHA_BLOCK_SIZE,
		.setkey = chacha12_setkey,
		.encrypt = xchacha_neon,
		.decrypt = xchacha_neon,
	}
};

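/*
 * Register the scalar algorithms unconditionally. When the CPU has NEON,
 * also register the NEON variants, but drop their priority to 0 on cores
 * where the scalar code is known to be the better choice.
 */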
static int __init chacha_simd_mod_init(void)
{
	int err = 0;

	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) {
		err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
		if (err)
			return err;
	}

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {
		int i;

		switch (read_cpuid_part()) {
		case ARM_CPU_PART_CORTEX_A7:
		case ARM_CPU_PART_CORTEX_A5:
			/*
			 * The Cortex-A7 and Cortex-A5 do not perform well with
			 * the NEON implementation but do far better with the
			 * scalar one, while also using less power.
			 */
			for (i = 0; i < ARRAY_SIZE(neon_algs); i++)
				neon_algs[i].base.cra_priority = 0;
			break;
		default:
			static_branch_enable(&use_neon);
		}

		if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) {
			err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
			if (err)
				crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
		}
	}
	return err;
}

static void __exit chacha_simd_mod_fini(void)
{
	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) {
		crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
		if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
			crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
	}
}

module_init(chacha_simd_mod_init);
module_exit(chacha_simd_mod_fini);

MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (scalar and NEON accelerated)");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-arm");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-arm");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-arm");
#ifdef CONFIG_KERNEL_MODE_NEON
MODULE_ALIAS_CRYPTO("chacha20-neon");
MODULE_ALIAS_CRYPTO("xchacha20-neon");
MODULE_ALIAS_CRYPTO("xchacha12-neon");
#endif