poly1305-glue.c

// SPDX-License-Identifier: GPL-2.0
/*
 * OpenSSL/Cryptogams accelerated Poly1305 transform for ARM
 *
 * Copyright (C) 2019 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/poly1305.h>
#include <crypto/internal/simd.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/jump_label.h>
#include <linux/module.h>

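/* Scalar and NEON transform routines implemented in assembly. */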
void poly1305_init_arm(void *state, const u8 *key);
void poly1305_blocks_arm(void *state, const u8 *src, u32 len, u32 hibit);
void poly1305_blocks_neon(void *state, const u8 *src, u32 len, u32 hibit);
void poly1305_emit_arm(void *state, u8 *digest, const u32 *nonce);

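/*
 * Empty __weak fallback so this file links even when the NEON routine is
 * not assembled; the real implementation takes precedence when present,
 * and the stub is never reached while have_neon remains false.
 */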
void __weak poly1305_blocks_neon(void *state, const u8 *src, u32 len, u32 hibit)
{
}

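/* Enabled at module init when the CPU advertises NEON (HWCAP_NEON). */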
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);

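/*
 * Library interface: the first 16 bytes of the key initialise the hash
 * state via poly1305_init_arm(), the last 16 bytes are the nonce 's'
 * that is added in at finalisation.
 */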
void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 key[POLY1305_KEY_SIZE])
{
	poly1305_init_arm(&dctx->h, key);
	dctx->s[0] = get_unaligned_le32(key + 16);
	dctx->s[1] = get_unaligned_le32(key + 20);
	dctx->s[2] = get_unaligned_le32(key + 24);
	dctx->s[3] = get_unaligned_le32(key + 28);
	dctx->buflen = 0;
}
EXPORT_SYMBOL(poly1305_init_arch);

static int arm_poly1305_init(struct shash_desc *desc)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

	dctx->buflen = 0;
	dctx->rset = 0;
	dctx->sset = false;

	return 0;
}

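/*
 * shash interface: the key is not set via setkey but arrives in-band as
 * the first 32 bytes of data. The first 16-byte block sets 'r', the
 * second sets the nonce 's', tracked via dctx->rset and dctx->sset.
 */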
static void arm_poly1305_blocks(struct poly1305_desc_ctx *dctx, const u8 *src,
				u32 len, u32 hibit, bool do_neon)
{
	if (unlikely(!dctx->sset)) {
		if (!dctx->rset) {
			poly1305_init_arm(&dctx->h, src);
			src += POLY1305_BLOCK_SIZE;
			len -= POLY1305_BLOCK_SIZE;
			dctx->rset = 1;
		}
		if (len >= POLY1305_BLOCK_SIZE) {
			dctx->s[0] = get_unaligned_le32(src + 0);
			dctx->s[1] = get_unaligned_le32(src + 4);
			dctx->s[2] = get_unaligned_le32(src + 8);
			dctx->s[3] = get_unaligned_le32(src + 12);
			src += POLY1305_BLOCK_SIZE;
			len -= POLY1305_BLOCK_SIZE;
			dctx->sset = true;
		}
		if (len < POLY1305_BLOCK_SIZE)
			return;
	}

	len &= ~(POLY1305_BLOCK_SIZE - 1);

	if (static_branch_likely(&have_neon) && likely(do_neon))
		poly1305_blocks_neon(&dctx->h, src, len, hibit);
	else
		poly1305_blocks_arm(&dctx->h, src, len, hibit);
}

static void arm_poly1305_do_update(struct poly1305_desc_ctx *dctx,
				   const u8 *src, u32 len, bool do_neon)
{
	if (unlikely(dctx->buflen)) {
		u32 bytes = min(len, POLY1305_BLOCK_SIZE - dctx->buflen);

		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		len -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			arm_poly1305_blocks(dctx, dctx->buf,
					    POLY1305_BLOCK_SIZE, 1, false);
			dctx->buflen = 0;
		}
	}

	if (likely(len >= POLY1305_BLOCK_SIZE)) {
		arm_poly1305_blocks(dctx, src, len, 1, do_neon);
		src += round_down(len, POLY1305_BLOCK_SIZE);
		len %= POLY1305_BLOCK_SIZE;
	}

	if (unlikely(len)) {
		dctx->buflen = len;
		memcpy(dctx->buf, src, len);
	}
}

static int arm_poly1305_update(struct shash_desc *desc,
			       const u8 *src, unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

	arm_poly1305_do_update(dctx, src, srclen, false);
	return 0;
}

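/*
 * NEON is only worthwhile for larger inputs: fall back to the scalar
 * code for requests of 128 bytes or less, or when SIMD is unusable.
 */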
static int __maybe_unused arm_poly1305_update_neon(struct shash_desc *desc,
						   const u8 *src,
						   unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	bool do_neon = crypto_simd_usable() && srclen > 128;

	if (static_branch_likely(&have_neon) && do_neon)
		kernel_neon_begin();
	arm_poly1305_do_update(dctx, src, srclen, do_neon);
	if (static_branch_likely(&have_neon) && do_neon)
		kernel_neon_end();
	return 0;
}

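/*
 * Library update: when NEON is usable, process the input in chunks of at
 * most SZ_4K, wrapping each chunk in kernel_neon_begin()/kernel_neon_end()
 * so the NEON unit is not held for overly long stretches.
 */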
void poly1305_update_arch(struct poly1305_desc_ctx *dctx, const u8 *src,
			  unsigned int nbytes)
{
	bool do_neon = IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
		       crypto_simd_usable();

	if (unlikely(dctx->buflen)) {
		u32 bytes = min(nbytes, POLY1305_BLOCK_SIZE - dctx->buflen);

		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		nbytes -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			poly1305_blocks_arm(&dctx->h, dctx->buf,
					    POLY1305_BLOCK_SIZE, 1);
			dctx->buflen = 0;
		}
	}

	if (likely(nbytes >= POLY1305_BLOCK_SIZE)) {
		unsigned int len = round_down(nbytes, POLY1305_BLOCK_SIZE);

		if (static_branch_likely(&have_neon) && do_neon) {
			do {
				unsigned int todo = min_t(unsigned int, len, SZ_4K);

				kernel_neon_begin();
				poly1305_blocks_neon(&dctx->h, src, todo, 1);
				kernel_neon_end();

				len -= todo;
				src += todo;
			} while (len);
		} else {
			poly1305_blocks_arm(&dctx->h, src, len, 1);
			src += len;
		}
		nbytes %= POLY1305_BLOCK_SIZE;
	}

	if (unlikely(nbytes)) {
		dctx->buflen = nbytes;
		memcpy(dctx->buf, src, nbytes);
	}
}
EXPORT_SYMBOL(poly1305_update_arch);

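/*
 * Pad any trailing partial block with a single 1 byte followed by zeroes,
 * process it with the high bit clear, then emit the 16-byte tag.
 */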
void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
{
	if (unlikely(dctx->buflen)) {
		dctx->buf[dctx->buflen++] = 1;
		memset(dctx->buf + dctx->buflen, 0,
		       POLY1305_BLOCK_SIZE - dctx->buflen);
		poly1305_blocks_arm(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 0);
	}

	poly1305_emit_arm(&dctx->h, dst, dctx->s);
	*dctx = (struct poly1305_desc_ctx){};
}
EXPORT_SYMBOL(poly1305_final_arch);

static int arm_poly1305_final(struct shash_desc *desc, u8 *dst)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

	if (unlikely(!dctx->sset))
		return -ENOKEY;

	poly1305_final_arch(dctx, dst);
	return 0;
}

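/*
 * Two shash instances: the scalar "poly1305-arm" driver (priority 150)
 * and, when NEON is compiled in, "poly1305-neon" (priority 200), which
 * the crypto API will prefer.
 */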
static struct shash_alg arm_poly1305_algs[] = {{
	.init			= arm_poly1305_init,
	.update			= arm_poly1305_update,
	.final			= arm_poly1305_final,
	.digestsize		= POLY1305_DIGEST_SIZE,
	.descsize		= sizeof(struct poly1305_desc_ctx),

	.base.cra_name		= "poly1305",
	.base.cra_driver_name	= "poly1305-arm",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= POLY1305_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
#ifdef CONFIG_KERNEL_MODE_NEON
}, {
	.init			= arm_poly1305_init,
	.update			= arm_poly1305_update_neon,
	.final			= arm_poly1305_final,
	.digestsize		= POLY1305_DIGEST_SIZE,
	.descsize		= sizeof(struct poly1305_desc_ctx),

	.base.cra_name		= "poly1305",
	.base.cra_driver_name	= "poly1305-neon",
	.base.cra_priority	= 200,
	.base.cra_blocksize	= POLY1305_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
#endif
}};

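/*
 * Register only the scalar shash when NEON is unavailable; otherwise
 * enable the static key and register both entries.
 */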
static int __init arm_poly1305_mod_init(void)
{
	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
	    (elf_hwcap & HWCAP_NEON))
		static_branch_enable(&have_neon);
	else if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
		/* register only the first entry */
		return crypto_register_shash(&arm_poly1305_algs[0]);

	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
		crypto_register_shashes(arm_poly1305_algs,
					ARRAY_SIZE(arm_poly1305_algs)) : 0;
}

static void __exit arm_poly1305_mod_exit(void)
{
	if (!IS_REACHABLE(CONFIG_CRYPTO_HASH))
		return;
	if (!static_branch_likely(&have_neon)) {
		crypto_unregister_shash(&arm_poly1305_algs[0]);
		return;
	}
	crypto_unregister_shashes(arm_poly1305_algs,
				  ARRAY_SIZE(arm_poly1305_algs));
}

module_init(arm_poly1305_mod_init);
module_exit(arm_poly1305_mod_exit);

MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-arm");
MODULE_ALIAS_CRYPTO("poly1305-neon");