sha2-ce-glue.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");
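
/*
 * Private state for the ARMv8-CE transform.  The assembly code locates
 * @sst.count and @finalize through the sha256_ce_offsetof_* constants
 * defined below, so those constants must stay in sync with this layout.
 */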
struct sha256_ce_state {
	struct sha256_state	sst;
	u32			finalize;
};

extern const u32 sha256_ce_offsetof_count;
extern const u32 sha256_ce_offsetof_finalize;

asmlinkage int sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
				 int blocks);
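
/*
 * The asm routine may return before consuming all of the input, reporting
 * the number of blocks still to be processed.  Looping here lets us drop
 * and re-take the NEON section between chunks, which bounds the time spent
 * with preemption disabled on large inputs.
 */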
static void __sha2_ce_transform(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	while (blocks) {
		int rem;

		kernel_neon_begin();
		rem = sha2_ce_transform(container_of(sst,
						     struct sha256_ce_state,
						     sst), src, blocks);
		kernel_neon_end();
		src += (blocks - rem) * SHA256_BLOCK_SIZE;
		blocks = rem;
	}
}

const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
					      sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
						 finalize);

asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);
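
/*
 * Adapter so the scalar sha256_block_data_order() routine can be used as a
 * sha256_block_fn whenever the SIMD registers are off limits.
 */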
static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}
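
/*
 * crypto_simd_usable() is (re)checked on every call: hash requests may
 * arrive from contexts where the NEON registers must not be touched (e.g.
 * hard IRQ context), in which case we fall back to the scalar code.
 */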
static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
					     __sha256_block_data_order);

	sctx->finalize = 0;
	sha256_base_do_update(desc, data, len, __sha2_ce_transform);

	return 0;
}

static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
	bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;

	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
					      __sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
		return sha256_base_finish(desc, out);
	}

	/*
	 * Allow the asm code to perform the finalization if there is no
	 * partial data and the input is a round multiple of the block size.
	 */
	sctx->finalize = finalize;

	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
	if (!finalize)
		sha256_base_do_finalize(desc, __sha2_ce_transform);
	return sha256_base_finish(desc, out);
}

static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable()) {
		sha256_base_do_finalize(desc, __sha256_block_data_order);
		return sha256_base_finish(desc, out);
	}

	sctx->finalize = 0;
	sha256_base_do_finalize(desc, __sha2_ce_transform);
	return sha256_base_finish(desc, out);
}
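
/*
 * Only the generic struct sha256_state is (de)serialised; @finalize is
 * transient per-request state and is reset on import.
 */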
static int sha256_ce_export(struct shash_desc *desc, void *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(out, &sctx->sst, sizeof(struct sha256_state));
	return 0;
}

static int sha256_ce_import(struct shash_desc *desc, const void *in)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(&sctx->sst, in, sizeof(struct sha256_state));
	sctx->finalize = 0;
	return 0;
}

static struct shash_alg algs[] = { {
	.init			= sha224_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };
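
/*
 * Usage sketch (not part of this driver): consumers reach these
 * implementations through the generic shash API, e.g.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, digest);
 *		crypto_free_shash(tfm);
 *	}
 *
 * With cra_priority 200, these drivers win over the generic C
 * implementations (priority 100) once the module is loaded.
 */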
static int __init sha2_ce_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}
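
/* Bind module load to CPUs that advertise the SHA-2 instructions. */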
module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);