poly1305-donna64.c

// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * This is based in part on Andrew Moon's poly1305-donna, which is in the
 * public domain.
 */

#include <linux/kernel.h>
#include <asm/unaligned.h>
#include <crypto/internal/poly1305.h>

typedef __uint128_t u128;
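
/*
 * The key r and the accumulator h are kept in base 2^44: three limbs of
 * 44, 44 and 42 bits (see the 0xfffffffffff and 0x3ffffffffff masks below),
 * so each limb fits a u64 and the per-block multiplications can use
 * 64x64 -> 128-bit products held in u128.
 */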

void poly1305_core_setkey(struct poly1305_core_key *key,
                          const u8 raw_key[POLY1305_BLOCK_SIZE])
{
        u64 t0, t1;

        /* r &= 0xffffffc0ffffffc0ffffffc0fffffff */
        t0 = get_unaligned_le64(&raw_key[0]);
        t1 = get_unaligned_le64(&raw_key[8]);

        key->key.r64[0] = t0 & 0xffc0fffffffULL;
        key->key.r64[1] = ((t0 >> 44) | (t1 << 20)) & 0xfffffc0ffffULL;
        key->key.r64[2] = ((t1 >> 24)) & 0x00ffffffc0fULL;
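
        /*
         * Limb products that overflow 2^130 wrap around multiplied by 5
         * (2^130 == 5 mod 2^130 - 5); since the upper limbs sit at weights
         * 2^44 and 2^88, the wrapped terms pick up an extra factor of 4,
         * hence the precomputed s = 20*r used in poly1305_core_blocks().
         */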
        /* s = 20*r */
        key->precomputed_s.r64[0] = key->key.r64[1] * 20;
        key->precomputed_s.r64[1] = key->key.r64[2] * 20;
}
EXPORT_SYMBOL(poly1305_core_setkey);

void poly1305_core_blocks(struct poly1305_state *state,
                          const struct poly1305_core_key *key, const void *src,
                          unsigned int nblocks, u32 hibit)
{
        const u8 *input = src;
        u64 hibit64;
        u64 r0, r1, r2;
        u64 s1, s2;
        u64 h0, h1, h2;
        u64 c;
        u128 d0, d1, d2, d;

        if (!nblocks)
                return;
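
        /*
         * hibit is the 2^128 pad bit added above each full 16-byte block;
         * callers pass 0 when a final partial block has already been padded
         * explicitly. Limb 2 holds bits 88..129 of h, so the bit lands at
         * position 128 - 88 = 40.
         */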
        hibit64 = ((u64)hibit) << 40;

        r0 = key->key.r64[0];
        r1 = key->key.r64[1];
        r2 = key->key.r64[2];

        h0 = state->h64[0];
        h1 = state->h64[1];
        h2 = state->h64[2];

        s1 = key->precomputed_s.r64[0];
        s2 = key->precomputed_s.r64[1];

        do {
                u64 t0, t1;

                /* h += m[i] */
                t0 = get_unaligned_le64(&input[0]);
                t1 = get_unaligned_le64(&input[8]);

                h0 += t0 & 0xfffffffffffULL;
                h1 += ((t0 >> 44) | (t1 << 20)) & 0xfffffffffffULL;
                h2 += (((t1 >> 24)) & 0x3ffffffffffULL) | hibit64;

                /* h *= r */
                d0 = (u128)h0 * r0;
                d = (u128)h1 * s2;
                d0 += d;
                d = (u128)h2 * s1;
                d0 += d;
                d1 = (u128)h0 * r1;
                d = (u128)h1 * r0;
                d1 += d;
                d = (u128)h2 * s2;
                d1 += d;
                d2 = (u128)h0 * r2;
                d = (u128)h1 * r1;
                d2 += d;
                d = (u128)h2 * r0;
                d2 += d;

                /* (partial) h %= p */
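                /*
                 * Carry the 128-bit products back down to 44/44/42-bit
                 * limbs; the overflow of the top limb wraps into h0
                 * multiplied by 5, since 2^130 == 5 (mod 2^130 - 5). h stays
                 * only partially reduced, which is fine for the next block.
                 */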
                c = (u64)(d0 >> 44);
                h0 = (u64)d0 & 0xfffffffffffULL;
                d1 += c;
                c = (u64)(d1 >> 44);
                h1 = (u64)d1 & 0xfffffffffffULL;
                d2 += c;
                c = (u64)(d2 >> 42);
                h2 = (u64)d2 & 0x3ffffffffffULL;
                h0 += c * 5;
                c = h0 >> 44;
                h0 = h0 & 0xfffffffffffULL;
                h1 += c;

                input += POLY1305_BLOCK_SIZE;
        } while (--nblocks);

        state->h64[0] = h0;
        state->h64[1] = h1;
        state->h64[2] = h2;
}
EXPORT_SYMBOL(poly1305_core_blocks);

void poly1305_core_emit(const struct poly1305_state *state, const u32 nonce[4],
                        void *dst)
{
        u8 *mac = dst;
        u64 h0, h1, h2, c;
        u64 g0, g1, g2;
        u64 t0, t1;

        /* fully carry h */
        h0 = state->h64[0];
        h1 = state->h64[1];
        h2 = state->h64[2];
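
        /*
         * The block loop leaves h only partially carried; the two carry
         * passes below (each folding the top-limb overflow back in times 5,
         * since 2^130 == 5 mod p) finish the carrying so that h can be
         * compared against p.
         */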
        c = h1 >> 44;
        h1 &= 0xfffffffffffULL;
        h2 += c;
        c = h2 >> 42;
        h2 &= 0x3ffffffffffULL;
        h0 += c * 5;
        c = h0 >> 44;
        h0 &= 0xfffffffffffULL;
        h1 += c;
        c = h1 >> 44;
        h1 &= 0xfffffffffffULL;
        h2 += c;
        c = h2 >> 42;
        h2 &= 0x3ffffffffffULL;
        h0 += c * 5;
        c = h0 >> 44;
        h0 &= 0xfffffffffffULL;
        h1 += c;

        /* compute h + -p */
        g0 = h0 + 5;
        c = g0 >> 44;
        g0 &= 0xfffffffffffULL;
        g1 = h1 + c;
        c = g1 >> 44;
        g1 &= 0xfffffffffffULL;
        g2 = h2 + c - (1ULL << 42);

        /* select h if h < p, or h + -p if h >= p */
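        /*
         * If h < p, the subtraction above borrowed and g2's sign bit is set,
         * so c below becomes 0 and h is kept; otherwise c is all ones and g
         * is taken. Branchless, so the choice does not leak through timing.
         */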
        c = (g2 >> ((sizeof(u64) * 8) - 1)) - 1;
        g0 &= c;
        g1 &= c;
        g2 &= c;
        c = ~c;
        h0 = (h0 & c) | g0;
        h1 = (h1 & c) | g1;
        h2 = (h2 & c) | g2;

        if (likely(nonce)) {
                /* h = (h + nonce) */
                t0 = ((u64)nonce[1] << 32) | nonce[0];
                t1 = ((u64)nonce[3] << 32) | nonce[2];

                h0 += t0 & 0xfffffffffffULL;
                c = h0 >> 44;
                h0 &= 0xfffffffffffULL;
                h1 += (((t0 >> 44) | (t1 << 20)) & 0xfffffffffffULL) + c;
                c = h1 >> 44;
                h1 &= 0xfffffffffffULL;
                h2 += (((t1 >> 24)) & 0x3ffffffffffULL) + c;
                h2 &= 0x3ffffffffffULL;
        }

        /* mac = h % (2^128) */
        h0 = h0 | (h1 << 44);
        h1 = (h1 >> 20) | (h2 << 24);

        put_unaligned_le64(h0, &mac[0]);
        put_unaligned_le64(h1, &mac[8]);
}
EXPORT_SYMBOL(poly1305_core_emit);