/* poly1305_generic.c */
  1. /*
  2. * Poly1305 authenticator algorithm, RFC7539
  3. *
  4. * Copyright (C) 2015 Martin Willi
  5. *
  6. * Based on public domain code by Andrew Moon and Daniel J. Bernstein.
  7. *
  8. * This program is free software; you can redistribute it and/or modify
  9. * it under the terms of the GNU General Public License as published by
  10. * the Free Software Foundation; either version 2 of the License, or
  11. * (at your option) any later version.
  12. */
  13. #include <crypto/algapi.h>
  14. #include <crypto/internal/hash.h>
  15. #include <crypto/poly1305.h>
  16. #include <linux/crypto.h>
  17. #include <linux/kernel.h>
  18. #include <linux/module.h>
  19. #include <asm/unaligned.h>
  20. static inline u64 mlt(u64 a, u64 b)
  21. {
  22. return a * b;
  23. }
  24. static inline u32 sr(u64 v, u_char n)
  25. {
  26. return v >> n;
  27. }
  28. static inline u32 and(u32 v, u32 mask)
  29. {
  30. return v & mask;
  31. }
  32. int crypto_poly1305_init(struct shash_desc *desc)
  33. {
  34. struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
  35. memset(dctx->h, 0, sizeof(dctx->h));
  36. dctx->buflen = 0;
  37. dctx->rset = false;
  38. dctx->sset = false;
  39. return 0;
  40. }
  41. EXPORT_SYMBOL_GPL(crypto_poly1305_init);
/*
 * Load the first 16 key bytes as the multiplier "r", split into five
 * 26-bit little-endian limbs.  The clamp required by the spec,
 * r &= 0xffffffc0ffffffc0ffffffc0fffffff
 * is folded into the per-limb masks below: each mask is the 26-bit limb
 * mask with the clamped bits additionally cleared.
 */
static void poly1305_setrkey(struct poly1305_desc_ctx *dctx, const u8 *key)
{
	/* r &= 0xffffffc0ffffffc0ffffffc0fffffff */
	dctx->r[0] = (get_unaligned_le32(key + 0) >> 0) & 0x3ffffff;
	dctx->r[1] = (get_unaligned_le32(key + 3) >> 2) & 0x3ffff03;
	dctx->r[2] = (get_unaligned_le32(key + 6) >> 4) & 0x3ffc0ff;
	dctx->r[3] = (get_unaligned_le32(key + 9) >> 6) & 0x3f03fff;
	dctx->r[4] = (get_unaligned_le32(key + 12) >> 8) & 0x00fffff;
}
  51. static void poly1305_setskey(struct poly1305_desc_ctx *dctx, const u8 *key)
  52. {
  53. dctx->s[0] = get_unaligned_le32(key + 0);
  54. dctx->s[1] = get_unaligned_le32(key + 4);
  55. dctx->s[2] = get_unaligned_le32(key + 8);
  56. dctx->s[3] = get_unaligned_le32(key + 12);
  57. }
  58. /*
  59. * Poly1305 requires a unique key for each tag, which implies that we can't set
  60. * it on the tfm that gets accessed by multiple users simultaneously. Instead we
  61. * expect the key as the first 32 bytes in the update() call.
  62. */
  63. unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
  64. const u8 *src, unsigned int srclen)
  65. {
  66. if (!dctx->sset) {
  67. if (!dctx->rset && srclen >= POLY1305_BLOCK_SIZE) {
  68. poly1305_setrkey(dctx, src);
  69. src += POLY1305_BLOCK_SIZE;
  70. srclen -= POLY1305_BLOCK_SIZE;
  71. dctx->rset = true;
  72. }
  73. if (srclen >= POLY1305_BLOCK_SIZE) {
  74. poly1305_setskey(dctx, src);
  75. src += POLY1305_BLOCK_SIZE;
  76. srclen -= POLY1305_BLOCK_SIZE;
  77. dctx->sset = true;
  78. }
  79. }
  80. return srclen;
  81. }
  82. EXPORT_SYMBOL_GPL(crypto_poly1305_setdesckey);
/*
 * Absorb as many full 16-byte blocks of src as possible into the
 * accumulator: h = (h + block) * r mod p, with p = 2^130 - 5 and the
 * state kept as five 26-bit limbs.
 *
 * hibit is OR'ed into the top limb of every block read here: 1 << 24
 * (the 2^128 pad bit) for full blocks, 0 when final() feeds an
 * already-padded partial block.  Returns the number of trailing bytes
 * (< POLY1305_BLOCK_SIZE) left unprocessed.
 */
static unsigned int poly1305_blocks(struct poly1305_desc_ctx *dctx,
				    const u8 *src, unsigned int srclen,
				    u32 hibit)
{
	u32 r0, r1, r2, r3, r4;
	u32 s1, s2, s3, s4;
	u32 h0, h1, h2, h3, h4;
	u64 d0, d1, d2, d3, d4;
	unsigned int datalen;

	/* The first 32 bytes of the stream carry the one-time key. */
	if (unlikely(!dctx->sset)) {
		datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
		src += srclen - datalen;
		srclen = datalen;
	}

	r0 = dctx->r[0];
	r1 = dctx->r[1];
	r2 = dctx->r[2];
	r3 = dctx->r[3];
	r4 = dctx->r[4];

	/* s_i = 5 * r_i: precomputed because 2^130 == 5 (mod p) */
	s1 = r1 * 5;
	s2 = r2 * 5;
	s3 = r3 * 5;
	s4 = r4 * 5;

	h0 = dctx->h[0];
	h1 = dctx->h[1];
	h2 = dctx->h[2];
	h3 = dctx->h[3];
	h4 = dctx->h[4];

	while (likely(srclen >= POLY1305_BLOCK_SIZE)) {
		/* h += m[i], read as five 26-bit little-endian limbs */
		h0 += (get_unaligned_le32(src + 0) >> 0) & 0x3ffffff;
		h1 += (get_unaligned_le32(src + 3) >> 2) & 0x3ffffff;
		h2 += (get_unaligned_le32(src + 6) >> 4) & 0x3ffffff;
		h3 += (get_unaligned_le32(src + 9) >> 6) & 0x3ffffff;
		h4 += (get_unaligned_le32(src + 12) >> 8) | hibit;

		/* h *= r, schoolbook multiply with the 2^130 wraparound
		 * folded in via the s_i terms */
		d0 = mlt(h0, r0) + mlt(h1, s4) + mlt(h2, s3) +
		     mlt(h3, s2) + mlt(h4, s1);
		d1 = mlt(h0, r1) + mlt(h1, r0) + mlt(h2, s4) +
		     mlt(h3, s3) + mlt(h4, s2);
		d2 = mlt(h0, r2) + mlt(h1, r1) + mlt(h2, r0) +
		     mlt(h3, s4) + mlt(h4, s3);
		d3 = mlt(h0, r3) + mlt(h1, r2) + mlt(h2, r1) +
		     mlt(h3, r0) + mlt(h4, s4);
		d4 = mlt(h0, r4) + mlt(h1, r3) + mlt(h2, r2) +
		     mlt(h3, r1) + mlt(h4, r0);

		/* (partial) h %= p: one carry pass, limbs stay near 26 bits */
		d1 += sr(d0, 26); h0 = and(d0, 0x3ffffff);
		d2 += sr(d1, 26); h1 = and(d1, 0x3ffffff);
		d3 += sr(d2, 26); h2 = and(d2, 0x3ffffff);
		d4 += sr(d3, 26); h3 = and(d3, 0x3ffffff);
		h0 += sr(d4, 26) * 5; h4 = and(d4, 0x3ffffff);
		h1 += h0 >> 26; h0 = h0 & 0x3ffffff;

		src += POLY1305_BLOCK_SIZE;
		srclen -= POLY1305_BLOCK_SIZE;
	}

	dctx->h[0] = h0;
	dctx->h[1] = h1;
	dctx->h[2] = h2;
	dctx->h[3] = h3;
	dctx->h[4] = h4;

	return srclen;
}
/*
 * crypto_poly1305_update - absorb message bytes (the first 32 bytes of
 * the stream are the one-time key, consumed inside poly1305_blocks()).
 *
 * Data is processed in 16-byte blocks; up to 15 trailing bytes are kept
 * in dctx->buf until more data arrives or final() pads them.
 */
int crypto_poly1305_update(struct shash_desc *desc,
			   const u8 *src, unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	unsigned int bytes;

	/* Top up and flush a previously buffered partial block first. */
	if (unlikely(dctx->buflen)) {
		bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		srclen -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			/* 1 << 24 is the 2^128 pad bit for a full block */
			poly1305_blocks(dctx, dctx->buf,
					POLY1305_BLOCK_SIZE, 1 << 24);
			dctx->buflen = 0;
		}
	}

	if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
		/* poly1305_blocks() returns the leftover byte count */
		bytes = poly1305_blocks(dctx, src, srclen, 1 << 24);
		src += srclen - bytes;
		srclen = bytes;
	}

	/* Stash any trailing partial block for the next call. */
	if (unlikely(srclen)) {
		dctx->buflen = srclen;
		memcpy(dctx->buf, src, srclen);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_update);
/*
 * crypto_poly1305_final - pad the last partial block, fully reduce the
 * accumulator mod p = 2^130 - 5, and write the 16-byte tag
 * (h + s) mod 2^128 to dst.
 *
 * Returns -ENOKEY if the 32-byte one-time key never arrived via update().
 */
int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	u32 h0, h1, h2, h3, h4;
	u32 g0, g1, g2, g3, g4;
	u32 mask;
	u64 f = 0;

	if (unlikely(!dctx->sset))
		return -ENOKEY;

	if (unlikely(dctx->buflen)) {
		/* Pad in-band: append 0x01, zero-fill to a full block.
		 * hibit is 0 because the pad bit is the byte just added. */
		dctx->buf[dctx->buflen++] = 1;
		memset(dctx->buf + dctx->buflen, 0,
		       POLY1305_BLOCK_SIZE - dctx->buflen);
		poly1305_blocks(dctx, dctx->buf, POLY1305_BLOCK_SIZE, 0);
	}

	/* fully carry h: after this every limb is a clean 26 bits */
	h0 = dctx->h[0];
	h1 = dctx->h[1];
	h2 = dctx->h[2];
	h3 = dctx->h[3];
	h4 = dctx->h[4];

	h2 += (h1 >> 26); h1 = h1 & 0x3ffffff;
	h3 += (h2 >> 26); h2 = h2 & 0x3ffffff;
	h4 += (h3 >> 26); h3 = h3 & 0x3ffffff;
	h0 += (h4 >> 26) * 5; h4 = h4 & 0x3ffffff;
	h1 += (h0 >> 26); h0 = h0 & 0x3ffffff;

	/* compute h + -p, i.e. g = h + 5 - 2^130 */
	g0 = h0 + 5;
	g1 = h1 + (g0 >> 26); g0 &= 0x3ffffff;
	g2 = h2 + (g1 >> 26); g1 &= 0x3ffffff;
	g3 = h3 + (g2 >> 26); g2 &= 0x3ffffff;
	g4 = h4 + (g3 >> 26) - (1 << 26); g3 &= 0x3ffffff;

	/* select h if h < p, or h + -p if h >= p: the sign bit of g4
	 * (borrow) expands into an all-ones/all-zeros mask so the
	 * selection is branchless */
	mask = (g4 >> ((sizeof(u32) * 8) - 1)) - 1;
	g0 &= mask;
	g1 &= mask;
	g2 &= mask;
	g3 &= mask;
	g4 &= mask;
	mask = ~mask;
	h0 = (h0 & mask) | g0;
	h1 = (h1 & mask) | g1;
	h2 = (h2 & mask) | g2;
	h3 = (h3 & mask) | g3;
	h4 = (h4 & mask) | g4;

	/* h = h % (2^128): repack the 26-bit limbs into four 32-bit words */
	h0 = (h0 >> 0) | (h1 << 26);
	h1 = (h1 >> 6) | (h2 << 20);
	h2 = (h2 >> 12) | (h3 << 14);
	h3 = (h3 >> 18) | (h4 << 8);

	/* mac = (h + s) % (2^128), carrying through f's upper 32 bits */
	f = (f >> 32) + h0 + dctx->s[0]; put_unaligned_le32(f, dst + 0);
	f = (f >> 32) + h1 + dctx->s[1]; put_unaligned_le32(f, dst + 4);
	f = (f >> 32) + h2 + dctx->s[2]; put_unaligned_le32(f, dst + 8);
	f = (f >> 32) + h3 + dctx->s[3]; put_unaligned_le32(f, dst + 12);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_final);
/*
 * Generic C shash implementation, registered as "poly1305" /
 * "poly1305-generic" with priority 100 so optimized drivers can rank
 * above it.
 */
static struct shash_alg poly1305_alg = {
	.digestsize	= POLY1305_DIGEST_SIZE,
	.init		= crypto_poly1305_init,
	.update		= crypto_poly1305_update,
	.final		= crypto_poly1305_final,
	.descsize	= sizeof(struct poly1305_desc_ctx),
	.base		= {
		.cra_name		= "poly1305",
		.cra_driver_name	= "poly1305-generic",
		.cra_priority		= 100,
		.cra_blocksize		= POLY1305_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	},
};
/* Register the generic Poly1305 shash with the crypto API on module load. */
static int __init poly1305_mod_init(void)
{
	return crypto_register_shash(&poly1305_alg);
}
/* Unregister the algorithm on module unload. */
static void __exit poly1305_mod_exit(void)
{
	crypto_unregister_shash(&poly1305_alg);
}
/* Module boilerplate: entry points, license, author, and algorithm aliases. */
module_init(poly1305_mod_init);
module_exit(poly1305_mod_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-generic");