/* Copyright (C) 2016 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * This file is provided under a dual BSD/GPLv2 license.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.
 */

#include <linux/siphash.h>
#include <asm/unaligned.h>

#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
#include <linux/dcache.h>
#include <asm/word-at-a-time.h>
#endif
/*
 * SIPROUND - one ARX round of the SipHash permutation over the 256-bit
 * state held in v0..v3 (rotation constants per the SipHash paper).
 */
#define SIPROUND \
	do { \
	v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32); \
	v2 += v3; v3 = rol64(v3, 16); v3 ^= v2; \
	v0 += v3; v3 = rol64(v3, 21); v3 ^= v0; \
	v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32); \
	} while (0)

/*
 * PREAMBLE - declare the four state words, initialize them to the standard
 * SipHash IV constants XORed with the 128-bit key, and preload the final
 * message block 'b' with the input length in its top byte, as the SipHash
 * construction requires.
 */
#define PREAMBLE(len) \
	u64 v0 = 0x736f6d6570736575ULL; \
	u64 v1 = 0x646f72616e646f6dULL; \
	u64 v2 = 0x6c7967656e657261ULL; \
	u64 v3 = 0x7465646279746573ULL; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/*
 * POSTAMBLE - absorb the final block 'b' with two compression rounds, XOR
 * in the 0xff finalization constant, run the four finalization rounds
 * (hence SipHash-2-4), and return the XOR of all four state words.
 */
#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
/*
 * __siphash_aligned - SipHash-2-4 of a buffer.
 * @data: input buffer (assumed u64-aligned per the function's name — the
 *        non-word-at-a-time path reads it with le64_to_cpup; confirm the
 *        caller contract in linux/siphash.h)
 * @len: length of @data in bytes
 * @key: the 128-bit siphash key
 *
 * Returns the 64-bit SipHash-2-4 PRF value of @data under @key.
 */
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* One past the last complete 8-byte word of the input. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1); /* trailing bytes, 0..7 */
	u64 m;
	PREAMBLE(len)
	/* Absorb each little-endian 64-bit word: two rounds per word. */
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/*
	 * Tail fast path: load one whole word (the past-the-end read is made
	 * safe by load_unaligned_zeropad) and mask it down to 'left' bytes.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-wise tail; cases fall through intentionally.  The loop above
	 * leaves data == end, so le*_to_cpup(data) reads the tail start. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
	case 6: b |= ((u64)end[5]) << 40;
	case 5: b |= ((u64)end[4]) << 32;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * __siphash_unaligned - SipHash-2-4 of a buffer with no alignment
 * guarantee; only built when the arch lacks efficient unaligned loads
 * (otherwise the aligned variant serves both cases).
 * @data: input buffer, any alignment
 * @len: length of @data in bytes
 * @key: the 128-bit siphash key
 *
 * Returns the 64-bit SipHash-2-4 PRF value of @data under @key.
 */
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* One past the last complete 8-byte word of the input. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1); /* trailing bytes, 0..7 */
	u64 m;
	PREAMBLE(len)
	/* Absorb each little-endian 64-bit word: two rounds per word. */
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/*
	 * Tail fast path: load one whole word (the past-the-end read is made
	 * safe by load_unaligned_zeropad) and mask it down to 'left' bytes.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-wise tail; cases fall through intentionally (data == end). */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
	case 6: b |= ((u64)end[5]) << 40;
	case 5: b |= ((u64)end[4]) << 32;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);
#endif
  110. /**
  111. * siphash_1u64 - compute 64-bit siphash PRF value of a u64
  112. * @first: first u64
  113. * @key: the siphash key
  114. */
  115. u64 siphash_1u64(const u64 first, const siphash_key_t *key)
  116. {
  117. PREAMBLE(8)
  118. v3 ^= first;
  119. SIPROUND;
  120. SIPROUND;
  121. v0 ^= first;
  122. POSTAMBLE
  123. }
  124. EXPORT_SYMBOL(siphash_1u64);
  125. /**
  126. * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
  127. * @first: first u64
  128. * @second: second u64
  129. * @key: the siphash key
  130. */
  131. u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
  132. {
  133. PREAMBLE(16)
  134. v3 ^= first;
  135. SIPROUND;
  136. SIPROUND;
  137. v0 ^= first;
  138. v3 ^= second;
  139. SIPROUND;
  140. SIPROUND;
  141. v0 ^= second;
  142. POSTAMBLE
  143. }
  144. EXPORT_SYMBOL(siphash_2u64);
  145. /**
  146. * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
  147. * @first: first u64
  148. * @second: second u64
  149. * @third: third u64
  150. * @key: the siphash key
  151. */
  152. u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
  153. const siphash_key_t *key)
  154. {
  155. PREAMBLE(24)
  156. v3 ^= first;
  157. SIPROUND;
  158. SIPROUND;
  159. v0 ^= first;
  160. v3 ^= second;
  161. SIPROUND;
  162. SIPROUND;
  163. v0 ^= second;
  164. v3 ^= third;
  165. SIPROUND;
  166. SIPROUND;
  167. v0 ^= third;
  168. POSTAMBLE
  169. }
  170. EXPORT_SYMBOL(siphash_3u64);
  171. /**
  172. * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
  173. * @first: first u64
  174. * @second: second u64
  175. * @third: third u64
  176. * @forth: forth u64
  177. * @key: the siphash key
  178. */
  179. u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
  180. const u64 forth, const siphash_key_t *key)
  181. {
  182. PREAMBLE(32)
  183. v3 ^= first;
  184. SIPROUND;
  185. SIPROUND;
  186. v0 ^= first;
  187. v3 ^= second;
  188. SIPROUND;
  189. SIPROUND;
  190. v0 ^= second;
  191. v3 ^= third;
  192. SIPROUND;
  193. SIPROUND;
  194. v0 ^= third;
  195. v3 ^= forth;
  196. SIPROUND;
  197. SIPROUND;
  198. v0 ^= forth;
  199. POSTAMBLE
  200. }
  201. EXPORT_SYMBOL(siphash_4u64);
/**
 * siphash_1u32 - compute 64-bit siphash PRF value of a u32
 * @first: first u32
 * @key: the siphash key
 */
u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	/* Only 4 bytes: the lone u32 rides in the length-tagged final block. */
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);
  209. u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
  210. const siphash_key_t *key)
  211. {
  212. u64 combined = (u64)second << 32 | first;
  213. PREAMBLE(12)
  214. v3 ^= combined;
  215. SIPROUND;
  216. SIPROUND;
  217. v0 ^= combined;
  218. b |= third;
  219. POSTAMBLE
  220. }
  221. EXPORT_SYMBOL(siphash_3u32);
#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */
#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)

/*
 * HPOSTAMBLE - SipHash1-3 finalization: one compression round for the
 * final block and three finalization rounds (vs 2/4 for SipHash-2-4).
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
/*
 * __hsiphash_aligned - hashtable PRF of a buffer; on 64-bit this is really
 * SipHash1-3 truncated to 32 bits (see the note above).
 * @data: input buffer (assumed u64-aligned per the function's name —
 *        confirm the caller contract in linux/siphash.h)
 * @len: length of @data in bytes
 * @key: the hsiphash key
 *
 * Returns the 32-bit hsiphash PRF value of @data under @key.
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* One past the last complete 8-byte word of the input. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1); /* trailing bytes, 0..7 */
	u64 m;
	HPREAMBLE(len)
	/* Absorb each little-endian 64-bit word: one round per word. */
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/*
	 * Tail fast path: load one whole word (the past-the-end read is made
	 * safe by load_unaligned_zeropad) and mask it down to 'left' bytes.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-wise tail; cases fall through intentionally (data == end). */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
	case 6: b |= ((u64)end[5]) << 40;
	case 5: b |= ((u64)end[4]) << 32;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * __hsiphash_unaligned - hashtable PRF of a buffer with no alignment
 * guarantee; only built when the arch lacks efficient unaligned loads.
 * @data: input buffer, any alignment
 * @len: length of @data in bytes
 * @key: the hsiphash key
 *
 * Returns the 32-bit hsiphash PRF value of @data under @key.
 */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	/* One past the last complete 8-byte word of the input. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1); /* trailing bytes, 0..7 */
	u64 m;
	HPREAMBLE(len)
	/* Absorb each little-endian 64-bit word: one round per word. */
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/*
	 * Tail fast path: load one whole word (the past-the-end read is made
	 * safe by load_unaligned_zeropad) and mask it down to 'left' bytes.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-wise tail; cases fall through intentionally (data == end). */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48;
	case 6: b |= ((u64)end[5]) << 40;
	case 5: b |= ((u64)end[4]) << 32;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
#endif
/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	/* Only 4 bytes: the lone u32 rides in the length-tagged final block. */
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
  312. /**
  313. * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
  314. * @first: first u32
  315. * @second: second u32
  316. * @key: the hsiphash key
  317. */
  318. u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
  319. {
  320. u64 combined = (u64)second << 32 | first;
  321. HPREAMBLE(8)
  322. v3 ^= combined;
  323. HSIPROUND;
  324. v0 ^= combined;
  325. HPOSTAMBLE
  326. }
  327. EXPORT_SYMBOL(hsiphash_2u32);
  328. /**
  329. * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
  330. * @first: first u32
  331. * @second: second u32
  332. * @third: third u32
  333. * @key: the hsiphash key
  334. */
  335. u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
  336. const hsiphash_key_t *key)
  337. {
  338. u64 combined = (u64)second << 32 | first;
  339. HPREAMBLE(12)
  340. v3 ^= combined;
  341. HSIPROUND;
  342. v0 ^= combined;
  343. b |= third;
  344. HPOSTAMBLE
  345. }
  346. EXPORT_SYMBOL(hsiphash_3u32);
  347. /**
  348. * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
  349. * @first: first u32
  350. * @second: second u32
  351. * @third: third u32
  352. * @forth: forth u32
  353. * @key: the hsiphash key
  354. */
  355. u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
  356. const u32 forth, const hsiphash_key_t *key)
  357. {
  358. u64 combined = (u64)second << 32 | first;
  359. HPREAMBLE(16)
  360. v3 ^= combined;
  361. HSIPROUND;
  362. v0 ^= combined;
  363. combined = (u64)forth << 32 | third;
  364. v3 ^= combined;
  365. HSIPROUND;
  366. v0 ^= combined;
  367. HPOSTAMBLE
  368. }
  369. EXPORT_SYMBOL(hsiphash_4u32);
#else
/*
 * HSIPROUND - one ARX round of the 32-bit HalfSipHash permutation over
 * v0..v3 (rotation constants per the HalfSipHash design).
 */
#define HSIPROUND \
	do { \
	v0 += v1; v1 = rol32(v1, 5); v1 ^= v0; v0 = rol32(v0, 16); \
	v2 += v3; v3 = rol32(v3, 8); v3 ^= v2; \
	v0 += v3; v3 = rol32(v3, 7); v3 ^= v0; \
	v2 += v1; v1 = rol32(v1, 13); v1 ^= v2; v2 = rol32(v2, 16); \
	} while (0)

/*
 * HPREAMBLE - declare the 4x32-bit HalfSipHash state, key it, and preload
 * the final block 'b' with the input length in its top byte.
 */
#define HPREAMBLE(len) \
	u32 v0 = 0; \
	u32 v1 = 0; \
	u32 v2 = 0x6c796765U; \
	u32 v3 = 0x74656462U; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/*
 * HPOSTAMBLE - HalfSipHash1-3 finalization: one compression round for the
 * final block, 0xff constant, three finalization rounds; the 32-bit PRF
 * output is v1 ^ v3.
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;
/*
 * __hsiphash_aligned - HalfSipHash1-3 of a buffer (32-bit long case).
 * @data: input buffer (assumed u32-aligned per the function's name —
 *        confirm the caller contract in linux/siphash.h)
 * @len: length of @data in bytes
 * @key: the hsiphash key
 *
 * Returns the 32-bit HalfSipHash1-3 PRF value of @data under @key.
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* One past the last complete 4-byte word of the input. */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1); /* trailing bytes, 0..3 */
	u32 m;
	HPREAMBLE(len)
	/* Absorb each little-endian 32-bit word: one round per word. */
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* Byte-wise tail; cases fall through intentionally.  The loop above
	 * leaves data == end here. */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * __hsiphash_unaligned - HalfSipHash1-3 of a buffer with no alignment
 * guarantee; only built when the arch lacks efficient unaligned loads.
 * @data: input buffer, any alignment
 * @len: length of @data in bytes
 * @key: the hsiphash key
 *
 * Returns the 32-bit HalfSipHash1-3 PRF value of @data under @key.
 */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	/* One past the last complete 4-byte word of the input. */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1); /* trailing bytes, 0..3 */
	u32 m;
	HPREAMBLE(len)
	/* Absorb each little-endian 32-bit word: one round per word. */
	for (; data != end; data += sizeof(u32)) {
		m = get_unaligned_le32(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* Byte-wise tail; cases fall through intentionally (data == end). */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
#endif
  440. /**
  441. * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
  442. * @first: first u32
  443. * @key: the hsiphash key
  444. */
  445. u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
  446. {
  447. HPREAMBLE(4)
  448. v3 ^= first;
  449. HSIPROUND;
  450. v0 ^= first;
  451. HPOSTAMBLE
  452. }
  453. EXPORT_SYMBOL(hsiphash_1u32);
  454. /**
  455. * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
  456. * @first: first u32
  457. * @second: second u32
  458. * @key: the hsiphash key
  459. */
  460. u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
  461. {
  462. HPREAMBLE(8)
  463. v3 ^= first;
  464. HSIPROUND;
  465. v0 ^= first;
  466. v3 ^= second;
  467. HSIPROUND;
  468. v0 ^= second;
  469. HPOSTAMBLE
  470. }
  471. EXPORT_SYMBOL(hsiphash_2u32);
  472. /**
  473. * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
  474. * @first: first u32
  475. * @second: second u32
  476. * @third: third u32
  477. * @key: the hsiphash key
  478. */
  479. u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
  480. const hsiphash_key_t *key)
  481. {
  482. HPREAMBLE(12)
  483. v3 ^= first;
  484. HSIPROUND;
  485. v0 ^= first;
  486. v3 ^= second;
  487. HSIPROUND;
  488. v0 ^= second;
  489. v3 ^= third;
  490. HSIPROUND;
  491. v0 ^= third;
  492. HPOSTAMBLE
  493. }
  494. EXPORT_SYMBOL(hsiphash_3u32);
  495. /**
  496. * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
  497. * @first: first u32
  498. * @second: second u32
  499. * @third: third u32
  500. * @forth: forth u32
  501. * @key: the hsiphash key
  502. */
  503. u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
  504. const u32 forth, const hsiphash_key_t *key)
  505. {
  506. HPREAMBLE(16)
  507. v3 ^= first;
  508. HSIPROUND;
  509. v0 ^= first;
  510. v3 ^= second;
  511. HSIPROUND;
  512. v0 ^= second;
  513. v3 ^= third;
  514. HSIPROUND;
  515. v0 ^= third;
  516. v3 ^= forth;
  517. HSIPROUND;
  518. v0 ^= forth;
  519. HPOSTAMBLE
  520. }
  521. EXPORT_SYMBOL(hsiphash_4u32);
  522. #endif