/* cipher-gcm-siv.c - GCM-SIV implementation (RFC 8452)
 * Copyright (C) 2021 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This file is part of Libgcrypt.
 *
 * Libgcrypt is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * Libgcrypt is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this program; if not, see <http://www.gnu.org/licenses/>.
 */
#include <config.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>

#include "g10lib.h"
#include "cipher.h"
#include "bufhelp.h"
#include "./cipher-internal.h"


#define GCM_SIV_NONCE_LENGTH (96 / 8)
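
/* Multiply the big-endian GHASH field element A by x in GF(2^128);
 * this is the mulX_GHASH operation from RFC 8452, Appendix A, used to
 * convert a POLYVAL key into the corresponding GHASH key.  */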
static inline void
mulx_ghash (byte *a)
{
  u64 t[2], mask;

  t[0] = buf_get_be64(a + 0);
  t[1] = buf_get_be64(a + 8);
  mask = -(t[1] & 1) & 0xe1;
  mask <<= 56;

  buf_put_be64(a + 8, (t[1] >> 1) ^ (t[0] << 63));
  buf_put_be64(a + 0, (t[0] >> 1) ^ mask);
}
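
/* Add ADD bytes to the 64-bit byte counter CTR, stored as two 32-bit
 * words (CTR[0] holds the low word, CTR[1] the high word), including
 * carry propagation.  */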
static inline void
gcm_siv_bytecounter_add (u32 ctr[2], size_t add)
{
  if (sizeof(add) > sizeof(u32))
    {
      u32 high_add = ((add >> 31) >> 1) & 0xffffffff;
      ctr[1] += high_add;
    }

  ctr[0] += add;
  if (ctr[0] >= add)
    return;
  ++ctr[1];
}
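
/* Return 1 if the byte counter CTR is still within the GCM-SIV
 * per-message length limit, 0 otherwise.  */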
static inline int
gcm_siv_check_len (u32 ctr[2])
{
  /* len(plaintext/aadlen) <= 2^39-256 bits == 2^36-32 bytes == 2^32-2 blocks */
  if (ctr[1] > 0xfU)
    return 0;
  if (ctr[1] < 0xfU)
    return 1;

  if (ctr[0] <= 0xffffffe0U)
    return 1;

  return 0;
}
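
/* Install AUTH_KEY as the POLYVAL key.  POLYVAL is implemented on top
 * of the GHASH machinery via the relation from RFC 8452, Appendix A:
 * the byte-reversed POLYVAL key, multiplied by x, is the GHASH key.  */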
static void
polyval_set_key (gcry_cipher_hd_t c, const byte *auth_key)
{
  cipher_block_bswap (c->u_mode.gcm.u_ghash_key.key, auth_key,
                      GCRY_SIV_BLOCK_LEN);
  mulx_ghash (c->u_mode.gcm.u_ghash_key.key);
  _gcry_cipher_gcm_setupM (c);
}
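
/* Hash BUFLEN bytes from BUF into HASH with POLYVAL.  Partial blocks
 * are collected in the MAC buffer; if DO_PADDING is set, the final
 * partial block is zero-padded.  A dedicated POLYVAL bulk function is
 * used when available, otherwise blocks are byte-reversed and passed
 * to the GHASH function (RFC 8452, Appendix A).  */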
static void
do_polyval_buf(gcry_cipher_hd_t c, byte *hash, const byte *buf,
               size_t buflen, int do_padding)
{
  unsigned int blocksize = GCRY_SIV_BLOCK_LEN;
  unsigned int unused = c->u_mode.gcm.mac_unused;
  ghash_fn_t ghash_fn = c->u_mode.gcm.ghash_fn;
  ghash_fn_t polyval_fn = c->u_mode.gcm.polyval_fn;
  byte tmp_blocks[16][GCRY_SIV_BLOCK_LEN];
  size_t nblocks, n;
  unsigned int burn = 0, nburn;
  unsigned int num_blks_used = 0;

  if (buflen == 0 && (unused == 0 || !do_padding))
    return;

  do
    {
      if (buflen > 0 && (buflen + unused < blocksize || unused > 0))
        {
          n = blocksize - unused;
          n = n < buflen ? n : buflen;

          buf_cpy (&c->u_mode.gcm.macbuf[unused], buf, n);

          unused += n;
          buf += n;
          buflen -= n;
        }
      if (!buflen)
        {
          if (!do_padding && unused < blocksize)
            {
              break;
            }

          n = blocksize - unused;
          if (n > 0)
            {
              memset (&c->u_mode.gcm.macbuf[unused], 0, n);
              unused = blocksize;
            }
        }

      if (unused > 0)
        {
          gcry_assert (unused == blocksize);

          /* Process one block from macbuf. */
          if (polyval_fn)
            {
              nburn = polyval_fn (c, hash, c->u_mode.gcm.macbuf, 1);
            }
          else
            {
              cipher_block_bswap (c->u_mode.gcm.macbuf, c->u_mode.gcm.macbuf,
                                  blocksize);
              nburn = ghash_fn (c, hash, c->u_mode.gcm.macbuf, 1);
            }

          burn = nburn > burn ? nburn : burn;
          unused = 0;
        }

      nblocks = buflen / blocksize;

      while (nblocks)
        {
          if (polyval_fn)
            {
              n = nblocks;
              nburn = polyval_fn (c, hash, buf, n);
            }
          else
            {
              for (n = 0; n < (nblocks > 16 ? 16 : nblocks); n++)
                cipher_block_bswap (tmp_blocks[n], buf + n * blocksize,
                                    blocksize);

              num_blks_used = n > num_blks_used ? n : num_blks_used;

              nburn = ghash_fn (c, hash, tmp_blocks[0], n);
            }

          burn = nburn > burn ? nburn : burn;
          buf += n * blocksize;
          buflen -= n * blocksize;
          nblocks -= n;
        }
    }
  while (buflen > 0);

  c->u_mode.gcm.mac_unused = unused;

  if (num_blks_used)
    wipememory (tmp_blocks, num_blks_used * blocksize);
  if (burn)
    _gcry_burn_stack (burn);
}
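
/* CTR encryption with a 32-bit little-endian counter in the first four
 * bytes of the counter block, the AES-CTR variant specified for
 * AES-GCM-SIV in RFC 8452, Section 4.  */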
static void
do_ctr_le32 (gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf,
             size_t inbuflen)
{
  gcry_cipher_encrypt_t enc_fn = c->spec->encrypt;
  unsigned char tmp[GCRY_SIV_BLOCK_LEN];
  unsigned int burn = 0, nburn;
  size_t nblocks;

  if (inbuflen == 0)
    return;

  /* Use a bulk method if available. */
  nblocks = inbuflen / GCRY_SIV_BLOCK_LEN;
  if (nblocks && c->bulk.ctr32le_enc)
    {
      c->bulk.ctr32le_enc (c->context.c, c->u_ctr.ctr, outbuf, inbuf, nblocks);
      inbuf  += nblocks * GCRY_SIV_BLOCK_LEN;
      outbuf += nblocks * GCRY_SIV_BLOCK_LEN;
      inbuflen -= nblocks * GCRY_SIV_BLOCK_LEN;
    }

  do
    {
      nburn = enc_fn (c->context.c, tmp, c->u_ctr.ctr);
      burn = nburn > burn ? nburn : burn;

      buf_put_le32(c->u_ctr.ctr, buf_get_le32(c->u_ctr.ctr) + 1);

      if (inbuflen < GCRY_SIV_BLOCK_LEN)
        break;

      cipher_block_xor(outbuf, inbuf, tmp, GCRY_SIV_BLOCK_LEN);

      inbuflen -= GCRY_SIV_BLOCK_LEN;
      outbuf += GCRY_SIV_BLOCK_LEN;
      inbuf  += GCRY_SIV_BLOCK_LEN;
    }
  while (inbuflen);

  if (inbuflen)
    {
      buf_xor(outbuf, inbuf, tmp, inbuflen);

      outbuf += inbuflen;
      inbuf  += inbuflen;
      inbuflen -= inbuflen;
    }

  wipememory (tmp, sizeof(tmp));

  if (burn > 0)
    _gcry_burn_stack (burn + 4 * sizeof(void *));
}
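
/* Known-answer tests for mulx_ghash, the POLYVAL-to-GHASH key
 * conversion and POLYVAL itself; the vectors correspond to the worked
 * example in RFC 8452, Appendix A.  */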
static int
gcm_siv_selftest (gcry_cipher_hd_t c)
{
  static const byte in1[GCRY_SIV_BLOCK_LEN] =
      "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00";
  static const byte out1[GCRY_SIV_BLOCK_LEN] =
      "\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00";
  static const byte in2[GCRY_SIV_BLOCK_LEN] =
      "\x9c\x98\xc0\x4d\xf9\x38\x7d\xed\x82\x81\x75\xa9\x2b\xa6\x52\xd8";
  static const byte out2[GCRY_SIV_BLOCK_LEN] =
      "\x4e\x4c\x60\x26\xfc\x9c\x3e\xf6\xc1\x40\xba\xd4\x95\xd3\x29\x6c";
  static const byte polyval_key[GCRY_SIV_BLOCK_LEN] =
      "\x25\x62\x93\x47\x58\x92\x42\x76\x1d\x31\xf8\x26\xba\x4b\x75\x7b";
  static const byte ghash_key[GCRY_SIV_BLOCK_LEN] =
      "\xdc\xba\xa5\xdd\x13\x7c\x18\x8e\xbb\x21\x49\x2c\x23\xc9\xb1\x12";
  static const byte polyval_data[GCRY_SIV_BLOCK_LEN * 2] =
      "\x4f\x4f\x95\x66\x8c\x83\xdf\xb6\x40\x17\x62\xbb\x2d\x01\xa2\x62"
      "\xd1\xa2\x4d\xdd\x27\x21\xd0\x06\xbb\xe4\x5f\x20\xd3\xc9\xf3\x62";
  static const byte polyval_tag[GCRY_SIV_BLOCK_LEN] =
      "\xf7\xa3\xb4\x7b\x84\x61\x19\xfa\xe5\xb7\x86\x6c\xf5\xe5\xb7\x7e";
  byte tmp[GCRY_SIV_BLOCK_LEN];

  /* Test mulx_ghash */
  memcpy (tmp, in1, GCRY_SIV_BLOCK_LEN);
  mulx_ghash (tmp);
  if (memcmp (tmp, out1, GCRY_SIV_BLOCK_LEN) != 0)
    return -1;

  memcpy (tmp, in2, GCRY_SIV_BLOCK_LEN);
  mulx_ghash (tmp);
  if (memcmp (tmp, out2, GCRY_SIV_BLOCK_LEN) != 0)
    return -1;

  /* Test GHASH key generation */
  memcpy (tmp, polyval_key, GCRY_SIV_BLOCK_LEN);
  cipher_block_bswap (tmp, tmp, GCRY_SIV_BLOCK_LEN);
  mulx_ghash (tmp);
  if (memcmp (tmp, ghash_key, GCRY_SIV_BLOCK_LEN) != 0)
    return -1;

  /* Test POLYVAL */
  memset (&c->u_mode.gcm, 0, sizeof(c->u_mode.gcm));
  polyval_set_key (c, polyval_key);
  memset (&tmp, 0, sizeof(tmp));
  do_polyval_buf (c, tmp, polyval_data, GCRY_SIV_BLOCK_LEN * 2, 1);
  cipher_block_bswap (tmp, tmp, GCRY_SIV_BLOCK_LEN);
  if (memcmp (tmp, polyval_tag, GCRY_SIV_BLOCK_LEN) != 0)
    return -1;

  return 0;
}
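
/* Mode-specific setkey hook.  Only 16- or 32-byte master keys are
 * accepted, a one-time self-test is run, and per-message state is
 * cleared.  Only the key length is recorded here; the per-message
 * keys are derived later when the nonce is set.  */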
gcry_err_code_t
_gcry_cipher_gcm_siv_setkey (gcry_cipher_hd_t c, unsigned int keylen)
{
  static int done;

  if (keylen != 16 && keylen != 32)
    return GPG_ERR_INV_KEYLEN;

  if (!done)
    {
      if (gcm_siv_selftest (c))
        return GPG_ERR_SELFTEST_FAILED;

      done = 1;
    }

  c->marks.iv = 0;
  c->marks.tag = 0;
  memset (&c->u_mode.gcm, 0, sizeof(c->u_mode.gcm));
  c->u_mode.gcm.siv_keylen = keylen;
  return 0;
}
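
/* Set the 96-bit nonce and derive the per-message keys as described in
 * RFC 8452, Section 4: the blocks AES_K(LE32(i) || nonce) with i = 0, 1
 * provide the message-authentication (POLYVAL) key, and i = 2, 3 (plus
 * 4, 5 for a 256-bit key) provide the message-encryption key, taking
 * the first eight bytes of each block.  The derived encryption key then
 * replaces the master key in the cipher context.  */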
gcry_err_code_t
_gcry_cipher_gcm_siv_set_nonce (gcry_cipher_hd_t c, const byte *iv,
                                size_t ivlen)
{
  byte auth_key[GCRY_SIV_BLOCK_LEN];
  byte tmp_in[GCRY_SIV_BLOCK_LEN];
  byte tmp[GCRY_SIV_BLOCK_LEN];
  byte enc_key[32];
  gcry_err_code_t err;

  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
    return GPG_ERR_CIPHER_ALGO;
  if (ivlen != GCM_SIV_NONCE_LENGTH)
    return GPG_ERR_INV_ARG;
  if (c->u_mode.gcm.siv_keylen == 0)
    return GPG_ERR_INV_STATE;
  if (c->marks.iv)
    {
      /* If nonce is already set, use cipher_reset or setkey first to reset
       * cipher state. */
      return GPG_ERR_INV_STATE;
    }

  memset (c->u_mode.gcm.aadlen, 0, sizeof(c->u_mode.gcm.aadlen));
  memset (c->u_mode.gcm.datalen, 0, sizeof(c->u_mode.gcm.datalen));
  memset (c->u_mode.gcm.u_tag.tag, 0, sizeof(c->u_mode.gcm.u_tag.tag));
  c->u_mode.gcm.datalen_over_limits = 0;
  c->u_mode.gcm.ghash_data_finalized = 0;
  c->u_mode.gcm.ghash_aad_finalized = 0;

  memset (c->u_iv.iv, 0, GCRY_SIV_BLOCK_LEN);
  memcpy (c->u_iv.iv, iv, ivlen);
  memcpy (tmp_in + 4, iv, ivlen);

  /* Derive message authentication key */
  buf_put_le32(tmp_in, 0);
  c->spec->encrypt (&c->context.c, tmp, tmp_in);
  memcpy (auth_key + 0, tmp, 8);

  buf_put_le32(tmp_in, 1);
  c->spec->encrypt (&c->context.c, tmp, tmp_in);
  memcpy (auth_key + 8, tmp, 8);

  polyval_set_key (c, auth_key);
  wipememory (auth_key, sizeof(auth_key));

  /* Derive message encryption key */
  buf_put_le32(tmp_in, 2);
  c->spec->encrypt (&c->context.c, tmp, tmp_in);
  memcpy (enc_key + 0, tmp, 8);

  buf_put_le32(tmp_in, 3);
  c->spec->encrypt (&c->context.c, tmp, tmp_in);
  memcpy (enc_key + 8, tmp, 8);

  if (c->u_mode.gcm.siv_keylen >= 24)
    {
      buf_put_le32(tmp_in, 4);
      c->spec->encrypt (&c->context.c, tmp, tmp_in);
      memcpy (enc_key + 16, tmp, 8);
    }

  if (c->u_mode.gcm.siv_keylen >= 32)
    {
      buf_put_le32(tmp_in, 5);
      c->spec->encrypt (&c->context.c, tmp, tmp_in);
      memcpy (enc_key + 24, tmp, 8);
    }

  wipememory (tmp, sizeof(tmp));
  wipememory (tmp_in, sizeof(tmp_in));

  err = c->spec->setkey (&c->context.c, enc_key, c->u_mode.gcm.siv_keylen,
                         &c->bulk);
  wipememory (enc_key, sizeof(enc_key));
  if (err)
    return err;

  c->marks.iv = 1;
  return 0;
}
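
/* Absorb additional authenticated data into the POLYVAL state.  All
 * AAD must be supplied before the first encryption or decryption
 * call.  */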
gcry_err_code_t
_gcry_cipher_gcm_siv_authenticate (gcry_cipher_hd_t c,
                                   const byte *aadbuf, size_t aadbuflen)
{
  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
    return GPG_ERR_CIPHER_ALGO;
  if (c->u_mode.gcm.datalen_over_limits)
    return GPG_ERR_INV_LENGTH;
  if (c->marks.tag
      || !c->marks.iv
      || c->u_mode.gcm.ghash_aad_finalized
      || c->u_mode.gcm.ghash_data_finalized
      || !c->u_mode.gcm.ghash_fn)
    return GPG_ERR_INV_STATE;

  gcm_siv_bytecounter_add (c->u_mode.gcm.aadlen, aadbuflen);
  if (!gcm_siv_check_len (c->u_mode.gcm.aadlen))
    {
      c->u_mode.gcm.datalen_over_limits = 1;
      return GPG_ERR_INV_LENGTH;
    }

  do_polyval_buf (c, c->u_mode.gcm.u_tag.tag, aadbuf, aadbuflen, 0);

  return 0;
}
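
/* GCM-SIV encryption.  POLYVAL is computed over the padded AAD, the
 * padded plaintext and a length block; the result is XORed with the
 * nonce, the most significant bit of its last byte is cleared, and the
 * block is encrypted to form the tag.  The tag with that bit forced to
 * one then seeds the CTR encryption of the plaintext (RFC 8452,
 * Section 4).  */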
gcry_err_code_t
_gcry_cipher_gcm_siv_encrypt (gcry_cipher_hd_t c,
                              byte *outbuf, size_t outbuflen,
                              const byte *inbuf, size_t inbuflen)
{
  u32 bitlengths[2][2];

  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
    return GPG_ERR_CIPHER_ALGO;
  if (outbuflen < inbuflen)
    return GPG_ERR_BUFFER_TOO_SHORT;
  if (c->u_mode.gcm.datalen_over_limits)
    return GPG_ERR_INV_LENGTH;
  if (c->marks.tag
      || !c->marks.iv
      || c->u_mode.gcm.ghash_data_finalized
      || !c->u_mode.gcm.ghash_fn)
    return GPG_ERR_INV_STATE;

  if (!c->u_mode.gcm.ghash_aad_finalized)
    {
      /* Start of encryption marks end of AAD stream. */
      do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, NULL, 0, 1);
      c->u_mode.gcm.ghash_aad_finalized = 1;
    }

  gcm_siv_bytecounter_add (c->u_mode.gcm.datalen, inbuflen);
  if (!gcm_siv_check_len (c->u_mode.gcm.datalen))
    {
      c->u_mode.gcm.datalen_over_limits = 1;
      return GPG_ERR_INV_LENGTH;
    }

  /* Plaintext and padding to POLYVAL. */
  do_polyval_buf (c, c->u_mode.gcm.u_tag.tag, inbuf, inbuflen, 1);
  c->u_mode.gcm.ghash_data_finalized = 1;

  /* aad length */
  bitlengths[0][0] = le_bswap32(c->u_mode.gcm.aadlen[0] << 3);
  bitlengths[0][1] = le_bswap32((c->u_mode.gcm.aadlen[0] >> 29) |
                                (c->u_mode.gcm.aadlen[1] << 3));
  /* data length */
  bitlengths[1][0] = le_bswap32(c->u_mode.gcm.datalen[0] << 3);
  bitlengths[1][1] = le_bswap32((c->u_mode.gcm.datalen[0] >> 29) |
                                (c->u_mode.gcm.datalen[1] << 3));

  /* Length block to POLYVAL. */
  do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, (byte *)bitlengths,
                 GCRY_SIV_BLOCK_LEN, 1);
  wipememory (bitlengths, sizeof(bitlengths));

  /* Prepare tag and counter. */
  cipher_block_bswap (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.u_tag.tag,
                      GCRY_SIV_BLOCK_LEN);
  cipher_block_xor (c->u_mode.gcm.tagiv, c->u_iv.iv, c->u_mode.gcm.u_tag.tag,
                    GCRY_SIV_BLOCK_LEN);
  c->u_mode.gcm.tagiv[GCRY_SIV_BLOCK_LEN - 1] &= 0x7f;
  c->spec->encrypt (&c->context.c, c->u_mode.gcm.tagiv, c->u_mode.gcm.tagiv);
  c->marks.tag = 1;

  memcpy (c->u_ctr.ctr, c->u_mode.gcm.tagiv, GCRY_SIV_BLOCK_LEN);
  c->u_ctr.ctr[GCRY_SIV_BLOCK_LEN - 1] |= 0x80;

  /* Encrypt data */
  do_ctr_le32 (c, outbuf, inbuf, inbuflen);

  return 0;
}
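
/* Record the expected authentication tag before decryption.  GCM-SIV
 * derives the CTR counter from the tag, so it has to be supplied up
 * front.  */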
gcry_err_code_t
_gcry_cipher_gcm_siv_set_decryption_tag (gcry_cipher_hd_t c,
                                         const byte *tag, size_t taglen)
{
  if (taglen != GCRY_SIV_BLOCK_LEN)
    return GPG_ERR_INV_ARG;
  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
    return GPG_ERR_CIPHER_ALGO;
  if (c->marks.tag)
    return GPG_ERR_INV_STATE;

  memcpy (c->u_mode.gcm.tagiv, tag, GCRY_SIV_BLOCK_LEN);
  c->marks.tag = 1;

  return 0;
}
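
/* GCM-SIV decryption.  The CTR counter is derived from the tag set via
 * the set_decryption_tag hook, the ciphertext is decrypted, the tag is
 * recomputed over the AAD and the recovered plaintext and compared in
 * constant time; on mismatch the plaintext output is wiped and
 * GPG_ERR_CHECKSUM is returned.  */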
gcry_err_code_t
_gcry_cipher_gcm_siv_decrypt (gcry_cipher_hd_t c,
                              byte *outbuf, size_t outbuflen,
                              const byte *inbuf, size_t inbuflen)
{
  byte expected_tag[GCRY_SIV_BLOCK_LEN];
  u32 bitlengths[2][2];
  gcry_err_code_t rc = 0;

  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
    return GPG_ERR_CIPHER_ALGO;
  if (outbuflen < inbuflen)
    return GPG_ERR_BUFFER_TOO_SHORT;
  if (c->u_mode.gcm.datalen_over_limits)
    return GPG_ERR_INV_LENGTH;
  if (!c->marks.tag
      || !c->marks.iv
      || c->u_mode.gcm.ghash_data_finalized
      || !c->u_mode.gcm.ghash_fn)
    return GPG_ERR_INV_STATE;

  if (!c->u_mode.gcm.ghash_aad_finalized)
    {
      /* Start of decryption marks end of AAD stream. */
      do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, NULL, 0, 1);
      c->u_mode.gcm.ghash_aad_finalized = 1;
    }

  gcm_siv_bytecounter_add (c->u_mode.gcm.datalen, inbuflen);
  if (!gcm_siv_check_len (c->u_mode.gcm.datalen))
    {
      c->u_mode.gcm.datalen_over_limits = 1;
      return GPG_ERR_INV_LENGTH;
    }

  /* Prepare counter. */
  memcpy (c->u_ctr.ctr, c->u_mode.gcm.tagiv, GCRY_SIV_BLOCK_LEN);
  c->u_ctr.ctr[GCRY_SIV_BLOCK_LEN - 1] |= 0x80;

  /* Decrypt data. */
  do_ctr_le32 (c, outbuf, inbuf, inbuflen);

  /* Plaintext and padding to POLYVAL. */
  do_polyval_buf (c, c->u_mode.gcm.u_tag.tag, outbuf, inbuflen, 1);
  c->u_mode.gcm.ghash_data_finalized = 1;

  /* aad length */
  bitlengths[0][0] = le_bswap32(c->u_mode.gcm.aadlen[0] << 3);
  bitlengths[0][1] = le_bswap32((c->u_mode.gcm.aadlen[0] >> 29) |
                                (c->u_mode.gcm.aadlen[1] << 3));
  /* data length */
  bitlengths[1][0] = le_bswap32(c->u_mode.gcm.datalen[0] << 3);
  bitlengths[1][1] = le_bswap32((c->u_mode.gcm.datalen[0] >> 29) |
                                (c->u_mode.gcm.datalen[1] << 3));

  /* Length block to POLYVAL. */
  do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, (byte *)bitlengths,
                 GCRY_SIV_BLOCK_LEN, 1);
  wipememory (bitlengths, sizeof(bitlengths));

  /* Prepare tag. */
  cipher_block_bswap (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.u_tag.tag,
                      GCRY_SIV_BLOCK_LEN);
  cipher_block_xor (expected_tag, c->u_iv.iv, c->u_mode.gcm.u_tag.tag,
                    GCRY_SIV_BLOCK_LEN);
  expected_tag[GCRY_SIV_BLOCK_LEN - 1] &= 0x7f;
  c->spec->encrypt (&c->context.c, expected_tag, expected_tag);

  if (!buf_eq_const(c->u_mode.gcm.tagiv, expected_tag, GCRY_SIV_BLOCK_LEN))
    {
      wipememory (outbuf, inbuflen);
      rc = GPG_ERR_CHECKSUM;
    }

  wipememory (expected_tag, sizeof(expected_tag));

  return rc;
}
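
/* Common helper for get_tag/check_tag.  If no tag has been produced
 * yet, the message is finalized with a zero-length plaintext first.  */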
static gcry_err_code_t
_gcry_cipher_gcm_siv_tag (gcry_cipher_hd_t c,
                          byte * outbuf, size_t outbuflen, int check)
{
  gcry_err_code_t err;

  if (!c->marks.tag)
    {
      if (!c->u_mode.gcm.ghash_fn)
        return GPG_ERR_INV_STATE;

      if (!c->marks.tag)
        {
          /* Finalize GCM-SIV with zero-length plaintext. */
          err = _gcry_cipher_gcm_siv_encrypt (c, NULL, 0, NULL, 0);
          if (err != 0)
            return err;
        }
    }

  if (c->u_mode.gcm.datalen_over_limits)
    return GPG_ERR_INV_LENGTH;
  if (!c->u_mode.gcm.ghash_data_finalized)
    return GPG_ERR_INV_STATE;
  if (!c->marks.tag)
    return GPG_ERR_INV_STATE;

  if (!check)
    {
      if (outbuflen > GCRY_SIV_BLOCK_LEN)
        outbuflen = GCRY_SIV_BLOCK_LEN;

      /* NB: We already checked that OUTBUF is large enough to hold
       * the result or has valid truncated length. */
      memcpy (outbuf, c->u_mode.gcm.tagiv, outbuflen);
    }
  else
    {
      /* OUTBUFLEN gives the length of the user supplied tag in OUTBUF
       * and thus we need to compare its length first. */
      if (outbuflen != GCRY_SIV_BLOCK_LEN
          || !buf_eq_const (outbuf, c->u_mode.gcm.tagiv, outbuflen))
        return GPG_ERR_CHECKSUM;
    }

  return 0;
}
gcry_err_code_t
_gcry_cipher_gcm_siv_get_tag (gcry_cipher_hd_t c, unsigned char *outtag,
                              size_t taglen)
{
  return _gcry_cipher_gcm_siv_tag (c, outtag, taglen, 0);
}

gcry_err_code_t
_gcry_cipher_gcm_siv_check_tag (gcry_cipher_hd_t c,
                                const unsigned char *intag,
                                size_t taglen)
{
  return _gcry_cipher_gcm_siv_tag (c, (unsigned char *)intag, taglen, 1);
}
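
/* Illustrative sketch, not part of the library: how this mode is
 * typically driven through libgcrypt's public API (assuming a build
 * that provides GCRY_CIPHER_MODE_GCM_SIV; key, nonce, aad, in and out
 * are caller-provided buffers and error handling is omitted):
 *
 *   gcry_cipher_hd_t hd;
 *   byte tag[16];
 *
 *   gcry_cipher_open (&hd, GCRY_CIPHER_AES256, GCRY_CIPHER_MODE_GCM_SIV, 0);
 *   gcry_cipher_setkey (hd, key, 32);
 *   gcry_cipher_setiv (hd, nonce, 12);            (96-bit nonce)
 *   gcry_cipher_authenticate (hd, aad, aadlen);   (AAD before data)
 *   gcry_cipher_encrypt (hd, out, outlen, in, inlen);
 *   gcry_cipher_gettag (hd, tag, 16);
 *   gcry_cipher_close (hd);
 */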