ccm.c 23 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945
  1. /*
  2. * CCM: Counter with CBC-MAC
  3. *
  4. * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License as published by the Free
  8. * Software Foundation; either version 2 of the License, or (at your option)
  9. * any later version.
  10. *
  11. */
  12. #include <crypto/internal/aead.h>
  13. #include <crypto/internal/skcipher.h>
  14. #include <crypto/scatterwalk.h>
  15. #include <linux/err.h>
  16. #include <linux/init.h>
  17. #include <linux/kernel.h>
  18. #include <linux/module.h>
  19. #include <linux/slab.h>
  20. #include "internal.h"
/* Per-instance context: the spawns from which per-tfm ciphers are created. */
struct ccm_instance_ctx {
        struct crypto_skcipher_spawn ctr;       /* CTR-mode skcipher */
        struct crypto_spawn cipher;             /* raw block cipher for the CBC-MAC */
};
/* Per-tfm context: the two component transforms CCM is built from. */
struct crypto_ccm_ctx {
        struct crypto_cipher *cipher;   /* block cipher driving the CBC-MAC */
        struct crypto_skcipher *ctr;    /* CTR mode used for payload encryption */
};
/* Per-tfm context for the rfc4309 (IPsec ESP) wrapper. */
struct crypto_rfc4309_ctx {
        struct crypto_aead *child;      /* underlying ccm(...) AEAD */
        u8 nonce[3];                    /* implicit salt, split off the key in setkey() */
};
/* Per-request context for rfc4309: rewritten sg lists plus the subrequest. */
struct crypto_rfc4309_req_ctx {
        struct scatterlist src[3];
        struct scatterlist dst[3];
        struct aead_request subreq;     /* request forwarded to the child AEAD */
};
/* Per-request private state for ccm proper. */
struct crypto_ccm_req_priv_ctx {
        u8 odata[16];           /* running CBC-MAC value / final tag */
        u8 idata[16];           /* staging buffer for partial MAC input */
        u8 auth_tag[16];        /* received tag (decrypt path) */
        u32 ilen;               /* bytes currently buffered in idata */
        u32 flags;              /* request flags, consulted by crypto_yield() */
        struct scatterlist src[3];
        struct scatterlist dst[3];
        /* must be last: the child's request context is appended to it
         * (see the reqsize computation in crypto_ccm_init_tfm())
         */
        struct skcipher_request skreq;
};
/* Return the private request context, aligned to the tfm's alignmask. */
static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
        struct aead_request *req)
{
        unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

        return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
  54. static int set_msg_len(u8 *block, unsigned int msglen, int csize)
  55. {
  56. __be32 data;
  57. memset(block, 0, csize);
  58. block += csize;
  59. if (csize >= 4)
  60. csize = 4;
  61. else if (msglen > (1 << (8 * csize)))
  62. return -EOVERFLOW;
  63. data = cpu_to_be32(msglen);
  64. memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
  65. return 0;
  66. }
/*
 * Key both component transforms with the same key, forwarding request
 * flags down to each child and reflecting their result flags back up,
 * per crypto API convention.
 */
static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_skcipher *ctr = ctx->ctr;
        struct crypto_cipher *tfm = ctx->cipher;
        int err = 0;

        /* key the CTR skcipher first */
        crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_skcipher_setkey(ctr, key, keylen);
        crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
                                    CRYPTO_TFM_RES_MASK);
        if (err)
                goto out;

        /* then the CBC-MAC block cipher */
        crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
                                     CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(tfm, key, keylen);
        crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
                                    CRYPTO_TFM_RES_MASK);

out:
        return err;
}
  91. static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
  92. unsigned int authsize)
  93. {
  94. switch (authsize) {
  95. case 4:
  96. case 6:
  97. case 8:
  98. case 10:
  99. case 12:
  100. case 14:
  101. case 16:
  102. break;
  103. default:
  104. return -EINVAL;
  105. }
  106. return 0;
  107. }
/*
 * Build the B0 block in @info from the IV, the tag length and the
 * payload length.  iv[0] carries L' = L - 1, so the final L octets of
 * the block hold the encoded message length.
 */
static int format_input(u8 *info, struct aead_request *req,
                        unsigned int cryptlen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int lp = req->iv[0];
        unsigned int l = lp + 1;
        unsigned int m;

        m = crypto_aead_authsize(aead);

        memcpy(info, req->iv, 16);

        /* format control info per RFC 3610 and
         * NIST Special Publication 800-38C
         */
        *info |= (8 * ((m - 2) / 2));   /* (M - 2) / 2 in bits 3..5 */
        if (req->assoclen)
                *info |= 64;            /* Adata flag bit */

        return set_msg_len(info + 16 - l, cryptlen, l);
}
/*
 * Write the associated-data length header into @adata and return its
 * size: 2 octets for lengths below 0xff00, otherwise the 0xfffe marker
 * followed by a 32-bit length (6 octets total).
 */
static int format_adata(u8 *adata, unsigned int a)
{
        int len = 0;

        /* add control info for associated data
         * RFC 3610 and NIST Special Publication 800-38C
         */
        if (a < 65280) {
                *(__be16 *)adata = cpu_to_be16(a);
                len = 2;
        } else {
                *(__be16 *)adata = cpu_to_be16(0xfffe);
                *(__be32 *)&adata[2] = cpu_to_be32(a);
                len = 6;
        }

        return len;
}
/*
 * Fold @n bytes at @data into the running CBC-MAC in pctx->odata.
 * Input that does not fill a whole 16-byte block is buffered in
 * pctx->idata (pctx->ilen valid bytes) until more data arrives.
 */
static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
                        struct crypto_ccm_req_priv_ctx *pctx)
{
        unsigned int bs = 16;
        u8 *odata = pctx->odata;
        u8 *idata = pctx->idata;
        int datalen, getlen;

        datalen = n;

        /* first time in here, block may be partially filled. */
        getlen = bs - pctx->ilen;
        if (datalen >= getlen) {
                /* complete the buffered block and absorb it */
                memcpy(idata + pctx->ilen, data, getlen);
                crypto_xor(odata, idata, bs);
                crypto_cipher_encrypt_one(tfm, odata, odata);
                datalen -= getlen;
                data += getlen;
                pctx->ilen = 0;
        }

        /* now encrypt rest of data */
        while (datalen >= bs) {
                crypto_xor(odata, data, bs);
                crypto_cipher_encrypt_one(tfm, odata, odata);
                datalen -= bs;
                data += bs;
        }

        /* check and see if there's leftover data that wasn't
         * enough to fill a block.
         */
        if (datalen) {
                memcpy(idata + pctx->ilen, data, datalen);
                pctx->ilen += datalen;
        }
}
/*
 * Walk @len bytes of scatterlist @sg, feeding each mapped chunk to
 * compute_mac(), then zero-pad and absorb any buffered partial block.
 */
static void get_data_to_compute(struct crypto_cipher *tfm,
                                struct crypto_ccm_req_priv_ctx *pctx,
                                struct scatterlist *sg, unsigned int len)
{
        struct scatter_walk walk;
        u8 *data_src;
        int n;

        scatterwalk_start(&walk, sg);

        while (len) {
                n = scatterwalk_clamp(&walk, len);
                if (!n) {
                        /* current sg entry exhausted; move to the next */
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }
                data_src = scatterwalk_map(&walk);

                compute_mac(tfm, data_src, n, pctx);
                len -= n;

                scatterwalk_unmap(data_src);
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);
                if (len)
                        crypto_yield(pctx->flags);
        }

        /* any leftover needs padding and then encrypted */
        if (pctx->ilen) {
                int padlen;
                u8 *odata = pctx->odata;
                u8 *idata = pctx->idata;

                padlen = 16 - pctx->ilen;
                memset(idata + pctx->ilen, 0, padlen);
                crypto_xor(odata, idata, 16);
                crypto_cipher_encrypt_one(tfm, odata, odata);
                pctx->ilen = 0;
        }
}
/*
 * Compute the CBC-MAC over the B0 block, the formatted associated data
 * and @cryptlen bytes of @plain; the result is left in pctx->odata.
 */
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
                           unsigned int cryptlen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_cipher *cipher = ctx->cipher;
        unsigned int assoclen = req->assoclen;
        u8 *odata = pctx->odata;
        u8 *idata = pctx->idata;
        int err;

        /* format control data for input */
        err = format_input(odata, req, cryptlen);
        if (err)
                goto out;

        /* encrypt first block to use as start in computing mac */
        crypto_cipher_encrypt_one(cipher, odata, odata);

        /* format associated data and compute into mac */
        if (assoclen) {
                /* length header is pre-buffered so AAD follows it */
                pctx->ilen = format_adata(idata, assoclen);
                get_data_to_compute(cipher, pctx, req->src, req->assoclen);
        } else {
                pctx->ilen = 0;
        }

        /* compute plaintext into mac */
        if (cryptlen)
                get_data_to_compute(cipher, pctx, plain, cryptlen);

out:
        return err;
}
/* Async completion for the encrypt path: append the auth tag to dst. */
static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        u8 *odata = pctx->odata;

        if (!err)
                /* odata block 0 was CTR-encrypted into the tag */
                scatterwalk_map_and_copy(odata, req->dst,
                                         req->assoclen + req->cryptlen,
                                         crypto_aead_authsize(aead), 1);
        aead_request_complete(req, err);
}
  251. static inline int crypto_ccm_check_iv(const u8 *iv)
  252. {
  253. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  254. if (1 > iv[0] || iv[0] > 7)
  255. return -EINVAL;
  256. return 0;
  257. }
/*
 * Shared encrypt/decrypt setup: validate the IV, zero its counter
 * field, and build sg lists that prepend the 16-byte @tag block to the
 * payload portion of req->src (and req->dst if distinct).
 */
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct scatterlist *sg;
        u8 *iv = req->iv;
        int err;

        err = crypto_ccm_check_iv(iv);
        if (err)
                return err;

        pctx->flags = aead_request_flags(req);

        /* Note: rfc 3610 and NIST 800-38C require counter of
         * zero to encrypt auth tag.
         */
        memset(iv + 15 - iv[0], 0, iv[0] + 1);

        sg_init_table(pctx->src, 3);
        sg_set_buf(pctx->src, tag, 16);
        sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
        if (sg != pctx->src + 1)
                sg_chain(pctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(pctx->dst, 3);
                sg_set_buf(pctx->dst, tag, 16);
                sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
                if (sg != pctx->dst + 1)
                        sg_chain(pctx->dst, 2, sg);
        }

        return 0;
}
  286. static int crypto_ccm_encrypt(struct aead_request *req)
  287. {
  288. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  289. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  290. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  291. struct skcipher_request *skreq = &pctx->skreq;
  292. struct scatterlist *dst;
  293. unsigned int cryptlen = req->cryptlen;
  294. u8 *odata = pctx->odata;
  295. u8 *iv = req->iv;
  296. int err;
  297. err = crypto_ccm_init_crypt(req, odata);
  298. if (err)
  299. return err;
  300. err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
  301. if (err)
  302. return err;
  303. dst = pctx->src;
  304. if (req->src != req->dst)
  305. dst = pctx->dst;
  306. skcipher_request_set_tfm(skreq, ctx->ctr);
  307. skcipher_request_set_callback(skreq, pctx->flags,
  308. crypto_ccm_encrypt_done, req);
  309. skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
  310. err = crypto_skcipher_encrypt(skreq);
  311. if (err)
  312. return err;
  313. /* copy authtag to end of dst */
  314. scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
  315. crypto_aead_authsize(aead), 1);
  316. return err;
  317. }
/*
 * Async completion for the decrypt path: recompute the MAC over the
 * decrypted plaintext and compare it with the received tag.
 */
static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
                                    int err)
{
        struct aead_request *req = areq->data;
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen - authsize;
        struct scatterlist *dst;

        /* clear flags so crypto_yield() won't sleep here —
         * NOTE(review): presumed atomic completion context; confirm
         */
        pctx->flags = 0;

        dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

        if (!err) {
                err = crypto_ccm_auth(req, dst, cryptlen);
                /* constant-time tag comparison */
                if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
                        err = -EBADMSG;
        }
        aead_request_complete(req, err);
}
/*
 * CCM decrypt: extract the received tag, CTR-decrypt tag block plus
 * payload, recompute the CBC-MAC over the plaintext and compare in
 * constant time.
 */
static int crypto_ccm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct skcipher_request *skreq = &pctx->skreq;
        struct scatterlist *dst;
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen;
        u8 *authtag = pctx->auth_tag;
        u8 *odata = pctx->odata;
        u8 *iv = pctx->idata;   /* scratch copy keeps req->iv intact */
        int err;

        cryptlen -= authsize;

        err = crypto_ccm_init_crypt(req, authtag);
        if (err)
                return err;

        /* pull the received tag off the tail of src */
        scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
                                 authsize, 0);

        dst = pctx->src;
        if (req->src != req->dst)
                dst = pctx->dst;

        memcpy(iv, req->iv, 16);

        skcipher_request_set_tfm(skreq, ctx->ctr);
        skcipher_request_set_callback(skreq, pctx->flags,
                                      crypto_ccm_decrypt_done, req);
        skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
        err = crypto_skcipher_decrypt(skreq);
        if (err)
                return err;

        err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
        if (err)
                return err;

        /* verify */
        if (crypto_memneq(authtag, odata, authsize))
                return -EBADMSG;

        return err;
}
/*
 * Instantiate the child block cipher and CTR skcipher and size the
 * request context: aligned private ctx plus the child's request.
 */
static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_cipher *cipher;
        struct crypto_skcipher *ctr;
        unsigned long align;
        int err;

        cipher = crypto_spawn_cipher(&ictx->cipher);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctr = crypto_spawn_skcipher2(&ictx->ctr);
        err = PTR_ERR(ctr);
        if (IS_ERR(ctr))
                goto err_free_cipher;

        ctx->cipher = cipher;
        ctx->ctr = ctr;

        /* round the extra alignment slack down to ctx alignment */
        align = crypto_aead_alignmask(tfm);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                align + sizeof(struct crypto_ccm_req_priv_ctx) +
                crypto_skcipher_reqsize(ctr));

        return 0;

err_free_cipher:
        crypto_free_cipher(cipher);
        return err;
}
/* Release the per-tfm child transforms. */
static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_cipher(ctx->cipher);
        crypto_free_skcipher(ctx->ctr);
}
/* Instance destructor: drop both spawns, then free the instance. */
static void crypto_ccm_free(struct aead_instance *inst)
{
        struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

        crypto_drop_spawn(&ctx->cipher);
        crypto_drop_skcipher(&ctx->ctr);
        kfree(inst);
}
/*
 * Shared instance constructor for the "ccm" and "ccm_base" templates:
 * look up the block cipher and the CTR skcipher, validate both, fill
 * in the aead_alg and register the instance.  On any failure the
 * acquired resources are unwound in reverse order via the goto chain.
 */
static int crypto_ccm_create_common(struct crypto_template *tmpl,
                                    struct rtattr **tb,
                                    const char *full_name,
                                    const char *ctr_name,
                                    const char *cipher_name)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct skcipher_alg *ctr;
        struct crypto_alg *cipher;
        struct ccm_instance_ctx *ictx;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
                                       CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        /* CCM is defined only for 128-bit block ciphers */
        err = -EINVAL;
        if (cipher->cra_blocksize != 16)
                goto out_put_cipher;

        inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
        err = -ENOMEM;
        if (!inst)
                goto out_put_cipher;

        ictx = aead_instance_ctx(inst);

        err = crypto_init_spawn(&ictx->cipher, cipher,
                                aead_crypto_instance(inst),
                                CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto err_free_inst;

        crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
        err = crypto_grab_skcipher2(&ictx->ctr, ctr_name, 0,
                                    crypto_requires_sync(algt->type,
                                                         algt->mask));
        if (err)
                goto err_drop_cipher;

        ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

        /* Not a stream cipher? */
        err = -EINVAL;
        if (ctr->base.cra_blocksize != 1)
                goto err_drop_ctr;

        /* We want the real thing! */
        if (crypto_skcipher_alg_ivsize(ctr) != 16)
                goto err_drop_ctr;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
                     cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_drop_ctr;

        memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

        inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = (cipher->cra_priority +
                                       ctr->base.cra_priority) / 2;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = cipher->cra_alignmask |
                                       ctr->base.cra_alignmask |
                                       (__alignof__(u32) - 1);
        inst->alg.ivsize = 16;
        inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
        inst->alg.maxauthsize = 16;
        inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
        inst->alg.init = crypto_ccm_init_tfm;
        inst->alg.exit = crypto_ccm_exit_tfm;
        inst->alg.setkey = crypto_ccm_setkey;
        inst->alg.setauthsize = crypto_ccm_setauthsize;
        inst->alg.encrypt = crypto_ccm_encrypt;
        inst->alg.decrypt = crypto_ccm_decrypt;
        inst->free = crypto_ccm_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto err_drop_ctr;

out_put_cipher:
        crypto_mod_put(cipher);
        return err;

err_drop_ctr:
        crypto_drop_skcipher(&ictx->ctr);
err_drop_cipher:
        crypto_drop_spawn(&ictx->cipher);
err_free_inst:
        kfree(inst);
        goto out_put_cipher;
}
  502. static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
  503. {
  504. const char *cipher_name;
  505. char ctr_name[CRYPTO_MAX_ALG_NAME];
  506. char full_name[CRYPTO_MAX_ALG_NAME];
  507. cipher_name = crypto_attr_alg_name(tb[1]);
  508. if (IS_ERR(cipher_name))
  509. return PTR_ERR(cipher_name);
  510. if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
  511. cipher_name) >= CRYPTO_MAX_ALG_NAME)
  512. return -ENAMETOOLONG;
  513. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
  514. CRYPTO_MAX_ALG_NAME)
  515. return -ENAMETOOLONG;
  516. return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
  517. cipher_name);
  518. }
/* "ccm" template: ccm(cipher). */
static struct crypto_template crypto_ccm_tmpl = {
        .name = "ccm",
        .create = crypto_ccm_create,
        .module = THIS_MODULE,
};
/*
 * "ccm_base" template: ccm_base(ctr_name,cipher_name), with both
 * component algorithms named explicitly by the caller.
 */
static int crypto_ccm_base_create(struct crypto_template *tmpl,
                                  struct rtattr **tb)
{
        const char *ctr_name;
        const char *cipher_name;
        char full_name[CRYPTO_MAX_ALG_NAME];

        ctr_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ctr_name))
                return PTR_ERR(ctr_name);

        cipher_name = crypto_attr_alg_name(tb[2]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
                     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
                                        cipher_name);
}
/* "ccm_base" template: ccm_base(ctr,cipher). */
static struct crypto_template crypto_ccm_base_tmpl = {
        .name = "ccm_base",
        .create = crypto_ccm_base_create,
        .module = THIS_MODULE,
};
/*
 * RFC 4309 keys carry a trailing 3-byte salt: split it off into
 * ctx->nonce and hand the remaining CCM key to the child AEAD,
 * propagating flags both ways.
 */
static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
                                 unsigned int keylen)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
        struct crypto_aead *child = ctx->child;
        int err;

        if (keylen < 3)
                return -EINVAL;

        keylen -= 3;
        memcpy(ctx->nonce, key + keylen, 3);

        crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
                                     CRYPTO_TFM_REQ_MASK);
        err = crypto_aead_setkey(child, key, keylen);
        crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
                                      CRYPTO_TFM_RES_MASK);

        return err;
}
  565. static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
  566. unsigned int authsize)
  567. {
  568. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  569. switch (authsize) {
  570. case 8:
  571. case 12:
  572. case 16:
  573. break;
  574. default:
  575. return -EINVAL;
  576. }
  577. return crypto_aead_setauthsize(ctx->child, authsize);
  578. }
/*
 * Convert an rfc4309 request into a request for the child ccm(...)
 * tfm: build the 16-byte CCM IV from the 3-byte salt and the 8-byte
 * per-packet IV, and strip the trailing 8 AAD bytes (the IV copy)
 * from the associated data handed to the child.
 */
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
        struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
        struct aead_request *subreq = &rctx->subreq;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_aead *child = ctx->child;
        struct scatterlist *sg;
        /* IV buffer lives after the subrequest's ctx, suitably aligned
         * (space reserved in crypto_rfc4309_init_tfm)
         */
        u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
                           crypto_aead_alignmask(child) + 1);

        /* L' */
        iv[0] = 3;
        memcpy(iv + 1, ctx->nonce, 3);
        memcpy(iv + 4, req->iv, 8);

        /* copy the leading assoclen - 8 AAD bytes linearly after the IV */
        scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

        sg_init_table(rctx->src, 3);
        sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
        sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
        if (sg != rctx->src + 1)
                sg_chain(rctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(rctx->dst, 3);
                sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
                sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
                if (sg != rctx->dst + 1)
                        sg_chain(rctx->dst, 2, sg);
        }

        aead_request_set_tfm(subreq, child);
        aead_request_set_callback(subreq, req->base.flags, req->base.complete,
                                  req->base.data);
        aead_request_set_crypt(subreq, rctx->src,
                               req->src == req->dst ? rctx->src : rctx->dst,
                               req->cryptlen, iv);
        aead_request_set_ad(subreq, req->assoclen - 8);

        return subreq;
}
  615. static int crypto_rfc4309_encrypt(struct aead_request *req)
  616. {
  617. if (req->assoclen != 16 && req->assoclen != 20)
  618. return -EINVAL;
  619. req = crypto_rfc4309_crypt(req);
  620. return crypto_aead_encrypt(req);
  621. }
  622. static int crypto_rfc4309_decrypt(struct aead_request *req)
  623. {
  624. if (req->assoclen != 16 && req->assoclen != 20)
  625. return -EINVAL;
  626. req = crypto_rfc4309_crypt(req);
  627. return crypto_aead_decrypt(req);
  628. }
/*
 * Instantiate the child ccm AEAD and reserve request space for the
 * subrequest plus an aligned scratch area (32 bytes: the 16-byte IV
 * and room for the copied AAD — see crypto_rfc4309_crypt()).
 */
static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_aead *aead;
        unsigned long align;

        aead = crypto_spawn_aead(spawn);
        if (IS_ERR(aead))
                return PTR_ERR(aead);

        ctx->child = aead;

        align = crypto_aead_alignmask(aead);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                sizeof(struct crypto_rfc4309_req_ctx) +
                ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
                align + 32);

        return 0;
}
/* Release the child AEAD. */
static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->child);
}
/* Instance destructor: drop the aead spawn, then free the instance. */
static void crypto_rfc4309_free(struct aead_instance *inst)
{
        crypto_drop_aead(aead_instance_ctx(inst));
        kfree(inst);
}
/*
 * "rfc4309(...)" template constructor: wrap an existing CCM AEAD with
 * the ESP salt/IV handling from RFC 4309, validating that the child
 * looks like a genuine CCM instance.
 */
static int crypto_rfc4309_create(struct crypto_template *tmpl,
                                 struct rtattr **tb)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct crypto_aead_spawn *spawn;
        struct aead_alg *alg;
        const char *ccm_name;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        ccm_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ccm_name))
                return PTR_ERR(ccm_name);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        spawn = aead_instance_ctx(inst);
        crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
        err = crypto_grab_aead(spawn, ccm_name, 0,
                               crypto_requires_sync(algt->type, algt->mask));
        if (err)
                goto out_free_inst;

        alg = crypto_spawn_aead_alg(spawn);

        err = -EINVAL;

        /* We only support 16-byte blocks. */
        if (crypto_aead_alg_ivsize(alg) != 16)
                goto out_drop_alg;

        /* Not a stream cipher? */
        if (alg->base.cra_blocksize != 1)
                goto out_drop_alg;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_name) >=
            CRYPTO_MAX_ALG_NAME ||
            snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_driver_name) >=
            CRYPTO_MAX_ALG_NAME)
                goto out_drop_alg;

        inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

        /* the caller supplies only the 8-byte explicit IV */
        inst->alg.ivsize = 8;
        inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
        inst->alg.maxauthsize = 16;
        inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);
        inst->alg.init = crypto_rfc4309_init_tfm;
        inst->alg.exit = crypto_rfc4309_exit_tfm;
        inst->alg.setkey = crypto_rfc4309_setkey;
        inst->alg.setauthsize = crypto_rfc4309_setauthsize;
        inst->alg.encrypt = crypto_rfc4309_encrypt;
        inst->alg.decrypt = crypto_rfc4309_decrypt;
        inst->free = crypto_rfc4309_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto out_drop_alg;

out:
        return err;

out_drop_alg:
        crypto_drop_aead(spawn);
out_free_inst:
        kfree(inst);
        goto out;
}
/* "rfc4309" template: rfc4309(ccm(...)). */
static struct crypto_template crypto_rfc4309_tmpl = {
        .name = "rfc4309",
        .create = crypto_rfc4309_create,
        .module = THIS_MODULE,
};
/* Register the three templates; unwind in reverse order on failure. */
static int __init crypto_ccm_module_init(void)
{
        int err;

        err = crypto_register_template(&crypto_ccm_base_tmpl);
        if (err)
                goto out;

        err = crypto_register_template(&crypto_ccm_tmpl);
        if (err)
                goto out_undo_base;

        err = crypto_register_template(&crypto_rfc4309_tmpl);
        if (err)
                goto out_undo_ccm;

out:
        return err;

out_undo_ccm:
        crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
        crypto_unregister_template(&crypto_ccm_base_tmpl);
        goto out;
}
/* Unregister the templates in reverse order of registration. */
static void __exit crypto_ccm_module_exit(void)
{
        crypto_unregister_template(&crypto_rfc4309_tmpl);
        crypto_unregister_template(&crypto_ccm_tmpl);
        crypto_unregister_template(&crypto_ccm_base_tmpl);
}
module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
/* allow auto-loading by template name */
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");