/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"
/* Per-instance context for ccm/ccm_base: the two child-algorithm spawns. */
struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;	/* CTR-mode skcipher */
	struct crypto_ahash_spawn mac;		/* cbcmac ahash */
};
/* Per-tfm context: the instantiated CBC-MAC and CTR child transforms. */
struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};
/* rfc4309 wrapper context: child CCM aead plus the 3-byte nonce that
 * crypto_rfc4309_setkey() strips off the end of the key.
 */
struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};
/* Per-request context for the rfc4309 wrapper. */
struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];	/* AAD bounce buffer chained to payload */
	struct scatterlist dst[3];
	struct aead_request subreq;	/* child CCM request; keep last —
					 * the child's req ctx follows it */
};
/*
 * Per-request private context, carved (aligned) out of the aead request
 * context; see crypto_ccm_init_tfm() for the size computation.
 */
struct crypto_ccm_req_priv_ctx {
	u8 odata[16];			/* B_0 block, then the computed MAC */
	u8 idata[16];			/* adata length encoding / padding / IV copy */
	u8 auth_tag[16];		/* transmitted tag (decrypt path) */
	u32 flags;			/* saved aead request flags */
	struct scatterlist src[3];	/* 16-byte tag slot chained before data */
	struct scatterlist dst[3];
	struct skcipher_request skreq;	/* CTR sub-request; keep last —
					 * its own req ctx is appended */
};
/* Per-tfm context for cbcmac: just the underlying block cipher. */
struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};
/*
 * cbcmac per-descriptor context.  Only the fill level is declared here;
 * the running digest block is stored right after this struct, at the
 * aligned end of the descriptor (see the descsize computation in
 * cbcmac_create()).
 */
struct cbcmac_desc_ctx {
	unsigned int len;	/* bytes accumulated in the current block */
};
  54. static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
  55. struct aead_request *req)
  56. {
  57. unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
  58. return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
  59. }
  60. static int set_msg_len(u8 *block, unsigned int msglen, int csize)
  61. {
  62. __be32 data;
  63. memset(block, 0, csize);
  64. block += csize;
  65. if (csize >= 4)
  66. csize = 4;
  67. else if (msglen > (1 << (8 * csize)))
  68. return -EOVERFLOW;
  69. data = cpu_to_be32(msglen);
  70. memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
  71. return 0;
  72. }
/*
 * Key both child transforms (CTR skcipher and CBC-MAC ahash) with the
 * same key, forwarding request flags down and result flags back up.
 */
static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err = 0;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	/* propagate result flags even on failure */
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
				    CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(mac, key, keylen);
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
				    CRYPTO_TFM_RES_MASK);
out:
	return err;
}
  97. static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
  98. unsigned int authsize)
  99. {
  100. switch (authsize) {
  101. case 4:
  102. case 6:
  103. case 8:
  104. case 10:
  105. case 12:
  106. case 14:
  107. case 16:
  108. break;
  109. default:
  110. return -EINVAL;
  111. }
  112. return 0;
  113. }
  114. static int format_input(u8 *info, struct aead_request *req,
  115. unsigned int cryptlen)
  116. {
  117. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  118. unsigned int lp = req->iv[0];
  119. unsigned int l = lp + 1;
  120. unsigned int m;
  121. m = crypto_aead_authsize(aead);
  122. memcpy(info, req->iv, 16);
  123. /* format control info per RFC 3610 and
  124. * NIST Special Publication 800-38C
  125. */
  126. *info |= (8 * ((m - 2) / 2));
  127. if (req->assoclen)
  128. *info |= 64;
  129. return set_msg_len(info + 16 - l, cryptlen, l);
  130. }
  131. static int format_adata(u8 *adata, unsigned int a)
  132. {
  133. int len = 0;
  134. /* add control info for associated data
  135. * RFC 3610 and NIST Special Publication 800-38C
  136. */
  137. if (a < 65280) {
  138. *(__be16 *)adata = cpu_to_be16(a);
  139. len = 2;
  140. } else {
  141. *(__be16 *)adata = cpu_to_be16(0xfffe);
  142. *(__be32 *)&adata[2] = cpu_to_be32(a);
  143. len = 6;
  144. }
  145. return len;
  146. }
/*
 * Compute the CCM CBC-MAC over B_0, the length-encoded associated data
 * and the message in @plain; the result is left in pctx->odata.
 */
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	AHASH_REQUEST_ON_STACK(ahreq, ctx->mac);
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data (B_0) for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	/* first pass: B_0 + adata length encoding + associated data */
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		/* prepend the zero padding to the message scatterlist */
		if (plain)
			sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	/* second pass: (padding +) message, finalized into odata */
	ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}
  199. static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
  200. {
  201. struct aead_request *req = areq->data;
  202. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  203. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  204. u8 *odata = pctx->odata;
  205. if (!err)
  206. scatterwalk_map_and_copy(odata, req->dst,
  207. req->assoclen + req->cryptlen,
  208. crypto_aead_authsize(aead), 1);
  209. aead_request_complete(req, err);
  210. }
  211. static inline int crypto_ccm_check_iv(const u8 *iv)
  212. {
  213. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  214. if (1 > iv[0] || iv[0] > 7)
  215. return -EINVAL;
  216. return 0;
  217. }
/*
 * Validate the IV and set up the scatterlists for the CTR pass: a
 * 16-byte slot holding @tag chained in front of the request data (with
 * the associated data skipped via scatterwalk_ffwd()).
 */
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
/*
 * CCM encrypt: CBC-MAC the plaintext, then CTR-encrypt tag + plaintext
 * in one pass (counter block 0 encrypts the tag), and append the
 * encrypted tag to the destination.
 */
static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	/* MAC over the plaintext; result lands in odata */
	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	/* +16: the tag slot chained in front of the payload */
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}
/*
 * Async completion for the decrypt CTR pass: recompute the CBC-MAC over
 * the recovered plaintext and compare it against the transmitted tag.
 */
static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	/* clear saved request flags for the MAC computation below —
	 * NOTE(review): presumably because this runs in completion
	 * context where e.g. MAY_SLEEP must not be honoured; confirm */
	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		/* constant-time tag comparison */
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}
/*
 * CCM decrypt: save the transmitted tag, CTR-decrypt tag + ciphertext,
 * recompute the CBC-MAC over the plaintext and verify the tag.
 */
static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;	/* private IV copy lives in idata */
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	/* stash the transmitted tag before it is overwritten in place */
	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	/* run CTR on a private copy of the IV */
	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	/* +16: the tag slot chained in front of the payload */
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	/* synchronous path: recompute the MAC over the plaintext */
	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify (constant-time comparison) */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}
/*
 * Instantiate the two child transforms and size the per-request context:
 * aligned private ctx followed by the CTR sub-request's own context.
 */
static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	/* extra alignment slack beyond the ctx alignment guarantee */
	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		crypto_skcipher_reqsize(ctr));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}
  363. static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
  364. {
  365. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
  366. crypto_free_ahash(ctx->mac);
  367. crypto_free_skcipher(ctx->ctr);
  368. }
  369. static void crypto_ccm_free(struct aead_instance *inst)
  370. {
  371. struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);
  372. crypto_drop_ahash(&ctx->mac);
  373. crypto_drop_skcipher(&ctx->ctr);
  374. kfree(inst);
  375. }
/*
 * Shared instance constructor for the "ccm" and "ccm_base" templates:
 * grab the cbcmac hash @mac_name and the ctr skcipher @ctr_name, verify
 * they are compatible, and register the resulting aead instance.
 */
static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *mac_alg;
	struct hash_alg_common *mac;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
				  CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_AHASH_MASK |
				  CRYPTO_ALG_ASYNC);
	if (IS_ERR(mac_alg))
		return PTR_ERR(mac_alg);

	mac = __crypto_hash_alg_common(mac_alg);
	err = -EINVAL;
	/* the MAC must be a cbcmac with a 16-byte digest */
	if (strncmp(mac->base.cra_name, "cbcmac(", 7) != 0 ||
	    mac->digestsize != 16)
		goto out_put_mac;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_mac;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ictx->mac, mac,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_mac;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* ctr and cbcmac must use the same underlying block cipher. */
	if (strcmp(ctr->base.cra_name + 4, mac->base.cra_name + 7) != 0)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	/* "ccm(%s" — the closing paren comes with ctr's cipher suffix */
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ccm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;
	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_mac:
	crypto_mod_put(mac_alg);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
	crypto_drop_ahash(&ictx->mac);
err_free_inst:
	kfree(inst);
	goto out_put_mac;
}
  468. static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
  469. {
  470. const char *cipher_name;
  471. char ctr_name[CRYPTO_MAX_ALG_NAME];
  472. char mac_name[CRYPTO_MAX_ALG_NAME];
  473. cipher_name = crypto_attr_alg_name(tb[1]);
  474. if (IS_ERR(cipher_name))
  475. return PTR_ERR(cipher_name);
  476. if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
  477. cipher_name) >= CRYPTO_MAX_ALG_NAME)
  478. return -ENAMETOOLONG;
  479. if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
  480. cipher_name) >= CRYPTO_MAX_ALG_NAME)
  481. return -ENAMETOOLONG;
  482. return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
  483. }
/* "ccm(cipher)": convenience form of ccm_base(ctr(cipher), cbcmac(cipher)). */
static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.create = crypto_ccm_create,
	.module = THIS_MODULE,
};
  489. static int crypto_ccm_base_create(struct crypto_template *tmpl,
  490. struct rtattr **tb)
  491. {
  492. const char *ctr_name;
  493. const char *mac_name;
  494. ctr_name = crypto_attr_alg_name(tb[1]);
  495. if (IS_ERR(ctr_name))
  496. return PTR_ERR(ctr_name);
  497. mac_name = crypto_attr_alg_name(tb[2]);
  498. if (IS_ERR(mac_name))
  499. return PTR_ERR(mac_name);
  500. return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
  501. }
/* "ccm_base(ctr, mac)": CCM from explicitly named child algorithms. */
static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.create = crypto_ccm_base_create,
	.module = THIS_MODULE,
};
/*
 * RFC 4309 key layout: the CCM key followed by a 3-byte nonce.  Strip
 * and store the nonce, then key the child CCM aead with the remainder.
 */
static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	/* propagate result flags back to the parent even on failure */
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}
  525. static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
  526. unsigned int authsize)
  527. {
  528. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  529. switch (authsize) {
  530. case 8:
  531. case 12:
  532. case 16:
  533. break;
  534. default:
  535. return -EINVAL;
  536. }
  537. return crypto_aead_setauthsize(ctx->child, authsize);
  538. }
/*
 * Build the child CCM request for an rfc4309 request: construct the
 * 16-byte CCM IV (L' = 3, 3-byte nonce, 8-byte per-request IV) in the
 * scratch area after the sub-request, and bounce the associated data
 * (minus its trailing 8 bytes, which duplicate the IV) into that same
 * area so it can be chained ahead of the payload.
 */
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	/* IV scratch lives after the child request, suitably aligned */
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;
	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	/* copy the AAD (without its last 8 bytes) into the bounce buffer */
	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
  575. static int crypto_rfc4309_encrypt(struct aead_request *req)
  576. {
  577. if (req->assoclen != 16 && req->assoclen != 20)
  578. return -EINVAL;
  579. req = crypto_rfc4309_crypt(req);
  580. return crypto_aead_encrypt(req);
  581. }
  582. static int crypto_rfc4309_decrypt(struct aead_request *req)
  583. {
  584. if (req->assoclen != 16 && req->assoclen != 20)
  585. return -EINVAL;
  586. req = crypto_rfc4309_crypt(req);
  587. return crypto_aead_decrypt(req);
  588. }
/*
 * Instantiate the child CCM aead and size the request context: wrapper
 * ctx, the aligned child request, plus 32 bytes of scratch used by
 * crypto_rfc4309_crypt() for the 16-byte IV and the AAD bounce buffer.
 */
static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}
  609. static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
  610. {
  611. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
  612. crypto_free_aead(ctx->child);
  613. }
  614. static void crypto_rfc4309_free(struct aead_instance *inst)
  615. {
  616. crypto_drop_aead(aead_instance_ctx(inst));
  617. kfree(inst);
  618. }
/*
 * "rfc4309(ccm(...))" template constructor: grab the inner CCM aead,
 * check it is stream-like with a 16-byte IV, and register the wrapper
 * instance exposing an 8-byte (ESP) IV.
 */
static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;	/* only the 8-byte ESP IV is user-supplied */
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}
/* "rfc4309(...)": CCM encapsulation for IPsec ESP per RFC 4309. */
static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.create = crypto_rfc4309_create,
	.module = THIS_MODULE,
};
  692. static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
  693. const u8 *inkey, unsigned int keylen)
  694. {
  695. struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
  696. return crypto_cipher_setkey(ctx->child, inkey, keylen);
  697. }
/* Start a new MAC: zero the fill level and the running digest block. */
static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_digestsize(pdesc->tfm);
	/* the running block lives at the end of the descriptor context */
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

	ctx->len = 0;
	memset(dg, 0, bs);

	return 0;
}
  707. static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
  708. unsigned int len)
  709. {
  710. struct crypto_shash *parent = pdesc->tfm;
  711. struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
  712. struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
  713. struct crypto_cipher *tfm = tctx->child;
  714. int bs = crypto_shash_digestsize(parent);
  715. u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;
  716. while (len > 0) {
  717. unsigned int l = min(len, bs - ctx->len);
  718. crypto_xor(dg + ctx->len, p, l);
  719. ctx->len +=l;
  720. len -= l;
  721. p += l;
  722. if (ctx->len == bs) {
  723. crypto_cipher_encrypt_one(tfm, dg, dg);
  724. ctx->len = 0;
  725. }
  726. }
  727. return 0;
  728. }
  729. static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
  730. {
  731. struct crypto_shash *parent = pdesc->tfm;
  732. struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
  733. struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
  734. struct crypto_cipher *tfm = tctx->child;
  735. int bs = crypto_shash_digestsize(parent);
  736. u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;
  737. if (ctx->len)
  738. crypto_cipher_encrypt_one(tfm, dg, dg);
  739. memcpy(out, dg, bs);
  740. return 0;
  741. }
  742. static int cbcmac_init_tfm(struct crypto_tfm *tfm)
  743. {
  744. struct crypto_cipher *cipher;
  745. struct crypto_instance *inst = (void *)tfm->__crt_alg;
  746. struct crypto_spawn *spawn = crypto_instance_ctx(inst);
  747. struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
  748. cipher = crypto_spawn_cipher(spawn);
  749. if (IS_ERR(cipher))
  750. return PTR_ERR(cipher);
  751. ctx->child = cipher;
  752. return 0;
  753. };
  754. static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
  755. {
  756. struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
  757. crypto_free_cipher(ctx->child);
  758. }
/*
 * "cbcmac(cipher)" template constructor: a shash computing a CBC-MAC
 * (zero IV — see crypto_cbcmac_digest_init()) over its input, used as
 * the CCM authentication primitive.
 */
static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("cbcmac", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = 1;

	/* the digest is one cipher block */
	inst->alg.digestsize = alg->cra_blocksize;
	/* desc ctx: header, alignment padding, then the running block */
	inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
				   alg->cra_alignmask + 1) +
			     alg->cra_blocksize;

	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.final = crypto_cbcmac_digest_final;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	err = shash_register_instance(tmpl, inst);

out_free_inst:
	if (err)
		shash_free_instance(shash_crypto_instance(inst));
out_put_alg:
	crypto_mod_put(alg);
	return err;
}
/* "cbcmac(cipher)": the CBC-MAC shash used by the ccm template. */
static struct crypto_template crypto_cbcmac_tmpl = {
	.name = "cbcmac",
	.create = cbcmac_create,
	.free = shash_free_instance,
	.module = THIS_MODULE,
};
  807. static int __init crypto_ccm_module_init(void)
  808. {
  809. int err;
  810. err = crypto_register_template(&crypto_cbcmac_tmpl);
  811. if (err)
  812. goto out;
  813. err = crypto_register_template(&crypto_ccm_base_tmpl);
  814. if (err)
  815. goto out_undo_cbcmac;
  816. err = crypto_register_template(&crypto_ccm_tmpl);
  817. if (err)
  818. goto out_undo_base;
  819. err = crypto_register_template(&crypto_rfc4309_tmpl);
  820. if (err)
  821. goto out_undo_ccm;
  822. out:
  823. return err;
  824. out_undo_ccm:
  825. crypto_unregister_template(&crypto_ccm_tmpl);
  826. out_undo_base:
  827. crypto_unregister_template(&crypto_ccm_base_tmpl);
  828. out_undo_cbcmac:
  829. crypto_register_template(&crypto_cbcmac_tmpl);
  830. goto out;
  831. }
/* Unregister all templates, in reverse order of registration. */
static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
	crypto_unregister_template(&crypto_cbcmac_tmpl);
}
module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
/* template names users may request, resolved to this module */
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");