aead.c 24 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943
  1. /*
  2. * AEAD: Authenticated Encryption with Associated Data
  3. *
  4. * This file provides API support for AEAD algorithms.
  5. *
  6. * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
  7. *
  8. * This program is free software; you can redistribute it and/or modify it
  9. * under the terms of the GNU General Public License as published by the Free
  10. * Software Foundation; either version 2 of the License, or (at your option)
  11. * any later version.
  12. *
  13. */
  14. #include <crypto/internal/geniv.h>
  15. #include <crypto/scatterwalk.h>
  16. #include <linux/err.h>
  17. #include <linux/init.h>
  18. #include <linux/kernel.h>
  19. #include <linux/module.h>
  20. #include <linux/rtnetlink.h>
  21. #include <linux/sched.h>
  22. #include <linux/slab.h>
  23. #include <linux/seq_file.h>
  24. #include <linux/cryptouser.h>
  25. #include <net/netlink.h>
  26. #include "internal.h"
/*
 * Per-request state for the "compat" geniv wrapper that bridges the old
 * givcrypt interface to the new AEAD request layout.
 */
struct compat_request_ctx {
	struct scatterlist src[2];	/* fast-forwarded source window */
	struct scatterlist dst[2];	/* fast-forwarded destination window */
	struct scatterlist ivbuf[2];	/* sg window over the IV area in dst */
	struct scatterlist *ivsg;	/* points at the IV inside dst */
	struct aead_givcrypt_request subreq;	/* request passed to the child */
};

/* Fallback giv ops installed for algorithms without an IV (ivsize == 0). */
static int aead_null_givencrypt(struct aead_givcrypt_request *req);
static int aead_null_givdecrypt(struct aead_givcrypt_request *req);
  36. static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
  37. unsigned int keylen)
  38. {
  39. unsigned long alignmask = crypto_aead_alignmask(tfm);
  40. int ret;
  41. u8 *buffer, *alignbuffer;
  42. unsigned long absize;
  43. absize = keylen + alignmask;
  44. buffer = kmalloc(absize, GFP_ATOMIC);
  45. if (!buffer)
  46. return -ENOMEM;
  47. alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
  48. memcpy(alignbuffer, key, keylen);
  49. ret = tfm->setkey(tfm, alignbuffer, keylen);
  50. memset(alignbuffer, 0, keylen);
  51. kfree(buffer);
  52. return ret;
  53. }
  54. int crypto_aead_setkey(struct crypto_aead *tfm,
  55. const u8 *key, unsigned int keylen)
  56. {
  57. unsigned long alignmask = crypto_aead_alignmask(tfm);
  58. tfm = tfm->child;
  59. if ((unsigned long)key & alignmask)
  60. return setkey_unaligned(tfm, key, keylen);
  61. return tfm->setkey(tfm, key, keylen);
  62. }
  63. EXPORT_SYMBOL_GPL(crypto_aead_setkey);
  64. int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
  65. {
  66. int err;
  67. if (authsize > crypto_aead_maxauthsize(tfm))
  68. return -EINVAL;
  69. if (tfm->setauthsize) {
  70. err = tfm->setauthsize(tfm->child, authsize);
  71. if (err)
  72. return err;
  73. }
  74. tfm->child->authsize = authsize;
  75. tfm->authsize = authsize;
  76. return 0;
  77. }
  78. EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);
/* Scratch space prepended to every request for the old-API bridge. */
struct aead_old_request {
	struct scatterlist srcbuf[2];	/* fast-forwarded source sg */
	struct scatterlist dstbuf[2];	/* fast-forwarded destination sg */
	struct aead_request subreq;	/* request handed to the old op */
};
  84. unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
  85. {
  86. return tfm->reqsize + sizeof(struct aead_old_request);
  87. }
  88. EXPORT_SYMBOL_GPL(crypto_aead_reqsize);
/*
 * Translate a new-style AEAD request into the layout expected by an
 * old-style implementation: the associated data is split off the front
 * of src/dst and passed separately via set_assoc().
 */
static int old_crypt(struct aead_request *req,
		     int (*crypt)(struct aead_request *req))
{
	struct aead_old_request *nreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct scatterlist *src, *dst;

	/* Requests already in the old layout go straight through. */
	if (req->old)
		return crypt(req);

	src = scatterwalk_ffwd(nreq->srcbuf, req->src, req->assoclen);
	/* In-place operation reuses the forwarded source window. */
	dst = req->src == req->dst ?
	      src : scatterwalk_ffwd(nreq->dstbuf, req->dst, req->assoclen);

	aead_request_set_tfm(&nreq->subreq, aead);
	aead_request_set_callback(&nreq->subreq, aead_request_flags(req),
				  req->base.complete, req->base.data);
	aead_request_set_crypt(&nreq->subreq, src, dst, req->cryptlen,
			       req->iv);
	aead_request_set_assoc(&nreq->subreq, req->src, req->assoclen);

	return crypt(&nreq->subreq);
}
  108. static int old_encrypt(struct aead_request *req)
  109. {
  110. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  111. struct old_aead_alg *alg = crypto_old_aead_alg(aead);
  112. return old_crypt(req, alg->encrypt);
  113. }
  114. static int old_decrypt(struct aead_request *req)
  115. {
  116. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  117. struct old_aead_alg *alg = crypto_old_aead_alg(aead);
  118. return old_crypt(req, alg->decrypt);
  119. }
  120. static int no_givcrypt(struct aead_givcrypt_request *req)
  121. {
  122. return -ENOSYS;
  123. }
/*
 * Initialise a transform backed by an old-style AEAD algorithm: wire
 * the crypto_aead ops up to the old op table via the bridging helpers.
 */
static int crypto_old_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct old_aead_alg *alg = &tfm->__crt_alg->cra_aead;
	struct crypto_aead *crt = __crypto_aead_cast(tfm);

	/* Reject absurd sizes so internal buffers stay bounded. */
	if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
		return -EINVAL;

	crt->setkey = alg->setkey;
	crt->setauthsize = alg->setauthsize;
	crt->encrypt = old_encrypt;
	crt->decrypt = old_decrypt;
	if (alg->ivsize) {
		/* Missing generators fall back to an -ENOSYS stub. */
		crt->givencrypt = alg->givencrypt ?: no_givcrypt;
		crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
	} else {
		/* No IV: giv ops degenerate to plain encrypt/decrypt. */
		crt->givencrypt = aead_null_givencrypt;
		crt->givdecrypt = aead_null_givdecrypt;
	}
	crt->child = __crypto_aead_cast(tfm);
	crt->authsize = alg->maxauthsize;

	return 0;
}
  145. static void crypto_aead_exit_tfm(struct crypto_tfm *tfm)
  146. {
  147. struct crypto_aead *aead = __crypto_aead_cast(tfm);
  148. struct aead_alg *alg = crypto_aead_alg(aead);
  149. alg->exit(aead);
  150. }
/*
 * Common init hook.  Old-style algorithms (detected by a non-NULL
 * cra_aead.encrypt) are routed to the compatibility initialiser;
 * new-style ones get their ops copied straight from the aead_alg.
 */
static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	if (crypto_old_aead_alg(aead)->encrypt)
		return crypto_old_aead_init_tfm(tfm);

	aead->setkey = alg->setkey;
	aead->setauthsize = alg->setauthsize;
	aead->encrypt = alg->encrypt;
	aead->decrypt = alg->decrypt;
	aead->child = __crypto_aead_cast(tfm);
	aead->authsize = alg->maxauthsize;

	/* Install the exit hook only when there is something to call. */
	if (alg->exit)
		aead->base.exit = crypto_aead_exit_tfm;

	if (alg->init)
		return alg->init(aead);

	return 0;
}
  169. #ifdef CONFIG_NET
  170. static int crypto_old_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
  171. {
  172. struct crypto_report_aead raead;
  173. struct old_aead_alg *aead = &alg->cra_aead;
  174. strncpy(raead.type, "aead", sizeof(raead.type));
  175. strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));
  176. raead.blocksize = alg->cra_blocksize;
  177. raead.maxauthsize = aead->maxauthsize;
  178. raead.ivsize = aead->ivsize;
  179. if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
  180. sizeof(struct crypto_report_aead), &raead))
  181. goto nla_put_failure;
  182. return 0;
  183. nla_put_failure:
  184. return -EMSGSIZE;
  185. }
  186. #else
  187. static int crypto_old_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
  188. {
  189. return -ENOSYS;
  190. }
  191. #endif
  192. static void crypto_old_aead_show(struct seq_file *m, struct crypto_alg *alg)
  193. __attribute__ ((unused));
  194. static void crypto_old_aead_show(struct seq_file *m, struct crypto_alg *alg)
  195. {
  196. struct old_aead_alg *aead = &alg->cra_aead;
  197. seq_printf(m, "type : aead\n");
  198. seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
  199. "yes" : "no");
  200. seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
  201. seq_printf(m, "ivsize : %u\n", aead->ivsize);
  202. seq_printf(m, "maxauthsize : %u\n", aead->maxauthsize);
  203. seq_printf(m, "geniv : %s\n", aead->geniv ?: "<built-in>");
  204. }
/*
 * crypto_type for AEAD transforms reached through the legacy lookup
 * path.  GENIV is cleared from the candidate's flags (maskclear) but
 * not required by maskset, so both plain and geniv-wrapped algorithms
 * can match; crypto_lookup_aead() handles default-geniv construction.
 */
const struct crypto_type crypto_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_old_aead_show,
#endif
	.report = crypto_old_aead_report,
	.lookup = crypto_lookup_aead,
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_aead_type);
  219. #ifdef CONFIG_NET
  220. static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
  221. {
  222. struct crypto_report_aead raead;
  223. struct aead_alg *aead = container_of(alg, struct aead_alg, base);
  224. strncpy(raead.type, "aead", sizeof(raead.type));
  225. strncpy(raead.geniv, "<none>", sizeof(raead.geniv));
  226. raead.blocksize = alg->cra_blocksize;
  227. raead.maxauthsize = aead->maxauthsize;
  228. raead.ivsize = aead->ivsize;
  229. if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
  230. sizeof(struct crypto_report_aead), &raead))
  231. goto nla_put_failure;
  232. return 0;
  233. nla_put_failure:
  234. return -EMSGSIZE;
  235. }
  236. #else
  237. static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
  238. {
  239. return -ENOSYS;
  240. }
  241. #endif
  242. static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
  243. __attribute__ ((unused));
  244. static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
  245. {
  246. struct aead_alg *aead = container_of(alg, struct aead_alg, base);
  247. seq_printf(m, "type : aead\n");
  248. seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
  249. "yes" : "no");
  250. seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
  251. seq_printf(m, "ivsize : %u\n", aead->ivsize);
  252. seq_printf(m, "maxauthsize : %u\n", aead->maxauthsize);
  253. seq_printf(m, "geniv : <none>\n");
  254. }
/*
 * crypto_type for algorithms registered through the new aead_alg
 * interface (crypto_register_aead / aead_register_instance).
 */
static const struct crypto_type crypto_new_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_aead_show,
#endif
	.report = crypto_aead_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
/* For IV-less algorithms the giv ops are plain encrypt/decrypt. */
static int aead_null_givencrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_encrypt(&req->areq);
}

static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_decrypt(&req->areq);
}
  275. #ifdef CONFIG_NET
  276. static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
  277. {
  278. struct crypto_report_aead raead;
  279. struct old_aead_alg *aead = &alg->cra_aead;
  280. strncpy(raead.type, "nivaead", sizeof(raead.type));
  281. strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));
  282. raead.blocksize = alg->cra_blocksize;
  283. raead.maxauthsize = aead->maxauthsize;
  284. raead.ivsize = aead->ivsize;
  285. if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
  286. sizeof(struct crypto_report_aead), &raead))
  287. goto nla_put_failure;
  288. return 0;
  289. nla_put_failure:
  290. return -EMSGSIZE;
  291. }
  292. #else
  293. static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
  294. {
  295. return -ENOSYS;
  296. }
  297. #endif
  298. static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
  299. __attribute__ ((unused));
  300. static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
  301. {
  302. struct old_aead_alg *aead = &alg->cra_aead;
  303. seq_printf(m, "type : nivaead\n");
  304. seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
  305. "yes" : "no");
  306. seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
  307. seq_printf(m, "ivsize : %u\n", aead->ivsize);
  308. seq_printf(m, "maxauthsize : %u\n", aead->maxauthsize);
  309. seq_printf(m, "geniv : %s\n", aead->geniv);
  310. }
/*
 * crypto_type for lookups that insist on a built-in IV generator:
 * maskset requires the GENIV bit, so only nivaead algorithms match.
 */
const struct crypto_type crypto_nivaead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_nivaead_show,
#endif
	.report = crypto_nivaead_report,
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);
  324. static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
  325. const char *name, u32 type, u32 mask)
  326. {
  327. spawn->base.frontend = &crypto_nivaead_type;
  328. return crypto_grab_spawn(&spawn->base, name, type, mask);
  329. }
  330. static int aead_geniv_setkey(struct crypto_aead *tfm,
  331. const u8 *key, unsigned int keylen)
  332. {
  333. struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
  334. return crypto_aead_setkey(ctx->child, key, keylen);
  335. }
  336. static int aead_geniv_setauthsize(struct crypto_aead *tfm,
  337. unsigned int authsize)
  338. {
  339. struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
  340. return crypto_aead_setauthsize(ctx->child, authsize);
  341. }
/*
 * Finish a compat encryption that used a bounce buffer for the IV:
 * copy the generated IV from the buffer into the destination
 * scatterlist, then wipe and free the buffer.
 */
static void compat_encrypt_complete2(struct aead_request *req, int err)
{
	struct compat_request_ctx *rctx = aead_request_ctx(req);
	struct aead_givcrypt_request *subreq = &rctx->subreq;
	struct crypto_aead *geniv;

	/* Still in flight; we will be called again on final completion. */
	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	scatterwalk_map_and_copy(subreq->giv, rctx->ivsg, 0,
				 crypto_aead_ivsize(geniv), 1);

out:
	/* The buffer held IV material; zeroize before freeing. */
	kzfree(subreq->giv);
}
  357. static void compat_encrypt_complete(struct crypto_async_request *base, int err)
  358. {
  359. struct aead_request *req = base->data;
  360. compat_encrypt_complete2(req, err);
  361. aead_request_complete(req, err);
  362. }
  363. static int compat_encrypt(struct aead_request *req)
  364. {
  365. struct crypto_aead *geniv = crypto_aead_reqtfm(req);
  366. struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
  367. struct compat_request_ctx *rctx = aead_request_ctx(req);
  368. struct aead_givcrypt_request *subreq = &rctx->subreq;
  369. unsigned int ivsize = crypto_aead_ivsize(geniv);
  370. struct scatterlist *src, *dst;
  371. crypto_completion_t compl;
  372. void *data;
  373. u8 *info;
  374. __be64 seq;
  375. int err;
  376. if (req->cryptlen < ivsize)
  377. return -EINVAL;
  378. compl = req->base.complete;
  379. data = req->base.data;
  380. rctx->ivsg = scatterwalk_ffwd(rctx->ivbuf, req->dst, req->assoclen);
  381. info = PageHighMem(sg_page(rctx->ivsg)) ? NULL : sg_virt(rctx->ivsg);
  382. if (!info) {
  383. info = kmalloc(ivsize, req->base.flags &
  384. CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
  385. GFP_ATOMIC);
  386. if (!info)
  387. return -ENOMEM;
  388. compl = compat_encrypt_complete;
  389. data = req;
  390. }
  391. memcpy(&seq, req->iv + ivsize - sizeof(seq), sizeof(seq));
  392. src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen + ivsize);
  393. dst = req->src == req->dst ?
  394. src : scatterwalk_ffwd(rctx->dst, rctx->ivsg, ivsize);
  395. aead_givcrypt_set_tfm(subreq, ctx->child);
  396. aead_givcrypt_set_callback(subreq, req->base.flags,
  397. req->base.complete, req->base.data);
  398. aead_givcrypt_set_crypt(subreq, src, dst,
  399. req->cryptlen - ivsize, req->iv);
  400. aead_givcrypt_set_assoc(subreq, req->src, req->assoclen);
  401. aead_givcrypt_set_giv(subreq, info, be64_to_cpu(seq));
  402. err = crypto_aead_givencrypt(subreq);
  403. if (unlikely(PageHighMem(sg_page(rctx->ivsg))))
  404. compat_encrypt_complete2(req, err);
  405. return err;
  406. }
/*
 * Decrypt via the compat layout: the IV sits between the associated
 * data and the ciphertext in the source buffer, so the child only sees
 * the data beyond assoclen + ivsize.
 */
static int compat_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct compat_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq.areq;
	unsigned int ivsize = crypto_aead_ivsize(geniv);
	struct scatterlist *src, *dst;
	crypto_completion_t compl;
	void *data;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;

	src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen + ivsize);
	dst = req->src == req->dst ?
	      src : scatterwalk_ffwd(rctx->dst, req->dst,
				     req->assoclen + ivsize);

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, src, dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_assoc(subreq, req->src, req->assoclen);

	/* Pull the transmitted IV out of the source buffer. */
	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

	return crypto_aead_decrypt(subreq);
}
  433. static int compat_encrypt_first(struct aead_request *req)
  434. {
  435. struct crypto_aead *geniv = crypto_aead_reqtfm(req);
  436. struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
  437. int err = 0;
  438. spin_lock_bh(&ctx->lock);
  439. if (geniv->encrypt != compat_encrypt_first)
  440. goto unlock;
  441. geniv->encrypt = compat_encrypt;
  442. unlock:
  443. spin_unlock_bh(&ctx->lock);
  444. if (err)
  445. return err;
  446. return compat_encrypt(req);
  447. }
/*
 * Init for the compat geniv wrapper.  The spawned child transform is
 * moved into the context, and the tfm's own child pointer is pointed
 * back at itself so the generic setkey/setauthsize paths land on the
 * wrapper's ops.
 */
static int aead_geniv_init_compat(struct crypto_tfm *tfm)
{
	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	int err;

	spin_lock_init(&ctx->lock);

	crypto_aead_set_reqsize(geniv, sizeof(struct compat_request_ctx));

	err = aead_geniv_init(tfm);

	/* NOTE(review): on aead_geniv_init() failure geniv->child is
	 * left untouched, so ctx->child inherits whatever was there —
	 * confirm the error path never frees it. */
	ctx->child = geniv->child;
	geniv->child = geniv;

	return err;
}
  460. static void aead_geniv_exit_compat(struct crypto_tfm *tfm)
  461. {
  462. struct crypto_aead *geniv = __crypto_aead_cast(tfm);
  463. struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
  464. crypto_free_aead(ctx->child);
  465. }
/*
 * Build a geniv template instance around an inner AEAD algorithm.
 *
 * Two modes, selected by CRYPTO_ALG_GENIV in the attribute mask:
 *  - default-geniv: re-expose the inner algorithm under its own name
 *    (old-style op table copied through);
 *  - explicit template: wrap the inner algorithm with the compat
 *    encrypt/decrypt entry points under "tmpl(alg)".
 */
struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
				       struct rtattr **tb, u32 type, u32 mask)
{
	const char *name;
	struct crypto_aead_spawn *spawn;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	unsigned int ivsize;
	unsigned int maxauthsize;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
	    algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = aead_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = (algt->mask & CRYPTO_ALG_GENIV) ?
	      crypto_grab_nivaead(spawn, name, type, mask) :
	      crypto_grab_aead(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	ivsize = crypto_aead_alg_ivsize(alg);
	maxauthsize = crypto_aead_alg_maxauthsize(alg);

	err = -EINVAL;
	/* The IV must have room for a 64-bit sequence number. */
	if (ivsize < sizeof(u64))
		goto err_drop_alg;

	/*
	 * This is only true if we're constructing an algorithm with its
	 * default IV generator.  For the default generator we elide the
	 * template name and double-check the IV generator.
	 */
	if (algt->mask & CRYPTO_ALG_GENIV) {
		if (!alg->base.cra_aead.encrypt)
			goto err_drop_alg;
		if (strcmp(tmpl->name, alg->base.cra_aead.geniv))
			goto err_drop_alg;

		/* Re-expose the inner algorithm under its own names. */
		memcpy(inst->alg.base.cra_name, alg->base.cra_name,
		       CRYPTO_MAX_ALG_NAME);
		memcpy(inst->alg.base.cra_driver_name,
		       alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME);

		inst->alg.base.cra_flags = CRYPTO_ALG_TYPE_AEAD |
					   CRYPTO_ALG_GENIV;
		inst->alg.base.cra_flags |= alg->base.cra_flags &
					    CRYPTO_ALG_ASYNC;
		inst->alg.base.cra_priority = alg->base.cra_priority;
		inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
		inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
		inst->alg.base.cra_type = &crypto_aead_type;

		inst->alg.base.cra_aead.ivsize = ivsize;
		inst->alg.base.cra_aead.maxauthsize = maxauthsize;

		inst->alg.base.cra_aead.setkey = alg->base.cra_aead.setkey;
		inst->alg.base.cra_aead.setauthsize =
			alg->base.cra_aead.setauthsize;
		inst->alg.base.cra_aead.encrypt = alg->base.cra_aead.encrypt;
		inst->alg.base.cra_aead.decrypt = alg->base.cra_aead.decrypt;

		goto out;
	}

	/* Explicit template: build "tmpl(alg)" names. */
	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);

	inst->alg.setkey = aead_geniv_setkey;
	inst->alg.setauthsize = aead_geniv_setauthsize;

	inst->alg.ivsize = ivsize;
	inst->alg.maxauthsize = maxauthsize;

	inst->alg.encrypt = compat_encrypt_first;
	inst->alg.decrypt = compat_decrypt;

	inst->alg.base.cra_init = aead_geniv_init_compat;
	inst->alg.base.cra_exit = aead_geniv_exit_compat;

out:
	return inst;

err_drop_alg:
	crypto_drop_aead(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);
  567. void aead_geniv_free(struct aead_instance *inst)
  568. {
  569. crypto_drop_aead(aead_instance_ctx(inst));
  570. kfree(inst);
  571. }
  572. EXPORT_SYMBOL_GPL(aead_geniv_free);
  573. int aead_geniv_init(struct crypto_tfm *tfm)
  574. {
  575. struct crypto_instance *inst = (void *)tfm->__crt_alg;
  576. struct crypto_aead *child;
  577. struct crypto_aead *aead;
  578. aead = __crypto_aead_cast(tfm);
  579. child = crypto_spawn_aead(crypto_instance_ctx(inst));
  580. if (IS_ERR(child))
  581. return PTR_ERR(child);
  582. aead->child = child;
  583. aead->reqsize += crypto_aead_reqsize(child);
  584. return 0;
  585. }
  586. EXPORT_SYMBOL_GPL(aead_geniv_init);
  587. void aead_geniv_exit(struct crypto_tfm *tfm)
  588. {
  589. crypto_free_aead(__crypto_aead_cast(tfm)->child);
  590. }
  591. EXPORT_SYMBOL_GPL(aead_geniv_exit);
/*
 * Instantiate an algorithm's default IV generator template and ask the
 * caller to retry the lookup: -EAGAIN on success tells
 * crypto_lookup_aead() to repeat the search and find the freshly
 * registered instance.  A larval entry serializes concurrent attempts.
 */
static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
{
	struct rtattr *tb[3];
	struct {
		struct rtattr attr;
		struct crypto_attr_type data;
	} ptype;
	struct {
		struct rtattr attr;
		struct crypto_attr_alg data;
	} palg;
	struct crypto_template *tmpl;
	struct crypto_instance *inst;
	struct crypto_alg *larval;
	const char *geniv;
	int err;

	larval = crypto_larval_lookup(alg->cra_driver_name,
				      CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
				      CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto out;

	/* Someone else won the race and registered the instance. */
	err = -EAGAIN;
	if (!crypto_is_larval(larval))
		goto drop_larval;

	ptype.attr.rta_len = sizeof(ptype);
	ptype.attr.rta_type = CRYPTOA_TYPE;
	ptype.data.type = type | CRYPTO_ALG_GENIV;
	/* GENIV tells the template that we're making a default geniv. */
	ptype.data.mask = mask | CRYPTO_ALG_GENIV;
	tb[0] = &ptype.attr;

	palg.attr.rta_len = sizeof(palg);
	palg.attr.rta_type = CRYPTOA_ALG;
	/* Must use the exact name to locate ourselves. */
	memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
	tb[1] = &palg.attr;

	tb[2] = NULL;

	geniv = alg->cra_aead.geniv;

	tmpl = crypto_lookup_template(geniv);
	err = -ENOENT;
	if (!tmpl)
		goto kill_larval;

	/* Newer templates register themselves via ->create(). */
	if (tmpl->create) {
		err = tmpl->create(tmpl, tb);
		if (err)
			goto put_tmpl;
		goto ok;
	}

	inst = tmpl->alloc(tb);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto put_tmpl;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		tmpl->free(inst);
		goto put_tmpl;
	}

ok:
	/* Redo the lookup to use the instance we just registered. */
	err = -EAGAIN;

put_tmpl:
	crypto_tmpl_put(tmpl);
kill_larval:
	crypto_larval_kill(larval);
drop_larval:
	crypto_mod_put(larval);
out:
	crypto_mod_put(alg);
	return err;
}
/*
 * Look up an AEAD algorithm by name.  Algorithms already carrying the
 * crypto_aead_type, and IV-less old-style ones, are returned directly;
 * old-style algorithms that need an IV generator are wrapped in their
 * default geniv template first (caller retries on -EAGAIN).
 */
struct crypto_alg *crypto_lookup_aead(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_alg_mod_lookup(name, type, mask);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type)
		return alg;

	if (!alg->cra_aead.ivsize)
		return alg;

	/* Retry, this time insisting on a fully tested algorithm. */
	crypto_mod_put(alg);
	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
				    mask & ~CRYPTO_ALG_TESTED);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type) {
		/* Reject if the TESTED state doesn't satisfy the caller. */
		if (~alg->cra_flags & (type ^ ~mask) & CRYPTO_ALG_TESTED) {
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
		return alg;
	}

	BUG_ON(!alg->cra_aead.ivsize);

	return ERR_PTR(crypto_nivaead_default(alg, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_lookup_aead);
  688. int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
  689. u32 type, u32 mask)
  690. {
  691. spawn->base.frontend = &crypto_aead_type;
  692. return crypto_grab_spawn(&spawn->base, name, type, mask);
  693. }
  694. EXPORT_SYMBOL_GPL(crypto_grab_aead);
  695. struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
  696. {
  697. return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
  698. }
  699. EXPORT_SYMBOL_GPL(crypto_alloc_aead);
  700. static int aead_prepare_alg(struct aead_alg *alg)
  701. {
  702. struct crypto_alg *base = &alg->base;
  703. if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
  704. return -EINVAL;
  705. base->cra_type = &crypto_new_aead_type;
  706. base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
  707. base->cra_flags |= CRYPTO_ALG_TYPE_AEAD;
  708. return 0;
  709. }
  710. int crypto_register_aead(struct aead_alg *alg)
  711. {
  712. struct crypto_alg *base = &alg->base;
  713. int err;
  714. err = aead_prepare_alg(alg);
  715. if (err)
  716. return err;
  717. return crypto_register_alg(base);
  718. }
  719. EXPORT_SYMBOL_GPL(crypto_register_aead);
  720. void crypto_unregister_aead(struct aead_alg *alg)
  721. {
  722. crypto_unregister_alg(&alg->base);
  723. }
  724. EXPORT_SYMBOL_GPL(crypto_unregister_aead);
  725. int crypto_register_aeads(struct aead_alg *algs, int count)
  726. {
  727. int i, ret;
  728. for (i = 0; i < count; i++) {
  729. ret = crypto_register_aead(&algs[i]);
  730. if (ret)
  731. goto err;
  732. }
  733. return 0;
  734. err:
  735. for (--i; i >= 0; --i)
  736. crypto_unregister_aead(&algs[i]);
  737. return ret;
  738. }
  739. EXPORT_SYMBOL_GPL(crypto_register_aeads);
  740. void crypto_unregister_aeads(struct aead_alg *algs, int count)
  741. {
  742. int i;
  743. for (i = count - 1; i >= 0; --i)
  744. crypto_unregister_aead(&algs[i]);
  745. }
  746. EXPORT_SYMBOL_GPL(crypto_unregister_aeads);
  747. int aead_register_instance(struct crypto_template *tmpl,
  748. struct aead_instance *inst)
  749. {
  750. int err;
  751. err = aead_prepare_alg(&inst->alg);
  752. if (err)
  753. return err;
  754. return crypto_register_instance(tmpl, aead_crypto_instance(inst));
  755. }
  756. EXPORT_SYMBOL_GPL(aead_register_instance);
  757. MODULE_LICENSE("GPL");
  758. MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");