/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/compiler.h>
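
/*
 * Per-transform context: the PCBC mode simply wraps a single-block cipher
 * ("child") supplied when the template is instantiated, e.g. AES.
 */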
struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};
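
/*
 * Key the child cipher.  Request flags (CRYPTO_TFM_REQ_*) are copied from
 * the skcipher down to the child, and any result flags (CRYPTO_TFM_RES_*)
 * the child raises are propagated back up to the caller.
 */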
static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}
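
/*
 * Encrypt one walk chunk out-of-place (source and destination buffers are
 * distinct).  PCBC chains each block as
 *
 *	C[i] = E_K(P[i] ^ V[i]),  V[i+1] = P[i] ^ C[i],  V[1] = IV
 *
 * with V[i] kept in walk->iv so the chaining value carries over to the next
 * chunk.  Returns the number of bytes (less than one block) left over.
 */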
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
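
/*
 * In-place variant: the plaintext block is copied to a stack buffer
 * (bounded by MAX_CIPHER_BLOCKSIZE) before it is overwritten with
 * ciphertext, because PCBC needs both P[i] and C[i] to form the next
 * chaining value.
 */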
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		crypto_xor_cpy(iv, tmpbuf, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
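
/*
 * Top-level encrypt: walk the request's scatterlists one mappable chunk at
 * a time and dispatch to the in-place or out-of-place helper depending on
 * whether source and destination map to the same memory.
 */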
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
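
/*
 * Decrypt one walk chunk out-of-place.  Decryption inverts the chaining:
 *
 *	P[i] = D_K(C[i]) ^ V[i],  V[i+1] = P[i] ^ C[i],  V[1] = IV
 */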
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
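
/*
 * In-place variant: the ciphertext block is saved in tmpbuf before being
 * overwritten with plaintext, since it is still needed for the next
 * chaining value.  The buffer is kept u32-aligned to help crypto_xor_cpy().
 */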
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(u32));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		crypto_xor_cpy(iv, src, tmpbuf, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
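
/* Top-level decrypt: same walk structure as crypto_pcbc_encrypt(). */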
static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
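
/*
 * Instance init/exit: allocate the underlying single-block cipher from the
 * spawn when a pcbc(...) transform is created, and free it on teardown.
 */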
static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
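
/*
 * Template ->create: called when an instance such as "pcbc(aes)" is
 * requested.  It looks up the underlying cipher algorithm from the template
 * arguments, wraps it in an skcipher instance whose block size, alignment
 * mask and key sizes mirror the cipher's, sets the IV size to one block,
 * and registers the instance.
 */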
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
	    ~CRYPTO_ALG_INTERNAL)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
				      (algt->type & CRYPTO_ALG_INTERNAL),
				  CRYPTO_ALG_TYPE_MASK |
				  (algt->mask & CRYPTO_ALG_INTERNAL));
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto err_put_alg;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.init = crypto_pcbc_init_tfm;
	inst->alg.exit = crypto_pcbc_exit_tfm;

	inst->alg.setkey = crypto_pcbc_setkey;
	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	inst->free = crypto_pcbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;
	crypto_mod_put(alg);

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_put_alg:
	crypto_mod_put(alg);
err_free_inst:
	kfree(inst);
	goto out;
}
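
/*
 * Registering the template makes "pcbc(<cipher>)" instances available to
 * the rest of the kernel.  A minimal usage sketch (not part of this file):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("pcbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_skcipher(tfm);
 */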
static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");