aes_xts.c

/**
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */
#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"
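
/*
 * Per-transform context: the expanded Power8 (VSX) encryption, decryption
 * and tweak keys, plus a fallback xts(aes) skcipher (typically the generic
 * software implementation) used when the VSX unit cannot be touched, e.g.
 * in interrupt context.
 */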
struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};
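
/*
 * Allocate the fallback xts(aes) skcipher at tfm-init time and copy the
 * flags of the outer transform onto it.
 */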
static int p8_aes_xts_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *fallback;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_skcipher(alg, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
			"Failed to allocate transformation for '%s': %ld\n",
			alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}
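
/* Release the fallback skcipher when the transform is torn down. */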
static void p8_aes_xts_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}
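
/*
 * XTS uses a double-length key: the first half is expanded into the data
 * encryption and decryption keys, the second half into the tweak key.
 * Key expansion runs on the VSX unit, so preemption and pagefaults are
 * disabled around it.  The same key material is also handed to the
 * software fallback.
 */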
static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
	ret += aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret += aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}
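
/*
 * Common encrypt/decrypt path.  In interrupt context the VSX registers
 * must not be used, so the request is forwarded to the software fallback.
 * Otherwise the tweak is derived by encrypting the IV with the tweak key,
 * and full blocks are processed with the Power8 XTS primitives while
 * walking the scatterlists; any partial tail is left to the walk logic.
 */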
static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src,
			    unsigned int nbytes, int enc)
{
	int ret;
	u8 tweak[AES_BLOCK_SIZE];
	u8 *iv;
	struct blkcipher_walk walk;
	struct p8_aes_xts_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt(desc, &walk);

		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		iv = walk.iv;
		memset(tweak, 0, AES_BLOCK_SIZE);
		aes_p8_encrypt(iv, tweak, &ctx->tweak_key);

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		while ((nbytes = walk.nbytes)) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			if (enc)
				aes_p8_xts_encrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->enc_key, NULL, tweak);
			else
				aes_p8_xts_decrypt(walk.src.virt.addr,
						   walk.dst.virt.addr,
						   nbytes & AES_BLOCK_MASK,
						   &ctx->dec_key, NULL, tweak);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
	}
	return ret;
}
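
/* Thin wrappers selecting the encrypt or decrypt direction. */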
static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
}

static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
}
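
/*
 * Algorithm registration.  The high cra_priority makes this driver
 * preferred over the generic xts(aes) implementation, and
 * CRYPTO_ALG_NEED_FALLBACK marks it as relying on a software fallback
 * (and keeps it from being selected as a fallback itself).
 */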
struct crypto_alg p8_aes_xts_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "p8_aes_xts",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.cra_init = p8_aes_xts_init,
	.cra_exit = p8_aes_xts_exit,
	.cra_blkcipher = {
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = 2 * AES_MIN_KEY_SIZE,
		.max_keysize = 2 * AES_MAX_KEY_SIZE,
		.setkey = p8_aes_xts_setkey,
		.encrypt = p8_aes_xts_encrypt,
		.decrypt = p8_aes_xts_decrypt,
	}
};