aes_xts.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>

#include "aesp8-ppc.h"
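
/*
 * Per-transform context: the VSX-expanded encrypt, decrypt and tweak
 * keys, plus a generic xts(aes) fallback used whenever the VSX unit
 * cannot service a request.
 */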
struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};
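
/*
 * Allocate the generic xts(aes) fallback once at tfm init time, and grow
 * the request size so a fallback sub-request can live inside our request
 * context instead of being allocated on the crypt path.
 */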
static int p8_aes_xts_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	fallback = crypto_alloc_skcipher("xts(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK |
					 CRYPTO_ALG_ASYNC);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate xts(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}
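
/* Release the fallback transform allocated in p8_aes_xts_init(). */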
static void p8_aes_xts_exit(struct crypto_skcipher *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}
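
/*
 * Expand the XTS key: the first half of the key material becomes the data
 * encrypt/decrypt keys, the second half becomes the tweak key.  The VSX
 * key-schedule helpers run with preemption and page faults disabled so the
 * vector state cannot be clobbered.  The fallback is keyed with the whole
 * key so either path can service a request.
 */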
static int p8_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, key, keylen);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
	ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}
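
/*
 * Encrypt or decrypt one request.  Requests shorter than one AES block are
 * rejected.  If the VSX unit is unusable in this context, or the length is
 * not a multiple of the XTS block size, the request is handed to the generic
 * fallback via a sub-request stored in the request context.  Otherwise the
 * initial tweak is computed by encrypting the IV with the tweak key, and the
 * data is processed block-wise under enable_kernel_vsx(), re-enabling
 * preemption between walk steps.
 */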
static int p8_aes_xts_crypt(struct skcipher_request *req, int enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	u8 tweak[AES_BLOCK_SIZE];
	int ret;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	if (!crypto_simd_usable() || (req->cryptlen % XTS_BLOCK_SIZE) != 0) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return enc ? crypto_skcipher_encrypt(subreq) :
			     crypto_skcipher_decrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();

	aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);

	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	while ((nbytes = walk.nbytes) != 0) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		if (enc)
			aes_p8_xts_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   round_down(nbytes, AES_BLOCK_SIZE),
					   &ctx->enc_key, NULL, tweak);
		else
			aes_p8_xts_decrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   round_down(nbytes, AES_BLOCK_SIZE),
					   &ctx->dec_key, NULL, tweak);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	return ret;
}
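
/* Thin wrappers giving the skcipher API separate encrypt/decrypt entries. */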
static int p8_aes_xts_encrypt(struct skcipher_request *req)
{
	return p8_aes_xts_crypt(req, 1);
}

static int p8_aes_xts_decrypt(struct skcipher_request *req)
{
	return p8_aes_xts_crypt(req, 0);
}
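
/*
 * Algorithm descriptor.  The priority of 2000 makes this implementation
 * preferred over the generic xts(aes), and CRYPTO_ALG_NEED_FALLBACK
 * advertises that it relies on a fallback cipher.  XTS takes two AES keys,
 * hence the doubled min/max key sizes.
 */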
struct skcipher_alg p8_aes_xts_alg = {
	.base.cra_name = "xts(aes)",
	.base.cra_driver_name = "p8_aes_xts",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.setkey = p8_aes_xts_setkey,
	.encrypt = p8_aes_xts_encrypt,
	.decrypt = p8_aes_xts_decrypt,
	.init = p8_aes_xts_init,
	.exit = p8_aes_xts_exit,
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
};
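
/*
 * For illustration only, not part of the upstream file: a minimal sketch of
 * how p8_aes_xts_alg could be registered with the crypto API.  In the real
 * driver the VMX module's init code performs the registration, so the
 * function names below are hypothetical.
 */
static int p8_aes_xts_example_init(void)
{
	/* Expose "xts(aes)" / "p8_aes_xts" to crypto API users. */
	return crypto_register_skcipher(&p8_aes_xts_alg);
}

static void p8_aes_xts_example_exit(void)
{
	crypto_unregister_skcipher(&p8_aes_xts_alg);
}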