// SPDX-License-Identifier: GPL-2.0
/*
 * GHASH routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015, 2019 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 *
 * Extended by Daniel Axtens <dja@axtens.net> to replace the fallback
 * mechanism. The new approach is based on arm64 code, which is:
 * Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */
#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/string.h>
#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/ghash.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/b128ops.h>
/*
 * GHASH primitives implemented in assembly; every call site below wraps
 * them in an enable_kernel_vsx()/disable_kernel_vsx() pair, so they
 * presumably rely on the VSX unit being enabled — confirm against the
 * asm source. gcm_gmult_p8() is declared but not called in this file.
 */
void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
		  const u8 *in, size_t len);
/* Per-tfm key state, populated by p8_ghash_setkey(). */
struct p8_ghash_ctx {
	/* key (hash table) used by vector asm */
	u128 htable[16];
	/* raw key used by software fallback (gf128mul_lle) */
	be128 key;
};
/* Per-request state: running digest plus a partial-block buffer. */
struct p8_ghash_desc_ctx {
	u64 shash[2];			/* current GHASH digest (16 bytes) */
	u8 buffer[GHASH_DIGEST_SIZE];	/* buffered partial input block */
	int bytes;			/* valid bytes in buffer (0..15) */
};
  40. static int p8_ghash_init(struct shash_desc *desc)
  41. {
  42. struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
  43. dctx->bytes = 0;
  44. memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
  45. return 0;
  46. }
/*
 * Install a 16-byte hash key: precompute the asm hash table and keep the
 * raw key for the software fallback used when SIMD is unavailable.
 * Returns 0 on success, -EINVAL for any other key length.
 */
static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/*
	 * The VSX unit is borrowed from the (possibly user-space) FPU
	 * state, so the enable/disable pair must run without preemption
	 * or page faults; this nesting order is deliberate.
	 */
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	gcm_init_p8(ctx->htable, (const u64 *) key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	memcpy(&ctx->key, key, GHASH_BLOCK_SIZE);

	return 0;
}
/*
 * Fold the single full block in dctx->buffer into the running digest.
 * Uses the VSX asm when SIMD is usable in this context, otherwise the
 * generic gf128mul fallback (XOR into the digest, then multiply by the
 * key in GF(2^128), little-endian bit order).
 */
static inline void __ghash_block(struct p8_ghash_ctx *ctx,
				 struct p8_ghash_desc_ctx *dctx)
{
	if (crypto_simd_usable()) {
		/* See p8_ghash_setkey(): VSX use must be atomic w.r.t.
		 * preemption and page faults. */
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		gcm_ghash_p8(dctx->shash, ctx->htable,
				dctx->buffer, GHASH_DIGEST_SIZE);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	} else {
		crypto_xor((u8 *)dctx->shash, dctx->buffer, GHASH_BLOCK_SIZE);
		gf128mul_lle((be128 *)dctx->shash, &ctx->key);
	}
}
/*
 * Fold srclen bytes of input into the running digest.  The caller
 * (p8_ghash_update) only ever passes a multiple of GHASH_BLOCK_SIZE;
 * the fallback loop silently drops any trailing partial block, so that
 * precondition matters.
 */
static inline void __ghash_blocks(struct p8_ghash_ctx *ctx,
				  struct p8_ghash_desc_ctx *dctx,
				  const u8 *src, unsigned int srclen)
{
	if (crypto_simd_usable()) {
		/* Same VSX enable/disable discipline as __ghash_block(). */
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		gcm_ghash_p8(dctx->shash, ctx->htable,
				src, srclen);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	} else {
		/* Software fallback: one GF(2^128) multiply per block. */
		while (srclen >= GHASH_BLOCK_SIZE) {
			crypto_xor((u8 *)dctx->shash, src, GHASH_BLOCK_SIZE);
			gf128mul_lle((be128 *)dctx->shash, &ctx->key);
			srclen -= GHASH_BLOCK_SIZE;
			src += GHASH_BLOCK_SIZE;
		}
	}
}
  102. static int p8_ghash_update(struct shash_desc *desc,
  103. const u8 *src, unsigned int srclen)
  104. {
  105. unsigned int len;
  106. struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
  107. struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
  108. if (dctx->bytes) {
  109. if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
  110. memcpy(dctx->buffer + dctx->bytes, src,
  111. srclen);
  112. dctx->bytes += srclen;
  113. return 0;
  114. }
  115. memcpy(dctx->buffer + dctx->bytes, src,
  116. GHASH_DIGEST_SIZE - dctx->bytes);
  117. __ghash_block(ctx, dctx);
  118. src += GHASH_DIGEST_SIZE - dctx->bytes;
  119. srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
  120. dctx->bytes = 0;
  121. }
  122. len = srclen & ~(GHASH_DIGEST_SIZE - 1);
  123. if (len) {
  124. __ghash_blocks(ctx, dctx, src, len);
  125. src += len;
  126. srclen -= len;
  127. }
  128. if (srclen) {
  129. memcpy(dctx->buffer, src, srclen);
  130. dctx->bytes = srclen;
  131. }
  132. return 0;
  133. }
  134. static int p8_ghash_final(struct shash_desc *desc, u8 *out)
  135. {
  136. int i;
  137. struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
  138. struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
  139. if (dctx->bytes) {
  140. for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
  141. dctx->buffer[i] = 0;
  142. __ghash_block(ctx, dctx);
  143. dctx->bytes = 0;
  144. }
  145. memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
  146. return 0;
  147. }
/*
 * shash algorithm registration.  High cra_priority (1000) so this
 * driver is preferred over the generic "ghash" implementation.
 */
struct shash_alg p8_ghash_alg = {
	.digestsize = GHASH_DIGEST_SIZE,
	.init = p8_ghash_init,
	.update = p8_ghash_update,
	.final = p8_ghash_final,
	.setkey = p8_ghash_setkey,
	/*
	 * NOTE(review): descsize still reserves room for a fallback
	 * ghash_desc_ctx, but the visible code handles the no-SIMD case
	 * with gf128mul directly — looks like a leftover from the old
	 * fallback-tfm design; confirm before shrinking.
	 */
	.descsize = sizeof(struct p8_ghash_desc_ctx)
		+ sizeof(struct ghash_desc_ctx),
	.base = {
		.cra_name = "ghash",
		.cra_driver_name = "p8_ghash",
		.cra_priority = 1000,
		.cra_blocksize = GHASH_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct p8_ghash_ctx),
		.cra_module = THIS_MODULE,
	},
};