/* sha256-glue.c */
  1. /*
  2. * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
  3. *
  4. * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License as published by the Free
  8. * Software Foundation; either version 2 of the License, or (at your option)
  9. * any later version.
  10. *
  11. */
  12. #include <asm/hwcap.h>
  13. #include <asm/neon.h>
  14. #include <asm/simd.h>
  15. #include <crypto/internal/hash.h>
  16. #include <crypto/sha.h>
  17. #include <crypto/sha256_base.h>
  18. #include <linux/cryptohash.h>
  19. #include <linux/types.h>
  20. #include <linux/string.h>
/* Module metadata: one driver providing both SHA-224 and SHA-256. */
MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

/*
 * Scalar SHA-256 block transform (arm64 port of the OpenSSL assembly).
 * Safe to call in any context; exported for use by other kernel code.
 */
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);

/*
 * NEON block transform. Callers must bracket use with
 * kernel_neon_begin()/kernel_neon_end() and check may_use_simd() first.
 */
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);
  32. static int sha256_update(struct shash_desc *desc, const u8 *data,
  33. unsigned int len)
  34. {
  35. return sha256_base_do_update(desc, data, len,
  36. (sha256_block_fn *)sha256_block_data_order);
  37. }
  38. static int sha256_finup(struct shash_desc *desc, const u8 *data,
  39. unsigned int len, u8 *out)
  40. {
  41. if (len)
  42. sha256_base_do_update(desc, data, len,
  43. (sha256_block_fn *)sha256_block_data_order);
  44. sha256_base_do_finalize(desc,
  45. (sha256_block_fn *)sha256_block_data_order);
  46. return sha256_base_finish(desc, out);
  47. }
/* shash ->final callback: finalize with no additional input. */
static int sha256_final(struct shash_desc *desc, u8 *out)
{
	return sha256_finup(desc, NULL, 0, out);
}
/*
 * Scalar (always-available) algorithm registrations. Priority 125 sits
 * above the generic C implementations but below the NEON variants
 * registered at 150 when HWCAP_ASIMD is present.
 */
static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update,
	.final			= sha256_final,
	.finup			= sha256_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	/* SHA-224 is SHA-256 with a different IV and truncated output. */
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update,
	.final			= sha256_final,
	.finup			= sha256_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
/*
 * shash ->update callback for the NEON implementation.
 *
 * Falls back to the scalar transform when SIMD is unusable in the
 * current context (e.g. hardirq). Otherwise, processes the input under
 * kernel_neon_begin()/kernel_neon_end(), which disables preemption.
 */
static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!may_use_simd())
		return sha256_base_do_update(desc, data, len,
				(sha256_block_fn *)sha256_block_data_order);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPT) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			/* Limit the chunk so it completes exactly one block. */
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk,
				      (sha256_block_fn *)sha256_block_neon);
		kernel_neon_end();

		data += chunk;
		len -= chunk;
	}
	return 0;
}
  104. static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
  105. unsigned int len, u8 *out)
  106. {
  107. if (!may_use_simd()) {
  108. if (len)
  109. sha256_base_do_update(desc, data, len,
  110. (sha256_block_fn *)sha256_block_data_order);
  111. sha256_base_do_finalize(desc,
  112. (sha256_block_fn *)sha256_block_data_order);
  113. } else {
  114. if (len)
  115. sha256_update_neon(desc, data, len);
  116. kernel_neon_begin();
  117. sha256_base_do_finalize(desc,
  118. (sha256_block_fn *)sha256_block_neon);
  119. kernel_neon_end();
  120. }
  121. return sha256_base_finish(desc, out);
  122. }
/* shash ->final callback (NEON): finalize with no additional input. */
static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}
/*
 * NEON algorithm registrations. Priority 150 outranks the scalar
 * variants (125); only registered when the CPU advertises HWCAP_ASIMD.
 */
static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	/* SHA-224 shares the SHA-256 compression function. */
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
  152. static int __init sha256_mod_init(void)
  153. {
  154. int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));
  155. if (ret)
  156. return ret;
  157. if (elf_hwcap & HWCAP_ASIMD) {
  158. ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
  159. if (ret)
  160. crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
  161. }
  162. return ret;
  163. }
/*
 * Module exit: unregister the NEON variants only if they were
 * registered at init time (mirrors the HWCAP_ASIMD check there),
 * then unregister the scalar algorithms.
 */
static void __exit sha256_mod_fini(void)
{
	if (elf_hwcap & HWCAP_ASIMD)
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);