/* sha512_glue.c */
/* Glue code for SHA512 hashing optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon crypto/sha512_generic.c
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2003 Kyle McMartin <kyle@debian.org>
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/string.h>
#include <linux/types.h>
#include <crypto/sha.h>

#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"
  20. asmlinkage void sha512_sparc64_transform(u64 *digest, const char *data,
  21. unsigned int rounds);
  22. static int sha512_sparc64_init(struct shash_desc *desc)
  23. {
  24. struct sha512_state *sctx = shash_desc_ctx(desc);
  25. sctx->state[0] = SHA512_H0;
  26. sctx->state[1] = SHA512_H1;
  27. sctx->state[2] = SHA512_H2;
  28. sctx->state[3] = SHA512_H3;
  29. sctx->state[4] = SHA512_H4;
  30. sctx->state[5] = SHA512_H5;
  31. sctx->state[6] = SHA512_H6;
  32. sctx->state[7] = SHA512_H7;
  33. sctx->count[0] = sctx->count[1] = 0;
  34. return 0;
  35. }
  36. static int sha384_sparc64_init(struct shash_desc *desc)
  37. {
  38. struct sha512_state *sctx = shash_desc_ctx(desc);
  39. sctx->state[0] = SHA384_H0;
  40. sctx->state[1] = SHA384_H1;
  41. sctx->state[2] = SHA384_H2;
  42. sctx->state[3] = SHA384_H3;
  43. sctx->state[4] = SHA384_H4;
  44. sctx->state[5] = SHA384_H5;
  45. sctx->state[6] = SHA384_H6;
  46. sctx->state[7] = SHA384_H7;
  47. sctx->count[0] = sctx->count[1] = 0;
  48. return 0;
  49. }
  50. static void __sha512_sparc64_update(struct sha512_state *sctx, const u8 *data,
  51. unsigned int len, unsigned int partial)
  52. {
  53. unsigned int done = 0;
  54. if ((sctx->count[0] += len) < len)
  55. sctx->count[1]++;
  56. if (partial) {
  57. done = SHA512_BLOCK_SIZE - partial;
  58. memcpy(sctx->buf + partial, data, done);
  59. sha512_sparc64_transform(sctx->state, sctx->buf, 1);
  60. }
  61. if (len - done >= SHA512_BLOCK_SIZE) {
  62. const unsigned int rounds = (len - done) / SHA512_BLOCK_SIZE;
  63. sha512_sparc64_transform(sctx->state, data + done, rounds);
  64. done += rounds * SHA512_BLOCK_SIZE;
  65. }
  66. memcpy(sctx->buf, data + done, len - done);
  67. }
  68. static int sha512_sparc64_update(struct shash_desc *desc, const u8 *data,
  69. unsigned int len)
  70. {
  71. struct sha512_state *sctx = shash_desc_ctx(desc);
  72. unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;
  73. /* Handle the fast case right here */
  74. if (partial + len < SHA512_BLOCK_SIZE) {
  75. if ((sctx->count[0] += len) < len)
  76. sctx->count[1]++;
  77. memcpy(sctx->buf + partial, data, len);
  78. } else
  79. __sha512_sparc64_update(sctx, data, len, partial);
  80. return 0;
  81. }
  82. static int sha512_sparc64_final(struct shash_desc *desc, u8 *out)
  83. {
  84. struct sha512_state *sctx = shash_desc_ctx(desc);
  85. unsigned int i, index, padlen;
  86. __be64 *dst = (__be64 *)out;
  87. __be64 bits[2];
  88. static const u8 padding[SHA512_BLOCK_SIZE] = { 0x80, };
  89. /* Save number of bits */
  90. bits[1] = cpu_to_be64(sctx->count[0] << 3);
  91. bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);
  92. /* Pad out to 112 mod 128 and append length */
  93. index = sctx->count[0] % SHA512_BLOCK_SIZE;
  94. padlen = (index < 112) ? (112 - index) : ((SHA512_BLOCK_SIZE+112) - index);
  95. /* We need to fill a whole block for __sha512_sparc64_update() */
  96. if (padlen <= 112) {
  97. if ((sctx->count[0] += padlen) < padlen)
  98. sctx->count[1]++;
  99. memcpy(sctx->buf + index, padding, padlen);
  100. } else {
  101. __sha512_sparc64_update(sctx, padding, padlen, index);
  102. }
  103. __sha512_sparc64_update(sctx, (const u8 *)&bits, sizeof(bits), 112);
  104. /* Store state in digest */
  105. for (i = 0; i < 8; i++)
  106. dst[i] = cpu_to_be64(sctx->state[i]);
  107. /* Wipe context */
  108. memset(sctx, 0, sizeof(*sctx));
  109. return 0;
  110. }
  111. static int sha384_sparc64_final(struct shash_desc *desc, u8 *hash)
  112. {
  113. u8 D[64];
  114. sha512_sparc64_final(desc, D);
  115. memcpy(hash, D, 48);
  116. memzero_explicit(D, 64);
  117. return 0;
  118. }
  119. static struct shash_alg sha512 = {
  120. .digestsize = SHA512_DIGEST_SIZE,
  121. .init = sha512_sparc64_init,
  122. .update = sha512_sparc64_update,
  123. .final = sha512_sparc64_final,
  124. .descsize = sizeof(struct sha512_state),
  125. .base = {
  126. .cra_name = "sha512",
  127. .cra_driver_name= "sha512-sparc64",
  128. .cra_priority = SPARC_CR_OPCODE_PRIORITY,
  129. .cra_flags = CRYPTO_ALG_TYPE_SHASH,
  130. .cra_blocksize = SHA512_BLOCK_SIZE,
  131. .cra_module = THIS_MODULE,
  132. }
  133. };
  134. static struct shash_alg sha384 = {
  135. .digestsize = SHA384_DIGEST_SIZE,
  136. .init = sha384_sparc64_init,
  137. .update = sha512_sparc64_update,
  138. .final = sha384_sparc64_final,
  139. .descsize = sizeof(struct sha512_state),
  140. .base = {
  141. .cra_name = "sha384",
  142. .cra_driver_name= "sha384-sparc64",
  143. .cra_priority = SPARC_CR_OPCODE_PRIORITY,
  144. .cra_flags = CRYPTO_ALG_TYPE_SHASH,
  145. .cra_blocksize = SHA384_BLOCK_SIZE,
  146. .cra_module = THIS_MODULE,
  147. }
  148. };
  149. static bool __init sparc64_has_sha512_opcode(void)
  150. {
  151. unsigned long cfr;
  152. if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
  153. return false;
  154. __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
  155. if (!(cfr & CFR_SHA512))
  156. return false;
  157. return true;
  158. }
  159. static int __init sha512_sparc64_mod_init(void)
  160. {
  161. if (sparc64_has_sha512_opcode()) {
  162. int ret = crypto_register_shash(&sha384);
  163. if (ret < 0)
  164. return ret;
  165. ret = crypto_register_shash(&sha512);
  166. if (ret < 0) {
  167. crypto_unregister_shash(&sha384);
  168. return ret;
  169. }
  170. pr_info("Using sparc64 sha512 opcode optimized SHA-512/SHA-384 implementation\n");
  171. return 0;
  172. }
  173. pr_info("sparc64 sha512 opcode not available.\n");
  174. return -ENODEV;
  175. }
  176. static void __exit sha512_sparc64_mod_fini(void)
  177. {
  178. crypto_unregister_shash(&sha384);
  179. crypto_unregister_shash(&sha512);
  180. }
  181. module_init(sha512_sparc64_mod_init);
  182. module_exit(sha512_sparc64_mod_fini);
  183. MODULE_LICENSE("GPL");
  184. MODULE_DESCRIPTION("SHA-384 and SHA-512 Secure Hash Algorithm, sparc64 sha512 opcode accelerated");
  185. MODULE_ALIAS_CRYPTO("sha384");
  186. MODULE_ALIAS_CRYPTO("sha512");
  187. #include "crop_devid.c"