crc32c_glue.c

/* Glue code for CRC32C optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/crc32c-intel.c
 *
 * Copyright (C) 2008 Intel Corporation
 * Authors: Austin Zhang <austin_zhang@linux.intel.com>
 *          Kent Liu <kent.liu@intel.com>
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/kernel.h>
#include <linux/crc32.h>

#include <crypto/internal/hash.h>

#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

/*
 * Setting the seed allows arbitrary accumulators and flexible XOR policy
 * If your algorithm starts with ~0, then XOR with ~0 before you set
 * the seed.
 */
static int crc32c_sparc64_setkey(struct crypto_shash *hash, const u8 *key,
                                 unsigned int keylen)
{
        u32 *mctx = crypto_shash_ctx(hash);

        if (keylen != sizeof(u32)) {
                crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        *(__le32 *)mctx = le32_to_cpup((__le32 *)key);
        return 0;
}

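/*
 * Note that ->final()/->finup() below invert the accumulator on output, so
 * a conventional (already finalized) CRC32C value being resumed here should
 * be XORed with ~0 before it is passed to ->setkey(), as the comment above
 * describes.
 */
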
static int crc32c_sparc64_init(struct shash_desc *desc)
{
        u32 *mctx = crypto_shash_ctx(desc->tfm);
        u32 *crcp = shash_desc_ctx(desc);

        *crcp = *mctx;

        return 0;
}

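/*
 * crc32c_sparc64() is implemented outside this file, in the sparc64
 * assembly that drives the CRC32C opcode over whole 64-bit words.
 * crc32c_compute() hands it the largest multiple-of-8-bytes prefix and
 * finishes any remaining tail bytes with the generic software
 * __crc32c_le().
 */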
extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);

static void crc32c_compute(u32 *crcp, const u64 *data, unsigned int len)
{
        unsigned int asm_len;

        asm_len = len & ~7U;
        if (asm_len) {
                crc32c_sparc64(crcp, data, asm_len);
                data += asm_len / 8;
                len -= asm_len;
        }
        if (len)
                *crcp = __crc32c_le(*crcp, (const unsigned char *) data, len);
}

static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
                                 unsigned int len)
{
        u32 *crcp = shash_desc_ctx(desc);

        crc32c_compute(crcp, (const u64 *) data, len);

        return 0;
}

static int __crc32c_sparc64_finup(u32 *crcp, const u8 *data, unsigned int len,
                                  u8 *out)
{
        u32 tmp = *crcp;

        crc32c_compute(&tmp, (const u64 *) data, len);

        *(__le32 *) out = ~cpu_to_le32(tmp);
        return 0;
}

static int crc32c_sparc64_finup(struct shash_desc *desc, const u8 *data,
                                unsigned int len, u8 *out)
{
        return __crc32c_sparc64_finup(shash_desc_ctx(desc), data, len, out);
}

static int crc32c_sparc64_final(struct shash_desc *desc, u8 *out)
{
        u32 *crcp = shash_desc_ctx(desc);

        *(__le32 *) out = ~cpu_to_le32p(crcp);
        return 0;
}

static int crc32c_sparc64_digest(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return __crc32c_sparc64_finup(crypto_shash_ctx(desc->tfm), data, len,
                                      out);
}

static int crc32c_sparc64_cra_init(struct crypto_tfm *tfm)
{
        u32 *key = crypto_tfm_ctx(tfm);

        *key = ~0;

        return 0;
}

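/*
 * The shash below carries CRYPTO_ALG_OPTIONAL_KEY, so supplying a seed via
 * ->setkey() is optional; users that never set one start from the all-ones
 * accumulator installed by crc32c_sparc64_cra_init() above, which is the
 * standard CRC32C initial value.
 */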
#define CHKSUM_BLOCK_SIZE  1
#define CHKSUM_DIGEST_SIZE 4

static struct shash_alg alg = {
        .setkey         = crc32c_sparc64_setkey,
        .init           = crc32c_sparc64_init,
        .update         = crc32c_sparc64_update,
        .final          = crc32c_sparc64_final,
        .finup          = crc32c_sparc64_finup,
        .digest         = crc32c_sparc64_digest,
        .descsize       = sizeof(u32),
        .digestsize     = CHKSUM_DIGEST_SIZE,
        .base           = {
                .cra_name        = "crc32c",
                .cra_driver_name = "crc32c-sparc64",
                .cra_priority    = SPARC_CR_OPCODE_PRIORITY,
                .cra_flags       = CRYPTO_ALG_OPTIONAL_KEY,
                .cra_blocksize   = CHKSUM_BLOCK_SIZE,
                .cra_ctxsize     = sizeof(u32),
                .cra_alignmask   = 7,
                .cra_module      = THIS_MODULE,
                .cra_init        = crc32c_sparc64_cra_init,
        }
};

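/*
 * The CRC32C opcode is advertised in the Compatibility Feature Register
 * (%asr26).  The hwcap bit is checked first because, on CPUs without the
 * crypto unit, %asr26 itself is presumably not accessible and reading it
 * would trap.
 */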
static bool __init sparc64_has_crc32c_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_CRC32C))
                return false;

        return true;
}

static int __init crc32c_sparc64_mod_init(void)
{
        if (sparc64_has_crc32c_opcode()) {
                pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
                return crypto_register_shash(&alg);
        }
        pr_info("sparc64 crc32c opcode not available.\n");
        return -ENODEV;
}

static void __exit crc32c_sparc64_mod_fini(void)
{
        crypto_unregister_shash(&alg);
}

module_init(crc32c_sparc64_mod_init);
module_exit(crc32c_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");
MODULE_ALIAS_CRYPTO("crc32c");

#include "crop_devid.c"