aes-ctr-fallback.h

/*
 * Fallback for sync aes(ctr) in contexts where kernel mode NEON
 * is not allowed
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* scalar AES block encryption, usable without kernel mode NEON */
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
					   struct skcipher_request *req)
{
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

		if (nbytes < walk.total) {
			/*
			 * This chunk does not cover the end of the request:
			 * process whole blocks only, and hand any leftover
			 * bytes back to the walk so they are revisited in
			 * the next iteration.
			 */
			nbytes = round_down(nbytes, AES_BLOCK_SIZE);
			tail = walk.nbytes % AES_BLOCK_SIZE;
		}

		do {
			int bsize = min(nbytes, AES_BLOCK_SIZE);

			/*
			 * Encrypt the counter block to produce one block of
			 * keystream; 6 + key_length / 4 gives the round
			 * count (10/12/14 for 128/192/256-bit keys).
			 */
			__aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
					    6 + ctx->key_length / 4);
			/* XOR keystream into the data; bsize may be short
			 * for a trailing partial block */
			crypto_xor_cpy(dst, src, buf, bsize);
			/* bump the big-endian counter for the next block */
			crypto_inc(walk.iv, AES_BLOCK_SIZE);

			dst += AES_BLOCK_SIZE;
			src += AES_BLOCK_SIZE;
			nbytes -= AES_BLOCK_SIZE;
		} while (nbytes > 0);

		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}
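
/*
 * Usage sketch (not part of this header): a minimal example of how a
 * driver's synchronous ctr path might dispatch to this fallback when
 * kernel mode NEON is off limits, e.g. in softirq context. The names
 * may_use_simd() and ctr_encrypt() follow the pattern of the arm64
 * aes-glue code; the exact glue below is an assumption for
 * illustration, not a definitive caller.
 */
#if 0
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* NEON registers may not be touched here: use the scalar fallback */
	if (!may_use_simd())
		return aes_ctr_encrypt_fallback(ctx, req);

	return ctr_encrypt(req);	/* hypothetical NEON-accelerated path */
}
#endif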