/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
        return 6 + ctx->key_length / 4;
}

asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
                                     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
                                   u32 const rk[], u32 rounds, u8 mac[],
                                   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
                                   u32 const rk[], u32 rounds, u8 mac[],
                                   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
                                 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

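/*
 * Expand the user-supplied key using the Crypto Extensions helper; an invalid
 * key length is reported back via CRYPTO_TFM_RES_BAD_KEY_LEN.
 */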
static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
                      unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
        int ret;

        ret = ce_aes_expandkey(ctx, in_key, key_len);
        if (!ret)
                return 0;

        tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
        if ((authsize & 1) || authsize < 4)
                return -EINVAL;
        return 0;
}

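/*
 * Build the initial CBC-MAC block (B_0 in RFC 3610 terms) in maciv[] from the
 * caller-supplied IV, the message length and the tag size, and clear the
 * trailing counter bytes of req->iv so it can serve as the initial counter
 * block for the CTR encryption of the payload.
 */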
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
        u32 l = req->iv[0] + 1;

        /* verify that CCM dimension 'L' is set correctly in the IV */
        if (l < 2 || l > 8)
                return -EINVAL;

        /* verify that msglen can in fact be represented in L bytes */
        if (l < 4 && msglen >> (8 * l))
                return -EOVERFLOW;

        /*
         * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
         * uses a u32 type to represent msglen so the top 4 bytes are always 0.
         */
        n[0] = 0;
        n[1] = cpu_to_be32(msglen);

        memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

        /*
         * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
         * - bits 0..2  : max # of bytes required to represent msglen, minus 1
         *                (already set by caller)
         * - bits 3..5  : size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
         * - bit 6      : indicates presence of authenticate-only data
         */
        maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
        if (req->assoclen)
                maciv[0] |= 0x40;

        memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
        return 0;
}

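/*
 * Fold abytes of input into the running CBC-MAC. With NEON available this is
 * handed off to the assembler routine; otherwise the scalar AES cipher is
 * used, with *macp tracking how much of the current block has been filled.
 */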
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
                           u32 abytes, u32 *macp, bool use_neon)
{
        if (likely(use_neon)) {
                ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
                                     num_rounds(key));
        } else {
                if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
                        int added = min(abytes, AES_BLOCK_SIZE - *macp);

                        crypto_xor(&mac[*macp], in, added);

                        *macp += added;
                        in += added;
                        abytes -= added;
                }

                while (abytes >= AES_BLOCK_SIZE) {
                        __aes_arm64_encrypt(key->key_enc, mac, mac,
                                            num_rounds(key));
                        crypto_xor(mac, in, AES_BLOCK_SIZE);

                        in += AES_BLOCK_SIZE;
                        abytes -= AES_BLOCK_SIZE;
                }

                if (abytes > 0) {
                        __aes_arm64_encrypt(key->key_enc, mac, mac,
                                            num_rounds(key));
                        crypto_xor(mac, in, abytes);
                        *macp = abytes;
                }
        }
}

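/*
 * MAC the associated data. Per RFC 3610, the AAD is prefixed with an encoding
 * of its length (2 bytes for lengths below 0xff00, otherwise a 0xfffe marker
 * followed by a 4-byte length) before being fed in via the scatterlist walk.
 */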
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
                                   bool use_neon)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct __packed { __be16 l; __be32 h; u16 len; } ltag;
        struct scatter_walk walk;
        u32 len = req->assoclen;
        u32 macp = 0;

        /* prepend the AAD with a length tag */
        if (len < 0xff00) {
                ltag.l = cpu_to_be16(len);
                ltag.len = 2;
        } else {
                ltag.l = cpu_to_be16(0xfffe);
                put_unaligned_be32(len, &ltag.h);
                ltag.len = 6;
        }

        ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp, use_neon);
        scatterwalk_start(&walk, req->src);

        do {
                u32 n = scatterwalk_clamp(&walk, len);
                u8 *p;

                if (!n) {
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }
                p = scatterwalk_map(&walk);
                ccm_update_mac(ctx, mac, p, n, &macp, use_neon);
                len -= n;

                scatterwalk_unmap(p);
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);
        } while (len);
}

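/*
 * Scalar fallback for the combined CTR encryption / CBC-MAC update, used when
 * the NEON unit cannot be claimed (e.g. when called from interrupt context).
 * The final step encrypts the preserved initial counter block (iv0) and XORs
 * it into the MAC to produce the tag.
 */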
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
                              struct crypto_aes_ctx *ctx, bool enc)
{
        u8 buf[AES_BLOCK_SIZE];
        int err = 0;

        while (walk->nbytes) {
                int blocks = walk->nbytes / AES_BLOCK_SIZE;
                u32 tail = walk->nbytes % AES_BLOCK_SIZE;
                u8 *dst = walk->dst.virt.addr;
                u8 *src = walk->src.virt.addr;
                u32 nbytes = walk->nbytes;

                if (nbytes == walk->total && tail > 0) {
                        blocks++;
                        tail = 0;
                }

                do {
                        u32 bsize = AES_BLOCK_SIZE;

                        if (nbytes < AES_BLOCK_SIZE)
                                bsize = nbytes;

                        crypto_inc(walk->iv, AES_BLOCK_SIZE);
                        __aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
                                            num_rounds(ctx));
                        __aes_arm64_encrypt(ctx->key_enc, mac, mac,
                                            num_rounds(ctx));
                        if (enc)
                                crypto_xor(mac, src, bsize);
                        crypto_xor_cpy(dst, src, buf, bsize);
                        if (!enc)
                                crypto_xor(mac, dst, bsize);

                        dst += bsize;
                        src += bsize;
                        nbytes -= bsize;
                } while (--blocks);

                err = skcipher_walk_done(walk, tail);
        }

        if (!err) {
                __aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
                __aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
                crypto_xor(mac, buf, AES_BLOCK_SIZE);
        }
        return err;
}

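/*
 * AEAD encrypt path: compute the CBC-MAC over the AAD and the plaintext while
 * CTR-encrypting the payload, then append the (possibly truncated) auth tag
 * to the end of the destination scatterlist.
 */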
static int ccm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct skcipher_walk walk;
        u8 __aligned(8) mac[AES_BLOCK_SIZE];
        u8 buf[AES_BLOCK_SIZE];
        u32 len = req->cryptlen;
        bool use_neon = may_use_simd();
        int err;

        err = ccm_init_mac(req, mac, len);
        if (err)
                return err;

        if (likely(use_neon))
                kernel_neon_begin();

        if (req->assoclen)
                ccm_calculate_auth_mac(req, mac, use_neon);

        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

        err = skcipher_walk_aead_encrypt(&walk, req, true);

        if (likely(use_neon)) {
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

                        if (walk.nbytes == walk.total)
                                tail = 0;

                        ce_aes_ccm_encrypt(walk.dst.virt.addr,
                                           walk.src.virt.addr,
                                           walk.nbytes - tail, ctx->key_enc,
                                           num_rounds(ctx), mac, walk.iv);

                        err = skcipher_walk_done(&walk, tail);
                }
                if (!err)
                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
                                         num_rounds(ctx));

                kernel_neon_end();
        } else {
                err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
        }
        if (err)
                return err;

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
                                 crypto_aead_authsize(aead), 1);

        return 0;
}

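/*
 * AEAD decrypt path: same structure as ccm_encrypt(), except that the payload
 * length excludes the trailing tag and the computed MAC is compared against
 * the stored tag in constant time via crypto_memneq().
 */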
static int ccm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        unsigned int authsize = crypto_aead_authsize(aead);
        struct skcipher_walk walk;
        u8 __aligned(8) mac[AES_BLOCK_SIZE];
        u8 buf[AES_BLOCK_SIZE];
        u32 len = req->cryptlen - authsize;
        bool use_neon = may_use_simd();
        int err;

        err = ccm_init_mac(req, mac, len);
        if (err)
                return err;

        if (likely(use_neon))
                kernel_neon_begin();

        if (req->assoclen)
                ccm_calculate_auth_mac(req, mac, use_neon);

        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

        err = skcipher_walk_aead_decrypt(&walk, req, true);

        if (likely(use_neon)) {
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

                        if (walk.nbytes == walk.total)
                                tail = 0;

                        ce_aes_ccm_decrypt(walk.dst.virt.addr,
                                           walk.src.virt.addr,
                                           walk.nbytes - tail, ctx->key_enc,
                                           num_rounds(ctx), mac, walk.iv);

                        err = skcipher_walk_done(&walk, tail);
                }
                if (!err)
                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
                                         num_rounds(ctx));

                kernel_neon_end();
        } else {
                err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
        }

        if (err)
                return err;

        /* compare calculated auth tag with the stored one */
        scatterwalk_map_and_copy(buf, req->src,
                                 req->assoclen + req->cryptlen - authsize,
                                 authsize, 0);

        if (crypto_memneq(mac, buf, authsize))
                return -EBADMSG;
        return 0;
}

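/*
 * CCM is registered as a synchronous AEAD with a block size of 1, since the
 * underlying CTR mode turns AES into a stream cipher; the IV and maximum tag
 * size both equal the AES block size.
 */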
static struct aead_alg ccm_aes_alg = {
        .base = {
                .cra_name               = "ccm(aes)",
                .cra_driver_name        = "ccm-aes-ce",
                .cra_priority           = 300,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .ivsize         = AES_BLOCK_SIZE,
        .chunksize      = AES_BLOCK_SIZE,
        .maxauthsize    = AES_BLOCK_SIZE,
        .setkey         = ccm_setkey,
        .setauthsize    = ccm_setauthsize,
        .encrypt        = ccm_encrypt,
        .decrypt        = ccm_decrypt,
};

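/* only register when the CPU advertises the AES instructions (HWCAP_AES) */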
static int __init aes_mod_init(void)
{
        if (!(elf_hwcap & HWCAP_AES))
                return -ENODEV;
        return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
        crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");