/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include "crypt_s390.h"

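/*
 * AES_KEYLEN_* are bit flags describing which AES key lengths the CPACF
 * hardware supports; keylen_flag is filled in at module init time.  ctrblk
 * is a shared page used to build a run of consecutive counter blocks for
 * CTR mode, protected by ctrblk_lock.
 */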
#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;

struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

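/*
 * Parameter block passed to crypt_s390_pcc() when computing the initial
 * XTS parameter.  The layout (key, tweak, block sequence number, bit
 * index, XTS parameter) appears to mirror the CPACF PCC parameter block.
 */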
struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and a software
 * fallback is required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}

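/*
 * Propagate the request flags to the fallback cipher, set the key there
 * and mirror any result flags back so the caller sees the fallback's
 * verdict on the key.
 */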
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}

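/*
 * Single-block encrypt/decrypt.  When the key length is handled by the
 * hardware, the block is run through the CPACF KM operation for the
 * matching key size; otherwise the generic fallback cipher is used.
 */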
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

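/*
 * Registration entry for the plain "aes" single-block cipher.  A caller
 * would normally reach it through the crypto API, roughly along these
 * lines (illustrative sketch only, no error handling):
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *	crypto_cipher_setkey(tfm, key, 16);
 *	crypto_cipher_encrypt_one(tfm, dst, src);
 *	crypto_free_cipher(tfm);
 */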
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int fallback_blk_dec(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

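/*
 * Walk the scatterlists and feed only whole AES blocks to the CPACF KM
 * operation; any partial tail is handed back via blkcipher_walk_done().
 */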
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

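/*
 * CBC via the CPACF KMC operation.  KMC keeps the chaining value in its
 * parameter block, so IV and key are packed into a local struct and the
 * updated IV is copied back to the walk once all data has been processed.
 */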
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src, unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src, unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

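/*
 * XTS keys arrive as data key || tweak key.  The hardware covers the
 * 256-bit (2 x AES-128) and 512-bit (2 x AES-256) cases; the 384-bit
 * AES-192 variant has no CPACF support and is routed to the fallback.
 * The 128-bit halves are stored in the upper half of the 32-byte buffers
 * so that xts_aes_crypt() can address the correctly sized key by offset.
 */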
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		err = xts_fallback_setkey(tfm, in_key, key_len);
		if (err)
			return err;
		break;
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc_key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}

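/*
 * offset selects the second half of the 32-byte key buffers when only a
 * 128-bit key is in use: (key_len >> 1) & 0x10 is 16 for 32-byte XTS keys
 * and 0 for 64-byte ones.  PCC derives the initial XTS parameter from the
 * tweak, and KM then processes whole blocks using that parameter.
 */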
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	struct pcc_param pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	if (!nbytes)
		goto out;

	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
	ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
	if (ret < 0)
		return -EIO;

	memcpy(xts_param.key, xts_ctx->key, 32);
	memcpy(xts_param.init, pcc_param.xts, 16);
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= xts_fallback_init,
	.cra_exit		= xts_fallback_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aes_set_key,
			.encrypt	= xts_aes_encrypt,
			.decrypt	= xts_aes_decrypt,
		}
	}
};

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

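/*
 * Fill the shared counter page with consecutive counter values so that a
 * single KMCTR call can cover up to PAGE_SIZE bytes of data.  The first
 * block is expected to already hold the current counter.
 */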
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
		       AES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
	}
	return n;
}

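/*
 * CTR mode: if the shared counter page can be locked it is used to process
 * several blocks per KMCTR call, otherwise a single on-stack counter block
 * is used.  A final partial block is encrypted into a temporary buffer and
 * only nbytes of it are copied to the destination.
 */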
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;

	if (!walk->nbytes)
		return ret;

	if (spin_trylock(&ctrblk_lock))
		ctrptr = ctrblk;

	memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			else
				n = AES_BLOCK_SIZE;
			ret = crypt_s390_kmctr(func, sctx->key, out, in,
					       n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
				return -EIO;
			}
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrptr, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	if (ctrptr == ctrblk) {
		if (nbytes)
			memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
		else
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	} else {
		if (!nbytes)
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
	}

	return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_aes_set_key,
			.encrypt	= ctr_aes_encrypt,
			.decrypt	= ctr_aes_decrypt,
		}
	}
};

static int ctr_aes_alg_reg;

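/*
 * Probe the available CPACF functions, register the algorithms the
 * hardware can back, and unwind the registrations in reverse order if a
 * later registration step fails.
 */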
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		xts_aes_alg_reg = 1;
	}

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_s390_fini(void)
{
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	}
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");