/* serpent_generic.c */
  1. /*
  2. * Cryptographic API.
  3. *
  4. * Serpent Cipher Algorithm.
  5. *
  6. * Copyright (C) 2002 Dag Arne Osvik <osvik@ii.uib.no>
  7. * 2003 Herbert Valerio Riedel <hvr@gnu.org>
  8. *
  9. * Added tnepres support:
  10. * Ruben Jesus Garcia Hernandez <ruben@ugr.es>, 18.10.2004
  11. * Based on code by hvr
  12. *
  13. * This program is free software; you can redistribute it and/or modify
  14. * it under the terms of the GNU General Public License as published by
  15. * the Free Software Foundation; either version 2 of the License, or
  16. * (at your option) any later version.
  17. */
  18. #include <linux/init.h>
  19. #include <linux/module.h>
  20. #include <linux/errno.h>
  21. #include <asm/byteorder.h>
  22. #include <linux/crypto.h>
  23. #include <linux/types.h>
  24. #include <crypto/serpent.h>
/* Key is padded to the maximum of 256 bits before round key generation.
 * Any key length <= 256 bits (32 bytes) is allowed by the algorithm.
 */

/* Golden-ratio constant of the Serpent key-schedule recurrence. */
#define PHI 0x9e3779b9UL

/*
 * One step of the key-schedule recurrence: fold d, c, a, PHI and the round
 * counter i into b, rotate left by 11, and store the new word in k[j].
 * Relies on a variable `u32 *k` being in scope at the expansion site.
 */
#define keyiter(a, b, c, d, i, j) \
({ b ^= d; b ^= c; b ^= a; b ^= PHI ^ i; b = rol32(b, 11); k[j] = b; })

/* Load four consecutive round-key words k[i..i+3] into x0..x3. */
#define loadkeys(x0, x1, x2, x3, i) \
({ x0 = k[i]; x1 = k[i+1]; x2 = k[i+2]; x3 = k[i+3]; })

/* Store x0..x3 into four consecutive round-key words k[i..i+3]. */
#define storekeys(x0, x1, x2, x3, i) \
({ k[i] = x0; k[i+1] = x1; k[i+2] = x2; k[i+3] = x3; })

/* Store the current group at offset s, then load the next group from l. */
#define store_and_load_keys(x0, x1, x2, x3, s, l) \
({ storekeys(x0, x1, x2, x3, s); loadkeys(x0, x1, x2, x3, l); })

/* XOR round key i into the state (used for the first and last rounds). */
#define K(x0, x1, x2, x3, i) ({ \
x3 ^= k[4*(i)+3]; x2 ^= k[4*(i)+2]; \
x1 ^= k[4*(i)+1]; x0 ^= k[4*(i)+0]; \
})

/*
 * Encryption round tail: Serpent's linear transformation (rotates, shifts
 * and XOR mixing of the four state words, x4 as scratch) followed by the
 * XOR with round key i.
 */
#define LK(x0, x1, x2, x3, x4, i) ({ \
x0 = rol32(x0, 13);\
x2 = rol32(x2, 3); x1 ^= x0; x4 = x0 << 3; \
x3 ^= x2; x1 ^= x2; \
x1 = rol32(x1, 1); x3 ^= x4; \
x3 = rol32(x3, 7); x4 = x1; \
x0 ^= x1; x4 <<= 7; x2 ^= x3; \
x0 ^= x3; x2 ^= x4; x3 ^= k[4*i+3]; \
x1 ^= k[4*i+1]; x0 = rol32(x0, 5); x2 = rol32(x2, 22);\
x0 ^= k[4*i+0]; x2 ^= k[4*i+2]; \
})

/*
 * Decryption round head: XOR with round key i followed by the inverse
 * linear transformation — this exactly undoes LK() step by step.
 */
#define KL(x0, x1, x2, x3, x4, i) ({ \
x0 ^= k[4*i+0]; x1 ^= k[4*i+1]; x2 ^= k[4*i+2]; \
x3 ^= k[4*i+3]; x0 = ror32(x0, 5); x2 = ror32(x2, 22);\
x4 = x1; x2 ^= x3; x0 ^= x3; \
x4 <<= 7; x0 ^= x1; x1 = ror32(x1, 1); \
x2 ^= x4; x3 = ror32(x3, 7); x4 = x0 << 3; \
x1 ^= x0; x3 ^= x4; x0 = ror32(x0, 13);\
x1 ^= x2; x3 ^= x2; x2 = ror32(x2, 3); \
})
/*
 * S0..S7: the eight Serpent S-boxes in bitsliced form.  Each operates on
 * the four 32-bit state words x0..x3 in parallel (one S-box lookup per bit
 * position), using x4 as scratch.  The output is left spread across the
 * five registers; the caller's next round passes them back in the
 * appropriate permuted order.
 */

/* Bitsliced Serpent S-box 0 (x4 is scratch). */
#define S0(x0, x1, x2, x3, x4) ({ \
x4 = x3; \
x3 |= x0; x0 ^= x4; x4 ^= x2; \
x4 = ~x4; x3 ^= x1; x1 &= x0; \
x1 ^= x4; x2 ^= x0; x0 ^= x3; \
x4 |= x0; x0 ^= x2; x2 &= x1; \
x3 ^= x2; x1 = ~x1; x2 ^= x4; \
x1 ^= x2; \
})

/* Bitsliced Serpent S-box 1. */
#define S1(x0, x1, x2, x3, x4) ({ \
x4 = x1; \
x1 ^= x0; x0 ^= x3; x3 = ~x3; \
x4 &= x1; x0 |= x1; x3 ^= x2; \
x0 ^= x3; x1 ^= x3; x3 ^= x4; \
x1 |= x4; x4 ^= x2; x2 &= x0; \
x2 ^= x1; x1 |= x0; x0 = ~x0; \
x0 ^= x2; x4 ^= x1; \
})

/* Bitsliced Serpent S-box 2. */
#define S2(x0, x1, x2, x3, x4) ({ \
x3 = ~x3; \
x1 ^= x0; x4 = x0; x0 &= x2; \
x0 ^= x3; x3 |= x4; x2 ^= x1; \
x3 ^= x1; x1 &= x0; x0 ^= x2; \
x2 &= x3; x3 |= x1; x0 = ~x0; \
x3 ^= x0; x4 ^= x0; x0 ^= x2; \
x1 |= x2; \
})

/* Bitsliced Serpent S-box 3. */
#define S3(x0, x1, x2, x3, x4) ({ \
x4 = x1; \
x1 ^= x3; x3 |= x0; x4 &= x0; \
x0 ^= x2; x2 ^= x1; x1 &= x3; \
x2 ^= x3; x0 |= x4; x4 ^= x3; \
x1 ^= x0; x0 &= x3; x3 &= x4; \
x3 ^= x2; x4 |= x1; x2 &= x1; \
x4 ^= x3; x0 ^= x3; x3 ^= x2; \
})

/* Bitsliced Serpent S-box 4. */
#define S4(x0, x1, x2, x3, x4) ({ \
x4 = x3; \
x3 &= x0; x0 ^= x4; \
x3 ^= x2; x2 |= x4; x0 ^= x1; \
x4 ^= x3; x2 |= x0; \
x2 ^= x1; x1 &= x0; \
x1 ^= x4; x4 &= x2; x2 ^= x3; \
x4 ^= x0; x3 |= x1; x1 = ~x1; \
x3 ^= x0; \
})

/* Bitsliced Serpent S-box 5. */
#define S5(x0, x1, x2, x3, x4) ({ \
x4 = x1; x1 |= x0; \
x2 ^= x1; x3 = ~x3; x4 ^= x0; \
x0 ^= x2; x1 &= x4; x4 |= x3; \
x4 ^= x0; x0 &= x3; x1 ^= x3; \
x3 ^= x2; x0 ^= x1; x2 &= x4; \
x1 ^= x2; x2 &= x0; \
x3 ^= x2; \
})

/* Bitsliced Serpent S-box 6. */
#define S6(x0, x1, x2, x3, x4) ({ \
x4 = x1; \
x3 ^= x0; x1 ^= x2; x2 ^= x0; \
x0 &= x3; x1 |= x3; x4 = ~x4; \
x0 ^= x1; x1 ^= x2; \
x3 ^= x4; x4 ^= x0; x2 &= x0; \
x4 ^= x1; x2 ^= x3; x3 &= x1; \
x3 ^= x0; x1 ^= x2; \
})

/* Bitsliced Serpent S-box 7. */
#define S7(x0, x1, x2, x3, x4) ({ \
x1 = ~x1; \
x4 = x1; x0 = ~x0; x1 &= x2; \
x1 ^= x3; x3 |= x4; x4 ^= x2; \
x2 ^= x3; x3 ^= x0; x0 |= x1; \
x2 &= x0; x0 ^= x4; x4 ^= x3; \
x3 &= x0; x4 ^= x1; \
x2 ^= x4; x3 ^= x1; x4 |= x0; \
x4 ^= x1; \
})
/*
 * SI0..SI7: the inverse Serpent S-boxes in bitsliced form, used for
 * decryption.  Same register convention as S0..S7: four state words plus
 * x4 as scratch, output permuted across the five registers.
 */

/* Bitsliced inverse Serpent S-box 0. */
#define SI0(x0, x1, x2, x3, x4) ({ \
x4 = x3; x1 ^= x0; \
x3 |= x1; x4 ^= x1; x0 = ~x0; \
x2 ^= x3; x3 ^= x0; x0 &= x1; \
x0 ^= x2; x2 &= x3; x3 ^= x4; \
x2 ^= x3; x1 ^= x3; x3 &= x0; \
x1 ^= x0; x0 ^= x2; x4 ^= x3; \
})

/* Bitsliced inverse Serpent S-box 1. */
#define SI1(x0, x1, x2, x3, x4) ({ \
x1 ^= x3; x4 = x0; \
x0 ^= x2; x2 = ~x2; x4 |= x1; \
x4 ^= x3; x3 &= x1; x1 ^= x2; \
x2 &= x4; x4 ^= x1; x1 |= x3; \
x3 ^= x0; x2 ^= x0; x0 |= x4; \
x2 ^= x4; x1 ^= x0; \
x4 ^= x1; \
})

/* Bitsliced inverse Serpent S-box 2. */
#define SI2(x0, x1, x2, x3, x4) ({ \
x2 ^= x1; x4 = x3; x3 = ~x3; \
x3 |= x2; x2 ^= x4; x4 ^= x0; \
x3 ^= x1; x1 |= x2; x2 ^= x0; \
x1 ^= x4; x4 |= x3; x2 ^= x3; \
x4 ^= x2; x2 &= x1; \
x2 ^= x3; x3 ^= x4; x4 ^= x0; \
})

/* Bitsliced inverse Serpent S-box 3. */
#define SI3(x0, x1, x2, x3, x4) ({ \
x2 ^= x1; \
x4 = x1; x1 &= x2; \
x1 ^= x0; x0 |= x4; x4 ^= x3; \
x0 ^= x3; x3 |= x1; x1 ^= x2; \
x1 ^= x3; x0 ^= x2; x2 ^= x3; \
x3 &= x1; x1 ^= x0; x0 &= x2; \
x4 ^= x3; x3 ^= x0; x0 ^= x1; \
})

/* Bitsliced inverse Serpent S-box 4. */
#define SI4(x0, x1, x2, x3, x4) ({ \
x2 ^= x3; x4 = x0; x0 &= x1; \
x0 ^= x2; x2 |= x3; x4 = ~x4; \
x1 ^= x0; x0 ^= x2; x2 &= x4; \
x2 ^= x0; x0 |= x4; \
x0 ^= x3; x3 &= x2; \
x4 ^= x3; x3 ^= x1; x1 &= x0; \
x4 ^= x1; x0 ^= x3; \
})

/* Bitsliced inverse Serpent S-box 5. */
#define SI5(x0, x1, x2, x3, x4) ({ \
x4 = x1; x1 |= x2; \
x2 ^= x4; x1 ^= x3; x3 &= x4; \
x2 ^= x3; x3 |= x0; x0 = ~x0; \
x3 ^= x2; x2 |= x0; x4 ^= x1; \
x2 ^= x4; x4 &= x0; x0 ^= x1; \
x1 ^= x3; x0 &= x2; x2 ^= x3; \
x0 ^= x2; x2 ^= x4; x4 ^= x3; \
})

/* Bitsliced inverse Serpent S-box 6. */
#define SI6(x0, x1, x2, x3, x4) ({ \
x0 ^= x2; \
x4 = x0; x0 &= x3; x2 ^= x3; \
x0 ^= x2; x3 ^= x1; x2 |= x4; \
x2 ^= x3; x3 &= x0; x0 = ~x0; \
x3 ^= x1; x1 &= x2; x4 ^= x0; \
x3 ^= x4; x4 ^= x2; x0 ^= x1; \
x2 ^= x0; \
})

/* Bitsliced inverse Serpent S-box 7. */
#define SI7(x0, x1, x2, x3, x4) ({ \
x4 = x3; x3 &= x0; x0 ^= x2; \
x2 |= x4; x4 ^= x1; x0 = ~x0; \
x1 |= x3; x4 ^= x0; x0 &= x2; \
x0 ^= x1; x1 &= x2; x3 ^= x2; \
x4 ^= x3; x2 &= x3; x3 |= x0; \
x1 ^= x4; x3 ^= x4; x4 &= x0; \
x4 ^= x2; \
})
/*
 * both gcc and clang have misoptimized this function in the past,
 * producing horrible object code from spilling temporary variables
 * on the stack. Forcing this part out of line avoids that.
 */
/*
 * Final pass of key expansion: run the raw key-schedule words produced by
 * the keyiter() recurrence through the Serpent S-boxes (in the fixed
 * 3,4,5,6,7,0,1,2,... pattern) to obtain the 33 round keys.
 *
 * r0-r4 carry the last five recurrence words; k points at ctx->expkey.
 * The pointer is deliberately biased (k += 100, then stepped down by 50
 * twice) so the store_and_load_keys() index arguments stay small —
 * presumably to keep immediate offsets compact in the generated code;
 * the effective k[] indices still cover the whole expkey array.
 */
static noinline void __serpent_setkey_sbox(u32 r0, u32 r1, u32 r2,
					   u32 r3, u32 r4, u32 *k)
{
	k += 100;
	S3(r3, r4, r0, r1, r2); store_and_load_keys(r1, r2, r4, r3, 28, 24);
	S4(r1, r2, r4, r3, r0); store_and_load_keys(r2, r4, r3, r0, 24, 20);
	S5(r2, r4, r3, r0, r1); store_and_load_keys(r1, r2, r4, r0, 20, 16);
	S6(r1, r2, r4, r0, r3); store_and_load_keys(r4, r3, r2, r0, 16, 12);
	S7(r4, r3, r2, r0, r1); store_and_load_keys(r1, r2, r0, r4, 12, 8);
	S0(r1, r2, r0, r4, r3); store_and_load_keys(r0, r2, r4, r1, 8, 4);
	S1(r0, r2, r4, r1, r3); store_and_load_keys(r3, r4, r1, r0, 4, 0);
	S2(r3, r4, r1, r0, r2); store_and_load_keys(r2, r4, r3, r0, 0, -4);
	S3(r2, r4, r3, r0, r1); store_and_load_keys(r0, r1, r4, r2, -4, -8);
	S4(r0, r1, r4, r2, r3); store_and_load_keys(r1, r4, r2, r3, -8, -12);
	S5(r1, r4, r2, r3, r0); store_and_load_keys(r0, r1, r4, r3, -12, -16);
	S6(r0, r1, r4, r3, r2); store_and_load_keys(r4, r2, r1, r3, -16, -20);
	S7(r4, r2, r1, r3, r0); store_and_load_keys(r0, r1, r3, r4, -20, -24);
	S0(r0, r1, r3, r4, r2); store_and_load_keys(r3, r1, r4, r0, -24, -28);
	/* rebias the pointer so the following offsets stay small */
	k -= 50;
	S1(r3, r1, r4, r0, r2); store_and_load_keys(r2, r4, r0, r3, 22, 18);
	S2(r2, r4, r0, r3, r1); store_and_load_keys(r1, r4, r2, r3, 18, 14);
	S3(r1, r4, r2, r3, r0); store_and_load_keys(r3, r0, r4, r1, 14, 10);
	S4(r3, r0, r4, r1, r2); store_and_load_keys(r0, r4, r1, r2, 10, 6);
	S5(r0, r4, r1, r2, r3); store_and_load_keys(r3, r0, r4, r2, 6, 2);
	S6(r3, r0, r4, r2, r1); store_and_load_keys(r4, r1, r0, r2, 2, -2);
	S7(r4, r1, r0, r2, r3); store_and_load_keys(r3, r0, r2, r4, -2, -6);
	S0(r3, r0, r2, r4, r1); store_and_load_keys(r2, r0, r4, r3, -6, -10);
	S1(r2, r0, r4, r3, r1); store_and_load_keys(r1, r4, r3, r2, -10, -14);
	S2(r1, r4, r3, r2, r0); store_and_load_keys(r0, r4, r1, r2, -14, -18);
	S3(r0, r4, r1, r2, r3); store_and_load_keys(r2, r3, r4, r0, -18, -22);
	k -= 50;
	S4(r2, r3, r4, r0, r1); store_and_load_keys(r3, r4, r0, r1, 28, 24);
	S5(r3, r4, r0, r1, r2); store_and_load_keys(r2, r3, r4, r1, 24, 20);
	S6(r2, r3, r4, r1, r0); store_and_load_keys(r4, r0, r3, r1, 20, 16);
	S7(r4, r0, r3, r1, r2); store_and_load_keys(r2, r3, r1, r4, 16, 12);
	S0(r2, r3, r1, r4, r0); store_and_load_keys(r1, r3, r4, r2, 12, 8);
	S1(r1, r3, r4, r2, r0); store_and_load_keys(r0, r4, r2, r1, 8, 4);
	S2(r0, r4, r2, r1, r3); store_and_load_keys(r3, r4, r0, r1, 4, 0);
	/* last group: round key 0, store only */
	S3(r3, r4, r0, r1, r2); storekeys(r1, r2, r4, r3, 0);
}
/*
 * Expand a user key of up to 32 bytes into the Serpent round-key schedule
 * in ctx->expkey.
 *
 * Steps:
 *  1. Copy the key and pad it to 256 bits: one 0x01 byte, then zeroes
 *     (the standard Serpent short-key padding).
 *  2. Run the affine recurrence w[i] = rol32(w[i-8]^w[i-5]^w[i-3]^w[i-1]
 *     ^ PHI ^ i, 11) via keyiter(), writing the prekey words into k[].
 *  3. Apply the S-boxes via __serpent_setkey_sbox() to finish the
 *     round keys.
 *
 * Always returns 0; keylen > 32 would overrun ctx->expkey's 8 padded
 * words, so callers must respect SERPENT_MAX_KEY_SIZE (the crypto core
 * enforces this via cia_max_keysize).
 */
int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
		     unsigned int keylen)
{
	u32 *k = ctx->expkey;
	u8 *k8 = (u8 *)k;
	u32 r0, r1, r2, r3, r4;
	int i;

	/* Copy key, add padding */
	for (i = 0; i < keylen; ++i)
		k8[i] = key[i];
	if (i < SERPENT_MAX_KEY_SIZE)
		k8[i++] = 1;
	while (i < SERPENT_MAX_KEY_SIZE)
		k8[i++] = 0;

	/* Expand key using polynomial */
	/* The first 8 words are the raw (little-endian) key bytes, hence the
	 * le32_to_cpu() conversions here and in the first 8 keyiter() calls;
	 * everything keyiter() itself stores is already in CPU byte order. */
	r0 = le32_to_cpu(k[3]);
	r1 = le32_to_cpu(k[4]);
	r2 = le32_to_cpu(k[5]);
	r3 = le32_to_cpu(k[6]);
	r4 = le32_to_cpu(k[7]);

	keyiter(le32_to_cpu(k[0]), r0, r4, r2, 0, 0);
	keyiter(le32_to_cpu(k[1]), r1, r0, r3, 1, 1);
	keyiter(le32_to_cpu(k[2]), r2, r1, r4, 2, 2);
	keyiter(le32_to_cpu(k[3]), r3, r2, r0, 3, 3);
	keyiter(le32_to_cpu(k[4]), r4, r3, r1, 4, 4);
	keyiter(le32_to_cpu(k[5]), r0, r4, r2, 5, 5);
	keyiter(le32_to_cpu(k[6]), r1, r0, r3, 6, 6);
	keyiter(le32_to_cpu(k[7]), r2, r1, r4, 7, 7);
	keyiter(k[0], r3, r2, r0, 8, 8);
	keyiter(k[1], r4, r3, r1, 9, 9);
	keyiter(k[2], r0, r4, r2, 10, 10);
	keyiter(k[3], r1, r0, r3, 11, 11);
	keyiter(k[4], r2, r1, r4, 12, 12);
	keyiter(k[5], r3, r2, r0, 13, 13);
	keyiter(k[6], r4, r3, r1, 14, 14);
	keyiter(k[7], r0, r4, r2, 15, 15);
	keyiter(k[8], r1, r0, r3, 16, 16);
	keyiter(k[9], r2, r1, r4, 17, 17);
	keyiter(k[10], r3, r2, r0, 18, 18);
	keyiter(k[11], r4, r3, r1, 19, 19);
	keyiter(k[12], r0, r4, r2, 20, 20);
	keyiter(k[13], r1, r0, r3, 21, 21);
	keyiter(k[14], r2, r1, r4, 22, 22);
	keyiter(k[15], r3, r2, r0, 23, 23);
	keyiter(k[16], r4, r3, r1, 24, 24);
	keyiter(k[17], r0, r4, r2, 25, 25);
	keyiter(k[18], r1, r0, r3, 26, 26);
	keyiter(k[19], r2, r1, r4, 27, 27);
	keyiter(k[20], r3, r2, r0, 28, 28);
	keyiter(k[21], r4, r3, r1, 29, 29);
	keyiter(k[22], r0, r4, r2, 30, 30);
	keyiter(k[23], r1, r0, r3, 31, 31);

	/* Bias k so the remaining store indices stay small; the negative
	 * read offsets below still address the words written above. */
	k += 50;
	keyiter(k[-26], r2, r1, r4, 32, -18);
	keyiter(k[-25], r3, r2, r0, 33, -17);
	keyiter(k[-24], r4, r3, r1, 34, -16);
	keyiter(k[-23], r0, r4, r2, 35, -15);
	keyiter(k[-22], r1, r0, r3, 36, -14);
	keyiter(k[-21], r2, r1, r4, 37, -13);
	keyiter(k[-20], r3, r2, r0, 38, -12);
	keyiter(k[-19], r4, r3, r1, 39, -11);
	keyiter(k[-18], r0, r4, r2, 40, -10);
	keyiter(k[-17], r1, r0, r3, 41, -9);
	keyiter(k[-16], r2, r1, r4, 42, -8);
	keyiter(k[-15], r3, r2, r0, 43, -7);
	keyiter(k[-14], r4, r3, r1, 44, -6);
	keyiter(k[-13], r0, r4, r2, 45, -5);
	keyiter(k[-12], r1, r0, r3, 46, -4);
	keyiter(k[-11], r2, r1, r4, 47, -3);
	keyiter(k[-10], r3, r2, r0, 48, -2);
	keyiter(k[-9], r4, r3, r1, 49, -1);
	keyiter(k[-8], r0, r4, r2, 50, 0);
	keyiter(k[-7], r1, r0, r3, 51, 1);
	keyiter(k[-6], r2, r1, r4, 52, 2);
	keyiter(k[-5], r3, r2, r0, 53, 3);
	keyiter(k[-4], r4, r3, r1, 54, 4);
	keyiter(k[-3], r0, r4, r2, 55, 5);
	keyiter(k[-2], r1, r0, r3, 56, 6);
	keyiter(k[-1], r2, r1, r4, 57, 7);
	keyiter(k[0], r3, r2, r0, 58, 8);
	keyiter(k[1], r4, r3, r1, 59, 9);
	keyiter(k[2], r0, r4, r2, 60, 10);
	keyiter(k[3], r1, r0, r3, 61, 11);
	keyiter(k[4], r2, r1, r4, 62, 12);
	keyiter(k[5], r3, r2, r0, 63, 13);
	keyiter(k[6], r4, r3, r1, 64, 14);
	keyiter(k[7], r0, r4, r2, 65, 15);
	keyiter(k[8], r1, r0, r3, 66, 16);
	keyiter(k[9], r2, r1, r4, 67, 17);
	keyiter(k[10], r3, r2, r0, 68, 18);
	keyiter(k[11], r4, r3, r1, 69, 19);
	keyiter(k[12], r0, r4, r2, 70, 20);
	keyiter(k[13], r1, r0, r3, 71, 21);
	keyiter(k[14], r2, r1, r4, 72, 22);
	keyiter(k[15], r3, r2, r0, 73, 23);
	keyiter(k[16], r4, r3, r1, 74, 24);
	keyiter(k[17], r0, r4, r2, 75, 25);
	keyiter(k[18], r1, r0, r3, 76, 26);
	keyiter(k[19], r2, r1, r4, 77, 27);
	keyiter(k[20], r3, r2, r0, 78, 28);
	keyiter(k[21], r4, r3, r1, 79, 29);
	keyiter(k[22], r0, r4, r2, 80, 30);
	keyiter(k[23], r1, r0, r3, 81, 31);

	/* second rebias, same scheme as above */
	k += 50;
	keyiter(k[-26], r2, r1, r4, 82, -18);
	keyiter(k[-25], r3, r2, r0, 83, -17);
	keyiter(k[-24], r4, r3, r1, 84, -16);
	keyiter(k[-23], r0, r4, r2, 85, -15);
	keyiter(k[-22], r1, r0, r3, 86, -14);
	keyiter(k[-21], r2, r1, r4, 87, -13);
	keyiter(k[-20], r3, r2, r0, 88, -12);
	keyiter(k[-19], r4, r3, r1, 89, -11);
	keyiter(k[-18], r0, r4, r2, 90, -10);
	keyiter(k[-17], r1, r0, r3, 91, -9);
	keyiter(k[-16], r2, r1, r4, 92, -8);
	keyiter(k[-15], r3, r2, r0, 93, -7);
	keyiter(k[-14], r4, r3, r1, 94, -6);
	keyiter(k[-13], r0, r4, r2, 95, -5);
	keyiter(k[-12], r1, r0, r3, 96, -4);
	keyiter(k[-11], r2, r1, r4, 97, -3);
	keyiter(k[-10], r3, r2, r0, 98, -2);
	keyiter(k[-9], r4, r3, r1, 99, -1);
	keyiter(k[-8], r0, r4, r2, 100, 0);
	keyiter(k[-7], r1, r0, r3, 101, 1);
	keyiter(k[-6], r2, r1, r4, 102, 2);
	keyiter(k[-5], r3, r2, r0, 103, 3);
	keyiter(k[-4], r4, r3, r1, 104, 4);
	keyiter(k[-3], r0, r4, r2, 105, 5);
	keyiter(k[-2], r1, r0, r3, 106, 6);
	keyiter(k[-1], r2, r1, r4, 107, 7);
	keyiter(k[0], r3, r2, r0, 108, 8);
	keyiter(k[1], r4, r3, r1, 109, 9);
	keyiter(k[2], r0, r4, r2, 110, 10);
	keyiter(k[3], r1, r0, r3, 111, 11);
	keyiter(k[4], r2, r1, r4, 112, 12);
	keyiter(k[5], r3, r2, r0, 113, 13);
	keyiter(k[6], r4, r3, r1, 114, 14);
	keyiter(k[7], r0, r4, r2, 115, 15);
	keyiter(k[8], r1, r0, r3, 116, 16);
	keyiter(k[9], r2, r1, r4, 117, 17);
	keyiter(k[10], r3, r2, r0, 118, 18);
	keyiter(k[11], r4, r3, r1, 119, 19);
	keyiter(k[12], r0, r4, r2, 120, 20);
	keyiter(k[13], r1, r0, r3, 121, 21);
	keyiter(k[14], r2, r1, r4, 122, 22);
	keyiter(k[15], r3, r2, r0, 123, 23);
	keyiter(k[16], r4, r3, r1, 124, 24);
	keyiter(k[17], r0, r4, r2, 125, 25);
	keyiter(k[18], r1, r0, r3, 126, 26);
	keyiter(k[19], r2, r1, r4, 127, 27);
	keyiter(k[20], r3, r2, r0, 128, 28);
	keyiter(k[21], r4, r3, r1, 129, 29);
	keyiter(k[22], r0, r4, r2, 130, 30);
	keyiter(k[23], r1, r0, r3, 131, 31);

	/* Apply S-boxes */
	__serpent_setkey_sbox(r0, r1, r2, r3, r4, ctx->expkey);

	return 0;
}
EXPORT_SYMBOL_GPL(__serpent_setkey);
  409. int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
  410. {
  411. return __serpent_setkey(crypto_tfm_ctx(tfm), key, keylen);
  412. }
  413. EXPORT_SYMBOL_GPL(serpent_setkey);
/*
 * Encrypt one 16-byte block with an already-expanded key.
 *
 * The block is read as four little-endian 32-bit words, mixed with round
 * key 0 (K), then put through 32 rounds of S-box + linear transform (LK),
 * with the S-boxes cycling S0..S7 four times.  The register arguments are
 * permuted from round to round instead of moving data, which is why every
 * line passes r0..r4 in a different order.  The final K() applies round
 * key 32, and the result is written back little-endian.
 */
void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
{
	const u32 *k = ctx->expkey;
	const __le32 *s = (const __le32 *)src;
	__le32	*d = (__le32 *)dst;
	u32	r0, r1, r2, r3, r4;

/*
 * Note: The conversions between u8* and u32* might cause trouble
 * on architectures with stricter alignment rules than x86
 */

	r0 = le32_to_cpu(s[0]);
	r1 = le32_to_cpu(s[1]);
	r2 = le32_to_cpu(s[2]);
	r3 = le32_to_cpu(s[3]);

	K(r0, r1, r2, r3, 0);
	S0(r0, r1, r2, r3, r4);		LK(r2, r1, r3, r0, r4, 1);
	S1(r2, r1, r3, r0, r4);		LK(r4, r3, r0, r2, r1, 2);
	S2(r4, r3, r0, r2, r1);		LK(r1, r3, r4, r2, r0, 3);
	S3(r1, r3, r4, r2, r0);		LK(r2, r0, r3, r1, r4, 4);
	S4(r2, r0, r3, r1, r4);		LK(r0, r3, r1, r4, r2, 5);
	S5(r0, r3, r1, r4, r2);		LK(r2, r0, r3, r4, r1, 6);
	S6(r2, r0, r3, r4, r1);		LK(r3, r1, r0, r4, r2, 7);
	S7(r3, r1, r0, r4, r2);		LK(r2, r0, r4, r3, r1, 8);
	S0(r2, r0, r4, r3, r1);		LK(r4, r0, r3, r2, r1, 9);
	S1(r4, r0, r3, r2, r1);		LK(r1, r3, r2, r4, r0, 10);
	S2(r1, r3, r2, r4, r0);		LK(r0, r3, r1, r4, r2, 11);
	S3(r0, r3, r1, r4, r2);		LK(r4, r2, r3, r0, r1, 12);
	S4(r4, r2, r3, r0, r1);		LK(r2, r3, r0, r1, r4, 13);
	S5(r2, r3, r0, r1, r4);		LK(r4, r2, r3, r1, r0, 14);
	S6(r4, r2, r3, r1, r0);		LK(r3, r0, r2, r1, r4, 15);
	S7(r3, r0, r2, r1, r4);		LK(r4, r2, r1, r3, r0, 16);
	S0(r4, r2, r1, r3, r0);		LK(r1, r2, r3, r4, r0, 17);
	S1(r1, r2, r3, r4, r0);		LK(r0, r3, r4, r1, r2, 18);
	S2(r0, r3, r4, r1, r2);		LK(r2, r3, r0, r1, r4, 19);
	S3(r2, r3, r0, r1, r4);		LK(r1, r4, r3, r2, r0, 20);
	S4(r1, r4, r3, r2, r0);		LK(r4, r3, r2, r0, r1, 21);
	S5(r4, r3, r2, r0, r1);		LK(r1, r4, r3, r0, r2, 22);
	S6(r1, r4, r3, r0, r2);		LK(r3, r2, r4, r0, r1, 23);
	S7(r3, r2, r4, r0, r1);		LK(r1, r4, r0, r3, r2, 24);
	S0(r1, r4, r0, r3, r2);		LK(r0, r4, r3, r1, r2, 25);
	S1(r0, r4, r3, r1, r2);		LK(r2, r3, r1, r0, r4, 26);
	S2(r2, r3, r1, r0, r4);		LK(r4, r3, r2, r0, r1, 27);
	S3(r4, r3, r2, r0, r1);		LK(r0, r1, r3, r4, r2, 28);
	S4(r0, r1, r3, r4, r2);		LK(r1, r3, r4, r2, r0, 29);
	S5(r1, r3, r4, r2, r0);		LK(r0, r1, r3, r2, r4, 30);
	S6(r0, r1, r3, r2, r4);		LK(r3, r4, r1, r2, r0, 31);
	S7(r3, r4, r1, r2, r0);		K(r0, r1, r2, r3, 32);

	d[0] = cpu_to_le32(r0);
	d[1] = cpu_to_le32(r1);
	d[2] = cpu_to_le32(r2);
	d[3] = cpu_to_le32(r3);
}
EXPORT_SYMBOL_GPL(__serpent_encrypt);
  467. static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  468. {
  469. struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
  470. __serpent_encrypt(ctx, dst, src);
  471. }
/*
 * Decrypt one 16-byte block with an already-expanded key.
 *
 * Exact mirror of __serpent_encrypt(): start from round key 32 (K),
 * then 32 rounds of key+inverse-linear-transform (KL) followed by the
 * inverse S-boxes SI7..SI0 in reverse order, finishing with round key 0.
 * As in encryption, data movement is avoided by permuting the register
 * arguments between rounds, hence the final output lives in r2,r3,r1,r4.
 */
void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
{
	const u32 *k = ctx->expkey;
	const __le32 *s = (const __le32 *)src;
	__le32	*d = (__le32 *)dst;
	u32	r0, r1, r2, r3, r4;

	r0 = le32_to_cpu(s[0]);
	r1 = le32_to_cpu(s[1]);
	r2 = le32_to_cpu(s[2]);
	r3 = le32_to_cpu(s[3]);

	K(r0, r1, r2, r3, 32);
	SI7(r0, r1, r2, r3, r4);	KL(r1, r3, r0, r4, r2, 31);
	SI6(r1, r3, r0, r4, r2);	KL(r0, r2, r4, r1, r3, 30);
	SI5(r0, r2, r4, r1, r3);	KL(r2, r3, r0, r4, r1, 29);
	SI4(r2, r3, r0, r4, r1);	KL(r2, r0, r1, r4, r3, 28);
	SI3(r2, r0, r1, r4, r3);	KL(r1, r2, r3, r4, r0, 27);
	SI2(r1, r2, r3, r4, r0);	KL(r2, r0, r4, r3, r1, 26);
	SI1(r2, r0, r4, r3, r1);	KL(r1, r0, r4, r3, r2, 25);
	SI0(r1, r0, r4, r3, r2);	KL(r4, r2, r0, r1, r3, 24);
	SI7(r4, r2, r0, r1, r3);	KL(r2, r1, r4, r3, r0, 23);
	SI6(r2, r1, r4, r3, r0);	KL(r4, r0, r3, r2, r1, 22);
	SI5(r4, r0, r3, r2, r1);	KL(r0, r1, r4, r3, r2, 21);
	SI4(r0, r1, r4, r3, r2);	KL(r0, r4, r2, r3, r1, 20);
	SI3(r0, r4, r2, r3, r1);	KL(r2, r0, r1, r3, r4, 19);
	SI2(r2, r0, r1, r3, r4);	KL(r0, r4, r3, r1, r2, 18);
	SI1(r0, r4, r3, r1, r2);	KL(r2, r4, r3, r1, r0, 17);
	SI0(r2, r4, r3, r1, r0);	KL(r3, r0, r4, r2, r1, 16);
	SI7(r3, r0, r4, r2, r1);	KL(r0, r2, r3, r1, r4, 15);
	SI6(r0, r2, r3, r1, r4);	KL(r3, r4, r1, r0, r2, 14);
	SI5(r3, r4, r1, r0, r2);	KL(r4, r2, r3, r1, r0, 13);
	SI4(r4, r2, r3, r1, r0);	KL(r4, r3, r0, r1, r2, 12);
	SI3(r4, r3, r0, r1, r2);	KL(r0, r4, r2, r1, r3, 11);
	SI2(r0, r4, r2, r1, r3);	KL(r4, r3, r1, r2, r0, 10);
	SI1(r4, r3, r1, r2, r0);	KL(r0, r3, r1, r2, r4, 9);
	SI0(r0, r3, r1, r2, r4);	KL(r1, r4, r3, r0, r2, 8);
	SI7(r1, r4, r3, r0, r2);	KL(r4, r0, r1, r2, r3, 7);
	SI6(r4, r0, r1, r2, r3);	KL(r1, r3, r2, r4, r0, 6);
	SI5(r1, r3, r2, r4, r0);	KL(r3, r0, r1, r2, r4, 5);
	SI4(r3, r0, r1, r2, r4);	KL(r3, r1, r4, r2, r0, 4);
	SI3(r3, r1, r4, r2, r0);	KL(r4, r3, r0, r2, r1, 3);
	SI2(r4, r3, r0, r2, r1);	KL(r3, r1, r2, r0, r4, 2);
	SI1(r3, r1, r2, r0, r4);	KL(r4, r1, r2, r0, r3, 1);
	SI0(r4, r1, r2, r0, r3);	K(r2, r3, r1, r4, 0);

	/* plaintext words ended up in r2, r3, r1, r4 */
	d[0] = cpu_to_le32(r2);
	d[1] = cpu_to_le32(r3);
	d[2] = cpu_to_le32(r1);
	d[3] = cpu_to_le32(r4);
}
EXPORT_SYMBOL_GPL(__serpent_decrypt);
  521. static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  522. {
  523. struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
  524. __serpent_decrypt(ctx, dst, src);
  525. }
  526. static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
  527. unsigned int keylen)
  528. {
  529. u8 rev_key[SERPENT_MAX_KEY_SIZE];
  530. int i;
  531. for (i = 0; i < keylen; ++i)
  532. rev_key[keylen - i - 1] = key[i];
  533. return serpent_setkey(tfm, rev_key, keylen);
  534. }
  535. static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  536. {
  537. const u32 * const s = (const u32 * const)src;
  538. u32 * const d = (u32 * const)dst;
  539. u32 rs[4], rd[4];
  540. rs[0] = swab32(s[3]);
  541. rs[1] = swab32(s[2]);
  542. rs[2] = swab32(s[1]);
  543. rs[3] = swab32(s[0]);
  544. serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs);
  545. d[0] = swab32(rd[3]);
  546. d[1] = swab32(rd[2]);
  547. d[2] = swab32(rd[1]);
  548. d[3] = swab32(rd[0]);
  549. }
  550. static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  551. {
  552. const u32 * const s = (const u32 * const)src;
  553. u32 * const d = (u32 * const)dst;
  554. u32 rs[4], rd[4];
  555. rs[0] = swab32(s[3]);
  556. rs[1] = swab32(s[2]);
  557. rs[2] = swab32(s[1]);
  558. rs[3] = swab32(s[0]);
  559. serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs);
  560. d[0] = swab32(rd[3]);
  561. d[1] = swab32(rd[2]);
  562. d[2] = swab32(rd[1]);
  563. d[3] = swab32(rd[0]);
  564. }
  565. static struct crypto_alg srp_algs[2] = { {
  566. .cra_name = "serpent",
  567. .cra_driver_name = "serpent-generic",
  568. .cra_priority = 100,
  569. .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
  570. .cra_blocksize = SERPENT_BLOCK_SIZE,
  571. .cra_ctxsize = sizeof(struct serpent_ctx),
  572. .cra_alignmask = 3,
  573. .cra_module = THIS_MODULE,
  574. .cra_u = { .cipher = {
  575. .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
  576. .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
  577. .cia_setkey = serpent_setkey,
  578. .cia_encrypt = serpent_encrypt,
  579. .cia_decrypt = serpent_decrypt } }
  580. }, {
  581. .cra_name = "tnepres",
  582. .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
  583. .cra_blocksize = SERPENT_BLOCK_SIZE,
  584. .cra_ctxsize = sizeof(struct serpent_ctx),
  585. .cra_alignmask = 3,
  586. .cra_module = THIS_MODULE,
  587. .cra_u = { .cipher = {
  588. .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
  589. .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
  590. .cia_setkey = tnepres_setkey,
  591. .cia_encrypt = tnepres_encrypt,
  592. .cia_decrypt = tnepres_decrypt } }
  593. } };
/* Register both ciphers with the crypto core on module load. */
static int __init serpent_mod_init(void)
{
	return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs));
}
/* Unregister both ciphers on module unload. */
static void __exit serpent_mod_fini(void)
{
	crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs));
}
  602. module_init(serpent_mod_init);
  603. module_exit(serpent_mod_fini);
  604. MODULE_LICENSE("GPL");
  605. MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm");
  606. MODULE_AUTHOR("Dag Arne Osvik <osvik@ii.uib.no>");
  607. MODULE_ALIAS_CRYPTO("tnepres");
  608. MODULE_ALIAS_CRYPTO("serpent");
  609. MODULE_ALIAS_CRYPTO("serpent-generic");