caamalg_desc.c 51 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557
  1. /*
  2. * Shared descriptors for aead, ablkcipher algorithms
  3. *
  4. * Copyright 2016 NXP
  5. */
  6. #include "compat.h"
  7. #include "desc_constr.h"
  8. #include "caamalg_desc.h"
  9. /*
  10. * For aead functions, read payload and write payload,
  11. * both of which are specified in req->src and req->dst
  12. */
  13. static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
  14. {
  15. append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
  16. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
  17. KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
  18. }
  19. /* Set DK bit in class 1 operation if shared */
  20. static inline void append_dec_op1(u32 *desc, u32 type)
  21. {
  22. u32 *jump_cmd, *uncond_jump_cmd;
  23. /* DK bit is valid only for AES */
  24. if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
  25. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  26. OP_ALG_DECRYPT);
  27. return;
  28. }
  29. jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
  30. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  31. OP_ALG_DECRYPT);
  32. uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
  33. set_jump_tgt_here(desc, jump_cmd);
  34. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  35. OP_ALG_DECRYPT | OP_ALG_AAI_DK);
  36. set_jump_tgt_here(desc, uncond_jump_cmd);
  37. }
  38. /**
  39. * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
  40. * (non-protocol) with no (null) encryption.
  41. * @desc: pointer to buffer used for descriptor construction
  42. * @adata: pointer to authentication transform definitions.
  43. * A split key is required for SEC Era < 6; the size of the split key
  44. * is specified in this case. Valid algorithm values - one of
  45. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  46. * with OP_ALG_AAI_HMAC_PRECOMP.
  47. * @icvsize: integrity check value (ICV) size (truncated or full)
  48. * @era: SEC Era
  49. */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* Era < 6: load the pre-computed split key directly */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: let the device derive the split key (DKP) */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Patch the two MOVE commands above to target the commands below */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
  108. /**
  109. * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
  110. * (non-protocol) with no (null) decryption.
  111. * @desc: pointer to buffer used for descriptor construction
  112. * @adata: pointer to authentication transform definitions.
  113. * A split key is required for SEC Era < 6; the size of the split key
  114. * is specified in this case. Valid algorithm values - one of
  115. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  116. * with OP_ALG_AAI_HMAC_PRECOMP.
  117. * @icvsize: integrity check value (ICV) size (truncated or full)
  118. * @era: SEC Era
  119. */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* Era < 6: load the pre-computed split key directly */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: let the device derive the split key (DKP) */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation: decrypt direction with ICV checking enabled */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	/* Patch the two MOVE commands above to target the commands below */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
/*
 * init_sh_desc_key_aead - initialize the shared descriptor header and load
 * the authentication (class 2) and encryption (class 1) keys; the key
 * commands are skipped when the descriptor is already shared
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 * @adata: pointer to authentication transform definitions
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce, loaded into CONTEXT1 when @is_rfc3686
 * @era: SEC Era
 */
static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		/* Era < 6: load the pre-computed split key directly */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: let the device derive the split key (DKP) */
		append_proto_dkp(desc, adata);
	}

	/* Encryption key goes to the class 1 key register */
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		/* Stage the nonce through the output FIFO, then move it
		 * into CONTEXT1 at byte offset 16
		 */
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}
  235. /**
  236. * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
  237. * (non-protocol).
  238. * @desc: pointer to buffer used for descriptor construction
  239. * @cdata: pointer to block cipher transform definitions
  240. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  241. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  242. * @adata: pointer to authentication transform definitions.
  243. * A split key is required for SEC Era < 6; the size of the split key
  244. * is specified in this case. Valid algorithm values - one of
  245. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  246. * with OP_ALG_AAI_HMAC_PRECOMP.
  247. * @ivsize: initialization vector size
  248. * @icvsize: integrity check value (ICV) size (truncated or full)
  249. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  250. * @nonce: pointer to rfc3686 nonce
  251. * @ctx1_iv_off: IV offset in CONTEXT1 register
  252. * @is_qi: true when called from caam/qi
  253. * @era: SEC Era
  254. */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
			    int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Stall until the DECO load above has settled (see the
		 * JUMP_COND_* calm/no-pending conditions)
		 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* QI carries the IV in the input sequence */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		/* non-QI, Era >= 3: assoclen is passed via DPOVRD */
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
  316. /**
  317. * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
  318. * (non-protocol).
  319. * @desc: pointer to buffer used for descriptor construction
  320. * @cdata: pointer to block cipher transform definitions
  321. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  322. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  323. * @adata: pointer to authentication transform definitions.
  324. * A split key is required for SEC Era < 6; the size of the split key
  325. * is specified in this case. Valid algorithm values - one of
  326. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  327. * with OP_ALG_AAI_HMAC_PRECOMP.
  328. * @ivsize: initialization vector size
  329. * @icvsize: integrity check value (ICV) size (truncated or full)
  330. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  331. * @nonce: pointer to rfc3686 nonce
  332. * @ctx1_iv_off: IV offset in CONTEXT1 register
  333. * @is_qi: true when called from caam/qi
  334. * @era: SEC Era
  335. */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi, int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation: decrypt direction with ICV checking enabled */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Stall until the DECO load above has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* For geniv the IV is loaded later, after the assoc data */
		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		if (geniv)
			/* geniv: output sequence also carries ivsize bytes */
			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
					CAAM_CMD_SZ);
	} else {
		/* non-QI, Era >= 3: assoclen is passed via DPOVRD */
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
					CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		/* Load the IV into the class 1 context and feed a copy of
		 * it to the class 2 (authentication) input FIFO
		 */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation: the DK-bit variant only applies when the IV sits
	 * at offset 0 (see append_dec_op1, AES-only)
	 */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
  418. /**
  419. * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
  420. * (non-protocol) with HW-generated initialization
  421. * vector.
  422. * @desc: pointer to buffer used for descriptor construction
  423. * @cdata: pointer to block cipher transform definitions
  424. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  425. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  426. * @adata: pointer to authentication transform definitions.
  427. * A split key is required for SEC Era < 6; the size of the split key
  428. * is specified in this case. Valid algorithm values - one of
  429. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  430. * with OP_ALG_AAI_HMAC_PRECOMP.
  431. * @ivsize: initialization vector size
  432. * @icvsize: integrity check value (ICV) size (truncated or full)
  433. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  434. * @nonce: pointer to rfc3686 nonce
  435. * @ctx1_iv_off: IV offset in CONTEXT1 register
  436. * @is_qi: true when called from caam/qi
  437. * @era: SEC Era
  438. */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi, int era)
{
	u32 geniv, moveiv;
	u32 *wait_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Stall until the DECO load above has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	if (is_rfc3686) {
		/* rfc3686: no random IV generation, counter block is used */
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));

		goto copy_iv;
	}

	/* Generate IV: random pad bytes moved into the class 1 context */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		/* non-QI, Era >= 3: assoclen is passed via DPOVRD */
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Not need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Wait for IV transfer (ofifo -> class2) to finish before starting
	 * ciphertext transfer (ofifo -> external memory).
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
	set_jump_tgt_here(desc, wait_cmd);

	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead givenc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
  543. /**
  544. * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
  545. * @desc: pointer to buffer used for descriptor construction
  546. * @cdata: pointer to block cipher transform definitions
  547. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
  548. * @ivsize: initialization vector size
  549. * @icvsize: integrity check value (ICV) size (truncated or full)
  550. * @is_qi: true when called from caam/qi
  551. */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Stall until the DECO load above has settled */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* QI: input sequence carries the IV, exclude it from len */
		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
					ivsize);
	} else {
		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
	}

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/*
	 * jump to ICV writing
	 * NOTE(review): the low bits of these JUMPs look like relative word
	 * offsets over the commands emitted below (4 when the QI-only IV
	 * flush is present, 2 otherwise) — confirm against the command set.
	 */
	if (is_qi)
		append_jump(desc, JUMP_TEST_ALL | 4);
	else
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
	if (is_qi)
		/* jump to ICV writing */
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
				     FIFOLD_TYPE_LAST1);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a shared descriptor that decrypts the payload and has the engine
 * verify the GCM tag (OP_ALG_ICV_ON). The descriptor branches at run time on
 * assoclen == 0 and cryptlen == 0 via jump commands patched below.
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load above to settle before using REG3 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/*
	 * if assoclen is ZERO, skip reading the assoc data
	 * NOTE(review): in the non-QI case REG3 is presumably set up by the
	 * job descriptor - confirm against caller.
	 */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV - engine compares it against the computed tag */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load above to settle before using REG3 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV - salt is appended after the key */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/*
	 * assoclen in REG3 includes the IV; VARSEQINLEN = assoclen - ivsize
	 * NOTE(review): inferred from the ivsize subtraction and the IV skip
	 * below - confirm against the caller's job descriptor.
	 */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation - engine verifies the ICV (OP_ALG_ICV_ON) */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the DECO load above to settle before using REG3 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV - salt is appended after the key */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* VARSEQINLEN = assoclen - ivsize (IV is skipped separately below) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV - engine compares it against the computed tag */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * GMAC authenticates everything (assoc data + payload) but encrypts nothing,
 * so the whole input is fed in as AAD and the payload is copied through the
 * output FIFO. The descriptor self-patches (see the MOVE commands below).
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV - salt is appended after the key */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	/* Patch targets of the two MOVE commands appended above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Mirror of the encap descriptor: all input is authenticated as AAD while the
 * payload is passed through unmodified; the engine checks the ICV at the end.
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation - engine verifies the ICV (OP_ALG_ICV_ON) */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV - salt is appended after the key */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	/* Patch targets of the two MOVE commands appended above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV - engine compares it against the computed tag */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	/*
	 * Output length equals input length (length-preserving cipher).
	 * REG0 is used as the zero operand here, matching the idiom used
	 * throughout this file - TODO confirm REG0 holds 0 at this point.
	 */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	/* Read the whole input sequence as the final class 1 message load */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	/* Write the processed data to the output sequence */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
  1033. /**
  1034. * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
  1035. * @desc: pointer to buffer used for descriptor construction
  1036. * @cdata: pointer to block cipher transform definitions
  1037. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1038. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  1039. * @ivsize: initialization vector size
  1040. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1041. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1042. */
  1043. void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
  1044. unsigned int ivsize, const bool is_rfc3686,
  1045. const u32 ctx1_iv_off)
  1046. {
  1047. u32 *key_jump_cmd;
  1048. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1049. /* Skip if already shared */
  1050. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1051. JUMP_COND_SHRD);
  1052. /* Load class1 key only */
  1053. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1054. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1055. /* Load nonce into CONTEXT1 reg */
  1056. if (is_rfc3686) {
  1057. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1058. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1059. LDST_CLASS_IND_CCB |
  1060. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1061. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1062. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1063. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1064. }
  1065. set_jump_tgt_here(desc, key_jump_cmd);
  1066. /* Load iv */
  1067. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1068. LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
  1069. /* Load counter into CONTEXT1 reg */
  1070. if (is_rfc3686)
  1071. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1072. LDST_SRCDST_BYTE_CONTEXT |
  1073. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1074. LDST_OFFSET_SHIFT));
  1075. /* Load operation */
  1076. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1077. OP_ALG_ENCRYPT);
  1078. /* Perform operation */
  1079. ablkcipher_append_src_dst(desc);
  1080. #ifdef DEBUG
  1081. print_hex_dump(KERN_ERR,
  1082. "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
  1083. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1084. #endif
  1085. }
  1086. EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
  1087. /**
  1088. * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
  1089. * @desc: pointer to buffer used for descriptor construction
  1090. * @cdata: pointer to block cipher transform definitions
  1091. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1092. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  1093. * @ivsize: initialization vector size
  1094. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1095. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1096. */
  1097. void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
  1098. unsigned int ivsize, const bool is_rfc3686,
  1099. const u32 ctx1_iv_off)
  1100. {
  1101. u32 *key_jump_cmd;
  1102. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1103. /* Skip if already shared */
  1104. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1105. JUMP_COND_SHRD);
  1106. /* Load class1 key only */
  1107. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1108. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1109. /* Load nonce into CONTEXT1 reg */
  1110. if (is_rfc3686) {
  1111. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1112. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1113. LDST_CLASS_IND_CCB |
  1114. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1115. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1116. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1117. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1118. }
  1119. set_jump_tgt_here(desc, key_jump_cmd);
  1120. /* load IV */
  1121. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1122. LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
  1123. /* Load counter into CONTEXT1 reg */
  1124. if (is_rfc3686)
  1125. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1126. LDST_SRCDST_BYTE_CONTEXT |
  1127. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1128. LDST_OFFSET_SHIFT));
  1129. /* Choose operation */
  1130. if (ctx1_iv_off)
  1131. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1132. OP_ALG_DECRYPT);
  1133. else
  1134. append_dec_op1(desc, cdata->algtype);
  1135. /* Perform operation */
  1136. ablkcipher_append_src_dst(desc);
  1137. #ifdef DEBUG
  1138. print_hex_dump(KERN_ERR,
  1139. "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
  1140. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1141. #endif
  1142. }
  1143. EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);
/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *                                   with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
				     unsigned int ivsize, const bool is_rfc3686,
				     const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd, geniv;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg - nonce follows the key material */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * Generate IV: inject an ivsize-byte random pad entry into the info
	 * FIFO, then move it from the input FIFO into CONTEXT1. Automatic
	 * info FIFO handling is disabled around the move so the entry is
	 * consumed exactly as staged.
	 */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
		(ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/*
	 * NOTE(review): wait until class 1 context is not pending (NCP)
	 * before starting the operation - presumably needed only when the IV
	 * sits at a non-zero offset; confirm against the SEC reference manual.
	 */
	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);
  1213. /**
  1214. * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
  1215. * descriptor
  1216. * @desc: pointer to buffer used for descriptor construction
  1217. * @cdata: pointer to block cipher transform definitions
  1218. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1219. */
  1220. void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
  1221. {
  1222. __be64 sector_size = cpu_to_be64(512);
  1223. u32 *key_jump_cmd;
  1224. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1225. /* Skip if already shared */
  1226. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1227. JUMP_COND_SHRD);
  1228. /* Load class1 keys only */
  1229. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1230. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1231. /* Load sector size with index 40 bytes (0x28) */
  1232. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1233. LDST_SRCDST_BYTE_CONTEXT |
  1234. (0x28 << LDST_OFFSET_SHIFT));
  1235. set_jump_tgt_here(desc, key_jump_cmd);
  1236. /*
  1237. * create sequence for loading the sector index
  1238. * Upper 8B of IV - will be used as sector index
  1239. * Lower 8B of IV - will be discarded
  1240. */
  1241. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1242. (0x20 << LDST_OFFSET_SHIFT));
  1243. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1244. /* Load operation */
  1245. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1246. OP_ALG_ENCRYPT);
  1247. /* Perform operation */
  1248. ablkcipher_append_src_dst(desc);
  1249. #ifdef DEBUG
  1250. print_hex_dump(KERN_ERR,
  1251. "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
  1252. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1253. #endif
  1254. }
  1255. EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
  1256. /**
  1257. * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
  1258. * descriptor
  1259. * @desc: pointer to buffer used for descriptor construction
  1260. * @cdata: pointer to block cipher transform definitions
  1261. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1262. */
  1263. void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
  1264. {
  1265. __be64 sector_size = cpu_to_be64(512);
  1266. u32 *key_jump_cmd;
  1267. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1268. /* Skip if already shared */
  1269. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1270. JUMP_COND_SHRD);
  1271. /* Load class1 key only */
  1272. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1273. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1274. /* Load sector size with index 40 bytes (0x28) */
  1275. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1276. LDST_SRCDST_BYTE_CONTEXT |
  1277. (0x28 << LDST_OFFSET_SHIFT));
  1278. set_jump_tgt_here(desc, key_jump_cmd);
  1279. /*
  1280. * create sequence for loading the sector index
  1281. * Upper 8B of IV - will be used as sector index
  1282. * Lower 8B of IV - will be discarded
  1283. */
  1284. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1285. (0x20 << LDST_OFFSET_SHIFT));
  1286. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1287. /* Load operation */
  1288. append_dec_op1(desc, cdata->algtype);
  1289. /* Perform operation */
  1290. ablkcipher_append_src_dst(desc);
  1291. #ifdef DEBUG
  1292. print_hex_dump(KERN_ERR,
  1293. "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
  1294. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1295. #endif
  1296. }
  1297. EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);
  1298. MODULE_LICENSE("GPL");
  1299. MODULE_DESCRIPTION("FSL CAAM descriptor support");
  1300. MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");