caamalg_desc.c 55 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639
  1. // SPDX-License-Identifier: GPL-2.0+
  2. /*
  3. * Shared descriptors for aead, skcipher algorithms
  4. *
  5. * Copyright 2016-2019 NXP
  6. */
  7. #include "compat.h"
  8. #include "desc_constr.h"
  9. #include "caamalg_desc.h"
/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst.
 *
 * Both transfers carry the VLF (variable length) flag, so the actual
 * byte counts come from the variable sequence length registers that the
 * caller programmed beforehand. @msg_type selects the FIFO load input
 * type (e.g. plain MSG vs. MSG1OUT2) and LASTBOTH marks this as the
 * final input for both class 1 and class 2 engines.
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}
/*
 * Set DK (Decrypt Key) bit in the class 1 operation if the descriptor is
 * entered in the "shared" state.
 *
 * For non-AES algorithms a plain INITFINAL decrypt operation is emitted.
 * For AES, a small jump ladder selects at run time between the normal
 * operation and one with OP_ALG_AAI_DK set, depending on the SHRD
 * condition flag.
 */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	/* If shared, jump over the non-DK operation to the DK variant */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
	/* Not shared: skip the DK variant below */
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
			 OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}
/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* Era < 6 needs the pre-computed MDHA split key */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: let the Derived Key Protocol split the key */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVEs below are re-targeted at run time via
	 * set_move_tgt_here() further down.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		/* Era < 6 needs the pre-computed MDHA split key */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: let the Derived Key Protocol split the key */
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation, with ICV check enabled for decrypt */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVEs below are re-targeted at run time via
	 * set_move_tgt_here() further down.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 * (An always-taken jump to the next command acts as the NOP.)
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV for the class 2 engine to verify */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
/*
 * init_sh_desc_key_aead - common shared-descriptor prologue for aead:
 * initialize the header (saving context registers) and load both the
 * authentication (class 2) and encryption (class 1) keys, skipping the
 * key commands entirely when the descriptor is entered shared.
 *
 * For rfc3686, the trailing nonce bytes are split off the encryption key
 * and parked in CONTEXT1 (offset 16) via the output FIFO.
 */
static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip key loading if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		/* Era < 6 needs the pre-computed MDHA split key */
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* Era >= 6: let the Derived Key Protocol split the key */
		append_proto_dkp(desc, adata);
	}

	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load the rfc3686 nonce into CONTEXT1 reg (via the output FIFO) */
	if (is_rfc3686) {
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}
/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
			    int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for all pending transfers before loading the IV */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/*
	 * Read and write assoclen bytes: qi and Era < 3 get assoclen from
	 * REG3 (loaded above / by the job descriptor); otherwise it comes
	 * from the DPOVRD register.
	 */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter (initial value 1) into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @geniv: true when the descriptor decrypts output of a givencrypt-type
 *         operation: the IV sits inline in the sequence and is fed to the
 *         class 2 (authentication) engine from CONTEXT1
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi, int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation, with ICV check enabled for decrypt */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for all pending transfers before loading the IV */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* geniv reads its IV inline later; only load it here if not */
		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/*
	 * Read and write assoclen bytes; for geniv the output length also
	 * covers the ivsize bytes that precede the payload.
	 */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
					CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
					CAAM_CMD_SZ);
	}

	/* Skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		/* Pull the inline IV into CONTEXT1, then authenticate it */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter (initial value 1) into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/*
	 * Choose operation: with a non-zero IV offset (CTR modes) a plain
	 * decrypt is used; otherwise append_dec_op1() handles the AES DK bit.
	 */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV for the class 2 engine to verify */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi, int era)
{
	u32 geniv, moveiv;
	u32 *wait_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for all pending transfers before continuing */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	/* rfc3686: IV is supplied by the caller, not HW-generated */
	if (is_rfc3686) {
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
		goto copy_iv;
	}

	/* Generate IV: random pad entry routed into CONTEXT1 */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV from class 1 context to the output FIFO */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/*
	 * Read and write assoclen bytes: qi and Era < 3 get assoclen from
	 * REG3; otherwise it comes from the DPOVRD register.
	 */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo, so it gets authenticated */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter (initial value 1) into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload iv; skip it in the input sequence */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Wait for IV transfer (ofifo -> class2) to finish before starting
	 * ciphertext transfer (ofifo -> external memory).
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
	set_jump_tgt_here(desc, wait_cmd);

	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for all pending transfers before continuing */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* qi input carries the IV inline; exclude it from the total */
		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
					ivsize);
	} else {
		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
	}

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/*
	 * jump to ICV writing; the immediate is the relative jump distance
	 * in commands, which differs because the qi path has an extra
	 * FIFO load (IV) and jump below.
	 */
	if (is_qi)
		append_jump(desc, JUMP_TEST_ALL | 4);
	else
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
	if (is_qi)
		/* jump to ICV writing */
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
	if (is_qi)
		/* consume the inline IV as the last (and only) class 1 input */
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
				     FIFOLD_TYPE_LAST1);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a CAAM descriptor that decrypts the payload and verifies the GCM
 * ICV (OP_ALG_ICV_ON). Zero-length assoc data and zero-length payload are
 * handled via conditional jumps over the corresponding FIFO commands.
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/*
		 * Wait for the DECO math register load above to settle
		 * before the IV is read (NOTE(review): jump-condition bits
		 * per SEC reference manual - confirm against RM).
		 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Feed the IV to class 1 from the input sequence */
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store decrypted payload */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV; FIFOLD_TYPE_LAST1 closes the class 1 input */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Input sequence: AAD | PTXT
 * Output sequence: AAD | CTXT | ICV
 * AAD length (assoclen), which includes the IV length, is available in Math3.
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *zero_cryptlen_jump_cmd, *skip_instructions;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the assoclen load above to complete */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/*
		 * Read salt and IV - the 4-byte salt is appended to the key
		 * material at key_virt + keylen; the IV comes from the input
		 * sequence.
		 */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* VARSEQINLEN = assoclen - ivsize (assoclen includes the IV length) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip AAD */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Read cryptlen and set this value into VARSEQOUTLEN */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* If cryptlen is ZERO jump to AAD command */
	zero_cryptlen_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					     JUMP_COND_MATH_Z);

	/* Read AAD data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
	append_math_add(desc, VARSEQINLEN, VARSEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Jump instructions to avoid double reading of AAD */
	skip_instructions = append_jump(desc, JUMP_TEST_ALL);

	/* There is no input data, cryptlen = 0 */
	set_jump_tgt_here(desc, zero_cryptlen_jump_cmd);

	/* Read AAD (LAST1 since nothing else follows on class 1 input) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	set_jump_tgt_here(desc, skip_instructions);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Mirror of cnstr_shdsc_rfc4106_encap: decrypts the payload and verifies
 * the ICV (OP_ALG_ICV_ON). AAD length (incl. IV) is expected in Math3.
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Wait for the assoclen load above to complete */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt (appended after the key) and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* VARSEQINLEN = assoclen - ivsize (assoclen includes the IV length) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV; LAST1 closes the class 1 input */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * GMAC authenticates everything (assoc data + payload) but encrypts nothing,
 * so the whole input is fed to the engine as AAD and copied through to the
 * output via a self-patched MOVE command.
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt (appended after the key) and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVE commands below are retargeted further
	 * down via set_move_tgt_here() once their final offsets are known.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Mirror of cnstr_shdsc_rfc4543_encap: the whole input is authenticated
 * (nothing decrypted), snooped into both classes, copied to the output,
 * and the trailing ICV is verified (OP_ALG_ICV_ON).
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt (appended after the key) and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVE commands below are retargeted further
	 * down via set_move_tgt_here() once their final offsets are known.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Re-enable info FIFO entries so the ICV load below is processed */
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV; LAST1 closes the class 1 input */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
/**
 * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
 *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
 *                          descriptor (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
 *         OP_ALG_AAI_AEAD.
 * @adata: pointer to authentication transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
 *         OP_ALG_AAI_AEAD.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @encap: true if encapsulation, false if decapsulation
 * @is_qi: true when called from caam/qi
 *
 * The IPsec (rfc7634) variant is detected by the IV size differing from
 * CHACHAPOLY_IV_SIZE; it additionally loads a 4-byte salt from the key
 * material and copies the IV to the output.
 */
void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool encap,
			    const bool is_qi)
{
	u32 *key_jump_cmd, *wait_cmd;
	u32 nfifo;
	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
			  CLASS_1 | KEY_DEST_CLASS_REG);

	/* For IPsec load the salt from keymat in the context register */
	if (is_ipsec)
		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
				   4 << LDST_OFFSET_SHIFT);

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 and 1 operations: Poly & ChaCha */
	if (encap) {
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
	} else {
		/* ICV is checked in HW on the Poly1305 (class 2) side */
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	}

	if (is_qi) {
		u32 *wait_load_cmd;
		/* IPsec salt occupies the first 4 context bytes (offset 8) */
		u32 ctx1_iv_off = is_ipsec ? 8 : 4;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				4 << LDST_OFFSET_SHIFT);

		/* Wait for the assoclen load above to complete */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				ctx1_iv_off << LDST_OFFSET_SHIFT);
	}

	/*
	 * MAGIC with NFIFO
	 * Read associated data from the input and send them to class1 and
	 * class2 alignment blocks. From class1 send data to output fifo and
	 * then write it to memory since we don't need to encrypt AD.
	 */
	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);

	/* Read and pass through assoclen bytes of AAD */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);

	/* IPsec - copy IV at the output */
	if (is_ipsec)
		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
				      0x2 << 25);

	/* Barrier before switching to the payload phase */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
			       JUMP_COND_NOP | JUMP_TEST_ALL);
	set_jump_tgt_here(desc, wait_cmd);

	if (encap) {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

		/* Write ICV */
		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
				 LDST_SRCDST_BYTE_CONTEXT);
	} else {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

		/* Load ICV for verification */
		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
	}

	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
/*
 * For skcipher encrypt and decrypt, read from req->src and write to req->dst.
 * Both variable sequence lengths are set to the full input length, the
 * whole input is read as message data (LAST1 closes the class 1 input)
 * and the processed result is written back out.
 */
static inline void skcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
  1140. /**
  1141. * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
  1142. * @desc: pointer to buffer used for descriptor construction
  1143. * @cdata: pointer to block cipher transform definitions
  1144. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1145. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
  1146. * - OP_ALG_ALGSEL_CHACHA20
  1147. * @ivsize: initialization vector size
  1148. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1149. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1150. */
  1151. void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
  1152. unsigned int ivsize, const bool is_rfc3686,
  1153. const u32 ctx1_iv_off)
  1154. {
  1155. u32 *key_jump_cmd;
  1156. u32 options = cdata->algtype | OP_ALG_AS_INIT | OP_ALG_ENCRYPT;
  1157. bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
  1158. OP_ALG_ALGSEL_CHACHA20);
  1159. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1160. /* Skip if already shared */
  1161. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1162. JUMP_COND_SHRD);
  1163. /* Load class1 key only */
  1164. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1165. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1166. /* Load nonce into CONTEXT1 reg */
  1167. if (is_rfc3686) {
  1168. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1169. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1170. LDST_CLASS_IND_CCB |
  1171. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1172. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1173. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1174. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1175. }
  1176. set_jump_tgt_here(desc, key_jump_cmd);
  1177. /* Load IV, if there is one */
  1178. if (ivsize)
  1179. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1180. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1181. LDST_OFFSET_SHIFT));
  1182. /* Load counter into CONTEXT1 reg */
  1183. if (is_rfc3686)
  1184. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1185. LDST_SRCDST_BYTE_CONTEXT |
  1186. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1187. LDST_OFFSET_SHIFT));
  1188. /* Load operation */
  1189. if (is_chacha20)
  1190. options |= OP_ALG_AS_FINALIZE;
  1191. append_operation(desc, options);
  1192. /* Perform operation */
  1193. skcipher_append_src_dst(desc);
  1194. /* Store IV */
  1195. if (!is_chacha20 && ivsize)
  1196. append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1197. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1198. LDST_OFFSET_SHIFT));
  1199. print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
  1200. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
  1201. 1);
  1202. }
  1203. EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
  1204. /**
  1205. * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
  1206. * @desc: pointer to buffer used for descriptor construction
  1207. * @cdata: pointer to block cipher transform definitions
  1208. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1209. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
  1210. * - OP_ALG_ALGSEL_CHACHA20
  1211. * @ivsize: initialization vector size
  1212. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1213. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1214. */
  1215. void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
  1216. unsigned int ivsize, const bool is_rfc3686,
  1217. const u32 ctx1_iv_off)
  1218. {
  1219. u32 *key_jump_cmd;
  1220. bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
  1221. OP_ALG_ALGSEL_CHACHA20);
  1222. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1223. /* Skip if already shared */
  1224. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1225. JUMP_COND_SHRD);
  1226. /* Load class1 key only */
  1227. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1228. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1229. /* Load nonce into CONTEXT1 reg */
  1230. if (is_rfc3686) {
  1231. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1232. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1233. LDST_CLASS_IND_CCB |
  1234. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1235. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1236. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1237. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1238. }
  1239. set_jump_tgt_here(desc, key_jump_cmd);
  1240. /* Load IV, if there is one */
  1241. if (ivsize)
  1242. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1243. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1244. LDST_OFFSET_SHIFT));
  1245. /* Load counter into CONTEXT1 reg */
  1246. if (is_rfc3686)
  1247. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1248. LDST_SRCDST_BYTE_CONTEXT |
  1249. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1250. LDST_OFFSET_SHIFT));
  1251. /* Choose operation */
  1252. if (ctx1_iv_off)
  1253. append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
  1254. OP_ALG_DECRYPT);
  1255. else
  1256. append_dec_op1(desc, cdata->algtype);
  1257. /* Perform operation */
  1258. skcipher_append_src_dst(desc);
  1259. /* Store IV */
  1260. if (!is_chacha20 && ivsize)
  1261. append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1262. LDST_CLASS_1_CCB | (ctx1_iv_off <<
  1263. LDST_OFFSET_SHIFT));
  1264. print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
  1265. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
  1266. 1);
  1267. }
  1268. EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
  1269. /**
  1270. * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
  1271. * @desc: pointer to buffer used for descriptor construction
  1272. * @cdata: pointer to block cipher transform definitions
  1273. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1274. */
  1275. void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
  1276. {
  1277. /*
  1278. * Set sector size to a big value, practically disabling
  1279. * sector size segmentation in xts implementation. We cannot
  1280. * take full advantage of this HW feature with existing
  1281. * crypto API / dm-crypt SW architecture.
  1282. */
  1283. __be64 sector_size = cpu_to_be64(BIT(15));
  1284. u32 *key_jump_cmd;
  1285. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1286. /* Skip if already shared */
  1287. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1288. JUMP_COND_SHRD);
  1289. /* Load class1 keys only */
  1290. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1291. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1292. /* Load sector size with index 40 bytes (0x28) */
  1293. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1294. LDST_SRCDST_BYTE_CONTEXT |
  1295. (0x28 << LDST_OFFSET_SHIFT));
  1296. set_jump_tgt_here(desc, key_jump_cmd);
  1297. /*
  1298. * create sequence for loading the sector index
  1299. * Upper 8B of IV - will be used as sector index
  1300. * Lower 8B of IV - will be discarded
  1301. */
  1302. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1303. (0x20 << LDST_OFFSET_SHIFT));
  1304. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1305. /* Load operation */
  1306. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1307. OP_ALG_ENCRYPT);
  1308. /* Perform operation */
  1309. skcipher_append_src_dst(desc);
  1310. /* Store upper 8B of IV */
  1311. append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1312. (0x20 << LDST_OFFSET_SHIFT));
  1313. print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
  1314. ": ", DUMP_PREFIX_ADDRESS, 16, 4,
  1315. desc, desc_bytes(desc), 1);
  1316. }
  1317. EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
  1318. /**
  1319. * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
  1320. * @desc: pointer to buffer used for descriptor construction
  1321. * @cdata: pointer to block cipher transform definitions
  1322. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1323. */
  1324. void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
  1325. {
  1326. /*
  1327. * Set sector size to a big value, practically disabling
  1328. * sector size segmentation in xts implementation. We cannot
  1329. * take full advantage of this HW feature with existing
  1330. * crypto API / dm-crypt SW architecture.
  1331. */
  1332. __be64 sector_size = cpu_to_be64(BIT(15));
  1333. u32 *key_jump_cmd;
  1334. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1335. /* Skip if already shared */
  1336. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1337. JUMP_COND_SHRD);
  1338. /* Load class1 key only */
  1339. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1340. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1341. /* Load sector size with index 40 bytes (0x28) */
  1342. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1343. LDST_SRCDST_BYTE_CONTEXT |
  1344. (0x28 << LDST_OFFSET_SHIFT));
  1345. set_jump_tgt_here(desc, key_jump_cmd);
  1346. /*
  1347. * create sequence for loading the sector index
  1348. * Upper 8B of IV - will be used as sector index
  1349. * Lower 8B of IV - will be discarded
  1350. */
  1351. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1352. (0x20 << LDST_OFFSET_SHIFT));
  1353. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1354. /* Load operation */
  1355. append_dec_op1(desc, cdata->algtype);
  1356. /* Perform operation */
  1357. skcipher_append_src_dst(desc);
  1358. /* Store upper 8B of IV */
  1359. append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1360. (0x20 << LDST_OFFSET_SHIFT));
  1361. print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
  1362. ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
  1363. desc_bytes(desc), 1);
  1364. }
  1365. EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
  1366. MODULE_LICENSE("GPL");
  1367. MODULE_DESCRIPTION("FSL CAAM descriptor support");
  1368. MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");