stm32-cryp.c
  1. /*
  2. * Copyright (C) STMicroelectronics SA 2017
  3. * Author: Fabien Dessenne <fabien.dessenne@st.com>
  4. * License terms: GNU General Public License (GPL), version 2
  5. */
  6. #include <linux/clk.h>
  7. #include <linux/delay.h>
  8. #include <linux/interrupt.h>
  9. #include <linux/iopoll.h>
  10. #include <linux/module.h>
  11. #include <linux/of_device.h>
  12. #include <linux/platform_device.h>
  13. #include <linux/pm_runtime.h>
  14. #include <linux/reset.h>
  15. #include <crypto/aes.h>
  16. #include <crypto/des.h>
  17. #include <crypto/engine.h>
  18. #include <crypto/scatterwalk.h>
  19. #include <crypto/internal/aead.h>
  20. #define DRIVER_NAME "stm32-cryp"
  21. /* Bit [0] encrypt / decrypt */
  22. #define FLG_ENCRYPT BIT(0)
  23. /* Bit [8..1] algo & operation mode */
  24. #define FLG_AES BIT(1)
  25. #define FLG_DES BIT(2)
  26. #define FLG_TDES BIT(3)
  27. #define FLG_ECB BIT(4)
  28. #define FLG_CBC BIT(5)
  29. #define FLG_CTR BIT(6)
  30. #define FLG_GCM BIT(7)
  31. #define FLG_CCM BIT(8)
  32. /* Mode mask = bits [15..0] */
  33. #define FLG_MODE_MASK GENMASK(15, 0)
  34. /* Bit [31..16] status */
  35. #define FLG_CCM_PADDED_WA BIT(16)
  36. /* Registers */
  37. #define CRYP_CR 0x00000000
  38. #define CRYP_SR 0x00000004
  39. #define CRYP_DIN 0x00000008
  40. #define CRYP_DOUT 0x0000000C
  41. #define CRYP_DMACR 0x00000010
  42. #define CRYP_IMSCR 0x00000014
  43. #define CRYP_RISR 0x00000018
  44. #define CRYP_MISR 0x0000001C
  45. #define CRYP_K0LR 0x00000020
  46. #define CRYP_K0RR 0x00000024
  47. #define CRYP_K1LR 0x00000028
  48. #define CRYP_K1RR 0x0000002C
  49. #define CRYP_K2LR 0x00000030
  50. #define CRYP_K2RR 0x00000034
  51. #define CRYP_K3LR 0x00000038
  52. #define CRYP_K3RR 0x0000003C
  53. #define CRYP_IV0LR 0x00000040
  54. #define CRYP_IV0RR 0x00000044
  55. #define CRYP_IV1LR 0x00000048
  56. #define CRYP_IV1RR 0x0000004C
  57. #define CRYP_CSGCMCCM0R 0x00000050
  58. #define CRYP_CSGCM0R 0x00000070
  59. /* Registers values */
  60. #define CR_DEC_NOT_ENC 0x00000004
  61. #define CR_TDES_ECB 0x00000000
  62. #define CR_TDES_CBC 0x00000008
  63. #define CR_DES_ECB 0x00000010
  64. #define CR_DES_CBC 0x00000018
  65. #define CR_AES_ECB 0x00000020
  66. #define CR_AES_CBC 0x00000028
  67. #define CR_AES_CTR 0x00000030
  68. #define CR_AES_KP 0x00000038
  69. #define CR_AES_GCM 0x00080000
  70. #define CR_AES_CCM 0x00080008
  71. #define CR_AES_UNKNOWN 0xFFFFFFFF
  72. #define CR_ALGO_MASK 0x00080038
  73. #define CR_DATA32 0x00000000
  74. #define CR_DATA16 0x00000040
  75. #define CR_DATA8 0x00000080
  76. #define CR_DATA1 0x000000C0
  77. #define CR_KEY128 0x00000000
  78. #define CR_KEY192 0x00000100
  79. #define CR_KEY256 0x00000200
  80. #define CR_FFLUSH 0x00004000
  81. #define CR_CRYPEN 0x00008000
  82. #define CR_PH_INIT 0x00000000
  83. #define CR_PH_HEADER 0x00010000
  84. #define CR_PH_PAYLOAD 0x00020000
  85. #define CR_PH_FINAL 0x00030000
  86. #define CR_PH_MASK 0x00030000
  87. #define CR_NBPBL_SHIFT 20
  88. #define SR_BUSY 0x00000010
  89. #define SR_OFNE 0x00000004
  90. #define IMSCR_IN BIT(0)
  91. #define IMSCR_OUT BIT(1)
  92. #define MISR_IN BIT(0)
  93. #define MISR_OUT BIT(1)
  94. /* Misc */
  95. #define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
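/* GCM payload processing starts with counter value 2: with a 96-bit IV, counter value 1 (J0) is reserved for encrypting the final authentication tag */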
  96. #define GCM_CTR_INIT 2
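/* Bytes already consumed in the current input / output scatterlist entry */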
  97. #define _walked_in (cryp->in_walk.offset - cryp->in_sg->offset)
  98. #define _walked_out (cryp->out_walk.offset - cryp->out_sg->offset)
  99. #define CRYP_AUTOSUSPEND_DELAY 50
  100. struct stm32_cryp_caps {
  101. bool swap_final;
  102. bool padding_wa;
  103. };
  104. struct stm32_cryp_ctx {
  105. struct crypto_engine_ctx enginectx;
  106. struct stm32_cryp *cryp;
  107. int keylen;
  108. u32 key[AES_KEYSIZE_256 / sizeof(u32)];
  109. unsigned long flags;
  110. };
  111. struct stm32_cryp_reqctx {
  112. unsigned long mode;
  113. };
  114. struct stm32_cryp {
  115. struct list_head list;
  116. struct device *dev;
  117. void __iomem *regs;
  118. struct clk *clk;
  119. unsigned long flags;
  120. u32 irq_status;
  121. const struct stm32_cryp_caps *caps;
  122. struct stm32_cryp_ctx *ctx;
  123. struct crypto_engine *engine;
  124. struct mutex lock; /* protects req / areq */
  125. struct ablkcipher_request *req;
  126. struct aead_request *areq;
  127. size_t authsize;
  128. size_t hw_blocksize;
  129. size_t total_in;
  130. size_t total_in_save;
  131. size_t total_out;
  132. size_t total_out_save;
  133. struct scatterlist *in_sg;
  134. struct scatterlist *out_sg;
  135. struct scatterlist *out_sg_save;
  136. struct scatterlist in_sgl;
  137. struct scatterlist out_sgl;
  138. bool sgs_copied;
  139. int in_sg_len;
  140. int out_sg_len;
  141. struct scatter_walk in_walk;
  142. struct scatter_walk out_walk;
  143. u32 last_ctr[4];
  144. u32 gcm_ctr;
  145. };
  146. struct stm32_cryp_list {
  147. struct list_head dev_list;
  148. spinlock_t lock; /* protect dev_list */
  149. };
  150. static struct stm32_cryp_list cryp_list = {
  151. .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
  152. .lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
  153. };
  154. static inline bool is_aes(struct stm32_cryp *cryp)
  155. {
  156. return cryp->flags & FLG_AES;
  157. }
  158. static inline bool is_des(struct stm32_cryp *cryp)
  159. {
  160. return cryp->flags & FLG_DES;
  161. }
  162. static inline bool is_tdes(struct stm32_cryp *cryp)
  163. {
  164. return cryp->flags & FLG_TDES;
  165. }
  166. static inline bool is_ecb(struct stm32_cryp *cryp)
  167. {
  168. return cryp->flags & FLG_ECB;
  169. }
  170. static inline bool is_cbc(struct stm32_cryp *cryp)
  171. {
  172. return cryp->flags & FLG_CBC;
  173. }
  174. static inline bool is_ctr(struct stm32_cryp *cryp)
  175. {
  176. return cryp->flags & FLG_CTR;
  177. }
  178. static inline bool is_gcm(struct stm32_cryp *cryp)
  179. {
  180. return cryp->flags & FLG_GCM;
  181. }
  182. static inline bool is_ccm(struct stm32_cryp *cryp)
  183. {
  184. return cryp->flags & FLG_CCM;
  185. }
  186. static inline bool is_encrypt(struct stm32_cryp *cryp)
  187. {
  188. return cryp->flags & FLG_ENCRYPT;
  189. }
  190. static inline bool is_decrypt(struct stm32_cryp *cryp)
  191. {
  192. return !is_encrypt(cryp);
  193. }
  194. static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
  195. {
  196. return readl_relaxed(cryp->regs + ofst);
  197. }
  198. static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
  199. {
  200. writel_relaxed(val, cryp->regs + ofst);
  201. }
  202. static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
  203. {
  204. u32 status;
  205. return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
  206. !(status & SR_BUSY), 10, 100000);
  207. }
  208. static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
  209. {
  210. u32 status;
  211. return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
  212. !(status & CR_CRYPEN), 10, 100000);
  213. }
  214. static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
  215. {
  216. u32 status;
  217. return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
  218. status & SR_OFNE, 10, 100000);
  219. }
  220. static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
  221. static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
  222. {
  223. struct stm32_cryp *tmp, *cryp = NULL;
  224. spin_lock_bh(&cryp_list.lock);
  225. if (!ctx->cryp) {
  226. list_for_each_entry(tmp, &cryp_list.dev_list, list) {
  227. cryp = tmp;
  228. break;
  229. }
  230. ctx->cryp = cryp;
  231. } else {
  232. cryp = ctx->cryp;
  233. }
  234. spin_unlock_bh(&cryp_list.lock);
  235. return cryp;
  236. }
  237. static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
  238. size_t align)
  239. {
  240. int len = 0;
  241. if (!total)
  242. return 0;
  243. if (!IS_ALIGNED(total, align))
  244. return -EINVAL;
  245. while (sg) {
  246. if (!IS_ALIGNED(sg->offset, sizeof(u32)))
  247. return -EINVAL;
  248. if (!IS_ALIGNED(sg->length, align))
  249. return -EINVAL;
  250. len += sg->length;
  251. sg = sg_next(sg);
  252. }
  253. if (len != total)
  254. return -EINVAL;
  255. return 0;
  256. }
  257. static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
  258. {
  259. int ret;
  260. ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
  261. cryp->hw_blocksize);
  262. if (ret)
  263. return ret;
  264. ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
  265. cryp->hw_blocksize);
  266. return ret;
  267. }
  268. static void sg_copy_buf(void *buf, struct scatterlist *sg,
  269. unsigned int start, unsigned int nbytes, int out)
  270. {
  271. struct scatter_walk walk;
  272. if (!nbytes)
  273. return;
  274. scatterwalk_start(&walk, sg);
  275. scatterwalk_advance(&walk, start);
  276. scatterwalk_copychunks(buf, &walk, nbytes, out);
  277. scatterwalk_done(&walk, out, 0);
  278. }
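/* Bounce unaligned requests through linear buffers so the FIFO can always be accessed in whole 32-bit words; stm32_cryp_finish_req() copies the result back to the original scatterlist and frees the buffers */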
  279. static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
  280. {
  281. void *buf_in, *buf_out;
  282. int pages, total_in, total_out;
  283. if (!stm32_cryp_check_io_aligned(cryp)) {
  284. cryp->sgs_copied = 0;
  285. return 0;
  286. }
  287. total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
  288. pages = total_in ? get_order(total_in) : 1;
  289. buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);
  290. total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
  291. pages = total_out ? get_order(total_out) : 1;
  292. buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);
  293. if (!buf_in || !buf_out) {
  294. dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
  295. cryp->sgs_copied = 0;
  296. return -EFAULT;
  297. }
  298. sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);
  299. sg_init_one(&cryp->in_sgl, buf_in, total_in);
  300. cryp->in_sg = &cryp->in_sgl;
  301. cryp->in_sg_len = 1;
  302. sg_init_one(&cryp->out_sgl, buf_out, total_out);
  303. cryp->out_sg_save = cryp->out_sg;
  304. cryp->out_sg = &cryp->out_sgl;
  305. cryp->out_sg_len = 1;
  306. cryp->sgs_copied = 1;
  307. return 0;
  308. }
  309. static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
  310. {
  311. if (!iv)
  312. return;
  313. stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
  314. stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));
  315. if (is_aes(cryp)) {
  316. stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
  317. stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
  318. }
  319. }
  320. static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
  321. {
  322. unsigned int i;
  323. int r_id;
  324. if (is_des(c)) {
  325. stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
  326. stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
  327. } else {
  328. r_id = CRYP_K3RR;
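/* Write the key one 32-bit word at a time, last word first into K3RR, stepping back one register (4 bytes) per word so shorter keys end up right-aligned */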
  329. for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
  330. stm32_cryp_write(c, r_id,
  331. cpu_to_be32(c->ctx->key[i - 1]));
  332. }
  333. }
  334. static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
  335. {
  336. if (is_aes(cryp) && is_ecb(cryp))
  337. return CR_AES_ECB;
  338. if (is_aes(cryp) && is_cbc(cryp))
  339. return CR_AES_CBC;
  340. if (is_aes(cryp) && is_ctr(cryp))
  341. return CR_AES_CTR;
  342. if (is_aes(cryp) && is_gcm(cryp))
  343. return CR_AES_GCM;
  344. if (is_aes(cryp) && is_ccm(cryp))
  345. return CR_AES_CCM;
  346. if (is_des(cryp) && is_ecb(cryp))
  347. return CR_DES_ECB;
  348. if (is_des(cryp) && is_cbc(cryp))
  349. return CR_DES_CBC;
  350. if (is_tdes(cryp) && is_ecb(cryp))
  351. return CR_TDES_ECB;
  352. if (is_tdes(cryp) && is_cbc(cryp))
  353. return CR_TDES_CBC;
  354. dev_err(cryp->dev, "Unknown mode\n");
  355. return CR_AES_UNKNOWN;
  356. }
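/* Length of the text to process: on decryption ->cryptlen also covers the authentication tag, which must not be counted as input text */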
  357. static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
  358. {
  359. return is_encrypt(cryp) ? cryp->areq->cryptlen :
  360. cryp->areq->cryptlen - cryp->authsize;
  361. }
  362. static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
  363. {
  364. int ret;
  365. u32 iv[4];
  366. /* Phase 1 : init */
  367. memcpy(iv, cryp->areq->iv, 12);
  368. iv[3] = cpu_to_be32(GCM_CTR_INIT);
  369. cryp->gcm_ctr = GCM_CTR_INIT;
  370. stm32_cryp_hw_write_iv(cryp, iv);
  371. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
  372. /* Wait for end of processing */
  373. ret = stm32_cryp_wait_enable(cryp);
  374. if (ret)
  375. dev_err(cryp->dev, "Timeout (gcm init)\n");
  376. return ret;
  377. }
  378. static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
  379. {
  380. int ret;
  381. u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
  382. u32 *d;
  383. unsigned int i, textlen;
  384. /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
  385. memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
  386. memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
  387. iv[AES_BLOCK_SIZE - 1] = 1;
  388. stm32_cryp_hw_write_iv(cryp, (u32 *)iv);
  389. /* Build B0 */
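/* B0 flags byte: bit 6 = Adata present, bits 5..3 = (authsize - 2) / 2, bits 2..0 = L - 1 (already present in iv[0]); the last two bytes hold the message length */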
  390. memcpy(b0, iv, AES_BLOCK_SIZE);
  391. b0[0] |= (8 * ((cryp->authsize - 2) / 2));
  392. if (cryp->areq->assoclen)
  393. b0[0] |= 0x40;
  394. textlen = stm32_cryp_get_input_text_len(cryp);
  395. b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
  396. b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;
  397. /* Enable HW */
  398. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
  399. /* Write B0 */
  400. d = (u32 *)b0;
  401. for (i = 0; i < AES_BLOCK_32; i++) {
  402. if (!cryp->caps->padding_wa)
  403. *d = cpu_to_be32(*d);
  404. stm32_cryp_write(cryp, CRYP_DIN, *d++);
  405. }
  406. /* Wait for end of processing */
  407. ret = stm32_cryp_wait_enable(cryp);
  408. if (ret)
  409. dev_err(cryp->dev, "Timeout (ccm init)\n");
  410. return ret;
  411. }
  412. static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
  413. {
  414. int ret;
  415. u32 cfg, hw_mode;
  416. pm_runtime_get_sync(cryp->dev);
  417. /* Disable interrupt */
  418. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  419. /* Set key */
  420. stm32_cryp_hw_write_key(cryp);
  421. /* Set configuration */
  422. cfg = CR_DATA8 | CR_FFLUSH;
  423. switch (cryp->ctx->keylen) {
  424. case AES_KEYSIZE_128:
  425. cfg |= CR_KEY128;
  426. break;
  427. case AES_KEYSIZE_192:
  428. cfg |= CR_KEY192;
  429. break;
  430. default:
  431. case AES_KEYSIZE_256:
  432. cfg |= CR_KEY256;
  433. break;
  434. }
  435. hw_mode = stm32_cryp_get_hw_mode(cryp);
  436. if (hw_mode == CR_AES_UNKNOWN)
  437. return -EINVAL;
  438. /* AES ECB/CBC decrypt: run key preparation first */
  439. if (is_decrypt(cryp) &&
  440. ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
  441. stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);
  442. /* Wait for end of processing */
  443. ret = stm32_cryp_wait_busy(cryp);
  444. if (ret) {
  445. dev_err(cryp->dev, "Timeout (key preparation)\n");
  446. return ret;
  447. }
  448. }
  449. cfg |= hw_mode;
  450. if (is_decrypt(cryp))
  451. cfg |= CR_DEC_NOT_ENC;
  452. /* Apply config and flush (valid when CRYPEN = 0) */
  453. stm32_cryp_write(cryp, CRYP_CR, cfg);
  454. switch (hw_mode) {
  455. case CR_AES_GCM:
  456. case CR_AES_CCM:
  457. /* Phase 1 : init */
  458. if (hw_mode == CR_AES_CCM)
  459. ret = stm32_cryp_ccm_init(cryp, cfg);
  460. else
  461. ret = stm32_cryp_gcm_init(cryp, cfg);
  462. if (ret)
  463. return ret;
  464. /* Phase 2 : header (authenticated data) */
  465. if (cryp->areq->assoclen) {
  466. cfg |= CR_PH_HEADER;
  467. } else if (stm32_cryp_get_input_text_len(cryp)) {
  468. cfg |= CR_PH_PAYLOAD;
  469. stm32_cryp_write(cryp, CRYP_CR, cfg);
  470. } else {
  471. cfg |= CR_PH_INIT;
  472. }
  473. break;
  474. case CR_DES_CBC:
  475. case CR_TDES_CBC:
  476. case CR_AES_CBC:
  477. case CR_AES_CTR:
  478. stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
  479. break;
  480. default:
  481. break;
  482. }
  483. /* Enable now */
  484. cfg |= CR_CRYPEN;
  485. stm32_cryp_write(cryp, CRYP_CR, cfg);
  486. cryp->flags &= ~FLG_CCM_PADDED_WA;
  487. return 0;
  488. }
  489. static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
  490. {
  491. if (!err && (is_gcm(cryp) || is_ccm(cryp)))
  492. /* Phase 4 : output tag */
  493. err = stm32_cryp_read_auth_tag(cryp);
  494. if (cryp->sgs_copied) {
  495. void *buf_in, *buf_out;
  496. int pages, len;
  497. buf_in = sg_virt(&cryp->in_sgl);
  498. buf_out = sg_virt(&cryp->out_sgl);
  499. sg_copy_buf(buf_out, cryp->out_sg_save, 0,
  500. cryp->total_out_save, 1);
  501. len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
  502. pages = len ? get_order(len) : 1;
  503. free_pages((unsigned long)buf_in, pages);
  504. len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
  505. pages = len ? get_order(len) : 1;
  506. free_pages((unsigned long)buf_out, pages);
  507. }
  508. pm_runtime_mark_last_busy(cryp->dev);
  509. pm_runtime_put_autosuspend(cryp->dev);
  510. if (is_gcm(cryp) || is_ccm(cryp)) {
  511. crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
  512. cryp->areq = NULL;
  513. } else {
  514. crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
  515. err);
  516. cryp->req = NULL;
  517. }
  518. memset(cryp->ctx->key, 0, cryp->ctx->keylen);
  519. mutex_unlock(&cryp->lock);
  520. }
  521. static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
  522. {
  523. /* Enable interrupt and let the IRQ handler do everything */
  524. stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);
  525. return 0;
  526. }
  527. static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
  528. static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
  529. void *areq);
  530. static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
  531. {
  532. struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);
  533. tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);
  534. ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
  535. ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
  536. ctx->enginectx.op.unprepare_request = NULL;
  537. return 0;
  538. }
  539. static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
  540. static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
  541. void *areq);
  542. static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
  543. {
  544. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  545. tfm->reqsize = sizeof(struct stm32_cryp_reqctx);
  546. ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
  547. ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
  548. ctx->enginectx.op.unprepare_request = NULL;
  549. return 0;
  550. }
  551. static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
  552. {
  553. struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
  554. crypto_ablkcipher_reqtfm(req));
  555. struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
  556. struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
  557. if (!cryp)
  558. return -ENODEV;
  559. rctx->mode = mode;
  560. return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
  561. }
  562. static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
  563. {
  564. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
  565. struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
  566. struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
  567. if (!cryp)
  568. return -ENODEV;
  569. rctx->mode = mode;
  570. return crypto_transfer_aead_request_to_engine(cryp->engine, req);
  571. }
  572. static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
  573. unsigned int keylen)
  574. {
  575. struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
  576. memcpy(ctx->key, key, keylen);
  577. ctx->keylen = keylen;
  578. return 0;
  579. }
  580. static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
  581. unsigned int keylen)
  582. {
  583. if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
  584. keylen != AES_KEYSIZE_256)
  585. return -EINVAL;
  586. else
  587. return stm32_cryp_setkey(tfm, key, keylen);
  588. }
  589. static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
  590. unsigned int keylen)
  591. {
  592. if (keylen != DES_KEY_SIZE)
  593. return -EINVAL;
  594. else
  595. return stm32_cryp_setkey(tfm, key, keylen);
  596. }
  597. static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
  598. unsigned int keylen)
  599. {
  600. if (keylen != (3 * DES_KEY_SIZE))
  601. return -EINVAL;
  602. else
  603. return stm32_cryp_setkey(tfm, key, keylen);
  604. }
  605. static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
  606. unsigned int keylen)
  607. {
  608. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
  609. if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
  610. keylen != AES_KEYSIZE_256)
  611. return -EINVAL;
  612. memcpy(ctx->key, key, keylen);
  613. ctx->keylen = keylen;
  614. return 0;
  615. }
  616. static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
  617. unsigned int authsize)
  618. {
  619. return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
  620. }
  621. static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
  622. unsigned int authsize)
  623. {
  624. switch (authsize) {
  625. case 4:
  626. case 6:
  627. case 8:
  628. case 10:
  629. case 12:
  630. case 14:
  631. case 16:
  632. break;
  633. default:
  634. return -EINVAL;
  635. }
  636. return 0;
  637. }
  638. static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
  639. {
  640. return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
  641. }
  642. static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
  643. {
  644. return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
  645. }
  646. static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
  647. {
  648. return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
  649. }
  650. static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
  651. {
  652. return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
  653. }
  654. static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
  655. {
  656. return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
  657. }
  658. static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
  659. {
  660. return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
  661. }
  662. static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
  663. {
  664. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
  665. }
  666. static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
  667. {
  668. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
  669. }
  670. static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
  671. {
  672. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
  673. }
  674. static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
  675. {
  676. return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
  677. }
  678. static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
  679. {
  680. return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
  681. }
  682. static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
  683. {
  684. return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
  685. }
  686. static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
  687. {
  688. return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
  689. }
  690. static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
  691. {
  692. return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
  693. }
  694. static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
  695. {
  696. return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
  697. }
  698. static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
  699. {
  700. return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
  701. }
  702. static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
  703. {
  704. return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
  705. }
  706. static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
  707. {
  708. return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
  709. }
  710. static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
  711. struct aead_request *areq)
  712. {
  713. struct stm32_cryp_ctx *ctx;
  714. struct stm32_cryp *cryp;
  715. struct stm32_cryp_reqctx *rctx;
  716. int ret;
  717. if (!req && !areq)
  718. return -EINVAL;
  719. ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
  720. crypto_aead_ctx(crypto_aead_reqtfm(areq));
  721. cryp = ctx->cryp;
  722. if (!cryp)
  723. return -ENODEV;
  724. mutex_lock(&cryp->lock);
  725. rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
  726. rctx->mode &= FLG_MODE_MASK;
  727. ctx->cryp = cryp;
  728. cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
  729. cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
  730. cryp->ctx = ctx;
  731. if (req) {
  732. cryp->req = req;
  733. cryp->total_in = req->nbytes;
  734. cryp->total_out = cryp->total_in;
  735. } else {
  736. /*
  737. * Length of input and output data:
  738. * Encryption case:
  739. * INPUT = AssocData || PlainText
  740. * <- assoclen -> <- cryptlen ->
  741. * <------- total_in ----------->
  742. *
  743. * OUTPUT = AssocData || CipherText || AuthTag
  744. * <- assoclen -> <- cryptlen -> <- authsize ->
  745. * <---------------- total_out ----------------->
  746. *
  747. * Decryption case:
  748. * INPUT = AssocData || CipherText || AuthTag
  749. * <- assoclen -> <--------- cryptlen --------->
  750. * <- authsize ->
  751. * <---------------- total_in ------------------>
  752. *
  753. * OUTPUT = AssocData || PlainText
  754. * <- assoclen -> <- cryptlen - authsize ->
  755. * <---------- total_out ----------------->
  756. */
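/* Example: assoclen = 16, cryptlen = 32, authsize = 16 gives total_in = 48 and total_out = 64 for encryption; for decryption cryptlen already includes the 16-byte tag, so total_in = 48 and total_out = 32 */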
  757. cryp->areq = areq;
  758. cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
  759. cryp->total_in = areq->assoclen + areq->cryptlen;
  760. if (is_encrypt(cryp))
  761. /* Append auth tag to output */
  762. cryp->total_out = cryp->total_in + cryp->authsize;
  763. else
  764. /* No auth tag in output */
  765. cryp->total_out = cryp->total_in - cryp->authsize;
  766. }
  767. cryp->total_in_save = cryp->total_in;
  768. cryp->total_out_save = cryp->total_out;
  769. cryp->in_sg = req ? req->src : areq->src;
  770. cryp->out_sg = req ? req->dst : areq->dst;
  771. cryp->out_sg_save = cryp->out_sg;
  772. cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
  773. if (cryp->in_sg_len < 0) {
  774. dev_err(cryp->dev, "Cannot get in_sg_len\n");
  775. ret = cryp->in_sg_len;
  776. goto out;
  777. }
  778. cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
  779. if (cryp->out_sg_len < 0) {
  780. dev_err(cryp->dev, "Cannot get out_sg_len\n");
  781. ret = cryp->out_sg_len;
  782. goto out;
  783. }
  784. ret = stm32_cryp_copy_sgs(cryp);
  785. if (ret)
  786. goto out;
  787. scatterwalk_start(&cryp->in_walk, cryp->in_sg);
  788. scatterwalk_start(&cryp->out_walk, cryp->out_sg);
  789. if (is_gcm(cryp) || is_ccm(cryp)) {
  790. /* In output, jump after assoc data */
  791. scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
  792. cryp->total_out -= cryp->areq->assoclen;
  793. }
  794. ret = stm32_cryp_hw_init(cryp);
  795. out:
  796. if (ret)
  797. mutex_unlock(&cryp->lock);
  798. return ret;
  799. }
  800. static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
  801. void *areq)
  802. {
  803. struct ablkcipher_request *req = container_of(areq,
  804. struct ablkcipher_request,
  805. base);
  806. return stm32_cryp_prepare_req(req, NULL);
  807. }
  808. static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
  809. {
  810. struct ablkcipher_request *req = container_of(areq,
  811. struct ablkcipher_request,
  812. base);
  813. struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
  814. crypto_ablkcipher_reqtfm(req));
  815. struct stm32_cryp *cryp = ctx->cryp;
  816. if (!cryp)
  817. return -ENODEV;
  818. return stm32_cryp_cpu_start(cryp);
  819. }
  820. static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
  821. {
  822. struct aead_request *req = container_of(areq, struct aead_request,
  823. base);
  824. return stm32_cryp_prepare_req(NULL, req);
  825. }
  826. static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
  827. {
  828. struct aead_request *req = container_of(areq, struct aead_request,
  829. base);
  830. struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
  831. struct stm32_cryp *cryp = ctx->cryp;
  832. if (!cryp)
  833. return -ENODEV;
  834. if (unlikely(!cryp->areq->assoclen &&
  835. !stm32_cryp_get_input_text_len(cryp))) {
  836. /* No input data to process: get tag and finish */
  837. stm32_cryp_finish_req(cryp, 0);
  838. return 0;
  839. }
  840. return stm32_cryp_cpu_start(cryp);
  841. }
  842. static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
  843. unsigned int n)
  844. {
  845. scatterwalk_advance(&cryp->out_walk, n);
  846. if (unlikely(cryp->out_sg->length == _walked_out)) {
  847. cryp->out_sg = sg_next(cryp->out_sg);
  848. if (cryp->out_sg) {
  849. scatterwalk_start(&cryp->out_walk, cryp->out_sg);
  850. return (sg_virt(cryp->out_sg) + _walked_out);
  851. }
  852. }
  853. return (u32 *)((u8 *)dst + n);
  854. }
  855. static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
  856. unsigned int n)
  857. {
  858. scatterwalk_advance(&cryp->in_walk, n);
  859. if (unlikely(cryp->in_sg->length == _walked_in)) {
  860. cryp->in_sg = sg_next(cryp->in_sg);
  861. if (cryp->in_sg) {
  862. scatterwalk_start(&cryp->in_walk, cryp->in_sg);
  863. return (sg_virt(cryp->in_sg) + _walked_in);
  864. }
  865. }
  866. return (u32 *)((u8 *)src + n);
  867. }
  868. static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
  869. {
  870. u32 cfg, size_bit, *dst, d32;
  871. u8 *d8;
  872. unsigned int i, j;
  873. int ret = 0;
  874. /* Update Config */
  875. cfg = stm32_cryp_read(cryp, CRYP_CR);
  876. cfg &= ~CR_PH_MASK;
  877. cfg |= CR_PH_FINAL;
  878. cfg &= ~CR_DEC_NOT_ENC;
  879. cfg |= CR_CRYPEN;
  880. stm32_cryp_write(cryp, CRYP_CR, cfg);
  881. if (is_gcm(cryp)) {
  882. /* GCM: write aad and payload size (in bits) */
  883. size_bit = cryp->areq->assoclen * 8;
  884. if (cryp->caps->swap_final)
  885. size_bit = cpu_to_be32(size_bit);
  886. stm32_cryp_write(cryp, CRYP_DIN, 0);
  887. stm32_cryp_write(cryp, CRYP_DIN, size_bit);
  888. size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
  889. cryp->areq->cryptlen - AES_BLOCK_SIZE;
  890. size_bit *= 8;
  891. if (cryp->caps->swap_final)
  892. size_bit = cpu_to_be32(size_bit);
  893. stm32_cryp_write(cryp, CRYP_DIN, 0);
  894. stm32_cryp_write(cryp, CRYP_DIN, size_bit);
  895. } else {
  896. /* CCM: write CTR0 */
  897. u8 iv[AES_BLOCK_SIZE];
  898. u32 *iv32 = (u32 *)iv;
  899. memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
  900. memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
  901. for (i = 0; i < AES_BLOCK_32; i++) {
  902. if (!cryp->caps->padding_wa)
  903. *iv32 = cpu_to_be32(*iv32);
  904. stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
  905. }
  906. }
  907. /* Wait for output data */
  908. ret = stm32_cryp_wait_output(cryp);
  909. if (ret) {
  910. dev_err(cryp->dev, "Timeout (read tag)\n");
  911. return ret;
  912. }
  913. if (is_encrypt(cryp)) {
  914. /* Get and write tag */
  915. dst = sg_virt(cryp->out_sg) + _walked_out;
  916. for (i = 0; i < AES_BLOCK_32; i++) {
  917. if (cryp->total_out >= sizeof(u32)) {
  918. /* Read a full u32 */
  919. *dst = stm32_cryp_read(cryp, CRYP_DOUT);
  920. dst = stm32_cryp_next_out(cryp, dst,
  921. sizeof(u32));
  922. cryp->total_out -= sizeof(u32);
  923. } else if (!cryp->total_out) {
  924. /* Empty fifo out (data from input padding) */
  925. stm32_cryp_read(cryp, CRYP_DOUT);
  926. } else {
  927. /* Read less than a u32 */
  928. d32 = stm32_cryp_read(cryp, CRYP_DOUT);
  929. d8 = (u8 *)&d32;
  930. for (j = 0; j < cryp->total_out; j++) {
  931. *((u8 *)dst) = *(d8++);
  932. dst = stm32_cryp_next_out(cryp, dst, 1);
  933. }
  934. cryp->total_out = 0;
  935. }
  936. }
  937. } else {
  938. /* Get and check tag */
  939. u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
  940. scatterwalk_map_and_copy(in_tag, cryp->in_sg,
  941. cryp->total_in_save - cryp->authsize,
  942. cryp->authsize, 0);
  943. for (i = 0; i < AES_BLOCK_32; i++)
  944. out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
  945. if (crypto_memneq(in_tag, out_tag, cryp->authsize))
  946. ret = -EBADMSG;
  947. }
  948. /* Disable cryp */
  949. cfg &= ~CR_CRYPEN;
  950. stm32_cryp_write(cryp, CRYP_CR, cfg);
  951. return ret;
  952. }
  953. static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
  954. {
  955. u32 cr;
  956. if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
  957. cryp->last_ctr[3] = 0;
  958. cryp->last_ctr[2]++;
  959. if (!cryp->last_ctr[2]) {
  960. cryp->last_ctr[1]++;
  961. if (!cryp->last_ctr[1])
  962. cryp->last_ctr[0]++;
  963. }
  964. cr = stm32_cryp_read(cryp, CRYP_CR);
  965. stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);
  966. stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);
  967. stm32_cryp_write(cryp, CRYP_CR, cr);
  968. }
  969. cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
  970. cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
  971. cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
  972. cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
  973. }
  974. static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
  975. {
  976. unsigned int i, j;
  977. u32 d32, *dst;
  978. u8 *d8;
  979. size_t tag_size;
  980. /* Do not read tag now (if any) */
  981. if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
  982. tag_size = cryp->authsize;
  983. else
  984. tag_size = 0;
  985. dst = sg_virt(cryp->out_sg) + _walked_out;
  986. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
  987. if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
  988. /* Read a full u32 */
  989. *dst = stm32_cryp_read(cryp, CRYP_DOUT);
  990. dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
  991. cryp->total_out -= sizeof(u32);
  992. } else if (cryp->total_out == tag_size) {
  993. /* Empty fifo out (data from input padding) */
  994. d32 = stm32_cryp_read(cryp, CRYP_DOUT);
  995. } else {
  996. /* Read less than a u32 */
  997. d32 = stm32_cryp_read(cryp, CRYP_DOUT);
  998. d8 = (u8 *)&d32;
  999. for (j = 0; j < cryp->total_out - tag_size; j++) {
  1000. *((u8 *)dst) = *(d8++);
  1001. dst = stm32_cryp_next_out(cryp, dst, 1);
  1002. }
  1003. cryp->total_out = tag_size;
  1004. }
  1005. }
  1006. return !(cryp->total_out - tag_size) || !cryp->total_in;
  1007. }
  1008. static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
  1009. {
  1010. unsigned int i, j;
  1011. u32 *src;
  1012. u8 d8[4];
  1013. size_t tag_size;
  1014. /* Do not write tag (if any) */
  1015. if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
  1016. tag_size = cryp->authsize;
  1017. else
  1018. tag_size = 0;
  1019. src = sg_virt(cryp->in_sg) + _walked_in;
  1020. for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
  1021. if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
  1022. /* Write a full u32 */
  1023. stm32_cryp_write(cryp, CRYP_DIN, *src);
  1024. src = stm32_cryp_next_in(cryp, src, sizeof(u32));
  1025. cryp->total_in -= sizeof(u32);
  1026. } else if (cryp->total_in == tag_size) {
  1027. /* Write padding data */
  1028. stm32_cryp_write(cryp, CRYP_DIN, 0);
  1029. } else {
  1030. /* Write less than a u32 */
  1031. memset(d8, 0, sizeof(u32));
  1032. for (j = 0; j < cryp->total_in - tag_size; j++) {
  1033. d8[j] = *((u8 *)src);
  1034. src = stm32_cryp_next_in(cryp, src, 1);
  1035. }
  1036. stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
  1037. cryp->total_in = tag_size;
  1038. }
  1039. }
  1040. }
  1041. static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
  1042. {
  1043. int err;
  1044. u32 cfg, tmp[AES_BLOCK_32];
  1045. size_t total_in_ori = cryp->total_in;
  1046. struct scatterlist *out_sg_ori = cryp->out_sg;
  1047. unsigned int i;
  1048. /* 'Special workaround' procedure described in the datasheet */
  1049. /* a) disable ip */
  1050. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1051. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1052. cfg &= ~CR_CRYPEN;
  1053. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1054. /* b) Update IV1R */
  1055. stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);
  1056. /* c) change mode to CTR */
  1057. cfg &= ~CR_ALGO_MASK;
  1058. cfg |= CR_AES_CTR;
  1059. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1060. /* a) enable IP */
  1061. cfg |= CR_CRYPEN;
  1062. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1063. /* b) pad and write the last block */
  1064. stm32_cryp_irq_write_block(cryp);
  1065. cryp->total_in = total_in_ori;
  1066. err = stm32_cryp_wait_output(cryp);
  1067. if (err) {
  1068. dev_err(cryp->dev, "Timeout (write gcm header)\n");
  1069. return stm32_cryp_finish_req(cryp, err);
  1070. }
  1071. /* c) get and store encrypted data */
  1072. stm32_cryp_irq_read_data(cryp);
  1073. scatterwalk_map_and_copy(tmp, out_sg_ori,
  1074. cryp->total_in_save - total_in_ori,
  1075. total_in_ori, 0);
  1076. /* d) change mode back to AES GCM */
  1077. cfg &= ~CR_ALGO_MASK;
  1078. cfg |= CR_AES_GCM;
  1079. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1080. /* e) change phase to Final */
  1081. cfg &= ~CR_PH_MASK;
  1082. cfg |= CR_PH_FINAL;
  1083. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1084. /* f) write padded data */
  1085. for (i = 0; i < AES_BLOCK_32; i++) {
  1086. if (cryp->total_in)
  1087. stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
  1088. else
  1089. stm32_cryp_write(cryp, CRYP_DIN, 0);
  1090. cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
  1091. }
  1092. /* g) Empty fifo out */
  1093. err = stm32_cryp_wait_output(cryp);
  1094. if (err) {
  1095. dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
  1096. return stm32_cryp_finish_req(cryp, err);
  1097. }
  1098. for (i = 0; i < AES_BLOCK_32; i++)
  1099. stm32_cryp_read(cryp, CRYP_DOUT);
  1100. /* h) run the normal Final phase */
  1101. stm32_cryp_finish_req(cryp, 0);
  1102. }
  1103. static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
  1104. {
  1105. u32 cfg, payload_bytes;
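/* NPBLB = number of padding bytes in the last block, i.e. the block size minus the payload bytes that remain to be processed */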
  1106. /* disable ip, set NPBLB and re-enable ip */
  1107. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1108. cfg &= ~CR_CRYPEN;
  1109. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1110. payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
  1111. cryp->total_in;
  1112. cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
  1113. cfg |= CR_CRYPEN;
  1114. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1115. }
  1116. static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
  1117. {
  1118. int err = 0;
  1119. u32 cfg, iv1tmp;
  1120. u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
  1121. size_t last_total_out, total_in_ori = cryp->total_in;
  1122. struct scatterlist *out_sg_ori = cryp->out_sg;
  1123. unsigned int i;
  1124. /* 'Special workaround' procedure described in the datasheet */
  1125. cryp->flags |= FLG_CCM_PADDED_WA;
  1126. /* a) disable ip */
  1127. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1128. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1129. cfg &= ~CR_CRYPEN;
  1130. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1131. /* b) get IV1 from CRYP_CSGCMCCM7 */
  1132. iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);
  1133. /* c) Load CRYP_CSGCMCCMxR */
  1134. for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
  1135. cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
  1136. /* d) Write IV1R */
  1137. stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);
  1138. /* e) change mode to CTR */
  1139. cfg &= ~CR_ALGO_MASK;
  1140. cfg |= CR_AES_CTR;
  1141. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1142. /* a) enable IP */
  1143. cfg |= CR_CRYPEN;
  1144. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1145. /* b) pad and write the last block */
  1146. stm32_cryp_irq_write_block(cryp);
  1147. cryp->total_in = total_in_ori;
  1148. err = stm32_cryp_wait_output(cryp);
  1149. if (err) {
  1150. dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
  1151. return stm32_cryp_finish_req(cryp, err);
  1152. }
  1153. /* c) get and store decrypted data */
  1154. last_total_out = cryp->total_out;
  1155. stm32_cryp_irq_read_data(cryp);
  1156. memset(tmp, 0, sizeof(tmp));
  1157. scatterwalk_map_and_copy(tmp, out_sg_ori,
  1158. cryp->total_out_save - last_total_out,
  1159. last_total_out, 0);
  1160. /* d) Load again CRYP_CSGCMCCMxR */
  1161. for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
  1162. cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
  1163. /* e) change mode back to AES CCM */
  1164. cfg &= ~CR_ALGO_MASK;
  1165. cfg |= CR_AES_CCM;
  1166. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1167. /* f) change phase to header */
  1168. cfg &= ~CR_PH_MASK;
  1169. cfg |= CR_PH_HEADER;
  1170. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1171. /* g) XOR and write padded data */
  1172. for (i = 0; i < ARRAY_SIZE(tmp); i++) {
  1173. tmp[i] ^= cstmp1[i];
  1174. tmp[i] ^= cstmp2[i];
  1175. stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
  1176. }
  1177. /* h) wait for completion */
  1178. err = stm32_cryp_wait_busy(cryp);
  1179. if (err)
  1180. dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
  1181. /* i) run the normal Final phase */
  1182. stm32_cryp_finish_req(cryp, err);
  1183. }
  1184. static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
  1185. {
  1186. if (unlikely(!cryp->total_in)) {
  1187. dev_warn(cryp->dev, "No more data to process\n");
  1188. return;
  1189. }
  1190. if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
  1191. (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
  1192. is_encrypt(cryp))) {
  1193. /* Padding for AES GCM encryption */
  1194. if (cryp->caps->padding_wa)
  1195. /* Special case 1 */
  1196. return stm32_cryp_irq_write_gcm_padded_data(cryp);
  1197. /* Setting padding bytes (NPBLB) */
  1198. stm32_cryp_irq_set_npblb(cryp);
  1199. }
  1200. if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
  1201. (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
  1202. is_decrypt(cryp))) {
  1203. /* Padding for AES CCM decryption */
  1204. if (cryp->caps->padding_wa)
  1205. /* Special case 2 */
  1206. return stm32_cryp_irq_write_ccm_padded_data(cryp);
  1207. /* Setting padding bytes (NPBLB) */
  1208. stm32_cryp_irq_set_npblb(cryp);
  1209. }
  1210. if (is_aes(cryp) && is_ctr(cryp))
  1211. stm32_cryp_check_ctr_counter(cryp);
  1212. stm32_cryp_irq_write_block(cryp);
  1213. }
  1214. static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
  1215. {
  1216. int err;
  1217. unsigned int i, j;
  1218. u32 cfg, *src;
  1219. src = sg_virt(cryp->in_sg) + _walked_in;
  1220. for (i = 0; i < AES_BLOCK_32; i++) {
  1221. stm32_cryp_write(cryp, CRYP_DIN, *src);
  1222. src = stm32_cryp_next_in(cryp, src, sizeof(u32));
  1223. cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
  1224. /* Check if whole header written */
  1225. if ((cryp->total_in_save - cryp->total_in) ==
  1226. cryp->areq->assoclen) {
  1227. /* Write padding if needed */
  1228. for (j = i + 1; j < AES_BLOCK_32; j++)
  1229. stm32_cryp_write(cryp, CRYP_DIN, 0);
  1230. /* Wait for completion */
  1231. err = stm32_cryp_wait_busy(cryp);
  1232. if (err) {
  1233. dev_err(cryp->dev, "Timeout (gcm header)\n");
  1234. return stm32_cryp_finish_req(cryp, err);
  1235. }
  1236. if (stm32_cryp_get_input_text_len(cryp)) {
  1237. /* Phase 3 : payload */
  1238. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1239. cfg &= ~CR_CRYPEN;
  1240. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1241. cfg &= ~CR_PH_MASK;
  1242. cfg |= CR_PH_PAYLOAD;
  1243. cfg |= CR_CRYPEN;
  1244. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1245. } else {
  1246. /* Phase 4 : tag */
  1247. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1248. stm32_cryp_finish_req(cryp, 0);
  1249. }
  1250. break;
  1251. }
  1252. if (!cryp->total_in)
  1253. break;
  1254. }
  1255. }
  1256. static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
  1257. {
  1258. int err;
  1259. unsigned int i = 0, j, k;
  1260. u32 alen, cfg, *src;
  1261. u8 d8[4];
  1262. src = sg_virt(cryp->in_sg) + _walked_in;
  1263. alen = cryp->areq->assoclen;
  1264. if (!_walked_in) {
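/* First header block: CCM prepends the encoded AAD length to the associated data, two bytes when assoclen <= 0xFF00, otherwise the 0xFF 0xFE marker followed by a four-byte length (RFC 3610) */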
  1265. if (cryp->areq->assoclen <= 65280) {
  1266. /* Write first u32 of B1 */
  1267. d8[0] = (alen >> 8) & 0xFF;
  1268. d8[1] = alen & 0xFF;
  1269. d8[2] = *((u8 *)src);
  1270. src = stm32_cryp_next_in(cryp, src, 1);
  1271. d8[3] = *((u8 *)src);
  1272. src = stm32_cryp_next_in(cryp, src, 1);
  1273. stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
  1274. i++;
  1275. cryp->total_in -= min_t(size_t, 2, cryp->total_in);
  1276. } else {
  1277. /* Build the two first u32 of B1 */
  1278. d8[0] = 0xFF;
  1279. d8[1] = 0xFE;
  1280. d8[2] = alen & 0xFF000000;
  1281. d8[3] = alen & 0x00FF0000;
  1282. stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
  1283. i++;
  1284. d8[0] = alen & 0x0000FF00;
  1285. d8[1] = alen & 0x000000FF;
  1286. d8[2] = *((u8 *)src);
  1287. src = stm32_cryp_next_in(cryp, src, 1);
  1288. d8[3] = *((u8 *)src);
  1289. src = stm32_cryp_next_in(cryp, src, 1);
  1290. stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
  1291. i++;
  1292. cryp->total_in -= min_t(size_t, 2, cryp->total_in);
  1293. }
  1294. }
  1295. /* Write next u32 */
  1296. for (; i < AES_BLOCK_32; i++) {
  1297. /* Build a u32 */
  1298. memset(d8, 0, sizeof(u32));
  1299. for (k = 0; k < sizeof(u32); k++) {
  1300. d8[k] = *((u8 *)src);
  1301. src = stm32_cryp_next_in(cryp, src, 1);
  1302. cryp->total_in -= min_t(size_t, 1, cryp->total_in);
  1303. if ((cryp->total_in_save - cryp->total_in) == alen)
  1304. break;
  1305. }
  1306. stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
  1307. if ((cryp->total_in_save - cryp->total_in) == alen) {
  1308. /* Write padding if needed */
  1309. for (j = i + 1; j < AES_BLOCK_32; j++)
  1310. stm32_cryp_write(cryp, CRYP_DIN, 0);
  1311. /* Wait for completion */
  1312. err = stm32_cryp_wait_busy(cryp);
  1313. if (err) {
  1314. dev_err(cryp->dev, "Timeout (ccm header)\n");
  1315. return stm32_cryp_finish_req(cryp, err);
  1316. }
  1317. if (stm32_cryp_get_input_text_len(cryp)) {
  1318. /* Phase 3 : payload */
  1319. cfg = stm32_cryp_read(cryp, CRYP_CR);
  1320. cfg &= ~CR_CRYPEN;
  1321. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1322. cfg &= ~CR_PH_MASK;
  1323. cfg |= CR_PH_PAYLOAD;
  1324. cfg |= CR_CRYPEN;
  1325. stm32_cryp_write(cryp, CRYP_CR, cfg);
  1326. } else {
  1327. /* Phase 4 : tag */
  1328. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1329. stm32_cryp_finish_req(cryp, 0);
  1330. }
  1331. break;
  1332. }
  1333. }
  1334. }
  1335. static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
  1336. {
  1337. struct stm32_cryp *cryp = arg;
  1338. u32 ph;
  1339. if (cryp->irq_status & MISR_OUT)
  1340. /* Output FIFO IRQ: read data */
  1341. if (unlikely(stm32_cryp_irq_read_data(cryp))) {
  1342. /* All bytes processed, finish */
  1343. stm32_cryp_write(cryp, CRYP_IMSCR, 0);
  1344. stm32_cryp_finish_req(cryp, 0);
  1345. return IRQ_HANDLED;
  1346. }
  1347. if (cryp->irq_status & MISR_IN) {
  1348. if (is_gcm(cryp)) {
  1349. ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
  1350. if (unlikely(ph == CR_PH_HEADER))
  1351. /* Write Header */
  1352. stm32_cryp_irq_write_gcm_header(cryp);
  1353. else
  1354. /* Input FIFO IRQ: write data */
  1355. stm32_cryp_irq_write_data(cryp);
  1356. cryp->gcm_ctr++;
  1357. } else if (is_ccm(cryp)) {
  1358. ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
  1359. if (unlikely(ph == CR_PH_HEADER))
  1360. /* Write Header */
  1361. stm32_cryp_irq_write_ccm_header(cryp);
  1362. else
  1363. /* Input FIFO IRQ: write data */
  1364. stm32_cryp_irq_write_data(cryp);
  1365. } else {
  1366. /* Input FIFO IRQ: write data */
  1367. stm32_cryp_irq_write_data(cryp);
  1368. }
  1369. }
  1370. return IRQ_HANDLED;
  1371. }
  1372. static irqreturn_t stm32_cryp_irq(int irq, void *arg)
  1373. {
  1374. struct stm32_cryp *cryp = arg;
  1375. cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);
  1376. return IRQ_WAKE_THREAD;
  1377. }
static struct crypto_alg crypto_algs[] = {
{
	.cra_name = "ecb(aes)",
	.cra_driver_name = "stm32-ecb-aes",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ecb_encrypt,
		.decrypt = stm32_cryp_aes_ecb_decrypt,
	}
},
{
	.cra_name = "cbc(aes)",
	.cra_driver_name = "stm32-cbc-aes",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_cbc_encrypt,
		.decrypt = stm32_cryp_aes_cbc_decrypt,
	}
},
{
	.cra_name = "ctr(aes)",
	.cra_driver_name = "stm32-ctr-aes",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ctr_encrypt,
		.decrypt = stm32_cryp_aes_ctr_decrypt,
	}
},
{
	.cra_name = "ecb(des)",
	.cra_driver_name = "stm32-ecb-des",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_ecb_encrypt,
		.decrypt = stm32_cryp_des_ecb_decrypt,
	}
},
{
	.cra_name = "cbc(des)",
	.cra_driver_name = "stm32-cbc-des",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_cbc_encrypt,
		.decrypt = stm32_cryp_des_cbc_decrypt,
	}
},
{
	.cra_name = "ecb(des3_ede)",
	.cra_driver_name = "stm32-ecb-des3",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_ecb_encrypt,
		.decrypt = stm32_cryp_tdes_ecb_decrypt,
	}
},
{
	.cra_name = "cbc(des3_ede)",
	.cra_driver_name = "stm32-cbc-des3",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_cbc_encrypt,
		.decrypt = stm32_cryp_tdes_cbc_decrypt,
	}
},
};
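
/*
 * AEAD algorithms: AES-GCM (96-bit IV) and AES-CCM.  Illustrative consumer
 * side, assuming the standard kernel AEAD API (not part of this driver):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, 16);
 *	...
 *	crypto_aead_encrypt(req);
 */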
static struct aead_alg aead_algs[] = {
{
	.setkey = stm32_cryp_aes_aead_setkey,
	.setauthsize = stm32_cryp_aes_gcm_setauthsize,
	.encrypt = stm32_cryp_aes_gcm_encrypt,
	.decrypt = stm32_cryp_aes_gcm_decrypt,
	.init = stm32_cryp_aes_aead_init,
	.ivsize = 12,
	.maxauthsize = AES_BLOCK_SIZE,

	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "stm32-gcm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
},
{
	.setkey = stm32_cryp_aes_aead_setkey,
	.setauthsize = stm32_cryp_aes_ccm_setauthsize,
	.encrypt = stm32_cryp_aes_ccm_encrypt,
	.decrypt = stm32_cryp_aes_ccm_decrypt,
	.init = stm32_cryp_aes_aead_init,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,

	.base = {
		.cra_name = "ccm(aes)",
		.cra_driver_name = "stm32-ccm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
},
};
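
/*
 * Per-SoC capabilities: the STM32F7 variant needs the final byte swap and
 * the padding workaround, the STM32MP1 variant does not.
 */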
static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};
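
/* Device-tree match table: the compatible string selects the capability set above. */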
static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);
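
/*
 * Probe: map the registers, request the threaded IRQ, enable the clock,
 * set up runtime PM with autosuspend, optionally reset the block, add the
 * instance to the global list, then start the crypto engine and register
 * the algorithms declared above.
 */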
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct resource *res;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	mutex_init(&cryp->lock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	cryp->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "Cannot get IRQ resource\n");
		return irq;
	}

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	/* Disable runtime PM exactly once; a duplicated pair would unbalance the reference count. */
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}
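
/*
 * Remove: unregister the algorithms, stop the crypto engine, drop the
 * instance from the global list and release clock/PM resources.
 */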
static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_get_sync(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}
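
/*
 * Runtime PM: the CRYP clock is gated while the device is idle and
 * re-enabled on demand before register access.
 */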
#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif
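
/* System sleep reuses the runtime PM hooks via pm_runtime_force_suspend/resume(). */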
static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");