/*
 * vsp1_video.c -- R-Car VSP1 Video Node
 *
 * Copyright (C) 2013-2014 Renesas Electronics Corporation
 *
 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/list.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/v4l2-mediabus.h>
#include <linux/videodev2.h>

#include <media/media-entity.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-core.h>
#include <media/videobuf2-dma-contig.h>

#include "vsp1.h"
#include "vsp1_bru.h"
#include "vsp1_entity.h"
#include "vsp1_rwpf.h"
#include "vsp1_uds.h"
#include "vsp1_video.h"

#define VSP1_VIDEO_DEF_FORMAT		V4L2_PIX_FMT_YUYV
#define VSP1_VIDEO_DEF_WIDTH		1024
#define VSP1_VIDEO_DEF_HEIGHT		768

#define VSP1_VIDEO_MIN_WIDTH		2U
#define VSP1_VIDEO_MAX_WIDTH		8190U
#define VSP1_VIDEO_MIN_HEIGHT		2U
#define VSP1_VIDEO_MAX_HEIGHT		8190U

/* -----------------------------------------------------------------------------
 * Helper functions
 */
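
/*
 * Supported pixel formats. Each entry maps a V4L2 4CC to the corresponding
 * media bus code, hardware format and data swap settings, and records the
 * plane count, bits per pixel, chroma subsampling and alpha support used by
 * the format helpers below.
 */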
static const struct vsp1_format_info vsp1_video_formats[] = {
	{ V4L2_PIX_FMT_RGB332, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_RGB_332, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 8, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_ARGB444, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_4444, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS,
	  1, { 16, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XRGB444, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_XRGB_4444, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS,
	  1, { 16, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_ARGB555, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_1555, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS,
	  1, { 16, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XRGB555, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_XRGB_1555, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS,
	  1, { 16, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_RGB565, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_RGB_565, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS,
	  1, { 16, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_BGR24, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_BGR_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 24, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_RGB24, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_RGB_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 24, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_ABGR32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS,
	  1, { 32, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XBGR32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS,
	  1, { 32, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_ARGB32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 32, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XRGB32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 32, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_UYVY, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, false, 2, 1, false },
	{ V4L2_PIX_FMT_VYUY, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, true, 2, 1, false },
	{ V4L2_PIX_FMT_YUYV, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, true, false, 2, 1, false },
	{ V4L2_PIX_FMT_YVYU, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, true, true, 2, 1, false },
	{ V4L2_PIX_FMT_NV12M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, false, 2, 2, false },
	{ V4L2_PIX_FMT_NV21M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, true, 2, 2, false },
	{ V4L2_PIX_FMT_NV16M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, false, 2, 1, false },
	{ V4L2_PIX_FMT_NV61M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, true, 2, 1, false },
	{ V4L2_PIX_FMT_YUV420M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_U_V_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  3, { 8, 8, 8 }, false, false, 2, 2, false },
};

/*
 * vsp1_get_format_info - Retrieve format information for a 4CC
 * @fourcc: the format 4CC
 *
 * Return a pointer to the format information structure corresponding to the
 * given V4L2 format 4CC, or NULL if no corresponding format can be found.
 */
static const struct vsp1_format_info *vsp1_get_format_info(u32 fourcc)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(vsp1_video_formats); ++i) {
		const struct vsp1_format_info *info = &vsp1_video_formats[i];

		if (info->fourcc == fourcc)
			return info;
	}

	return NULL;
}
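
/*
 * vsp1_video_remote_subdev - Retrieve the subdev connected to a video node pad
 * @local: the local pad of the video node
 * @pad: index of the connected remote pad, filled in unless NULL
 *
 * Return the V4L2 subdev connected to @local, or NULL if the remote entity is
 * not a subdev.
 */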
static struct v4l2_subdev *
vsp1_video_remote_subdev(struct media_pad *local, u32 *pad)
{
	struct media_pad *remote;

	remote = media_entity_remote_pad(local);
	if (remote == NULL ||
	    media_entity_type(remote->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
		return NULL;

	if (pad)
		*pad = remote->index;

	return media_entity_to_v4l2_subdev(remote->entity);
}
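
/*
 * vsp1_video_verify_format - Check the video node format against the subdev
 *
 * Verify that the active format on the connected subdev pad matches the video
 * node format in media bus code, width and height.
 */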
static int vsp1_video_verify_format(struct vsp1_video *video)
{
	struct v4l2_subdev_format fmt;
	struct v4l2_subdev *subdev;
	int ret;

	subdev = vsp1_video_remote_subdev(&video->pad, &fmt.pad);
	if (subdev == NULL)
		return -EINVAL;

	fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
	if (ret < 0)
		return ret == -ENOIOCTLCMD ? -EINVAL : ret;

	if (video->fmtinfo->mbus != fmt.format.code ||
	    video->format.height != fmt.format.height ||
	    video->format.width != fmt.format.width)
		return -EINVAL;

	return 0;
}
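
/*
 * __vsp1_video_try_format - Adjust a format to the hardware constraints
 *
 * Map deprecated RGB formats to their XRGB equivalents, fall back to the
 * default format when the requested 4CC isn't supported, align and clamp the
 * frame size, and compute the per-plane stride and image size.
 */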
static int __vsp1_video_try_format(struct vsp1_video *video,
				   struct v4l2_pix_format_mplane *pix,
				   const struct vsp1_format_info **fmtinfo)
{
	static const u32 xrgb_formats[][2] = {
		{ V4L2_PIX_FMT_RGB444, V4L2_PIX_FMT_XRGB444 },
		{ V4L2_PIX_FMT_RGB555, V4L2_PIX_FMT_XRGB555 },
		{ V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_XBGR32 },
		{ V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_XRGB32 },
	};

	const struct vsp1_format_info *info;
	unsigned int width = pix->width;
	unsigned int height = pix->height;
	unsigned int i;

	/* Backward compatibility: replace deprecated RGB formats by their XRGB
	 * equivalent. This selects the format older userspace applications want
	 * while still exposing the new format.
	 */
	for (i = 0; i < ARRAY_SIZE(xrgb_formats); ++i) {
		if (xrgb_formats[i][0] == pix->pixelformat) {
			pix->pixelformat = xrgb_formats[i][1];
			break;
		}
	}

	/* Retrieve format information and select the default format if the
	 * requested format isn't supported.
	 */
	info = vsp1_get_format_info(pix->pixelformat);
	if (info == NULL)
		info = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);

	pix->pixelformat = info->fourcc;
	pix->colorspace = V4L2_COLORSPACE_SRGB;
	pix->field = V4L2_FIELD_NONE;
	memset(pix->reserved, 0, sizeof(pix->reserved));

	/* Align the width and height for YUV 4:2:2 and 4:2:0 formats. */
	width = round_down(width, info->hsub);
	height = round_down(height, info->vsub);

	/* Clamp the width and height. */
	pix->width = clamp(width, VSP1_VIDEO_MIN_WIDTH, VSP1_VIDEO_MAX_WIDTH);
	pix->height = clamp(height, VSP1_VIDEO_MIN_HEIGHT,
			    VSP1_VIDEO_MAX_HEIGHT);

	/* Compute and clamp the stride and image size. While not documented in
	 * the datasheet, strides not aligned to a multiple of 128 bytes result
	 * in image corruption.
	 */
	for (i = 0; i < min(info->planes, 2U); ++i) {
		unsigned int hsub = i > 0 ? info->hsub : 1;
		unsigned int vsub = i > 0 ? info->vsub : 1;
		unsigned int align = 128;
		unsigned int bpl;

		bpl = clamp_t(unsigned int, pix->plane_fmt[i].bytesperline,
			      pix->width / hsub * info->bpp[i] / 8,
			      round_down(65535U, align));

		pix->plane_fmt[i].bytesperline = round_up(bpl, align);
		pix->plane_fmt[i].sizeimage = pix->plane_fmt[i].bytesperline
					    * pix->height / vsub;
	}

	if (info->planes == 3) {
		/* The second and third planes must have the same stride. */
		pix->plane_fmt[2].bytesperline = pix->plane_fmt[1].bytesperline;
		pix->plane_fmt[2].sizeimage = pix->plane_fmt[1].sizeimage;
	}

	pix->num_planes = info->planes;

	if (fmtinfo)
		*fmtinfo = info;

	return 0;
}
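
/*
 * vsp1_video_format_adjust - Check a format against the hardware constraints
 *
 * Run @format through __vsp1_video_try_format() and store the result in
 * @adjust. Return true when the two formats match, enlarging the sizeimage
 * fields in @adjust to cover @format if needed, and false otherwise.
 */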
static bool
vsp1_video_format_adjust(struct vsp1_video *video,
			 const struct v4l2_pix_format_mplane *format,
			 struct v4l2_pix_format_mplane *adjust)
{
	unsigned int i;

	*adjust = *format;
	__vsp1_video_try_format(video, adjust, NULL);

	if (format->width != adjust->width ||
	    format->height != adjust->height ||
	    format->pixelformat != adjust->pixelformat ||
	    format->num_planes != adjust->num_planes)
		return false;

	for (i = 0; i < format->num_planes; ++i) {
		if (format->plane_fmt[i].bytesperline !=
		    adjust->plane_fmt[i].bytesperline)
			return false;

		adjust->plane_fmt[i].sizeimage =
			max(adjust->plane_fmt[i].sizeimage,
			    format->plane_fmt[i].sizeimage);
	}

	return true;
}

/* -----------------------------------------------------------------------------
 * Pipeline Management
 */
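
/*
 * vsp1_pipeline_validate_branch - Validate a branch from an input to the WPF
 *
 * Follow the source links from the @input RPF, recording the BRU compose
 * rectangle and the UDS position along the way. Return -EPIPE if the branch
 * contains a loop, chains more than one UDS, or doesn't end at @output.
 */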
static int vsp1_pipeline_validate_branch(struct vsp1_pipeline *pipe,
					 struct vsp1_rwpf *input,
					 struct vsp1_rwpf *output)
{
	struct vsp1_entity *entity;
	unsigned int entities = 0;
	struct media_pad *pad;
	bool bru_found = false;

	input->location.left = 0;
	input->location.top = 0;

	pad = media_entity_remote_pad(&input->entity.pads[RWPF_PAD_SOURCE]);

	while (1) {
		if (pad == NULL)
			return -EPIPE;

		/* We've reached a video node, that shouldn't have happened. */
		if (media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			return -EPIPE;

		entity = to_vsp1_entity(media_entity_to_v4l2_subdev(pad->entity));

		/* A BRU is present in the pipeline, store the compose rectangle
		 * location in the input RPF for use when configuring the RPF.
		 */
		if (entity->type == VSP1_ENTITY_BRU) {
			struct vsp1_bru *bru = to_bru(&entity->subdev);
			struct v4l2_rect *rect =
				&bru->inputs[pad->index].compose;

			bru->inputs[pad->index].rpf = input;

			input->location.left = rect->left;
			input->location.top = rect->top;

			bru_found = true;
		}

		/* We've reached the WPF, we're done. */
		if (entity->type == VSP1_ENTITY_WPF)
			break;

		/* Ensure the branch has no loop. */
		if (entities & (1 << entity->subdev.entity.id))
			return -EPIPE;

		entities |= 1 << entity->subdev.entity.id;

		/* UDS can't be chained. */
		if (entity->type == VSP1_ENTITY_UDS) {
			if (pipe->uds)
				return -EPIPE;

			pipe->uds = entity;
			pipe->uds_input = bru_found ? pipe->bru
					: &input->entity;
		}

		/* Follow the source link. The link setup operations ensure
		 * that the output fan-out can't be more than one, there is thus
		 * no need to verify here that only a single source link is
		 * activated.
		 */
		pad = &entity->pads[entity->source_pad];
		pad = media_entity_remote_pad(pad);
	}

	/* The last entity must be the output WPF. */
	if (entity != &output->entity)
		return -EPIPE;

	return 0;
}

static void __vsp1_pipeline_cleanup(struct vsp1_pipeline *pipe)
{
	if (pipe->bru) {
		struct vsp1_bru *bru = to_bru(&pipe->bru->subdev);
		unsigned int i;

		for (i = 0; i < ARRAY_SIZE(bru->inputs); ++i)
			bru->inputs[i].rpf = NULL;
	}

	INIT_LIST_HEAD(&pipe->entities);
	pipe->state = VSP1_PIPELINE_STOPPED;
	pipe->buffers_ready = 0;
	pipe->num_video = 0;
	pipe->num_inputs = 0;
	pipe->output = NULL;
	pipe->bru = NULL;
	pipe->lif = NULL;
	pipe->uds = NULL;
}
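
/*
 * vsp1_pipeline_validate - Build and validate the pipeline around a video node
 *
 * Walk the media graph from @video to collect the entities, input RPFs, output
 * WPF, LIF and BRU, then validate every branch from an input to the output.
 */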
static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
				  struct vsp1_video *video)
{
	struct media_entity_graph graph;
	struct media_entity *entity = &video->video.entity;
	struct media_device *mdev = entity->parent;
	unsigned int i;
	int ret;

	mutex_lock(&mdev->graph_mutex);

	/* Walk the graph to locate the entities and video nodes. */
	media_entity_graph_walk_start(&graph, entity);

	while ((entity = media_entity_graph_walk_next(&graph))) {
		struct v4l2_subdev *subdev;
		struct vsp1_rwpf *rwpf;
		struct vsp1_entity *e;

		if (media_entity_type(entity) != MEDIA_ENT_T_V4L2_SUBDEV) {
			pipe->num_video++;
			continue;
		}

		subdev = media_entity_to_v4l2_subdev(entity);
		e = to_vsp1_entity(subdev);
		list_add_tail(&e->list_pipe, &pipe->entities);

		if (e->type == VSP1_ENTITY_RPF) {
			rwpf = to_rwpf(subdev);
			pipe->inputs[pipe->num_inputs++] = rwpf;
			rwpf->video.pipe_index = pipe->num_inputs;
		} else if (e->type == VSP1_ENTITY_WPF) {
			rwpf = to_rwpf(subdev);
			pipe->output = to_rwpf(subdev);
			rwpf->video.pipe_index = 0;
		} else if (e->type == VSP1_ENTITY_LIF) {
			pipe->lif = e;
		} else if (e->type == VSP1_ENTITY_BRU) {
			pipe->bru = e;
		}
	}

	mutex_unlock(&mdev->graph_mutex);

	/* We need one output and at least one input. */
	if (pipe->num_inputs == 0 || !pipe->output) {
		ret = -EPIPE;
		goto error;
	}

	/* Follow links downstream for each input and make sure the graph
	 * contains no loop and that all branches end at the output WPF.
	 */
	for (i = 0; i < pipe->num_inputs; ++i) {
		ret = vsp1_pipeline_validate_branch(pipe, pipe->inputs[i],
						    pipe->output);
		if (ret < 0)
			goto error;
	}

	return 0;

error:
	__vsp1_pipeline_cleanup(pipe);
	return ret;
}

static int vsp1_pipeline_init(struct vsp1_pipeline *pipe,
			      struct vsp1_video *video)
{
	int ret;

	mutex_lock(&pipe->lock);

	/* If we're the first user validate and initialize the pipeline. */
	if (pipe->use_count == 0) {
		ret = vsp1_pipeline_validate(pipe, video);
		if (ret < 0)
			goto done;
	}

	pipe->use_count++;
	ret = 0;

done:
	mutex_unlock(&pipe->lock);
	return ret;
}

static void vsp1_pipeline_cleanup(struct vsp1_pipeline *pipe)
{
	mutex_lock(&pipe->lock);

	/* If we're the last user clean up the pipeline. */
	if (--pipe->use_count == 0)
		__vsp1_pipeline_cleanup(pipe);

	mutex_unlock(&pipe->lock);
}
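
/*
 * vsp1_pipeline_run - Start the hardware for one frame
 *
 * Write the start command to the output WPF. Called with the pipeline irqlock
 * held.
 */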
static void vsp1_pipeline_run(struct vsp1_pipeline *pipe)
{
	struct vsp1_device *vsp1 = pipe->output->entity.vsp1;

	vsp1_write(vsp1, VI6_CMD(pipe->output->entity.index), VI6_CMD_STRCMD);

	pipe->state = VSP1_PIPELINE_RUNNING;
	pipe->buffers_ready = 0;
}
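
/*
 * vsp1_pipeline_stop - Stop the pipeline
 *
 * Request a stop, wait up to 500ms for the current frame to complete, then
 * unroute and stop all entities. Return -ETIMEDOUT if the pipeline didn't
 * reach the stopped state in time.
 */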
static int vsp1_pipeline_stop(struct vsp1_pipeline *pipe)
{
	struct vsp1_entity *entity;
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&pipe->irqlock, flags);
	if (pipe->state == VSP1_PIPELINE_RUNNING)
		pipe->state = VSP1_PIPELINE_STOPPING;
	spin_unlock_irqrestore(&pipe->irqlock, flags);

	ret = wait_event_timeout(pipe->wq, pipe->state == VSP1_PIPELINE_STOPPED,
				 msecs_to_jiffies(500));
	ret = ret == 0 ? -ETIMEDOUT : 0;

	list_for_each_entry(entity, &pipe->entities, list_pipe) {
		if (entity->route && entity->route->reg)
			vsp1_write(entity->vsp1, entity->route->reg,
				   VI6_DPR_NODE_UNUSED);

		v4l2_subdev_call(&entity->subdev, video, s_stream, 0);
	}

	return ret;
}
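
/*
 * vsp1_pipeline_ready - Check whether the pipeline can run
 *
 * The pipeline is ready when buffers are queued on all its inputs and, unless
 * it feeds the display through the LIF, on its output.
 */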
static bool vsp1_pipeline_ready(struct vsp1_pipeline *pipe)
{
	unsigned int mask;

	mask = ((1 << pipe->num_inputs) - 1) << 1;
	if (!pipe->lif)
		mask |= 1 << 0;

	return pipe->buffers_ready == mask;
}

/*
 * vsp1_video_complete_buffer - Complete the current buffer
 * @video: the video node
 *
 * This function completes the current buffer by filling its sequence number,
 * time stamp and payload size, and hands it back to the videobuf core.
 *
 * When operating in DU output mode (deep pipeline to the DU through the LIF),
 * the VSP1 needs to constantly supply frames to the display. In that case, if
 * no other buffer is queued, reuse the one that has just been processed instead
 * of handing it back to the videobuf core.
 *
 * Return the next queued buffer or NULL if the queue is empty.
 */
static struct vsp1_video_buffer *
vsp1_video_complete_buffer(struct vsp1_video *video)
{
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_video_buffer *next = NULL;
	struct vsp1_video_buffer *done;
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(&video->irqlock, flags);

	if (list_empty(&video->irqqueue)) {
		spin_unlock_irqrestore(&video->irqlock, flags);
		return NULL;
	}

	done = list_first_entry(&video->irqqueue,
				struct vsp1_video_buffer, queue);

	/* In DU output mode reuse the buffer if the list is singular. */
	if (pipe->lif && list_is_singular(&video->irqqueue)) {
		spin_unlock_irqrestore(&video->irqlock, flags);
		return done;
	}

	list_del(&done->queue);

	if (!list_empty(&video->irqqueue))
		next = list_first_entry(&video->irqqueue,
					struct vsp1_video_buffer, queue);

	spin_unlock_irqrestore(&video->irqlock, flags);

	done->buf.v4l2_buf.sequence = video->sequence++;
	v4l2_get_timestamp(&done->buf.v4l2_buf.timestamp);
	for (i = 0; i < done->buf.num_planes; ++i)
		vb2_set_plane_payload(&done->buf, i, done->length[i]);
	vb2_buffer_done(&done->buf, VB2_BUF_STATE_DONE);

	return next;
}
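
/*
 * vsp1_video_frame_end - Complete the current buffer and queue the next one
 *
 * Complete the current buffer on @video, program the buffer returned by
 * vsp1_video_complete_buffer() into the hardware and mark the video node as
 * having a buffer ready.
 */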
static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
				 struct vsp1_video *video)
{
	struct vsp1_video_buffer *buf;
	unsigned long flags;

	buf = vsp1_video_complete_buffer(video);
	if (buf == NULL)
		return;

	spin_lock_irqsave(&pipe->irqlock, flags);

	video->ops->queue(video, buf);
	pipe->buffers_ready |= 1 << video->pipe_index;

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}
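
/*
 * vsp1_pipeline_frame_end - Frame end interrupt handler
 *
 * Complete buffers on all video nodes, wake up a pending stop request, or
 * restart the pipeline when new buffers are ready.
 */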
void vsp1_pipeline_frame_end(struct vsp1_pipeline *pipe)
{
	enum vsp1_pipeline_state state;
	unsigned long flags;
	unsigned int i;

	if (pipe == NULL)
		return;

	/* Complete buffers on all video nodes. */
	for (i = 0; i < pipe->num_inputs; ++i)
		vsp1_video_frame_end(pipe, &pipe->inputs[i]->video);

	if (!pipe->lif)
		vsp1_video_frame_end(pipe, &pipe->output->video);

	spin_lock_irqsave(&pipe->irqlock, flags);

	state = pipe->state;
	pipe->state = VSP1_PIPELINE_STOPPED;

	/* If a stop has been requested, mark the pipeline as stopped and
	 * return.
	 */
	if (state == VSP1_PIPELINE_STOPPING) {
		wake_up(&pipe->wq);
		goto done;
	}

	/* Restart the pipeline if ready. */
	if (vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);

done:
	spin_unlock_irqrestore(&pipe->irqlock, flags);
}

/*
 * Propagate the alpha value through the pipeline.
 *
 * As the UDS has restricted scaling capabilities when the alpha component needs
 * to be scaled, we disable alpha scaling when the UDS input has a fixed alpha
 * value. The UDS then outputs a fixed alpha value which needs to be programmed
 * from the input RPF alpha.
 */
void vsp1_pipeline_propagate_alpha(struct vsp1_pipeline *pipe,
				   struct vsp1_entity *input,
				   unsigned int alpha)
{
	struct vsp1_entity *entity;
	struct media_pad *pad;

	pad = media_entity_remote_pad(&input->pads[RWPF_PAD_SOURCE]);

	while (pad) {
		if (media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		entity = to_vsp1_entity(media_entity_to_v4l2_subdev(pad->entity));

		/* The BRU background color has a fixed alpha value set to 255,
		 * the output alpha value is thus always equal to 255.
		 */
		if (entity->type == VSP1_ENTITY_BRU)
			alpha = 255;

		if (entity->type == VSP1_ENTITY_UDS) {
			struct vsp1_uds *uds = to_uds(&entity->subdev);

			vsp1_uds_set_alpha(uds, alpha);
			break;
		}

		pad = &entity->pads[entity->source_pad];
		pad = media_entity_remote_pad(pad);
	}
}

/* -----------------------------------------------------------------------------
 * videobuf2 Queue Operations
 */

static int
vsp1_video_queue_setup(struct vb2_queue *vq, const struct v4l2_format *fmt,
		       unsigned int *nbuffers, unsigned int *nplanes,
		       unsigned int sizes[], void *alloc_ctxs[])
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	const struct v4l2_pix_format_mplane *format;
	struct v4l2_pix_format_mplane pix_mp;
	unsigned int i;

	if (fmt) {
		/* Make sure the format is valid and adjust the sizeimage field
		 * if needed.
		 */
		if (!vsp1_video_format_adjust(video, &fmt->fmt.pix_mp, &pix_mp))
			return -EINVAL;

		format = &pix_mp;
	} else {
		format = &video->format;
	}

	*nplanes = format->num_planes;

	for (i = 0; i < format->num_planes; ++i) {
		sizes[i] = format->plane_fmt[i].sizeimage;
		alloc_ctxs[i] = video->alloc_ctx;
	}

	return 0;
}

static int vsp1_video_buffer_prepare(struct vb2_buffer *vb)
{
	struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_video_buffer *buf = to_vsp1_video_buffer(vb);
	const struct v4l2_pix_format_mplane *format = &video->format;
	unsigned int i;

	if (vb->num_planes < format->num_planes)
		return -EINVAL;

	for (i = 0; i < vb->num_planes; ++i) {
		buf->addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
		buf->length[i] = vb2_plane_size(vb, i);

		if (buf->length[i] < format->plane_fmt[i].sizeimage)
			return -EINVAL;
	}

	return 0;
}
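
/*
 * vsp1_video_buffer_queue - vb2 buf_queue operation
 *
 * Add the buffer to the IRQ queue. When the queue was empty, hand the buffer
 * directly to the hardware and run the pipeline if it is streaming and ready.
 */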
static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
{
	struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_video_buffer *buf = to_vsp1_video_buffer(vb);
	unsigned long flags;
	bool empty;

	spin_lock_irqsave(&video->irqlock, flags);
	empty = list_empty(&video->irqqueue);
	list_add_tail(&buf->queue, &video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);

	if (!empty)
		return;

	spin_lock_irqsave(&pipe->irqlock, flags);

	video->ops->queue(video, buf);
	pipe->buffers_ready |= 1 << video->pipe_index;

	if (vb2_is_streaming(&video->queue) &&
	    vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}
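
/*
 * vsp1_entity_route_setup - Program the DPR routing for an entity
 *
 * Point the routing register of @source to the input of the entity connected
 * to its source pad. Entities without a routing register are skipped.
 */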
static void vsp1_entity_route_setup(struct vsp1_entity *source)
{
	struct vsp1_entity *sink;

	if (source->route->reg == 0)
		return;

	sink = container_of(source->sink, struct vsp1_entity, subdev.entity);
	vsp1_write(source->vsp1, source->route->reg,
		   sink->route->inputs[source->sink_pad]);
}
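
/*
 * vsp1_video_start_streaming - vb2 start_streaming operation
 *
 * When the last video node in the pipeline starts streaming, configure UDS
 * alpha scaling, program the DPR routing and start all entities. The pipeline
 * is then run as soon as buffers are ready on all video nodes.
 */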
static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_entity *entity;
	unsigned long flags;
	int ret;

	mutex_lock(&pipe->lock);
	if (pipe->stream_count == pipe->num_video - 1) {
		if (pipe->uds) {
			struct vsp1_uds *uds = to_uds(&pipe->uds->subdev);

			/* If a BRU is present in the pipeline before the UDS,
			 * the alpha component doesn't need to be scaled as the
			 * BRU output alpha value is fixed to 255. Otherwise we
			 * need to scale the alpha component only when available
			 * at the input RPF.
			 */
			if (pipe->uds_input->type == VSP1_ENTITY_BRU) {
				uds->scale_alpha = false;
			} else {
				struct vsp1_rwpf *rpf =
					to_rwpf(&pipe->uds_input->subdev);

				uds->scale_alpha = rpf->video.fmtinfo->alpha;
			}
		}

		list_for_each_entry(entity, &pipe->entities, list_pipe) {
			vsp1_entity_route_setup(entity);

			ret = v4l2_subdev_call(&entity->subdev, video,
					       s_stream, 1);
			if (ret < 0) {
				mutex_unlock(&pipe->lock);
				return ret;
			}
		}
	}

	pipe->stream_count++;
	mutex_unlock(&pipe->lock);

	spin_lock_irqsave(&pipe->irqlock, flags);
	if (vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);
	spin_unlock_irqrestore(&pipe->irqlock, flags);

	return 0;
}
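
/*
 * vsp1_video_stop_streaming - vb2 stop_streaming operation
 *
 * Stop the hardware when the last user stops streaming, release the pipeline
 * and return all buffers still queued on the video node with an error status.
 */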
static void vsp1_video_stop_streaming(struct vb2_queue *vq)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_video_buffer *buffer;
	unsigned long flags;
	int ret;

	mutex_lock(&pipe->lock);
	if (--pipe->stream_count == 0) {
		/* Stop the pipeline. */
		ret = vsp1_pipeline_stop(pipe);
		if (ret == -ETIMEDOUT)
			dev_err(video->vsp1->dev, "pipeline stop timeout\n");
	}
	mutex_unlock(&pipe->lock);

	vsp1_pipeline_cleanup(pipe);
	media_entity_pipeline_stop(&video->video.entity);

	/* Remove all buffers from the IRQ queue. */
	spin_lock_irqsave(&video->irqlock, flags);
	list_for_each_entry(buffer, &video->irqqueue, queue)
		vb2_buffer_done(&buffer->buf, VB2_BUF_STATE_ERROR);
	INIT_LIST_HEAD(&video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);
}

static struct vb2_ops vsp1_video_queue_qops = {
	.queue_setup = vsp1_video_queue_setup,
	.buf_prepare = vsp1_video_buffer_prepare,
	.buf_queue = vsp1_video_buffer_queue,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
	.start_streaming = vsp1_video_start_streaming,
	.stop_streaming = vsp1_video_stop_streaming,
};

/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */

static int
vsp1_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
			  | V4L2_CAP_VIDEO_CAPTURE_MPLANE
			  | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		cap->device_caps = V4L2_CAP_VIDEO_CAPTURE_MPLANE
				 | V4L2_CAP_STREAMING;
	else
		cap->device_caps = V4L2_CAP_VIDEO_OUTPUT_MPLANE
				 | V4L2_CAP_STREAMING;

	strlcpy(cap->driver, "vsp1", sizeof(cap->driver));
	strlcpy(cap->card, video->video.name, sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 dev_name(video->vsp1->dev));

	return 0;
}

static int
vsp1_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	mutex_lock(&video->lock);
	format->fmt.pix_mp = video->format;
	mutex_unlock(&video->lock);

	return 0;
}

static int
vsp1_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	return __vsp1_video_try_format(video, &format->fmt.pix_mp, NULL);
}

static int
vsp1_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	const struct vsp1_format_info *info;
	int ret;

	if (format->type != video->queue.type)
		return -EINVAL;

	ret = __vsp1_video_try_format(video, &format->fmt.pix_mp, &info);
	if (ret < 0)
		return ret;

	mutex_lock(&video->lock);

	if (vb2_is_busy(&video->queue)) {
		ret = -EBUSY;
		goto done;
	}

	video->format = format->fmt.pix_mp;
	video->fmtinfo = info;

done:
	mutex_unlock(&video->lock);
	return ret;
}
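
/*
 * vsp1_video_streamon - VIDIOC_STREAMON handler
 *
 * Start the media pipeline, verify the format against the connected subdev,
 * initialize the VSP1 pipeline and start the vb2 queue.
 */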
static int
vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	struct vsp1_pipeline *pipe;
	int ret;

	if (video->queue.owner && video->queue.owner != file->private_data)
		return -EBUSY;

	video->sequence = 0;

	/* Start streaming on the pipeline. No link touching an entity in the
	 * pipeline can be activated or deactivated once streaming is started.
	 *
	 * Use the VSP1 pipeline object embedded in the first video object that
	 * starts streaming.
	 */
	pipe = video->video.entity.pipe
	     ? to_vsp1_pipeline(&video->video.entity) : &video->pipe;

	ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe);
	if (ret < 0)
		return ret;

	/* Verify that the configured format matches the output of the connected
	 * subdev.
	 */
	ret = vsp1_video_verify_format(video);
	if (ret < 0)
		goto err_stop;

	ret = vsp1_pipeline_init(pipe, video);
	if (ret < 0)
		goto err_stop;

	/* Start the queue. */
	ret = vb2_streamon(&video->queue, type);
	if (ret < 0)
		goto err_cleanup;

	return 0;

err_cleanup:
	vsp1_pipeline_cleanup(pipe);
err_stop:
	media_entity_pipeline_stop(&video->video.entity);
	return ret;
}

static const struct v4l2_ioctl_ops vsp1_video_ioctl_ops = {
	.vidioc_querycap = vsp1_video_querycap,
	.vidioc_g_fmt_vid_cap_mplane = vsp1_video_get_format,
	.vidioc_s_fmt_vid_cap_mplane = vsp1_video_set_format,
	.vidioc_try_fmt_vid_cap_mplane = vsp1_video_try_format,
	.vidioc_g_fmt_vid_out_mplane = vsp1_video_get_format,
	.vidioc_s_fmt_vid_out_mplane = vsp1_video_set_format,
	.vidioc_try_fmt_vid_out_mplane = vsp1_video_try_format,
	.vidioc_reqbufs = vb2_ioctl_reqbufs,
	.vidioc_querybuf = vb2_ioctl_querybuf,
	.vidioc_qbuf = vb2_ioctl_qbuf,
	.vidioc_dqbuf = vb2_ioctl_dqbuf,
	.vidioc_create_bufs = vb2_ioctl_create_bufs,
	.vidioc_prepare_buf = vb2_ioctl_prepare_buf,
	.vidioc_streamon = vsp1_video_streamon,
	.vidioc_streamoff = vb2_ioctl_streamoff,
};

/* -----------------------------------------------------------------------------
 * V4L2 File Operations
 */

static int vsp1_video_open(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);
	struct v4l2_fh *vfh;
	int ret = 0;

	vfh = kzalloc(sizeof(*vfh), GFP_KERNEL);
	if (vfh == NULL)
		return -ENOMEM;

	v4l2_fh_init(vfh, &video->video);
	v4l2_fh_add(vfh);

	file->private_data = vfh;

	ret = vsp1_device_get(video->vsp1);
	if (ret < 0) {
		v4l2_fh_del(vfh);
		kfree(vfh);
	}

	return ret;
}

static int vsp1_video_release(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);
	struct v4l2_fh *vfh = file->private_data;

	mutex_lock(&video->lock);
	if (video->queue.owner == vfh) {
		vb2_queue_release(&video->queue);
		video->queue.owner = NULL;
	}
	mutex_unlock(&video->lock);

	vsp1_device_put(video->vsp1);

	v4l2_fh_release(file);

	file->private_data = NULL;

	return 0;
}

static struct v4l2_file_operations vsp1_video_fops = {
	.owner = THIS_MODULE,
	.unlocked_ioctl = video_ioctl2,
	.open = vsp1_video_open,
	.release = vsp1_video_release,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
};

/* -----------------------------------------------------------------------------
 * Initialization and Cleanup
 */
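
/*
 * vsp1_video_init - Initialize the video node associated with an [RW]PF entity
 *
 * Set up the media pad, default format, vb2 queue and DMA context, and
 * register the video device.
 */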
int vsp1_video_init(struct vsp1_video *video, struct vsp1_entity *rwpf)
{
	const char *direction;
	int ret;

	switch (video->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
		direction = "output";
		video->pad.flags = MEDIA_PAD_FL_SINK;
		break;

	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
		direction = "input";
		video->pad.flags = MEDIA_PAD_FL_SOURCE;
		video->video.vfl_dir = VFL_DIR_TX;
		break;

	default:
		return -EINVAL;
	}

	video->rwpf = rwpf;

	mutex_init(&video->lock);
	spin_lock_init(&video->irqlock);
	INIT_LIST_HEAD(&video->irqqueue);

	mutex_init(&video->pipe.lock);
	spin_lock_init(&video->pipe.irqlock);
	INIT_LIST_HEAD(&video->pipe.entities);
	init_waitqueue_head(&video->pipe.wq);
	video->pipe.state = VSP1_PIPELINE_STOPPED;

	/* Initialize the media entity... */
	ret = media_entity_init(&video->video.entity, 1, &video->pad, 0);
	if (ret < 0)
		return ret;

	/* ... and the format ... */
	video->fmtinfo = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);
	video->format.pixelformat = video->fmtinfo->fourcc;
	video->format.colorspace = V4L2_COLORSPACE_SRGB;
	video->format.field = V4L2_FIELD_NONE;
	video->format.width = VSP1_VIDEO_DEF_WIDTH;
	video->format.height = VSP1_VIDEO_DEF_HEIGHT;
	video->format.num_planes = 1;
	video->format.plane_fmt[0].bytesperline =
		video->format.width * video->fmtinfo->bpp[0] / 8;
	video->format.plane_fmt[0].sizeimage =
		video->format.plane_fmt[0].bytesperline * video->format.height;

	/* ... and the video node... */
	video->video.v4l2_dev = &video->vsp1->v4l2_dev;
	video->video.fops = &vsp1_video_fops;
	snprintf(video->video.name, sizeof(video->video.name), "%s %s",
		 rwpf->subdev.name, direction);
	video->video.vfl_type = VFL_TYPE_GRABBER;
	video->video.release = video_device_release_empty;
	video->video.ioctl_ops = &vsp1_video_ioctl_ops;

	video_set_drvdata(&video->video, video);

	/* ... and the buffers queue... */
	video->alloc_ctx = vb2_dma_contig_init_ctx(video->vsp1->dev);
	if (IS_ERR(video->alloc_ctx)) {
		ret = PTR_ERR(video->alloc_ctx);
		goto error;
	}

	video->queue.type = video->type;
	video->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	video->queue.lock = &video->lock;
	video->queue.drv_priv = video;
	video->queue.buf_struct_size = sizeof(struct vsp1_video_buffer);
	video->queue.ops = &vsp1_video_queue_qops;
	video->queue.mem_ops = &vb2_dma_contig_memops;
	video->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	ret = vb2_queue_init(&video->queue);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to initialize vb2 queue\n");
		goto error;
	}

	/* ... and register the video device. */
	video->video.queue = &video->queue;
	ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to register video device\n");
		goto error;
	}

	return 0;

error:
	vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
	vsp1_video_cleanup(video);
	return ret;
}

void vsp1_video_cleanup(struct vsp1_video *video)
{
	if (video_is_registered(&video->video))
		video_unregister_device(&video->video);

	vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
	media_entity_cleanup(&video->video.entity);
}