/* VideoFFmpeg.cpp */
  1. /*
  2. * ***** BEGIN GPL LICENSE BLOCK *****
  3. *
  4. * This program is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU General Public License
  6. * as published by the Free Software Foundation; either version 2
  7. * of the License, or (at your option) any later version.
  8. *
  9. * This program is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. * GNU General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU General Public License
  15. * along with this program; if not, write to the Free Software Foundation,
  16. * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  17. *
  18. * Copyright (c) 2007 The Zdeno Ash Miklas
  19. *
  20. * This source file is part of VideoTexture library
  21. *
  22. * Contributor(s):
  23. *
  24. * ***** END GPL LICENSE BLOCK *****
  25. */
  26. /** \file gameengine/VideoTexture/VideoFFmpeg.cpp
  27. * \ingroup bgevideotex
  28. */
  29. #ifdef WITH_FFMPEG
  30. // INT64_C fix for some linux machines (C99ism)
  31. #ifndef __STDC_CONSTANT_MACROS
  32. #define __STDC_CONSTANT_MACROS
  33. #ifdef __STDC_CONSTANT_MACROS /* quiet warning */
  34. #endif
  35. #endif
  36. #include <stdint.h>
  37. #include "MEM_guardedalloc.h"
  38. #include "PIL_time.h"
  39. #include <string>
  40. #include "VideoFFmpeg.h"
  41. #include "Exception.h"
// default framerate, used when the stream does not report a valid rate
const double defFrameRate = 25.0;

// macro for exception handling and logging:
// report the exception to the console/log and flag this source as errored
#define CATCH_EXCP catch (Exception & exp) \
{ exp.report(); m_status = SourceError; }
// class RenderVideo

// constructor
// *hRslt receives S_OK (construction itself cannot fail here).
// All pointers start NULL and all cache lists empty so that release()
// is safe to call even if openFile()/openCam() was never invoked.
VideoFFmpeg::VideoFFmpeg (HRESULT * hRslt) : VideoBase(),
m_codec(NULL), m_formatCtx(NULL), m_codecCtx(NULL),
m_frame(NULL), m_frameDeinterlaced(NULL), m_frameRGB(NULL), m_imgConvertCtx(NULL),
m_deinterlace(false), m_preseek(0), m_videoStream(-1), m_baseFrameRate(25.0),
m_lastFrame(-1), m_eof(false), m_externTime(false), m_curPosition(-1), m_startTime(0),
m_captWidth(0), m_captHeight(0), m_captRate(0.f), m_isImage(false),
m_isThreaded(false), m_isStreaming(false), m_stopThread(false), m_cacheStarted(false)
{
	// set video format
	m_format = RGB24;
	// force flip because ffmpeg always return the image in the wrong orientation for texture
	setFlip(true);
	// construction is OK
	*hRslt = S_OK;
	// empty the thread list and the frame/packet cache lists;
	// m_cacheMutex protects the frame cache shared with the cache thread
	BLI_listbase_clear(&m_thread);
	pthread_mutex_init(&m_cacheMutex, NULL);
	BLI_listbase_clear(&m_frameCacheFree);
	BLI_listbase_clear(&m_frameCacheBase);
	BLI_listbase_clear(&m_packetCacheFree);
	BLI_listbase_clear(&m_packetCacheBase);
}
// destructor
// NOTE(review): intentionally empty — FFmpeg resources and the cache are
// torn down in release(); confirm the owner always calls release() before
// destruction, otherwise decoder/format contexts would leak.
VideoFFmpeg::~VideoFFmpeg ()
{
}
  74. void VideoFFmpeg::refresh(void)
  75. {
  76. // a fixed image will not refresh because it is loaded only once at creation
  77. if (m_isImage)
  78. return;
  79. m_avail = false;
  80. }
// release components
// Free every FFmpeg resource owned by this object and reset it to the
// stopped state. Idempotent; always returns true.
bool VideoFFmpeg::release()
{
	// stop the cache thread first: it dereferences m_codecCtx/m_frame below
	stopCache();
	if (m_codecCtx)
	{
		avcodec_close(m_codecCtx);
		m_codecCtx = NULL;
	}
	if (m_formatCtx)
	{
		avformat_close_input(&m_formatCtx);
		m_formatCtx = NULL;
	}
	if (m_frame)
	{
		av_free(m_frame);
		m_frame = NULL;
	}
	if (m_frameDeinterlaced)
	{
		// the pixel buffer was allocated by us with MEM_callocN (see openStream),
		// so it must be freed separately from the AVFrame shell
		MEM_freeN(m_frameDeinterlaced->data[0]);
		av_free(m_frameDeinterlaced);
		m_frameDeinterlaced = NULL;
	}
	if (m_frameRGB)
	{
		// same ownership split: buffer is ours, frame shell is FFmpeg's
		MEM_freeN(m_frameRGB->data[0]);
		av_free(m_frameRGB);
		m_frameRGB = NULL;
	}
	if (m_imgConvertCtx)
	{
		sws_freeContext(m_imgConvertCtx);
		m_imgConvertCtx = NULL;
	}
	m_codec = NULL;
	m_status = SourceStopped;
	// force a restart on the next play()
	m_lastFrame = -1;
	return true;
}
  123. AVFrame *VideoFFmpeg::allocFrameRGB()
  124. {
  125. AVFrame *frame;
  126. frame = av_frame_alloc();
  127. if (m_format == RGBA32)
  128. {
  129. avpicture_fill((AVPicture*)frame,
  130. (uint8_t*)MEM_callocN(avpicture_get_size(
  131. AV_PIX_FMT_RGBA,
  132. m_codecCtx->width, m_codecCtx->height),
  133. "ffmpeg rgba"),
  134. AV_PIX_FMT_RGBA, m_codecCtx->width, m_codecCtx->height);
  135. } else
  136. {
  137. avpicture_fill((AVPicture*)frame,
  138. (uint8_t*)MEM_callocN(avpicture_get_size(
  139. AV_PIX_FMT_RGB24,
  140. m_codecCtx->width, m_codecCtx->height),
  141. "ffmpeg rgb"),
  142. AV_PIX_FMT_RGB24, m_codecCtx->width, m_codecCtx->height);
  143. }
  144. return frame;
  145. }
// set initial parameters
// Capture hints consumed later by openCam (files ignore them):
// requested frame size and rate; 'image' forces still-image mode
// (load the first frame only, never refresh).
void VideoFFmpeg::initParams (short width, short height, float rate, bool image)
{
	m_captWidth = width;
	m_captHeight = height;
	m_captRate = rate;
	m_isImage = image;
}
// Open any FFmpeg source (file, capture device or URL) and prepare decoding:
// locate the first video stream, open its decoder, determine the base frame
// rate, allocate the decode/deinterlace frames and the sws RGB conversion
// context. Returns 0 on success; on failure everything allocated so far is
// freed again and -1 is returned.
int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AVDictionary **formatParams)
{
	AVFormatContext *formatCtx = NULL;
	int i, videoStream;
	AVCodec *codec;
	AVCodecContext *codecCtx;

	if (avformat_open_input(&formatCtx, filename, inputFormat, formatParams)!=0)
		return -1;

	if (avformat_find_stream_info(formatCtx, NULL) < 0)
	{
		avformat_close_input(&formatCtx);
		return -1;
	}

	/* Find the first video stream */
	videoStream=-1;
	for (i=0; i<formatCtx->nb_streams; i++)
	{
		if (formatCtx->streams[i] &&
		    get_codec_from_stream(formatCtx->streams[i]) &&
		    (get_codec_from_stream(formatCtx->streams[i])->codec_type==AVMEDIA_TYPE_VIDEO))
		{
			videoStream=i;
			break;
		}
	}

	if (videoStream==-1)
	{
		avformat_close_input(&formatCtx);
		return -1;
	}

	codecCtx = get_codec_from_stream(formatCtx->streams[videoStream]);

	/* Find the decoder for the video stream */
	codec=avcodec_find_decoder(codecCtx->codec_id);
	if (codec==NULL)
	{
		avformat_close_input(&formatCtx);
		return -1;
	}
	// tolerate slightly non-conformant encoders
	codecCtx->workaround_bugs = 1;
	if (avcodec_open2(codecCtx, codec, NULL) < 0)
	{
		avformat_close_input(&formatCtx);
		return -1;
	}

#ifdef FFMPEG_OLD_FRAME_RATE
	if (codecCtx->frame_rate>1000 && codecCtx->frame_rate_base==1)
		codecCtx->frame_rate_base=1000;
	m_baseFrameRate = (double)codecCtx->frame_rate / (double)codecCtx->frame_rate_base;
#else
	m_baseFrameRate = av_q2d(av_get_r_frame_rate_compat(formatCtx->streams[videoStream]));
#endif
	// fall back to the default when the container reports no usable rate
	if (m_baseFrameRate <= 0.0)
		m_baseFrameRate = defFrameRate;

	// everything succeeded so far: commit to members
	m_codec = codec;
	m_codecCtx = codecCtx;
	m_formatCtx = formatCtx;
	m_videoStream = videoStream;
	m_frame = av_frame_alloc();
	m_frameDeinterlaced = av_frame_alloc();

	// allocate buffer if deinterlacing is required
	// NOTE(review): allocated unconditionally even when m_deinterlace is off;
	// freed in release() / the error path below
	avpicture_fill((AVPicture*)m_frameDeinterlaced,
		(uint8_t*)MEM_callocN(avpicture_get_size(
			m_codecCtx->pix_fmt,
			m_codecCtx->width, m_codecCtx->height),
			"ffmpeg deinterlace"),
		m_codecCtx->pix_fmt, m_codecCtx->width, m_codecCtx->height);

	// check if the pixel format supports Alpha
	if (m_codecCtx->pix_fmt == AV_PIX_FMT_RGB32 ||
		m_codecCtx->pix_fmt == AV_PIX_FMT_BGR32 ||
		m_codecCtx->pix_fmt == AV_PIX_FMT_RGB32_1 ||
		m_codecCtx->pix_fmt == AV_PIX_FMT_BGR32_1)
	{
		// allocate buffer to store final decoded frame
		m_format = RGBA32;
		// allocate sws context
		m_imgConvertCtx = sws_getContext(
			m_codecCtx->width,
			m_codecCtx->height,
			m_codecCtx->pix_fmt,
			m_codecCtx->width,
			m_codecCtx->height,
			AV_PIX_FMT_RGBA,
			SWS_FAST_BILINEAR,
			NULL, NULL, NULL);
	} else
	{
		// allocate buffer to store final decoded frame
		m_format = RGB24;
		// allocate sws context
		m_imgConvertCtx = sws_getContext(
			m_codecCtx->width,
			m_codecCtx->height,
			m_codecCtx->pix_fmt,
			m_codecCtx->width,
			m_codecCtx->height,
			AV_PIX_FMT_RGB24,
			SWS_FAST_BILINEAR,
			NULL, NULL, NULL);
	}
	m_frameRGB = allocFrameRGB();

	if (!m_imgConvertCtx) {
		// sws context creation failed: roll back everything committed above
		avcodec_close(m_codecCtx);
		m_codecCtx = NULL;
		avformat_close_input(&m_formatCtx);
		m_formatCtx = NULL;
		av_free(m_frame);
		m_frame = NULL;
		MEM_freeN(m_frameDeinterlaced->data[0]);
		av_free(m_frameDeinterlaced);
		m_frameDeinterlaced = NULL;
		MEM_freeN(m_frameRGB->data[0]);
		av_free(m_frameRGB);
		m_frameRGB = NULL;
		return -1;
	}
	return 0;
}
/*
 * This thread is used to load video frame asynchronously.
 * It provides a frame caching service.
 * The main thread is responsible for positioning the frame pointer in the
 * file correctly before calling startCache() which starts this thread.
 * The cache is organized in two layers: 1) a cache of 20-30 undecoded packets to keep
 * memory and CPU low 2) a cache of 5 decoded frames.
 * If the main thread does not find the frame in the cache (because the video has restarted
 * or because the GE is lagging), it stops the cache with StopCache() (this is a synchronous
 * function: it sends a signal to stop the cache thread and wait for confirmation), then
 * change the position in the stream and restarts the cache thread.
 */
// Thread entry point; 'data' is the owning VideoFFmpeg instance.
// Only m_frameCacheFree/m_frameCacheBase need m_cacheMutex: the packet
// lists and m_frame are touched exclusively by this thread while caching.
void *VideoFFmpeg::cacheThread(void *data)
{
	VideoFFmpeg* video = (VideoFFmpeg*)data;
	// holds the frame that is being decoded
	CacheFrame *currentFrame = NULL;
	CachePacket *cachePacket;
	bool endOfFile = false;
	int frameFinished = 0;
	double timeBase = av_q2d(video->m_formatCtx->streams[video->m_videoStream]->time_base);
	int64_t startTs = video->m_formatCtx->streams[video->m_videoStream]->start_time;

	if (startTs == AV_NOPTS_VALUE)
		startTs = 0;

	while (!video->m_stopThread)
	{
		// packet cache is used solely by this thread, no need to lock
		// In case the stream/file contains other stream than the one we are looking for,
		// allow a bit of cycling to get rid quickly of those frames
		frameFinished = 0;
		while (   !endOfFile
		       && (cachePacket = (CachePacket *)video->m_packetCacheFree.first) != NULL
		       && frameFinished < 25)
		{
			// free packet => packet cache is not full yet, just read more
			if (av_read_frame(video->m_formatCtx, &cachePacket->packet)>=0)
			{
				if (cachePacket->packet.stream_index == video->m_videoStream)
				{
					// make sure fresh memory is allocated for the packet and move it to queue
					av_dup_packet(&cachePacket->packet);
					BLI_remlink(&video->m_packetCacheFree, cachePacket);
					BLI_addtail(&video->m_packetCacheBase, cachePacket);
					break;
				} else {
					// this is not a good packet for us, just leave it on free queue
					// Note: here we could handle sound packet
					av_free_packet(&cachePacket->packet);
					frameFinished++;
				}
			} else {
				if (video->m_isFile)
					// this mark the end of the file
					endOfFile = true;
				// if we cannot read a packet, no need to continue
				break;
			}
		}
		// frame cache is also used by main thread, lock
		if (currentFrame == NULL)
		{
			// no current frame being decoded, take free one
			pthread_mutex_lock(&video->m_cacheMutex);
			if ((currentFrame = (CacheFrame *)video->m_frameCacheFree.first) != NULL)
				BLI_remlink(&video->m_frameCacheFree, currentFrame);
			pthread_mutex_unlock(&video->m_cacheMutex);
		}
		if (currentFrame != NULL)
		{
			// this frame is out of free and busy queue, we can manipulate it without locking
			frameFinished = 0;
			while (!frameFinished && (cachePacket = (CachePacket *)video->m_packetCacheBase.first) != NULL)
			{
				BLI_remlink(&video->m_packetCacheBase, cachePacket);
				// use m_frame because when caching, it is not used in main thread
				// we can't use currentFrame directly because we need to convert to RGB first
				avcodec_decode_video2(video->m_codecCtx,
					video->m_frame, &frameFinished,
					&cachePacket->packet);
				if (frameFinished)
				{
					AVFrame * input = video->m_frame;

					/* This means the data wasnt read properly, this check stops crashing */
					if (   input->data[0]!=0 || input->data[1]!=0
					    || input->data[2]!=0 || input->data[3]!=0)
					{
						if (video->m_deinterlace)
						{
							// deinterlace in place; on failure fall back to the raw frame
							if (avpicture_deinterlace(
								(AVPicture*) video->m_frameDeinterlaced,
								(const AVPicture*) video->m_frame,
								video->m_codecCtx->pix_fmt,
								video->m_codecCtx->width,
								video->m_codecCtx->height) >= 0)
							{
								input = video->m_frameDeinterlaced;
							}
						}
						// convert to RGB24
						sws_scale(video->m_imgConvertCtx,
							input->data,
							input->linesize,
							0,
							video->m_codecCtx->height,
							currentFrame->frame->data,
							currentFrame->frame->linesize);
						// move frame to queue, this frame is necessarily the next one;
						// frame position derived from the packet dts relative to stream start
						video->m_curPosition = (long)((cachePacket->packet.dts-startTs) * (video->m_baseFrameRate*timeBase) + 0.5);
						currentFrame->framePosition = video->m_curPosition;
						pthread_mutex_lock(&video->m_cacheMutex);
						BLI_addtail(&video->m_frameCacheBase, currentFrame);
						pthread_mutex_unlock(&video->m_cacheMutex);
						currentFrame = NULL;
					}
				}
				av_free_packet(&cachePacket->packet);
				BLI_addtail(&video->m_packetCacheFree, cachePacket);
			}
			if (currentFrame && endOfFile)
			{
				// no more packet and end of file => put a special frame that indicates that
				currentFrame->framePosition = -1;
				pthread_mutex_lock(&video->m_cacheMutex);
				BLI_addtail(&video->m_frameCacheBase, currentFrame);
				pthread_mutex_unlock(&video->m_cacheMutex);
				currentFrame = NULL;
				// no need to stay any longer in this thread
				break;
			}
		}
		// small sleep to avoid unnecessary looping
		PIL_sleep_ms(10);
	}
	// before quitting, put back the current frame to queue to allow freeing
	if (currentFrame)
	{
		pthread_mutex_lock(&video->m_cacheMutex);
		BLI_addtail(&video->m_frameCacheFree, currentFrame);
		pthread_mutex_unlock(&video->m_cacheMutex);
	}
	return 0;
}
  413. // start thread to cache video frame from file/capture/stream
  414. // this function should be called only when the position in the stream is set for the
  415. // first frame to cache
  416. bool VideoFFmpeg::startCache()
  417. {
  418. if (!m_cacheStarted && m_isThreaded)
  419. {
  420. m_stopThread = false;
  421. for (int i=0; i<CACHE_FRAME_SIZE; i++)
  422. {
  423. CacheFrame *frame = new CacheFrame();
  424. frame->frame = allocFrameRGB();
  425. BLI_addtail(&m_frameCacheFree, frame);
  426. }
  427. for (int i=0; i<CACHE_PACKET_SIZE; i++)
  428. {
  429. CachePacket *packet = new CachePacket();
  430. BLI_addtail(&m_packetCacheFree, packet);
  431. }
  432. BLI_init_threads(&m_thread, cacheThread, 1);
  433. BLI_insert_thread(&m_thread, this);
  434. m_cacheStarted = true;
  435. }
  436. return m_cacheStarted;
  437. }
// Synchronously stop the cache thread and free both cache pools.
// Note the asymmetry: packets on the busy list hold duplicated data and
// need av_free_packet(); packets on the free list hold nothing.
void VideoFFmpeg::stopCache()
{
	if (m_cacheStarted)
	{
		// signal the thread, then wait for it to terminate
		m_stopThread = true;
		BLI_end_threads(&m_thread);
		// now delete the cache
		CacheFrame *frame;
		CachePacket *packet;
		while ((frame = (CacheFrame *)m_frameCacheBase.first) != NULL)
		{
			BLI_remlink(&m_frameCacheBase, frame);
			// pixel buffer was MEM_callocN'd in allocFrameRGB
			MEM_freeN(frame->frame->data[0]);
			av_free(frame->frame);
			delete frame;
		}
		while ((frame = (CacheFrame *)m_frameCacheFree.first) != NULL)
		{
			BLI_remlink(&m_frameCacheFree, frame);
			MEM_freeN(frame->frame->data[0]);
			av_free(frame->frame);
			delete frame;
		}
		while ((packet = (CachePacket *)m_packetCacheBase.first) != NULL)
		{
			BLI_remlink(&m_packetCacheBase, packet);
			// busy packets own av_dup_packet'd data
			av_free_packet(&packet->packet);
			delete packet;
		}
		while ((packet = (CachePacket *)m_packetCacheFree.first) != NULL)
		{
			BLI_remlink(&m_packetCacheFree, packet);
			delete packet;
		}
		m_cacheStarted = false;
	}
}
  475. void VideoFFmpeg::releaseFrame(AVFrame *frame)
  476. {
  477. if (frame == m_frameRGB)
  478. {
  479. // this is not a frame from the cache, ignore
  480. return;
  481. }
  482. // this frame MUST be the first one of the queue
  483. pthread_mutex_lock(&m_cacheMutex);
  484. CacheFrame *cacheFrame = (CacheFrame *)m_frameCacheBase.first;
  485. assert (cacheFrame != NULL && cacheFrame->frame == frame);
  486. BLI_remlink(&m_frameCacheBase, cacheFrame);
  487. BLI_addtail(&m_frameCacheFree, cacheFrame);
  488. pthread_mutex_unlock(&m_cacheMutex);
  489. }
// open video file
// Opens 'filename' via openStream, derives the GOP preseek amount, the
// playable time range, and decides whether the source is really a stream
// (http/rtsp/non-seekable) or a still image, then enables threading on
// multi-core systems.
void VideoFFmpeg::openFile (char *filename)
{
	if (openStream(filename, NULL, NULL) != 0)
		return;

	// preseek: number of frames to decode ahead after a seek so that
	// B/P-frames resolve correctly (capped at 25)
	if (m_codecCtx->gop_size)
		m_preseek = (m_codecCtx->gop_size < 25) ? m_codecCtx->gop_size+1 : 25;
	else if (m_codecCtx->has_b_frames)
		m_preseek = 25;	// should determine gopsize
	else
		m_preseek = 0;

	// get video time range
	// NOTE(review): m_formatCtx->duration may be AV_NOPTS_VALUE for some
	// containers — verify upstream behavior for files with unknown duration
	m_range[0] = 0.0;
	m_range[1] = (double)m_formatCtx->duration / AV_TIME_BASE;

	// open base class
	VideoBase::openFile(filename);

	if (
		// ffmpeg reports that http source are actually non stream
		// but it is really not desirable to seek on http file, so force streaming.
		// It would be good to find this information from the context but there are no simple indication
		!strncmp(filename, "http://", 7) ||
		!strncmp(filename, "rtsp://", 7) ||
		(m_formatCtx->pb && !m_formatCtx->pb->seekable)
		)
	{
		// the file is in fact a streaming source, treat as cam to prevent seeking
		m_isFile = false;
		// but it's not handled exactly like a camera.
		m_isStreaming = true;
		// for streaming it is important to do non blocking read
		m_formatCtx->flags |= AVFMT_FLAG_NONBLOCK;
	}

	if (m_isImage)
	{
		// the file is to be treated as an image, i.e. load the first frame only
		m_isFile = false;
		// in case of reload, the filename is taken from m_imageName, no need to change it
		// (pointer comparison is intentional: reload passes m_imageName.Ptr() itself)
		if (m_imageName.Ptr() != filename)
			m_imageName = filename;
		m_preseek = 0;
		m_avail = false;
		play();
	}
	// check if we should do multi-threading?
	if (!m_isImage && BLI_system_thread_count() > 1)
	{
		// never thread image: there are no frame to read ahead
		// no need to thread if the system has a single core
		m_isThreaded = true;
	}
}
  541. // open video capture device
  542. void VideoFFmpeg::openCam (char *file, short camIdx)
  543. {
  544. // open camera source
  545. AVInputFormat *inputFormat;
  546. AVDictionary *formatParams = NULL;
  547. char filename[28], rateStr[20];
  548. #ifdef WIN32
  549. // video capture on windows only through Video For Windows driver
  550. inputFormat = av_find_input_format("vfwcap");
  551. if (!inputFormat)
  552. // Video For Windows not supported??
  553. return;
  554. sprintf(filename, "%d", camIdx);
  555. #else
  556. // In Linux we support two types of devices: VideoForLinux and DV1394.
  557. // the user specify it with the filename:
  558. // [<device_type>][:<standard>]
  559. // <device_type> : 'v4l' for VideoForLinux, 'dv1394' for DV1394. By default 'v4l'
  560. // <standard> : 'pal', 'secam' or 'ntsc'. By default 'ntsc'
  561. // The driver name is constructed automatically from the device type:
  562. // v4l : /dev/video<camIdx>
  563. // dv1394: /dev/dv1394/<camIdx>
  564. // If you have different driver name, you can specify the driver name explicitly
  565. // instead of device type. Examples of valid filename:
  566. // /dev/v4l/video0:pal
  567. // /dev/ieee1394/1:ntsc
  568. // dv1394:secam
  569. // v4l:pal
  570. char *p;
  571. if (file && strstr(file, "1394") != NULL)
  572. {
  573. // the user specifies a driver, check if it is v4l or d41394
  574. inputFormat = av_find_input_format("dv1394");
  575. sprintf(filename, "/dev/dv1394/%d", camIdx);
  576. } else
  577. {
  578. const char *formats[] = {"video4linux2,v4l2", "video4linux2", "video4linux"};
  579. int i, formatsCount = sizeof(formats) / sizeof(char*);
  580. for (i = 0; i < formatsCount; i++) {
  581. inputFormat = av_find_input_format(formats[i]);
  582. if (inputFormat)
  583. break;
  584. }
  585. sprintf(filename, "/dev/video%d", camIdx);
  586. }
  587. if (!inputFormat)
  588. // these format should be supported, check ffmpeg compilation
  589. return;
  590. if (file && strncmp(file, "/dev", 4) == 0)
  591. {
  592. // user does not specify a driver
  593. strncpy(filename, file, sizeof(filename));
  594. filename[sizeof(filename)-1] = 0;
  595. if ((p = strchr(filename, ':')) != 0)
  596. *p = 0;
  597. }
  598. if (file && (p = strchr(file, ':')) != NULL) {
  599. av_dict_set(&formatParams, "standard", p+1, 0);
  600. }
  601. #endif
  602. //frame rate
  603. if (m_captRate <= 0.f)
  604. m_captRate = defFrameRate;
  605. sprintf(rateStr, "%f", m_captRate);
  606. av_dict_set(&formatParams, "framerate", rateStr, 0);
  607. if (m_captWidth > 0 && m_captHeight > 0) {
  608. char video_size[64];
  609. BLI_snprintf(video_size, sizeof(video_size), "%dx%d", m_captWidth, m_captHeight);
  610. av_dict_set(&formatParams, "video_size", video_size, 0);
  611. }
  612. if (openStream(filename, inputFormat, &formatParams) != 0)
  613. return;
  614. // for video capture it is important to do non blocking read
  615. m_formatCtx->flags |= AVFMT_FLAG_NONBLOCK;
  616. // open base class
  617. VideoBase::openCam(file, camIdx);
  618. // check if we should do multi-threading?
  619. if (BLI_system_thread_count() > 1)
  620. {
  621. // no need to thread if the system has a single core
  622. m_isThreaded = true;
  623. }
  624. av_dict_free(&formatParams);
  625. }
  626. // play video
  627. bool VideoFFmpeg::play (void)
  628. {
  629. try
  630. {
  631. // if object is able to play
  632. if (VideoBase::play())
  633. {
  634. // set video position
  635. setPositions();
  636. if (m_isStreaming)
  637. {
  638. av_read_play(m_formatCtx);
  639. }
  640. // return success
  641. return true;
  642. }
  643. }
  644. CATCH_EXCP;
  645. return false;
  646. }
  647. // pause video
  648. bool VideoFFmpeg::pause (void)
  649. {
  650. try
  651. {
  652. if (VideoBase::pause())
  653. {
  654. if (m_isStreaming)
  655. {
  656. av_read_pause(m_formatCtx);
  657. }
  658. return true;
  659. }
  660. }
  661. CATCH_EXCP;
  662. return false;
  663. }
  664. // stop video
  665. bool VideoFFmpeg::stop (void)
  666. {
  667. try
  668. {
  669. VideoBase::stop();
  670. // force restart when play
  671. m_lastFrame = -1;
  672. return true;
  673. }
  674. CATCH_EXCP;
  675. return false;
  676. }
  677. // set video range
  678. void VideoFFmpeg::setRange (double start, double stop)
  679. {
  680. try
  681. {
  682. // set range
  683. if (m_isFile)
  684. {
  685. VideoBase::setRange(start, stop);
  686. // set range for video
  687. setPositions();
  688. }
  689. }
  690. CATCH_EXCP;
  691. }
// set framerate
// Thin pass-through to the base class; the decoder itself keeps using the
// stream's native rate (m_baseFrameRate).
void VideoFFmpeg::setFrameRate (float rate)
{
	VideoBase::setFrameRate(rate);
}
// image calculation
// load frame from video
// Called every logic tick while playing: compute the frame index that
// corresponds to the current time (or to 'ts' when an external timestamp
// is supplied for files), grab it, and feed it to the texture pipeline.
// Also handles end-of-range looping via m_repeat and stream resync.
void VideoFFmpeg::calcImage (unsigned int texId, double ts)
{
	if (m_status == SourcePlaying)
	{
		// get actual time
		double startTime = PIL_check_seconds_timer();
		double actTime;
		// timestamp passed from audio actuators can sometimes be slightly negative
		if (m_isFile && ts >= -0.5)
		{
			// allow setting timestamp only when not streaming
			actTime = ts;
			if (actTime * actFrameRate() < m_lastFrame)
			{
				// user is asking to rewind, force a cache clear to make sure we will do a seek
				// note that this does not decrement m_repeat if ts didn't reach m_range[1]
				stopCache();
			}
		}
		else
		{
			// wall-clock driven: anchor m_startTime on the first frame of a stream
			if (m_lastFrame == -1 && !m_isFile)
				m_startTime = startTime;
			actTime = startTime - m_startTime;
		}
		// if video has ended
		if (m_isFile && actTime * m_frameRate >= m_range[1])
		{
			// in any case, this resets the cache
			stopCache();
			// if repeats are set, decrease them
			if (m_repeat > 0)
				--m_repeat;
			// if video has to be replayed
			if (m_repeat != 0)
			{
				// reset its position
				actTime -= (m_range[1] - m_range[0]) / m_frameRate;
				m_startTime += (m_range[1] - m_range[0]) / m_frameRate;
			}
			// if video has to be stopped, stop it
			else
			{
				m_status = SourceStopped;
				return;
			}
		}
		// actual frame
		long actFrame = (m_isImage) ? m_lastFrame+1 : long(actTime * actFrameRate());
		// if actual frame differs from last frame
		if (actFrame != m_lastFrame)
		{
			AVFrame* frame;
			// get image
			if ((frame = grabFrame(actFrame)) != NULL)
			{
				if (!m_isFile && !m_cacheStarted)
				{
					// streaming without cache: detect synchronization problem
					double execTime = PIL_check_seconds_timer() - startTime;
					if (execTime > 0.005)
					{
						// exec time is too long, it means that the function was blocking
						// resynchronize the stream from this time
						m_startTime += execTime;
					}
				}
				// save actual frame
				m_lastFrame = actFrame;
				// init image, if needed
				init(short(m_codecCtx->width), short(m_codecCtx->height));
				// process image
				process((BYTE*)(frame->data[0]));
				// finished with the frame, release it so that cache can reuse it
				releaseFrame(frame);
				// in case it is an image, automatically stop reading it
				if (m_isImage)
				{
					m_status = SourceStopped;
					// close the file as we don't need it anymore
					release();
				}
			} else if (m_isStreaming)
			{
				// we didn't get a frame and we are streaming, this may be due to
				// a delay in the network or because we are getting the frame too fast.
				// In the later case, shift time by a small amount to compensate for a drift
				m_startTime += 0.001;
			}
		}
	}
}
  791. // set actual position
  792. void VideoFFmpeg::setPositions (void)
  793. {
  794. // set video start time
  795. m_startTime = PIL_check_seconds_timer();
  796. // if file is played and actual position is before end position
  797. if (!m_eof && m_lastFrame >= 0 && (!m_isFile || m_lastFrame < m_range[1] * actFrameRate()))
  798. // continue from actual position
  799. m_startTime -= double(m_lastFrame) / actFrameRate();
  800. else {
  801. m_startTime -= m_range[0];
  802. // start from beginning, stop cache just in case
  803. stopCache();
  804. }
  805. }
// position pointer in file, position in second
// Return the decoded frame for the given frame position, or NULL when no
// frame is available (cache empty, end of file, frame ahead of time, ...).
// With the cache thread running, the frame comes from the head of the cache
// queue and the caller is expected to hand it back via releaseFrame();
// otherwise the stream is read and decoded directly into m_frameRGB, which
// is owned by this object.
AVFrame *VideoFFmpeg::grabFrame(long position)
{
	AVPacket packet;
	int frameFinished;
	// posFound == 1 means the read pointer is already at the right place;
	// it is reset to 0 after a seek until targetTs is reached
	int posFound = 1;
	bool frameLoaded = false;
	// stream timestamp of the frame we are seeking to (file seek path only)
	int64_t targetTs = 0;
	CacheFrame *frame;
	// dts of the last decoded packet, used to compute the exact frame number
	int64_t dts = 0;
	if (m_cacheStarted)
	{
		// when cache is active, we must not read the file directly
		do {
			// only the head of the queue is read here; the cache thread appends at the tail
			pthread_mutex_lock(&m_cacheMutex);
			frame = (CacheFrame *)m_frameCacheBase.first;
			pthread_mutex_unlock(&m_cacheMutex);
			// no need to remove the frame from the queue: the cache thread does not touch the head, only the tail
			if (frame == NULL)
			{
				// no frame in cache, in case of file it is an abnormal situation
				if (m_isFile)
				{
					// go back to no threaded reading
					stopCache();
					break;
				}
				return NULL;
			}
			if (frame->framePosition == -1)
			{
				// this frame mark the end of the file (only used for file)
				// leave in cache to make sure we don't miss it
				m_eof = true;
				return NULL;
			}
			// for streaming, always return the next frame,
			// that's what grabFrame does in non cache mode anyway.
			if (m_isStreaming || frame->framePosition == position)
			{
				return frame->frame;
			}
			// for cam, skip old frames to keep image realtime.
			// There should be no risk of clock drift since it all happens on the same CPU
			if (frame->framePosition > position)
			{
				// this can happen after rewind if the seek didn't find the first frame
				// the frame in the buffer is ahead of time, just leave it there
				return NULL;
			}
			// this frame is not useful, release it
			pthread_mutex_lock(&m_cacheMutex);
			BLI_remlink(&m_frameCacheBase, frame);
			BLI_addtail(&m_frameCacheFree, frame);
			pthread_mutex_unlock(&m_cacheMutex);
		} while (true);
	}
	// conversion factor between frame number and stream timestamp
	double timeBase = av_q2d(m_formatCtx->streams[m_videoStream]->time_base);
	int64_t startTs = m_formatCtx->streams[m_videoStream]->start_time;
	if (startTs == AV_NOPTS_VALUE)
		startTs = 0;
	// come here when there is no cache or cache has been stopped
	// locate the frame, by seeking if necessary (seeking is only possible for files)
	if (m_isFile)
	{
		// first check if the position that we are looking for is in the preseek range
		// if so, just read the frame until we get there
		if (position > m_curPosition + 1
			&& m_preseek
			&& position - (m_curPosition + 1) < m_preseek)
		{
			while (av_read_frame(m_formatCtx, &packet)>=0)
			{
				if (packet.stream_index == m_videoStream)
				{
					avcodec_decode_video2(
						m_codecCtx,
						m_frame, &frameFinished,
						&packet);
					if (frameFinished)
					{
						// derive the current frame number from the packet dts
						m_curPosition = (long)((packet.dts-startTs) * (m_baseFrameRate*timeBase) + 0.5);
					}
				}
				av_free_packet(&packet);
				if (position == m_curPosition+1)
					break;
			}
		}
		// if the position is not in preseek, do a direct jump
		if (position != m_curPosition + 1)
		{
			// seek target: m_preseek frames before the wanted position, in stream time units
			int64_t pos = (int64_t)((position - m_preseek) / (m_baseFrameRate*timeBase));
			if (pos < 0)
				pos = 0;
			pos += startTs;
			if (position <= m_curPosition || !m_eof)
			{
#if 0
				// Tried to make this work but couldn't: seeking on byte is ignored by the
				// format plugin and it will generally continue to read from last timestamp.
				// Too bad because frame seek is not always able to get the first frame
				// of the file.
				if (position <= m_preseek)
				{
					// we can safely go the beginning of the file
					if (av_seek_frame(m_formatCtx, m_videoStream, 0, AVSEEK_FLAG_BYTE) >= 0)
					{
						// binary seek does not reset the timestamp, must do it now
						av_update_cur_dts(m_formatCtx, m_formatCtx->streams[m_videoStream], startTs);
						m_curPosition = 0;
					}
				}
				else
#endif
				{
					// current position is now lost, guess a value.
					if (av_seek_frame(m_formatCtx, m_videoStream, pos, AVSEEK_FLAG_BACKWARD) >= 0)
					{
						// current position is now lost, guess a value.
						// It's not important because it will be set at this end of this function
						m_curPosition = position - m_preseek - 1;
					}
				}
			}
			// this is the timestamp of the frame we're looking for
			targetTs = (int64_t)(position / (m_baseFrameRate * timeBase)) + startTs;
			posFound = 0;
			avcodec_flush_buffers(m_codecCtx);
		}
	} else if (m_isThreaded)
	{
		// cache is not started but threading is possible
		// better not read the stream => make take some time, better start caching
		if (startCache())
			return NULL;
		// Abnormal!!! could not start cache, fall back on direct read
		m_isThreaded = false;
	}
	// find the correct frame, in case of streaming and no cache, it means just
	// return the next frame. This is not quite correct, may need more work
	while (av_read_frame(m_formatCtx, &packet) >= 0)
	{
		if (packet.stream_index == m_videoStream)
		{
			AVFrame *input = m_frame;
			short counter = 0;
			/* If m_isImage, while the data is not read properly (png, tiffs, etc formats may need several pass), else don't need while loop*/
			do {
				avcodec_decode_video2(m_codecCtx, m_frame, &frameFinished, &packet);
				counter++;
			} while ((input->data[0] == 0 && input->data[1] == 0 && input->data[2] == 0 && input->data[3] == 0) && counter < 10 && m_isImage);
			// remember dts to compute exact frame number
			dts = packet.dts;
			if (frameFinished && !posFound)
			{
				if (dts >= targetTs)
				{
					posFound = 1;
				}
			}
			if (frameFinished && posFound == 1)
			{
				// NOTE(review): this declaration shadows the 'input' above;
				// both start as m_frame and neither is reassigned before this
				// point, so behavior is unaffected, but the inner declaration
				// could be removed for clarity
				AVFrame * input = m_frame;
				/* This means the data wasnt read properly,
				 * this check stops crashing */
				if ( input->data[0]==0 && input->data[1]==0
					&& input->data[2]==0 && input->data[3]==0)
				{
					av_free_packet(&packet);
					break;
				}
				if (m_deinterlace)
				{
					if (avpicture_deinterlace(
						(AVPicture*) m_frameDeinterlaced,
						(const AVPicture*) m_frame,
						m_codecCtx->pix_fmt,
						m_codecCtx->width,
						m_codecCtx->height) >= 0)
					{
						// deinterlaced copy becomes the source for conversion
						input = m_frameDeinterlaced;
					}
				}
				// convert to RGB24
				sws_scale(m_imgConvertCtx,
					input->data,
					input->linesize,
					0,
					m_codecCtx->height,
					m_frameRGB->data,
					m_frameRGB->linesize);
				av_free_packet(&packet);
				frameLoaded = true;
				break;
			}
		}
		av_free_packet(&packet);
	}
	// for files, failing to load a frame means we reached the end of the stream
	m_eof = m_isFile && !frameLoaded;
	if (frameLoaded)
	{
		// compute the exact frame number from the dts of the loaded packet
		m_curPosition = (long)((dts-startTs) * (m_baseFrameRate*timeBase) + 0.5);
		if (m_isThreaded)
		{
			// normal case for file: first locate, then start cache
			if (!startCache())
			{
				// Abnormal!! could not start cache, return to non-cache mode
				m_isThreaded = false;
			}
		}
		return m_frameRGB;
	}
	return NULL;
}
  1022. // python methods
  1023. // cast Image pointer to VideoFFmpeg
  1024. inline VideoFFmpeg * getVideoFFmpeg (PyImage *self)
  1025. { return static_cast<VideoFFmpeg*>(self->m_image); }
  1026. // object initialization
  1027. static int VideoFFmpeg_init(PyObject *pySelf, PyObject *args, PyObject *kwds)
  1028. {
  1029. PyImage *self = reinterpret_cast<PyImage*>(pySelf);
  1030. // parameters - video source
  1031. // file name or format type for capture (only for Linux: video4linux or dv1394)
  1032. char * file = NULL;
  1033. // capture device number
  1034. short capt = -1;
  1035. // capture width, only if capt is >= 0
  1036. short width = 0;
  1037. // capture height, only if capt is >= 0
  1038. short height = 0;
  1039. // capture rate, only if capt is >= 0
  1040. float rate = 25.f;
  1041. static const char *kwlist[] = {"file", "capture", "rate", "width", "height", NULL};
  1042. // get parameters
  1043. if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|hfhh",
  1044. const_cast<char**>(kwlist), &file, &capt, &rate, &width, &height))
  1045. return -1;
  1046. try
  1047. {
  1048. // create video object
  1049. Video_init<VideoFFmpeg>(self);
  1050. // set thread usage
  1051. getVideoFFmpeg(self)->initParams(width, height, rate);
  1052. // open video source
  1053. Video_open(getVideo(self), file, capt);
  1054. }
  1055. catch (Exception & exp)
  1056. {
  1057. exp.report();
  1058. return -1;
  1059. }
  1060. // initialization succeded
  1061. return 0;
  1062. }
  1063. static PyObject *VideoFFmpeg_getPreseek(PyImage *self, void *closure)
  1064. {
  1065. return Py_BuildValue("h", getFFmpeg(self)->getPreseek());
  1066. }
  1067. // set range
  1068. static int VideoFFmpeg_setPreseek(PyImage *self, PyObject *value, void *closure)
  1069. {
  1070. // check validity of parameter
  1071. if (value == NULL || !PyLong_Check(value))
  1072. {
  1073. PyErr_SetString(PyExc_TypeError, "The value must be an integer");
  1074. return -1;
  1075. }
  1076. // set preseek
  1077. getFFmpeg(self)->setPreseek(PyLong_AsLong(value));
  1078. // success
  1079. return 0;
  1080. }
  1081. // get deinterlace
  1082. static PyObject *VideoFFmpeg_getDeinterlace(PyImage *self, void *closure)
  1083. {
  1084. if (getFFmpeg(self)->getDeinterlace())
  1085. Py_RETURN_TRUE;
  1086. else
  1087. Py_RETURN_FALSE;
  1088. }
  1089. // set flip
  1090. static int VideoFFmpeg_setDeinterlace(PyImage *self, PyObject *value, void *closure)
  1091. {
  1092. // check parameter, report failure
  1093. if (value == NULL || !PyBool_Check(value))
  1094. {
  1095. PyErr_SetString(PyExc_TypeError, "The value must be a bool");
  1096. return -1;
  1097. }
  1098. // set deinterlace
  1099. getFFmpeg(self)->setDeinterlace(value == Py_True);
  1100. // success
  1101. return 0;
  1102. }
// methods structure
// Python method table for the VideoTexture.VideoFFmpeg type; all entries
// forward to the shared VideoBase handlers.
static PyMethodDef videoMethods[] =
{ // methods from VideoBase class
	{"play", (PyCFunction)Video_play, METH_NOARGS, "Play (restart) video"},
	{"pause", (PyCFunction)Video_pause, METH_NOARGS, "pause video"},
	{"stop", (PyCFunction)Video_stop, METH_NOARGS, "stop video (play will replay it from start)"},
	{"refresh", (PyCFunction)Video_refresh, METH_VARARGS, "Refresh video - get its status"},
	{NULL}  // sentinel
};
// attributes structure
// Python attribute (getset) table for VideoTexture.VideoFFmpeg, combining
// generic VideoBase/ImageBase attributes with the two FFmpeg-specific ones
// (preseek, deinterlace) defined above.
static PyGetSetDef videoGetSets[] =
{ // methods from VideoBase class
	{(char*)"status", (getter)Video_getStatus, NULL, (char*)"video status", NULL},
	{(char*)"range", (getter)Video_getRange, (setter)Video_setRange, (char*)"replay range", NULL},
	{(char*)"repeat", (getter)Video_getRepeat, (setter)Video_setRepeat, (char*)"repeat count, -1 for infinite repeat", NULL},
	{(char*)"framerate", (getter)Video_getFrameRate, (setter)Video_setFrameRate, (char*)"frame rate", NULL},
	// attributes from ImageBase class
	{(char*)"valid", (getter)Image_valid, NULL, (char*)"bool to tell if an image is available", NULL},
	{(char*)"image", (getter)Image_getImage, NULL, (char*)"image data", NULL},
	{(char*)"size", (getter)Image_getSize, NULL, (char*)"image size", NULL},
	{(char*)"scale", (getter)Image_getScale, (setter)Image_setScale, (char*)"fast scale of image (near neighbor)", NULL},
	{(char*)"flip", (getter)Image_getFlip, (setter)Image_setFlip, (char*)"flip image vertically", NULL},
	{(char*)"filter", (getter)Image_getFilter, (setter)Image_setFilter, (char*)"pixel filter", NULL},
	// FFmpeg-specific attributes
	{(char*)"preseek", (getter)VideoFFmpeg_getPreseek, (setter)VideoFFmpeg_setPreseek, (char*)"nb of frames of preseek", NULL},
	{(char*)"deinterlace", (getter)VideoFFmpeg_getDeinterlace, (setter)VideoFFmpeg_setDeinterlace, (char*)"deinterlace image", NULL},
	{NULL}  // sentinel
};
// python type declaration
// Type object for VideoTexture.VideoFFmpeg; storage is a PyImage wrapper,
// construction goes through Image_allocNew/VideoFFmpeg_init.
PyTypeObject VideoFFmpegType =
{
	PyVarObject_HEAD_INIT(NULL, 0)
	"VideoTexture.VideoFFmpeg", /*tp_name*/
	sizeof(PyImage),           /*tp_basicsize*/
	0,                         /*tp_itemsize*/
	(destructor)Image_dealloc, /*tp_dealloc*/
	0,                         /*tp_print*/
	0,                         /*tp_getattr*/
	0,                         /*tp_setattr*/
	0,                         /*tp_compare*/
	0,                         /*tp_repr*/
	0,                         /*tp_as_number*/
	0,                         /*tp_as_sequence*/
	0,                         /*tp_as_mapping*/
	0,                         /*tp_hash */
	0,                         /*tp_call*/
	0,                         /*tp_str*/
	0,                         /*tp_getattro*/
	0,                         /*tp_setattro*/
	&imageBufferProcs,         /*tp_as_buffer*/
	Py_TPFLAGS_DEFAULT,        /*tp_flags*/
	"FFmpeg video source",     /* tp_doc */
	0,                         /* tp_traverse */
	0,                         /* tp_clear */
	0,                         /* tp_richcompare */
	0,                         /* tp_weaklistoffset */
	0,                         /* tp_iter */
	0,                         /* tp_iternext */
	videoMethods,              /* tp_methods */
	0,                         /* tp_members */
	videoGetSets,              /* tp_getset */
	0,                         /* tp_base */
	0,                         /* tp_dict */
	0,                         /* tp_descr_get */
	0,                         /* tp_descr_set */
	0,                         /* tp_dictoffset */
	(initproc)VideoFFmpeg_init, /* tp_init */
	0,                         /* tp_alloc */
	Image_allocNew,            /* tp_new */
};
  1172. // object initialization
  1173. static int ImageFFmpeg_init(PyObject *pySelf, PyObject *args, PyObject *kwds)
  1174. {
  1175. PyImage *self = reinterpret_cast<PyImage*>(pySelf);
  1176. // parameters - video source
  1177. // file name or format type for capture (only for Linux: video4linux or dv1394)
  1178. char * file = NULL;
  1179. // get parameters
  1180. if (!PyArg_ParseTuple(args, "s:ImageFFmpeg", &file))
  1181. return -1;
  1182. try
  1183. {
  1184. // create video object
  1185. Video_init<VideoFFmpeg>(self);
  1186. getVideoFFmpeg(self)->initParams(0, 0, 1.0, true);
  1187. // open video source
  1188. Video_open(getVideo(self), file, -1);
  1189. }
  1190. catch (Exception & exp)
  1191. {
  1192. exp.report();
  1193. return -1;
  1194. }
  1195. // initialization succeded
  1196. return 0;
  1197. }
  1198. static PyObject *Image_reload(PyImage *self, PyObject *args)
  1199. {
  1200. char * newname = NULL;
  1201. if (!PyArg_ParseTuple(args, "|s:reload", &newname))
  1202. return NULL;
  1203. if (self->m_image != NULL)
  1204. {
  1205. VideoFFmpeg* video = getFFmpeg(self);
  1206. // check type of object
  1207. if (!newname)
  1208. newname = video->getImageName();
  1209. if (!newname) {
  1210. // if not set, retport error
  1211. PyErr_SetString(PyExc_RuntimeError, "No image file name given");
  1212. return NULL;
  1213. }
  1214. // make sure the previous file is cleared
  1215. video->release();
  1216. // open the new file
  1217. video->openFile(newname);
  1218. }
  1219. Py_RETURN_NONE;
  1220. }
// methods structure
// Python method table for VideoTexture.ImageFFmpeg: refresh (load) and
// reload (reopen, optionally with a new file name).
static PyMethodDef imageMethods[] =
{ // methods from VideoBase class
	{"refresh", (PyCFunction)Video_refresh, METH_VARARGS, "Refresh image, i.e. load it"},
	{"reload", (PyCFunction)Image_reload, METH_VARARGS, "Reload image, i.e. reopen it"},
	{NULL}  // sentinel
};
// attributes structure
// Python attribute (getset) table for VideoTexture.ImageFFmpeg; only the
// generic VideoBase/ImageBase attributes apply to a still image (no range,
// repeat, framerate, preseek or deinterlace).
static PyGetSetDef imageGetSets[] =
{ // methods from VideoBase class
	{(char*)"status", (getter)Video_getStatus, NULL, (char*)"video status", NULL},
	// attributes from ImageBase class
	{(char*)"valid", (getter)Image_valid, NULL, (char*)"bool to tell if an image is available", NULL},
	{(char*)"image", (getter)Image_getImage, NULL, (char*)"image data", NULL},
	{(char*)"size", (getter)Image_getSize, NULL, (char*)"image size", NULL},
	{(char*)"scale", (getter)Image_getScale, (setter)Image_setScale, (char*)"fast scale of image (near neighbor)", NULL},
	{(char*)"flip", (getter)Image_getFlip, (setter)Image_setFlip, (char*)"flip image vertically", NULL},
	{(char*)"filter", (getter)Image_getFilter, (setter)Image_setFilter, (char*)"pixel filter", NULL},
	{NULL}  // sentinel
};
// python type declaration
// Type object for VideoTexture.ImageFFmpeg; same PyImage storage and
// allocation as the video type, but with image-specific init and tables.
PyTypeObject ImageFFmpegType =
{
	PyVarObject_HEAD_INIT(NULL, 0)
	"VideoTexture.ImageFFmpeg", /*tp_name*/
	sizeof(PyImage),           /*tp_basicsize*/
	0,                         /*tp_itemsize*/
	(destructor)Image_dealloc, /*tp_dealloc*/
	0,                         /*tp_print*/
	0,                         /*tp_getattr*/
	0,                         /*tp_setattr*/
	0,                         /*tp_compare*/
	0,                         /*tp_repr*/
	0,                         /*tp_as_number*/
	0,                         /*tp_as_sequence*/
	0,                         /*tp_as_mapping*/
	0,                         /*tp_hash */
	0,                         /*tp_call*/
	0,                         /*tp_str*/
	0,                         /*tp_getattro*/
	0,                         /*tp_setattro*/
	&imageBufferProcs,         /*tp_as_buffer*/
	Py_TPFLAGS_DEFAULT,        /*tp_flags*/
	"FFmpeg image source",     /* tp_doc */
	0,                         /* tp_traverse */
	0,                         /* tp_clear */
	0,                         /* tp_richcompare */
	0,                         /* tp_weaklistoffset */
	0,                         /* tp_iter */
	0,                         /* tp_iternext */
	imageMethods,              /* tp_methods */
	0,                         /* tp_members */
	imageGetSets,              /* tp_getset */
	0,                         /* tp_base */
	0,                         /* tp_dict */
	0,                         /* tp_descr_get */
	0,                         /* tp_descr_set */
	0,                         /* tp_dictoffset */
	(initproc)ImageFFmpeg_init, /* tp_init */
	0,                         /* tp_alloc */
	Image_allocNew,            /* tp_new */
};
  1283. #endif //WITH_FFMPEG