merged 28477:29285 from trunk
[blender.git] / source / gameengine / VideoTexture / VideoFFmpeg.cpp
1 /* $Id$
2 -----------------------------------------------------------------------------
3 This source file is part of VideoTexture library
4
5 Copyright (c) 2007 The Zdeno Ash Miklas
6
7 This program is free software; you can redistribute it and/or modify it under
8 the terms of the GNU Lesser General Public License as published by the Free Software
9 Foundation; either version 2 of the License, or (at your option) any later
10 version.
11
12 This program is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
14 FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
15
16 You should have received a copy of the GNU Lesser General Public License along with
17 this program; if not, write to the Free Software Foundation, Inc., 59 Temple
18 Place - Suite 330, Boston, MA 02111-1307, USA, or go to
19 http://www.gnu.org/copyleft/lesser.txt.
20 -----------------------------------------------------------------------------
21 */
22
23 #ifdef WITH_FFMPEG
24
25 // INT64_C fix for some linux machines (C99ism)
26 #ifndef __STDC_CONSTANT_MACROS
27 #define __STDC_CONSTANT_MACROS
28 #endif
29 #include <stdint.h>
30
31
32 #include "MEM_guardedalloc.h"
33 #include "PIL_time.h"
34
35 #include <string>
36
37 #include "Exception.h"
38 #include "VideoFFmpeg.h"
39
40
41 // default framerate
42 const double defFrameRate = 25.0;
43 // time scale constant
44 const long timeScale = 1000;
45
46 // macro for exception handling and logging
47 #define CATCH_EXCP catch (Exception & exp) \
48 { exp.report(); m_status = SourceError; }
49
50 extern "C" void do_init_ffmpeg();
51
52 // class RenderVideo
53
54 // constructor
55 VideoFFmpeg::VideoFFmpeg (HRESULT * hRslt) : VideoBase(), 
56 m_codec(NULL), m_formatCtx(NULL), m_codecCtx(NULL), 
57 m_frame(NULL), m_frameDeinterlaced(NULL), m_frameRGB(NULL), m_imgConvertCtx(NULL),
58 m_deinterlace(false), m_preseek(0),     m_videoStream(-1), m_baseFrameRate(25.0),
59 m_lastFrame(-1),  m_eof(false), m_externTime(false), m_curPosition(-1), m_startTime(0), 
60 m_captWidth(0), m_captHeight(0), m_captRate(0.f), m_isImage(false),
61 m_isThreaded(false), m_isStreaming(false), m_stopThread(false), m_cacheStarted(false)
62 {
63         // set video format
64         m_format = RGB24;
65         // force flip because ffmpeg always return the image in the wrong orientation for texture
66         setFlip(true);
67         // construction is OK
68         *hRslt = S_OK;
69         m_thread.first = m_thread.last = NULL;
70         pthread_mutex_init(&m_cacheMutex, NULL);
71         m_frameCacheFree.first = m_frameCacheFree.last = NULL;
72         m_frameCacheBase.first = m_frameCacheBase.last = NULL;
73         m_packetCacheFree.first = m_packetCacheFree.last = NULL;
74         m_packetCacheBase.first = m_packetCacheBase.last = NULL;
75 }
76
// destructor
// Intentionally empty: ffmpeg/cache resources are freed by release().
// NOTE(review): presumably the owner always calls release() before destruction —
// confirm, otherwise the codec/format contexts and frame buffers would leak here.
VideoFFmpeg::~VideoFFmpeg () 
{
}
81
82
// release components
// Tears down the decoding pipeline in dependency order: the cache thread first
// (it uses the codec and frames), then codec, demuxer, frames and sws context.
// Safe to call repeatedly: every pointer is tested and reset to NULL.
// Always returns true.
bool VideoFFmpeg::release()
{
	// release
	stopCache();
	if (m_codecCtx)
	{
		avcodec_close(m_codecCtx);
		m_codecCtx = NULL;
	}
	if (m_formatCtx)
	{
		av_close_input_file(m_formatCtx);
		m_formatCtx = NULL;
	}
	if (m_frame)
	{
		av_free(m_frame);
		m_frame = NULL;
	}
	if (m_frameDeinterlaced)
	{
		// the picture buffer was allocated separately with MEM_callocN (see openStream)
		MEM_freeN(m_frameDeinterlaced->data[0]);
		av_free(m_frameDeinterlaced);
		m_frameDeinterlaced = NULL;
	}
	if (m_frameRGB)
	{
		// same for the RGB conversion buffer (see allocFrameRGB)
		MEM_freeN(m_frameRGB->data[0]);
		av_free(m_frameRGB);
		m_frameRGB = NULL;
	}
	if (m_imgConvertCtx)
	{
		sws_freeContext(m_imgConvertCtx);
		m_imgConvertCtx = NULL;
	}
	m_codec = NULL;
	m_status = SourceStopped;
	// force restart from scratch on the next play
	m_lastFrame = -1;
	return true;
}
125
126 AVFrame *VideoFFmpeg::allocFrameRGB()
127 {
128         AVFrame *frame;
129         frame = avcodec_alloc_frame();
130         if (m_format == RGBA32)
131         {
132                 avpicture_fill((AVPicture*)frame, 
133                         (uint8_t*)MEM_callocN(avpicture_get_size(
134                                 PIX_FMT_RGBA,
135                                 m_codecCtx->width, m_codecCtx->height),
136                                 "ffmpeg rgba"),
137                         PIX_FMT_RGBA, m_codecCtx->width, m_codecCtx->height);
138         } else 
139         {
140                 avpicture_fill((AVPicture*)frame, 
141                         (uint8_t*)MEM_callocN(avpicture_get_size(
142                                 PIX_FMT_RGB24,
143                                 m_codecCtx->width, m_codecCtx->height),
144                                 "ffmpeg rgb"),
145                         PIX_FMT_RGB24, m_codecCtx->width, m_codecCtx->height);
146         }
147         return frame;
148 }
149
150 // set initial parameters
151 void VideoFFmpeg::initParams (short width, short height, float rate, bool image)
152 {
153         m_captWidth = width;
154         m_captHeight = height;
155         m_captRate = rate;
156         m_isImage = image;
157 }
158
159
// Open the source described by filename (file path, URL or capture device) and
// prepare decoding: locate the first video stream, open its decoder, determine
// the base frame rate, allocate the decode/deinterlace frames and the sws
// RGB(A) conversion context.
// Returns 0 on success, -1 on failure (everything opened so far is closed again).
int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AVFormatParameters *formatParams)
{
	AVFormatContext *formatCtx;
	int				i, videoStream;
	AVCodec			*codec;
	AVCodecContext	*codecCtx;

	if(av_open_input_file(&formatCtx, filename, inputFormat, 0, formatParams)!=0)
		return -1;

	if(av_find_stream_info(formatCtx)<0) 
	{
		av_close_input_file(formatCtx);
		return -1;
	}

	/* Find the first video stream */
	videoStream=-1;
	for(i=0; i<formatCtx->nb_streams; i++)
	{
		if(formatCtx->streams[i] &&
			get_codec_from_stream(formatCtx->streams[i]) && 
			(get_codec_from_stream(formatCtx->streams[i])->codec_type==CODEC_TYPE_VIDEO))
		{
			videoStream=i;
			break;
		}
	}

	if(videoStream==-1) 
	{
		av_close_input_file(formatCtx);
		return -1;
	}

	codecCtx = get_codec_from_stream(formatCtx->streams[videoStream]);

	/* Find the decoder for the video stream */
	codec=avcodec_find_decoder(codecCtx->codec_id);
	if(codec==NULL) 
	{
		av_close_input_file(formatCtx);
		return -1;
	}
	// enable decoder workarounds for streams produced by buggy encoders
	codecCtx->workaround_bugs = 1;
	if(avcodec_open(codecCtx, codec)<0) 
	{
		av_close_input_file(formatCtx);
		return -1;
	}

#ifdef FFMPEG_OLD_FRAME_RATE
	// old ffmpeg API: the rate lives in the codec context
	if(codecCtx->frame_rate>1000 && codecCtx->frame_rate_base==1)
		codecCtx->frame_rate_base=1000;
	m_baseFrameRate = (double)codecCtx->frame_rate / (double)codecCtx->frame_rate_base;
#else
	m_baseFrameRate = av_q2d(formatCtx->streams[videoStream]->r_frame_rate);
#endif
	// fall back to the default rate when the container reports nothing usable
	if (m_baseFrameRate <= 0.0) 
		m_baseFrameRate = defFrameRate;

	m_codec = codec;
	m_codecCtx = codecCtx;
	m_formatCtx = formatCtx;
	m_videoStream = videoStream;
	m_frame = avcodec_alloc_frame();
	m_frameDeinterlaced = avcodec_alloc_frame();

	// allocate buffer if deinterlacing is required
	// (the buffer is allocated unconditionally; whether to deinterlace is decided per frame)
	avpicture_fill((AVPicture*)m_frameDeinterlaced, 
		(uint8_t*)MEM_callocN(avpicture_get_size(
		m_codecCtx->pix_fmt,
		m_codecCtx->width, m_codecCtx->height), 
		"ffmpeg deinterlace"), 
		m_codecCtx->pix_fmt, m_codecCtx->width, m_codecCtx->height);

	// check if the pixel format supports Alpha
	if (m_codecCtx->pix_fmt == PIX_FMT_RGB32 ||
		m_codecCtx->pix_fmt == PIX_FMT_BGR32 ||
		m_codecCtx->pix_fmt == PIX_FMT_RGB32_1 ||
		m_codecCtx->pix_fmt == PIX_FMT_BGR32_1) 
	{
		// allocate buffer to store final decoded frame
		m_format = RGBA32;
		// allocate sws context to convert codec pixels to RGBA
		m_imgConvertCtx = sws_getContext(
			m_codecCtx->width,
			m_codecCtx->height,
			m_codecCtx->pix_fmt,
			m_codecCtx->width,
			m_codecCtx->height,
			PIX_FMT_RGBA,
			SWS_FAST_BILINEAR,
			NULL, NULL, NULL);
	} else
	{
		// allocate buffer to store final decoded frame
		m_format = RGB24;
		// allocate sws context to convert codec pixels to RGB24
		m_imgConvertCtx = sws_getContext(
			m_codecCtx->width,
			m_codecCtx->height,
			m_codecCtx->pix_fmt,
			m_codecCtx->width,
			m_codecCtx->height,
			PIX_FMT_RGB24,
			SWS_FAST_BILINEAR,
			NULL, NULL, NULL);
	}
	m_frameRGB = allocFrameRGB();

	if (!m_imgConvertCtx) {
		// conversion context could not be created: undo everything allocated above
		avcodec_close(m_codecCtx);
		m_codecCtx = NULL;
		av_close_input_file(m_formatCtx);
		m_formatCtx = NULL;
		av_free(m_frame);
		m_frame = NULL;
		MEM_freeN(m_frameDeinterlaced->data[0]);
		av_free(m_frameDeinterlaced);
		m_frameDeinterlaced = NULL;
		MEM_freeN(m_frameRGB->data[0]);
		av_free(m_frameRGB);
		m_frameRGB = NULL;
		return -1;
	}
	return 0;
}
288
/*
 * This thread is used to load video frames asynchronously.
 * It provides a frame caching service. 
 * The main thread is responsible for positioning the frame pointer in the
 * file correctly before calling startCache() which starts this thread.
 * The cache is organized in two layers: 1) a cache of 20-30 undecoded packets to keep
 * memory and CPU load low 2) a cache of 5 decoded frames. 
 * If the main thread does not find the frame in the cache (because the video has restarted
 * or because the GE is lagging), it stops the cache with StopCache() (this is a synchronous
 * function: it sends a signal to stop the cache thread and waits for confirmation), then
 * changes the position in the stream and restarts the cache thread.
 */
void *VideoFFmpeg::cacheThread(void *data)
{
	VideoFFmpeg* video = (VideoFFmpeg*)data;
	// holds the frame that is being decoded
	CacheFrame *currentFrame = NULL;
	CachePacket *cachePacket;
	bool endOfFile = false;
	int frameFinished = 0;
	// conversion factor from packet timestamps to seconds for this stream
	double timeBase = av_q2d(video->m_formatCtx->streams[video->m_videoStream]->time_base);
	int64_t startTs = video->m_formatCtx->streams[video->m_videoStream]->start_time;

	if (startTs == AV_NOPTS_VALUE)
		startTs = 0;

	while (!video->m_stopThread)
	{
		// packet cache is used solely by this thread, no need to lock
		// In case the stream/file contains other streams than the one we are looking for,
		// allow a bit of cycling to get rid quickly of those frames
		frameFinished = 0;
		while (	   !endOfFile 
				&& (cachePacket = (CachePacket *)video->m_packetCacheFree.first) != NULL 
				&& frameFinished < 25)
		{
			// free packet => packet cache is not full yet, just read more
			if (av_read_frame(video->m_formatCtx, &cachePacket->packet)>=0) 
			{
				if (cachePacket->packet.stream_index == video->m_videoStream)
				{
					// make sure fresh memory is allocated for the packet and move it to queue
					av_dup_packet(&cachePacket->packet);
					BLI_remlink(&video->m_packetCacheFree, cachePacket);
					BLI_addtail(&video->m_packetCacheBase, cachePacket);
					break;
				} else {
					// this is not a good packet for us, just leave it on free queue
					// Note: here we could handle sound packets
					av_free_packet(&cachePacket->packet);
					frameFinished++;
				}
				
			} else {
				if (video->m_isFile)
					// this marks the end of the file
					endOfFile = true;
				// if we cannot read a packet, no need to continue
				break;
			}
		}
		// frame cache is also used by main thread, lock
		if (currentFrame == NULL) 
		{
			// no current frame being decoded, take free one
			pthread_mutex_lock(&video->m_cacheMutex);
			if ((currentFrame = (CacheFrame *)video->m_frameCacheFree.first) != NULL)
				BLI_remlink(&video->m_frameCacheFree, currentFrame);
			pthread_mutex_unlock(&video->m_cacheMutex);
		}
		if (currentFrame != NULL)
		{
			// this frame is out of free and busy queue, we can manipulate it without locking
			frameFinished = 0;
			while (!frameFinished && (cachePacket = (CachePacket *)video->m_packetCacheBase.first) != NULL)
			{
				BLI_remlink(&video->m_packetCacheBase, cachePacket);
				// use m_frame because when caching, it is not used in main thread
				// we can't use currentFrame directly because we need to convert to RGB first
				avcodec_decode_video(video->m_codecCtx, 
					video->m_frame, &frameFinished, 
					cachePacket->packet.data, cachePacket->packet.size);
				if(frameFinished) 
				{
					AVFrame * input = video->m_frame;

					/* all-NULL data planes mean the data wasn't read properly, this check stops crashing */
					if (   input->data[0]!=0 || input->data[1]!=0 
						|| input->data[2]!=0 || input->data[3]!=0)
					{
						if (video->m_deinterlace) 
						{
							if (avpicture_deinterlace(
								(AVPicture*) video->m_frameDeinterlaced,
								(const AVPicture*) video->m_frame,
								video->m_codecCtx->pix_fmt,
								video->m_codecCtx->width,
								video->m_codecCtx->height) >= 0)
							{
								// deinterlacing succeeded: use the deinterlaced picture as source
								input = video->m_frameDeinterlaced;
							}
						}
						// convert to RGB24
						sws_scale(video->m_imgConvertCtx,
							input->data,
							input->linesize,
							0,
							video->m_codecCtx->height,
							currentFrame->frame->data,
							currentFrame->frame->linesize);
						// move frame to queue, this frame is necessarily the next one
						// (frame number = (dts - stream start) scaled by rate * time base, rounded)
						video->m_curPosition = (long)((cachePacket->packet.dts-startTs) * (video->m_baseFrameRate*timeBase) + 0.5);
						currentFrame->framePosition = video->m_curPosition;
						pthread_mutex_lock(&video->m_cacheMutex);
						BLI_addtail(&video->m_frameCacheBase, currentFrame);
						pthread_mutex_unlock(&video->m_cacheMutex);
						currentFrame = NULL;
					}
				}
				av_free_packet(&cachePacket->packet);
				BLI_addtail(&video->m_packetCacheFree, cachePacket);
			} 
			if (currentFrame && endOfFile) 
			{
				// no more packets and end of file => put a special frame that indicates that
				currentFrame->framePosition = -1;
				pthread_mutex_lock(&video->m_cacheMutex);
				BLI_addtail(&video->m_frameCacheBase, currentFrame);
				pthread_mutex_unlock(&video->m_cacheMutex);
				currentFrame = NULL;
				// no need to stay any longer in this thread
				break;
			}
		}
		// small sleep to avoid unnecessary looping
		PIL_sleep_ms(10);
	}
	// before quitting, put back the current frame to queue to allow freeing
	if (currentFrame)
	{
		pthread_mutex_lock(&video->m_cacheMutex);
		BLI_addtail(&video->m_frameCacheFree, currentFrame);
		pthread_mutex_unlock(&video->m_cacheMutex);
	}
	return 0;
}
435
436 // start thread to cache video frame from file/capture/stream
437 // this function should be called only when the position in the stream is set for the
438 // first frame to cache
439 bool VideoFFmpeg::startCache()
440 {
441         if (!m_cacheStarted && m_isThreaded)
442         {
443                 m_stopThread = false;
444                 for (int i=0; i<CACHE_FRAME_SIZE; i++)
445                 {
446                         CacheFrame *frame = new CacheFrame();
447                         frame->frame = allocFrameRGB();
448                         BLI_addtail(&m_frameCacheFree, frame);
449                 }
450                 for (int i=0; i<CACHE_PACKET_SIZE; i++) 
451                 {
452                         CachePacket *packet = new CachePacket();
453                         BLI_addtail(&m_packetCacheFree, packet);
454                 }
455                 BLI_init_threads(&m_thread, cacheThread, 1);
456                 BLI_insert_thread(&m_thread, this);
457                 m_cacheStarted = true;
458         }
459         return m_cacheStarted;
460 }
461
462 void VideoFFmpeg::stopCache()
463 {
464         if (m_cacheStarted)
465         {
466                 m_stopThread = true;
467                 BLI_end_threads(&m_thread);
468                 // now delete the cache
469                 CacheFrame *frame;
470                 CachePacket *packet;
471                 while ((frame = (CacheFrame *)m_frameCacheBase.first) != NULL)
472                 {
473                         BLI_remlink(&m_frameCacheBase, frame);
474                         MEM_freeN(frame->frame->data[0]);
475                         av_free(frame->frame);
476                         delete frame;
477                 }
478                 while ((frame = (CacheFrame *)m_frameCacheFree.first) != NULL)
479                 {
480                         BLI_remlink(&m_frameCacheFree, frame);
481                         MEM_freeN(frame->frame->data[0]);
482                         av_free(frame->frame);
483                         delete frame;
484                 }
485                 while((packet = (CachePacket *)m_packetCacheBase.first) != NULL)
486                 {
487                         BLI_remlink(&m_packetCacheBase, packet);
488                         av_free_packet(&packet->packet);
489                         delete packet;
490                 }
491                 while((packet = (CachePacket *)m_packetCacheFree.first) != NULL)
492                 {
493                         BLI_remlink(&m_packetCacheFree, packet);
494                         delete packet;
495                 }
496                 m_cacheStarted = false;
497         }
498 }
499
500 void VideoFFmpeg::releaseFrame(AVFrame* frame)
501 {
502         if (frame == m_frameRGB)
503         {
504                 // this is not a frame from the cache, ignore
505                 return;
506         }
507         // this frame MUST be the first one of the queue
508         pthread_mutex_lock(&m_cacheMutex);
509         CacheFrame *cacheFrame = (CacheFrame *)m_frameCacheBase.first;
510         assert (cacheFrame != NULL && cacheFrame->frame == frame);
511         BLI_remlink(&m_frameCacheBase, cacheFrame);
512         BLI_addtail(&m_frameCacheFree, cacheFrame);
513         pthread_mutex_unlock(&m_cacheMutex);
514 }
515
// open video file
// filename: path or URL of the movie/image source.
// On success also decides whether the source is a seekable file, a network
// stream (no seeking) or a single still image, and whether to use the
// background caching thread.
void VideoFFmpeg::openFile (char * filename)
{
	do_init_ffmpeg();

	if (openStream(filename, NULL, NULL) != 0)
		return;

	// number of frames to decode ahead of a seek target: decoding must restart
	// at the previous key frame, so estimate from the GOP size when available
	if (m_codecCtx->gop_size)
		m_preseek = (m_codecCtx->gop_size < 25) ? m_codecCtx->gop_size+1 : 25;
	else if (m_codecCtx->has_b_frames)		
		m_preseek = 25;	// should determine gopsize
	else
		m_preseek = 0;

	// get video time range
	m_range[0] = 0.0;
	m_range[1] = (double)m_formatCtx->duration / AV_TIME_BASE;

	// open base class
	VideoBase::openFile(filename);

	if (
		// ffmpeg reports that http sources are actually non streamed
		// but it is really not desirable to seek on http files, so force streaming.
		// It would be good to find this information from the context but there is no simple indication
		!strncmp(filename, "http://", 7) ||
#ifdef FFMPEG_PB_IS_POINTER
		(m_formatCtx->pb && m_formatCtx->pb->is_streamed)
#else
		m_formatCtx->pb.is_streamed
#endif
		)
	{
		// the file is in fact a streaming source, treat as cam to prevent seeking
		m_isFile = false;
		// but it's not handled exactly like a camera.
		m_isStreaming = true;
		// for streaming it is important to do non blocking read
		m_formatCtx->flags |= AVFMT_FLAG_NONBLOCK;
	}

	if (m_isImage) 
	{
		// the file is to be treated as an image, i.e. load the first frame only
		m_isFile = false;
		// in case of reload, the filename is taken from m_imageName, no need to change it
		if (m_imageName.Ptr() != filename)
			m_imageName = filename;
		m_preseek = 0;
		m_avail = false;
		play();
	}
	// check if we should do multi-threading?
	if (!m_isImage && BLI_system_thread_count() > 1)
	{
		// never thread an image: there are no frames to read ahead
		// no need to thread if the system has a single core
		m_isThreaded =  true;
	}
}
577
578
// open video capture device
// file: optional device/standard specification (platform dependent, see below);
// camIdx: capture device index used to build the default device name.
// Silently returns without opening anything when the input format is unavailable
// or the stream cannot be opened.
void VideoFFmpeg::openCam (char * file, short camIdx)
{
	// open camera source
	AVInputFormat		*inputFormat;
	AVFormatParameters	formatParams;
	AVRational			frameRate;
	char				*p, filename[28], rateStr[20];

	do_init_ffmpeg();

	memset(&formatParams, 0, sizeof(formatParams));
#ifdef WIN32
	// video capture on windows only through Video For Windows driver
	inputFormat = av_find_input_format("vfwcap");
	if (!inputFormat)
		// Video For Windows not supported??
		return;
	sprintf(filename, "%d", camIdx);
#else
	// In Linux we support two types of devices: VideoForLinux and DV1394. 
	// the user specifies it with the filename:
	// [<device_type>][:<standard>]
	// <device_type> : 'v4l' for VideoForLinux, 'dv1394' for DV1394. By default 'v4l'
	// <standard>    : 'pal', 'secam' or 'ntsc'. By default 'ntsc'
	// The driver name is constructed automatically from the device type:
	// v4l   : /dev/video<camIdx>
	// dv1394: /dev/dv1394/<camIdx>
	// If you have a different driver name, you can specify the driver name explicitly 
	// instead of the device type. Examples of valid filename:
	//    /dev/v4l/video0:pal
	//    /dev/ieee1394/1:ntsc
	//    dv1394:secam
	//    v4l:pal
	if (file && strstr(file, "1394") != NULL) 
	{
		// the user specifies a driver, check if it is v4l or dv1394
		inputFormat = av_find_input_format("dv1394");
		sprintf(filename, "/dev/dv1394/%d", camIdx);
	} else 
	{
		inputFormat = av_find_input_format("video4linux");
		sprintf(filename, "/dev/video%d", camIdx);
	}
	if (!inputFormat)
		// these formats should be supported, check ffmpeg compilation
		return;
	if (file && strncmp(file, "/dev", 4) == 0) 
	{
		// the user specified an explicit device path: use it verbatim,
		// stripping the ':<standard>' suffix if present
		// NOTE(review): filename[28] silently truncates longer device paths — confirm acceptable
		strncpy(filename, file, sizeof(filename));
		filename[sizeof(filename)-1] = 0;
		if ((p = strchr(filename, ':')) != 0)
			*p = 0;
	}
	if (file && (p = strchr(file, ':')) != NULL)
		formatParams.standard = p+1;
#endif
	//frame rate
	if (m_captRate <= 0.f)
		m_captRate = defFrameRate;
	sprintf(rateStr, "%f", m_captRate);
	av_parse_video_frame_rate(&frameRate, rateStr);
	// populate format parameters
	// need to specify the time base = inverse of rate
	formatParams.time_base.num = frameRate.den;
	formatParams.time_base.den = frameRate.num;
	formatParams.width = m_captWidth;
	formatParams.height = m_captHeight;

	if (openStream(filename, inputFormat, &formatParams) != 0)
		return;

	// for video capture it is important to do non blocking read
	m_formatCtx->flags |= AVFMT_FLAG_NONBLOCK;
	// open base class
	VideoBase::openCam(file, camIdx);
	// check if we should do multi-threading?
	if (BLI_system_thread_count() > 1)
	{
		// no need to thread if the system has a single core
		m_isThreaded =  true;
	}
}
663
664 // play video
665 bool VideoFFmpeg::play (void)
666 {
667         try
668         {
669                 // if object is able to play
670                 if (VideoBase::play())
671                 {
672                         // set video position
673                         setPositions();
674                         // return success
675                         return true;
676                 }
677         }
678         CATCH_EXCP;
679         return false;
680 }
681
682
683 // pause video
684 bool VideoFFmpeg::pause (void)
685 {
686         try
687         {
688                 if (VideoBase::pause())
689                 {
690                         return true;
691                 }
692         }
693         CATCH_EXCP;
694         return false;
695 }
696
697 // stop video
698 bool VideoFFmpeg::stop (void)
699 {
700         try
701         {
702                 VideoBase::stop();
703                 // force restart when play
704                 m_lastFrame = -1;
705                 return true;
706         }
707         CATCH_EXCP;
708         return false;
709 }
710
711
712 // set video range
713 void VideoFFmpeg::setRange (double start, double stop)
714 {
715         try
716         {
717                 // set range
718                 if (m_isFile)
719                 {
720                         VideoBase::setRange(start, stop);
721                         // set range for video
722                         setPositions();
723                 }
724         }
725         CATCH_EXCP;
726 }
727
// set framerate
// Plain pass-through to the base class: no ffmpeg-specific handling is needed.
void VideoFFmpeg::setFrameRate (float rate)
{
	VideoBase::setFrameRate(rate);
}
733
734
// image calculation
// load frame from video
// texId is unused in this implementation (kept to match the ImageBase interface);
// ts is the requested timestamp in seconds, values below -0.5 mean "no timestamp given"
void VideoFFmpeg::calcImage (unsigned int texId, double ts)
{
	if (m_status == SourcePlaying)
	{
		// get actual time
		double startTime = PIL_check_seconds_timer();
		double actTime;
		// timestamp passed from audio actuators can sometimes be slightly negative
		if (m_isFile && ts >= -0.5)
		{
			// allow setting timestamp only when not streaming
			actTime = ts;
			if (actTime * actFrameRate() < m_lastFrame) 
			{
				// user is asking to rewind, force a cache clear to make sure we will do a seek
				// note that this does not decrement m_repeat if ts didn't reach m_range[1]
				stopCache();
			}
		}
		else
		{
			// real-time playback: derive the position from the wall clock,
			// initializing the time origin on the very first frame of a stream
			if (m_lastFrame == -1 && !m_isFile)
				m_startTime = startTime;
			actTime = startTime - m_startTime;
		}
		// if video has ended
		if (m_isFile && actTime * m_frameRate >= m_range[1])
		{
			// in any case, this resets the cache
			stopCache();
			// if repeats are set, decrease them
			if (m_repeat > 0) 
				--m_repeat;
			// if video has to be replayed (m_repeat < 0 means infinite repeat)
			if (m_repeat != 0)
			{
				// reset its position
				actTime -= (m_range[1] - m_range[0]) / m_frameRate;
				m_startTime += (m_range[1] - m_range[0]) / m_frameRate;
			}
			// if video has to be stopped, stop it
			else 
			{
				m_status = SourceStopped;
				return;
			}
		}
		// actual frame: still images just advance one frame per refresh
		long actFrame = (m_isImage) ? m_lastFrame+1 : long(actTime * actFrameRate());
		// if actual frame differs from last frame
		if (actFrame != m_lastFrame)
		{
			AVFrame* frame;
			// get image
			if((frame = grabFrame(actFrame)) != NULL)
			{
				if (!m_isFile && !m_cacheStarted) 
				{
					// streaming without cache: detect synchronization problem
					double execTime = PIL_check_seconds_timer() - startTime;
					if (execTime > 0.005) 
					{
						// exec time is too long, it means that the function was blocking
						// resynchronize the stream from this time
						m_startTime += execTime;
					}
				}
				// save actual frame
				m_lastFrame = actFrame;
				// init image, if needed
				init(short(m_codecCtx->width), short(m_codecCtx->height));
				// process image
				process((BYTE*)(frame->data[0]));
				// finished with the frame, release it so that cache can reuse it
				releaseFrame(frame);
				// in case it is an image, automatically stop reading it
				if (m_isImage)
				{
					m_status = SourceStopped;
					// close the file as we don't need it anymore
					release();
				}
			} else if (m_isStreaming)
			{
				// we didn't get a frame and we are streaming, this may be due to
				// a delay in the network or because we are getting the frame too fast.
				// In the later case, shift time by a small amount to compensate for a drift
				m_startTime += 0.001;
			}
		}
	}
}
829
830
// set actual position
// Recompute m_startTime (the wall-clock origin of playback) so that playback
// either resumes from the last decoded frame or restarts at the range start.
void VideoFFmpeg::setPositions (void)
{
	// set video start time
	m_startTime = PIL_check_seconds_timer();
	// if file is played and actual position is before end position
	if (!m_eof && m_lastFrame >= 0 && (!m_isFile || m_lastFrame < m_range[1] * actFrameRate()))
		// continue from actual position
		m_startTime -= double(m_lastFrame) / actFrameRate();
	else {
		// shift origin so playback starts at the beginning of the replay range
		m_startTime -= m_range[0];
		// start from begining, stop cache just in case
		stopCache();
	}
}
846
// position pointer in file, position in second
// Return the decoded RGB frame for the requested frame number, or NULL when no
// frame is available. When the cache thread is running, frames are taken from
// the cache list; otherwise the file/stream is read and decoded inline.
// posFound semantics: 1 = no seek was performed, take the next decoded frame;
// 0 = a seek happened, keep decoding until a packet with dts >= targetTs arrives.
AVFrame *VideoFFmpeg::grabFrame(long position)
{
	AVPacket packet;
	int frameFinished;
	int posFound = 1;
	bool frameLoaded = false;
	int64_t targetTs = 0;
	CacheFrame *frame;
	int64_t dts = 0;

	if (m_cacheStarted)
	{
		// when cache is active, we must not read the file directly
		do {
			// only the list head is read here; mutex protects against the cache thread
			pthread_mutex_lock(&m_cacheMutex);
			frame = (CacheFrame *)m_frameCacheBase.first;
			pthread_mutex_unlock(&m_cacheMutex);
			// no need to remove the frame from the queue: the cache thread does not touch the head, only the tail
			if (frame == NULL)
			{
				// no frame in cache, in case of file it is an abnormal situation
				if (m_isFile)
				{
					// go back to no threaded reading
					stopCache();
					break;
				}
				return NULL;
			}
			if (frame->framePosition == -1) 
			{
				// this frame mark the end of the file (only used for file)
				// leave in cache to make sure we don't miss it
				m_eof = true;
				return NULL;
			}
			// for streaming, always return the next frame, 
			// that's what grabFrame does in non cache mode anyway.
			if (m_isStreaming || frame->framePosition == position)
			{
				return frame->frame;
			}
			// for cam, skip old frames to keep image realtime.
			// There should be no risk of clock drift since it all happens on the same CPU
			if (frame->framePosition > position) 
			{
				// this can happen after rewind if the seek didn't find the first frame
				// the frame in the buffer is ahead of time, just leave it there
				return NULL;
			}
			// this frame is not useful, release it
			pthread_mutex_lock(&m_cacheMutex);
			BLI_remlink(&m_frameCacheBase, frame);
			BLI_addtail(&m_frameCacheFree, frame);
			pthread_mutex_unlock(&m_cacheMutex);
		} while (true);
	}
	// seconds per stream time unit, used to convert frame numbers <-> timestamps
	double timeBase = av_q2d(m_formatCtx->streams[m_videoStream]->time_base);
	int64_t startTs = m_formatCtx->streams[m_videoStream]->start_time;
	if (startTs == AV_NOPTS_VALUE)
		startTs = 0;

	// come here when there is no cache or cache has been stopped
	// locate the frame, by seeking if necessary (seeking is only possible for files)
	if (m_isFile)
	{
		// first check if the position that we are looking for is in the preseek range
		// if so, just read the frame until we get there
		if (position > m_curPosition + 1 
			&& m_preseek 
			&& position - (m_curPosition + 1) < m_preseek) 
		{
			while(av_read_frame(m_formatCtx, &packet)>=0) 
			{
				if (packet.stream_index == m_videoStream) 
				{
					avcodec_decode_video(
						m_codecCtx, 
						m_frame, &frameFinished, 
						packet.data, packet.size);
					if (frameFinished)
					{
						// convert the packet dts back to a frame number
						m_curPosition = (long)((packet.dts-startTs) * (m_baseFrameRate*timeBase) + 0.5);
					}
				}
				av_free_packet(&packet);
				if (position == m_curPosition+1)
					break;
			}
		}
		// if the position is not in preseek, do a direct jump
		if (position != m_curPosition + 1) 
		{ 
			// target timestamp, m_preseek frames early to let the decoder settle
			int64_t pos = (int64_t)((position - m_preseek) / (m_baseFrameRate*timeBase));

			if (pos < 0)
				pos = 0;

			pos += startTs;

			if (position <= m_curPosition || !m_eof)
			{
#if 0
				// Tried to make this work but couldn't: seeking on byte is ignored by the
				// format plugin and it will generally continue to read from last timestamp.
				// Too bad because frame seek is not always able to get the first frame
				// of the file.
				if (position <= m_preseek)
				{
					// we can safely go the begining of the file
					if (av_seek_frame(m_formatCtx, m_videoStream, 0, AVSEEK_FLAG_BYTE) >= 0)
					{
						// binary seek does not reset the timestamp, must do it now
						av_update_cur_dts(m_formatCtx, m_formatCtx->streams[m_videoStream], startTs);
						m_curPosition = 0;
					}
				}
				else
#endif
				{
					// current position is now lost, guess a value. 
					if (av_seek_frame(m_formatCtx, m_videoStream, pos, AVSEEK_FLAG_BACKWARD) >= 0)
					{
						// current position is now lost, guess a value. 
						// It's not important because it will be set at this end of this function
						m_curPosition = position - m_preseek - 1;
					}
				}
			}
			// this is the timestamp of the frame we're looking for
			targetTs = (int64_t)(position / (m_baseFrameRate * timeBase)) + startTs;

			posFound = 0;
			avcodec_flush_buffers(m_codecCtx);
		}
	} else if (m_isThreaded)
	{
		// cache is not started but threading is possible
		// better not read the stream => make take some time, better start caching
		if (startCache())
			return NULL;
		// Abnormal!!! could not start cache, fall back on direct read
		m_isThreaded = false;
	}

	// find the correct frame, in case of streaming and no cache, it means just
	// return the next frame. This is not quite correct, may need more work
	while(av_read_frame(m_formatCtx, &packet)>=0) 
	{
		if(packet.stream_index == m_videoStream) 
		{
			avcodec_decode_video(m_codecCtx, 
				m_frame, &frameFinished, 
				packet.data, packet.size);
			// remember dts to compute exact frame number
			dts = packet.dts;
			if (frameFinished && !posFound) 
			{
				if (dts >= targetTs)
				{
					posFound = 1;
				}
			} 

			if (frameFinished && posFound == 1) 
			{
				AVFrame * input = m_frame;

				/* This means the data wasnt read properly, 
				this check stops crashing */
				if (   input->data[0]==0 && input->data[1]==0 
					&& input->data[2]==0 && input->data[3]==0)
				{
					av_free_packet(&packet);
					break;
				}

				if (m_deinterlace) 
				{
					// deinterlace in place; on failure keep the interlaced frame
					if (avpicture_deinterlace(
						(AVPicture*) m_frameDeinterlaced,
						(const AVPicture*) m_frame,
						m_codecCtx->pix_fmt,
						m_codecCtx->width,
						m_codecCtx->height) >= 0)
					{
						input = m_frameDeinterlaced;
					}
				}
				// convert to RGB24
				sws_scale(m_imgConvertCtx,
					input->data,
					input->linesize,
					0,
					m_codecCtx->height,
					m_frameRGB->data,
					m_frameRGB->linesize);
				av_free_packet(&packet);
				frameLoaded = true;
				break;
			}
		}
		av_free_packet(&packet);
	}
	// for a file, failing to load a frame means we ran past the end
	m_eof = m_isFile && !frameLoaded;
	if (frameLoaded)
	{
		m_curPosition = (long)((dts-startTs) * (m_baseFrameRate*timeBase) + 0.5);
		if (m_isThreaded)
		{
			// normal case for file: first locate, then start cache
			if (!startCache())
			{
				// Abnormal!! could not start cache, return to non-cache mode
				m_isThreaded = false;
			}
		}
		return m_frameRGB;
	}
	return NULL;
}
1069
1070
1071 // python methods
1072
1073
1074 // cast Image pointer to VideoFFmpeg
1075 inline VideoFFmpeg * getVideoFFmpeg (PyImage * self)
1076 { return static_cast<VideoFFmpeg*>(self->m_image); }
1077
1078
1079 // object initialization
1080 static int VideoFFmpeg_init (PyObject * pySelf, PyObject * args, PyObject * kwds)
1081 {
1082         PyImage * self = reinterpret_cast<PyImage*>(pySelf);
1083         // parameters - video source
1084         // file name or format type for capture (only for Linux: video4linux or dv1394)
1085         char * file = NULL;
1086         // capture device number
1087         short capt = -1;
1088         // capture width, only if capt is >= 0
1089         short width = 0;
1090         // capture height, only if capt is >= 0
1091         short height = 0;
1092         // capture rate, only if capt is >= 0
1093         float rate = 25.f;
1094
1095         static const char *kwlist[] = {"file", "capture", "rate", "width", "height", NULL};
1096
1097         // get parameters
1098         if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|hfhh",
1099                 const_cast<char**>(kwlist), &file, &capt, &rate, &width, &height))
1100                 return -1; 
1101
1102         try
1103         {
1104                 // create video object
1105                 Video_init<VideoFFmpeg>(self);
1106
1107                 // set thread usage
1108                 getVideoFFmpeg(self)->initParams(width, height, rate);
1109
1110                 // open video source
1111                 Video_open(getVideo(self), file, capt);
1112         }
1113         catch (Exception & exp)
1114         {
1115                 exp.report();
1116                 return -1;
1117         }
1118         // initialization succeded
1119         return 0;
1120 }
1121
// get preseek
// Return the number of preseek frames as a Python int ("h" = C short).
PyObject * VideoFFmpeg_getPreseek (PyImage *self, void * closure)
{
	return Py_BuildValue("h", getFFmpeg(self)->getPreseek());
}
1126
1127 // set range
1128 int VideoFFmpeg_setPreseek (PyImage * self, PyObject * value, void * closure)
1129 {
1130         // check validity of parameter
1131         if (value == NULL || !PyLong_Check(value))
1132         {
1133                 PyErr_SetString(PyExc_TypeError, "The value must be an integer");
1134                 return -1;
1135         }
1136         // set preseek
1137         getFFmpeg(self)->setPreseek(PyLong_AsSsize_t(value));
1138         // success
1139         return 0;
1140 }
1141
// get deinterlace
int* dummy_decl_guard_unused; // (removed)
1150
1151 // set flip
1152 int VideoFFmpeg_setDeinterlace (PyImage * self, PyObject * value, void * closure)
1153 {
1154         // check parameter, report failure
1155         if (value == NULL || !PyBool_Check(value))
1156         {
1157                 PyErr_SetString(PyExc_TypeError, "The value must be a bool");
1158                 return -1;
1159         }
1160         // set deinterlace
1161         getFFmpeg(self)->setDeinterlace(value == Py_True);
1162         // success
1163         return 0;
1164 }
1165
// methods structure
// Python method table of the VideoTexture.VideoFFmpeg type;
// all entries delegate to the generic VideoBase wrappers.
static PyMethodDef videoMethods[] =
{ // methods from VideoBase class
	{"play", (PyCFunction)Video_play, METH_NOARGS, "Play (restart) video"},
	{"pause", (PyCFunction)Video_pause, METH_NOARGS, "pause video"},
	{"stop", (PyCFunction)Video_stop, METH_NOARGS, "stop video (play will replay it from start)"},
	{"refresh", (PyCFunction)Video_refresh, METH_NOARGS, "Refresh video - get its status"},
	{NULL}	// sentinel
};
// attributes structure
// Property table of VideoTexture.VideoFFmpeg: generic VideoBase/ImageBase
// accessors plus the FFmpeg-specific preseek and deinterlace attributes.
static PyGetSetDef videoGetSets[] =
{ // methods from VideoBase class
	{(char*)"status", (getter)Video_getStatus, NULL, (char*)"video status", NULL},
	{(char*)"range", (getter)Video_getRange, (setter)Video_setRange, (char*)"replay range", NULL},
	{(char*)"repeat", (getter)Video_getRepeat, (setter)Video_setRepeat, (char*)"repeat count, -1 for infinite repeat", NULL},
	{(char*)"framerate", (getter)Video_getFrameRate, (setter)Video_setFrameRate, (char*)"frame rate", NULL},
	// attributes from ImageBase class
	{(char*)"valid", (getter)Image_valid, NULL, (char*)"bool to tell if an image is available", NULL},
	{(char*)"image", (getter)Image_getImage, NULL, (char*)"image data", NULL},
	{(char*)"size", (getter)Image_getSize, NULL, (char*)"image size", NULL},
	{(char*)"scale", (getter)Image_getScale, (setter)Image_setScale, (char*)"fast scale of image (near neighbour)", NULL},
	{(char*)"flip", (getter)Image_getFlip, (setter)Image_setFlip, (char*)"flip image vertically", NULL},
	{(char*)"filter", (getter)Image_getFilter, (setter)Image_setFilter, (char*)"pixel filter", NULL},
	{(char*)"preseek", (getter)VideoFFmpeg_getPreseek, (setter)VideoFFmpeg_setPreseek, (char*)"nb of frames of preseek", NULL},
	{(char*)"deinterlace", (getter)VideoFFmpeg_getDeinterlace, (setter)VideoFFmpeg_setDeinterlace, (char*)"deinterlace image", NULL},
	{NULL}	// sentinel
};
1193
1194 // python type declaration
1195 PyTypeObject VideoFFmpegType =
1196
1197         PyVarObject_HEAD_INIT(NULL, 0)
1198         "VideoTexture.VideoFFmpeg",   /*tp_name*/
1199         sizeof(PyImage),          /*tp_basicsize*/
1200         0,                         /*tp_itemsize*/
1201         (destructor)Image_dealloc, /*tp_dealloc*/
1202         0,                         /*tp_print*/
1203         0,                         /*tp_getattr*/
1204         0,                         /*tp_setattr*/
1205         0,                         /*tp_compare*/
1206         0,                         /*tp_repr*/
1207         0,                         /*tp_as_number*/
1208         0,                         /*tp_as_sequence*/
1209         0,                         /*tp_as_mapping*/
1210         0,                         /*tp_hash */
1211         0,                         /*tp_call*/
1212         0,                         /*tp_str*/
1213         0,                         /*tp_getattro*/
1214         0,                         /*tp_setattro*/
1215         &imageBufferProcs,         /*tp_as_buffer*/
1216         Py_TPFLAGS_DEFAULT,        /*tp_flags*/
1217         "FFmpeg video source",       /* tp_doc */
1218         0,                             /* tp_traverse */
1219         0,                             /* tp_clear */
1220         0,                             /* tp_richcompare */
1221         0,                             /* tp_weaklistoffset */
1222         0,                             /* tp_iter */
1223         0,                             /* tp_iternext */
1224         videoMethods,    /* tp_methods */
1225         0,                   /* tp_members */
1226         videoGetSets,          /* tp_getset */
1227         0,                         /* tp_base */
1228         0,                         /* tp_dict */
1229         0,                         /* tp_descr_get */
1230         0,                         /* tp_descr_set */
1231         0,                         /* tp_dictoffset */
1232         (initproc)VideoFFmpeg_init,     /* tp_init */
1233         0,                         /* tp_alloc */
1234         Image_allocNew,           /* tp_new */
1235 };
1236
1237 // object initialization
1238 static int ImageFFmpeg_init (PyObject * pySelf, PyObject * args, PyObject * kwds)
1239 {
1240         PyImage * self = reinterpret_cast<PyImage*>(pySelf);
1241         // parameters - video source
1242         // file name or format type for capture (only for Linux: video4linux or dv1394)
1243         char * file = NULL;
1244
1245         // get parameters
1246         if (!PyArg_ParseTuple(args, "s:ImageFFmpeg", &file))
1247                 return -1; 
1248
1249         try
1250         {
1251                 // create video object
1252                 Video_init<VideoFFmpeg>(self);
1253
1254                 getVideoFFmpeg(self)->initParams(0, 0, 1.0, true);
1255
1256                 // open video source
1257                 Video_open(getVideo(self), file, -1);
1258         }
1259         catch (Exception & exp)
1260         {
1261                 exp.report();
1262                 return -1;
1263         }
1264         // initialization succeded
1265         return 0;
1266 }
1267
1268 PyObject * Image_reload (PyImage * self, PyObject *args)
1269 {
1270         char * newname = NULL;
1271         if (!PyArg_ParseTuple(args, "|s:reload", &newname))
1272                 return NULL;
1273         if (self->m_image != NULL)
1274         {
1275                 VideoFFmpeg* video = getFFmpeg(self);
1276                 // check type of object
1277                 if (!newname)
1278                         newname = video->getImageName();
1279                 if (!newname) {
1280                         // if not set, retport error
1281                         PyErr_SetString(PyExc_RuntimeError, "No image file name given");
1282                         return NULL;
1283                 }
1284                 // make sure the previous file is cleared
1285                 video->release();
1286                 // open the new file
1287                 video->openFile(newname);
1288         }
1289         Py_RETURN_NONE;
1290 }
1291
// methods structure
// Python method table of the VideoTexture.ImageFFmpeg type.
static PyMethodDef imageMethods[] =
{ // methods from VideoBase class
	{"refresh", (PyCFunction)Video_refresh, METH_NOARGS, "Refresh image, i.e. load it"},
	{"reload", (PyCFunction)Image_reload, METH_VARARGS, "Reload image, i.e. reopen it"},
	{NULL}	// sentinel
};
// attributes structure
// Property table of VideoTexture.ImageFFmpeg: the still-image type only
// exposes the generic VideoBase status and the ImageBase accessors.
static PyGetSetDef imageGetSets[] =
{ // methods from VideoBase class
	{(char*)"status", (getter)Video_getStatus, NULL, (char*)"video status", NULL},
	// attributes from ImageBase class
	{(char*)"valid", (getter)Image_valid, NULL, (char*)"bool to tell if an image is available", NULL},
	{(char*)"image", (getter)Image_getImage, NULL, (char*)"image data", NULL},
	{(char*)"size", (getter)Image_getSize, NULL, (char*)"image size", NULL},
	{(char*)"scale", (getter)Image_getScale, (setter)Image_setScale, (char*)"fast scale of image (near neighbour)", NULL},
	{(char*)"flip", (getter)Image_getFlip, (setter)Image_setFlip, (char*)"flip image vertically", NULL},
	{(char*)"filter", (getter)Image_getFilter, (setter)Image_setFilter, (char*)"pixel filter", NULL},
	{NULL}	// sentinel
};
1312
1313 // python type declaration
1314 PyTypeObject ImageFFmpegType =
1315
1316         PyVarObject_HEAD_INIT(NULL, 0)
1317         "VideoTexture.ImageFFmpeg",   /*tp_name*/
1318         sizeof(PyImage),          /*tp_basicsize*/
1319         0,                         /*tp_itemsize*/
1320         (destructor)Image_dealloc, /*tp_dealloc*/
1321         0,                         /*tp_print*/
1322         0,                         /*tp_getattr*/
1323         0,                         /*tp_setattr*/
1324         0,                         /*tp_compare*/
1325         0,                         /*tp_repr*/
1326         0,                         /*tp_as_number*/
1327         0,                         /*tp_as_sequence*/
1328         0,                         /*tp_as_mapping*/
1329         0,                         /*tp_hash */
1330         0,                         /*tp_call*/
1331         0,                         /*tp_str*/
1332         0,                         /*tp_getattro*/
1333         0,                         /*tp_setattro*/
1334         &imageBufferProcs,         /*tp_as_buffer*/
1335         Py_TPFLAGS_DEFAULT,        /*tp_flags*/
1336         "FFmpeg image source",       /* tp_doc */
1337         0,                             /* tp_traverse */
1338         0,                             /* tp_clear */
1339         0,                             /* tp_richcompare */
1340         0,                             /* tp_weaklistoffset */
1341         0,                             /* tp_iter */
1342         0,                             /* tp_iternext */
1343         imageMethods,    /* tp_methods */
1344         0,                   /* tp_members */
1345         imageGetSets,          /* tp_getset */
1346         0,                         /* tp_base */
1347         0,                         /* tp_dict */
1348         0,                         /* tp_descr_get */
1349         0,                         /* tp_descr_set */
1350         0,                         /* tp_dictoffset */
1351         (initproc)ImageFFmpeg_init,     /* tp_init */
1352         0,                         /* tp_alloc */
1353         Image_allocNew,           /* tp_new */
1354 };
1355
1356 #endif  //WITH_FFMPEG
1357
1358