BGE VideoTexture: VideoFFmpeg was missing a rewind function: rename stop() to pause...
source/gameengine/VideoTexture/VideoFFmpeg.cpp
/* $Id$
-----------------------------------------------------------------------------
This source file is part of VideoTexture library

Copyright (c) 2007 The Zdeno Ash Miklas

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt.
-----------------------------------------------------------------------------
*/

#ifdef WITH_FFMPEG

// INT64_C fix for some linux machines (C99ism)
#define __STDC_CONSTANT_MACROS
#include <stdint.h>


#include "MEM_guardedalloc.h"
#include "PIL_time.h"

#include <string>

#include "Exception.h"
#include "VideoFFmpeg.h"


// default framerate
const double defFrameRate = 25.0;
// time scale constant
const long timeScale = 1000;

// macro for exception handling and logging
#define CATCH_EXCP catch (Exception & exp) \
{ exp.report(); m_status = SourceError; }

extern "C" void do_init_ffmpeg();

// class RenderVideo

// constructor
VideoFFmpeg::VideoFFmpeg (HRESULT * hRslt) : VideoBase(),
m_codec(NULL), m_formatCtx(NULL), m_codecCtx(NULL),
m_frame(NULL), m_frameDeinterlaced(NULL), m_frameRGB(NULL), m_imgConvertCtx(NULL),
m_deinterlace(false), m_preseek(0), m_videoStream(-1), m_baseFrameRate(25.0),
m_lastFrame(-1), m_eof(false), m_curPosition(-1), m_startTime(0),
m_captWidth(0), m_captHeight(0), m_captRate(0.f), m_isImage(false),
m_isThreaded(false), m_stopThread(false), m_cacheStarted(false)
{
    // set video format
    m_format = RGB24;
    // force flip because ffmpeg always returns the image in the wrong orientation for textures
    setFlip(true);
    // construction is OK
    *hRslt = S_OK;
    m_thread.first = m_thread.last = NULL;
    pthread_mutex_init(&m_cacheMutex, NULL);
    m_frameCacheFree.first = m_frameCacheFree.last = NULL;
    m_frameCacheBase.first = m_frameCacheBase.last = NULL;
    m_packetCacheFree.first = m_packetCacheFree.last = NULL;
    m_packetCacheBase.first = m_packetCacheBase.last = NULL;
}

// destructor
VideoFFmpeg::~VideoFFmpeg ()
{
}


// release components
bool VideoFFmpeg::release()
{
    // release
    stopCache();
    if (m_codecCtx)
    {
        avcodec_close(m_codecCtx);
        m_codecCtx = NULL;
    }
    if (m_formatCtx)
    {
        av_close_input_file(m_formatCtx);
        m_formatCtx = NULL;
    }
    if (m_frame)
    {
        av_free(m_frame);
        m_frame = NULL;
    }
    if (m_frameDeinterlaced)
    {
        MEM_freeN(m_frameDeinterlaced->data[0]);
        av_free(m_frameDeinterlaced);
        m_frameDeinterlaced = NULL;
    }
    if (m_frameRGB)
    {
        MEM_freeN(m_frameRGB->data[0]);
        av_free(m_frameRGB);
        m_frameRGB = NULL;
    }
    if (m_imgConvertCtx)
    {
        sws_freeContext(m_imgConvertCtx);
        m_imgConvertCtx = NULL;
    }
    m_codec = NULL;
    m_status = SourceStopped;
    return true;
}

AVFrame *VideoFFmpeg::allocFrameRGB()
{
    AVFrame *frame;
    frame = avcodec_alloc_frame();
    if (m_format == RGBA32)
    {
        avpicture_fill((AVPicture*)frame,
            (uint8_t*)MEM_callocN(avpicture_get_size(
                PIX_FMT_RGBA,
                m_codecCtx->width, m_codecCtx->height),
                "ffmpeg rgba"),
            PIX_FMT_RGBA, m_codecCtx->width, m_codecCtx->height);
    } else
    {
        avpicture_fill((AVPicture*)frame,
            (uint8_t*)MEM_callocN(avpicture_get_size(
                PIX_FMT_RGB24,
                m_codecCtx->width, m_codecCtx->height),
                "ffmpeg rgb"),
            PIX_FMT_RGB24, m_codecCtx->width, m_codecCtx->height);
    }
    return frame;
}

// set initial parameters
void VideoFFmpeg::initParams (short width, short height, float rate, bool image)
{
    m_captWidth = width;
    m_captHeight = height;
    m_captRate = rate;
    m_isImage = image;
}

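/*
 * Open a video source with FFmpeg: locate the first video stream, open its
 * decoder, then allocate the decode and deinterlace frames plus the RGB
 * conversion frame and sws context. Returns 0 on success, -1 on failure
 * (anything opened so far is released again).
 */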
int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AVFormatParameters *formatParams)
{
    AVFormatContext *formatCtx;
    int             i, videoStream;
    AVCodec         *codec;
    AVCodecContext  *codecCtx;

    if(av_open_input_file(&formatCtx, filename, inputFormat, 0, formatParams)!=0)
        return -1;

    if(av_find_stream_info(formatCtx)<0)
    {
        av_close_input_file(formatCtx);
        return -1;
    }

    /* Find the first video stream */
    videoStream=-1;
    for(i=0; i<formatCtx->nb_streams; i++)
    {
        if(formatCtx->streams[i] &&
            get_codec_from_stream(formatCtx->streams[i]) &&
            (get_codec_from_stream(formatCtx->streams[i])->codec_type==CODEC_TYPE_VIDEO))
        {
            videoStream=i;
            break;
        }
    }

    if(videoStream==-1)
    {
        av_close_input_file(formatCtx);
        return -1;
    }

    codecCtx = get_codec_from_stream(formatCtx->streams[videoStream]);

    /* Find the decoder for the video stream */
    codec=avcodec_find_decoder(codecCtx->codec_id);
    if(codec==NULL)
    {
        av_close_input_file(formatCtx);
        return -1;
    }
    codecCtx->workaround_bugs = 1;
    if(avcodec_open(codecCtx, codec)<0)
    {
        av_close_input_file(formatCtx);
        return -1;
    }

#ifdef FFMPEG_OLD_FRAME_RATE
    if(codecCtx->frame_rate>1000 && codecCtx->frame_rate_base==1)
        codecCtx->frame_rate_base=1000;
    m_baseFrameRate = (double)codecCtx->frame_rate / (double)codecCtx->frame_rate_base;
#else
    m_baseFrameRate = av_q2d(formatCtx->streams[videoStream]->r_frame_rate);
#endif
    if (m_baseFrameRate <= 0.0)
        m_baseFrameRate = defFrameRate;

    m_codec = codec;
    m_codecCtx = codecCtx;
    m_formatCtx = formatCtx;
    m_videoStream = videoStream;
    m_frame = avcodec_alloc_frame();
    m_frameDeinterlaced = avcodec_alloc_frame();

    // allocate buffer for the deinterlaced frame (used when deinterlacing is enabled)
    avpicture_fill((AVPicture*)m_frameDeinterlaced,
        (uint8_t*)MEM_callocN(avpicture_get_size(
        m_codecCtx->pix_fmt,
        m_codecCtx->width, m_codecCtx->height),
        "ffmpeg deinterlace"),
        m_codecCtx->pix_fmt, m_codecCtx->width, m_codecCtx->height);

    // check if the pixel format supports alpha
    if (m_codecCtx->pix_fmt == PIX_FMT_RGB32 ||
        m_codecCtx->pix_fmt == PIX_FMT_BGR32 ||
        m_codecCtx->pix_fmt == PIX_FMT_RGB32_1 ||
        m_codecCtx->pix_fmt == PIX_FMT_BGR32_1)
    {
        // allocate buffer to store final decoded frame
        m_format = RGBA32;
        // allocate sws context
        m_imgConvertCtx = sws_getContext(
            m_codecCtx->width,
            m_codecCtx->height,
            m_codecCtx->pix_fmt,
            m_codecCtx->width,
            m_codecCtx->height,
            PIX_FMT_RGBA,
            SWS_FAST_BILINEAR,
            NULL, NULL, NULL);
    } else
    {
        // allocate buffer to store final decoded frame
        m_format = RGB24;
        // allocate sws context
        m_imgConvertCtx = sws_getContext(
            m_codecCtx->width,
            m_codecCtx->height,
            m_codecCtx->pix_fmt,
            m_codecCtx->width,
            m_codecCtx->height,
            PIX_FMT_RGB24,
            SWS_FAST_BILINEAR,
            NULL, NULL, NULL);
    }
    m_frameRGB = allocFrameRGB();

    if (!m_imgConvertCtx) {
        avcodec_close(m_codecCtx);
        m_codecCtx = NULL;
        av_close_input_file(m_formatCtx);
        m_formatCtx = NULL;
        av_free(m_frame);
        m_frame = NULL;
        MEM_freeN(m_frameDeinterlaced->data[0]);
        av_free(m_frameDeinterlaced);
        m_frameDeinterlaced = NULL;
        MEM_freeN(m_frameRGB->data[0]);
        av_free(m_frameRGB);
        m_frameRGB = NULL;
        return -1;
    }
    return 0;
}

/*
 * This thread is used to load video frames asynchronously.
 * It provides a frame caching service.
 * The main thread is responsible for positioning the frame pointer in the
 * file correctly before calling startCache(), which starts this thread.
 * The cache is organized in two layers: 1) a cache of 20-30 undecoded packets to keep
 * memory and CPU load low, 2) a cache of 5 decoded frames.
 * If the main thread does not find the frame in the cache (because the video has restarted
 * or because the GE is lagging), it stops the cache with stopCache() (a synchronous
 * function: it sends a signal to stop the cache thread and waits for confirmation), then
 * changes the position in the stream and restarts the cache thread.
 */
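/*
 * Queue flow between the two cache layers (all BLI linked lists):
 *   m_packetCacheFree --av_read_frame()--> m_packetCacheBase   (cache thread only, no lock)
 *   m_frameCacheFree --decode + sws_scale--> m_frameCacheBase  (shared with main thread, mutex protected)
 * The main thread consumes frames from the head of m_frameCacheBase and returns
 * them to m_frameCacheFree via releaseFrame(); the cache thread only appends at the tail.
 */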
void *VideoFFmpeg::cacheThread(void *data)
{
    VideoFFmpeg* video = (VideoFFmpeg*)data;
    // holds the frame that is being decoded
    CacheFrame *currentFrame = NULL;
    CachePacket *cachePacket;
    bool endOfFile = false;
    int frameFinished = 0;

    while (!video->m_stopThread)
    {
        // the packet cache is used solely by this thread, no need to lock
        // In case the stream/file contains streams other than the one we are looking for,
        // allow a bit of cycling to get rid of those frames quickly
        frameFinished = 0;
        while (!endOfFile
                && (cachePacket = (CachePacket *)video->m_packetCacheFree.first) != NULL
                && frameFinished < 25)
        {
            // free packet => packet cache is not full yet, just read more
            if (av_read_frame(video->m_formatCtx, &cachePacket->packet)>=0)
            {
                if (cachePacket->packet.stream_index == video->m_videoStream)
                {
                    // make sure fresh memory is allocated for the packet and move it to the queue
                    av_dup_packet(&cachePacket->packet);
                    BLI_remlink(&video->m_packetCacheFree, cachePacket);
                    BLI_addtail(&video->m_packetCacheBase, cachePacket);
                    break;
                } else {
                    // this is not a good packet for us, just leave it on the free queue
                    // Note: here we could handle sound packets
                    av_free_packet(&cachePacket->packet);
                    frameFinished++;
                }

            } else {
                if (video->m_isFile)
                    // this marks the end of the file
                    endOfFile = true;
                // if we cannot read a packet, no need to continue
                break;
            }
        }
        // the frame cache is also used by the main thread, lock it
        if (currentFrame == NULL)
        {
            // no current frame being decoded, take a free one
            pthread_mutex_lock(&video->m_cacheMutex);
            if ((currentFrame = (CacheFrame *)video->m_frameCacheFree.first) != NULL)
                BLI_remlink(&video->m_frameCacheFree, currentFrame);
            pthread_mutex_unlock(&video->m_cacheMutex);
        }
        if (currentFrame != NULL)
        {
            // this frame is out of the free and busy queues, we can manipulate it without locking
            frameFinished = 0;
            while (!frameFinished && (cachePacket = (CachePacket *)video->m_packetCacheBase.first) != NULL)
            {
                BLI_remlink(&video->m_packetCacheBase, cachePacket);
                // use m_frame because it is not used in the main thread while caching
                // we can't use currentFrame directly because we need to convert to RGB first
                avcodec_decode_video(video->m_codecCtx,
                    video->m_frame, &frameFinished,
                    cachePacket->packet.data, cachePacket->packet.size);
                if(frameFinished)
                {
                    AVFrame * input = video->m_frame;

                    /* all-NULL data pointers mean the frame wasn't read properly;
                       this check stops crashing */
                    if (   input->data[0]!=0 || input->data[1]!=0
                        || input->data[2]!=0 || input->data[3]!=0)
                    {
                        if (video->m_deinterlace)
                        {
                            if (avpicture_deinterlace(
                                (AVPicture*) video->m_frameDeinterlaced,
                                (const AVPicture*) video->m_frame,
                                video->m_codecCtx->pix_fmt,
                                video->m_codecCtx->width,
                                video->m_codecCtx->height) >= 0)
                            {
                                input = video->m_frameDeinterlaced;
                            }
                        }
                        // convert to RGB24
                        sws_scale(video->m_imgConvertCtx,
                            input->data,
                            input->linesize,
                            0,
                            video->m_codecCtx->height,
                            currentFrame->frame->data,
                            currentFrame->frame->linesize);
                        // move the frame to the queue, this frame is necessarily the next one
                        currentFrame->framePosition = ++video->m_curPosition;
                        pthread_mutex_lock(&video->m_cacheMutex);
                        BLI_addtail(&video->m_frameCacheBase, currentFrame);
                        pthread_mutex_unlock(&video->m_cacheMutex);
                        currentFrame = NULL;
                    }
                }
                av_free_packet(&cachePacket->packet);
                BLI_addtail(&video->m_packetCacheFree, cachePacket);
            }
            if (currentFrame && endOfFile)
            {
                // no more packets and end of file => queue a special frame that indicates it
                currentFrame->framePosition = -1;
                pthread_mutex_lock(&video->m_cacheMutex);
                BLI_addtail(&video->m_frameCacheBase, currentFrame);
                pthread_mutex_unlock(&video->m_cacheMutex);
                currentFrame = NULL;
                // no need to stay any longer in this thread
                break;
            }
        }
        // small sleep to avoid unnecessary looping
        PIL_sleep_ms(10);
    }
    // before quitting, put the current frame back on the free queue to allow freeing
    if (currentFrame)
    {
        pthread_mutex_lock(&video->m_cacheMutex);
        BLI_addtail(&video->m_frameCacheFree, currentFrame);
        pthread_mutex_unlock(&video->m_cacheMutex);
    }
    return 0;
}

// start a thread to cache video frames from a file/capture/stream
// this function should only be called when the position in the stream is set for the
// first frame to cache
bool VideoFFmpeg::startCache()
{
    if (!m_cacheStarted && m_isThreaded)
    {
        m_stopThread = false;
        for (int i=0; i<CACHE_FRAME_SIZE; i++)
        {
            CacheFrame *frame = new CacheFrame();
            frame->frame = allocFrameRGB();
            BLI_addtail(&m_frameCacheFree, frame);
        }
        for (int i=0; i<CACHE_PACKET_SIZE; i++)
        {
            CachePacket *packet = new CachePacket();
            BLI_addtail(&m_packetCacheFree, packet);
        }
        BLI_init_threads(&m_thread, cacheThread, 1);
        BLI_insert_thread(&m_thread, this);
        m_cacheStarted = true;
    }
    return m_cacheStarted;
}

void VideoFFmpeg::stopCache()
{
    if (m_cacheStarted)
    {
        m_stopThread = true;
        BLI_end_threads(&m_thread);
        // now delete the cache
        CacheFrame *frame;
        CachePacket *packet;
        while ((frame = (CacheFrame *)m_frameCacheBase.first) != NULL)
        {
            BLI_remlink(&m_frameCacheBase, frame);
            MEM_freeN(frame->frame->data[0]);
            av_free(frame->frame);
            delete frame;
        }
        while ((frame = (CacheFrame *)m_frameCacheFree.first) != NULL)
        {
            BLI_remlink(&m_frameCacheFree, frame);
            MEM_freeN(frame->frame->data[0]);
            av_free(frame->frame);
            delete frame;
        }
        while((packet = (CachePacket *)m_packetCacheBase.first) != NULL)
        {
            BLI_remlink(&m_packetCacheBase, packet);
            av_free_packet(&packet->packet);
            delete packet;
        }
        while((packet = (CachePacket *)m_packetCacheFree.first) != NULL)
        {
            BLI_remlink(&m_packetCacheFree, packet);
            delete packet;
        }
        m_cacheStarted = false;
    }
}

void VideoFFmpeg::releaseFrame(AVFrame* frame)
{
    if (frame == m_frameRGB)
    {
        // this is not a frame from the cache, ignore
        return;
    }
    // this frame MUST be the first one of the queue
    pthread_mutex_lock(&m_cacheMutex);
    CacheFrame *cacheFrame = (CacheFrame *)m_frameCacheBase.first;
    assert (cacheFrame != NULL && cacheFrame->frame == frame);
    BLI_remlink(&m_frameCacheBase, cacheFrame);
    BLI_addtail(&m_frameCacheFree, cacheFrame);
    pthread_mutex_unlock(&m_cacheMutex);
}

// open video file
void VideoFFmpeg::openFile (char * filename)
{
    do_init_ffmpeg();

    if (openStream(filename, NULL, NULL) != 0)
        return;

    if (m_codecCtx->gop_size)
        m_preseek = (m_codecCtx->gop_size < 25) ? m_codecCtx->gop_size+1 : 25;
    else if (m_codecCtx->has_b_frames)
        m_preseek = 25; // should determine the GOP size
    else
        m_preseek = 0;
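
    // Note: m_preseek is the number of frames decoded after a seek to reach the
    // exact target frame: av_seek_frame() lands on a keyframe before the target,
    // so grabFrame() seeks m_preseek frames early and decodes forward from there.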

    // get video time range
    m_range[0] = 0.0;
    m_range[1] = (double)m_formatCtx->duration / AV_TIME_BASE;

    // open base class
    VideoBase::openFile(filename);

    if (
        // ffmpeg reports that http sources are actually not streamed,
        // but it is really not desirable to seek on an http file, so force streaming.
        // It would be good to get this information from the context but there is no simple indication
        !strncmp(filename, "http://", 7) ||
#ifdef FFMPEG_PB_IS_POINTER
        (m_formatCtx->pb && m_formatCtx->pb->is_streamed)
#else
        m_formatCtx->pb.is_streamed
#endif
        )
    {
        // the file is in fact a streaming source, prevent seeking
        m_isFile = false;
        // for streaming it is important to do non-blocking reads
        m_formatCtx->flags |= AVFMT_FLAG_NONBLOCK;
    }

    if (m_isImage)
    {
        // the file is to be treated as an image, i.e. load the first frame only
        m_isFile = false;
        // in case of reload, the filename is taken from m_imageName, no need to change it
        if (m_imageName.Ptr() != filename)
            m_imageName = filename;
        m_preseek = 0;
        m_avail = false;
        play();
    }
    // check if we should do multi-threading
    if (!m_isImage && BLI_system_thread_count() > 1)
    {
        // never thread an image: there are no frames to read ahead
        // no need to thread if the system has a single core
        m_isThreaded = true;
    }
}


// open video capture device
void VideoFFmpeg::openCam (char * file, short camIdx)
{
    // open camera source
    AVInputFormat       *inputFormat;
    AVFormatParameters  formatParams;
    AVRational          frameRate;
    char                *p, filename[28], rateStr[20];

    do_init_ffmpeg();

    memset(&formatParams, 0, sizeof(formatParams));
#ifdef WIN32
    // video capture on Windows only through the Video For Windows driver
    inputFormat = av_find_input_format("vfwcap");
    if (!inputFormat)
        // Video For Windows not supported??
        return;
    sprintf(filename, "%d", camIdx);
#else
    // In Linux we support two types of devices: VideoForLinux and DV1394.
    // the user specifies it with the filename:
    // [<device_type>][:<standard>]
    // <device_type> : 'v4l' for VideoForLinux, 'dv1394' for DV1394. By default 'v4l'
    // <standard>    : 'pal', 'secam' or 'ntsc'. By default 'ntsc'
    // The driver name is constructed automatically from the device type:
    // v4l   : /dev/video<camIdx>
    // dv1394: /dev/dv1394/<camIdx>
    // If you have a different driver name, you can specify the driver name explicitly
    // instead of the device type. Examples of valid filenames:
    //    /dev/v4l/video0:pal
    //    /dev/ieee1394/1:ntsc
    //    dv1394:secam
    //    v4l:pal
    if (file && strstr(file, "1394") != NULL)
    {
        // the user specifies a driver, check if it is v4l or dv1394
        inputFormat = av_find_input_format("dv1394");
        sprintf(filename, "/dev/dv1394/%d", camIdx);
    } else
    {
        inputFormat = av_find_input_format("video4linux");
        sprintf(filename, "/dev/video%d", camIdx);
    }
    if (!inputFormat)
        // these formats should be supported, check the ffmpeg compilation
        return;
    if (file && strncmp(file, "/dev", 4) == 0)
    {
        // the user specified an explicit device path, use it (strip the :<standard> suffix)
        strncpy(filename, file, sizeof(filename));
        filename[sizeof(filename)-1] = 0;
        if ((p = strchr(filename, ':')) != 0)
            *p = 0;
    }
    if (file && (p = strchr(file, ':')) != NULL)
        formatParams.standard = p+1;
#endif
    // frame rate
    if (m_captRate <= 0.f)
        m_captRate = defFrameRate;
    sprintf(rateStr, "%f", m_captRate);
    av_parse_video_frame_rate(&frameRate, rateStr);
    // populate format parameters
    // need to specify the time base = inverse of rate
    formatParams.time_base.num = frameRate.den;
    formatParams.time_base.den = frameRate.num;
    formatParams.width = m_captWidth;
    formatParams.height = m_captHeight;

    if (openStream(filename, inputFormat, &formatParams) != 0)
        return;

    // for video capture it is important to do non-blocking reads
    m_formatCtx->flags |= AVFMT_FLAG_NONBLOCK;
    // open base class
    VideoBase::openCam(file, camIdx);
    // check if we should do multi-threading
    if (BLI_system_thread_count() > 1)
    {
        // no need to thread if the system has a single core
        m_isThreaded = true;
    }
}

// play video
bool VideoFFmpeg::play (void)
{
    try
    {
        // if object is able to play
        if (VideoBase::play())
        {
            // set video position
            setPositions();
            // return success
            return true;
        }
    }
    CATCH_EXCP;
    return false;
}


// pause video
bool VideoFFmpeg::pause (void)
{
    try
    {
        if (VideoBase::pause())
        {
            return true;
        }
    }
    CATCH_EXCP;
    return false;
}

// stop video
bool VideoFFmpeg::stop (void)
{
    try
    {
        VideoBase::stop();
        // force restart on play
        m_lastFrame = -1;
        return true;
    }
    CATCH_EXCP;
    return false;
}


// set video range
void VideoFFmpeg::setRange (double start, double stop)
{
    try
    {
        // set range
        if (m_isFile)
        {
            VideoBase::setRange(start, stop);
            // set range for video
            setPositions();
        }
    }
    CATCH_EXCP;
}

// set framerate
void VideoFFmpeg::setFrameRate (float rate)
{
    VideoBase::setFrameRate(rate);
}


// image calculation
void VideoFFmpeg::calcImage (unsigned int texId)
{
    loadFrame();
}


// load frame from video
void VideoFFmpeg::loadFrame (void)
{
    if (m_status == SourcePlaying)
    {
        // get actual time
        double startTime = PIL_check_seconds_timer();
        if (m_lastFrame == -1)
            m_startTime = startTime;
        double actTime = startTime - m_startTime;
        // if video has ended
        if (m_isFile && actTime * m_frameRate >= m_range[1])
        {
            // in any case, this resets the cache
            stopCache();
            // if repeats are set, decrease them
            if (m_repeat > 0)
                --m_repeat;
            // if video has to be replayed
            if (m_repeat != 0)
            {
                // reset its position
                actTime -= (m_range[1] - m_range[0]) / m_frameRate;
                m_startTime += (m_range[1] - m_range[0]) / m_frameRate;
            }
            // if video has to be stopped, stop it
            else
            {
                m_status = SourceStopped;
                return;
            }
        }
        // actual frame
        long actFrame = (m_isImage) ? m_lastFrame+1 : long(actTime * actFrameRate());
        // if the actual frame differs from the last frame
        if (actFrame != m_lastFrame)
        {
            AVFrame* frame;
            // get image
            if((frame = grabFrame(actFrame)) != NULL)
            {
                if (!m_isFile && !m_cacheStarted)
                {
                    // streaming without cache: detect synchronization problems
                    double execTime = PIL_check_seconds_timer() - startTime;
                    if (execTime > 0.005)
                    {
                        // exec time is too long, it means that the function was blocking
                        // resynchronize the stream from this time
                        m_startTime += execTime;
                    }
                }
                // save actual frame
                m_lastFrame = actFrame;
                // init image, if needed
                init(short(m_codecCtx->width), short(m_codecCtx->height));
                // process image
                process((BYTE*)(frame->data[0]));
                // finished with the frame, release it so that the cache can reuse it
                releaseFrame(frame);
                // in case it is an image, automatically stop reading it
                if (m_isImage)
                {
                    m_status = SourceStopped;
                    // close the file as we don't need it anymore
                    release();
                }
            } else if (!m_isFile)
            {
                // we didn't get a frame and we are streaming, this may be due to
                // a delay in the network or because we are getting the frame too fast.
                // In the latter case, shift time by a small amount to compensate for a drift
                m_startTime += 0.01;
            }
        }
    }
}


// set actual position
void VideoFFmpeg::setPositions (void)
{
    // set video start time
    m_startTime = PIL_check_seconds_timer();
    // if the file is played and the actual position is before the end position
    if (!m_eof && m_lastFrame >= 0 && (!m_isFile || m_lastFrame < m_range[1] * actFrameRate()))
        // continue from actual position
        m_startTime -= double(m_lastFrame) / actFrameRate();
    else {
        m_startTime -= m_range[0];
        // start from the beginning, stop cache just in case
        stopCache();
    }
}

// position the pointer in the file; position is expressed in frames
AVFrame *VideoFFmpeg::grabFrame(long position)
{
    AVPacket packet;
    int frameFinished;
    int posFound = 1;
    bool frameLoaded = false;
    long long targetTs = 0;
    CacheFrame *frame;

    if (m_cacheStarted)
    {
        // when the cache is active, we must not read the file directly
        do {
            pthread_mutex_lock(&m_cacheMutex);
            frame = (CacheFrame *)m_frameCacheBase.first;
            pthread_mutex_unlock(&m_cacheMutex);
            // no need to remove the frame from the queue: the cache thread does not touch the head, only the tail
            if (frame == NULL)
            {
                // no frame in cache, for a file this is an abnormal situation
                if (m_isFile)
                {
                    // fall back to non-threaded reading
                    stopCache();
                    break;
                }
                return NULL;
            }
            if (frame->framePosition == -1)
            {
                // this frame marks the end of the file (only used for files)
                // leave it in the cache to make sure we don't miss it
                m_eof = true;
                return NULL;
            }
            // for streaming, always return the next frame,
            // that's what grabFrame does in non-cache mode anyway.
            if (!m_isFile || frame->framePosition == position)
            {
                return frame->frame;
            }
            // this frame is not useful, release it
            pthread_mutex_lock(&m_cacheMutex);
            BLI_remlink(&m_frameCacheBase, frame);
            BLI_addtail(&m_frameCacheFree, frame);
            pthread_mutex_unlock(&m_cacheMutex);
        } while (true);
    }
    // come here when there is no cache or the cache has been stopped
    // locate the frame, by seeking if necessary (seeking is only possible for files)
    if (m_isFile)
    {
        // first check if the position that we are looking for is in the preseek range
        // if so, just read frames until we get there
        if (position > m_curPosition + 1
            && m_preseek
            && position - (m_curPosition + 1) < m_preseek)
        {
            while(av_read_frame(m_formatCtx, &packet)>=0)
            {
                if (packet.stream_index == m_videoStream)
                {
                    avcodec_decode_video(
                        m_codecCtx,
                        m_frame, &frameFinished,
                        packet.data, packet.size);
                    if (frameFinished)
                        m_curPosition++;
                }
                av_free_packet(&packet);
                if (position == m_curPosition+1)
                    break;
            }
        }
        // if the position is not within the preseek range, do a direct jump
        if (position != m_curPosition + 1)
        {
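            // Convert the target frame number to a stream timestamp:
            // ts = frame / (fps * time_base). For example, at 25 fps with a
            // 1/90000 time base, frame 100 maps to ts = 100 / (25/90000) = 360000.
            // We seek m_preseek frames early and decode forward to the exact frame.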
            double timeBase = av_q2d(m_formatCtx->streams[m_videoStream]->time_base);
            int64_t pos = (int64_t)((position - m_preseek) / (m_baseFrameRate*timeBase));
            int64_t startTs = m_formatCtx->streams[m_videoStream]->start_time;
            int seekres;

            if (pos < 0)
                pos = 0;

            if (startTs != AV_NOPTS_VALUE)
                pos += startTs;

            if (position <= m_curPosition || !m_eof)
            {
#if 0
                // Tried to make this work but couldn't: seeking by byte is ignored by the
                // format plugin and it will generally continue to read from the last timestamp.
                // Too bad because frame seek is not always able to get the first frame
                // of the file.
                if (position <= m_preseek)
                {
                    // we can safely go to the beginning of the file
                    if (av_seek_frame(m_formatCtx, m_videoStream, 0, AVSEEK_FLAG_BYTE) >= 0)
                    {
                        // binary seek does not reset the timestamp, must do it now
                        av_update_cur_dts(m_formatCtx, m_formatCtx->streams[m_videoStream], startTs);
                        m_curPosition = 0;
                    }
                }
                else
#endif
                {
                    if (av_seek_frame(m_formatCtx, m_videoStream, pos, AVSEEK_FLAG_BACKWARD) >= 0)
                    {
                        // current position is now lost, guess a value.
                        // It's not important because it will be set at the end of this function
                        m_curPosition = position - m_preseek - 1;
                    }
                }
            }
            // this is the timestamp of the frame we're looking for
            targetTs = (int64_t)(position / (m_baseFrameRate * timeBase));
            if (startTs != AV_NOPTS_VALUE)
                targetTs += startTs;

            posFound = 0;
            avcodec_flush_buffers(m_codecCtx);
        }
    } else if (m_isThreaded)
    {
        // the cache is not started but threading is possible
        // better not to read the stream directly => it may take some time, better start caching
        if (startCache())
            return NULL;
        // Abnormal!!! could not start cache, fall back on direct read
        m_isThreaded = false;
    }

    // find the correct frame; in case of streaming without cache, this just
    // returns the next frame. This is not quite correct, may need more work
    while(av_read_frame(m_formatCtx, &packet)>=0)
    {
        if(packet.stream_index == m_videoStream)
        {
            avcodec_decode_video(m_codecCtx,
                m_frame, &frameFinished,
                packet.data, packet.size);

            if (frameFinished && !posFound)
            {
                if (packet.dts >= targetTs)
                    posFound = 1;
            }

            if(frameFinished && posFound == 1)
            {
                AVFrame * input = m_frame;

                /* all-NULL data pointers mean the frame wasn't read properly,
                   this check stops crashing */
                if (   input->data[0]==0 && input->data[1]==0
                    && input->data[2]==0 && input->data[3]==0)
                {
                    av_free_packet(&packet);
                    break;
                }

                if (m_deinterlace)
                {
                    if (avpicture_deinterlace(
                        (AVPicture*) m_frameDeinterlaced,
                        (const AVPicture*) m_frame,
                        m_codecCtx->pix_fmt,
                        m_codecCtx->width,
                        m_codecCtx->height) >= 0)
                    {
                        input = m_frameDeinterlaced;
                    }
                }
                // convert to RGB24
                sws_scale(m_imgConvertCtx,
                    input->data,
                    input->linesize,
                    0,
                    m_codecCtx->height,
                    m_frameRGB->data,
                    m_frameRGB->linesize);
                av_free_packet(&packet);
                frameLoaded = true;
                break;
            }
        }
        av_free_packet(&packet);
    }
    m_eof = m_isFile && !frameLoaded;
    if (frameLoaded)
    {
        m_curPosition = position;
        if (m_isThreaded)
        {
            // normal case for file: first locate, then start cache
            if (!startCache())
            {
                // Abnormal!! could not start cache, return to non-cache mode
                m_isThreaded = false;
            }
        }
        return m_frameRGB;
    }
    return NULL;
}


// python methods


// cast Image pointer to VideoFFmpeg
inline VideoFFmpeg * getVideoFFmpeg (PyImage * self)
{ return static_cast<VideoFFmpeg*>(self->m_image); }


// object initialization
static int VideoFFmpeg_init (PyObject * pySelf, PyObject * args, PyObject * kwds)
{
    PyImage * self = reinterpret_cast<PyImage*>(pySelf);
    // parameters - video source
    // file name or format type for capture (only for Linux: video4linux or dv1394)
    char * file = NULL;
    // capture device number
    short capt = -1;
    // capture width, only if capt is >= 0
    short width = 0;
    // capture height, only if capt is >= 0
    short height = 0;
    // capture rate, only if capt is >= 0
    float rate = 25.f;

    static char *kwlist[] = {"file", "capture", "rate", "width", "height", NULL};

    // get parameters
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|hfhh", kwlist, &file, &capt,
        &rate, &width, &height))
        return -1;

    try
    {
        // create video object
        Video_init<VideoFFmpeg>(self);

        // set capture parameters
        getVideoFFmpeg(self)->initParams(width, height, rate);

        // open video source
        Video_open(getVideo(self), file, capt);
    }
    catch (Exception & exp)
    {
        exp.report();
        return -1;
    }
    // initialization succeeded
    return 0;
}

PyObject * VideoFFmpeg_getPreseek (PyImage *self, void * closure)
{
    return Py_BuildValue("h", getFFmpeg(self)->getPreseek());
}

// set preseek
int VideoFFmpeg_setPreseek (PyImage * self, PyObject * value, void * closure)
{
    // check validity of parameter
    if (value == NULL || !PyInt_Check(value))
    {
        PyErr_SetString(PyExc_TypeError, "The value must be an integer");
        return -1;
    }
    // set preseek
    getFFmpeg(self)->setPreseek(PyInt_AsLong(value));
    // success
    return 0;
}

// get deinterlace
PyObject * VideoFFmpeg_getDeinterlace (PyImage * self, void * closure)
{
    if (getFFmpeg(self)->getDeinterlace())
        Py_RETURN_TRUE;
    else
        Py_RETURN_FALSE;
}

// set deinterlace
int VideoFFmpeg_setDeinterlace (PyImage * self, PyObject * value, void * closure)
{
    // check parameter, report failure
    if (value == NULL || !PyBool_Check(value))
    {
        PyErr_SetString(PyExc_TypeError, "The value must be a bool");
        return -1;
    }
    // set deinterlace
    getFFmpeg(self)->setDeinterlace(value == Py_True);
    // success
    return 0;
}

// methods structure
static PyMethodDef videoMethods[] =
{ // methods from VideoBase class
    {"play", (PyCFunction)Video_play, METH_NOARGS, "Play (restart) video"},
    {"pause", (PyCFunction)Video_pause, METH_NOARGS, "Pause video"},
    {"stop", (PyCFunction)Video_stop, METH_NOARGS, "Stop video (play will replay it from start)"},
    {"refresh", (PyCFunction)Video_refresh, METH_NOARGS, "Refresh video - get its status"},
    {NULL}
};
// attributes structure
static PyGetSetDef videoGetSets[] =
{ // methods from VideoBase class
    {(char*)"status", (getter)Video_getStatus, NULL, (char*)"video status", NULL},
    {(char*)"range", (getter)Video_getRange, (setter)Video_setRange, (char*)"replay range", NULL},
    {(char*)"repeat", (getter)Video_getRepeat, (setter)Video_setRepeat, (char*)"repeat count, -1 for infinite repeat", NULL},
    {(char*)"framerate", (getter)Video_getFrameRate, (setter)Video_setFrameRate, (char*)"frame rate", NULL},
    // attributes from ImageBase class
    {(char*)"image", (getter)Image_getImage, NULL, (char*)"image data", NULL},
    {(char*)"size", (getter)Image_getSize, NULL, (char*)"image size", NULL},
    {(char*)"scale", (getter)Image_getScale, (setter)Image_setScale, (char*)"fast scale of image (nearest neighbour)", NULL},
    {(char*)"flip", (getter)Image_getFlip, (setter)Image_setFlip, (char*)"flip image vertically", NULL},
    {(char*)"filter", (getter)Image_getFilter, (setter)Image_setFilter, (char*)"pixel filter", NULL},
    {(char*)"preseek", (getter)VideoFFmpeg_getPreseek, (setter)VideoFFmpeg_setPreseek, (char*)"number of frames of preseek", NULL},
    {(char*)"deinterlace", (getter)VideoFFmpeg_getDeinterlace, (setter)VideoFFmpeg_setDeinterlace, (char*)"deinterlace image", NULL},
    {NULL}
};

// python type declaration
PyTypeObject VideoFFmpegType =
{
#if (PY_VERSION_HEX >= 0x02060000)
    PyVarObject_HEAD_INIT(NULL, 0)
#else
    /* python 2.5 and below */
    PyObject_HEAD_INIT( NULL )  /* required py macro */
    0,                         /*ob_size*/
#endif
    "VideoTexture.VideoFFmpeg", /*tp_name*/
    sizeof(PyImage),           /*tp_basicsize*/
    0,                         /*tp_itemsize*/
    (destructor)Image_dealloc, /*tp_dealloc*/
    0,                         /*tp_print*/
    0,                         /*tp_getattr*/
    0,                         /*tp_setattr*/
    0,                         /*tp_compare*/
    0,                         /*tp_repr*/
    0,                         /*tp_as_number*/
    0,                         /*tp_as_sequence*/
    0,                         /*tp_as_mapping*/
    0,                         /*tp_hash */
    0,                         /*tp_call*/
    0,                         /*tp_str*/
    0,                         /*tp_getattro*/
    0,                         /*tp_setattro*/
    0,                         /*tp_as_buffer*/
    Py_TPFLAGS_DEFAULT,        /*tp_flags*/
    "FFmpeg video source",     /* tp_doc */
    0,                         /* tp_traverse */
    0,                         /* tp_clear */
    0,                         /* tp_richcompare */
    0,                         /* tp_weaklistoffset */
    0,                         /* tp_iter */
    0,                         /* tp_iternext */
    videoMethods,              /* tp_methods */
    0,                         /* tp_members */
    videoGetSets,              /* tp_getset */
    0,                         /* tp_base */
    0,                         /* tp_dict */
    0,                         /* tp_descr_get */
    0,                         /* tp_descr_set */
    0,                         /* tp_dictoffset */
    (initproc)VideoFFmpeg_init, /* tp_init */
    0,                         /* tp_alloc */
    Image_allocNew,            /* tp_new */
};
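
/*
 * Illustrative usage sketch from a game script (assumes the VideoTexture
 * module is registered as above and that a Texture object wraps this source;
 * the file name is hypothetical):
 *
 *   import VideoTexture
 *   video = VideoTexture.VideoFFmpeg('movie.avi')  # or (file, capture, rate, width, height)
 *   video.repeat = -1     # -1 = infinite repeat (see videoGetSets above)
 *   video.play()
 *   video.refresh()       # call every frame to decode and upload the current image
 */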

// object initialization
static int ImageFFmpeg_init (PyObject * pySelf, PyObject * args, PyObject * kwds)
{
    PyImage * self = reinterpret_cast<PyImage*>(pySelf);
    // parameters - video source
    // file name or format type for capture (only for Linux: video4linux or dv1394)
    char * file = NULL;

    // get parameters
    if (!PyArg_ParseTuple(args, "s:ImageFFmpeg", &file))
        return -1;

    try
    {
        // create video object
        Video_init<VideoFFmpeg>(self);

        getVideoFFmpeg(self)->initParams(0, 0, 1.0, true);

        // open video source
        Video_open(getVideo(self), file, -1);
    }
    catch (Exception & exp)
    {
        exp.report();
        return -1;
    }
    // initialization succeeded
    return 0;
}

PyObject * Image_reload (PyImage * self, PyObject *args)
{
    char * newname = NULL;
    if (!PyArg_ParseTuple(args, "|s:reload", &newname))
        return NULL;
    if (self->m_image != NULL)
    {
        VideoFFmpeg* video = getFFmpeg(self);
        // check type of object
        if (!newname)
            newname = video->getImageName();
        if (!newname) {
            // if not set, report an error
            PyErr_SetString(PyExc_RuntimeError, "No image file name given");
            return NULL;
        }
        // make sure the previous file is cleared
        video->release();
        // open the new file
        video->openFile(newname);
    }
    Py_RETURN_NONE;
}

// methods structure
static PyMethodDef imageMethods[] =
{ // methods from VideoBase class
    {"refresh", (PyCFunction)Video_refresh, METH_NOARGS, "Refresh image, i.e. load it"},
    {"reload", (PyCFunction)Image_reload, METH_VARARGS, "Reload image, i.e. reopen it"},
    {NULL}
};
// attributes structure
static PyGetSetDef imageGetSets[] =
{ // methods from VideoBase class
    {(char*)"status", (getter)Video_getStatus, NULL, (char*)"video status", NULL},
    // attributes from ImageBase class
    {(char*)"image", (getter)Image_getImage, NULL, (char*)"image data", NULL},
    {(char*)"size", (getter)Image_getSize, NULL, (char*)"image size", NULL},
    {(char*)"scale", (getter)Image_getScale, (setter)Image_setScale, (char*)"fast scale of image (nearest neighbour)", NULL},
    {(char*)"flip", (getter)Image_getFlip, (setter)Image_setFlip, (char*)"flip image vertically", NULL},
    {(char*)"filter", (getter)Image_getFilter, (setter)Image_setFilter, (char*)"pixel filter", NULL},
    {NULL}
};

// python type declaration
PyTypeObject ImageFFmpegType =
{
#if (PY_VERSION_HEX >= 0x02060000)
    PyVarObject_HEAD_INIT(NULL, 0)
#else
    /* python 2.5 and below */
    PyObject_HEAD_INIT( NULL )  /* required py macro */
    0,                         /*ob_size*/
#endif
    "VideoTexture.ImageFFmpeg", /*tp_name*/
    sizeof(PyImage),           /*tp_basicsize*/
    0,                         /*tp_itemsize*/
    (destructor)Image_dealloc, /*tp_dealloc*/
    0,                         /*tp_print*/
    0,                         /*tp_getattr*/
    0,                         /*tp_setattr*/
    0,                         /*tp_compare*/
    0,                         /*tp_repr*/
    0,                         /*tp_as_number*/
    0,                         /*tp_as_sequence*/
    0,                         /*tp_as_mapping*/
    0,                         /*tp_hash */
    0,                         /*tp_call*/
    0,                         /*tp_str*/
    0,                         /*tp_getattro*/
    0,                         /*tp_setattro*/
    0,                         /*tp_as_buffer*/
    Py_TPFLAGS_DEFAULT,        /*tp_flags*/
    "FFmpeg image source",     /* tp_doc */
    0,                         /* tp_traverse */
    0,                         /* tp_clear */
    0,                         /* tp_richcompare */
    0,                         /* tp_weaklistoffset */
    0,                         /* tp_iter */
    0,                         /* tp_iternext */
    imageMethods,              /* tp_methods */
    0,                         /* tp_members */
    imageGetSets,              /* tp_getset */
    0,                         /* tp_base */
    0,                         /* tp_dict */
    0,                         /* tp_descr_get */
    0,                         /* tp_descr_set */
    0,                         /* tp_dictoffset */
    (initproc)ImageFFmpeg_init, /* tp_init */
    0,                         /* tp_alloc */
    Image_allocNew,            /* tp_new */
};
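
/*
 * Illustrative sketch of the image variant (module registration assumed as above;
 * file names are hypothetical):
 *
 *   import VideoTexture
 *   img = VideoTexture.ImageFFmpeg('picture.png')
 *   img.refresh()            # load the file into the texture image
 *   img.reload('other.png')  # release the current file and reopen another one
 */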

#endif  //WITH_FFMPEG