source: trunk/src/objects/RpVideo.c @ 2724

Last change on this file since 2724 was 2724, checked in by gah, 13 years ago

Additional checks for ffmpeg

File size: 45.4 KB
1/*
2 * ----------------------------------------------------------------------
3 *  TkFFMPEG:  video
4 *
5 *  These routines support the methods in the "video" class, which is
6 *  a video stream that can be read from or written to.  The class
7 *  itself is defined in itcl, but when methods are called, execution
8 *  jumps down to this level.
9 * ======================================================================
10 *  AUTHOR:  Michael McLennan, Purdue University
11 *  Copyright (c) 2004-2008  Purdue Research Foundation
12 *
13 *  See the file "license.terms" for information on usage and
14 *  redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
15 * ======================================================================
16 */
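/*
 * A hypothetical usage sketch (not part of the original source) showing
 * how the itcl "video" class typically strings these entry points
 * together: open a file for reading, seek to a frame, pull out an RGB
 * image, and clean up.  The file name and frame number are made up for
 * illustration.
 *
 *     VideoObj *vid = VideoInit();
 *     if (VideoOpenFile(vid, "movie.mpg", "r") == 0) {
 *         void *img = NULL;
 *         int bufSize = 0;
 *         VideoGoToN(vid, 100);                        // seek to frame 100
 *         VideoGetImage(vid, -1, -1, &img, &bufSize);  // native size, PPM data
 *         // ... hand img/bufSize back to the caller ...
 *     }
 *     VideoCleanup(vid);                               // closes and frees vid
 */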
17
18#include <stdlib.h>
19#include <string.h>
20
21#include "config.h"
22
23#ifdef HAVE_FFMPEG_AVCODEC_H
24# include <ffmpeg/avcodec.h>
25#endif
26
27#ifdef HAVE_LIBAVCODEC_AVCODEC_H
28# include <libavcodec/avcodec.h>
29#endif
30
31#ifdef HAVE_FFMPEG_AVFORMAT_H
32# include <ffmpeg/avformat.h>
33#endif
34
35#ifdef HAVE_LIBAVFORMAT_AVFORMAT_H
36# include <libavformat/avformat.h>
37#endif
38
39#ifdef HAVE_FFMPEG_AVUTIL_H
40# include <ffmpeg/avutil.h>
41#endif
42
43#ifdef HAVE_LIBAVUTIL_AVUTIL_H
44# include <libavutil/avutil.h>
45#endif
46
47#ifdef HAVE_FFMPEG_SWSCALE_H
48# include <ffmpeg/swscale.h>
49#endif
50
51#ifdef HAVE_LIBSWSCALE_SWSCALE_H
52# include <libswscale/swscale.h>
53#endif
54
55#include "RpVideo.h"
56
57#ifndef HAVE_AVMEDIA_TYPE_VIDEO
58#define AVMEDIA_TYPE_VIDEO      CODEC_TYPE_VIDEO
59#endif  /* HAVE_AVMEDIA_TYPE_VIDEO */
60
61#ifndef AV_PKT_FLAG_KEY
62#define AV_PKT_FLAG_KEY         PKT_FLAG_KEY           
63#endif
64
65#ifndef HAVE_AVIO_CLOSE
66#define avio_close              url_fclose
67#endif
68
69/*
70 * Each video object is represented by the following data:
71 */
72struct VideoObjRec {
73    int magic;
74
75    /* video input */
76    AVFormatContext *pFormatCtx;
77    int videoStream;
78    int frameNumber;
79    int atEnd;
80
81    /* video output */
82    AVFormatContext *outFormatCtx;
83    AVStream *outVideoStr;
84
85    /* used for both input/output */
86    AVFrame *pFrameYUV;
87    uint8_t *yuvbuffer;
88    int yuvw, yuvh;
89    AVFrame *pFrameRGB;
90    uint8_t *rgbbuffer;
91    int rgbw, rgbh;
92    struct SwsContext *scalingCtx;
93
94    char *fileName;
95    char mode[64];
96    char fmt[64];
97    int lastframe;
98
99    /* tmp buffer to give images back to user */
100    void *img;
101    int imgHeaderLen;
102    int imgWidth;
103    int imgHeight;
104};
105
106/* magic stamp for VideoObj, to make sure data is valid */
107#define VIDEO_OBJ_MAGIC 0x0102abcd
108
109static VideoObj *VideoSetData ();
110
111static int VideoModeRead (VideoObj *vidPtr);
112// static int VideoModeWrite (Tcl_Interp *interp, int w, int h);
113
114static int VideoTime2Frame (AVStream *streamPtr, int64_t tval);
115static int64_t VideoFrame2Time (AVStream *streamPtr, int fval);
116static void VideoNextFrame (VideoObj *vidPtr);
117
118uint64_t global_video_pkt_pts = AV_NOPTS_VALUE;
119static int VideoAvGetBuffer (struct AVCodecContext *c, AVFrame *fr);
120static void VideoAvReleaseBuffer (struct AVCodecContext *c, AVFrame *fr);
121static int VideoWriteFrame (VideoObj *vidPtr, AVFrame *framePtr);
122
123static int VideoAllocImgBuffer (VideoObj *vidPtr, int width, int height);
124static int VideoFreeImgBuffer (VideoObj *vidPtr);
125static double VideoTransformFrames2Duration (VideoObj *vidPtr, int frame);
126static int VideoTransformDuration2Frames (VideoObj *vidPtr, double duration);
127
128/*
129 * ------------------------------------------------------------------------
130 *  VideoSetData()
131 *
132 *  Saves VideoObj data in the "_videodata" slot in the current object
133 *  context.  The data can be retrieved later by calling VideoGetData().
134 * ------------------------------------------------------------------------
135 */
136VideoObj *
137VideoSetData()
138{
139    VideoObj* vid = NULL;
140
141    vid = malloc(sizeof(VideoObj));
142
143    if (vid == NULL) {
144        return NULL;
145    }
146
147    vid->magic = VIDEO_OBJ_MAGIC;
148    vid->pFormatCtx = NULL;
149    vid->videoStream = 0;
150    vid->frameNumber = -1;
151    vid->atEnd = 0;
152
153    vid->outFormatCtx = NULL;
154    vid->outVideoStr = NULL;
155
156    vid->pFrameYUV = NULL;
157    vid->yuvbuffer = NULL;
158    vid->yuvw = 0;
159    vid->yuvh = 0;
160    vid->pFrameRGB = NULL;
161    vid->rgbbuffer = NULL;
162    vid->rgbw = 0;
163    vid->rgbh = 0;
164    vid->scalingCtx = NULL;
165
166    vid->fileName = NULL;
167    *vid->mode = '\0';
168    *vid->fmt = '\0';
169    vid->lastframe = 0;
170
171    vid->img = NULL;
172    vid->imgHeaderLen = 0;
173    vid->imgWidth = 0;
174    vid->imgHeight = 0;
175
176    return vid;
177}
178
179int
180VideoOpenFile(vidPtr, fileName, mode)
181    VideoObj *vidPtr;
182    const char *fileName;
183    const char *mode;
184{
185    int fnlen = 0;
186    int err = 0;
187    int lastframe = 0;
188
189    if (fileName == NULL) {
190        // missing value for fileName
191        // return TCL_ERROR;
192        return -1;
193    }
194    if (*fileName == '\0') {
195        /* no file name set -- do nothing */
196        return 0;
197    }
198
199    fnlen = strlen(fileName);
200    if (vidPtr->fileName != NULL) {
201        free(vidPtr->fileName);
202    }
203    vidPtr->fileName = (char *) malloc((fnlen+1)*sizeof(char));
204    if (vidPtr->fileName == NULL) {
205        // trouble mallocing space
206        return -1;
207    }
208    strncpy(vidPtr->fileName,fileName,fnlen);
209    vidPtr->fileName[fnlen] = '\0';
210
211    // FIXME: remove this constraint when we support
212    // the modes: r, r+, w, w+, a, a+, b and combinations
213    if (strlen(mode) > 1) {
214        return -1;
215    }
216
217    if (*mode == 'r') {
218        /* we're now in "input" mode */
219        err = VideoModeRead(vidPtr);
220        if (err) {
221            return err;
222        }
223
224        VideoFindLastFrame(vidPtr,&lastframe);
225        vidPtr->lastframe = lastframe;
226    } else if (*mode == 'w') {
227        /* we're now in "output" mode */
228        // VideoModeWrite(vidPtr);
229    } else {
230        // unrecognized mode
231        return -1;
232    }
233
234    return 0;
235}
236
237/*
238 * ------------------------------------------------------------------------
239 *  VideoFindLastFrame()
240 *
241 *  Find the last readable frame.
242 * ------------------------------------------------------------------------
243 */
244int
245VideoFindLastFrame(vidPtr,lastframe)
246    VideoObj *vidPtr;
247    int *lastframe;
248{
249    int f = 0;
250    int nframe = 0;
251    int cur = 0;
252    AVStream *vstreamPtr;
253
254    if (vidPtr == NULL) {
255        return -1;
256    }
257
258    if (lastframe == NULL) {
259        return -1;
260    }
261
262    if (VideoModeRead(vidPtr) != 0) {
263        return -1;
264    }
265
266    // calculate an estimate of the last frame
267    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
268    nframe = VideoTime2Frame(vstreamPtr,
269        vstreamPtr->start_time + vstreamPtr->duration);
270
271    // get the real last readable frame: back up 50 frames (hopefully
272    // far enough to land before the last key frame) and step forward
273    // until the frame number stops advancing
274    f = vidPtr->frameNumber;
275    cur = VideoGoToN(vidPtr,nframe-50);
276    while (cur != nframe) {
277        cur = nframe;
278        nframe = VideoGoNext(vidPtr);
279    }
280    *lastframe = nframe;
281    VideoGoToN(vidPtr,f);
282
283    return 0;
284}
285
286
287/*
288 * ------------------------------------------------------------------------
289 *  VideoModeRead()
290 *
291 *  Tries to force this video stream into "read" mode.  If the current
292 *  mode is "", then the -file is opened for reading.  If the current
293 *  mode is "write", then the stream is closed and then opened for
294 *  reading.  If the current mode is "read", then this call does nothing.
295 *  Usually called just before a "read" operation (get, go, etc.) is
296 *  performed.
297 *
298 *  Returns 0 if successful, and a negative error code (listed below)
299 *  if there is a problem opening or closing the stream.
300 *
301 *  Error Codes
302 *  -1      general error (invalid video object, or close failed)
303 *  -2      missing file name
304 *  -3      couldn't open file
305 *  -4      couldn't find streams in file
306 *  -5      couldn't find video stream in file
307 *  -6      unsupported codec for file
308 *  -7      couldn't open codec for file
309 *  -8      couldn't allocate frame space
310 *  -9      strcpy input to vidPtr->mode failed
311 * ------------------------------------------------------------------------
312 */
313int
314VideoModeRead(vidPtr)
315    VideoObj *vidPtr;
316{
317    char c, buffer[64]; int i;
318    const char *fmt;
319    AVCodecContext *vcodecCtx;
320    AVCodec *vcodec;
321
322    if (vidPtr == NULL) {
323        return -1;
324    }
325
326    if (vidPtr->fileName == NULL) {
327        // Tcl_AppendResult(interp, "missing value for -file", (char*)NULL);
328        // return TCL_ERROR;
329
330        // missing file name
331        return -2;
332    }
333    if (*vidPtr->fileName == '\0') {
334        /* no file name set -- do nothing */
335        return 0;
336    }
337
338    if (strcmp(vidPtr->mode,"input") == 0) {
339        return 0;
340    } else if (strcmp(vidPtr->mode,"output") == 0) {
341        if (VideoClose(vidPtr) != 0) {
342            return -1;
343        }
344    }
345
346    /*
347     * Open the video stream from that file.
348     */
349#ifdef HAVE_AVFORMAT_OPEN_INPUT
350    if (avformat_open_input(&vidPtr->pFormatCtx, vidPtr->fileName, NULL,
351        NULL) != 0) {
352        return -3;
353    }
354#else
355    if (av_open_input_file(&vidPtr->pFormatCtx, vidPtr->fileName,
356            NULL, 0, NULL) != 0) {
357        return -3;
358    }
359#endif
360    if (av_find_stream_info(vidPtr->pFormatCtx) < 0) {
361        // Tcl_AppendResult(interp, "couldn't find streams in file \"",
362        //     fileName, "\"", (char*)NULL);
363        // return TCL_ERROR;
364
365        // couldn't find streams in file
366        return -4;
367    }
368
369    /*
370     * Search for a video stream and its codec.
371     */
372    vidPtr->videoStream = -1;
373    for (i=0; i < vidPtr->pFormatCtx->nb_streams; i++) {
374        if (vidPtr->pFormatCtx->streams[i]->codec->codec_type
375            == AVMEDIA_TYPE_VIDEO) {
376            vidPtr->videoStream = i;
377            break;
378        }
379    }
380    if (vidPtr->videoStream < 0) {
381        // Tcl_AppendResult(interp, "couldn't find video stream in file \"",
382        //     fileName, "\"", (char*)NULL);
383        // return TCL_ERROR;
384
385        // couldn't find video stream in file
386        return -5;
387    }
388
389    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
390    vcodec = avcodec_find_decoder(vcodecCtx->codec_id);
391    if (vcodec == NULL) {
392        // Tcl_AppendResult(interp, "unsupported codec for file \"",
393        //     fileName, "\"", (char*)NULL);
394        // return TCL_ERROR;
395
396        // unsupported codec for file
397        return -6;
398    }
399    if (avcodec_open(vcodecCtx, vcodec) < 0) {
400        // Tcl_AppendResult(interp, "couldn't open codec for file \"",
401        //     fileName, "\"", (char*)NULL);
402        // return TCL_ERROR;
403
404        // couldn't open codec for file
405        return -7;
406    }
407
408    vcodecCtx->get_buffer = VideoAvGetBuffer;
409    vcodecCtx->release_buffer = VideoAvReleaseBuffer;
410
411    vidPtr->pFrameYUV = avcodec_alloc_frame();
412    vidPtr->pFrameRGB = avcodec_alloc_frame();
413    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
414        // Tcl_AppendResult(interp, "couldn't allocate frame space",
415        //     " for file \"", fileName, "\"", (char*)NULL);
416        // return TCL_ERROR;
417
418        // couldn't allocate frame space
419        return -8;
420    }
421
422    /* save the name of the codec as the -format option */
423    fmt = "?";
424    if (vcodecCtx->codec && vcodecCtx->codec->name) {
425        fmt = vcodecCtx->codec->name;
426        strcpy(vidPtr->fmt,fmt);
427    }
428//
429//    sprintf(buffer, "%d", vcodecCtx->width);
430//    if (Tcl_SetVar(interp, "width", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
431//        return TCL_ERROR;
432//    }
433//    sprintf(buffer, "%d", vcodecCtx->height);
434//    if (Tcl_SetVar(interp, "height", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
435//        return TCL_ERROR;
436//    }
437//
438
439    if (strcpy(vidPtr->mode,"input") == NULL) {
440        // strcpy input to vidPtr->mode failed
441        return -9;
442    }
443
444    return 0;
445}
446
447
448// FIXME: get this function working.
449///*
450// * ------------------------------------------------------------------------
451// *  VideoModeWrite()
452// *
453// *  Tries to force this video stream into "write" mode.  If the current
454// *  mode is "", then the -file is opened for writing.  If the current
455// *  mode is "read", then the stream is closed and then opened for
456// *  writing.  If the current mode is "write", then this call does nothing.
457// *  Usually called just before a "write" operation (put, etc.) is
458// *  performed.
459// *
460// *  Returns TCL_OK if successful, and TCL_ERROR if there is a problem
461// *  opening or closing the stream.
462// * ------------------------------------------------------------------------
463// */
464//int
465//VideoModeWrite(vidPtr, fileName, width, height, fmt)
466//    VideoObj *vidPtr;      /* video object to write */
467//    CONST84 char *fileName;
468//    int width;             /* native width of each frame */
469//    int height;            /* native height of each frame */
470//    CONST84 char *fmt
471//{
472//    char c;
473//    int numBytes, pixfmt, iwd, iht;
474//    CONST84 char *size;
475//    AVCodecContext *codecCtx;
476//    AVCodec *vcodec;
477//
478//    if (vidPtr == NULL) {
479//        return -1;
480//    }
481//
482//    /*
483//     * Get the current mode.  If we're already in "output", then we're
484//     * done.  Otherwise, close the stream if necessary and prepare to
485//     * open the file for write.
486//     */
487//    if (vidPtr->mode == NULL) {
488//        return -1;
489//    }
490//
491//    c = *vidPtr->mode;
492//    if (c == 'o' && strcmp(vidPtr->mode,"output") == 0) {
493//        return 0;
494//    }
495//    else if (c == 'i' && strcmp(vidPtr->mode,"input") == 0) {
496//        if (VideoClose(vidPtr) != 0) {
497//            return -1;
498//        }
499//    }
500//
501//    /*
502//     * Get the file name from the -file variable.
503//     */
504//    if ((fileName == NULL) || (*fileName == '\0')) {
505//        /* no file name set -- do nothing */
506//        return 0;
507//    }
508//
509//    /*
510//     * Get the -width and -height of each frame.  If these are set
511//     * to 0 (default), then use the incoming width/height from an
512//     * actual frame.
513//     */
514//     iwd = width;
515//     iht = height;
516//
517//    /*
518//     * Get the format argument.
519//     */
520//    if (fmt == NULL) {
521////        Tcl_AppendResult(interp, "missing value for -format", (char*)NULL);
522////        return TCL_ERROR;
523//        return -1;
524//    }
525//    if (strcmp(fmt,"mpeg1video") == 0) {
526//        vidPtr->outFormatCtx = av_alloc_format_context();
527//        vidPtr->outFormatCtx->oformat = guess_format("mpeg", NULL, NULL);
528//    }
529//    else if (strcmp(fmt,"flv") == 0) {
530//        vidPtr->outFormatCtx = av_alloc_format_context();
531//        vidPtr->outFormatCtx->oformat = guess_format("flv", NULL, NULL);
532//    }
533//    else if (strcmp(fmt,"mov") == 0) {
534//        vidPtr->outFormatCtx = av_alloc_format_context();
535//        vidPtr->outFormatCtx->oformat = guess_format("mov", NULL, NULL);
536//        /* MOV normally uses MPEG4, but that may not be installed */
537//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
538//    }
539//    else if (strcmp(fmt,"avi") == 0) {
540//        vidPtr->outFormatCtx = av_alloc_format_context();
541//        vidPtr->outFormatCtx->oformat = guess_format("avi", NULL, NULL);
542//        /* AVI normally uses MPEG4, but that may not be installed */
543//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
544//    }
545//    else {
546////        Tcl_AppendResult(interp, "bad format \"", fmt, "\": should be",
547////            " avi, flv, mpeg1video, mov", (char*)NULL);
548////        return TCL_ERROR;
549//        return -1;
550//    }
551//
552//    /*
553//     * Open the video stream for writing.
554//     */
555//    strncpy(vidPtr->outFormatCtx->filename, fileName,
556//        sizeof(vidPtr->outFormatCtx->filename));
557//
558//    vidPtr->outVideoStr = av_new_stream(vidPtr->outFormatCtx, 0);
559//    if (vidPtr->outVideoStr == NULL) {
560////        Tcl_AppendResult(interp, "internal error:",
561////            " problem opening stream", (char*)NULL);
562////        return TCL_ERROR;
563//        return -1;
564//    }
565//    codecCtx = vidPtr->outVideoStr->codec;
566//
567//    codecCtx->codec_id = vidPtr->outFormatCtx->oformat->video_codec;
568//    codecCtx->codec_type = CODEC_TYPE_VIDEO;
569//
570//    /* put sample parameters */
571//    codecCtx->bit_rate = 400000;
572//    /* resolution must be a multiple of two */
573//    codecCtx->width = (iwd/2)*2;
574//    codecCtx->height = (iht/2)*2;
575//    codecCtx->time_base.den = 24;
576//    codecCtx->time_base.num = 1;
577//    codecCtx->gop_size = 12; /* emit one intra frame every so often */
578//    codecCtx->pix_fmt = PIX_FMT_YUV420P;
579//    if (codecCtx->codec_id == CODEC_ID_MPEG2VIDEO) {
580//        codecCtx->max_b_frames = 2;
581//    }
582//
583//    /* find the video encoder */
584//    vcodec = avcodec_find_encoder(codecCtx->codec_id);
585//    if (!vcodec || avcodec_open(codecCtx, vcodec) < 0) {
586//        // Tcl_AppendResult(interp, "internal error:",
587//        //     " problem opening codec", (char*)NULL);
588//        // return TCL_ERROR;
589//        return -1;
590//    }
591//
592//    if (av_set_parameters(vidPtr->outFormatCtx, NULL) < 0) {
593//        // Tcl_AppendResult(interp, "internal error:",
594//        //     " problem in av_set_parameters()", (char*)NULL);
595//        // return TCL_ERROR;
596//        return -1;
597//    }
598//
599//    if (url_fopen(&vidPtr->outFormatCtx->pb, fileName, URL_WRONLY) < 0) {
600//        // Tcl_AppendResult(interp, "can't open file \"", fileName,
601//        //     "\"", (char*)NULL);
602//        // return TCL_ERROR;
603//        return -1;
604//    }
605//    av_write_header(vidPtr->outFormatCtx);
606//
607//    vidPtr->pFrameYUV = avcodec_alloc_frame();
608//    vidPtr->pFrameRGB = avcodec_alloc_frame();
609//    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
610//        // Tcl_AppendResult(interp, "couldn't allocate frame space",
611//        //     " for file \"", fileName, "\"", (char*)NULL);
612//        // return TCL_ERROR;
613//        return -1;
614//    }
615//
616//    vidPtr->yuvw = vidPtr->outVideoStr->codec->width;
617//    vidPtr->yuvh = vidPtr->outVideoStr->codec->height;
618//    pixfmt = vidPtr->outVideoStr->codec->pix_fmt;
619//
620//    numBytes = avpicture_get_size(pixfmt, vidPtr->yuvw, vidPtr->yuvh);
621//    vidPtr->yuvbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
622//
623//    avpicture_fill((AVPicture*)vidPtr->pFrameYUV, vidPtr->yuvbuffer,
624//        pixfmt, vidPtr->yuvw, vidPtr->yuvh);
625//
626//
627//    if (strcpy(vidPtr->mode,"output") == NULL) {
628//        return -1;
629//    }
630//
631//    return 0;
632//}
633
634
635/*
636 * ------------------------------------------------------------------------
637 *  VideoTime2Frame()
638 *
639 *  Converts a time value (as defined by the FFMPEG package) into an
640 *  integer frame number in the range 0-end for the stream.
641 * ------------------------------------------------------------------------
642 */
643int
644VideoTime2Frame(streamPtr, tval)
645    AVStream *streamPtr;   /* scale values according to this stream */
646    int64_t tval;          /* time value as defined by stream */
647{
648    AVRational one, factor;
649    one.num = 1;
650    one.den = 1;
651    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
652    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;
653
654    if (tval > streamPtr->start_time) {
655        tval -= streamPtr->start_time;
656    } else {
657        tval = 0;
658    }
659    tval = av_rescale_q(tval, factor, one);
660    return (int)tval;
661}
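/*
 * Worked example (illustrative numbers only, not taken from any real
 * stream): with time_base = 1/90000 and r_frame_rate = 30/1, the factor
 * above is 30/90000.  A tval of 900000 ticks past start_time (10 seconds)
 * rescales to 900000 * 30 / 90000 = 300, i.e. frame 300 at 30 frames/sec.
 * VideoFrame2Time() below applies the inverse rescale.
 */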
662
663/*
664 * ------------------------------------------------------------------------
665 *  VideoFrame2Time()
666 *
667 *  Converts a frame number 0-end to the corresponding time value
668 *  (as defined by FFMPEG) for the given stream.
669 * ------------------------------------------------------------------------
670 */
671int64_t
672VideoFrame2Time(streamPtr, fval)
673    AVStream *streamPtr;   /* scale values according to this stream */
674    int fval;              /* frame value in the range 0-end */
675{
676    int64_t tval;
677    AVRational one, factor;
678    one.num = 1;
679    one.den = 1;
680
681    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
682    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;
683
684    tval = av_rescale_q((int64_t)fval, one, factor) + streamPtr->start_time;
685    return tval;
686}
687
688/*
689 * ------------------------------------------------------------------------
690 *  VideoNextFrame()
691 *
692 *  Decodes a series of video packets until the end of the frame
693 *  is reached.  Updates the frameNumber and atEnd to maintain the
694 *  current status for this video stream.
695 * ------------------------------------------------------------------------
696 */
697void
698VideoNextFrame(vidPtr)
699    VideoObj *vidPtr;   /* get a frame from this video stream */
700{
701    int frameFinished;
702    uint64_t pts;
703    AVCodecContext *vcodecCtx;
704    AVStream *vstreamPtr;
705    AVPacket packet;
706
707    if (vidPtr->pFormatCtx) {
708        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
709        vcodecCtx = vstreamPtr->codec;
710
711        /*
712         * Decode as many packets as necessary to get the next frame.
713         */
714        pts = 0;
715        while (1) {
716            if (av_read_frame(vidPtr->pFormatCtx, &packet) >= 0) {
717                if (packet.stream_index == vidPtr->videoStream) {
718                    /* save pts so we can grab it again in VideoAvGetBuffer */
719                    global_video_pkt_pts = packet.pts;
720
721#ifdef HAVE_AVCODEC_DECODE_VIDEO2
722                    // new avcodec decode video function
723                    avcodec_decode_video2(vcodecCtx, vidPtr->pFrameYUV,
724                        &frameFinished, &packet);
725#else
726                    // old avcodec decode video function
727                    avcodec_decode_video(vcodecCtx, vidPtr->pFrameYUV,
728                        &frameFinished, packet.data, packet.size);
729#endif
730                    if (packet.dts == AV_NOPTS_VALUE
731                          && vidPtr->pFrameYUV->opaque
732                          && *(uint64_t*)vidPtr->pFrameYUV->opaque != AV_NOPTS_VALUE) {
733                        pts = *(uint64_t*)vidPtr->pFrameYUV->opaque;
734                    } else if (packet.dts != AV_NOPTS_VALUE) {
735                        pts = packet.dts;
736                    } else {
737                        pts = 0;
738                    }
739
740                    if (frameFinished) {
741                        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, pts);
742                        break;
743                    }
744                }
745            } else {
746                vidPtr->atEnd = 1;
747                break;
748            }
749        }
750        av_free_packet(&packet);
751    }
752}
753
754/*
755 * ------------------------------------------------------------------------
756 *  These two routines are called whenever a frame buffer is allocated,
757 *  which means that we're starting a new frame.  Grab the global pts
758 *  counter and squirrel it away in the opaque slot of the frame.  This
759 *  will give us a pts value that we can trust later.
760 * ------------------------------------------------------------------------
761 */
762int
763VideoAvGetBuffer(c,fr)
764    AVCodecContext *c;  /* codec doing the frame decoding */
765    AVFrame *fr;        /* frame being decoded */
766{
767    int rval = avcodec_default_get_buffer(c, fr);
768    uint64_t *ptsPtr = av_malloc(sizeof(uint64_t));
769    *ptsPtr = global_video_pkt_pts;
770    fr->opaque = ptsPtr;
771    return rval;
772}
773
774void
775VideoAvReleaseBuffer(c,fr)
776    AVCodecContext *c;  /* codec doing the frame decoding */
777    AVFrame *fr;        /* frame being decoded */
778{
779    if (fr && fr->opaque) {
780        av_freep(&fr->opaque);
781    }
782    avcodec_default_release_buffer(c,fr);
783}
784
785/*
786 * ------------------------------------------------------------------------
787 *  VideoInit()
788 *
789 *  Implements the body of the _ffmpeg_init method in the "video" class.
790 *  Initializes the basic data structure and stores it in the _videodata
791 *  variable within the class.
792 * ------------------------------------------------------------------------
793 */
794VideoObj *
795VideoInit()
796{
797    /*
798     * Create an object to represent this video stream.
799     */
800
801    /* Register all codecs and formats */
802    av_register_all();
803
804    return VideoSetData();
805}
806
807/*
808 * ------------------------------------------------------------------------
809 *  VideoCleanup()
810 *
811 *  Implements the body of the _ffmpeg_cleanup method in the "video" class.
812 *  Accesses the data structure stored in the _videodata variable and
813 *  frees up the data.
814 * ------------------------------------------------------------------------
815 */
816int
817VideoCleanup(vidPtr)
818    VideoObj *vidPtr;
819{
820    /*
821     *  Nothing much to do here.  Just close the file in case it is
822     *  still open.  Don't free vidPtr itself; that is cleaned up by
823     *  the ByteArrayObj in the class data member.
824     */
825    int ret = 0;
826
827    ret -= VideoClose(vidPtr);
828
829    if (vidPtr != NULL) {
830        VideoFreeImgBuffer(vidPtr);
831        if (vidPtr->fileName != NULL) {
832            free(vidPtr->fileName);
833            vidPtr->fileName = NULL;
834        }
835        free(vidPtr);
836        vidPtr = NULL;
837// FIXME: need a test to make sure vidPtr is null after the function returns.
838    }
839
840    return ret;
841}
842
843/*
844 * ------------------------------------------------------------------------
845 *  VideoSize()
846 *
847 *  Implements the body of the "size" method in the "video" class.
848 *  Returns the size of each frame in this video stream as a list {w h}.
849 * ------------------------------------------------------------------------
850 */
851int
852VideoSize(vidPtr, width, height)
853    VideoObj *vidPtr;
854    int *width;
855    int *height;
856{
857    AVCodecContext *vcodecCtx;
858
859    if (vidPtr == NULL) {
860        return -1;
861    }
862
863    if (vidPtr->pFormatCtx == NULL) {
864        // "internal error: video stream is not open",
865        return -1;
866    }
867
868    if (vidPtr->pFormatCtx) {
869        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
870        if (width != NULL) {
871            *width = vcodecCtx->width;
872        }
873        if (height != NULL) {
874            *height = vcodecCtx->height;
875        }
876    }
877    return 0;
878}
879
880/*
881 * ------------------------------------------------------------------------
882 *  VideoGo()
883 *
884 *  Implements the body of the "go" method in the "video" class.
885 *  Advances by one or more frames, or seeks backward in the stream.
886 *  Handles the following syntax:
887 *    obj go next ...... go to next frame (same as +1)
888 *    obj go +n ........ advance by n frames
889 *    obj go -n ........ go back by n frames
890 *    obj go n ......... go to frame n
891 * ------------------------------------------------------------------------
892 */
893int
894VideoGoNext(vidPtr)
895    VideoObj *vidPtr;
896{
897    int nabs;
898
899    if (vidPtr == NULL) {
900        return -1;
901    }
902
903    nabs = vidPtr->frameNumber + 1;
904    return VideoGoToN(vidPtr, nabs);
905}
906
907int
908VideoGoPlusMinusN(vidPtr, n)
909    VideoObj *vidPtr;
910    int n;
911{
912    int nabs;
913
914    if (vidPtr == NULL) {
915        return -1;
916    }
917
918    nabs = vidPtr->frameNumber + n;
919    return VideoGoToN(vidPtr, nabs);
920}
921
922int
923VideoGoToN(vidPtr, n)
924    VideoObj *vidPtr;
925    int n;
926{
927    int nrel, nabs, seekFlags, gotframe, t;
928    int64_t nseek;
929    AVCodecContext *vcodecCtx;
930    AVStream *vstreamPtr;
931
932    if (vidPtr == NULL) {
933        return -1;
934    }
935
936    if (vidPtr->pFormatCtx == NULL) {
937        // "internal error: video stream is not open",
938        return -1;
939    }
940    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
941
942    nabs = n;
943
944    if (nabs < 0) {
945        nabs = 0;
946    }
947
948    if (nabs < vidPtr->frameNumber) {
949        seekFlags = AVSEEK_FLAG_BACKWARD;
950    } else {
951        seekFlags = 0;
952    }
953
954    /*
955     * If we're going to an absolute frame, or if we're going backward
956     * or too far forward, then seek the frame.
957     */
958    nrel = nabs-vidPtr->frameNumber;
959    if ((nrel > 50) || (seekFlags&AVSEEK_FLAG_BACKWARD)) {
960
961        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
962        nseek = VideoFrame2Time(vstreamPtr, nabs);
963        // not sure why it is checking against the number 100
964        if (nseek > 100) {
965            nseek -= 100;
966        } else {
967            nseek = 0;
968        }
969
970        /* first, seek the nearest reference frame for a good starting pt */
971        av_seek_frame(vidPtr->pFormatCtx, vidPtr->videoStream,
972            nseek, seekFlags);
973
974        // this doesn't seem to give me back the true frame number
975        // feels like it is more of a reverse of the VideoFrame2Time call
976        // because vidPtr->frameNumber always equals nabs
977        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, nseek);
978        vidPtr->atEnd = 0;
979
980        /* read the frame to figure out what the frame number is */
981        VideoNextFrame(vidPtr);
982
983        /* then, move forward until we reach the desired frame */
984        gotframe = 0;
985        while (vidPtr->frameNumber < nabs && !vidPtr->atEnd) {
986            VideoNextFrame(vidPtr);
987            gotframe = 1;
988        }
989
990        /* get at least one frame, unless we're done or at the beginning */
991        if (!gotframe && !vidPtr->atEnd) {
992            if (vidPtr->frameNumber > nabs) {
993                // we are probably at a key frame, just past
994                // the requested frame and need to seek backwards.
995                VideoGoToN(vidPtr,n);
996            } else {
997                VideoNextFrame(vidPtr);
998            }
999        }
1000    }
1001    else {
1002        while (nrel-- > 0) {
1003            VideoNextFrame(vidPtr);
1004        }
1005    }
1006
1007    /*
1008     * Send back the current frame number or "end" as the result.
1009     */
1010    return vidPtr->frameNumber;
1011}
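/*
 * A hypothetical dispatch sketch (not from the original source) showing
 * how the "go" syntax documented above maps onto these entry points;
 * the itcl layer that actually parses the syntax lives outside this file.
 *
 *     VideoGoNext(vid);              // obj go next
 *     VideoGoPlusMinusN(vid, +5);    // obj go +5
 *     VideoGoPlusMinusN(vid, -5);    // obj go -5
 *     VideoGoToN(vid, 250);          // obj go 250
 *
 * Each call returns the resulting current frame number (or -1 on error).
 */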
1012
1013/*
1014 * ------------------------------------------------------------------------
1015 *  VideoGet()
1016 *
1017 *  Implements the body of the "get" method in the "video" class.
1018 *  Returns information about the current frame via the following
1019 *  syntax:
1020 *    obj get start|position|end
1021 *    obj get <imageHandle>
1022 * ------------------------------------------------------------------------
1023 */
1024int
1025VideoGetImage(vidPtr, iw, ih, img, bufSize)
1026    VideoObj *vidPtr;
1027    int iw;
1028    int ih;
1029    void **img;
1030    int *bufSize;
1031{
1032
1033    int nframe, numBytes;
1034    char c, buffer[64];
1035    AVCodecContext *vcodecCtx;
1036    AVStream *vstreamPtr;
1037
1038    if (vidPtr == NULL) {
1039        return -1;
1040    }
1041
1042    if (VideoModeRead(vidPtr) != 0) {
1043        return -1;
1044    }
1045
1046    /*
1047    if (vidPtr->pFormatCtx) {
1048        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1049    } else {
1050        vcodecCtx = NULL;
1051    }
1052    */
1053
1054    if (vidPtr->pFormatCtx == NULL) {
1055        // vidPtr->pFormatCtx is NULL, video not open
1056        return -1;
1057    }
1058    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1059
1060    /*
1061     * Query the size for this photo and make sure that we have a
1062     * buffer of the appropriate size for software scaling and
1063     * format conversion.
1064     */
1065
1066    // if the user's desired size is less than 0,
1067    // use the default size
1068
1069    if (iw < 0) {
1070        iw = vcodecCtx->width;
1071    }
1072    if (ih < 0) {
1073        ih = vcodecCtx->height;
1074    }
1075
1076
1077    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
1078        if (vidPtr->rgbbuffer) {
1079            av_free(vidPtr->rgbbuffer);
1080            vidPtr->rgbbuffer = NULL;
1081        }
1082        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
1083        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
1084        vidPtr->rgbw = iw;
1085        vidPtr->rgbh = ih;
1086
1087        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
1088            PIX_FMT_RGB24, iw, ih);
1089
1090        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
1091            vcodecCtx->width, vcodecCtx->height, vcodecCtx->pix_fmt,
1092            iw, ih, PIX_FMT_RGB24, SWS_BICUBIC|SWS_PRINT_INFO, NULL, NULL, NULL);
1093    }
1094
1095    /*
1096     * Rescale the current frame to the desired size, and translate
1097     * into RGB format so we can copy into the destination image.
1098     */
1099    if (vidPtr->pFrameYUV && vidPtr->pFrameYUV->data[0]) {
1100        sws_scale(vidPtr->scalingCtx, (const uint8_t * const*)
1101            vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize,
1102            0, vcodecCtx->height,
1103            vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize);
1104
1105/*
1106        iblock.pixelPtr  = (unsigned char*)vidPtr->pFrameRGB->data[0];
1107        iblock.width     = iw;
1108        iblock.height    = ih;
1109        iblock.pitch     = vidPtr->pFrameRGB->linesize[0];
1110        iblock.pixelSize = 3;
1111        iblock.offset[0] = 0;
1112        iblock.offset[1] = 1;
1113        iblock.offset[2] = 2;
1114        iblock.offset[3] = 0;
1115
1116        Tk_PhotoPutBlock_NoComposite(img, &iblock, 0, 0, iw, ih);
1117*/
1118
1119        int bufsize = 0;
1120        if (vidPtr->img == NULL) {
1121            VideoAllocImgBuffer(vidPtr,iw,ih);
1122        } else {
1123            if ((vidPtr->imgWidth != iw) || (vidPtr->imgHeight != ih)) {
1124                // new height or width
1125                // resize the image buffer
1126                free(vidPtr->img);
1127                VideoAllocImgBuffer(vidPtr,iw,ih);
1128            }
1129        }
1130
1131        // Write pixel data
1132        memcpy(vidPtr->img+vidPtr->imgHeaderLen,
1133            vidPtr->pFrameRGB->data[0],
1134            vidPtr->imgWidth*3*vidPtr->imgHeight);
1135    }
1136    *img = vidPtr->img;
1137    *bufSize = (vidPtr->imgWidth*3*vidPtr->imgHeight) + vidPtr->imgHeaderLen;
1138    return 0;
1139}
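/*
 * A hypothetical consumer sketch (not part of the original source),
 * assuming <stdio.h>: the buffer returned through *img is a complete
 * binary PPM ("P6") image -- header plus packed RGB -- so it can be
 * written straight to a file.  The file name is made up for
 * illustration.  Note that *img points at vidPtr->img, which is owned
 * and later freed by the video object, so the caller must not free() it.
 *
 *     void *img = NULL;
 *     int bufSize = 0;
 *     if (VideoGetImage(vid, -1, -1, &img, &bufSize) == 0) {
 *         FILE *fp = fopen("frame.ppm", "wb");
 *         if (fp != NULL) {
 *             fwrite(img, 1, (size_t)bufSize, fp);
 *             fclose(fp);
 *         }
 *     }
 */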
1140
1141int
1142VideoFrameRate (vidPtr, fr)
1143    VideoObj *vidPtr;
1144    double *fr;
1145{
1146    AVStream *vstreamPtr;
1147
1148    if (vidPtr == NULL) {
1149        return -1;
1150    }
1151
1152    if (fr == NULL) {
1153        return -1;
1154    }
1155
1156    if (vidPtr->pFormatCtx == NULL) {
1157        // vidPtr->pFormatCtx is NULL, video not open
1158        return -1;
1159    }
1160    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1161
1162    // http://trac.handbrake.fr/browser/trunk/libhb/decavcodec.c?rev=1490#L684
1163    // there seems to be some controversy over what structure holds
1164    // the correct frame rate information for different video codecs.
1165    // for now we will use the stream's r_frame_rate.
1166    // from the above post, it looks like this value can be interpreted
1167    // as frames per second.
1168    *fr = av_q2d(vstreamPtr->r_frame_rate);
1169
1170    return 0;
1171}
1172
1173int
1174VideoFileName (vidPtr, fname)
1175    VideoObj *vidPtr;
1176    const char **fname;
1177{
1178    AVStream *vstreamPtr;
1179
1180    if (vidPtr == NULL) {
1181        return -1;
1182    }
1183
1184    if (fname == NULL) {
1185        return -1;
1186    }
1187
1188    if (vidPtr->pFormatCtx == NULL) {
1189        // vidPtr->pFormatCtx is NULL, video not open
1190        return -1;
1191    }
1192
1193    *fname = vidPtr->fileName;
1194
1195    return 0;
1196}
1197
1198int
1199VideoPixelAspectRatio (vidPtr, num, den)
1200    VideoObj *vidPtr;
1201    int *num;
1202    int *den;
1203{
1204    AVCodecContext *vcodecCtx;
1205
1206    if (vidPtr == NULL) {
1207        return -1;
1208    }
1209
1210    if ((num == NULL) || (den == NULL)) {
1211        return -1;
1212    }
1213
1214    if (vidPtr->pFormatCtx == NULL) {
1215        // vidPtr->pFormatCtx is NULL, video not open
1216        return -1;
1217    }
1218
1219    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1220
1221    *num = vcodecCtx->sample_aspect_ratio.num;
1222    *den = vcodecCtx->sample_aspect_ratio.den;
1223
1224    return 0;
1225}
1226
1227int
1228VideoDisplayAspectRatio (vidPtr, num, den)
1229    VideoObj *vidPtr;
1230    int *num;
1231    int *den;
1232{
1233    AVCodecContext *vcodecCtx;
1234    int width = 0;
1235    int height = 0;
1236    int64_t gcd = 0;
1237    int64_t gcd2 = 0;
1238
1239    if (vidPtr == NULL) {
1240        return -1;
1241    }
1242
1243    if ((num == NULL) || (den == NULL)) {
1244        return -1;
1245    }
1246
1247    if (vidPtr->pFormatCtx == NULL) {
1248        // vidPtr->pFormatCtx is NULL, video not open
1249        return -1;
1250    }
1251
1252    VideoSize(vidPtr, &width, &height);
1253    VideoPixelAspectRatio(vidPtr, num, den);
1254
1255    width = (*num)*width;
1256    height = (*den)*height;
1257#ifdef FFMPEG_COMMON_H
1258    // old gcd function
1259    gcd = ff_gcd(FFABS(width), FFABS(height));
1260#else
1261    // new gcd function
1262    gcd = av_gcd(FFABS(width), FFABS(height));
1263#endif
1264
1265
1266    *num = width/gcd;
1267    *den = height/gcd;
1268
1269    if (*den == 0) {
1270        *num = 0;
1271        *den = 1;
1272    }
1273
1274    return 0;
1275}
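/*
 * Worked example (illustrative numbers only): for a hypothetical 720x480
 * stream with a 32:27 pixel aspect ratio, width*num = 720*32 = 23040 and
 * height*den = 480*27 = 12960; gcd(23040, 12960) = 1440, so the display
 * aspect ratio comes out as 23040/1440 : 12960/1440 = 16:9.
 */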
1276
1277int
1278VideoAllocImgBuffer(vidPtr, width, height)
1279    VideoObj *vidPtr;
1280    int width;
1281    int height;
1282{
1283
1284    char header[64];
1285    int headerLen = 0;
1286    int bufsize = 0;
1287
1288    sprintf(header,"P6\n%d %d\n255\n", width, height);
1289    headerLen = strlen(header);
1290    bufsize = headerLen + (width*3*height);
1291    vidPtr->img = (void*) malloc(bufsize);
1292    vidPtr->imgHeaderLen = headerLen;
1293    vidPtr->imgWidth = width;
1294    vidPtr->imgHeight = height;
1295    memcpy(vidPtr->img,header,headerLen);
1296
1297    return 0;
1298}
1299
1300int
1301VideoFreeImgBuffer(vidPtr)
1302    VideoObj *vidPtr;
1303{
1304    if ((vidPtr != NULL) && (vidPtr->img != NULL)) {
1305        free(vidPtr->img);
1306        vidPtr->img = NULL;
1307    }
1308    return 0;
1309}
1310
1311int
1312VideoGetPositionCur(vidPtr, pos)
1313    VideoObj *vidPtr;      /* video object to act on */
1314    int *pos;
1315{
1316    int fnum = -1;
1317    AVStream *vstreamPtr;
1318
1319    if (vidPtr == NULL) {
1320        return -1;
1321    }
1322
1323    if (pos == NULL) {
1324        return -1;
1325    }
1326
1327    if (VideoModeRead(vidPtr) != 0) {
1328        return -1;
1329    }
1330
1331    if (vidPtr->pFormatCtx) {
1332        fnum = vidPtr->frameNumber;
1333    }
1334
1335    *pos = fnum;
1336    return 0;
1337}
1338
1339int
1340VideoGetPositionEnd(vidPtr, pos)
1341    VideoObj *vidPtr;      /* video object to act on */
1342    int *pos;
1343{
1344    AVStream *vstreamPtr;
1345
1346    if (vidPtr == NULL) {
1347        return -1;
1348    }
1349
1350    if (pos == NULL) {
1351        return -1;
1352    }
1353
1354    if (VideoModeRead(vidPtr) != 0) {
1355        return -1;
1356    }
1357
1358    *pos = vidPtr->lastframe;
1359    return 0;
1360}
1361
1362// FIXME: get this function working
1363///*
1364// * ------------------------------------------------------------------------
1365// *  VideoPut()
1366// *
1367// *  Implements the body of the "put" method in the "video" class.
1368// *  Stores a single frame into the video stream:
1369// *    obj put <imageHandle>
1370// * ------------------------------------------------------------------------
1371// */
1372//int
1373//VideoPut(cdata, interp, argc, argv)
1374//    ClientData cdata;      /* not used */
1375//    Tcl_Interp *interp;    /* interpreter */
1376//    int argc;              /* number of arguments */
1377//    CONST84 char* argv[];  /* argument strings */
1378//{
1379//    VideoObj *vidPtr;
1380//    int iw, ih, numBytes, roffs, goffs, boffs;
1381//    char buffer[64];
1382//    unsigned char* photodata;
1383//    uint8_t* rgbdata;
1384//    Tk_PhotoHandle img;
1385//    Tk_PhotoImageBlock iblock;
1386//    AVCodecContext *codecCtx;
1387//
1388//    if (VideoGetData(interp, &vidPtr) != TCL_OK) {
1389//        return TCL_ERROR;
1390//    }
1391//
1392//    if (argc != 2) {
1393//        Tcl_AppendResult(interp, "wrong # args: should be \"", argv[0],
1394//            " image\"", (char*)NULL);
1395//        return TCL_ERROR;
1396//    }
1397//
1398//    /*
1399//     * Get the name of the image and copy from it.
1400//     */
1401//    img = Tk_FindPhoto(interp, argv[1]);
1402//    if (img == NULL) {
1403//        Tcl_AppendResult(interp, "bad value \"", argv[1],
1404//            "\": expected photo image", (char*)NULL);
1405//        return TCL_ERROR;
1406//    }
1407//
1408//    /*
1409//     * Query the size for this photo and make sure that we have a
1410//     * buffer of the appropriate size for software scaling and
1411//     * format conversion.
1412//     */
1413//    Tk_PhotoGetImage(img, &iblock);
1414//    Tk_PhotoGetSize(img, &iw, &ih);
1415//
1416//    if (VideoModeWrite(interp, iw, ih) != TCL_OK) {
1417//        return TCL_ERROR;
1418//    }
1419//    codecCtx = vidPtr->outVideoStr->codec;
1420//
1421//    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
1422//        if (vidPtr->rgbbuffer) {
1423//            av_free(vidPtr->rgbbuffer);
1424//            vidPtr->rgbbuffer = NULL;
1425//        }
1426//        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
1427//        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
1428//        vidPtr->rgbw = iw;
1429//        vidPtr->rgbh = ih;
1430//
1431//        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
1432//            PIX_FMT_RGB24, iw, ih);
1433//
1434//        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
1435//            iw, ih, PIX_FMT_RGB24,
1436//            codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
1437//            SWS_BICUBIC, NULL, NULL, NULL);
1438//    }
1439//
1440//    /*
1441//     * Copy the data from the Tk photo block into the RGB frame.
1442//     */
1443//    roffs = iblock.offset[0];
1444//    goffs = iblock.offset[1];
1445//    boffs = iblock.offset[2];
1446//
1447//    for (ih=0; ih < iblock.height; ih++) {
1448//        rgbdata = vidPtr->pFrameRGB->data[0] + ih*vidPtr->pFrameRGB->linesize[0];
1449//        photodata = iblock.pixelPtr + ih*iblock.pitch;
1450//        for (iw=0; iw < iblock.width; iw++) {
1451//            rgbdata[0] = photodata[roffs];
1452//            rgbdata[1] = photodata[goffs];
1453//            rgbdata[2] = photodata[boffs];
1454//            rgbdata += 3;
1455//            photodata += iblock.pixelSize;
1456//        }
1457//    }
1458//
1459//    /*
1460//     * Rescale the current frame to the desired size, and translate
1461//     * from RGB to YUV so we can give the frame to the codec.
1462//     */
1463//    sws_scale(vidPtr->scalingCtx,
1464//        vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize,
1465//        0, ih,
1466//        vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize);
1467//
1468//    numBytes = VideoWriteFrame(vidPtr, vidPtr->pFrameYUV);
1469//    if (numBytes < 0) {
1470//        Tcl_AppendResult(interp, "error in av_write_frame()", (char*)NULL);
1471//        return TCL_ERROR;
1472//    }
1473//    sprintf(buffer, "frame %d (%d bytes)", vidPtr->frameNumber++, numBytes);
1474//    Tcl_SetResult(interp, buffer, TCL_VOLATILE);
1475//    return TCL_OK;
1476//}
1477
1478
1479/*
1480 * ------------------------------------------------------------------------
1481 *  VideoWriteFrame()
1482 *
1483 *  Used internally to write a single frame out to the output stream.
1484 *  Returns the number of bytes written to the frame, or -1 if an error
1485 *  occurred.
1486 * ------------------------------------------------------------------------
1487 */
1488int
1489VideoWriteFrame(vidPtr, framePtr)
1490    VideoObj *vidPtr;      /* video object being updated */
1491    AVFrame *framePtr;     /* picture frame being written out */
1492{
1493    int numBytes;
1494    AVCodecContext *codecCtx;
1495    AVPacket pkt;
1496
1497#define OUTBUF_SIZE 500000
1498    uint8_t outbuf[OUTBUF_SIZE];
1499
1500    codecCtx = vidPtr->outVideoStr->codec;
1501    numBytes = avcodec_encode_video(codecCtx, outbuf, OUTBUF_SIZE, framePtr);
1502
1503    if (numBytes > 0) {
1504        av_init_packet(&pkt);
1505
1506        if (codecCtx->coded_frame->pts != AV_NOPTS_VALUE) {
1507            pkt.pts = av_rescale_q(codecCtx->coded_frame->pts,
1508                codecCtx->time_base,
1509                vidPtr->outVideoStr->time_base);
1510        }
1511        if (codecCtx->coded_frame->key_frame) {
1512            pkt.flags |= AV_PKT_FLAG_KEY;
1513        }
1514        pkt.stream_index = vidPtr->outVideoStr->index;
1515        pkt.data = outbuf;
1516        pkt.size = numBytes;
1517
1518        /* write the compressed frame in the media file */
1519        if (av_write_frame(vidPtr->outFormatCtx, &pkt) != 0) {
1520            return -1;
1521        }
1522    }
1523    return numBytes;
1524}
1525
1526/*
1527 * ------------------------------------------------------------------------
1528 *  VideoTransform()
1529 *
1530 *  Implements the body of the "transform" method in the "video" class.
1531 *  Translates one value into another--times into frames, etc.  Handles
1532 *  the following syntax:
1533 *    obj transform frames2duration <frames>
1534 *    obj transform duration2frames <duration>
1535 * ------------------------------------------------------------------------
1536 */
1537double
1538VideoTransformFrames2Duration(vidPtr, frame)
1539    VideoObj *vidPtr;
1540    int frame;
1541{
1542    double duration;
1543    AVCodecContext *vcodecCtx;
1544    AVStream *vstreamPtr;
1545    AVRational hundred;
1546    int64_t tval;
1547
1548    hundred.num = 100;
1549    hundred.den = 1;
1550
1551    if (vidPtr == NULL) {
1552        return -1;
1553    }
1554
1555    if (vidPtr->pFormatCtx == NULL) {
1556//        Tcl_AppendResult(interp, "can't compute transformations:",
1557//            " stream not opened", (char*)NULL);
1558//        return TCL_ERROR;
1559        return -1;
1560    }
1561
1562    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1563    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1564
1565    tval = av_rescale_q((int64_t)frame, hundred, vstreamPtr->r_frame_rate);
1566    duration = 0.01*tval;
1567
1568    return duration;
1569}
1570
1571int
1572VideoTransformDuration2Frames(vidPtr, duration)
1573    VideoObj *vidPtr;
1574    double duration;
1575{
1576    int frames;
1577    AVCodecContext *vcodecCtx;
1578    AVStream *vstreamPtr;
1579    AVRational hundred;
1580    int64_t tval;
1581
1582    hundred.num = 100;
1583    hundred.den = 1;
1584
1585    if (vidPtr == NULL) {
1586        return -1;
1587    }
1588    if (vidPtr->pFormatCtx == NULL) {
1589//        Tcl_AppendResult(interp, "can't compute transformations:",
1590//            " stream not opened", (char*)NULL);
1591//        return TCL_ERROR;
1592        return -1;
1593    }
1594
1595    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1596    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1597
1598    tval = (int64_t)(duration*100);
1599    frames = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
1600    // check above for overflow
1601    // tval = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
1602    // sprintf(buffer, "%lld", tval);
1603
1604    return frames;
1605}
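/*
 * Worked example (illustrative numbers only): for a hypothetical stream
 * with r_frame_rate = 25/1, frame 100 rescales through
 *     av_rescale_q(100, 100/1, 25/1) = 100*100/25 = 400,
 * so frames2duration reports 0.01*400 = 4.0 seconds; the inverse,
 *     av_rescale_q(400, 25/1, 100/1) = 400*25/100 = 100,
 * recovers 100 frames from a 4.0 second duration.
 */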
1606
1607/*
1608 * ------------------------------------------------------------------------
1609 *  VideoClose()
1610 *
1611 *  Implements the body of the _ffmpeg_close method in the "video" class.
1612 *  Closes any file opened previously by the open methods for read/write.
1613 *  If nothing is open, this does nothing.
1614 * ------------------------------------------------------------------------
1615 */
1616int
1617VideoClose(vidPtr)
1618    VideoObj *vidPtr;
1619{
1620    AVCodecContext *vcodecCtx;
1621    int i;
1622
1623    if (vidPtr == NULL) {
1624        return -1;
1625    }
1626
1627    if (vidPtr->yuvbuffer) {
1628        av_free(vidPtr->yuvbuffer);
1629        vidPtr->yuvbuffer = NULL;
1630        vidPtr->yuvw = 0;
1631        vidPtr->yuvh = 0;
1632    }
1633    if (vidPtr->pFrameYUV) {
1634        av_free(vidPtr->pFrameYUV);
1635        vidPtr->pFrameYUV = NULL;
1636    }
1637
1638    if (vidPtr->rgbbuffer) {
1639        av_free(vidPtr->rgbbuffer);
1640        vidPtr->rgbbuffer = NULL;
1641        vidPtr->rgbw = 0;
1642        vidPtr->rgbh = 0;
1643    }
1644    if (vidPtr->pFrameRGB) {
1645        av_free(vidPtr->pFrameRGB);
1646        vidPtr->pFrameRGB = NULL;
1647    }
1648
1649    if (vidPtr->scalingCtx) {
1650        sws_freeContext(vidPtr->scalingCtx);
1651        vidPtr->scalingCtx = NULL;
1652    }
1653    if (vidPtr->pFormatCtx && vidPtr->videoStream >= 0) {
1654        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1655        if (vcodecCtx) {
1656            avcodec_close(vcodecCtx);
1657        }
1658    }
1659    if (vidPtr->pFormatCtx) {
1660        av_close_input_file(vidPtr->pFormatCtx);
1661        vidPtr->pFormatCtx = NULL;
1662    }
1663
1664    if (vidPtr->outFormatCtx) {
1665        while (VideoWriteFrame(vidPtr, NULL) > 0)
1666            ; /* write out any remaining frames */
1667
1668        av_write_trailer(vidPtr->outFormatCtx);
1669
1670        for (i=0; i < vidPtr->outFormatCtx->nb_streams; i++) {
1671            avcodec_close(vidPtr->outFormatCtx->streams[i]->codec);
1672            av_freep(&vidPtr->outFormatCtx->streams[i]->codec);
1673            av_freep(&vidPtr->outFormatCtx->streams[i]);
1674        }
1675
1676        if (vidPtr->outFormatCtx->pb) {
1677            avio_close(vidPtr->outFormatCtx->pb);
1678        }
1679
1680        av_free(vidPtr->outFormatCtx);
1681        vidPtr->outFormatCtx = NULL;
1682    }
1683
1684    /* reset the mode to null */
1685    *vidPtr->mode = '\0';
1686
1687    return 0;
1688}