source: trunk/src/objects/RpVideo.c @ 1983

Last change on this file was r1983, checked in by dkearney, 14 years ago:

checking location of ffmpeg include files

File size: 42.0 KB
/*
 * ----------------------------------------------------------------------
 *  TkFFMPEG:  video
 *
 *  These routines support the methods in the "video" class, which is
 *  a video stream that can be read from or written to.  The class
 *  itself is defined in itcl, but when methods are called, execution
 *  jumps down to this level.
 * ======================================================================
 *  AUTHOR:  Michael McLennan, Purdue University
 *  Copyright (c) 2004-2008  Purdue Research Foundation
 *
 *  See the file "license.terms" for information on usage and
 *  redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
 * ======================================================================
 */

#include <stdlib.h>
#include <string.h>

#include "config.h"

#ifdef HAVE_FFMPEG_AVCODEC_H
# include <ffmpeg/avcodec.h>
#endif

#ifdef HAVE_LIBAVCODEC_AVCODEC_H
# include <libavcodec/avcodec.h>
#endif

#ifdef HAVE_FFMPEG_AVFORMAT_H
# include <ffmpeg/avformat.h>
#endif

#ifdef HAVE_LIBAVFORMAT_AVFORMAT_H
# include <libavformat/avformat.h>
#endif

#ifdef HAVE_FFMPEG_AVUTIL_H
# include <ffmpeg/avutil.h>
#endif

#ifdef HAVE_LIBAVUTIL_AVUTIL_H
# include <libavutil/avutil.h>
#endif

#ifdef HAVE_FFMPEG_SWSCALE_H
# include <ffmpeg/swscale.h>
#endif

#ifdef HAVE_LIBSWSCALE_SWSCALE_H
# include <libswscale/swscale.h>
#endif

#include "RpVideo.h"

/*
 * Each video object is represented by the following data:
 */
struct VideoObjRec {
    int magic;

    /* video input */
    AVFormatContext *pFormatCtx;
    int videoStream;
    int frameNumber;
    int atEnd;

    /* video output */
    AVFormatContext *outFormatCtx;
    AVStream *outVideoStr;

    /* used for both input/output */
    AVFrame *pFrameYUV;
    uint8_t *yuvbuffer;
    int yuvw, yuvh;
    AVFrame *pFrameRGB;
    uint8_t *rgbbuffer;
    int rgbw, rgbh;
    struct SwsContext *scalingCtx;

    char *fileName;
    char mode[64];
    char fmt[64];

    /* tmp buffer to give images back to user */
    void *img;
    int imgHeaderLen;
    int imgWidth;
    int imgHeight;
};

/* magic stamp for VideoObj, to make sure data is valid */
#define VIDEO_OBJ_MAGIC 0x0102abcd

static VideoObj *VideoSetData ();

static int VideoModeRead (VideoObj *vidPtr);
// static int VideoModeWrite (Tcl_Interp *interp, int w, int h);

static int VideoTime2Frame (AVStream *streamPtr, int64_t tval);
static int64_t VideoFrame2Time (AVStream *streamPtr, int fval);
static void VideoNextFrame (VideoObj *vidPtr);

uint64_t global_video_pkt_pts = AV_NOPTS_VALUE;
static int VideoAvGetBuffer (struct AVCodecContext *c, AVFrame *fr);
static void VideoAvReleaseBuffer (struct AVCodecContext *c, AVFrame *fr);
static int VideoWriteFrame (VideoObj *vidPtr, AVFrame *framePtr);

static int VideoAllocImgBuffer (VideoObj *vidPtr, int width, int height);
static int VideoFreeImgBuffer (VideoObj *vidPtr);
static double VideoTransformFrames2Duration (VideoObj *vidPtr, int frame);
static int VideoTransformDuration2Frames (VideoObj *vidPtr, double duration);

/*
 * ------------------------------------------------------------------------
 *  VideoSetData()
 *
 *  Saves VideoObj data in the "_videodata" slot in the current object
 *  context.  The data can be retrieved later by calling VideoGetData().
 * ------------------------------------------------------------------------
 */
VideoObj *
VideoSetData()
{
    VideoObj* vid = NULL;

    vid = malloc(sizeof(VideoObj));

    if (vid == NULL) {
        return NULL;
    }

    vid->magic = VIDEO_OBJ_MAGIC;
    vid->pFormatCtx = NULL;
    vid->videoStream = 0;
    vid->frameNumber = -1;
    vid->atEnd = 0;

    vid->outFormatCtx = NULL;
    vid->outVideoStr = NULL;

    vid->pFrameYUV = NULL;
    vid->yuvbuffer = NULL;
    vid->yuvw = 0;
    vid->yuvh = 0;
    vid->pFrameRGB = NULL;
    vid->rgbbuffer = NULL;
    vid->rgbw = 0;
    vid->rgbh = 0;
    vid->scalingCtx = NULL;

    vid->fileName = NULL;
    *vid->mode = '\0';
    *vid->fmt = '\0';

    vid->img = NULL;
    vid->imgHeaderLen = 0;
    vid->imgWidth = 0;
    vid->imgHeight = 0;

    return vid;
}

int
VideoOpenFile(vidPtr, fileName, mode)
    VideoObj *vidPtr;
    const char *fileName;
    const char *mode;
{
    int fnlen = 0;
    int err = 0;

    if (fileName == NULL) {
        // missing value for fileName
        // return TCL_ERROR;
        return -1;
    }
    if (*fileName == '\0') {
        /* no file name set -- do nothing */
        return 0;
    }

    fnlen = strlen(fileName);
    if (vidPtr->fileName != NULL) {
        free(vidPtr->fileName);
    }
    vidPtr->fileName = (char *) malloc((fnlen+1)*sizeof(char));
    if (vidPtr->fileName == NULL) {
        // trouble mallocing space
        return -1;
    }
    strncpy(vidPtr->fileName,fileName,fnlen);
    vidPtr->fileName[fnlen] = '\0';

    // FIXME: remove this constraint when we support
    // the modes: r, r+, w, w+, a, a+, b and combinations
    if (strlen(mode) > 1) {
        return -1;
    }

    if (*mode == 'r') {
        /* we're now in "input" mode */
        err = VideoModeRead(vidPtr);
        if (err) {
            return err;
        }
    } else if (*mode == 'w') {
        /* we're now in "output" mode */
        // VideoModeWrite(vidPtr);
    } else {
        // unrecognized mode
        return -1;
    }

    return 0;
}
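
/*
 * Illustrative sketch (not part of the library): one plausible way to
 * drive this C API directly from a caller, based only on the entry
 * points defined in this file.  The file name "movie.mpg" and the
 * 320x240 target size are made-up values for the example.
 *
 *    VideoObj *vid = VideoInitCmd();          // registers codecs, allocates object
 *    if (vid == NULL || VideoOpenFile(vid, "movie.mpg", "r") != 0) {
 *        // handle error
 *    }
 *    int w, h, bufSize;
 *    void *img = NULL;
 *    VideoSizeCmd(vid, &w, &h);                     // native frame size
 *    VideoGoToN(vid, 0);                            // decode the first frame
 *    VideoGetImage(vid, 320, 240, &img, &bufSize);  // scaled PPM image in img
 *    VideoCleanupCmd(vid);                          // closes the file, frees buffers
 */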

/*
 * ------------------------------------------------------------------------
 *  VideoModeRead()
 *
 *  Tries to force this video stream into "read" mode.  If the current
 *  mode is "", then the -file is opened for reading.  If the current
 *  mode is "write", then the stream is closed and then opened for
 *  reading.  If the current mode is "read", then this call does nothing.
 *  Usually called just before a "read" operation (get, go, etc.) is
 *  performed.
 *
 *  Returns 0 if successful, and a negative error code if there is a
 *  problem opening or closing the stream.
 *
 *  Error Codes
 *  -1      invalid video object, or couldn't close the previous stream
 *  -2      missing file name
 *  -3      couldn't open file
 *  -4      couldn't find streams in file
 *  -5      couldn't find video stream in file
 *  -6      unsupported codec for file
 *  -7      couldn't open codec for file
 *  -8      couldn't allocate frame space
 *  -9      strcpy input to vidPtr->mode failed
 * ------------------------------------------------------------------------
 */
int
VideoModeRead(vidPtr)
    VideoObj *vidPtr;
{
    char c, buffer[64]; int i;
    const char *fmt;
    AVCodecContext *vcodecCtx;
    AVCodec *vcodec;

    if (vidPtr == NULL) {
        return -1;
    }

    if (vidPtr->fileName == NULL) {
        // Tcl_AppendResult(interp, "missing value for -file", (char*)NULL);
        // return TCL_ERROR;

        // missing file name
        return -2;
    }
    if (*vidPtr->fileName == '\0') {
        /* no file name set -- do nothing */
        return 0;
    }

    if (strcmp(vidPtr->mode,"input") == 0) {
        return 0;
    } else if (strcmp(vidPtr->mode,"output") == 0) {
        if (VideoClose(vidPtr) != 0) {
            return -1;
        }
    }

    /*
     * Open the video stream from that file.
     */
    if (av_open_input_file(&vidPtr->pFormatCtx, vidPtr->fileName,
            NULL, 0, NULL) != 0) {
        // Tcl_AppendResult(interp, "couldn't open file \"",
        //     fileName, "\"", (char*)NULL);
        // return TCL_ERROR;

        // couldn't open file
        return -3;
    }
    if (av_find_stream_info(vidPtr->pFormatCtx) < 0) {
        // Tcl_AppendResult(interp, "couldn't find streams in file \"",
        //     fileName, "\"", (char*)NULL);
        // return TCL_ERROR;

        // couldn't find streams in file
        return -4;
    }

    /*
     * Search for a video stream and its codec.
     */
    vidPtr->videoStream = -1;
    for (i=0; i < vidPtr->pFormatCtx->nb_streams; i++) {
        if (vidPtr->pFormatCtx->streams[i]->codec->codec_type
                == CODEC_TYPE_VIDEO) {
            vidPtr->videoStream = i;
            break;
        }
    }
    if (vidPtr->videoStream < 0) {
        // Tcl_AppendResult(interp, "couldn't find video stream in file \"",
        //     fileName, "\"", (char*)NULL);
        // return TCL_ERROR;

        // couldn't find video stream in file
        return -5;
    }

    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
    vcodec = avcodec_find_decoder(vcodecCtx->codec_id);
    if (vcodec == NULL) {
        // Tcl_AppendResult(interp, "unsupported codec for file \"",
        //     fileName, "\"", (char*)NULL);
        // return TCL_ERROR;

        // unsupported codec for file
        return -6;
    }
    if (avcodec_open(vcodecCtx, vcodec) < 0) {
        // Tcl_AppendResult(interp, "couldn't open codec for file \"",
        //     fileName, "\"", (char*)NULL);
        // return TCL_ERROR;

        // couldn't open codec for file
        return -7;
    }

    vcodecCtx->get_buffer = VideoAvGetBuffer;
    vcodecCtx->release_buffer = VideoAvReleaseBuffer;

    vidPtr->pFrameYUV = avcodec_alloc_frame();
    vidPtr->pFrameRGB = avcodec_alloc_frame();
    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
        // Tcl_AppendResult(interp, "couldn't allocate frame space",
        //     " for file \"", fileName, "\"", (char*)NULL);
        // return TCL_ERROR;

        // couldn't allocate frame space
        return -8;
    }

    /* save the name of the codec as the -format option */
    fmt = "?";
    if (vcodecCtx->codec && vcodecCtx->codec->name) {
        fmt = vcodecCtx->codec->name;
        strcpy(vidPtr->fmt,fmt);
    }
//
//    sprintf(buffer, "%d", vcodecCtx->width);
//    if (Tcl_SetVar(interp, "width", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
//        return TCL_ERROR;
//    }
//    sprintf(buffer, "%d", vcodecCtx->height);
//    if (Tcl_SetVar(interp, "height", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
//        return TCL_ERROR;
//    }
//

    if (strcpy(vidPtr->mode,"input") == NULL) {
        // strcpy input to vidPtr->mode failed
        return -9;
    }

    return 0;
}


// FIXME: get this function working.
///*
// * ------------------------------------------------------------------------
// *  VideoModeWrite()
// *
// *  Tries to force this video stream into "write" mode.  If the current
// *  mode is "", then the -file is opened for writing.  If the current
// *  mode is "read", then the stream is closed and then opened for
// *  writing.  If the current mode is "write", then this call does nothing.
// *  Usually called just before a "write" operation (put, etc.) is
// *  performed.
// *
// *  Returns TCL_OK if successful, and TCL_ERROR if there is a problem
// *  opening or closing the stream.
// * ------------------------------------------------------------------------
// */
//int
//VideoModeWrite(vidPtr, fileName, width, height, fmt)
//    VideoObj *vidPtr;      /* video object to write */
//    CONST84 char *fileName;
//    int width;             /* native width of each frame */
//    int height;            /* native height of each frame */
//    CONST84 char *fmt;
//{
//    char c;
//    int numBytes, pixfmt, iwd, iht;
//    CONST84 char *size;
//    AVCodecContext *codecCtx;
//    AVCodec *vcodec;
//
//    if (vidPtr == NULL) {
//        return -1;
//    }
//
//    /*
//     * Get the current mode.  If we're already in "output", then we're
//     * done.  Otherwise, close the stream if necessary and prepare to
//     * open the file for write.
//     */
//    if (vidPtr->mode == NULL) {
//        return -1;
//    }
//
//    c = *vidPtr->mode;
//    if (c == 'o' && strcmp(vidPtr->mode,"output") == 0) {
//        return 0;
//    }
//    else if (c == 'i' && strcmp(vidPtr->mode,"input") == 0) {
//        if (VideoClose(vidPtr) != 0) {
//            return -1;
//        }
//    }
//
//    /*
//     * Get the file name from the -file variable.
//     */
//    if ((fileName == NULL) || (*fileName == '\0')) {
//        /* no file name set -- do nothing */
//        return 0;
//    }
//
//    /*
//     * Get the -width and -height of each frame.  If these are set
//     * to 0 (default), then use the incoming width/height from an
//     * actual frame.
//     */
//     iwd = width;
//     iht = height;
//
//    /*
//     * Get the format argument.
//     */
//    if (fmt == NULL) {
////        Tcl_AppendResult(interp, "missing value for -format", (char*)NULL);
////        return TCL_ERROR;
//        return -1;
//    }
//    if (strcmp(fmt,"mpeg1video") == 0) {
//        vidPtr->outFormatCtx = av_alloc_format_context();
//        vidPtr->outFormatCtx->oformat = guess_format("mpeg", NULL, NULL);
//    }
//    else if (strcmp(fmt,"flv") == 0) {
//        vidPtr->outFormatCtx = av_alloc_format_context();
//        vidPtr->outFormatCtx->oformat = guess_format("flv", NULL, NULL);
//    }
//    else if (strcmp(fmt,"mov") == 0) {
//        vidPtr->outFormatCtx = av_alloc_format_context();
//        vidPtr->outFormatCtx->oformat = guess_format("mov", NULL, NULL);
//        /* MOV normally uses MPEG4, but that may not be installed */
//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
//    }
//    else if (strcmp(fmt,"avi") == 0) {
//        vidPtr->outFormatCtx = av_alloc_format_context();
//        vidPtr->outFormatCtx->oformat = guess_format("avi", NULL, NULL);
//        /* AVI normally uses MPEG4, but that may not be installed */
//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
//    }
//    else {
////        Tcl_AppendResult(interp, "bad format \"", fmt, "\": should be",
////            " avi, flv, mpeg1video, mov", (char*)NULL);
////        return TCL_ERROR;
//        return -1;
//    }
//
//    /*
//     * Open the video stream for writing.
//     */
//    strncpy(vidPtr->outFormatCtx->filename, fileName,
//        sizeof(vidPtr->outFormatCtx->filename));
//
//    vidPtr->outVideoStr = av_new_stream(vidPtr->outFormatCtx, 0);
//    if (vidPtr->outVideoStr == NULL) {
////        Tcl_AppendResult(interp, "internal error:",
////            " problem opening stream", (char*)NULL);
////        return TCL_ERROR;
//        return -1;
//    }
//    codecCtx = vidPtr->outVideoStr->codec;
//
//    codecCtx->codec_id = vidPtr->outFormatCtx->oformat->video_codec;
//    codecCtx->codec_type = CODEC_TYPE_VIDEO;
//
//    /* put sample parameters */
//    codecCtx->bit_rate = 400000;
//    /* resolution must be a multiple of two */
//    codecCtx->width = (iwd/2)*2;
//    codecCtx->height = (iht/2)*2;
//    codecCtx->time_base.den = 24;
//    codecCtx->time_base.num = 1;
//    codecCtx->gop_size = 12; /* emit one intra frame every so often */
//    codecCtx->pix_fmt = PIX_FMT_YUV420P;
//    if (codecCtx->codec_id == CODEC_ID_MPEG2VIDEO) {
//        codecCtx->max_b_frames = 2;
//    }
//
//    /* find the video encoder */
//    vcodec = avcodec_find_encoder(codecCtx->codec_id);
//    if (!vcodec || avcodec_open(codecCtx, vcodec) < 0) {
//        // Tcl_AppendResult(interp, "internal error:",
//        //     " problem opening codec", (char*)NULL);
//        // return TCL_ERROR;
//        return -1;
//    }
//
//    if (av_set_parameters(vidPtr->outFormatCtx, NULL) < 0) {
//        // Tcl_AppendResult(interp, "internal error:",
//        //     " problem in av_set_parameters()", (char*)NULL);
//        // return TCL_ERROR;
//        return -1;
//    }
//
//    if (url_fopen(&vidPtr->outFormatCtx->pb, fileName, URL_WRONLY) < 0) {
//        // Tcl_AppendResult(interp, "can't open file \"", fileName,
//        //     "\"", (char*)NULL);
//        // return TCL_ERROR;
//        return -1;
//    }
//    av_write_header(vidPtr->outFormatCtx);
//
//    vidPtr->pFrameYUV = avcodec_alloc_frame();
//    vidPtr->pFrameRGB = avcodec_alloc_frame();
//    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
//        // Tcl_AppendResult(interp, "couldn't allocate frame space",
//        //     " for file \"", fileName, "\"", (char*)NULL);
//        // return TCL_ERROR;
//        return -1;
//    }
//
//    vidPtr->yuvw = vidPtr->outVideoStr->codec->width;
//    vidPtr->yuvh = vidPtr->outVideoStr->codec->height;
//    pixfmt = vidPtr->outVideoStr->codec->pix_fmt;
//
//    numBytes = avpicture_get_size(pixfmt, vidPtr->yuvw, vidPtr->yuvh);
//    vidPtr->yuvbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
//
//    avpicture_fill((AVPicture*)vidPtr->pFrameYUV, vidPtr->yuvbuffer,
//        pixfmt, vidPtr->yuvw, vidPtr->yuvh);
//
//
//    if (strcpy(vidPtr->mode,"output") == NULL) {
//        return -1;
//    }
//
//    return 0;
//}


/*
 * ------------------------------------------------------------------------
 *  VideoTime2Frame()
 *
 *  Converts a time value (as defined by the FFMPEG package) into an
 *  integer frame number in the range 0-end for the stream.
 * ------------------------------------------------------------------------
 */
int
VideoTime2Frame(streamPtr, tval)
    AVStream *streamPtr;   /* scale values according to this stream */
    int64_t tval;          /* time value as defined by stream */
{
    AVRational one, factor;
    one.num = 1;
    one.den = 1;
    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;

    if (tval > streamPtr->start_time) {
        tval -= streamPtr->start_time;
    } else {
        tval = 0;
    }
    tval = av_rescale_q(tval, factor, one);
    return (int)tval;
}

/*
 * ------------------------------------------------------------------------
 *  VideoFrame2Time()
 *
 *  Converts a frame number 0-end to the corresponding time value
 *  (as defined by FFMPEG) for the given stream.
 * ------------------------------------------------------------------------
 */
int64_t
VideoFrame2Time(streamPtr, fval)
    AVStream *streamPtr;   /* scale values according to this stream */
    int fval;              /* frame value in the range 0-end */
{
    int64_t tval;
    AVRational one, factor;
    one.num = 1;
    one.den = 1;

    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;

    tval = av_rescale_q((int64_t)fval, one, factor) + streamPtr->start_time;
    return tval;
}
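
/*
 * Worked example of the two conversions above (illustrative numbers
 * only).  Suppose a stream has time_base = 1/90000 and r_frame_rate =
 * 30/1.  Then factor = (1*30)/(90000*1) = 30/90000, so
 *
 *    VideoTime2Frame:  frame = (tval - start_time) * 30/90000
 *                      e.g. tval - start_time = 90000  ->  frame 30
 *    VideoFrame2Time:  tval  = fval * 90000/30 + start_time
 *                      e.g. fval = 30  ->  tval = start_time + 90000
 *
 * i.e. av_rescale_q() with "factor" and "one" in opposite positions
 * makes the two routines (approximate) inverses of each other.
 */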

/*
 * ------------------------------------------------------------------------
 *  VideoNextFrame()
 *
 *  Decodes a series of video packets until the end of the frame
 *  is reached.  Updates the frameNumber and atEnd to maintain the
 *  current status for this video stream.
 * ------------------------------------------------------------------------
 */
void
VideoNextFrame(vidPtr)
    VideoObj *vidPtr;   /* get a frame from this video stream */
{
    int frameFinished;
    uint64_t pts;
    AVCodecContext *vcodecCtx;
    AVStream *vstreamPtr;
    AVPacket packet;

    if (vidPtr->pFormatCtx) {
        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
        vcodecCtx = vstreamPtr->codec;

        /*
         * Decode as many packets as necessary to get the next frame.
         */
        pts = 0;
        while (1) {
            if (av_read_frame(vidPtr->pFormatCtx, &packet) >= 0) {
                if (packet.stream_index == vidPtr->videoStream) {
                    /* save pts so we can grab it again in VideoAvGetBuffer */
                    global_video_pkt_pts = packet.pts;

                    avcodec_decode_video(vcodecCtx, vidPtr->pFrameYUV,
                        &frameFinished, packet.data, packet.size);

                    // avcodec_decode_video2(vcodecCtx, vidPtr->pFrameYUV,
                    //    &frameFinished, &packet);

                    if (packet.dts == AV_NOPTS_VALUE
                          && vidPtr->pFrameYUV->opaque
                          && *(uint64_t*)vidPtr->pFrameYUV->opaque != AV_NOPTS_VALUE) {
                        pts = *(uint64_t*)vidPtr->pFrameYUV->opaque;
                    } else if (packet.dts != AV_NOPTS_VALUE) {
                        pts = packet.dts;
                    } else {
                        pts = 0;
                    }

                    if (frameFinished) {
                        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, pts);
                        break;
                    }
                }
                /* this packet did not complete a frame (or belongs to
                 * another stream); release it before reading the next
                 * one so packets are not leaked */
                av_free_packet(&packet);
            } else {
                vidPtr->atEnd = 1;
                break;
            }
        }
        av_free_packet(&packet);
    }
}

/*
 * ------------------------------------------------------------------------
 *  These two routines are called whenever a frame buffer is allocated,
 *  which means that we're starting a new frame.  Grab the global pts
 *  counter and squirrel it away in the opaque slot of the frame.  This
 *  will give us a pts value that we can trust later.
 * ------------------------------------------------------------------------
 */
int
VideoAvGetBuffer(c,fr)
    AVCodecContext *c;  /* codec doing the frame decoding */
    AVFrame *fr;        /* frame being decoded */
{
    int rval = avcodec_default_get_buffer(c, fr);
    uint64_t *ptsPtr = av_malloc(sizeof(uint64_t));
    *ptsPtr = global_video_pkt_pts;
    fr->opaque = ptsPtr;
    return rval;
}

void
VideoAvReleaseBuffer(c,fr)
    AVCodecContext *c;  /* codec doing the frame decoding */
    AVFrame *fr;        /* frame being decoded */
{
    if (fr && fr->opaque) {
        av_freep(&fr->opaque);
    }
    avcodec_default_release_buffer(c,fr);
}

/*
 * ------------------------------------------------------------------------
 *  VideoInitCmd()
 *
 *  Implements the body of the _ffmpeg_init method in the "video" class.
 *  Initializes the basic data structure and stores it in the _videodata
 *  variable within the class.
 * ------------------------------------------------------------------------
 */
VideoObj *
VideoInitCmd()
{
    /*
     * Create an object to represent this video stream.
     */

    /* Register all codecs and formats */
    av_register_all();

    return VideoSetData();
}

/*
 * ------------------------------------------------------------------------
 *  VideoCleanupCmd()
 *
 *  Implements the body of the _ffmpeg_cleanup method in the "video" class.
 *  Accesses the data structure stored in the _videodata variable and
 *  frees up the data.
 * ------------------------------------------------------------------------
 */
int
VideoCleanupCmd(vidPtr)
    VideoObj *vidPtr;
{
    /*
     *  Nothing much to do here.  Just close the file in case it is
     *  still open.  Don't free vidPtr itself; that is cleaned up by
     *  the ByteArrayObj in the class data member.
     */
    int ret = 0;

    ret -= VideoClose(vidPtr);

    if (vidPtr != NULL) {
        VideoFreeImgBuffer(vidPtr);
        if (vidPtr->fileName != NULL) {
            free(vidPtr->fileName);
            vidPtr->fileName = NULL;
        }
        free(vidPtr);
        vidPtr = NULL;
// FIXME: need a test to make sure vidPtr is null after the function returns.
    }

    return ret;
}

/*
 * ------------------------------------------------------------------------
 *  VideoSizeCmd()
 *
 *  Implements the body of the "size" method in the "video" class.
 *  Returns the size of each frame in this video stream as a list {w h}.
 * ------------------------------------------------------------------------
 */
int
VideoSizeCmd(vidPtr, width, height)
    VideoObj *vidPtr;
    int *width;
    int *height;
{
    AVCodecContext *vcodecCtx;

    if (vidPtr == NULL) {
        return -1;
    }

    if (vidPtr->pFormatCtx == NULL) {
        // "internal error: video stream is not open",
        return -1;
    }

    if (vidPtr->pFormatCtx) {
        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
        if (width != NULL) {
            *width = vcodecCtx->width;
        }
        if (height != NULL) {
            *height = vcodecCtx->height;
        }
    }
    return 0;
}

/*
 * ------------------------------------------------------------------------
 *  VideoGoCmd()
 *
 *  Implements the body of the "go" method in the "video" class.
 *  Advances by one or more frames, or seeks backward in the stream.
 *  Handles the following syntax:
 *    obj go next ...... go to next frame (same as +1)
 *    obj go +n ........ advance by n frames
 *    obj go -n ........ go back by n frames
 *    obj go n ......... go to frame n
 * ------------------------------------------------------------------------
 */
int
VideoGoNext(vidPtr)
    VideoObj *vidPtr;
{
    return VideoGoPlusMinusN(vidPtr,1);
}

int
VideoGoPlusMinusN(vidPtr, n)
    VideoObj *vidPtr;
    int n;
{
    int nabs;

    if (vidPtr == NULL) {
        return -1;
    }

    nabs = vidPtr->frameNumber + n;
    return VideoGoToN(vidPtr, nabs);
}

int
VideoGoToN(vidPtr, n)
    VideoObj *vidPtr;
    int n;
{
    int nrel, nabs, seekFlags, gotframe, t;
    int64_t nseek;
    AVCodecContext *vcodecCtx;
    AVStream *vstreamPtr;

    if (vidPtr == NULL) {
        return -1;
    }

    if (vidPtr->pFormatCtx == NULL) {
        // "internal error: video stream is not open",
        return -1;
    }
    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;

    nabs = n;

    if (nabs < 0) {
        nabs = 0;
    }

    if (nabs < vidPtr->frameNumber) {
        seekFlags = AVSEEK_FLAG_BACKWARD;
    } else {
        seekFlags = 0;
    }

    /*
     * If we're going to an absolute frame, or if we're going backward
     * or too far forward, then seek the frame.
     */
    nrel = nabs-vidPtr->frameNumber;
    if ((nrel > 50) || (seekFlags&AVSEEK_FLAG_BACKWARD)) {

        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
        nseek = VideoFrame2Time(vstreamPtr, nabs);
        // not sure why it is checking against the number 100
        if (nseek > 100) {
            nseek -= 100;
        } else {
            nseek = 0;
        }

        /* first, seek the nearest reference frame for a good starting pt */
        av_seek_frame(vidPtr->pFormatCtx, vidPtr->videoStream,
            nseek, seekFlags);

        // this doesn't seem to give me back the true frame number
        // feels like it is more of a reverse of the VideoFrame2Time call
        // because vidPtr->frameNumber always equals nabs
        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, nseek);
        vidPtr->atEnd = 0;

        /* read the frame to figure out what the frame number is */
        VideoNextFrame(vidPtr);

        /* then, move forward until we reach the desired frame */
        gotframe = 0;
        while (vidPtr->frameNumber < nabs && !vidPtr->atEnd) {
            VideoNextFrame(vidPtr);
            gotframe = 1;
        }

        /* get at least one frame, unless we're done or at the beginning */
        if (!gotframe && !vidPtr->atEnd) {
            VideoNextFrame(vidPtr);
        }
    }
    else {
        while (nrel-- > 0) {
            VideoNextFrame(vidPtr);
        }
    }

    /*
     * Send back the current frame number or "end" as the result.
     */
    return vidPtr->frameNumber;
}
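
/*
 * Illustrative sketch (an assumption, not taken from this file): the
 * header comment above lists the Tcl-level "go" syntax, and a natural
 * mapping onto the three entry points here would be
 *
 *    obj go next   ->  VideoGoNext(vidPtr)            (same as +1)
 *    obj go +n/-n  ->  VideoGoPlusMinusN(vidPtr, n)   (relative move)
 *    obj go n      ->  VideoGoToN(vidPtr, n)          (absolute frame)
 *
 * Each call returns the frame number actually reached, which may be
 * smaller than the request if the stream ends first.
 */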

/*
 * ------------------------------------------------------------------------
 *  VideoGetCmd()
 *
 *  Implements the body of the "get" method in the "video" class.
 *  Returns information about the current frame via the following
 *  syntax:
 *    obj get start|position|end
 *    obj get <imageHandle>
 * ------------------------------------------------------------------------
 */
int
VideoGetImage(vidPtr, iw, ih, img, bufSize)
    VideoObj *vidPtr;
    int iw;
    int ih;
    void **img;
    int *bufSize;
{

    int nframe, numBytes;
    char c, buffer[64];
    AVCodecContext *vcodecCtx;
    AVStream *vstreamPtr;

    if (vidPtr == NULL) {
        return -1;
    }

    if (VideoModeRead(vidPtr) != 0) {
        return -1;
    }

    /*
    if (vidPtr->pFormatCtx) {
        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
    } else {
        vcodecCtx = NULL;
    }
    */

    if (vidPtr->pFormatCtx == NULL) {
        // vidPtr->pFormatCtx is NULL, video not open
        return -1;
    }
    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;

    /*
     * Query the size for this photo and make sure that we have a
     * buffer of the appropriate size for software scaling and
     * format conversion.
     */

    // if the user's desired size is less than 0,
    // use the default size

    if (iw < 0) {
        iw = vcodecCtx->width;
    }
    if (ih < 0) {
        ih = vcodecCtx->height;
    }


    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
        if (vidPtr->rgbbuffer) {
            av_free(vidPtr->rgbbuffer);
            vidPtr->rgbbuffer = NULL;
        }
        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
        vidPtr->rgbw = iw;
        vidPtr->rgbh = ih;

        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
            PIX_FMT_RGB24, iw, ih);

        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
            vcodecCtx->width, vcodecCtx->height, vcodecCtx->pix_fmt,
            iw, ih, PIX_FMT_RGB24, SWS_BICUBIC|SWS_PRINT_INFO, NULL, NULL, NULL);
    }

    /*
     * Rescale the current frame to the desired size, and translate
     * into RGB format so we can copy into the destination image.
     */
    if (vidPtr->pFrameYUV && vidPtr->pFrameYUV->data[0]) {
        sws_scale(vidPtr->scalingCtx, (const uint8_t * const*)
            vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize,
            0, vcodecCtx->height,
            vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize);

/*
        iblock.pixelPtr  = (unsigned char*)vidPtr->pFrameRGB->data[0];
        iblock.width     = iw;
        iblock.height    = ih;
        iblock.pitch     = vidPtr->pFrameRGB->linesize[0];
        iblock.pixelSize = 3;
        iblock.offset[0] = 0;
        iblock.offset[1] = 1;
        iblock.offset[2] = 2;
        iblock.offset[3] = 0;

        Tk_PhotoPutBlock_NoComposite(img, &iblock, 0, 0, iw, ih);
*/

        int bufsize = 0;
        if (vidPtr->img == NULL) {
            VideoAllocImgBuffer(vidPtr,iw,ih);
        } else {
            if ((vidPtr->imgWidth != iw) || (vidPtr->imgHeight != ih)) {
                // new height or width
                // resize the image buffer
                free(vidPtr->img);
                VideoAllocImgBuffer(vidPtr,iw,ih);
            } else {
                // image buffer is the correct size
                // do nothing
            }
        }

        // Write pixel data
        memcpy((char*)vidPtr->img + vidPtr->imgHeaderLen,
            vidPtr->pFrameRGB->data[0],
            vidPtr->imgWidth*3*vidPtr->imgHeight);
    }
    *img = vidPtr->img;
    *bufSize = (vidPtr->imgWidth*3*vidPtr->imgHeight) + vidPtr->imgHeaderLen;
    return 0;
}
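
/*
 * Illustrative sketch (not part of the library): the buffer returned by
 * VideoGetImage() is a complete binary PPM ("P6") image -- the header
 * written by VideoAllocImgBuffer() followed by imgWidth x imgHeight RGB
 * triplets -- so a caller could dump the current frame to disk roughly
 * like this (the file name is made up):
 *
 *    void *img = NULL;
 *    int bufSize = 0;
 *    if (VideoGetImage(vid, -1, -1, &img, &bufSize) == 0) {
 *        FILE *fp = fopen("frame.ppm", "wb");
 *        if (fp != NULL) {
 *            fwrite(img, 1, bufSize, fp);
 *            fclose(fp);
 *        }
 *    }
 *
 * Note that the pointer refers to vidPtr->img, which is reused (and may
 * be reallocated) by the next VideoGetImage() call, so the data should
 * be copied or written out before the video object is used again.
 */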

int
VideoGetFrameRate (vidPtr, fr)
    VideoObj *vidPtr;
    double *fr;
{
    AVStream *vstreamPtr;

    if (vidPtr == NULL) {
        return -1;
    }

    if (fr == NULL) {
        return -1;
    }

    if (vidPtr->pFormatCtx == NULL) {
        // vidPtr->pFormatCtx is NULL, video not open
        return -1;
    }
    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];

    // http://trac.handbrake.fr/browser/trunk/libhb/decavcodec.c?rev=1490#L684
    // there seems to be some controversy over what structure holds
    // the correct frame rate information for different video codecs.
    // for now we will use the stream's r_frame_rate.
    // from the above post, it looks like this value can be interpreted
    // as frames per second.
    *fr = av_q2d(vstreamPtr->r_frame_rate);

    return 0;
}
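
/*
 * For example (illustrative numbers): an NTSC-style stream typically
 * reports r_frame_rate = 30000/1001, so av_q2d() above returns roughly
 * 29.97 frames per second; a PAL stream reporting 25/1 returns
 * exactly 25.0.
 */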

int
VideoAllocImgBuffer(vidPtr, width, height)
    VideoObj *vidPtr;
    int width;
    int height;
{

    char header[64];
    int headerLen = 0;
    int bufsize = 0;

    sprintf(header,"P6\n%d %d\n255\n", width, height);
    headerLen = strlen(header);
    bufsize = headerLen + (width*3*height);
    vidPtr->img = (void*) malloc(bufsize);
    vidPtr->imgHeaderLen = headerLen;
    vidPtr->imgWidth = width;
    vidPtr->imgHeight = height;
    memcpy(vidPtr->img,header,headerLen);

    return 0;
}

int
VideoFreeImgBuffer(vidPtr)
    VideoObj *vidPtr;
{
    if ((vidPtr != NULL) && (vidPtr->img != NULL)) {
        free(vidPtr->img);
        vidPtr->img = NULL;
    }
    return 0;
}

int
VideoGetPositionCur(vidPtr, pos)
    VideoObj *vidPtr;      /* video object to act on */
    int *pos;
{
    int fnum = -1;
    AVStream *vstreamPtr;

    if (vidPtr == NULL) {
        return -1;
    }

    if (pos == NULL) {
        return -1;
    }

    if (VideoModeRead(vidPtr) != 0) {
        return -1;
    }

    if (vidPtr->pFormatCtx) {
        fnum = vidPtr->frameNumber;
    }

    *pos = fnum;
    return 0;
}

int
VideoGetPositionEnd(vidPtr, pos)
    VideoObj *vidPtr;      /* video object to act on */
    int *pos;
{
    int nframe = -1;
    AVStream *vstreamPtr;

    if (vidPtr == NULL) {
        return -1;
    }

    if (pos == NULL) {
        return -1;
    }

    if (VideoModeRead(vidPtr) != 0) {
        return -1;
    }

    if (vidPtr->pFormatCtx) {
        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
        nframe = VideoTime2Frame(vstreamPtr,
            vstreamPtr->start_time + vstreamPtr->duration);
    }

    *pos = nframe;
    return 0;
}

// FIXME: get this function working
///*
// * ------------------------------------------------------------------------
// *  VideoPutCmd()
// *
// *  Implements the body of the "put" method in the "video" class.
// *  Stores a single frame into the video stream:
// *    obj put <imageHandle>
// * ------------------------------------------------------------------------
// */
//int
//VideoPutCmd(cdata, interp, argc, argv)
//    ClientData cdata;      /* not used */
//    Tcl_Interp *interp;    /* interpreter */
//    int argc;              /* number of arguments */
//    CONST84 char* argv[];  /* argument strings */
//{
//    VideoObj *vidPtr;
//    int iw, ih, numBytes, roffs, goffs, boffs;
//    char buffer[64];
//    unsigned char* photodata;
//    uint8_t* rgbdata;
//    Tk_PhotoHandle img;
//    Tk_PhotoImageBlock iblock;
//    AVCodecContext *codecCtx;
//
//    if (VideoGetData(interp, &vidPtr) != TCL_OK) {
//        return TCL_ERROR;
//    }
//
//    if (argc != 2) {
//        Tcl_AppendResult(interp, "wrong # args: should be \"", argv[0],
//            " image\"", (char*)NULL);
//        return TCL_ERROR;
//    }
//
//    /*
//     * Get the name of the image and copy from it.
//     */
//    img = Tk_FindPhoto(interp, argv[1]);
//    if (img == NULL) {
//        Tcl_AppendResult(interp, "bad value \"", argv[1],
//            "\": expected photo image", (char*)NULL);
//        return TCL_ERROR;
//    }
//
//    /*
//     * Query the size for this photo and make sure that we have a
//     * buffer of the appropriate size for software scaling and
//     * format conversion.
//     */
//    Tk_PhotoGetImage(img, &iblock);
//    Tk_PhotoGetSize(img, &iw, &ih);
//
//    if (VideoModeWrite(interp, iw, ih) != TCL_OK) {
//        return TCL_ERROR;
//    }
//    codecCtx = vidPtr->outVideoStr->codec;
//
//    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
//        if (vidPtr->rgbbuffer) {
//            av_free(vidPtr->rgbbuffer);
//            vidPtr->rgbbuffer = NULL;
//        }
//        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
//        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
//        vidPtr->rgbw = iw;
//        vidPtr->rgbh = ih;
//
//        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
//            PIX_FMT_RGB24, iw, ih);
//
//        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
//            iw, ih, PIX_FMT_RGB24,
//            codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
//            SWS_BICUBIC, NULL, NULL, NULL);
//    }
//
//    /*
//     * Copy the data from the Tk photo block into the RGB frame.
//     */
//    roffs = iblock.offset[0];
//    goffs = iblock.offset[1];
//    boffs = iblock.offset[2];
//
//    for (ih=0; ih < iblock.height; ih++) {
//        rgbdata = vidPtr->pFrameRGB->data[0] + ih*vidPtr->pFrameRGB->linesize[0];
//        photodata = iblock.pixelPtr + ih*iblock.pitch;
//        for (iw=0; iw < iblock.width; iw++) {
//            rgbdata[0] = photodata[roffs];
//            rgbdata[1] = photodata[goffs];
//            rgbdata[2] = photodata[boffs];
//            rgbdata += 3;
//            photodata += iblock.pixelSize;
//        }
//    }
//
//    /*
//     * Rescale the current frame to the desired size, and translate
//     * from RGB to YUV so we can give the frame to the codec.
//     */
//    sws_scale(vidPtr->scalingCtx,
//        vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize,
//        0, ih,
//        vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize);
//
//    numBytes = VideoWriteFrame(vidPtr, vidPtr->pFrameYUV);
//    if (numBytes < 0) {
//        Tcl_AppendResult(interp, "error in av_write_frame()", (char*)NULL);
//        return TCL_ERROR;
//    }
//    sprintf(buffer, "frame %d (%d bytes)", vidPtr->frameNumber++, numBytes);
//    Tcl_SetResult(interp, buffer, TCL_VOLATILE);
//    return TCL_OK;
//}


/*
 * ------------------------------------------------------------------------
 *  VideoWriteFrame()
 *
 *  Used internally to write a single frame out to the output stream.
 *  Returns the number of bytes written to the frame, or -1 if an error
 *  occurred.
 * ------------------------------------------------------------------------
 */
int
VideoWriteFrame(vidPtr, framePtr)
    VideoObj *vidPtr;      /* video object being updated */
    AVFrame *framePtr;     /* picture frame being written out */
{
    int numBytes;
    AVCodecContext *codecCtx;
    AVPacket pkt;

#define OUTBUF_SIZE 500000
    uint8_t outbuf[OUTBUF_SIZE];

    codecCtx = vidPtr->outVideoStr->codec;
    numBytes = avcodec_encode_video(codecCtx, outbuf, OUTBUF_SIZE, framePtr);

    if (numBytes > 0) {
        av_init_packet(&pkt);

        if (codecCtx->coded_frame->pts != AV_NOPTS_VALUE) {
            pkt.pts = av_rescale_q(codecCtx->coded_frame->pts,
                codecCtx->time_base,
                vidPtr->outVideoStr->time_base);
        }
        if (codecCtx->coded_frame->key_frame) {
            pkt.flags |= PKT_FLAG_KEY;
        }
        pkt.stream_index = vidPtr->outVideoStr->index;
        pkt.data = outbuf;
        pkt.size = numBytes;

        /* write the compressed frame in the media file */
        if (av_write_frame(vidPtr->outFormatCtx, &pkt) != 0) {
            return -1;
        }
    }
    return numBytes;
}

/*
 * ------------------------------------------------------------------------
 *  VideoTransform()
 *
 *  Implements the body of the "transform" method in the "video" class.
 *  Translates one value into another--times into frames, etc.  Handles
 *  the following syntax:
 *    obj transform frames2duration <frames>
 *    obj transform duration2frames <duration>
 * ------------------------------------------------------------------------
 */
double
VideoTransformFrames2Duration(vidPtr, frame)
    VideoObj *vidPtr;
    int frame;
{
    double duration;
    AVCodecContext *vcodecCtx;
    AVStream *vstreamPtr;
    AVRational hundred;
    int64_t tval;

    hundred.num = 100;
    hundred.den = 1;

    if (vidPtr == NULL) {
        return -1;
    }

    if (vidPtr->pFormatCtx == NULL) {
//        Tcl_AppendResult(interp, "can't compute transformations:",
//            " stream not opened", (char*)NULL);
//        return TCL_ERROR;
        return -1;
    }

    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];

    tval = av_rescale_q((int64_t)frame, hundred, vstreamPtr->r_frame_rate);
    duration = 0.01*tval;

    return duration;
}

int
VideoTransformDuration2Frames(vidPtr, duration)
    VideoObj *vidPtr;
    double duration;
{
    int frames;
    AVCodecContext *vcodecCtx;
    AVStream *vstreamPtr;
    AVRational hundred;
    int64_t tval;

    hundred.num = 100;
    hundred.den = 1;

    if (vidPtr == NULL) {
        return -1;
    }
    if (vidPtr->pFormatCtx == NULL) {
//        Tcl_AppendResult(interp, "can't compute transformations:",
//            " stream not opened", (char*)NULL);
//        return TCL_ERROR;
        return -1;
    }

    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];

    tval = (int64_t)(duration*100);
    frames = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
    // check above for overflow
    // tval = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
    // sprintf(buffer, "%lld", tval);

    return frames;
}
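
/*
 * Worked example of the two transforms above (illustrative numbers).
 * The factor of 100 is just fixed-point scaling so av_rescale_q() can
 * keep two decimal places of the duration in integer arithmetic.  For
 * a 30 fps stream (r_frame_rate = 30/1):
 *
 *    frames2duration:  frame 90  ->  tval = 90*100/30 = 300
 *                                ->  duration = 0.01*300 = 3.0 seconds
 *    duration2frames:  3.0 s     ->  tval = 300
 *                                ->  frames = 300*30/100 = 90
 */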

/*
 * ------------------------------------------------------------------------
 *  VideoClose()
 *
 *  Implements the body of the _ffmpeg_close method in the "video" class.
 *  Closes any file opened previously by the open methods for read/write.
 *  If nothing is open, this does nothing.
 * ------------------------------------------------------------------------
 */
int
VideoClose(vidPtr)
    VideoObj *vidPtr;
{
    AVCodecContext *vcodecCtx;
    int i;

    if (vidPtr == NULL) {
        return -1;
    }

    if (vidPtr->yuvbuffer) {
        av_free(vidPtr->yuvbuffer);
        vidPtr->yuvbuffer = NULL;
        vidPtr->yuvw = 0;
        vidPtr->yuvh = 0;
    }
    if (vidPtr->pFrameYUV) {
        av_free(vidPtr->pFrameYUV);
        vidPtr->pFrameYUV = NULL;
    }

    if (vidPtr->rgbbuffer) {
        av_free(vidPtr->rgbbuffer);
        vidPtr->rgbbuffer = NULL;
        vidPtr->rgbw = 0;
        vidPtr->rgbh = 0;
    }
    if (vidPtr->pFrameRGB) {
        av_free(vidPtr->pFrameRGB);
        vidPtr->pFrameRGB = NULL;
    }

    if (vidPtr->scalingCtx) {
        sws_freeContext(vidPtr->scalingCtx);
        vidPtr->scalingCtx = NULL;
    }
    if (vidPtr->pFormatCtx && vidPtr->videoStream >= 0) {
        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
        if (vcodecCtx) {
            avcodec_close(vcodecCtx);
        }
    }
    if (vidPtr->pFormatCtx) {
        av_close_input_file(vidPtr->pFormatCtx);
        vidPtr->pFormatCtx = NULL;
    }

    if (vidPtr->outFormatCtx) {
        while (VideoWriteFrame(vidPtr, NULL) > 0)
            ; /* write out any remaining frames */

        av_write_trailer(vidPtr->outFormatCtx);

        for (i=0; i < vidPtr->outFormatCtx->nb_streams; i++) {
            avcodec_close(vidPtr->outFormatCtx->streams[i]->codec);
            av_freep(&vidPtr->outFormatCtx->streams[i]->codec);
            av_freep(&vidPtr->outFormatCtx->streams[i]);
        }

        if (vidPtr->outFormatCtx->pb) {
            url_fclose(vidPtr->outFormatCtx->pb);
        }

        av_free(vidPtr->outFormatCtx);
        vidPtr->outFormatCtx = NULL;
    }

    /* reset the mode to null */
    *vidPtr->mode = '\0';

    return 0;
}