source: branches/uq/video/RpVideo.c @ 5206

Last change on this file since 5206 was 4225, checked in by ldelgass, 11 years ago

merge 3931 from trunk

1/*
2 * ----------------------------------------------------------------------
3 *  TkFFMPEG:  video
4 *
5 *  These routines support the methods in the "video" class, which is
6 *  a video stream that can be read from or written to.  The class
7 *  itself is defined in itcl, but when methods are called, execution
8 *  jumps down to this level.
9 * ======================================================================
10 *  AUTHOR:  Michael McLennan, Purdue University
11 *  Copyright (c) 2004-2012  HUBzero Foundation, LLC
12 *
13 *  See the file "license.terms" for information on usage and
14 *  redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
15 * ======================================================================
16 */
17
18#include <stdlib.h>
19#include <string.h>
20
21#include "config.h"
22
23#ifdef HAVE_FFMPEG_AVCODEC_H
24# include <ffmpeg/avcodec.h>
25#endif
26
27#ifdef HAVE_LIBAVCODEC_AVCODEC_H
28# include <libavcodec/avcodec.h>
29#endif
30
31#ifdef HAVE_FFMPEG_AVFORMAT_H
32# include <ffmpeg/avformat.h>
33#endif
34
35#ifdef HAVE_LIBAVFORMAT_AVFORMAT_H
36# include <libavformat/avformat.h>
37#endif
38
39#ifdef HAVE_FFMPEG_AVUTIL_H
40# include <ffmpeg/avutil.h>
41#endif
42
43#ifdef HAVE_LIBAVUTIL_AVUTIL_H
44# include <libavutil/avutil.h>
45#endif
46
47#ifdef HAVE_LIBAVUTIL_MATHEMATICS_H
48# include <libavutil/mathematics.h> /* for av_rescale_q and av_gcd */
49#endif
50
51#ifdef HAVE_FFMPEG_SWSCALE_H
52# include <ffmpeg/swscale.h>
53#endif
54
55#ifdef HAVE_LIBSWSCALE_SWSCALE_H
56# include <libswscale/swscale.h>
57#endif
58
59#include "RpVideo.h"
60
61#ifndef HAVE_AVMEDIA_TYPE_VIDEO
62#define AVMEDIA_TYPE_VIDEO      CODEC_TYPE_VIDEO
63#endif  /* HAVE_AVMEDIA_TYPE_VIDEO */
64
65#ifndef AV_PKT_FLAG_KEY
66#define AV_PKT_FLAG_KEY         PKT_FLAG_KEY           
67#endif
68
69#ifndef HAVE_AVIO_CLOSE
70#define avio_close              url_fclose
71#endif
72
73/*
74 * Each video object is represented by the following data:
75 */
76struct VideoObjRec {
77    int magic;
78
79    /* video input */
80    AVFormatContext *pFormatCtx;
81    int videoStream;
82    int frameNumber;
83    int atEnd;
84
85    /* video output */
86    AVFormatContext *outFormatCtx;
87    AVStream *outVideoStr;
88
89    /* used for both input/output */
90    AVFrame *pFrameYUV;
91    uint8_t *yuvbuffer;
92    int yuvw, yuvh;
93    AVFrame *pFrameRGB;
94    uint8_t *rgbbuffer;
95    int rgbw, rgbh;
96    struct SwsContext *scalingCtx;
97
98    char *fileName;
99    char mode[64];
100    char fmt[64];
101    int lastframe;
102
103    /* tmp buffer to give images back to user */
104    void *img;
105    int imgHeaderLen;
106    int imgWidth;
107    int imgHeight;
108};
109
110/* magic stamp for VideoObj, to make sure data is valid */
111#define VIDEO_OBJ_MAGIC 0x0102abcd
112
113static VideoObj *VideoSetData ();
114
115static int VideoModeRead (VideoObj *vidPtr);
116// static int VideoModeWrite (Tcl_Interp *interp, int w, int h);
117
118static int VideoTime2Frame (AVStream *streamPtr, int64_t tval);
119static int64_t VideoFrame2Time (AVStream *streamPtr, int fval);
120static void VideoNextFrame (VideoObj *vidPtr);
121
122uint64_t global_video_pkt_pts = AV_NOPTS_VALUE;
123static int VideoAvGetBuffer (struct AVCodecContext *c, AVFrame *fr);
124static void VideoAvReleaseBuffer (struct AVCodecContext *c, AVFrame *fr);
125static int VideoWriteFrame (VideoObj *vidPtr, AVFrame *framePtr);
126
127static int VideoAllocImgBuffer (VideoObj *vidPtr, int width, int height);
128static int VideoFreeImgBuffer (VideoObj *vidPtr);
129
130#ifdef notdef
131static double VideoTransformFrames2Duration (VideoObj *vidPtr, int frame);
132static int VideoTransformDuration2Frames (VideoObj *vidPtr, double duration);
133#endif
134
135/*
136 * ------------------------------------------------------------------------
137 *  VideoSetData()
138 *
139 *  Saves VideoObj data in the "_videodata" slot in the current object
140 *  context.  The data can be retrieved later by calling VideoGetData().
141 * ------------------------------------------------------------------------
142 */
143VideoObj *
144VideoSetData()
145{
146    VideoObj* vid = NULL;
147
148    vid = malloc(sizeof(VideoObj));
149
150    if (vid == NULL) {
151        return NULL;
152    }
153
154    vid->magic = VIDEO_OBJ_MAGIC;
155    vid->pFormatCtx = NULL;
156    vid->videoStream = 0;
157    vid->frameNumber = -1;
158    vid->atEnd = 0;
159
160    vid->outFormatCtx = NULL;
161    vid->outVideoStr = NULL;
162
163    vid->pFrameYUV = NULL;
164    vid->yuvbuffer = NULL;
165    vid->yuvw = 0;
166    vid->yuvh = 0;
167    vid->pFrameRGB = NULL;
168    vid->rgbbuffer = NULL;
169    vid->rgbw = 0;
170    vid->rgbh = 0;
171    vid->scalingCtx = NULL;
172
173    vid->fileName = NULL;
174    *vid->mode = '\0';
175    *vid->fmt = '\0';
176    vid->lastframe = 0;
177
178    vid->img = NULL;
179    vid->imgHeaderLen = 0;
180    vid->imgWidth = 0;
181    vid->imgHeight = 0;
182
183    return vid;
184}
185
186/*
187 * ------------------------------------------------------------------------
188 *  VideoFindLastFrame()
189 *
190 *  Find the last readable frame.
191 * ------------------------------------------------------------------------
192 */
193int
194VideoFindLastFrame(vidPtr,lastframe)
195    VideoObj *vidPtr;
196    int *lastframe;
197{
198    int f = 0;
199    int nframe = 0;
200    int cur = 0;
201    AVStream *vstreamPtr;
202
203    if (vidPtr == NULL) {
204        return -1;
205    }
206
207    if (lastframe == NULL) {
208        return -1;
209    }
210
211    if (VideoModeRead(vidPtr) != 0) {
212        return -1;
213    }
214
215    // calculate an estimate of the last frame
216    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
217    nframe = VideoTime2Frame(vstreamPtr,
218        vstreamPtr->start_time + vstreamPtr->duration);
219
220    // get the real last readable frame:
221    // seek back about 50 frames (assumed to be enough to land before
222    // the last key frame) and step forward until no more frames can be read
223    f = vidPtr->frameNumber;
224    cur = VideoGoToN(vidPtr,nframe-50);
225    while (cur != nframe) {
226        cur = nframe;
227        nframe = VideoGoNext(vidPtr);
228    }
229    *lastframe = nframe;
230    VideoGoToN(vidPtr,f);
231
232    return 0;
233}
234
235
236int
237VideoOpenFile(vidPtr, fileName, mode)
238    VideoObj *vidPtr;
239    const char *fileName;
240    const char *mode;
241{
242    int fnlen = 0;
243    int err = 0;
244    int lastframe = 0;
245
246    if (fileName == NULL) {
247        // missing value for fileName
248        // return TCL_ERROR;
249        return -1;
250    }
251    if (*fileName == '\0') {
252        /* no file name set -- do nothing */
253        return 0;
254    }
255
256    fnlen = strlen(fileName);
257    if (vidPtr->fileName != NULL) {
258        free(vidPtr->fileName);
259    }
260    vidPtr->fileName = (char *) malloc((fnlen+1)*sizeof(char));
261    if (vidPtr->fileName == NULL) {
262        // trouble mallocing space
263        return -1;
264    }
265    strncpy(vidPtr->fileName,fileName,fnlen);
266    vidPtr->fileName[fnlen] = '\0';
267
268    // FIXME: remove this constraint when we support
269    // the modes: r, r+, w, w+, a, a+, b and combinations
270    if (strlen(mode) > 1) {
271        return -1;
272    }
273
274    if (*mode == 'r') {
275        /* we're now in "input" mode */
276        err = VideoModeRead(vidPtr);
277        if (err) {
278            return err;
279        }
280
281        VideoFindLastFrame(vidPtr,&lastframe);
282        vidPtr->lastframe = lastframe;
283    } else if (*mode == 'w') {
284        /* we're now in "output" mode */
285        // VideoModeWrite(vidPtr);
286    } else {
287        // unrecognized mode
288        return -1;
289    }
290
291    return 0;
292}
293
294
295/*
296 * ------------------------------------------------------------------------
297 *  VideoModeRead()
298 *
299 *  Tries to force this video stream into "read" mode.  If the current
300 *  mode is "", then the -file is opened for reading.  If the current
301 *  mode is "write", then the stream is closed and then opened for
302 *  reading.  If the current mode is "read", then this call does nothing.
303 *  Usually called just before a "read" operation (get, go, etc.) is
304 *  performed.
305 *
306 *  Returns 0 if successful, or a negative error code if there is a
307 *  problem opening or closing the stream.
308 *
309 *  Error Codes
310 *  -1      invalid video object, or error closing a previously open stream
311 *  -2      missing file name
312 *  -3      couldn't open file
313 *  -4      couldn't find streams in file
314 *  -5      couldn't find video stream in file
315 *  -6      unsupported codec for file
316 *  -7      couldn't open codec for file
317 *  -8      couldn't allocate frame space
318 *  -9      strcpy input to vidPtr->mode failed
319 * ------------------------------------------------------------------------
320 */
321int
322VideoModeRead(vidPtr)
323    VideoObj *vidPtr;
324{
325    int i;
326    const char *fmt;
327    AVCodecContext *vcodecCtx;
328    AVCodec *vcodec;
329
330    if (vidPtr == NULL) {
331        return -1;
332    }
333
334    if (vidPtr->fileName == NULL) {
335        // Tcl_AppendResult(interp, "missing value for -file", (char*)NULL);
336        // return TCL_ERROR;
337
338        // missing file name
339        return -2;
340    }
341    if (*vidPtr->fileName == '\0') {
342        /* no file name set -- do nothing */
343        return 0;
344    }
345
346    if (strcmp(vidPtr->mode,"input") == 0) {
347        return 0;
348    } else if (strcmp(vidPtr->mode,"output") == 0) {
349        if (VideoClose(vidPtr) != 0) {
350            return -1;
351        }
352    }
353
354    /*
355     * Open the video stream from that file.
356     */
357#ifdef HAVE_AVFORMAT_OPEN_INPUT
358    if (avformat_open_input(&vidPtr->pFormatCtx, vidPtr->fileName, NULL,
359        NULL) != 0) {
360        return -3;
361    }
362#else
363    if (av_open_input_file(&vidPtr->pFormatCtx, vidPtr->fileName,
364            NULL, 0, NULL) != 0) {
365        return -3;
366    }
367#endif
368#ifdef HAVE_AVFORMAT_FIND_STREAM_INFO
369    if (avformat_find_stream_info(vidPtr->pFormatCtx, NULL) < 0) {
370#else
371    if (av_find_stream_info(vidPtr->pFormatCtx) < 0) {
372#endif
373        // Tcl_AppendResult(interp, "couldn't find streams in file \"",
374        //     fileName, "\"", (char*)NULL);
375        // return TCL_ERROR;
376
377        // couldn't find streams in file
378        return -4;
379    }
380
381    /*
382     * Search for a video stream and its codec.
383     */
384    vidPtr->videoStream = -1;
385    for (i=0; i < vidPtr->pFormatCtx->nb_streams; i++) {
386        if (vidPtr->pFormatCtx->streams[i]->codec->codec_type
387            == AVMEDIA_TYPE_VIDEO) {
388            vidPtr->videoStream = i;
389            break;
390        }
391    }
392    if (vidPtr->videoStream < 0) {
393        // Tcl_AppendResult(interp, "couldn't find video stream in file \"",
394        //     fileName, "\"", (char*)NULL);
395        // return TCL_ERROR;
396
397        // couldn't find video stream in file
398        return -5;
399    }
400
401    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
402    vcodec = avcodec_find_decoder(vcodecCtx->codec_id);
403    if (vcodec == NULL) {
404        // Tcl_AppendResult(interp, "unsupported codec for file \"",
405        //     fileName, "\"", (char*)NULL);
406        // return TCL_ERROR;
407
408        // unsupported codec for file
409        return -6;
410    }
411#ifdef HAVE_AVCODEC_OPEN2
412    if (avcodec_open2(vcodecCtx, vcodec, NULL) < 0) {
413#else
414    if (avcodec_open(vcodecCtx, vcodec) < 0) {
415#endif
416        // Tcl_AppendResult(interp, "couldn't open codec for file \"",
417        //     fileName, "\"", (char*)NULL);
418        // return TCL_ERROR;
419
420        // couldn't open codec for file
421        return -7;
422    }
423
424    vcodecCtx->get_buffer = VideoAvGetBuffer;
425    vcodecCtx->release_buffer = VideoAvReleaseBuffer;
426
427    vidPtr->pFrameYUV = avcodec_alloc_frame();
428    vidPtr->pFrameRGB = avcodec_alloc_frame();
429    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
430        // Tcl_AppendResult(interp, "couldn't allocate frame space",
431        //     " for file \"", fileName, "\"", (char*)NULL);
432        // return TCL_ERROR;
433
434        // couldn't allocate frame space
435        return -8;
436    }
437
438    /* save the name of the codec as the -format option */
439    fmt = "?";
440    if (vcodecCtx->codec && vcodecCtx->codec->name) {
441        fmt = vcodecCtx->codec->name;
442        strcpy(vidPtr->fmt,fmt);
443    }
444//
445//    sprintf(buffer, "%d", vcodecCtx->width);
446//    if (Tcl_SetVar(interp, "width", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
447//        return TCL_ERROR;
448//    }
449//    sprintf(buffer, "%d", vcodecCtx->height);
450//    if (Tcl_SetVar(interp, "height", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
451//        return TCL_ERROR;
452//    }
453//
454
455    if (strcpy(vidPtr->mode,"input") == NULL) {
456        // strcpy input to vidPtr->mode failed
457        return -9;
458    }
459
460    return 0;
461}
462
463
464// FIXME: get this function working.
465///*
466// * ------------------------------------------------------------------------
467// *  VideoModeWrite()
468// *
469// *  Tries to force this video stream into "write" mode.  If the current
470// *  mode is "", then the -file is opened for writing.  If the current
471// *  mode is "read", then the stream is closed and then opened for
472// *  writing.  If the current mode is "write", then this call does nothing.
473// *  Usually called just before a "write" operation (put, etc.) is
474// *  performed.
475// *
476// *  Returns TCL_OK if successful, and TCL_ERROR if there is a problem
477// *  opening or closing the stream.
478// * ------------------------------------------------------------------------
479// */
480//int
481//VideoModeWrite(vidPtr, fileName, width, height, fmt)
482//    VideoObj *vidPtr;      /* video object to write */
483//    CONST84 char *fileName;
484//    int width;             /* native width of each frame */
485//    int height;            /* native height of each frame */
486//    CONST84 char *fmt
487//{
488//    char c;
489//    int numBytes, pixfmt, iwd, iht;
490//    CONST84 char *size;
491//    AVCodecContext *codecCtx;
492//    AVCodec *vcodec;
493//
494//    if (vidPtr == NULL) {
495//        return -1;
496//    }
497//
498//    /*
499//     * Get the current mode.  If we're already in "output", then we're
500//     * done.  Otherwise, close the stream if necessary and prepare to
501//     * open the file for write.
502//     */
503//    if (vidPtr->mode == NULL) {
504//        return -1;
505//    }
506//
507//    c = *vidPtr->mode;
508//    if (c == 'o' && strcmp(vidPtr->mode,"output") == 0) {
509//        return 0;
510//    }
511//    else if (c == 'i' && strcmp(vidPtr->mode,"input") == 0) {
512//        if (VideoClose(vidPtr) != 0) {
513//            return -1;
514//        }
515//    }
516//
517//    /*
518//     * Get the file name from the -file variable.
519//     */
520//    if ((fileName == NULL) || (*fileName == '\0')) {
521//        /* no file name set -- do nothing */
522//        return 0;
523//    }
524//
525//    /*
526//     * Get the -width and -height of each frame.  If these are set
527//     * to 0 (default), then use the incoming width/height from an
528//     * actual frame.
529//     */
530//     iwd = width;
531//     iht = height;
532//
533//    /*
534//     * Get the format argument.
535//     */
536//    if (fmt == NULL) {
537////        Tcl_AppendResult(interp, "missing value for -format", (char*)NULL);
538////        return TCL_ERROR;
539//        return -1;
540//    }
541//    if (strcmp(fmt,"mpeg1video") == 0) {
542//        vidPtr->outFormatCtx = av_alloc_format_context();
543//        vidPtr->outFormatCtx->oformat = guess_format("mpeg", NULL, NULL);
544//    }
545//    else if (strcmp(fmt,"flv") == 0) {
546//        vidPtr->outFormatCtx = av_alloc_format_context();
547//        vidPtr->outFormatCtx->oformat = guess_format("flv", NULL, NULL);
548//    }
549//    else if (strcmp(fmt,"mov") == 0) {
550//        vidPtr->outFormatCtx = av_alloc_format_context();
551//        vidPtr->outFormatCtx->oformat = guess_format("mov", NULL, NULL);
552//        /* MOV normally uses MPEG4, but that may not be installed */
553//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
554//    }
555//    else if (strcmp(fmt,"avi") == 0) {
556//        vidPtr->outFormatCtx = av_alloc_format_context();
557//        vidPtr->outFormatCtx->oformat = guess_format("avi", NULL, NULL);
558//        /* AVI normally uses MPEG4, but that may not be installed */
559//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
560//    }
561//    else {
562////        Tcl_AppendResult(interp, "bad format \"", fmt, "\": should be",
563////            " avi, flv, mpeg1video, mov", (char*)NULL);
564////        return TCL_ERROR;
565//        return -1;
566//    }
567//
568//    /*
569//     * Open the video stream for writing.
570//     */
571//    strncpy(vidPtr->outFormatCtx->filename, fileName,
572//        sizeof(vidPtr->outFormatCtx->filename));
573//
574//    vidPtr->outVideoStr = av_new_stream(vidPtr->outFormatCtx, 0);
575//    if (vidPtr->outVideoStr == NULL) {
576////        Tcl_AppendResult(interp, "internal error:",
577////            " problem opening stream", (char*)NULL);
578////        return TCL_ERROR;
579//        return -1;
580//    }
581//    codecCtx = vidPtr->outVideoStr->codec;
582//
583//    codecCtx->codec_id = vidPtr->outFormatCtx->oformat->video_codec;
584//    codecCtx->codec_type = CODEC_TYPE_VIDEO;
585//
586//    /* put sample parameters */
587//    codecCtx->bit_rate = 400000;
588//    /* resolution must be a multiple of two */
589//    codecCtx->width = (iwd/2)*2;
590//    codecCtx->height = (iht/2)*2;
591//    codecCtx->time_base.den = 24;
592//    codecCtx->time_base.num = 1;
593//    codecCtx->gop_size = 12; /* emit one intra frame every so often */
594//    codecCtx->pix_fmt = PIX_FMT_YUV420P;
595//    if (codecCtx->codec_id == CODEC_ID_MPEG2VIDEO) {
596//        codecCtx->max_b_frames = 2;
597//    }
598//
599//    /* find the video encoder */
600//    vcodec = avcodec_find_encoder(codecCtx->codec_id);
601//    if (!vcodec || avcodec_open(codecCtx, vcodec) < 0) {
602//        // Tcl_AppendResult(interp, "internal error:",
603//        //     " problem opening codec", (char*)NULL);
604//        // return TCL_ERROR;
605//        return -1;
606//    }
607//
608//    if (av_set_parameters(vidPtr->outFormatCtx, NULL) < 0) {
609//        // Tcl_AppendResult(interp, "internal error:",
610//        //     " problem in av_set_parameters()", (char*)NULL);
611//        // return TCL_ERROR;
612//        return -1;
613//    }
614//
615//    if (url_fopen(&vidPtr->outFormatCtx->pb, fileName, URL_WRONLY) < 0) {
616//        // Tcl_AppendResult(interp, "can't open file \"", fileName,
617//        //     "\"", (char*)NULL);
618//        // return TCL_ERROR;
619//        return -1;
620//    }
621//    av_write_header(vidPtr->outFormatCtx);
622//
623//    vidPtr->pFrameYUV = avcodec_alloc_frame();
624//    vidPtr->pFrameRGB = avcodec_alloc_frame();
625//    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
626//        // Tcl_AppendResult(interp, "couldn't allocate frame space",
627//        //     " for file \"", fileName, "\"", (char*)NULL);
628//        // return TCL_ERROR;
629//        return -1;
630//    }
631//
632//    vidPtr->yuvw = vidPtr->outVideoStr->codec->width;
633//    vidPtr->yuvh = vidPtr->outVideoStr->codec->height;
634//    pixfmt = vidPtr->outVideoStr->codec->pix_fmt;
635//
636//    numBytes = avpicture_get_size(pixfmt, vidPtr->yuvw, vidPtr->yuvh);
637//    vidPtr->yuvbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
638//
639//    avpicture_fill((AVPicture*)vidPtr->pFrameYUV, vidPtr->yuvbuffer,
640//        pixfmt, vidPtr->yuvw, vidPtr->yuvh);
641//
642//
643//    if (strcpy(vidPtr->mode,"output") == NULL) {
644//        return -1;
645//    }
646//
647//    return 0;
648//}
649
650
651/*
652 * ------------------------------------------------------------------------
653 *  VideoTime2Frame()
654 *
655 *  Converts a time value (as defined by the FFMPEG package) into an
656 *  integer frame number in the range 0-end for the stream.
657 * ------------------------------------------------------------------------
658 */
659int
660VideoTime2Frame(streamPtr, tval)
661    AVStream *streamPtr;   /* scale values according to this stream */
662    int64_t tval;          /* time value as defined by stream */
663{
664    AVRational one, factor;
665    one.num = 1;
666    one.den = 1;
667    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
668    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;
669
670    if (tval > streamPtr->start_time) {
671        tval -= streamPtr->start_time;
672    } else {
673        tval = 0;
674    }
675    tval = av_rescale_q(tval, factor, one);
676    return (int)tval;
677}
678
679/*
680 * ------------------------------------------------------------------------
681 *  VideoFrame2Time()
682 *
683 *  Converts a frame number 0-end to the corresponding time value
684 *  (as defined by FFMPEG) for the given stream.
685 * ------------------------------------------------------------------------
686 */
687int64_t
688VideoFrame2Time(streamPtr, fval)
689    AVStream *streamPtr;   /* scale values according to this stream */
690    int fval;              /* frame value in the range 0-end */
691{
692    int64_t tval;
693    AVRational one, factor;
694    one.num = 1;
695    one.den = 1;
696
697    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
698    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;
699
700    tval = av_rescale_q((int64_t)fval, one, factor) + streamPtr->start_time;
701    return tval;
702}
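
/*
 * Worked example (a sketch, not compiled; the 1/90000 time base and
 * 30 fps frame rate are assumptions for illustration only): with
 * time_base = 1/90000 and r_frame_rate = 30/1, a time value of
 * start_time + 180000 (2 seconds into the stream) converts to frame 60,
 * and frame 60 converts back to start_time + 180000.
 */
#ifdef notdef
static void
VideoConversionExample(streamPtr)
    AVStream *streamPtr;
{
    int frame;
    int64_t tval;

    /* time -> frame:  180000 * (1*30)/(90000*1) = 60 */
    frame = VideoTime2Frame(streamPtr, streamPtr->start_time + 180000);

    /* frame -> time:  60 * (90000*1)/(1*30) + start_time = start_time + 180000 */
    tval = VideoFrame2Time(streamPtr, frame);
    (void)tval;
}
#endif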
703
704/*
705 * ------------------------------------------------------------------------
706 *  VideoNextFrame()
707 *
708 *  Decodes a series of video packets until the end of the frame
709 *  is reached.  Updates the frameNumber and atEnd to maintain the
710 *  current status for this video stream.
711 * ------------------------------------------------------------------------
712 */
713void
714VideoNextFrame(vidPtr)
715    VideoObj *vidPtr;   /* get a frame from this video stream */
716{
717    int frameFinished;
718    uint64_t pts;
719    AVCodecContext *vcodecCtx;
720    AVStream *vstreamPtr;
721    AVPacket packet;
722
723    if (vidPtr->pFormatCtx) {
724        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
725        vcodecCtx = vstreamPtr->codec;
726
727        /*
728         * Decode as many packets as necessary to get the next frame.
729         */
730        pts = 0;
731        while (1) {
732            if (av_read_frame(vidPtr->pFormatCtx, &packet) >= 0) {
733                if (packet.stream_index == vidPtr->videoStream) {
734                    /* save pts so we can grab it again in VideoAvGetBuffer */
735                    global_video_pkt_pts = packet.pts;
736
737#ifdef HAVE_AVCODEC_DECODE_VIDEO2
738                    // new avcodec decode video function
739                    avcodec_decode_video2(vcodecCtx, vidPtr->pFrameYUV,
740                        &frameFinished, &packet);
741#else
742                    // old avcodec decode video function
743                    avcodec_decode_video(vcodecCtx, vidPtr->pFrameYUV,
744                        &frameFinished, packet.data, packet.size);
745#endif
746                    if (packet.dts == AV_NOPTS_VALUE
747                          && vidPtr->pFrameYUV->opaque
748                          && *(uint64_t*)vidPtr->pFrameYUV->opaque != AV_NOPTS_VALUE) {
749                        pts = *(uint64_t*)vidPtr->pFrameYUV->opaque;
750                    } else if (packet.dts != AV_NOPTS_VALUE) {
751                        pts = packet.dts;
752                    } else {
753                        pts = 0;
754                    }
755
756                    if (frameFinished) {
757                        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, pts);
758                        break;
759                    }
760                }
761            } else {
762                vidPtr->atEnd = 1;
763                break;
764            }
765        }
766        av_free_packet(&packet);
767    }
768}
769
770/*
771 * ------------------------------------------------------------------------
772 *  These two routines are called whenever a frame buffer is allocated,
773 *  which means that we're starting a new frame.  Grab the global pts
774 *  counter and squirrel it away in the opaque slot of the frame.  This
775 *  will give us a pts value that we can trust later.
776 * ------------------------------------------------------------------------
777 */
778int
779VideoAvGetBuffer(c,fr)
780    AVCodecContext *c;  /* codec doing the frame decoding */
781    AVFrame *fr;        /* frame being decoded */
782{
783    int rval = avcodec_default_get_buffer(c, fr);
784    uint64_t *ptsPtr = av_malloc(sizeof(uint64_t));
785    *ptsPtr = global_video_pkt_pts;
786    fr->opaque = ptsPtr;
787    return rval;
788}
789
790void
791VideoAvReleaseBuffer(c,fr)
792    AVCodecContext *c;  /* codec doing the frame decoding */
793    AVFrame *fr;        /* frame being decoded */
794{
795    if (fr && fr->opaque) {
796        av_freep(&fr->opaque);
797    }
798    avcodec_default_release_buffer(c,fr);
799}
800
801/*
802 * ------------------------------------------------------------------------
803 *  VideoInit()
804 *
805 *  Implements the body of the _ffmpeg_init method in the "video" class.
806 *  Initializes the basic data structure and stores it in the _videodata
807 *  variable within the class.
808 * ------------------------------------------------------------------------
809 */
810VideoObj *
811VideoInit()
812{
813    /*
814     * Create an object to represent this video stream.
815     */
816
817    /* Register all codecs and formats */
818    av_register_all();
819
820    return VideoSetData();
821}
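
/*
 * Typical read-side call sequence (a sketch, not compiled; the file name
 * "movie.mpg" and the frame number are placeholders).  It only illustrates
 * how the routines in this file fit together for the itcl "video" class.
 */
#ifdef notdef
static int
VideoUsageExample()
{
    VideoObj *vid;
    void *img = NULL;
    int bufSize = 0;
    int lastframe = 0;

    vid = VideoInit();                          /* register codecs, allocate object */
    if (vid == NULL) {
        return -1;
    }
    if (VideoOpenFile(vid, "movie.mpg", "r") != 0) {
        VideoCleanup(vid);
        return -1;
    }
    VideoGetPositionEnd(vid, &lastframe);       /* index of the last readable frame */
    VideoGoToN(vid, 10);                        /* seek to frame 10 */
    VideoGetImage(vid, -1, -1, &img, &bufSize); /* native-size PPM image in img */
    /* ... use the bufSize bytes at img; the buffer is owned by vid ... */
    return VideoCleanup(vid);                   /* closes the stream and frees vid */
}
#endif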
822
823/*
824 * ------------------------------------------------------------------------
825 *  VideoCleanup()
826 *
827 *  Implements the body of the _ffmpeg_cleanup method in the "video" class.
828 *  Accesses the data structure stored in the _videodata variable and
829 *  frees up the data.
830 * ------------------------------------------------------------------------
831 */
832int
833VideoCleanup(vidPtr)
834    VideoObj *vidPtr;
835{
836    /*
837     *  Nothing much to do here.  Just close the file in case it is
838     *  still open.  Don't free vidPtr itself; that is cleaned up by
839     *  the ByteArrayObj in the class data member.
840     */
841    int ret = 0;
842
843    ret -= VideoClose(vidPtr);
844
845    if (vidPtr != NULL) {
846        VideoFreeImgBuffer(vidPtr);
847        if (vidPtr->fileName != NULL) {
848            free(vidPtr->fileName);
849            vidPtr->fileName = NULL;
850        }
851        free(vidPtr);
852        vidPtr = NULL;
853// FIXME: need a test to make sure vidPtr is null after the function returns.
854    }
855
856    return ret;
857}
858
859/*
860 * ------------------------------------------------------------------------
861 *  VideoSize()
862 *
863 *  Implements the body of the "size" method in the "video" class.
864 *  Returns the size of each frame in this video stream as a list {w h}.
865 * ------------------------------------------------------------------------
866 */
867int
868VideoSize(vidPtr, width, height)
869    VideoObj *vidPtr;
870    int *width;
871    int *height;
872{
873    AVCodecContext *vcodecCtx;
874
875    if (vidPtr == NULL) {
876        return -1;
877    }
878
879    if (vidPtr->pFormatCtx == NULL) {
880        // "internal error: video stream is not open",
881        return -1;
882    }
883
884    if (vidPtr->pFormatCtx) {
885        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
886        if (width != NULL) {
887            *width = vcodecCtx->width;
888        }
889        if (height != NULL) {
890            *height = vcodecCtx->height;
891        }
892    }
893    return 0;
894}
895
896/*
897 * ------------------------------------------------------------------------
898 *  VideoGo()
899 *
900 *  Implements the body of the "go" method in the "video" class.
901 *  Advances by one or more frames, or seeks backward in the stream.
902 *  Handles the following syntax:
903 *    obj go next ...... go to next frame (same as +1)
904 *    obj go +n ........ advance by n frames
905 *    obj go -n ........ go back by n frames
906 *    obj go n ......... go to frame n
907 * ------------------------------------------------------------------------
908 */
909int
910VideoGoNext(vidPtr)
911    VideoObj *vidPtr;
912{
913    int nabs;
914
915    if (vidPtr == NULL) {
916        return -1;
917    }
918
919    nabs = vidPtr->frameNumber + 1;
920    return VideoGoToN(vidPtr, nabs);
921}
922
923int
924VideoGoPlusMinusN(vidPtr, n)
925    VideoObj *vidPtr;
926    int n;
927{
928    int nabs;
929
930    if (vidPtr == NULL) {
931        return -1;
932    }
933
934    nabs = vidPtr->frameNumber + n;
935    return VideoGoToN(vidPtr, nabs);
936}
937
938int
939VideoGoToN(vidPtr, n)
940    VideoObj *vidPtr;
941    int n;
942{
943    int nrel, nabs, seekFlags, gotframe;
944    int64_t nseek;
945    AVStream *vstreamPtr;
946
947    if (vidPtr == NULL) {
948        return -1;
949    }
950
951    if (vidPtr->pFormatCtx == NULL) {
952        // "internal error: video stream is not open",
953        return -1;
954    }
955
956    nabs = n;
957
958    if (nabs < 0) {
959        nabs = 0;
960    }
961
962    if (nabs < vidPtr->frameNumber) {
963        seekFlags = AVSEEK_FLAG_BACKWARD;
964    } else {
965        seekFlags = 0;
966    }
967
968    /*
969     * If we're going to an absolute frame, or if we're going backward
970     * or too far forward, then seek the frame.
971     */
972    nrel = nabs-vidPtr->frameNumber;
973    if ((nrel > 50) || (seekFlags&AVSEEK_FLAG_BACKWARD)) {
974
975        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
976        nseek = VideoFrame2Time(vstreamPtr, nabs);
977        // not sure why it is checking against the number 100
978        if (nseek > 100) {
979            nseek -= 100;
980        } else {
981            nseek = 0;
982        }
983
984        /* first, seek the nearest reference frame for a good starting pt */
985        av_seek_frame(vidPtr->pFormatCtx, vidPtr->videoStream,
986            nseek, seekFlags);
987
988        // this doesn't seem to give me back the true frame number
989        // feels like it is more of a reverse of the VideoFrame2Time call
990        // because vidPtr->frameNumber always equals nabs
991        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, nseek);
992        vidPtr->atEnd = 0;
993
994        /* read the frame to figure out what the frame number is */
995        VideoNextFrame(vidPtr);
996
997        /* then, move forward until we reach the desired frame */
998        gotframe = 0;
999        while (vidPtr->frameNumber < nabs && !vidPtr->atEnd) {
1000            VideoNextFrame(vidPtr);
1001            gotframe = 1;
1002        }
1003
1004        /* get at least one frame, unless we're done or at the beginning */
1005        if (!gotframe && !vidPtr->atEnd) {
1006            if (vidPtr->frameNumber > nabs) {
1007                // we are probably at a key frame, just past
1008                // the requested frame and need to seek backwards.
1009                VideoGoToN(vidPtr,n);
1010            } else {
1011                VideoNextFrame(vidPtr);
1012            }
1013        }
1014    }
1015    else {
1016        while (nrel-- > 0) {
1017            VideoNextFrame(vidPtr);
1018        }
1019    }
1020
1021    /*
1022     * Send back the current frame number or "end" as the result.
1023     */
1024    return vidPtr->frameNumber;
1025}
1026
1027/*
1028 * ------------------------------------------------------------------------
1029 *  VideoGet()
1030 *
1031 *  Implements the body of the "get" method in the "video" class.
1032 *  Returns information about the current frame via the following
1033 *  syntax:
1034 *    obj get start|position|end
1035 *    obj get <imageHandle>
1036 * ------------------------------------------------------------------------
1037 */
1038int
1039VideoGetImage(vidPtr, iw, ih, img, bufSize)
1040    VideoObj *vidPtr;
1041    int iw;
1042    int ih;
1043    void **img;
1044    int *bufSize;
1045{
1046
1047    int numBytes;
1048    AVCodecContext *vcodecCtx;
1049
1050    if (vidPtr == NULL) {
1051        return -1;
1052    }
1053
1054    if (VideoModeRead(vidPtr) != 0) {
1055        return -1;
1056    }
1057
1058    /*
1059    if (vidPtr->pFormatCtx) {
1060        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1061    } else {
1062        vcodecCtx = NULL;
1063    }
1064    */
1065
1066    if (vidPtr->pFormatCtx == NULL) {
1067        // vidPtr->pFormatCtx is NULL, video not open
1068        return -1;
1069    }
1070    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1071
1072    /*
1073     * Query the size for this photo and make sure that we have a
1074     * buffer of the appropriate size for software scaling and
1075     * format conversion.
1076     */
1077
1078    // if the user's desired size is less than 0,
1079    // use the default size
1080
1081    if (iw < 0) {
1082        iw = vcodecCtx->width;
1083    }
1084    if (ih < 0) {
1085        ih = vcodecCtx->height;
1086    }
1087
1088
1089    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
1090        if (vidPtr->rgbbuffer) {
1091            av_free(vidPtr->rgbbuffer);
1092            vidPtr->rgbbuffer = NULL;
1093        }
1094        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
1095        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
1096        vidPtr->rgbw = iw;
1097        vidPtr->rgbh = ih;
1098
1099        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
1100            PIX_FMT_RGB24, iw, ih);
1101
1102        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
1103            vcodecCtx->width, vcodecCtx->height, vcodecCtx->pix_fmt,
1104            iw, ih, PIX_FMT_RGB24, SWS_BICUBIC|SWS_PRINT_INFO, NULL, NULL, NULL);
1105    }
1106
1107    /*
1108     * Rescale the current frame to the desired size, and translate
1109     * into RGB format so we can copy into the destination image.
1110     */
1111    if (vidPtr->pFrameYUV && vidPtr->pFrameYUV->data[0]) {
1112        sws_scale(vidPtr->scalingCtx, (const uint8_t * const*)
1113            vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize,
1114            0, vcodecCtx->height,
1115            vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize);
1116
1117/*
1118        iblock.pixelPtr  = (unsigned char*)vidPtr->pFrameRGB->data[0];
1119        iblock.width     = iw;
1120        iblock.height    = ih;
1121        iblock.pitch     = vidPtr->pFrameRGB->linesize[0];
1122        iblock.pixelSize = 3;
1123        iblock.offset[0] = 0;
1124        iblock.offset[1] = 1;
1125        iblock.offset[2] = 2;
1126        iblock.offset[3] = 0;
1127
1128        Tk_PhotoPutBlock_NoComposite(img, &iblock, 0, 0, iw, ih);
1129*/
1130
1131        if (vidPtr->img == NULL) {
1132            VideoAllocImgBuffer(vidPtr,iw,ih);
1133        } else {
1134            if ((vidPtr->imgWidth != iw) || (vidPtr->imgHeight != ih)) {
1135                // new height or width
1136                // resize the image buffer
1137                free(vidPtr->img);
1138                VideoAllocImgBuffer(vidPtr,iw,ih);
1139            }
1140        }
1141
1142        // Write pixel data
1143        memcpy(vidPtr->img+vidPtr->imgHeaderLen,
1144            vidPtr->pFrameRGB->data[0],
1145            vidPtr->imgWidth*3*vidPtr->imgHeight);
1146    }
1147    *img = vidPtr->img;
1148    *bufSize = (vidPtr->imgWidth*3*vidPtr->imgHeight) + vidPtr->imgHeaderLen;
1149    return 0;
1150}
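
/*
 * The buffer handed back through *img is a complete binary PPM (P6)
 * image: the text header written by VideoAllocImgBuffer() below,
 * followed by imgWidth*imgHeight RGB triplets.  A caller could dump the
 * current frame to disk as follows (a sketch, not compiled; it assumes
 * <stdio.h> and an arbitrary output file name):
 */
#ifdef notdef
static int
VideoDumpFrameExample(vidPtr)
    VideoObj *vidPtr;
{
    void *img = NULL;
    int bufSize = 0;
    FILE *fp;

    if (VideoGetImage(vidPtr, -1, -1, &img, &bufSize) != 0) {
        return -1;
    }
    fp = fopen("frame.ppm", "wb");
    if (fp == NULL) {
        return -1;
    }
    fwrite(img, 1, bufSize, fp);    /* header + pixel data in one block */
    fclose(fp);
    return 0;
}
#endif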
1151
1152int
1153VideoFrameRate (vidPtr, fr)
1154    VideoObj *vidPtr;
1155    double *fr;
1156{
1157    AVStream *vstreamPtr;
1158
1159    if (vidPtr == NULL) {
1160        return -1;
1161    }
1162
1163    if (fr == NULL) {
1164        return -1;
1165    }
1166
1167    if (vidPtr->pFormatCtx == NULL) {
1168        // vidPtr->pFormatCtx is NULL, video not open
1169        return -1;
1170    }
1171    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1172
1173    // http://trac.handbrake.fr/browser/trunk/libhb/decavcodec.c?rev=1490#L684
1174    // there seems to be some controversy over what structure holds
1175    // the correct frame rate information for different video codecs.
1176    // for now we will use the stream's r_frame_rate.
1177    // from the above post, it looks like this value can be interpreted
1178    // as frames per second.
1179    *fr = av_q2d(vstreamPtr->r_frame_rate);
1180
1181    return 0;
1182}
1183
1184int
1185VideoFileName (vidPtr, fname)
1186    VideoObj *vidPtr;
1187    const char **fname;
1188{
1189    if (vidPtr == NULL) {
1190        return -1;
1191    }
1192
1193    if (fname == NULL) {
1194        return -1;
1195    }
1196
1197    if (vidPtr->pFormatCtx == NULL) {
1198        // vidPtr->pFormatCtx is NULL, video not open
1199        return -1;
1200    }
1201
1202    *fname = vidPtr->fileName;
1203
1204    return 0;
1205}
1206
1207int
1208VideoPixelAspectRatio (vidPtr, num, den)
1209    VideoObj *vidPtr;
1210    int *num;
1211    int *den;
1212{
1213    AVCodecContext *vcodecCtx;
1214
1215    if (vidPtr == NULL) {
1216        return -1;
1217    }
1218
1219    if ((num == NULL) || (den == NULL)) {
1220        return -1;
1221    }
1222
1223    if (vidPtr->pFormatCtx == NULL) {
1224        // vidPtr->pFormatCtx is NULL, video not open
1225        return -1;
1226    }
1227
1228    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1229
1230    *num = vcodecCtx->sample_aspect_ratio.num;
1231    *den = vcodecCtx->sample_aspect_ratio.den;
1232
1233    return 0;
1234}
1235
1236int
1237VideoDisplayAspectRatio (vidPtr, num, den)
1238    VideoObj *vidPtr;
1239    int *num;
1240    int *den;
1241{
1242    int width = 0;
1243    int height = 0;
1244    int64_t gcd = 0;
1245
1246    if (vidPtr == NULL) {
1247        return -1;
1248    }
1249
1250    if ((num == NULL) || (den == NULL)) {
1251        return -1;
1252    }
1253
1254    if (vidPtr->pFormatCtx == NULL) {
1255        // vidPtr->pFormatCtx is NULL, video not open
1256        return -1;
1257    }
1258
1259    VideoSize(vidPtr, &width, &height);
1260    VideoPixelAspectRatio(vidPtr, num, den);
1261
1262    width = (*num)*width;
1263    height = (*den)*height;
1264#ifdef FFMPEG_COMMON_H
1265    // old gcd function
1266    gcd = ff_gcd(FFABS(width), FFABS(height));
1267#else
1268    // new gcd function
1269    gcd = av_gcd(FFABS(width), FFABS(height));
1270#endif
1271
1272
1273    *num = width/gcd;
1274    *den = height/gcd;
1275
1276    if (*den == 0) {
1277        *num = 0;
1278        *den = 1;
1279    }
1280
1281    return 0;
1282}
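
/*
 * Worked example (illustration only): for a 720x480 stream with an 8:9
 * pixel aspect ratio, the code above computes 8*720 : 9*480 = 5760:4320;
 * dividing both by their gcd (1440) gives a 4:3 display aspect ratio.
 */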
1283
1284int
1285VideoAllocImgBuffer(vidPtr, width, height)
1286    VideoObj *vidPtr;
1287    int width;
1288    int height;
1289{
1290
1291    char header[64];
1292    int headerLen = 0;
1293    int bufsize = 0;
1294
1295    sprintf(header,"P6\n%d %d\n255\n", width, height);
1296    headerLen = strlen(header);
1297    bufsize = headerLen + (width*3*height);
1298    vidPtr->img = (void*) malloc(bufsize);
1299    vidPtr->imgHeaderLen = headerLen;
1300    vidPtr->imgWidth = width;
1301    vidPtr->imgHeight = height;
1302    memcpy(vidPtr->img,header,headerLen);
1303
1304    return 0;
1305}
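
/*
 * Example of the resulting layout (illustration only): for a 640x480
 * frame the header is "P6\n640 480\n255\n" (15 bytes), so
 * bufsize = 15 + 640*3*480 = 921615 bytes, with the RGB pixel data
 * starting at offset imgHeaderLen.
 */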
1306
1307int
1308VideoFreeImgBuffer(vidPtr)
1309    VideoObj *vidPtr;
1310{
1311    if ((vidPtr != NULL) && (vidPtr->img != NULL)) {
1312        free(vidPtr->img);
1313        vidPtr->img = NULL;
1314    }
1315    return 0;
1316}
1317
1318int
1319VideoGetPositionCur(vidPtr, pos)
1320    VideoObj *vidPtr;      /* video object to act on */
1321    int *pos;
1322{
1323    int fnum = -1;
1324
1325    if (vidPtr == NULL) {
1326        return -1;
1327    }
1328
1329    if (pos == NULL) {
1330        return -1;
1331    }
1332
1333    if (VideoModeRead(vidPtr) != 0) {
1334        return -1;
1335    }
1336
1337    if (vidPtr->pFormatCtx) {
1338        fnum = vidPtr->frameNumber;
1339    }
1340
1341    *pos = fnum;
1342    return 0;
1343}
1344
1345int
1346VideoGetPositionEnd(vidPtr, pos)
1347    VideoObj *vidPtr;      /* video object to act on */
1348    int *pos;
1349{
1350    if (vidPtr == NULL) {
1351        return -1;
1352    }
1353
1354    if (pos == NULL) {
1355        return -1;
1356    }
1357
1358    if (VideoModeRead(vidPtr) != 0) {
1359        return -1;
1360    }
1361
1362    *pos = vidPtr->lastframe;
1363    return 0;
1364}
1365
1366// FIXME: get this function working
1367///*
1368// * ------------------------------------------------------------------------
1369// *  VideoPut()
1370// *
1371// *  Implements the body of the "put" method in the "video" class.
1372// *  Stores a single frame into the video stream:
1373// *    obj put <imageHandle>
1374// * ------------------------------------------------------------------------
1375// */
1376//int
1377//VideoPut(cdata, interp, argc, argv)
1378//    ClientData cdata;      /* not used */
1379//    Tcl_Interp *interp;    /* interpreter */
1380//    int argc;              /* number of arguments */
1381//    CONST84 char* argv[];  /* argument strings */
1382//{
1383//    VideoObj *vidPtr;
1384//    int iw, ih, numBytes, roffs, goffs, boffs;
1385//    char buffer[64];
1386//    unsigned char* photodata;
1387//    uint8_t* rgbdata;
1388//    Tk_PhotoHandle img;
1389//    Tk_PhotoImageBlock iblock;
1390//    AVCodecContext *codecCtx;
1391//
1392//    if (VideoGetData(interp, &vidPtr) != TCL_OK) {
1393//        return TCL_ERROR;
1394//    }
1395//
1396//    if (argc != 2) {
1397//        Tcl_AppendResult(interp, "wrong # args: should be \"", argv[0],
1398//            " image\"", (char*)NULL);
1399//        return TCL_ERROR;
1400//    }
1401//
1402//    /*
1403//     * Get the name of the image and copy from it.
1404//     */
1405//    img = Tk_FindPhoto(interp, argv[1]);
1406//    if (img == NULL) {
1407//        Tcl_AppendResult(interp, "bad value \"", argv[1],
1408//            "\": expected photo image", (char*)NULL);
1409//        return TCL_ERROR;
1410//    }
1411//
1412//    /*
1413//     * Query the size for this photo and make sure that we have a
1414//     * buffer of the appropriate size for software scaling and
1415//     * format conversion.
1416//     */
1417//    Tk_PhotoGetImage(img, &iblock);
1418//    Tk_PhotoGetSize(img, &iw, &ih);
1419//
1420//    if (VideoModeWrite(interp, iw, ih) != TCL_OK) {
1421//        return TCL_ERROR;
1422//    }
1423//    codecCtx = vidPtr->outVideoStr->codec;
1424//
1425//    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
1426//        if (vidPtr->rgbbuffer) {
1427//            av_free(vidPtr->rgbbuffer);
1428//            vidPtr->rgbbuffer = NULL;
1429//        }
1430//        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
1431//        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
1432//        vidPtr->rgbw = iw;
1433//        vidPtr->rgbh = ih;
1434//
1435//        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
1436//            PIX_FMT_RGB24, iw, ih);
1437//
1438//        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
1439//            iw, ih, PIX_FMT_RGB24,
1440//            codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
1441//            SWS_BICUBIC, NULL, NULL, NULL);
1442//    }
1443//
1444//    /*
1445//     * Copy the data from the Tk photo block into the RGB frame.
1446//     */
1447//    roffs = iblock.offset[0];
1448//    goffs = iblock.offset[1];
1449//    boffs = iblock.offset[2];
1450//
1451//    for (ih=0; ih < iblock.height; ih++) {
1452//        rgbdata = vidPtr->pFrameRGB->data[0] + ih*vidPtr->pFrameRGB->linesize[0];
1453//        photodata = iblock.pixelPtr + ih*iblock.pitch;
1454//        for (iw=0; iw < iblock.width; iw++) {
1455//            rgbdata[0] = photodata[roffs];
1456//            rgbdata[1] = photodata[goffs];
1457//            rgbdata[2] = photodata[boffs];
1458//            rgbdata += 3;
1459//            photodata += iblock.pixelSize;
1460//        }
1461//    }
1462//
1463//    /*
1464//     * Rescale the current frame to the desired size, and translate
1465//     * from RGB to YUV so we can give the frame to the codec.
1466//     */
1467//    sws_scale(vidPtr->scalingCtx,
1468//        vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize,
1469//        0, ih,
1470//        vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize);
1471//
1472//    numBytes = VideoWriteFrame(vidPtr, vidPtr->pFrameYUV);
1473//    if (numBytes < 0) {
1474//        Tcl_AppendResult(interp, "error in av_write_frame()", (char*)NULL);
1475//        return TCL_ERROR;
1476//    }
1477//    sprintf(buffer, "frame %d (%d bytes)", vidPtr->frameNumber++, numBytes);
1478//    Tcl_SetResult(interp, buffer, TCL_VOLATILE);
1479//    return TCL_OK;
1480//}
1481
1482
1483/*
1484 * ------------------------------------------------------------------------
1485 *  VideoWriteFrame()
1486 *
1487 *  Used internally to write a single frame out to the output stream.
1488 *  Returns the number of bytes written to the frame, or -1 if an error
1489 *  occurred.
1490 * ------------------------------------------------------------------------
1491 */
1492int
1493VideoWriteFrame(vidPtr, framePtr)
1494    VideoObj *vidPtr;      /* video object being updated */
1495    AVFrame *framePtr;     /* picture frame being written out */
1496{
1497    int numBytes;
1498    AVCodecContext *codecCtx;
1499    AVPacket pkt;
1500
1501#define OUTBUF_SIZE 500000
1502    uint8_t outbuf[OUTBUF_SIZE];
1503
1504    codecCtx = vidPtr->outVideoStr->codec;
1505    numBytes = avcodec_encode_video(codecCtx, outbuf, OUTBUF_SIZE, framePtr);
1506
1507    if (numBytes > 0) {
1508        av_init_packet(&pkt);
1509
1510        if (codecCtx->coded_frame->pts != AV_NOPTS_VALUE) {
1511            pkt.pts = av_rescale_q(codecCtx->coded_frame->pts,
1512                codecCtx->time_base,
1513                vidPtr->outVideoStr->time_base);
1514        }
1515        if (codecCtx->coded_frame->key_frame) {
1516            pkt.flags |= AV_PKT_FLAG_KEY;
1517        }
1518        pkt.stream_index = vidPtr->outVideoStr->index;
1519        pkt.data = outbuf;
1520        pkt.size = numBytes;
1521
1522        /* write the compressed frame in the media file */
1523        if (av_write_frame(vidPtr->outFormatCtx, &pkt) != 0) {
1524            return -1;
1525        }
1526    }
1527    return numBytes;
1528}
1529
1530#ifdef notdef
1531/*
1532 * ------------------------------------------------------------------------
1533 *  VideoTransform()
1534 *
1535 *  Implements the body of the "transform" method in the "video" class.
1536 *  Translates one value into another--times into frames, etc.  Handles
1537 *  the following syntax:
1538 *    obj transform frames2duration <frames>
1539 *    obj transform duration2frames <duration>
1540 * ------------------------------------------------------------------------
1541 */
1542double
1543VideoTransformFrames2Duration(vidPtr, frame)
1544    VideoObj *vidPtr;
1545    int frame;
1546{
1547    double duration;
1548    AVStream *vstreamPtr;
1549    AVRational hundred;
1550    int64_t tval;
1551
1552    hundred.num = 100;
1553    hundred.den = 1;
1554
1555    if (vidPtr == NULL) {
1556        return -1;
1557    }
1558
1559    if (vidPtr->pFormatCtx == NULL) {
1560//        Tcl_AppendResult(interp, "can't compute transformations:",
1561//            " stream not opened", (char*)NULL);
1562//        return TCL_ERROR;
1563        return -1;
1564    }
1565
1566    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1567
1568    tval = av_rescale_q((int64_t)frame, hundred, vstreamPtr->r_frame_rate);
1569    duration = 0.01*tval;
1570
1571    return duration;
1572}
1573
1574int
1575VideoTransformDuration2Frames(vidPtr, duration)
1576    VideoObj *vidPtr;
1577    double duration;
1578{
1579    int frames;
1580    AVStream *vstreamPtr;
1581    AVRational hundred;
1582    int64_t tval;
1583
1584    hundred.num = 100;
1585    hundred.den = 1;
1586
1587    if (vidPtr == NULL) {
1588        return -1;
1589    }
1590    if (vidPtr->pFormatCtx == NULL) {
1591//        Tcl_AppendResult(interp, "can't compute transformations:",
1592//            " stream not opened", (char*)NULL);
1593//        return TCL_ERROR;
1594        return -1;
1595    }
1596
1597    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1598
1599    tval = (int64_t)(duration*100);
1600    frames = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
1601    // check above for overflow
1602    // tval = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
1603    // sprintf(buffer, "%lld", tval);
1604
1605    return frames;
1606}
1607#endif
1608
1609/*
1610 * ------------------------------------------------------------------------
1611 *  VideoClose()
1612 *
1613 *  Implements the body of the _ffmpeg_close method in the "video" class.
1614 *  Closes any file opened previously by the open methods for read/write.
1615 *  If nothing is open, this does nothing.
1616 * ------------------------------------------------------------------------
1617 */
1618int
1619VideoClose(vidPtr)
1620    VideoObj *vidPtr;
1621{
1622    AVCodecContext *vcodecCtx;
1623    int i;
1624
1625    if (vidPtr == NULL) {
1626        return -1;
1627    }
1628
1629    if (vidPtr->yuvbuffer) {
1630        av_free(vidPtr->yuvbuffer);
1631        vidPtr->yuvbuffer = NULL;
1632        vidPtr->yuvw = 0;
1633        vidPtr->yuvh = 0;
1634    }
1635    if (vidPtr->pFrameYUV) {
1636        av_free(vidPtr->pFrameYUV);
1637        vidPtr->pFrameYUV = NULL;
1638    }
1639
1640    if (vidPtr->rgbbuffer) {
1641        av_free(vidPtr->rgbbuffer);
1642        vidPtr->rgbbuffer = NULL;
1643        vidPtr->rgbw = 0;
1644        vidPtr->rgbh = 0;
1645    }
1646    if (vidPtr->pFrameRGB) {
1647        av_free(vidPtr->pFrameRGB);
1648        vidPtr->pFrameRGB = NULL;
1649    }
1650
1651    if (vidPtr->scalingCtx) {
1652        sws_freeContext(vidPtr->scalingCtx);
1653        vidPtr->scalingCtx = NULL;
1654    }
1655    if (vidPtr->pFormatCtx && vidPtr->videoStream >= 0) {
1656        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1657        if (vcodecCtx) {
1658            avcodec_close(vcodecCtx);
1659        }
1660    }
1661    if (vidPtr->pFormatCtx) {
1662#ifdef HAVE_AVFORMAT_CLOSE_INPUT
1663        avformat_close_input(&vidPtr->pFormatCtx);
1664#else
1665        av_close_input_file(vidPtr->pFormatCtx);
1666#endif
1667        vidPtr->pFormatCtx = NULL;
1668    }
1669
1670    if (vidPtr->outFormatCtx) {
1671        while (VideoWriteFrame(vidPtr, NULL) > 0)
1672            ; /* write out any remaining frames */
1673
1674        av_write_trailer(vidPtr->outFormatCtx);
1675
1676        for (i=0; i < vidPtr->outFormatCtx->nb_streams; i++) {
1677            avcodec_close(vidPtr->outFormatCtx->streams[i]->codec);
1678            av_freep(&vidPtr->outFormatCtx->streams[i]->codec);
1679            av_freep(&vidPtr->outFormatCtx->streams[i]);
1680        }
1681
1682        if (vidPtr->outFormatCtx->pb) {
1683            avio_close(vidPtr->outFormatCtx->pb);
1684        }
1685
1686        av_free(vidPtr->outFormatCtx);
1687        vidPtr->outFormatCtx = NULL;
1688    }
1689
1690    /* reset the mode to null */
1691    *vidPtr->mode = '\0';
1692
1693    return 0;
1694}