source: tags/1.2.0/video/RpVideo.c @ 5106

Last change on this file since 5106 was 3177, checked in by mmc, 12 years ago

Updated all of the copyright notices to reference the transfer to
the new HUBzero Foundation, LLC.

1/*
2 * ----------------------------------------------------------------------
3 *  TkFFMPEG:  video
4 *
5 *  These routines support the methods in the "video" class, which is
6 *  a video stream that can be read from or written to.  The class
7 *  itself is defined in itcl, but when methods are called, execution
8 *  jumps down to this level.
9 * ======================================================================
10 *  AUTHOR:  Michael McLennan, Purdue University
11 *  Copyright (c) 2004-2012  HUBzero Foundation, LLC
12 *
13 *  See the file "license.terms" for information on usage and
14 *  redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
15 * ======================================================================
16 */
17
18#include <stdlib.h>
19#include <string.h>
20
21#include "config.h"
22
23#ifdef HAVE_FFMPEG_AVCODEC_H
24# include <ffmpeg/avcodec.h>
25#endif
26
27#ifdef HAVE_LIBAVCODEC_AVCODEC_H
28# include <libavcodec/avcodec.h>
29#endif
30
31#ifdef HAVE_FFMPEG_AVFORMAT_H
32# include <ffmpeg/avformat.h>
33#endif
34
35#ifdef HAVE_LIBAVFORMAT_AVFORMAT_H
36# include <libavformat/avformat.h>
37#endif
38
39#ifdef HAVE_FFMPEG_AVUTIL_H
40# include <ffmpeg/avutil.h>
41#endif
42
43#ifdef HAVE_LIBAVUTIL_AVUTIL_H
44# include <libavutil/avutil.h>
45#endif
46
47#ifdef HAVE_FFMPEG_SWSCALE_H
48# include <ffmpeg/swscale.h>
49#endif
50
51#ifdef HAVE_LIBSWSCALE_SWSCALE_H
52# include <libswscale/swscale.h>
53#endif
54
55#include "RpVideo.h"
56
57#ifndef HAVE_AVMEDIA_TYPE_VIDEO
58#define AVMEDIA_TYPE_VIDEO      CODEC_TYPE_VIDEO
59#endif  /* HAVE_AVMEDIA_TYPE_VIDEO */
60
61#ifndef AV_PKT_FLAG_KEY
62#define AV_PKT_FLAG_KEY         PKT_FLAG_KEY           
63#endif
64
65#ifndef HAVE_AVIO_CLOSE
66#define avio_close              url_fclose
67#endif
68
69/*
70 * Each video object is represented by the following data:
71 */
72struct VideoObjRec {
73    int magic;
74
75    /* video input */
76    AVFormatContext *pFormatCtx;
77    int videoStream;
78    int frameNumber;
79    int atEnd;
80
81    /* video output */
82    AVFormatContext *outFormatCtx;
83    AVStream *outVideoStr;
84
85    /* used for both input/output */
86    AVFrame *pFrameYUV;
87    uint8_t *yuvbuffer;
88    int yuvw, yuvh;
89    AVFrame *pFrameRGB;
90    uint8_t *rgbbuffer;
91    int rgbw, rgbh;
92    struct SwsContext *scalingCtx;
93
94    char *fileName;
95    char mode[64];
96    char fmt[64];
97    int lastframe;
98
99    /* tmp buffer to give images back to user */
100    void *img;
101    int imgHeaderLen;
102    int imgWidth;
103    int imgHeight;
104};
105
106/* magic stamp for VideoObj, to make sure data is valid */
107#define VIDEO_OBJ_MAGIC 0x0102abcd
108
109static VideoObj *VideoSetData ();
110
111static int VideoModeRead (VideoObj *vidPtr);
112// static int VideoModeWrite (Tcl_Interp *interp, int w, int h);
113
114static int VideoTime2Frame (AVStream *streamPtr, int64_t tval);
115static int64_t VideoFrame2Time (AVStream *streamPtr, int fval);
116static void VideoNextFrame (VideoObj *vidPtr);
117
118uint64_t global_video_pkt_pts = AV_NOPTS_VALUE;
119static int VideoAvGetBuffer (struct AVCodecContext *c, AVFrame *fr);
120static void VideoAvReleaseBuffer (struct AVCodecContext *c, AVFrame *fr);
121static int VideoWriteFrame (VideoObj *vidPtr, AVFrame *framePtr);
122
123static int VideoAllocImgBuffer (VideoObj *vidPtr, int width, int height);
124static int VideoFreeImgBuffer (VideoObj *vidPtr);
125static double VideoTransformFrames2Duration (VideoObj *vidPtr, int frame);
126static int VideoTransformDuration2Frames (VideoObj *vidPtr, double duration);
127
128/*
129 * ------------------------------------------------------------------------
130 *  VideoSetData()
131 *
132 *  Saves VideoObj data in the "_videodata" slot in the current object
133 *  context.  The data can be retrieved later by calling VideoGetData().
134 * ------------------------------------------------------------------------
135 */
136VideoObj *
137VideoSetData()
138{
139    VideoObj* vid = NULL;
140
141    vid = malloc(sizeof(VideoObj));
142
143    if (vid == NULL) {
144        return NULL;
145    }
146
147    vid->magic = VIDEO_OBJ_MAGIC;
148    vid->pFormatCtx = NULL;
149    vid->videoStream = 0;
150    vid->frameNumber = -1;
151    vid->atEnd = 0;
152
153    vid->outFormatCtx = NULL;
154    vid->outVideoStr = NULL;
155
156    vid->pFrameYUV = NULL;
157    vid->yuvbuffer = NULL;
158    vid->yuvw = 0;
159    vid->yuvh = 0;
160    vid->pFrameRGB = NULL;
161    vid->rgbbuffer = NULL;
162    vid->rgbw = 0;
163    vid->rgbh = 0;
164    vid->scalingCtx = NULL;
165
166    vid->fileName = NULL;
167    *vid->mode = '\0';
168    *vid->fmt = '\0';
169    vid->lastframe = 0;
170
171    vid->img = NULL;
172    vid->imgHeaderLen = 0;
173    vid->imgWidth = 0;
174    vid->imgHeight = 0;
175
176    return vid;
177}
178
179/*
180 * ------------------------------------------------------------------------
181 *  VideoFindLastFrame()
182 *
183 *  Find the last readable frame.
184 * ------------------------------------------------------------------------
185 */
186int
187VideoFindLastFrame(vidPtr,lastframe)
188    VideoObj *vidPtr;
189    int *lastframe;
190{
191    int f = 0;
192    int nframe = 0;
193    int cur = 0;
194    AVStream *vstreamPtr;
195
196    if (vidPtr == NULL) {
197        return -1;
198    }
199
200    if (lastframe == NULL) {
201        return -1;
202    }
203
204    if (VideoModeRead(vidPtr) != 0) {
205        return -1;
206    }
207
208    // calculate an estimate of the last frame
209    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
210    nframe = VideoTime2Frame(vstreamPtr,
211        vstreamPtr->start_time + vstreamPtr->duration);
212
213    // get the real last readable frame
214    // is 50 frames far enough to go back
215    // to be outside of the last key frame?
216    f = vidPtr->frameNumber;
217    cur = VideoGoToN(vidPtr,nframe-50);
218    while (cur != nframe) {
219        cur = nframe;
220        nframe = VideoGoNext(vidPtr);
221    }
222    *lastframe = nframe;
223    VideoGoToN(vidPtr,f);
224
225    return 0;
226}
227
228
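/*
 * ------------------------------------------------------------------------
 *  VideoOpenFile()
 *
 *  Stores the given file name in the video object and opens the file in
 *  the requested mode.  Only single-character modes are accepted for now:
 *  "r" opens the stream for reading and locates the last readable frame,
 *  while "w" is currently a placeholder (VideoModeWrite() is disabled).
 *  Returns 0 on success and a negative value on error.
 * ------------------------------------------------------------------------
 */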
229int
230VideoOpenFile(vidPtr, fileName, mode)
231    VideoObj *vidPtr;
232    const char *fileName;
233    const char *mode;
234{
235    int fnlen = 0;
236    int err = 0;
237    int lastframe = 0;
238
239    if (fileName == NULL) {
240        // missing value for fileName
241        // return TCL_ERROR;
242        return -1;
243    }
244    if (*fileName == '\0') {
245        /* no file name set -- do nothing */
246        return 0;
247    }
248
249    fnlen = strlen(fileName);
250    if (vidPtr->fileName != NULL) {
251        free(vidPtr->fileName);
252    }
253    vidPtr->fileName = (char *) malloc((fnlen+1)*sizeof(char));
254    if (vidPtr->fileName == NULL) {
255        // trouble mallocing space
256        return -1;
257    }
258    strncpy(vidPtr->fileName,fileName,fnlen);
259    vidPtr->fileName[fnlen] = '\0';
260
261    // FIXME: remove this constraint when we support
262    // the modes: r, r+, w, w+, a, a+, b and combinations
263    if (strlen(mode) > 1) {
264        return -1;
265    }
266
267    if (*mode == 'r') {
268        /* we're now in "input" mode */
269        err = VideoModeRead(vidPtr);
270        if (err) {
271            return err;
272        }
273
274        VideoFindLastFrame(vidPtr,&lastframe);
275        vidPtr->lastframe = lastframe;
276    } else if (*mode == 'w') {
277        /* we're now in "output" mode */
278        // VideoModeWrite(vidPtr);
279    } else {
280        // unrecognized mode
281        return -1;
282    }
283
284    return 0;
285}
286
287
288/*
289 * ------------------------------------------------------------------------
290 *  VideoModeRead()
291 *
292 *  Tries to force this video stream into "read" mode.  If the current
293 *  mode is "", then the -file is opened for reading.  If the current
294 *  mode is "write", then the stream is closed and then opened for
295 *  reading.  If the current mode is "read", then this call does nothing.
296 *  Usually called just before a "read" operation (get, go, etc.) is
297 *  performed.
298 *
299 *  Returns 0 if successful, or a negative error code if there is a
300 *  problem opening or closing the stream.
301 *
302 *  Error Codes
303 *  -1
304 *  -2      missing file name
305 *  -3      couldn't open file
306 *  -4      couldn't find streams in file
307 *  -5      couldn't find video stream in file
308 *  -6      unsupported codec for file
309 *  -7      couldn't open codec for file
310 *  -8      couldn't allocate frame space
311 *  -9      strcpy input to vidPtr->mode failed
312 * ------------------------------------------------------------------------
313 */
314int
315VideoModeRead(vidPtr)
316    VideoObj *vidPtr;
317{
318    int i;
319    const char *fmt;
320    AVCodecContext *vcodecCtx;
321    AVCodec *vcodec;
322
323    if (vidPtr == NULL) {
324        return -1;
325    }
326
327    if (vidPtr->fileName == NULL) {
328        // Tcl_AppendResult(interp, "missing value for -file", (char*)NULL);
329        // return TCL_ERROR;
330
331        // missing file name
332        return -2;
333    }
334    if (*vidPtr->fileName == '\0') {
335        /* no file name set -- do nothing */
336        return 0;
337    }
338
339    if (strcmp(vidPtr->mode,"input") == 0) {
340        return 0;
341    } else if (strcmp(vidPtr->mode,"output") == 0) {
342        if (VideoClose(vidPtr) != 0) {
343            return -1;
344        }
345    }
346
347    /*
348     * Open the video stream from that file.
349     */
350#ifdef HAVE_AVFORMAT_OPEN_INPUT
351    if (avformat_open_input(&vidPtr->pFormatCtx, vidPtr->fileName, NULL,
352        NULL) != 0) {
353        return -3;
354    }
355#else
356    if (av_open_input_file(&vidPtr->pFormatCtx, vidPtr->fileName,
357            NULL, 0, NULL) != 0) {
358        return -3;
359    }
360#endif
361#ifdef HAVE_AVFORMAT_FIND_STREAM_INFO
362    if (avformat_find_stream_info(vidPtr->pFormatCtx, NULL) < 0) {
363#else
364    if (av_find_stream_info(vidPtr->pFormatCtx) < 0) {
365#endif
366        // Tcl_AppendResult(interp, "couldn't find streams in file \"",
367        //     fileName, "\"", (char*)NULL);
368        // return TCL_ERROR;
369
370        // couldn't find streams in file
371        return -4;
372    }
373
374    /*
375     * Search for a video stream and its codec.
376     */
377    vidPtr->videoStream = -1;
378    for (i=0; i < vidPtr->pFormatCtx->nb_streams; i++) {
379        if (vidPtr->pFormatCtx->streams[i]->codec->codec_type
380            == AVMEDIA_TYPE_VIDEO) {
381            vidPtr->videoStream = i;
382            break;
383        }
384    }
385    if (vidPtr->videoStream < 0) {
386        // Tcl_AppendResult(interp, "couldn't find video stream in file \"",
387        //     fileName, "\"", (char*)NULL);
388        // return TCL_ERROR;
389
390        // couldn't find video stream in file
391        return -5;
392    }
393
394    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
395    vcodec = avcodec_find_decoder(vcodecCtx->codec_id);
396    if (vcodec == NULL) {
397        // Tcl_AppendResult(interp, "unsupported codec for file \"",
398        //     fileName, "\"", (char*)NULL);
399        // return TCL_ERROR;
400
401        // unsupported codec for file
402        return -6;
403    }
404#ifdef HAVE_AVCODEC_OPEN2
405    if (avcodec_open2(vcodecCtx, vcodec, NULL) < 0) {
406#else
407    if (avcodec_open(vcodecCtx, vcodec) < 0) {
408#endif
409        // Tcl_AppendResult(interp, "couldn't open codec for file \"",
410        //     fileName, "\"", (char*)NULL);
411        // return TCL_ERROR;
412
413        // couldn't open codec for file
414        return -7;
415    }
416
417    vcodecCtx->get_buffer = VideoAvGetBuffer;
418    vcodecCtx->release_buffer = VideoAvReleaseBuffer;
419
420    vidPtr->pFrameYUV = avcodec_alloc_frame();
421    vidPtr->pFrameRGB = avcodec_alloc_frame();
422    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
423        // Tcl_AppendResult(interp, "couldn't allocate frame space",
424        //     " for file \"", fileName, "\"", (char*)NULL);
425        // return TCL_ERROR;
426
427        // couldn't allocate frame space
428        return -8;
429    }
430
431    /* save the name of the codec as the -format option */
432    fmt = "?";
433    if (vcodecCtx->codec && vcodecCtx->codec->name) {
434        fmt = vcodecCtx->codec->name;
435        strcpy(vidPtr->fmt,fmt);
436    }
437//
438//    sprintf(buffer, "%d", vcodecCtx->width);
439//    if (Tcl_SetVar(interp, "width", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
440//        return TCL_ERROR;
441//    }
442//    sprintf(buffer, "%d", vcodecCtx->height);
443//    if (Tcl_SetVar(interp, "height", buffer, TCL_LEAVE_ERR_MSG) == NULL) {
444//        return TCL_ERROR;
445//    }
446//
447
448    if (strcpy(vidPtr->mode,"input") == NULL) {
449        // strcpy input to vidPtr->mode failed
450        return -9;
451    }
452
453    return 0;
454}
455
456
457// FIXME: get this function working.
458///*
459// * ------------------------------------------------------------------------
460// *  VideoModeWrite()
461// *
462// *  Tries to force this video stream into "write" mode.  If the current
463// *  mode is "", then the -file is opened for writing.  If the current
464// *  mode is "read", then the stream is closed and then opened for
465// *  writing.  If the current mode is "write", then this call does nothing.
466// *  Usually called just before a "write" operation (put, etc.) is
467// *  performed.
468// *
469// *  Returns TCL_OK if successful, and TCL_ERROR if there is a problem
470// *  opening or closing the stream.
471// * ------------------------------------------------------------------------
472// */
473//int
474//VideoModeWrite(vidPtr, fileName, width, height, fmt)
475//    VideoObj *vidPtr;      /* video object to write */
476//    CONST84 char *fileName;
477//    int width;             /* native width of each frame */
478//    int height;            /* native height of each frame */
479//    CONST84 char *fmt
480//{
481//    char c;
482//    int numBytes, pixfmt, iwd, iht;
483//    CONST84 char *size;
484//    AVCodecContext *codecCtx;
485//    AVCodec *vcodec;
486//
487//    if (vidPtr == NULL) {
488//        return -1;
489//    }
490//
491//    /*
492//     * Get the current mode.  If we're already in "output", then we're
493//     * done.  Otherwise, close the stream if necessary and prepare to
494//     * open the file for write.
495//     */
496//    if (vidPtr->mode == NULL) {
497//        return -1;
498//    }
499//
500//    c = *vidPtr->mode;
501//    if (c == 'o' && strcmp(vidPtr->mode,"output") == 0) {
502//        return 0;
503//    }
504//    else if (c == 'i' && strcmp(vidPtr->mode,"input") == 0) {
505//        if (VideoClose(vidPtr) != 0) {
506//            return -1;
507//        }
508//    }
509//
510//    /*
511//     * Get the file name from the -file variable.
512//     */
513//    if ((fileName == NULL) || (*fileName == '\0')) {
514//        /* no file name set -- do nothing */
515//        return 0;
516//    }
517//
518//    /*
519//     * Get the -width and -height of each frame.  If these are set
520//     * to 0 (default), then use the incoming width/height from an
521//     * actual frame.
522//     */
523//     iwd = width;
524//     iht = height;
525//
526//    /*
527//     * Get the format argument.
528//     */
529//    if (fmt == NULL) {
530////        Tcl_AppendResult(interp, "missing value for -format", (char*)NULL);
531////        return TCL_ERROR;
532//        return -1;
533//    }
534//    if (strcmp(fmt,"mpeg1video") == 0) {
535//        vidPtr->outFormatCtx = av_alloc_format_context();
536//        vidPtr->outFormatCtx->oformat = guess_format("mpeg", NULL, NULL);
537//    }
538//    else if (strcmp(fmt,"flv") == 0) {
539//        vidPtr->outFormatCtx = av_alloc_format_context();
540//        vidPtr->outFormatCtx->oformat = guess_format("flv", NULL, NULL);
541//    }
542//    else if (strcmp(fmt,"mov") == 0) {
543//        vidPtr->outFormatCtx = av_alloc_format_context();
544//        vidPtr->outFormatCtx->oformat = guess_format("mov", NULL, NULL);
545//        /* MOV normally uses MPEG4, but that may not be installed */
546//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
547//    }
548//    else if (strcmp(fmt,"avi") == 0) {
549//        vidPtr->outFormatCtx = av_alloc_format_context();
550//        vidPtr->outFormatCtx->oformat = guess_format("avi", NULL, NULL);
551//        /* AVI normally uses MPEG4, but that may not be installed */
552//        vidPtr->outFormatCtx->oformat->video_codec = CODEC_ID_FFV1;
553//    }
554//    else {
555////        Tcl_AppendResult(interp, "bad format \"", fmt, "\": should be",
556////            " avi, flv, mpeg1video, mov", (char*)NULL);
557////        return TCL_ERROR;
558//        return -1;
559//    }
560//
561//    /*
562//     * Open the video stream for writing.
563//     */
564//    strncpy(vidPtr->outFormatCtx->filename, fileName,
565//        sizeof(vidPtr->outFormatCtx->filename));
566//
567//    vidPtr->outVideoStr = av_new_stream(vidPtr->outFormatCtx, 0);
568//    if (vidPtr->outVideoStr == NULL) {
569////        Tcl_AppendResult(interp, "internal error:",
570////            " problem opening stream", (char*)NULL);
571////        return TCL_ERROR;
572//        return -1;
573//    }
574//    codecCtx = vidPtr->outVideoStr->codec;
575//
576//    codecCtx->codec_id = vidPtr->outFormatCtx->oformat->video_codec;
577//    codecCtx->codec_type = CODEC_TYPE_VIDEO;
578//
579//    /* put sample parameters */
580//    codecCtx->bit_rate = 400000;
581//    /* resolution must be a multiple of two */
582//    codecCtx->width = (iwd/2)*2;
583//    codecCtx->height = (iht/2)*2;
584//    codecCtx->time_base.den = 24;
585//    codecCtx->time_base.num = 1;
586//    codecCtx->gop_size = 12; /* emit one intra frame every so often */
587//    codecCtx->pix_fmt = PIX_FMT_YUV420P;
588//    if (codecCtx->codec_id == CODEC_ID_MPEG2VIDEO) {
589//        codecCtx->max_b_frames = 2;
590//    }
591//
592//    /* find the video encoder */
593//    vcodec = avcodec_find_encoder(codecCtx->codec_id);
594//    if (!vcodec || avcodec_open(codecCtx, vcodec) < 0) {
595//        // Tcl_AppendResult(interp, "internal error:",
596//        //     " problem opening codec", (char*)NULL);
597//        // return TCL_ERROR;
598//        return -1;
599//    }
600//
601//    if (av_set_parameters(vidPtr->outFormatCtx, NULL) < 0) {
602//        // Tcl_AppendResult(interp, "internal error:",
603//        //     " problem in av_set_parameters()", (char*)NULL);
604//        // return TCL_ERROR;
605//        return -1;
606//    }
607//
608//    if (url_fopen(&vidPtr->outFormatCtx->pb, fileName, URL_WRONLY) < 0) {
609//        // Tcl_AppendResult(interp, "can't open file \"", fileName,
610//        //     "\"", (char*)NULL);
611//        // return TCL_ERROR;
612//        return -1;
613//    }
614//    av_write_header(vidPtr->outFormatCtx);
615//
616//    vidPtr->pFrameYUV = avcodec_alloc_frame();
617//    vidPtr->pFrameRGB = avcodec_alloc_frame();
618//    if (vidPtr->pFrameYUV == NULL || vidPtr->pFrameRGB == NULL) {
619//        // Tcl_AppendResult(interp, "couldn't allocate frame space",
620//        //     " for file \"", fileName, "\"", (char*)NULL);
621//        // return TCL_ERROR;
622//        return -1;
623//    }
624//
625//    vidPtr->yuvw = vidPtr->outVideoStr->codec->width;
626//    vidPtr->yuvh = vidPtr->outVideoStr->codec->height;
627//    pixfmt = vidPtr->outVideoStr->codec->pix_fmt;
628//
629//    numBytes = avpicture_get_size(pixfmt, vidPtr->yuvw, vidPtr->yuvh);
630//    vidPtr->yuvbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
631//
632//    avpicture_fill((AVPicture*)vidPtr->pFrameYUV, vidPtr->yuvbuffer,
633//        pixfmt, vidPtr->yuvw, vidPtr->yuvh);
634//
635//
636//    if (strcpy(vidPtr->mode,"output") == NULL) {
637//        return -1;
638//    }
639//
640//    return 0;
641//}
642
643
644/*
645 * ------------------------------------------------------------------------
646 *  VideoTime2Frame()
647 *
648 *  Converts a time value (as defined by the FFMPEG package) into an
649 *  integer frame number in the range 0-end for the stream.
650 * ------------------------------------------------------------------------
651 */
652int
653VideoTime2Frame(streamPtr, tval)
654    AVStream *streamPtr;   /* scale values according to this stream */
655    int64_t tval;          /* time value as defined by stream */
656{
657    AVRational one, factor;
658    one.num = 1;
659    one.den = 1;
660    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
661    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;
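    /* factor converts stream ticks into frames:
     * frames = ticks * time_base (sec/tick) * r_frame_rate (frames/sec) */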
662
663    if (tval > streamPtr->start_time) {
664        tval -= streamPtr->start_time;
665    } else {
666        tval = 0;
667    }
668    tval = av_rescale_q(tval, factor, one);
669    return (int)tval;
670}
671
672/*
673 * ------------------------------------------------------------------------
674 *  VideoFrame2Time()
675 *
676 *  Converts a frame number 0-end to the corresponding time value
677 *  (as defined by FFMPEG) for the given stream.
678 * ------------------------------------------------------------------------
679 */
680int64_t
681VideoFrame2Time(streamPtr, fval)
682    AVStream *streamPtr;   /* scale values according to this stream */
683    int fval;              /* frame value in the range 0-end */
684{
685    int64_t tval;
686    AVRational one, factor;
687    one.num = 1;
688    one.den = 1;
689
690    factor.num = streamPtr->time_base.num * streamPtr->r_frame_rate.num;
691    factor.den = streamPtr->time_base.den * streamPtr->r_frame_rate.den;
692
693    tval = av_rescale_q((int64_t)fval, one, factor) + streamPtr->start_time;
694    return tval;
695}
696
697/*
698 * ------------------------------------------------------------------------
699 *  VideoNextFrame()
700 *
701 *  Decodes a series of video packets until the end of the frame
702 *  is reached.  Updates the frameNumber and atEnd to maintain the
703 *  current status for this video stream.
704 * ------------------------------------------------------------------------
705 */
706void
707VideoNextFrame(vidPtr)
708    VideoObj *vidPtr;   /* get a frame from this video stream */
709{
710    int frameFinished;
711    uint64_t pts;
712    AVCodecContext *vcodecCtx;
713    AVStream *vstreamPtr;
714    AVPacket packet;
715
716    if (vidPtr->pFormatCtx) {
717        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
718        vcodecCtx = vstreamPtr->codec;
719
720        /*
721         * Decode as many packets as necessary to get the next frame.
722         */
723        pts = 0;
724        while (1) {
725            if (av_read_frame(vidPtr->pFormatCtx, &packet) >= 0) {
726                if (packet.stream_index == vidPtr->videoStream) {
727                    /* save pts so we can grab it again in VideoAvGetBuffer */
728                    global_video_pkt_pts = packet.pts;
729
730#ifdef HAVE_AVCODEC_DECODE_VIDEO2
731                    // new avcodec decode video function
732                    avcodec_decode_video2(vcodecCtx, vidPtr->pFrameYUV,
733                        &frameFinished, &packet);
734#else
735                    // old avcodec decode video function
736                    avcodec_decode_video(vcodecCtx, vidPtr->pFrameYUV,
737                        &frameFinished, packet.data, packet.size);
738#endif
739                    if (packet.dts == AV_NOPTS_VALUE
740                          && vidPtr->pFrameYUV->opaque
741                          && *(uint64_t*)vidPtr->pFrameYUV->opaque != AV_NOPTS_VALUE) {
742                        pts = *(uint64_t*)vidPtr->pFrameYUV->opaque;
743                    } else if (packet.dts != AV_NOPTS_VALUE) {
744                        pts = packet.dts;
745                    } else {
746                        pts = 0;
747                    }
748
749                    if (frameFinished) {
750                        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, pts);
751                        break;
752                    }
753                }
754            } else {
755                vidPtr->atEnd = 1;
756                break;
757            }
758        }
759        av_free_packet(&packet);
760    }
761}
762
763/*
764 * ------------------------------------------------------------------------
765 *  These two routines are called whenever a frame buffer is allocated,
766 *  which means that we're starting a new frame.  Grab the global pts
767 *  counter and squirrel it away in the opaque slot of the frame.  This
768 *  will give us a pts value that we can trust later.
769 * ------------------------------------------------------------------------
770 */
771int
772VideoAvGetBuffer(c,fr)
773    AVCodecContext *c;  /* codec doing the frame decoding */
774    AVFrame *fr;        /* frame being decoded */
775{
776    int rval = avcodec_default_get_buffer(c, fr);
777    uint64_t *ptsPtr = av_malloc(sizeof(uint64_t));
778    *ptsPtr = global_video_pkt_pts;
779    fr->opaque = ptsPtr;
780    return rval;
781}
782
783void
784VideoAvReleaseBuffer(c,fr)
785    AVCodecContext *c;  /* codec doing the frame decoding */
786    AVFrame *fr;        /* frame being decoded */
787{
788    if (fr && fr->opaque) {
789        av_freep(&fr->opaque);
790    }
791    avcodec_default_release_buffer(c,fr);
792}
793
794/*
795 * ------------------------------------------------------------------------
796 *  VideoInit()
797 *
798 *  Implements the body of the _ffmpeg_init method in the "video" class.
799 *  Initializes the basic data structure and stores it in the _videodata
800 *  variable within the class.
801 * ------------------------------------------------------------------------
802 */
803VideoObj *
804VideoInit()
805{
806    /*
807     * Create an object to represent this video stream.
808     */
809
810    /* Register all codecs and formats */
811    av_register_all();
812
813    return VideoSetData();
814}
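/*
 * ------------------------------------------------------------------------
 *  Typical read-mode call sequence (sketch only; the file name below is
 *  illustrative):
 *
 *      VideoObj *v = VideoInit();
 *      if (VideoOpenFile(v, "movie.mpg", "r") == 0) {
 *          int w = 0, h = 0, bufSize = 0;
 *          void *img = NULL;
 *          VideoSize(v, &w, &h);
 *          VideoGoToN(v, 0);
 *          VideoGetImage(v, w, h, &img, &bufSize);  // PPM (P6) data; img
 *                                                   // points into the
 *                                                   // object's own buffer
 *      }
 *      VideoCleanup(v);                             // closes and frees
 * ------------------------------------------------------------------------
 */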
815
816/*
817 * ------------------------------------------------------------------------
818 *  VideoCleanup()
819 *
820 *  Implements the body of the _ffmpeg_cleanup method in the "video" class.
821 *  Accesses the data structure stored in the _videodata variable and
822 *  frees up the data.
823 * ------------------------------------------------------------------------
824 */
825int
826VideoCleanup(vidPtr)
827    VideoObj *vidPtr;
828{
829    /*
830     *  Nothing much to do here.  Just close the file in case it is
831     *  still open.  Don't free vidPtr itself; that is cleaned up by
832     *  the ByteArrayObj in the class data member.
833     */
834    int ret = 0;
835
836    ret -= VideoClose(vidPtr);
837
838    if (vidPtr != NULL) {
839        VideoFreeImgBuffer(vidPtr);
840        if (vidPtr->fileName != NULL) {
841            free(vidPtr->fileName);
842            vidPtr->fileName = NULL;
843        }
844        free(vidPtr);
845        vidPtr = NULL;
846// FIXME: need a test to make sure vidPtr is null after the function returns.
847    }
848
849    return ret;
850}
851
852/*
853 * ------------------------------------------------------------------------
854 *  VideoSize()
855 *
856 *  Implements the body of the "size" method in the "video" class.
857 *  Returns the size of each frame in this video stream as a list {w h}.
858 * ------------------------------------------------------------------------
859 */
860int
861VideoSize(vidPtr, width, height)
862    VideoObj *vidPtr;
863    int *width;
864    int *height;
865{
866    AVCodecContext *vcodecCtx;
867
868    if (vidPtr == NULL) {
869        return -1;
870    }
871
872    if (vidPtr->pFormatCtx == NULL) {
873        // "internal error: video stream is not open",
874        return -1;
875    }
876
877    if (vidPtr->pFormatCtx) {
878        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
879        if (width != NULL) {
880            *width = vcodecCtx->width;
881        }
882        if (height != NULL) {
883            *height = vcodecCtx->height;
884        }
885    }
886    return 0;
887}
888
889/*
890 * ------------------------------------------------------------------------
891 *  VideoGo()
892 *
893 *  Implements the body of the "go" method in the "video" class.
894 *  Advances by one or more frames, or seeks backward in the stream.
895 *  Handles the following syntax:
896 *    obj go next ...... go to next frame (same as +1)
897 *    obj go +n ........ advance by n frames
898 *    obj go -n ........ go back by n frames
899 *    obj go n ......... go to frame n
900 * ------------------------------------------------------------------------
901 */
902int
903VideoGoNext(vidPtr)
904    VideoObj *vidPtr;
905{
906    int nabs;
907
908    if (vidPtr == NULL) {
909        return -1;
910    }
911
912    nabs = vidPtr->frameNumber + 1;
913    return VideoGoToN(vidPtr, nabs);
914}
915
916int
917VideoGoPlusMinusN(vidPtr, n)
918    VideoObj *vidPtr;
919    int n;
920{
921    int nabs;
922
923    if (vidPtr == NULL) {
924        return -1;
925    }
926
927    nabs = vidPtr->frameNumber + n;
928    return VideoGoToN(vidPtr, nabs);
929}
930
931int
932VideoGoToN(vidPtr, n)
933    VideoObj *vidPtr;
934    int n;
935{
936    int nrel, nabs, seekFlags, gotframe;
937    int64_t nseek;
938    AVCodecContext *vcodecCtx;
939    AVStream *vstreamPtr;
940
941    if (vidPtr == NULL) {
942        return -1;
943    }
944
945    if (vidPtr->pFormatCtx == NULL) {
946        // "internal error: video stream is not open",
947        return -1;
948    }
949    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
950
951    nabs = n;
952
953    if (nabs < 0) {
954        nabs = 0;
955    }
956
957    if (nabs < vidPtr->frameNumber) {
958        seekFlags = AVSEEK_FLAG_BACKWARD;
959    } else {
960        seekFlags = 0;
961    }
962
963    /*
964     * If we're going to an absolute frame, or if we're going backward
965     * or too far forward, then seek the frame.
966     */
967    nrel = nabs-vidPtr->frameNumber;
968    if ((nrel > 50) || (seekFlags&AVSEEK_FLAG_BACKWARD)) {
969
970        vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
971        nseek = VideoFrame2Time(vstreamPtr, nabs);
972        // not sure why it is checking against the number 100
973        if (nseek > 100) {
974            nseek -= 100;
975        } else {
976            nseek = 0;
977        }
978
979        /* first, seek the nearest reference frame for a good starting pt */
980        av_seek_frame(vidPtr->pFormatCtx, vidPtr->videoStream,
981            nseek, seekFlags);
982
983        // this doesn't seem to give me back the true frame number
984        // feels like it is more of a reverse of the VideoFrame2Time call
985        // because vidPtr->frameNumber always equals nabs
986        vidPtr->frameNumber = VideoTime2Frame(vstreamPtr, nseek);
987        vidPtr->atEnd = 0;
988
989        /* read the frame to figure out what the frame number is */
990        VideoNextFrame(vidPtr);
991
992        /* then, move forward until we reach the desired frame */
993        gotframe = 0;
994        while (vidPtr->frameNumber < nabs && !vidPtr->atEnd) {
995            VideoNextFrame(vidPtr);
996            gotframe = 1;
997        }
998
999        /* get at least one frame, unless we're done or at the beginning*/
1000        if (!gotframe && !vidPtr->atEnd) {
1001            if (vidPtr->frameNumber > nabs) {
1002                // we are probably at a key frame, just past
1003                // the requested frame and need to seek backwards.
1004                VideoGoToN(vidPtr,n);
1005            } else {
1006                VideoNextFrame(vidPtr);
1007            }
1008        }
1009    }
1010    else {
1011        while (nrel-- > 0) {
1012            VideoNextFrame(vidPtr);
1013        }
1014    }
1015
1016    /*
1017     * Send back the current frame number as the result.
1018     */
1019    return vidPtr->frameNumber;
1020}
1021
1022/*
1023 * ------------------------------------------------------------------------
1024 *  VideoGet()
1025 *
1026 *  Implements the body of the "get" method in the "video" class.
1027 *  Returns information about the current frame via the following
1028 *  syntax:
1029 *    obj get start|position|end
1030 *    obj get <imageHandle>
1031 * ------------------------------------------------------------------------
1032 */
1033int
1034VideoGetImage(vidPtr, iw, ih, img, bufSize)
1035    VideoObj *vidPtr;
1036    int iw;
1037    int ih;
1038    void **img;
1039    int *bufSize;
1040{
1041
1042    int numBytes;
1043    AVCodecContext *vcodecCtx;
1044
1045    if (vidPtr == NULL) {
1046        return -1;
1047    }
1048
1049    if (VideoModeRead(vidPtr) != 0) {
1050        return -1;
1051    }
1052
1053    /*
1054    if (vidPtr->pFormatCtx) {
1055        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1056    } else {
1057        vcodecCtx = NULL;
1058    }
1059    */
1060
1061    if (vidPtr->pFormatCtx == NULL) {
1062        // vidPtr->pFormatCtx is NULL, video not open
1063        return -1;
1064    }
1065    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1066
1067    /*
1068     * Query the size for this photo and make sure that we have a
1069     * buffer of the appropriate size for software scaling and
1070     * format conversion.
1071     */
1072
1073    // if the user's desired size is less than 0,
1074    // use the default size
1075
1076    if (iw < 0) {
1077        iw = vcodecCtx->width;
1078    }
1079    if (ih < 0) {
1080        ih = vcodecCtx->height;
1081    }
1082
1083
1084    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
1085        if (vidPtr->rgbbuffer) {
1086            av_free(vidPtr->rgbbuffer);
1087            vidPtr->rgbbuffer = NULL;
1088        }
1089        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
1090        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
1091        vidPtr->rgbw = iw;
1092        vidPtr->rgbh = ih;
1093
1094        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
1095            PIX_FMT_RGB24, iw, ih);
1096
1097        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
1098            vcodecCtx->width, vcodecCtx->height, vcodecCtx->pix_fmt,
1099            iw, ih, PIX_FMT_RGB24, SWS_BICUBIC|SWS_PRINT_INFO, NULL, NULL, NULL);
1100    }
1101
1102    /*
1103     * Rescale the current frame to the desired size, and translate
1104     * into RGB format so we can copy into the destination image.
1105     */
1106    if (vidPtr->pFrameYUV && vidPtr->pFrameYUV->data[0]) {
1107        sws_scale(vidPtr->scalingCtx, (const uint8_t * const*)
1108            vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize,
1109            0, vcodecCtx->height,
1110            vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize);
1111
1112/*
1113        iblock.pixelPtr  = (unsigned char*)vidPtr->pFrameRGB->data[0];
1114        iblock.width     = iw;
1115        iblock.height    = ih;
1116        iblock.pitch     = vidPtr->pFrameRGB->linesize[0];
1117        iblock.pixelSize = 3;
1118        iblock.offset[0] = 0;
1119        iblock.offset[1] = 1;
1120        iblock.offset[2] = 2;
1121        iblock.offset[3] = 0;
1122
1123        Tk_PhotoPutBlock_NoComposite(img, &iblock, 0, 0, iw, ih);
1124*/
1125
1126        if (vidPtr->img == NULL) {
1127            VideoAllocImgBuffer(vidPtr,iw,ih);
1128        } else {
1129            if ((vidPtr->imgWidth != iw) || (vidPtr->imgHeight != ih)) {
1130                // new height or width
1131                // resize the image buffer
1132                free(vidPtr->img);
1133                VideoAllocImgBuffer(vidPtr,iw,ih);
1134            }
1135        }
1136
1137        // Write pixel data
1138        memcpy(vidPtr->img+vidPtr->imgHeaderLen,
1139            vidPtr->pFrameRGB->data[0],
1140            vidPtr->imgWidth*3*vidPtr->imgHeight);
1141    }
1142    *img = vidPtr->img;
1143    *bufSize = (vidPtr->imgWidth*3*vidPtr->imgHeight) + vidPtr->imgHeaderLen;
1144    return 0;
1145}
1146
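/*
 * ------------------------------------------------------------------------
 *  VideoFrameRate()
 *
 *  Returns the frame rate of the video stream, in frames per second,
 *  through the fr pointer.  Returns 0 on success, -1 on error.
 * ------------------------------------------------------------------------
 */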
1147int
1148VideoFrameRate (vidPtr, fr)
1149    VideoObj *vidPtr;
1150    double *fr;
1151{
1152    AVStream *vstreamPtr;
1153
1154    if (vidPtr == NULL) {
1155        return -1;
1156    }
1157
1158    if (fr == NULL) {
1159        return -1;
1160    }
1161
1162    if (vidPtr->pFormatCtx == NULL) {
1163        // vidPtr->pFormatCtx is NULL, video not open
1164        return -1;
1165    }
1166    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1167
1168    // http://trac.handbrake.fr/browser/trunk/libhb/decavcodec.c?rev=1490#L684
1169    // there seems to be some controversy over what structure holds
1170    // the correct frame rate information for different video codecs.
1171    // for now we will use the stream's r_frame_rate.
1172    // from the above post, it looks like this value can be interpreted
1173    // as frames per second.
1174    *fr = av_q2d(vstreamPtr->r_frame_rate);
1175
1176    return 0;
1177}
1178
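/*
 * ------------------------------------------------------------------------
 *  VideoFileName()
 *
 *  Returns the name of the currently opened file through the fname
 *  pointer.  Returns 0 on success, -1 on error.
 * ------------------------------------------------------------------------
 */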
1179int
1180VideoFileName (vidPtr, fname)
1181    VideoObj *vidPtr;
1182    const char **fname;
1183{
1184    if (vidPtr == NULL) {
1185        return -1;
1186    }
1187
1188    if (fname == NULL) {
1189        return -1;
1190    }
1191
1192    if (vidPtr->pFormatCtx == NULL) {
1193        // vidPtr->pFormatCtx is NULL, video not open
1194        return -1;
1195    }
1196
1197    *fname = vidPtr->fileName;
1198
1199    return 0;
1200}
1201
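/*
 * ------------------------------------------------------------------------
 *  VideoPixelAspectRatio()
 *
 *  Returns the pixel (sample) aspect ratio of the video stream as a
 *  numerator/denominator pair.  Returns 0 on success, -1 on error.
 * ------------------------------------------------------------------------
 */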
1202int
1203VideoPixelAspectRatio (vidPtr, num, den)
1204    VideoObj *vidPtr;
1205    int *num;
1206    int *den;
1207{
1208    AVCodecContext *vcodecCtx;
1209
1210    if (vidPtr == NULL) {
1211        return -1;
1212    }
1213
1214    if ((num == NULL) || (den == NULL)) {
1215        return -1;
1216    }
1217
1218    if (vidPtr->pFormatCtx == NULL) {
1219        // vidPtr->pFormatCtx is NULL, video not open
1220        return -1;
1221    }
1222
1223    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1224
1225    *num = vcodecCtx->sample_aspect_ratio.num;
1226    *den = vcodecCtx->sample_aspect_ratio.den;
1227
1228    return 0;
1229}
1230
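/*
 * ------------------------------------------------------------------------
 *  VideoDisplayAspectRatio()
 *
 *  Computes the display aspect ratio (frame size scaled by the pixel
 *  aspect ratio, reduced by the gcd) and returns it as a
 *  numerator/denominator pair.  Returns 0 on success, -1 on error.
 * ------------------------------------------------------------------------
 */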
1231int
1232VideoDisplayAspectRatio (vidPtr, num, den)
1233    VideoObj *vidPtr;
1234    int *num;
1235    int *den;
1236{
1237    int width = 0;
1238    int height = 0;
1239    int64_t gcd = 0;
1240
1241    if (vidPtr == NULL) {
1242        return -1;
1243    }
1244
1245    if ((num == NULL) || (den == NULL)) {
1246        return -1;
1247    }
1248
1249    if (vidPtr->pFormatCtx == NULL) {
1250        // vidPtr->pFormatCtx is NULL, video not open
1251        return -1;
1252    }
1253
1254    VideoSize(vidPtr, &width, &height);
1255    VideoPixelAspectRatio(vidPtr, num, den);
1256
1257    width = (*num)*width;
1258    height = (*den)*height;
1259#ifdef FFMPEG_COMMON_H
1260    // old gcd function
1261    gcd = ff_gcd(FFABS(width), FFABS(height));
1262#else
1263    // new gcd function
1264    gcd = av_gcd(FFABS(width), FFABS(height));
1265#endif
1266
1267
1268    *num = width/gcd;
1269    *den = height/gcd;
1270
1271    if (*den == 0) {
1272        *num = 0;
1273        *den = 1;
1274    }
1275
1276    return 0;
1277}
1278
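/*
 * ------------------------------------------------------------------------
 *  VideoAllocImgBuffer()
 *
 *  Allocates the temporary image buffer used to hand frames back to the
 *  caller.  The buffer holds a binary PPM image: a "P6\n<w> <h>\n255\n"
 *  header followed by width*height*3 bytes of RGB data.
 * ------------------------------------------------------------------------
 */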
1279int
1280VideoAllocImgBuffer(vidPtr, width, height)
1281    VideoObj *vidPtr;
1282    int width;
1283    int height;
1284{
1285
1286    char header[64];
1287    int headerLen = 0;
1288    int bufsize = 0;
1289
1290    sprintf(header,"P6\n%d %d\n255\n", width, height);
1291    headerLen = strlen(header);
1292    bufsize = headerLen + (width*3*height);
1293    vidPtr->img = (void*) malloc(bufsize);
1294    vidPtr->imgHeaderLen = headerLen;
1295    vidPtr->imgWidth = width;
1296    vidPtr->imgHeight = height;
1297    memcpy(vidPtr->img,header,headerLen);
1298
1299    return 0;
1300}
1301
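/*
 * ------------------------------------------------------------------------
 *  VideoFreeImgBuffer()
 *
 *  Frees the temporary image buffer allocated by VideoAllocImgBuffer().
 * ------------------------------------------------------------------------
 */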
1302int
1303VideoFreeImgBuffer(vidPtr)
1304    VideoObj *vidPtr;
1305{
1306    if ((vidPtr != NULL) && (vidPtr->img != NULL)) {
1307        free(vidPtr->img);
1308        vidPtr->img = NULL;
1309    }
1310    return 0;
1311}
1312
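/*
 * ------------------------------------------------------------------------
 *  VideoGetPositionCur()
 *
 *  Returns the current frame number through the pos pointer.
 *  Returns 0 on success, -1 on error.
 * ------------------------------------------------------------------------
 */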
1313int
1314VideoGetPositionCur(vidPtr, pos)
1315    VideoObj *vidPtr;      /* video object to act on */
1316    int *pos;
1317{
1318    int fnum = -1;
1319
1320    if (vidPtr == NULL) {
1321        return -1;
1322    }
1323
1324    if (pos == NULL) {
1325        return -1;
1326    }
1327
1328    if (VideoModeRead(vidPtr) != 0) {
1329        return -1;
1330    }
1331
1332    if (vidPtr->pFormatCtx) {
1333        fnum = vidPtr->frameNumber;
1334    }
1335
1336    *pos = fnum;
1337    return 0;
1338}
1339
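/*
 * ------------------------------------------------------------------------
 *  VideoGetPositionEnd()
 *
 *  Returns the number of the last readable frame through the pos pointer.
 *  Returns 0 on success, -1 on error.
 * ------------------------------------------------------------------------
 */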
1340int
1341VideoGetPositionEnd(vidPtr, pos)
1342    VideoObj *vidPtr;      /* video object to act on */
1343    int *pos;
1344{
1345    if (vidPtr == NULL) {
1346        return -1;
1347    }
1348
1349    if (pos == NULL) {
1350        return -1;
1351    }
1352
1353    if (VideoModeRead(vidPtr) != 0) {
1354        return -1;
1355    }
1356
1357    *pos = vidPtr->lastframe;
1358    return 0;
1359}
1360
1361// FIXME: get this function working
1362///*
1363// * ------------------------------------------------------------------------
1364// *  VideoPut()
1365// *
1366// *  Implements the body of the "put" method in the "video" class.
1367// *  Stores a single frame into the video stream:
1368// *    obj put <imageHandle>
1369// * ------------------------------------------------------------------------
1370// */
1371//int
1372//VideoPut(cdata, interp, argc, argv)
1373//    ClientData cdata;      /* not used */
1374//    Tcl_Interp *interp;    /* interpreter */
1375//    int argc;              /* number of arguments */
1376//    CONST84 char* argv[];  /* argument strings */
1377//{
1378//    VideoObj *vidPtr;
1379//    int iw, ih, numBytes, roffs, goffs, boffs;
1380//    char buffer[64];
1381//    unsigned char* photodata;
1382//    uint8_t* rgbdata;
1383//    Tk_PhotoHandle img;
1384//    Tk_PhotoImageBlock iblock;
1385//    AVCodecContext *codecCtx;
1386//
1387//    if (VideoGetData(interp, &vidPtr) != TCL_OK) {
1388//        return TCL_ERROR;
1389//    }
1390//
1391//    if (argc != 2) {
1392//        Tcl_AppendResult(interp, "wrong # args: should be \"", argv[0],
1393//            " image\"", (char*)NULL);
1394//        return TCL_ERROR;
1395//    }
1396//
1397//    /*
1398//     * Get the name of the image and copy from it.
1399//     */
1400//    img = Tk_FindPhoto(interp, argv[1]);
1401//    if (img == NULL) {
1402//        Tcl_AppendResult(interp, "bad value \"", argv[1],
1403//            "\": expected photo image", (char*)NULL);
1404//        return TCL_ERROR;
1405//    }
1406//
1407//    /*
1408//     * Query the size for this photo and make sure that we have a
1409//     * buffer of the appropriate size for software scaling and
1410//     * format conversion.
1411//     */
1412//    Tk_PhotoGetImage(img, &iblock);
1413//    Tk_PhotoGetSize(img, &iw, &ih);
1414//
1415//    if (VideoModeWrite(interp, iw, ih) != TCL_OK) {
1416//        return TCL_ERROR;
1417//    }
1418//    codecCtx = vidPtr->outVideoStr->codec;
1419//
1420//    if (iw != vidPtr->rgbw || ih != vidPtr->rgbh) {
1421//        if (vidPtr->rgbbuffer) {
1422//            av_free(vidPtr->rgbbuffer);
1423//            vidPtr->rgbbuffer = NULL;
1424//        }
1425//        numBytes = avpicture_get_size(PIX_FMT_RGB24, iw, ih);
1426//        vidPtr->rgbbuffer = (uint8_t*)av_malloc(numBytes*sizeof(uint8_t));
1427//        vidPtr->rgbw = iw;
1428//        vidPtr->rgbh = ih;
1429//
1430//        avpicture_fill((AVPicture*)vidPtr->pFrameRGB, vidPtr->rgbbuffer,
1431//            PIX_FMT_RGB24, iw, ih);
1432//
1433//        vidPtr->scalingCtx = sws_getCachedContext(vidPtr->scalingCtx,
1434//            iw, ih, PIX_FMT_RGB24,
1435//            codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
1436//            SWS_BICUBIC, NULL, NULL, NULL);
1437//    }
1438//
1439//    /*
1440//     * Copy the data from the Tk photo block into the RGB frame.
1441//     */
1442//    roffs = iblock.offset[0];
1443//    goffs = iblock.offset[1];
1444//    boffs = iblock.offset[2];
1445//
1446//    for (ih=0; ih < iblock.height; ih++) {
1447//        rgbdata = vidPtr->pFrameRGB->data[0] + ih*vidPtr->pFrameRGB->linesize[0];
1448//        photodata = iblock.pixelPtr + ih*iblock.pitch;
1449//        for (iw=0; iw < iblock.width; iw++) {
1450//            rgbdata[0] = photodata[roffs];
1451//            rgbdata[1] = photodata[goffs];
1452//            rgbdata[2] = photodata[boffs];
1453//            rgbdata += 3;
1454//            photodata += iblock.pixelSize;
1455//        }
1456//    }
1457//
1458//    /*
1459//     * Rescale the current frame to the desired size, and translate
1460//     * from RGB to YUV so we can give the frame to the codec.
1461//     */
1462//    sws_scale(vidPtr->scalingCtx,
1463//        vidPtr->pFrameRGB->data, vidPtr->pFrameRGB->linesize,
1464//        0, ih,
1465//        vidPtr->pFrameYUV->data, vidPtr->pFrameYUV->linesize);
1466//
1467//    numBytes = VideoWriteFrame(vidPtr, vidPtr->pFrameYUV);
1468//    if (numBytes < 0) {
1469//        Tcl_AppendResult(interp, "error in av_write_frame()", (char*)NULL);
1470//        return TCL_ERROR;
1471//    }
1472//    sprintf(buffer, "frame %d (%d bytes)", vidPtr->frameNumber++, numBytes);
1473//    Tcl_SetResult(interp, buffer, TCL_VOLATILE);
1474//    return TCL_OK;
1475//}
1476
1477
1478/*
1479 * ------------------------------------------------------------------------
1480 *  VideoWriteFrame()
1481 *
1482 *  Used internally to write a single frame out to the output stream.
1483 *  Returns the number of bytes written to the frame, or -1 if an error
1484 *  occurred.
1485 * ------------------------------------------------------------------------
1486 */
1487int
1488VideoWriteFrame(vidPtr, framePtr)
1489    VideoObj *vidPtr;      /* video object being updated */
1490    AVFrame *framePtr;     /* picture frame being written out */
1491{
1492    int numBytes;
1493    AVCodecContext *codecCtx;
1494    AVPacket pkt;
1495
1496#define OUTBUF_SIZE 500000
1497    uint8_t outbuf[OUTBUF_SIZE];
1498
1499    codecCtx = vidPtr->outVideoStr->codec;
1500    numBytes = avcodec_encode_video(codecCtx, outbuf, OUTBUF_SIZE, framePtr);
1501
1502    if (numBytes > 0) {
1503        av_init_packet(&pkt);
1504
1505        if (codecCtx->coded_frame->pts != AV_NOPTS_VALUE) {
1506            pkt.pts = av_rescale_q(codecCtx->coded_frame->pts,
1507                codecCtx->time_base,
1508                vidPtr->outVideoStr->time_base);
1509        }
1510        if (codecCtx->coded_frame->key_frame) {
1511            pkt.flags |= AV_PKT_FLAG_KEY;
1512        }
1513        pkt.stream_index = vidPtr->outVideoStr->index;
1514        pkt.data = outbuf;
1515        pkt.size = numBytes;
1516
1517        /* write the compressed frame in the media file */
1518        if (av_write_frame(vidPtr->outFormatCtx, &pkt) != 0) {
1519            return -1;
1520        }
1521    }
1522    return numBytes;
1523}
1524
1525/*
1526 * ------------------------------------------------------------------------
1527 *  VideoTransform()
1528 *
1529 *  Implements the body of the "transform" method in the "video" class.
1530 *  Translates one value into another--times into frames, etc.  Handles
1531 *  the following syntax:
1532 *    obj transform frames2duration <frames>
1533 *    obj transform duration2frames <duration>
1534 * ------------------------------------------------------------------------
1535 */
1536double
1537VideoTransformFrames2Duration(vidPtr, frame)
1538    VideoObj *vidPtr;
1539    int frame;
1540{
1541    double duration;
1542    AVCodecContext *vcodecCtx;
1543    AVStream *vstreamPtr;
1544    AVRational hundred;
1545    int64_t tval;
1546
1547    hundred.num = 100;
1548    hundred.den = 1;
1549
1550    if (vidPtr == NULL) {
1551        return -1;
1552    }
1553
1554    if (vidPtr->pFormatCtx == NULL) {
1555//        Tcl_AppendResult(interp, "can't compute transformations:",
1556//            " stream not opened", (char*)NULL);
1557//        return TCL_ERROR;
1558        return -1;
1559    }
1560
1561    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1562    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1563
1564    tval = av_rescale_q((int64_t)frame, hundred, vstreamPtr->r_frame_rate);
1565    duration = 0.01*tval;
1566
1567    return duration;
1568}
1569
1570int
1571VideoTransformDuration2Frames(vidPtr, duration)
1572    VideoObj *vidPtr;
1573    double duration;
1574{
1575    int frames;
1576    AVCodecContext *vcodecCtx;
1577    AVStream *vstreamPtr;
1578    AVRational hundred;
1579    int64_t tval;
1580
1581    hundred.num = 100;
1582    hundred.den = 1;
1583
1584    if (vidPtr == NULL) {
1585        return -1;
1586    }
1587    if (vidPtr->pFormatCtx == NULL) {
1588//        Tcl_AppendResult(interp, "can't compute transformations:",
1589//            " stream not opened", (char*)NULL);
1590//        return TCL_ERROR;
1591        return -1;
1592    }
1593
1594    vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1595    vstreamPtr = vidPtr->pFormatCtx->streams[vidPtr->videoStream];
1596
1597    tval = (int64_t)(duration*100);
1598    frames = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
1599    // check above for overflow
1600    // tval = av_rescale_q(tval, vstreamPtr->r_frame_rate, hundred);
1601    // sprintf(buffer, "%lld", tval);
1602
1603    return frames;
1604}
1605
1606/*
1607 * ------------------------------------------------------------------------
1608 *  VideoClose()
1609 *
1610 *  Implements the body of the _ffmpeg_close method in the "video" class.
1611 *  Closes any file opened previously by the open methods for read/write.
1612 *  If nothing is open, this does nothing.
1613 * ------------------------------------------------------------------------
1614 */
1615int
1616VideoClose(vidPtr)
1617    VideoObj *vidPtr;
1618{
1619    AVCodecContext *vcodecCtx;
1620    int i;
1621
1622    if (vidPtr == NULL) {
1623        return -1;
1624    }
1625
1626    if (vidPtr->yuvbuffer) {
1627        av_free(vidPtr->yuvbuffer);
1628        vidPtr->yuvbuffer = NULL;
1629        vidPtr->yuvw = 0;
1630        vidPtr->yuvh = 0;
1631    }
1632    if (vidPtr->pFrameYUV) {
1633        av_free(vidPtr->pFrameYUV);
1634        vidPtr->pFrameYUV = NULL;
1635    }
1636
1637    if (vidPtr->rgbbuffer) {
1638        av_free(vidPtr->rgbbuffer);
1639        vidPtr->rgbbuffer = NULL;
1640        vidPtr->rgbw = 0;
1641        vidPtr->rgbh = 0;
1642    }
1643    if (vidPtr->pFrameRGB) {
1644        av_free(vidPtr->pFrameRGB);
1645        vidPtr->pFrameRGB = NULL;
1646    }
1647
1648    if (vidPtr->scalingCtx) {
1649        sws_freeContext(vidPtr->scalingCtx);
1650        vidPtr->scalingCtx = NULL;
1651    }
1652    if (vidPtr->pFormatCtx && vidPtr->videoStream >= 0) {
1653        vcodecCtx = vidPtr->pFormatCtx->streams[vidPtr->videoStream]->codec;
1654        if (vcodecCtx) {
1655            avcodec_close(vcodecCtx);
1656        }
1657    }
1658    if (vidPtr->pFormatCtx) {
1659#ifdef HAVE_AVFORMAT_CLOSE_INPUT
1660        avformat_close_input(&vidPtr->pFormatCtx);
1661#else
1662        av_close_input_file(vidPtr->pFormatCtx);
1663#endif
1664        vidPtr->pFormatCtx = NULL;
1665    }
1666
1667    if (vidPtr->outFormatCtx) {
1668        while (VideoWriteFrame(vidPtr, NULL) > 0)
1669            ; /* write out any remaining frames */
1670
1671        av_write_trailer(vidPtr->outFormatCtx);
1672
1673        for (i=0; i < vidPtr->outFormatCtx->nb_streams; i++) {
1674            avcodec_close(vidPtr->outFormatCtx->streams[i]->codec);
1675            av_freep(&vidPtr->outFormatCtx->streams[i]->codec);
1676            av_freep(&vidPtr->outFormatCtx->streams[i]);
1677        }
1678
1679        if (vidPtr->outFormatCtx->pb) {
1680            avio_close(vidPtr->outFormatCtx->pb);
1681        }
1682
1683        av_free(vidPtr->outFormatCtx);
1684        vidPtr->outFormatCtx = NULL;
1685    }
1686
1687    /* reset the mode to null */
1688    *vidPtr->mode = '\0';
1689
1690    return 0;
1691}