source: trunk/packages/vizservers/nanovis/RpAVTranslate.cpp @ 1503

Last change on this file since 1503 was 1503, checked in by gah, 15 years ago

Flip image along y-axis for movie

File size: 11.4 KB
/*
 * ======================================================================
 *  Rappture::AVTranslate
 *
 *  AUTHOR:  Derrick Kearney, Purdue University
 *
 *  Copyright (c) 2005-2009  Purdue Research Foundation
 * ----------------------------------------------------------------------
 *  See the file "license.terms" for information on usage and
 *  redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
 * ======================================================================
 */

#include "nvconf.h"

#include <cstdlib>
#include <cstdio>
#include <cstring>
#include <cmath>
#include <fstream>
#include <assert.h>

extern "C" {
#ifdef HAVE_FFMPEG_MEM_H
#include <ffmpeg/mem.h>
#endif
#ifdef HAVE_LIBAVUTIL_MEM_H
#include <libavutil/mem.h>
#endif
}

#include "RpAVTranslate.h"

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

using namespace Rappture;

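/**
 * Create a movie translator for frames of the given width and height,
 * using the default bit rate (400 kbit/s) and frame rate (25 fps).
 */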
AVTranslate::AVTranslate(size_t width, size_t height) :
    _width (width),
    _height (height),
    _bitRate(400000),
    _frameRate(25.0f),
    _videoOutbufSize(200000),
    _videoOutbuf(NULL),
    _fmtPtr(NULL),
    _ocPtr(NULL),
    _avStreamPtr(NULL),
    _pictPtr(NULL),
    _rgbPictPtr(NULL)
{
}

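/**
 * Create a movie translator for frames of the given width and height,
 * with the specified bit rate (bits/second) and frame rate
 * (frames/second).
 */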
AVTranslate::AVTranslate(size_t width, size_t height, size_t bitRate,
                         float frameRate)
  : _width (width),
    _height (height),
    _bitRate(bitRate),
    _frameRate(frameRate),
    _videoOutbufSize(200000),
    _videoOutbuf(NULL),
    _fmtPtr(NULL),
    _ocPtr(NULL),
    _avStreamPtr(NULL),
    _pictPtr(NULL),
    _rgbPictPtr(NULL)
{
}

/**
 * Copy constructor
 * @param o AVTranslate object to copy
 */
 /*
AVTranslate::AVTranslate(const AVTranslate& o)
{}
*/

AVTranslate::~AVTranslate()
{
#ifdef notdef
    /* FIXME: This can't be right.  Don't want to automatically write out
     * the trailer etc. on destruction of this object. */
    done();
#endif
}

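/**
 * Set up the output movie: guess the container format from the file
 * name (falling back to MPEG), allocate the format context, add the
 * video stream, open the codec, open the output file, and write the
 * stream header.
 *
 * @param status   Outcome for reporting errors
 * @param filename name of the movie file to create
 * @return true on success, false otherwise (with details in status)
 */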
bool
AVTranslate::init(Outcome &status, const char *filename)
{
    status.addContext("Rappture::AVTranslate::init()");
    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    /* auto-detect the output format from the file name; the default is
       mpeg. */
    _fmtPtr = guess_format(NULL, filename, NULL);
    if (_fmtPtr == NULL) {
        /*
        printf(  "Could not deduce output format from "
                 "file extension: using MPEG.\n");
        */
        _fmtPtr = guess_format("mpeg", NULL, NULL);
    }
    if (_fmtPtr == NULL) {
        status.addError("Could not find suitable output format");
        return false;
    }

#ifdef HAVE_AVFORMAT_ALLOC_CONTEXT
    /* allocate the output media context */
    _ocPtr = avformat_alloc_context();
#else
    _ocPtr = av_alloc_format_context();
#endif

    if (!_ocPtr) {
        status.addError("Memory error while allocating format context");
        return false;
    }
    _ocPtr->oformat = _fmtPtr;
    snprintf(_ocPtr->filename, sizeof(_ocPtr->filename), "%s", filename);

    /* add the video stream using the format's default video codec
       and initialize the codec */
    _avStreamPtr = NULL;
    if (_fmtPtr->video_codec != CODEC_ID_NONE) {
        if (!addVideoStream(status, _fmtPtr->video_codec, &_avStreamPtr)) {
            return false;
        }
    }

    /* set the output parameters (must be done even if there are no
       parameters). */
    if (av_set_parameters(_ocPtr, NULL) < 0) {
        status.addError("Invalid output format parameters");
        return false;
    }

    dump_format(_ocPtr, 0, filename, 1);

    /* now that all the parameters are set, we can open the
       video codec and allocate the necessary encode buffers */
    if (_avStreamPtr) {
        if (!openVideo(status)) {
            return false;
        }
    }

    /* open the output file, if needed */
    if (!(_fmtPtr->flags & AVFMT_NOFILE)) {
        if (url_fopen(&_ocPtr->pb, filename, URL_WRONLY) < 0) {
            status.addError("Could not open '%s'", filename);
            return false;
        }
    }

    /* write the stream header, if any */
    av_write_header(_ocPtr);
    return true;
}

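/**
 * Append one frame of RGB pixel data to the movie.  The input is
 * expected to be a bottom-up (GL-style) image, 3 bytes per pixel,
 * with linePad extra bytes at the end of each scan line.
 *
 * @param status  Outcome for reporting errors
 * @param rgbData frame pixels, _width x _height, 24-bit RGB
 * @param linePad number of padding bytes at the end of each scan line
 * @return true on success
 */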
bool
AVTranslate::append(Outcome &status, uint8_t *rgbData, size_t linePad)
{
    status.addContext("Rappture::AVTranslate::append()");
    if (rgbData == NULL) {
        status.addError("rgbData pointer is NULL");
        return false;
    }
    /* Copy the data into the picture, stripping the padding and reversing
     * the rows.  Note that the origin of the GL image is the lower-left
     * while for the movie it's the upper-left. */
    size_t bytesPerRow = _width * 3;
    size_t bytesPerLine = bytesPerRow + linePad;
    uint8_t *srcRowPtr = rgbData + ((_height - 1) * bytesPerLine);
    uint8_t *destPtr = _rgbPictPtr->data[0];
    for (size_t y = 0; y < _height; y++) {
        uint8_t *sp, *send;

        for (sp = srcRowPtr, send = sp + bytesPerRow; sp < send; sp++, destPtr++) {
            *destPtr = *sp;
        }
        srcRowPtr -= bytesPerLine;
    }

#ifdef HAVE_IMG_CONVERT
    // use img_convert instead of sws_scale because img_convert
    // is LGPL and sws_scale is GPL
    img_convert((AVPicture *)_pictPtr, PIX_FMT_YUV420P,
                (AVPicture *)_rgbPictPtr, PIX_FMT_RGB24,
                _width, _height);
#endif  /*HAVE_IMG_CONVERT*/
    if (!writeVideoFrame(status)) {
        return false;
    }
    return true;
}

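/**
 * Finish the movie: close the video codec, write the container
 * trailer, free the streams and the format context, and close the
 * output file.
 *
 * @param status Outcome for reporting errors
 * @return true on success
 */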
bool
AVTranslate::done(Outcome &status)
{
    size_t i = 0;

    /* close each codec */
    if (_avStreamPtr) {
        closeVideo(status);
    }

    /* write the trailer, if any */
    av_write_trailer(_ocPtr);

    /* free the streams */
    for (i = 0; i < _ocPtr->nb_streams; i++) {
        av_freep(&_ocPtr->streams[i]->codec);
        // _ocPtr->streams[i]->codec = NULL;

        av_freep(&_ocPtr->streams[i]);
        // _ocPtr->streams[i] = NULL;
    }

    if (!(_fmtPtr->flags & AVFMT_NOFILE)) {
        /* close the output file */
        url_fclose(_ocPtr->pb);
    }

    /* free the stream */
    av_free(_ocPtr);
    _ocPtr = NULL;
    return true;
}

/**
 * Add a video output stream to the media context, using the given
 * codec, and set its encoding parameters (bit rate, frame size,
 * frame rate, GOP size, pixel format).
 *
 * @param status       Outcome for reporting errors
 * @param codec_id     id of the video codec to use
 * @param streamPtrPtr (out) receives the newly created stream
 * @return true on success
 */
bool
AVTranslate::addVideoStream(Outcome &status, CodecID codec_id,
                            AVStream **streamPtrPtr)
{
    status.addContext("Rappture::AVTranslate::addVideoStream()");
    if (streamPtrPtr == NULL) {
        status.addError("AVStream **st is NULL");
        return false;
    }

    AVStream *streamPtr;
    streamPtr = av_new_stream(_ocPtr, 0);
    if (streamPtr == NULL) {
        status.addError("Could not alloc stream");
        return false;
    }

    AVCodecContext *codecPtr;
    codecPtr = streamPtr->codec;
    codecPtr->codec_id = codec_id;
    codecPtr->codec_type = CODEC_TYPE_VIDEO;

    /* put sample parameters */
    codecPtr->bit_rate = _bitRate;
    /* resolution must be a multiple of two */
    codecPtr->width = _width;
    codecPtr->height = _height;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented.  For fixed-fps content,
       the time base should be 1/framerate and the timestamp increments
       should be identically 1. */
    codecPtr->time_base.den = _frameRate;
    codecPtr->time_base.num = 1;
    codecPtr->gop_size = 12;    /* emit one intra frame every twelve frames
                                 * at most */
    codecPtr->pix_fmt = PIX_FMT_YUV420P;
    if (codecPtr->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        codecPtr->max_b_frames = 2;
    }
    if (codecPtr->codec_id == CODEC_ID_MPEG1VIDEO) {
        /* Needed to avoid using macroblocks in which some coefficients
           overflow.  This does not happen with normal video; it just
           happens here because the motion of the chroma plane does not
           match the luma plane. */
        codecPtr->mb_decision = 2;
    }
    // some formats want stream headers to be separate
    if ((strcmp(_ocPtr->oformat->name, "mp4") == 0) ||
        (strcmp(_ocPtr->oformat->name, "mov") == 0) ||
        (strcmp(_ocPtr->oformat->name, "3gp") == 0)) {
        codecPtr->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    *streamPtrPtr = streamPtr;
    return true;
}

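/**
 * Allocate an AVFrame together with an image buffer of the given
 * pixel format at the movie's width and height.
 *
 * @param status      Outcome for reporting errors
 * @param pixFmt      pixel format of the picture (e.g. PIX_FMT_RGB24)
 * @param framePtrPtr (out) receives the newly allocated frame
 * @return true on success
 */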
bool
AVTranslate::allocPicture(Outcome &status, int pixFmt, AVFrame **framePtrPtr)
{
    status.addContext("Rappture::AVTranslate::allocPicture()");
    if (framePtrPtr == NULL) {
        status.addError("AVFrame **p == NULL");
        return false;
    }

    AVFrame *framePtr;
    framePtr = avcodec_alloc_frame();
    if (framePtr == NULL) {
        status.addError("Memory error: Could not alloc frame");
        return false;
    }

    size_t size;
    size = avpicture_get_size(pixFmt, _width, _height);

    uint8_t *bits;
    bits = (uint8_t *)av_malloc(size);
    if (bits == NULL) {
        av_free(framePtr);
        status.addError("Memory error: Could not alloc picture buffer");
        return false;
    }
    avpicture_fill((AVPicture *)framePtr, bits, pixFmt, _width, _height);
    *framePtrPtr = framePtr;
    return true;
}

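/**
 * Find and open the encoder for the video stream, allocate the
 * encode output buffer, and allocate the YUV and RGB pictures used
 * for encoding frames.
 *
 * @param status Outcome for reporting errors
 * @return true on success
 */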
bool
AVTranslate::openVideo(Outcome &status)
{
    AVCodec *codec;
    AVCodecContext *c;

    status.addContext("Rappture::AVTranslate::openVideo()");
    c = _avStreamPtr->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        status.addError("codec not found");
        return false;
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        status.addError("could not open codec");
        return false;
    }

    _videoOutbuf = NULL;
    if (!(_ocPtr->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        /* buffers passed into lav* can be allocated any way you prefer,
           as long as they're aligned enough for the architecture, and
           they're freed appropriately (such as using av_free for buffers
           allocated with av_malloc) */
        _videoOutbuf = (uint8_t *) av_malloc(_videoOutbufSize);
    }
    /* Allocate the encoded raw picture */
    if (!allocPicture(status, c->pix_fmt, &_pictPtr)) {
        return false;
    }
    if (!allocPicture(status, PIX_FMT_RGB24, &_rgbPictPtr)) {
        status.addError("allocPicture: can't allocate picture");
        return false;
    }
    return true;
}

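/**
 * Encode the current YUV picture and write the resulting compressed
 * frame to the media file.  Returns true if the frame was written or
 * merely buffered by the encoder, false on error.
 */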
bool
AVTranslate::writeVideoFrame(Outcome &status)
{
    AVCodecContext *codecPtr;

    status.addContext("Rappture::AVTranslate::writeVideoFrame()");
    codecPtr = _avStreamPtr->codec;

    /* encode the image */
    int size;
    size = avcodec_encode_video(codecPtr, _videoOutbuf, _videoOutbufSize,
        _pictPtr);
    if (size < 0) {
        status.addError("Error while writing video frame");
        return false;
    }
    if (size == 0) {
        return true;            /* Image was buffered */
    }
    AVPacket pkt;
    av_init_packet(&pkt);

    pkt.pts = av_rescale_q(codecPtr->coded_frame->pts, codecPtr->time_base,
                           _avStreamPtr->time_base);
    if (codecPtr->coded_frame->key_frame) {
        pkt.flags |= PKT_FLAG_KEY;
    }
    pkt.stream_index = _avStreamPtr->index;
    pkt.data = _videoOutbuf;
    pkt.size = size;

    /* write the compressed frame to the media file */
    if (av_write_frame(_ocPtr, &pkt) < 0) {
        status.addError("Error while writing video frame");
        return false;
    }
    return true;
}

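/**
 * Close the video codec and free the YUV picture, the RGB picture,
 * and the encode output buffer.
 */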
bool
AVTranslate::closeVideo(Outcome &status)
{
    if (_avStreamPtr != NULL) {
        avcodec_close(_avStreamPtr->codec);
    }
    if (_pictPtr != NULL) {
        av_free(_pictPtr->data[0]);
        av_free(_pictPtr);
        _pictPtr = NULL;
    }
    if (_rgbPictPtr != NULL) {
        av_free(_rgbPictPtr->data[0]);
        av_free(_rgbPictPtr);
        _rgbPictPtr = NULL;
    }
    if (_videoOutbuf != NULL) {
        av_free(_videoOutbuf);
        _videoOutbuf = NULL;
    }
    return true;
}

/*
status.addError("error while opening file");
status.addContext("Rappture::Buffer::dump()");
return status;
*/
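
/*
 * Minimal usage sketch (illustrative only; the frame source, its
 * dimensions, and the absence of row padding are assumptions):
 *
 *     Rappture::Outcome status;
 *     Rappture::AVTranslate movie(640, 480);
 *     if (movie.init(status, "out.mpeg")) {
 *         for (size_t i = 0; i < numFrames; i++) {
 *             uint8_t *rgb = grabFrame(i);  // hypothetical bottom-up RGB24 frame
 *             movie.append(status, rgb, 0); // rows assumed unpadded
 *         }
 *         movie.done(status);
 *     }
 */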