source: trunk/packages/vizservers/nanovis/RpAVTranslate.cpp @ 2618


/*
 * ======================================================================
 *  Rappture::AVTranslate
 *
 *  AUTHOR:  Derrick Kearney, Purdue University
 *
 *  Copyright (c) 2005-2009  Purdue Research Foundation
 * ----------------------------------------------------------------------
 *  See the file "license.terms" for information on usage and
 *  redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
 * ======================================================================
 */


#include "nvconf.h"

#if defined(HAVE_LIBAVCODEC) || defined(HAVE_LIBAVFORMAT)
#include <cstdlib>
#include <cstdio>
#include <cstring>
#include <cmath>
#include <fstream>
#include <assert.h>

extern "C" {
#ifdef HAVE_FFMPEG_MEM_H
#include <ffmpeg/mem.h>
#endif
#ifdef HAVE_LIBAVUTIL_MEM_H
#include <libavutil/mem.h>
#endif
}

#include "RpAVTranslate.h"

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

using namespace Rappture;

AVTranslate::AVTranslate(size_t width, size_t height) :
    _width (width),
    _height (height),
    _bitRate(400000),
    _frameRate(25.0f),
    _videoOutbufSize(200000),
    _videoOutbuf(NULL),
    _fmtPtr(NULL),
    _ocPtr(NULL),
    _avStreamPtr(NULL),
    _pictPtr(NULL),
    _rgbPictPtr(NULL)
{
}

AVTranslate::AVTranslate(size_t width, size_t height, size_t bitRate,
                         float frameRate)
  : _width (width),
    _height (height),
    _bitRate(bitRate),
    _frameRate(frameRate),
    _videoOutbufSize(200000),
    _videoOutbuf(NULL),
    _fmtPtr(NULL),
    _ocPtr(NULL),
    _avStreamPtr(NULL),
    _pictPtr(NULL),
    _rgbPictPtr(NULL)
{
}

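/*
 * Typical call sequence for this class (an illustrative sketch only; the
 * caller-side frame buffer and loop are assumptions, not part of this file):
 *
 *     Rappture::Outcome status;
 *     Rappture::AVTranslate movie(width, height);
 *     if (!movie.init(status, "out.mpeg")) { ... report status ... }
 *     // for each rendered frame:
 *     //     rgbData holds packed RGB24 rows read back from GL (origin lower-left)
 *     //     movie.append(status, rgbData, linePad);
 *     movie.done(status);
 */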
/**
 * Copy constructor
 * @param AVTranslate object to copy
 */
 /*
AVTranslate::AVTranslate(const AVTranslate& o)
{}
*/

AVTranslate::~AVTranslate()
{
#ifdef notdef
    /* FIXME: This can't be right.  We don't want to automatically write out
     * the trailer etc. on destruction of this object. */
    done();
#endif
}


bool
AVTranslate::init(Outcome &status, const char *filename)
{
    status.addContext("Rappture::AVTranslate::init()");
    /* Initialize libavcodec, and register all codecs and formats */
    avcodec_init();
    avcodec_register_all();
    av_register_all();

    /* Auto detect the output format from the name. default is mpeg. */
    _fmtPtr = guess_format(NULL, filename, NULL);
    if (_fmtPtr == NULL) {
        /*
          TRACE("Could not deduce output format from "
                "file extension: using MPEG.\n");
        */
        _fmtPtr = guess_format("mpeg", NULL, NULL);
    }
    if (_fmtPtr == NULL) {
        status.addError("Could not find suitable output format");
        return false;
    }

#ifdef HAVE_AVFORMAT_ALLOC_CONTEXT
    /* Allocate the output media context. */
    _ocPtr = avformat_alloc_context();
#else
    _ocPtr = av_alloc_format_context();
#endif
    if (!_ocPtr) {
        status.addError("Memory error while allocating format context");
        return false;
    }
    _ocPtr->oformat = _fmtPtr;
    snprintf(_ocPtr->filename, sizeof(_ocPtr->filename), "%s", filename);

    /* Add the video stream using the default format codecs and initialize the
       codecs. */
    _avStreamPtr = NULL;
    if (_fmtPtr->video_codec != CODEC_ID_NONE) {
        if (!addVideoStream(status, _fmtPtr->video_codec, &_avStreamPtr)) {
            return false;
        }
    }

    /* Set the output parameters (must be done even if no parameters). */
    if (av_set_parameters(_ocPtr, NULL) < 0) {
        status.addError("Invalid output format parameters");
        return false;
    }

    dump_format(_ocPtr, 0, filename, 1);

    /* Now that all the parameters are set, we can open the video codec and
       allocate the necessary encode buffers */
    if (_avStreamPtr) {
        if (!openVideo(status)) {
            return false;
        }
    }

    /* Open the output file, if needed. */
    if (!(_fmtPtr->flags & AVFMT_NOFILE)) {
        if (url_fopen(&_ocPtr->pb, filename, URL_WRONLY) < 0) {
            status.addError("Could not open '%s'", filename);
            return false;
        }
    }

    /* write the stream header, if any */
    av_write_header(_ocPtr);
    return true;
}

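/*
 * append() expects packed RGB24 pixels as read back from OpenGL: rows start
 * at the lower-left corner and each row may be padded out to the pack
 * alignment.  A sketch of how a caller might compute linePad (assuming
 * glReadPixels with the default GL_PACK_ALIGNMENT of 4; not part of this
 * file):
 *
 *     size_t bytesPerRow = width * 3;
 *     size_t linePad = (4 - (bytesPerRow % 4)) % 4;
 */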
bool
AVTranslate::append(Outcome &status, uint8_t *rgbData, size_t linePad)
{
    status.addContext("Rappture::AVTranslate::append()");
    if (rgbData == NULL) {
        status.addError("rgbData pointer is NULL");
        return false;
    }
    /* Copy the data into the picture, stripping the line padding and
     * reversing the row order.  The origin of the GL image is the lower-left
     * corner, while the movie expects the upper-left. */
    size_t bytesPerRow = _width * 3;
    size_t bytesPerLine = bytesPerRow + linePad;
    uint8_t *srcRowPtr = rgbData + ((_height - 1) * bytesPerLine);
    uint8_t *destPtr = _rgbPictPtr->data[0];
    for (size_t y = 0; y < _height; y++) {
        uint8_t *sp, *send;

        for (sp = srcRowPtr, send = sp + bytesPerRow; sp < send; sp++, destPtr++) {
            *destPtr = *sp;
        }
        srcRowPtr -= bytesPerLine;
    }

#ifdef HAVE_IMG_CONVERT
    // Use img_convert instead of sws_scale because img_convert is LGPL and
    // sws_scale is GPL
    img_convert((AVPicture *)_pictPtr, PIX_FMT_YUV420P,
                (AVPicture *)_rgbPictPtr, PIX_FMT_RGB24,
                _width, _height);
#endif  /*HAVE_IMG_CONVERT*/
    if (!writeVideoFrame(status)) {
        return false;
    }
    return true;
}


bool
AVTranslate::done(Outcome &status)
{
    size_t i = 0;

    /* Close each codec */
    if (_avStreamPtr) {
        closeVideo(status);
    }

    /* Write the trailer, if any */
    av_write_trailer(_ocPtr);

    /* Free the streams */
    for (i = 0; i < _ocPtr->nb_streams; i++) {
        av_freep(&_ocPtr->streams[i]->codec);
        // _ocPtr->streams[i]->codec = NULL;

        av_freep(&_ocPtr->streams[i]);
        // _ocPtr->streams[i] = NULL;
    }

    if (!(_fmtPtr->flags & AVFMT_NOFILE)) {
        /* close the output file */
        url_fclose(_ocPtr->pb);
    }

    /* Free the output format context */
    av_free(_ocPtr);
    _ocPtr = NULL;
    return true;
}


/* Add a video output stream */
bool
AVTranslate::addVideoStream(Outcome &status, CodecID codec_id,
                            AVStream **streamPtrPtr)
{
    status.addContext("Rappture::AVTranslate::addVideoStream()");
    if (streamPtrPtr == NULL) {
        status.addError("AVStream **st is NULL");
        return false;
    }

    AVStream *streamPtr;
    streamPtr = av_new_stream(_ocPtr, 0);
    if (streamPtr == NULL) {
        status.addError("Could not alloc stream");
        return false;
    }

    AVCodecContext *codecPtr;
    codecPtr = streamPtr->codec;
    codecPtr->codec_id = codec_id;
    codecPtr->codec_type = CODEC_TYPE_VIDEO;

    /* Put sample parameters */
    codecPtr->bit_rate = _bitRate;
    /* resolution must be a multiple of two */
    codecPtr->width = _width;
    codecPtr->height = _height;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. for fixed-fps content,
       timebase should be 1/framerate and timestamp increments should be
       identically 1. */
    codecPtr->time_base.den = _frameRate;
    codecPtr->time_base.num = 1;
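    /* For example (illustrative numbers only): with the default _frameRate
     * of 25, time_base becomes 1/25, i.e. one tick per frame, so the encoder
     * sees pts values that advance by 1 for each appended frame. */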
    codecPtr->gop_size = 12;            /* Emit one intra frame every twelve
                                         * frames at most */
    codecPtr->pix_fmt = PIX_FMT_YUV420P;
    if (codecPtr->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        codecPtr->max_b_frames = 2;
    }
    if (codecPtr->codec_id == CODEC_ID_MPEG1VIDEO) {
        /* Needed to avoid using macroblocks in which some coeffs overflow.
           This does not happen with normal video, it just happens here as the
           motion of the chroma plane does not match the luma plane. */
        codecPtr->mb_decision = 2;
    }
    /* some formats want stream headers to be separate */
    if ((strcmp(_ocPtr->oformat->name, "mp4") == 0) ||
        (strcmp(_ocPtr->oformat->name, "mov") == 0) ||
        (strcmp(_ocPtr->oformat->name, "3gp") == 0)) {
        codecPtr->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    *streamPtrPtr = streamPtr;
    return true;
}

bool
AVTranslate::allocPicture(Outcome &status, PixelFormat pixFmt,
     AVFrame **framePtrPtr)
{
    status.addContext("Rappture::AVTranslate::allocPicture()");
    if (framePtrPtr == NULL) {
        status.addError("AVFrame **p == NULL");
        return false;
    }

    AVFrame *framePtr;
    framePtr = avcodec_alloc_frame();
    if (framePtr == NULL) {
        status.addError("Memory error: Could not alloc frame");
        return false;
    }

    size_t size;
    size = avpicture_get_size(pixFmt, _width, _height);
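    /* For reference (illustrative numbers, not computed here): for a 640x480
     * frame this is 640*480*3 = 921600 bytes for PIX_FMT_RGB24 and
     * 640*480*3/2 = 460800 bytes for PIX_FMT_YUV420P. */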

    uint8_t *bits;
    bits = (uint8_t *)av_malloc(size);
    if (bits == NULL) {
        av_free(framePtr);
        status.addError("Memory error: Could not alloc picture buffer");
        return false;
    }
    avpicture_fill((AVPicture *)framePtr, bits, pixFmt, _width, _height);
    *framePtrPtr = framePtr;
    return true;
}

bool
AVTranslate::openVideo(Outcome &status)
{
    AVCodec *codec;
    AVCodecContext *c;

    status.addContext("Rappture::AVTranslate::openVideo()");
    c = _avStreamPtr->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (codec == NULL) {
        status.addError("can't find codec %d", c->codec_id);
        return false;
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        status.addError("can't open codec %d", c->codec_id);
        return false;
    }

    _videoOutbuf = NULL;
    if (!(_ocPtr->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        /* buffers passed into lav* can be allocated any way you prefer,
           as long as they're aligned enough for the architecture, and
           they're freed appropriately (such as using av_free for buffers
           allocated with av_malloc) */
        _videoOutbuf = (uint8_t *) av_malloc(_videoOutbufSize);
    }
    /* Allocate the encoded raw picture */
    if (!allocPicture(status, c->pix_fmt, &_pictPtr)) {
        return false;
    }
    if (!allocPicture(status, PIX_FMT_RGB24, &_rgbPictPtr)) {
        status.addError("allocPicture: can't allocate picture");
        return false;
    }
    return true;
}

bool
AVTranslate::writeVideoFrame(Outcome &status)
{
    AVCodecContext *codecPtr;

    status.addContext("Rappture::AVTranslate::writeVideoFrame()");
    codecPtr = _avStreamPtr->codec;

    /* encode the image */
    int size;
    size = avcodec_encode_video(codecPtr, _videoOutbuf, _videoOutbufSize,
        _pictPtr);
    if (size < 0) {
        status.addError("Error while encoding video frame");
        return false;
    }
    if (size == 0) {
        return true;            /* Image was buffered */
    }
    AVPacket pkt;
    av_init_packet(&pkt);

    pkt.pts = av_rescale_q(codecPtr->coded_frame->pts, codecPtr->time_base,
                           _avStreamPtr->time_base);
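    /* av_rescale_q converts the timestamp between time bases, i.e.
     * pts * (codec time_base) / (stream time_base).  Illustrative numbers
     * only: with a codec time_base of 1/25 and a stream time_base of 1/90000,
     * a coded pts of 3 becomes 3 * 90000 / 25 = 10800. */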
    if (codecPtr->coded_frame->key_frame) {
        pkt.flags |= PKT_FLAG_KEY;
    }
    pkt.stream_index = _avStreamPtr->index;
    pkt.data = _videoOutbuf;
    pkt.size = size;

    /* Write the compressed frame into the media file. */
    if (av_write_frame(_ocPtr, &pkt) < 0) {
        status.addError("Error while writing video frame");
        return false;
    }
    return true;
}


bool
AVTranslate::closeVideo(Outcome &status)
{
    if (_avStreamPtr != NULL) {
        avcodec_close(_avStreamPtr->codec);
    }
    if (_pictPtr != NULL) {
        av_free(_pictPtr->data[0]);
        av_free(_pictPtr);
        _pictPtr = NULL;
    }
    if (_rgbPictPtr != NULL) {
        av_free(_rgbPictPtr->data[0]);
        av_free(_rgbPictPtr);
        _rgbPictPtr = NULL;
    }
    if (_videoOutbuf != NULL) {
        av_free(_videoOutbuf);
        _videoOutbuf = NULL;
    }
    return true;
}

/*
status.addError("error while opening file");
status.addContext("Rappture::Buffer::dump()");
return status;
*/

#endif /* HAVE_LIBAVCODEC || HAVE_LIBAVFORMAT */