This is my first question, so I hope I did it correctly. If not, please let me know so I can fix it.
I'm trying to convert a short (10 s) MP4 video file into a GIF using the ffmpeg libraries (I'm pretty new to ffmpeg). The program converts to GIF just fine most of the time, but sometimes it crashes at random.
These are the versions of the ffmpeg libraries I'm using:
libavutil 54. 27.100
libavcodec 56. 41.100
libavformat 56. 36.100
libavdevice 56. 4.100
libavfilter 5. 16.101
libavresample 2. 1. 0
libswscale 3. 1.101
libswresample 1. 2.100
libpostproc 53. 3.100
The input video is 1920x1080, so in order to generate the GIF I do a pixel format conversion from AV_PIX_FMT_YUV420P to AV_PIX_FMT_RGB8, resizing from the initial resolution down to 432x240.
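Conceptually, the per-frame conversion boils down to something like this (a simplified sketch of what the full code below does; decodedFrame and rgbFrame are just placeholder names for the decoded source frame and the destination frame, and error handling is omitted):

// Simplified sketch of the per-frame scale + pixel format conversion
// (decodedFrame/rgbFrame are placeholders; error checks omitted).
SwsContext *ctx = sws_getContext(1920, 1080, AV_PIX_FMT_YUV420P,        // source
                                 432,  240,  AV_PIX_FMT_RGB8,           // destination
                                 SWS_ERROR_DIFFUSION, NULL, NULL, NULL);

sws_scale(ctx, (const uint8_t* const*)decodedFrame->data, decodedFrame->linesize,
          0, 1080, rgbFrame->data, rgbFrame->linesize);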
Here is the full code:
int VideoManager::loadVideo(QString filename, bool showInfo)
{
    if(avformat_open_input(&iFmtCtx, filename.toStdString().c_str(), 0, 0) < 0)
    {
        qDebug() << "Could not open input file " << filename;
        closeInput();
        return -1;
    }

    if (avformat_find_stream_info(iFmtCtx, 0) < 0)
    {
        qDebug() << "Failed to retrieve input stream information";
        closeInput();
        return -2;
    }

    videoStreamIndex = -1;
    for(unsigned int i = 0; i < iFmtCtx->nb_streams; ++i)
        if(iFmtCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoStreamIndex = i;
            break;
        }

    if(videoStreamIndex == -1)
    {
        qDebug() << "Didn't find any video stream!";
        closeInput();
        return -3;
    }

    iCodecCtx = iFmtCtx->streams[videoStreamIndex]->codec;
    iCodec = avcodec_find_decoder(iCodecCtx->codec_id);
    if(iCodec == NULL) // Codec not found
    {
        qDebug() << "Codec not found!";
        closeInput();
        return -4;
    }

    if(avcodec_open2(iCodecCtx, iCodec, NULL) < 0)
    {
        qDebug() << "Could not open codec!";
        closeInput();
        return -1;
    }

    if(showInfo)
        av_dump_format(iFmtCtx, 0, filename.toStdString().c_str(), 0);

    return 0;
}
void VideoManager::generateGif(QString filename)
{
    int ret, frameCount = 0;
    AVPacket packet;
    packet.data = NULL;
    packet.size = 0;
    AVFrame *frame = NULL;
    unsigned int stream_index;
    int got_frame;

    gifHeight = iFmtCtx->streams[videoStreamIndex]->codec->height;
    gifWidth  = iFmtCtx->streams[videoStreamIndex]->codec->width;

    if(gifHeight > MAX_GIF_HEIGHT || gifWidth > MAX_GIF_WIDTH)
    {
        if(gifHeight > gifWidth)
        {
            gifWidth  = (float)gifWidth * ( (float)MAX_GIF_HEIGHT / (float)gifHeight );
            gifHeight = MAX_GIF_HEIGHT;
        }
        else
        {
            gifHeight = (float)gifHeight * ( (float)MAX_GIF_WIDTH / (float)gifWidth );
            gifWidth  = MAX_GIF_WIDTH;
        }
    }

    if(openOutputFile(filename.toStdString().c_str()) < 0)
    {
        qDebug() << "Error opening output file: " << filename;
        return;
    }

    while (1) {
        int ret = av_read_frame(iFmtCtx, &packet);
        if (ret < 0)
        {
            if(ret != AVERROR_EOF)
                qDebug() << "Error reading frame: " << ret;
            break;
        }
        stream_index = packet.stream_index;

        if(stream_index == videoStreamIndex)
        {
            frame = av_frame_alloc();
            if (!frame) {
                qDebug() << "Error allocating frame";
                break;
            }
            av_packet_rescale_ts(&packet,
                                 iFmtCtx->streams[stream_index]->time_base,
                                 iFmtCtx->streams[stream_index]->codec->time_base);

            ret = avcodec_decode_video2(iFmtCtx->streams[stream_index]->codec, frame,
                                        &got_frame, &packet);
            if (ret < 0) {
                qDebug() << "Decoding failed";
                break;
            }

            if(got_frame)
            {
                qDebug() << ++frameCount;
                nframes++;
                frame->pts = av_frame_get_best_effort_timestamp(frame);

                ////////////////////////////////////////////////////////////////////////////////
                /// Pixel format conversion and resize
                ////////////////////////////////////////////////////////////////////////////////
                uint8_t *out_buffer = NULL;
                SwsContext *img_convert_ctx = NULL;
                AVFrame *pFrameRGB = av_frame_alloc();
                if(pFrameRGB == NULL)
                {
                    qDebug() << "Error allocating frameRGB";
                    break;
                }

                AVPixelFormat pixFmt;
                switch (iFmtCtx->streams[stream_index]->codec->pix_fmt)
                {
                case AV_PIX_FMT_YUVJ420P : pixFmt = AV_PIX_FMT_YUV420P; break;
                case AV_PIX_FMT_YUVJ422P : pixFmt = AV_PIX_FMT_YUV422P; break;
                case AV_PIX_FMT_YUVJ444P : pixFmt = AV_PIX_FMT_YUV444P; break;
                case AV_PIX_FMT_YUVJ440P : pixFmt = AV_PIX_FMT_YUV440P; break;
                default:
                    pixFmt = iFmtCtx->streams[stream_index]->codec->pix_fmt;
                }

                out_buffer = (uint8_t*)av_malloc( avpicture_get_size( AV_PIX_FMT_RGB8,
                                                                      gifWidth,
                                                                      gifHeight ));
                if(!out_buffer)
                {
                    qDebug() << "Error allocating out_buffer!";
                }
                avpicture_fill((AVPicture *)pFrameRGB, out_buffer, AV_PIX_FMT_RGB8,
                               gifWidth,
                               gifHeight);

                img_convert_ctx = sws_getContext( iFmtCtx->streams[stream_index]->codec->width,
                                                  iFmtCtx->streams[stream_index]->codec->height,
                                                  pixFmt,
                                                  gifWidth,
                                                  gifHeight,
                                                  AV_PIX_FMT_RGB8,
                                                  SWS_ERROR_DIFFUSION, NULL, NULL, NULL );
                if(!img_convert_ctx)
                {
                    qDebug() << "error getting sws context";
                }

                sws_scale( img_convert_ctx, (const uint8_t* const*)frame->data,
                           frame->linesize, 0,
                           iFmtCtx->streams[stream_index]->codec->height,
                           pFrameRGB->data,
                           pFrameRGB->linesize );

                pFrameRGB->format = AV_PIX_FMT_RGB8;
                pFrameRGB->pts = frame->pts;
                pFrameRGB->best_effort_timestamp = frame->best_effort_timestamp;
                pFrameRGB->width = gifWidth;
                pFrameRGB->height = gifHeight;
                pFrameRGB->pkt_dts = frame->pkt_dts;
                pFrameRGB->pkt_pts = frame->pkt_pts;
                pFrameRGB->pkt_duration = frame->pkt_duration;
                pFrameRGB->pkt_pos = frame->pkt_pos;
                pFrameRGB->pkt_size = frame->pkt_size;
                pFrameRGB->interlaced_frame = frame->interlaced_frame;
                ////////////////////////////////////////////////////////////////////////////////

                ret = encodeAndWriteFrame(pFrameRGB, stream_index, NULL);
                //av_frame_free(&frame);
                //av_free(out_buffer);
                //sws_freeContext(img_convert_ctx);
                if (ret < 0)
                {
                    qDebug() << "Error encoding and writing frame";
                    //av_free_packet(&packet);
                    closeOutput();
                }
            }
            else {
                //av_frame_free(&frame);
            }
        }
        av_free_packet(&packet);
    }

    ret = flushEncoder(videoStreamIndex);
    if (ret < 0)
    {
        qDebug() << "Flushing encoder failed";
    }
    av_write_trailer(oFmtCtx);

    //av_free_packet(&packet);
    //av_frame_free(&frame);
    closeOutput();
}
void VideoManager::closeOutput()
{
    if (oFmtCtx && oFmtCtx->nb_streams > 0 && oFmtCtx->streams[0] && oFmtCtx->streams[0]->codec)
        avcodec_close(oFmtCtx->streams[0]->codec);
    if (oFmtCtx && oFmt && !(oFmt->flags & AVFMT_NOFILE))
        avio_closep(&oFmtCtx->pb);
    avformat_free_context(oFmtCtx);
}
int VideoManager::openOutputFile(const char *filename)
{
    AVStream *out_stream;
    AVStream *in_stream;
    AVCodecContext *dec_ctx, *enc_ctx;
    AVCodec *encoder;
    int ret;

    oFmtCtx = NULL;
    avformat_alloc_output_context2(&oFmtCtx, NULL, NULL, filename);
    if (!oFmtCtx) {
        qDebug() << "Could not create output context";
        return AVERROR_UNKNOWN;
    }
    oFmt = oFmtCtx->oformat;

    out_stream = avformat_new_stream(oFmtCtx, NULL);
    if (!out_stream) {
        qDebug() << "Failed allocating output stream";
        return AVERROR_UNKNOWN;
    }

    in_stream = iFmtCtx->streams[videoStreamIndex];
    dec_ctx = in_stream->codec;
    enc_ctx = out_stream->codec;

    encoder = avcodec_find_encoder(AV_CODEC_ID_GIF);
    if (!encoder) {
        qDebug() << "FATAL!: Necessary encoder not found";
        return AVERROR_INVALIDDATA;
    }

    enc_ctx->height = gifHeight;
    enc_ctx->width = gifWidth;
    enc_ctx->sample_aspect_ratio = dec_ctx->sample_aspect_ratio;
    enc_ctx->pix_fmt = AV_PIX_FMT_RGB8;
    enc_ctx->time_base = dec_ctx->time_base;

    ret = avcodec_open2(enc_ctx, encoder, NULL);
    if (ret < 0) {
        qDebug() << "Cannot open video encoder for gif";
        return ret;
    }

    if (oFmt->flags & AVFMT_GLOBALHEADER)
        enc_ctx->flags |= CODEC_FLAG_GLOBAL_HEADER;

    if (!(oFmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&oFmtCtx->pb, filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            qDebug() << "Could not open output file " << filename;
            return ret;
        }
    }

    ret = avformat_write_header(oFmtCtx, NULL);
    if (ret < 0) {
        qDebug() << "Error occurred when opening output file";
        return ret;
    }
    return 0;
}
int VideoManager::encodeAndWriteFrame(AVFrame *frame, unsigned int stream_index, int *got_frame) {
    int ret;
    int got_frame_local;
    AVPacket enc_pkt;

    if (!got_frame)
        got_frame = &got_frame_local;

    enc_pkt.data = NULL;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);

    ret = avcodec_encode_video2(oFmtCtx->streams[stream_index]->codec, &enc_pkt,
                                frame, got_frame);
    //av_frame_free(&frame);
    if (ret < 0)
        return ret;
    if (!(*got_frame))
        return 0;

    enc_pkt.stream_index = stream_index;
    av_packet_rescale_ts(&enc_pkt,
                         oFmtCtx->streams[stream_index]->codec->time_base,
                         oFmtCtx->streams[stream_index]->time_base);

    ret = av_interleaved_write_frame(oFmtCtx, &enc_pkt);
    return ret;
}
int VideoManager::flushEncoder(unsigned int stream_index)
{
    int ret;
    int got_frame;

    if (!(oFmtCtx->streams[stream_index]->codec->codec->capabilities &
          CODEC_CAP_DELAY))
        return 0;

    while (1) {
        ret = encodeAndWriteFrame(NULL, stream_index, &got_frame);
        if (ret < 0)
            break;
        if (!got_frame)
            return 0;
    }
    return ret;
}
I know there are a lot of memory leaks. I deleted/commented out most of the free calls intentionally because I thought they might be the problem.
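For reference, re-enabling that commented-out cleanup at the end of each loop iteration would look roughly like this (pFrameRGB itself is never freed anywhere in the posted code, so I've added that here as well):

// Rough sketch of the per-frame cleanup that is commented out above
// (plus freeing pFrameRGB, which the posted code never does).
sws_freeContext(img_convert_ctx);   // scaler context created for this frame
av_free(out_buffer);                // buffer backing pFrameRGB's data planes
av_frame_free(&pFrameRGB);          // the RGB8 destination frame
av_frame_free(&frame);              // the decoded source frame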
I'm using Qt Creator, and this is the backtrace I get when I debug the program:
Level  Function                             Line
0      av_image_copy                         303
1      frame_copy_video                      650
2      av_frame_copy                         687
3      av_frame_ref                          384
4      gif_encode_frame                      307
5      avcodec_encode_video2                2191
6      VideoManager::encodeAndWriteFrame     813
7      VideoManager::generateGif             375
8      qMain                                  31
9      WinMain*16                            112
10     main
I've checked whether there is a specific frame the program crashes at, but the crash happens on a random frame as well.
Any idea what I'm doing wrong? Any help would be greatly appreciated.
EDIT:
After a few days of pain, suffering and frustration I decided to rewrite the whole thing from scratch. Both times I started from this example and modified it to work as described above, and now it works perfectly :D!
The only error I could find in the old code (posted above) is that when accessing the video stream in the output file I used videoStreamIndex, but that index belongs to the video stream in the input file. Sometimes the two indexes happen to be the same and sometimes not. However, that doesn't explain the random crashes: if that were the cause, it should crash every time I run the code with the same video. So there are probably more errors in that code.
Note that I have not tested whether fixing that error in the code above actually solves the crashing problem.
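For anyone curious, this is an untested sketch of what that fix would look like in the old code: openOutputFile() creates only one stream in oFmtCtx, so everything touching the output context should use stream 0 rather than the input's videoStreamIndex.

// Untested sketch of the stream-index fix in the old code.
ret = encodeAndWriteFrame(pFrameRGB, 0, NULL);   // in generateGif(), instead of stream_index
// ...
ret = flushEncoder(0);                           // instead of flushEncoder(videoStreamIndex)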