
We fetch a camera stream over RTSP (Axis Q1755, H.264). After a random amount of time we start recording the stream to a file. When I check the video file with ffprobe, I see that the start time is not the start time of the recording but the time at which we started fetching the stream. My video is 5 seconds long and its start time should be 0, yet the actual start time of the video is at second 20. This leads to an incorrect timebase. (FFmpeg version 3.3.3)

I tried to repair it, but now I sometimes get the error `timebase 1/180000 not supported...`. I marked my changes with the comment "timebase repair logic".


ffprobe output (without the timebase repair logic). The start is higher than the duration; correctly, the start would be 0 instead of 20.080000.

Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'C:\testvideo.mp4':
  Metadata:
    major_brand     : isom
    minor_version   : 512
    compatible_brands: isomiso2avc1mp41
    encoder         : Lavf57.71.100
  Duration: 00:00:05.04, start: 20.080000, bitrate: 2675 kb/s
    Stream #0:0(und): Video: h264 (Baseline) (avc1 / 0x31637661), yuvj420p(pc, bt709), 800x450 [SAR 1:1 DAR 16:9], 2670 kb/s, 50.20 fps, 50 tbr, 180k tbn, 360k tbc (default)
    Metadata:
      handler_name    : VideoHandler

Error after adding the timebase repair logic:

[mpeg4 @ 00000178fcb12b40] timebase 1/180000 not supported by MPEG 4 standard, the maximum admitted value for the timebase denominator is 65535
Could not open codec 'mpeg4': Unspecified error
Cannot start recording
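As far as I understand, the mpeg4 encoder in libavcodec stores the time base denominator in a 16-bit field, so any denominator above 65535 (such as the 1/180000 taken over from the RTSP stream) is rejected when the codec is opened. The code that actually opens the 'mpeg4' codec is not shown here, so the following is only a minimal sketch (the helper name openMpeg4Encoder is made up) of giving an encoder context a supported time base derived from the frame rate:

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/rational.h>
}

// Hypothetical helper: open an MPEG-4 encoder with a time base the standard allows.
// avcodec_register_all() is assumed to have been called at program start (FFmpeg 3.x).
static AVCodecContext *openMpeg4Encoder(int width, int height, AVRational framerate)
{
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_MPEG4);
    AVCodecContext *ctx = codec ? avcodec_alloc_context3(codec) : NULL;
    if (!ctx)
        return NULL;

    ctx->width     = width;
    ctx->height    = height;
    ctx->pix_fmt   = AV_PIX_FMT_YUV420P;
    ctx->time_base = av_inv_q(framerate);   // e.g. {1, 50} instead of {1, 180000}

    if (avcodec_open2(ctx, codec, NULL) < 0)
    {
        avcodec_free_context(&ctx);
        return NULL;
    }
    return ctx;
}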

Here are some parts of my code.

Start Logic

bool RecordingStreamGrabber::start()
{
    CORE_LOG_INFO(m_logger, "Started RecordingStreamGrabber");
    if (m_thread == NULL)
    {
        if (this->prepareInputStream())
        {
            m_run = true;
            m_thread = new std::thread(RecordingStreamGrabber::run, this);
            return true;
        }
        CORE_LOG_ERROR(m_logger, "Error starting RecrodingStreamGrabber");
        return false;
    }
    return false;
}

Prepare Input/Output Stream

bool RecordingStreamGrabber::prepareInputStream()
{
    CORE_LOG_INFO(m_logger, "Preparing Inputstream for recording: " << m_url);

    m_ifmtctx = avformat_alloc_context();
    interrupt_recording_nostop = false;
    interrupt_recording_timeout = m_timeout; // timeout in milliseconds
    interrupt_recording_starttime = GetTickCount();
    m_ifmtctx->interrupt_callback = interrupt_timeout_cb;

    if (avformat_open_input(&m_ifmtctx, m_url.c_str(), NULL, NULL) != 0)
    {
        m_ifmtctx = NULL;
        CORE_LOG_ERROR(m_logger, "Error opening recording URL: " << m_url);
        return false;
    }

    interrupt_recording_nostop = true;

    if (avformat_find_stream_info(m_ifmtctx, NULL) < 0)
    {
        CORE_LOG_ERROR(m_logger, "Error finding stream in URL: " << m_url);
        avformat_close_input(&m_ifmtctx);
        m_ifmtctx = NULL;
        return false;
    }

    //search for the first video stream
    m_stream_index = -1;
    for (unsigned int i = 0; i < m_ifmtctx->nb_streams && m_stream_index == -1; i++)
    {
        m_iccx = m_ifmtctx->streams[i]->codec;
        if (m_iccx->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_istream = m_ifmtctx->streams[i];
            m_stream_index = i;
        }
    }

    if (m_stream_index == -1)
    {
        CORE_LOG_ERROR(m_logger, "Could not find video stream in URL: " << m_url);
        avformat_close_input(&m_ifmtctx);
        m_ifmtctx = NULL;
        return false;
    }

    return true;
}
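As a side note on the deprecated-API question: the loop above reads AVStream::codec, which is deprecated in FFmpeg 3.x. A minimal sketch of the same stream lookup using av_find_best_stream() and codecpar instead (assuming the format context has already been opened as in prepareInputStream(); the helper name is made up):

extern "C" {
#include <libavformat/avformat.h>
}

// Hypothetical helper: return the index of the best video stream, or -1 if none exists.
static int findVideoStreamIndex(AVFormatContext *fmtctx)
{
    int index = av_find_best_stream(fmtctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (index < 0)
        return -1;

    // Codec properties now live in codecpar instead of the deprecated AVStream::codec.
    AVCodecParameters *par = fmtctx->streams[index]->codecpar;
    (void)par; // e.g. par->codec_id, par->width, par->height

    return index;
}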

bool RecordingStreamGrabber::prepareOutputStream()
{
    if (m_ofmtctx)
    {
        CORE_LOG_DEBUG(m_logger, "Close outputfile: " << m_targetfile);
        avformat_free_context(m_ofmtctx);
        m_ofmtctx = NULL;
    }

    m_ofmt = av_guess_format(NULL, m_targetfile.c_str(), NULL);

    m_ofmtctx = avformat_alloc_context();

    m_ofmtctx->oformat = m_ofmt;

    if (avio_open2(&m_ofmtctx->pb, m_targetfile.c_str(), AVIO_FLAG_WRITE, NULL, NULL) != 0)
    {
        avformat_free_context(m_ofmtctx);
        m_ofmtctx = NULL;
        CORE_LOG_ERROR(m_logger, "Error opening outputfile: " << m_targetfile);
        return false;
    }

    m_ostream = avformat_new_stream(m_ofmtctx, NULL);

    avcodec_copy_context(m_ostream->codec, m_iccx);

    m_ostream->sample_aspect_ratio.num = m_iccx->sample_aspect_ratio.num;
    m_ostream->sample_aspect_ratio.den = m_iccx->sample_aspect_ratio.den;

    /* time base: this is the fundamental unit of time (in seconds) in terms
    of which frame timestamps are represented. for fixed-fps content,
    timebase should be 1/framerate and timestamp increments should be
    identically 1. */
    m_ostream->time_base.num = m_iccx->time_base.num;
    m_ostream->time_base.den = m_iccx->time_base.den;

    avformat_write_header(m_ofmtctx, NULL);

#ifdef WIN32
    sprintf_s(m_ofmtctx->filename, sizeof(m_ofmtctx->filename), "%s", m_targetfile.c_str());
#else
    snprintf(m_ofmtctx->filename, sizeof(m_ofmtctx->filename), "%s", m_targetfile.c_str());
#endif
    return true;
}
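For comparison, a minimal sketch of the same output setup without the deprecated avcodec_copy_context(), using avformat_alloc_output_context2() and avcodec_parameters_copy(). The helper name openOutput is made up, and the stream time base set here is only a proposal that the mov/mp4 muxer may replace during avformat_write_header():

extern "C" {
#include <libavformat/avformat.h>
}

// Hypothetical helper: open an MP4 target and add one stream copied from the input.
static AVFormatContext *openOutput(AVFormatContext *ifmtctx, int in_index, const char *filename)
{
    AVFormatContext *ofmtctx = NULL;
    if (avformat_alloc_output_context2(&ofmtctx, NULL, NULL, filename) < 0)
        return NULL;

    AVStream *ostream = avformat_new_stream(ofmtctx, NULL);
    if (!ostream ||
        avcodec_parameters_copy(ostream->codecpar, ifmtctx->streams[in_index]->codecpar) < 0)
    {
        avformat_free_context(ofmtctx);
        return NULL;
    }

    // Proposed time base; the muxer may overwrite it in avformat_write_header().
    ostream->time_base = ifmtctx->streams[in_index]->time_base;

    if (avio_open(&ofmtctx->pb, filename, AVIO_FLAG_WRITE) < 0)
    {
        avformat_free_context(ofmtctx);
        return NULL;
    }
    if (avformat_write_header(ofmtctx, NULL) < 0)
    {
        avio_closep(&ofmtctx->pb);
        avformat_free_context(ofmtctx);
        return NULL;
    }
    return ofmtctx;
}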

Recording Logic

void RecordingStreamGrabber::run(RecordingStreamGrabber *_this)
{
    AVPacket packet;
    av_init_packet(&packet);
    int i = 0;
    bool startFrame = true;
    bool keyFrame = false;
    int64_t pts, dts;
    _this->m_tailWritten = true;
    while (_this->m_run)
    {
        if (av_read_frame(_this->m_ifmtctx, &packet) >= 0)
        {
            if ((packet.flags & AV_PKT_FLAG_KEY) == AV_PKT_FLAG_KEY)
            {
                keyFrame = true;
                CORE_LOG_DEBUG(_this->m_logger, "Detected key frame: " << i << "");
            }

            if (_this->m_record)
            {
                if (packet.stream_index == _this->m_stream_index)
                {
                    packet.stream_index = _this->m_ostream->id;

                    if (_this->m_tailWritten == false || keyFrame == true)
                    {
                        //#####################################
                        //timebase repair logic (my changes)
                        //#####################################
                        AVStream *in_stream;
                        AVStream *out_stream;

                        in_stream = _this->m_ifmtctx->streams[packet.stream_index];
                        out_stream = _this->m_ofmtctx->streams[packet.stream_index];

                        if (startFrame)
                        {
                            pts = packet.pts;
                            dts = packet.dts;
                            startFrame = false;
                        }
                        packet.pts -= pts;
                        packet.dts -= dts;

                        packet.pts = av_rescale_q_rnd(packet.pts, in_stream->time_base, out_stream->time_base, (AVRounding)((int)AV_ROUND_NEAR_INF | (int)AV_ROUND_PASS_MINMAX));
                        packet.dts = av_rescale_q_rnd(packet.dts, in_stream->time_base, out_stream->time_base, (AVRounding)((int)AV_ROUND_NEAR_INF | (int)AV_ROUND_PASS_MINMAX));

                        packet.duration = av_rescale_q(packet.duration, in_stream->time_base, out_stream->time_base);
                        //end of my changes
                        
                        av_interleaved_write_frame(_this->m_ofmtctx, &packet);
                        _this->m_tailWritten = false;
                    }
                }
            }
            else if (_this->m_ofmtctx)
            {
                if (_this->m_tailWritten == false)
                {
                    av_write_trailer(_this->m_ofmtctx);
                    avio_close(_this->m_ofmtctx->pb);
                }
                avformat_free_context(_this->m_ofmtctx);
                _this->m_tailWritten = true;
                _this->m_ofmtctx = NULL;
                startFrame = true;
            }
            i++;
            keyFrame = false;
        }
        av_free_packet(&packet);
        av_init_packet(&packet);
    }
    if (_this->m_record)
    {
        av_write_trailer(_this->m_ofmtctx);
        avio_close(_this->m_ofmtctx->pb);
    }
    if (_this->m_ofmtctx)
    {
        avformat_free_context(_this->m_ofmtctx);
        _this->m_ofmtctx = NULL;
    }
    _this->m_record = false;

    avformat_close_input(&_this->m_ifmtctx);
    _this->m_ifmtctx = NULL;
}
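For reference, a minimal sketch of how I understand the timestamp rebasing should work: subtract the first packet's timestamps so the file starts at 0, then rescale everything into the output stream's time base. The helper name rebaseAndWrite is made up, the input/output streams are passed in explicitly, and first_dts is assumed to be reset to AV_NOPTS_VALUE whenever a new recording starts:

extern "C" {
#include <libavformat/avformat.h>
}

// Hypothetical helper: shift a packet so the recording starts at 0 and rescale it
// into the output stream's time base before writing it.
static void rebaseAndWrite(AVFormatContext *ofmtctx, AVStream *in_stream, AVStream *out_stream,
                           AVPacket *packet, int64_t *first_dts)
{
    if (*first_dts == AV_NOPTS_VALUE)
        *first_dts = packet->dts;                  // remember the offset once per recording

    if (packet->pts != AV_NOPTS_VALUE)
        packet->pts -= *first_dts;                 // file now starts at timestamp 0
    if (packet->dts != AV_NOPTS_VALUE)
        packet->dts -= *first_dts;

    // Convert pts, dts and duration from the input time base to the time base the
    // muxer chose for the output stream after avformat_write_header().
    av_packet_rescale_ts(packet, in_stream->time_base, out_stream->time_base);

    packet->stream_index = out_stream->index;      // index into ofmtctx->streams, not ->id
    av_interleaved_write_frame(ofmtctx, packet);
}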

Another question: how can I check whether my project uses the deprecated API (see the answer of @berak)?

Tom Baires
  • Ugh. This is not a complete example, and makes the question unanswerable, but I understand that the minimal example will still be large. Still, do not post a random incomplete fragment. And even then, understand that most of us won't have an Axis camera handy. – MSalters Dec 04 '17 at 22:01
  • @MSalters I have revised my question. – Tom Baires Dec 05 '17 at 08:22
  • About the deprecated api question, it means the C api of OpenCV... most of them are functions like cvFunction instead of cv::function which is c++, also it will be using cvMat or IplImage instead of cv::Mat. I do not see any OpenCV code in your snippets... so I am not sure if you have deprecated code or not. Basically the [structures and operations here](https://docs.opencv.org/2.4/modules/core/doc/old_basic_structures.html) should be avoided and the c++ one used instead. I changed the tags from opencv to ffmpeg since it is a problem with that lib and not opencv. – api55 Dec 05 '17 at 12:51
  • where do you prepare your output stream ? – UmNyobe Dec 05 '17 at 13:00
  • @api55 Thanks, I use OpenCV in another part of the project, and I checked: the methods are all the cv::function variants. – Tom Baires Dec 05 '17 at 14:44
  • @UmNyobe I have added the prepareOutputStream logic, sorry. – Tom Baires Dec 05 '17 at 14:53
  • Does the video file start right away ? – UmNyobe Dec 05 '17 at 16:01
  • Without the code block `timebase repair logic (my changes)`, the resulting video file plays in Windows Media Player but not in VLC. With that code block I get good videos, but the code does not work stably; it crashes with the error `timebase 1/180000 not supported...`. – Tom Baires Dec 05 '17 at 16:25
