0

My goal is to transcode a video while burning in subtitles (using textoverlay). I followed the GStreamer tutorial and wrote a small program to test two pipelines:

pipeline 1(without subtitle):

multiqueue max-size-buffers=10000 max-size-bytes=0 max-size-time=0 name=mq splitfilesrc location="/share/Multimedia/debug/harry/Harry.Potter.and.the.Deathly.Hallows.Part.1.2010.BluRay.1080p.AAC.4Audio.x264-CHD.mkv" ! matroskademux name=dmx dmx. ! queue ! identity sync=false ! h264parse ! omxh264dec ! omxh264enc force-keyframe-period=40 scaling-width=1920 scaling-height=800 ! h264parse ! mq.sink_0 mq.src_0 ! mpegtsmux name=mux ! filesink location="/share/Public/aaa.ts" dmx. ! aacparse ! queue ! identity sync=false ! faad name=adec ! audioconvert ! audioresample ! audio/x-raw,channels=2,rate=44100 ! queue ! voaacenc ! audio/mpeg,mpegversion=4,stream-format=raw ! aacparse ! mq.sink_1 mq.src_1 ! audio/mpeg,mpegversion=4,stream-format=raw ! mux.

pipeline 2(with subtitle):

multiqueue max-size-buffers=10000 max-size-bytes=0 max-size-time=0 name=mq textoverlay wait-text=false name=txo splitfilesrc location="/share/Multimedia/debug/harry/Harry.Potter.and.the.Deathly.Hallows.Part.1.2010.BluRay.1080p.AAC.4Audio.x264-CHD.mkv" ! matroskademux name=dmx dmx. ! queue ! identity sync=false ! h264parse ! omxh264dec ! txo.video_sink txo.src ! omxh264enc force-keyframe-period=40 scaling-width=1920 scaling-height=800 ! h264parse ! mq.sink_0 mq.src_0 ! mpegtsmux name=mux ! filesink location="/share/Public/aaa.ts" filesrc location="/share/Multimedia/debug/harry/Harry.Potter.and.the.Deathly.Hallows.Part.1.2010.BluRay.1080p.AAC.4Audio.x264-CHD.zh-TW.srt" ! subparse subtitle-encoding=UTF-8 ! txo.text_sink dmx. ! aacparse ! queue ! identity sync=false ! faad name=adec ! audioconvert ! audioresample ! audio/x-raw,channels=2,rate=44100 ! queue ! voaacenc ! audio/mpeg,mpegversion=4,stream-format=raw ! aacparse ! mq.sink_1 mq.src_1 ! audio/mpeg,mpegversion=4,stream-format=raw ! mux.

Pipeline 1 works correctly whether or not a seek event is performed. Pipeline 2 also works correctly without seeking, but has an issue when a seek is performed. It blocks at

ret = gst_element_seek_simple (pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_KEY_UNIT, 7280 * GST_SECOND);

and never returns.

Is there anything I can do about this? The following is my test code.

regards, Tany

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *pipeline;
    GstBus *bus;
    GstMessage *msg;
    gboolean ret;
    int isSeek=0;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Build the pipeline */
#if 1 // with subtitle
    pipeline = gst_parse_launch("multiqueue max-size-buffers=10000 max-size-bytes=0 max-size-time=0 name=mq textoverlay wait-text=false name=txo splitfilesrc location=\"/share/Multimedia/debug/harry/Harry.Potter.and.the.Deathly.Hallows.Part.1.2010.BluRay.1080p.AAC.4Audio.x264-CHD.mkv\" ! matroskademux name=dmx  dmx. ! queue ! identity sync=false ! h264parse ! omxh264dec ! txo.video_sink  txo.src ! omxh264enc force-keyframe-period=40 scaling-width=1920 scaling-height=800 ! h264parse ! mq.sink_0  mq.src_0 ! mpegtsmux name=mux ! filesink location=\"/share/Public/aaa.ts\"  filesrc location=\"/share/Multimedia/debug/harry/Harry.Potter.and.the.Deathly.Hallows.Part.1.2010.BluRay.1080p.AAC.4Audio.x264-CHD.zh-TW.srt\" ! subparse subtitle-encoding=UTF-8 ! txo.text_sink dmx. ! aacparse ! queue ! identity sync=false ! faad name=adec ! audioconvert ! audioresample ! audio/x-raw,channels=2,rate=44100 ! queue ! voaacenc ! audio/mpeg,mpegversion=4,stream-format=raw ! aacparse ! mq.sink_1  mq.src_1 ! audio/mpeg,mpegversion=4,stream-format=raw ! mux.", NULL);
#else // constantine without subtitle
    pipeline = gst_parse_launch("multiqueue max-size-buffers=10000 max-size-bytes=0 max-size-time=0 name=mq splitfilesrc location=\"/share/Multimedia/debug/harry/Harry.Potter.and.the.Deathly.Hallows.Part.1.2010.BluRay.1080p.AAC.4Audio.x264-CHD.mkv\" ! matroskademux name=dmx  dmx. ! queue ! identity sync=false ! h264parse ! omxh264dec ! omxh264enc force-keyframe-period=40 scaling-width=1920 scaling-height=800 ! h264parse ! mq.sink_0  mq.src_0 ! mpegtsmux name=mux ! filesink location=\"/share/Public/aaa.ts\"  dmx. ! aacparse ! queue ! identity sync=false ! faad name=adec ! audioconvert ! audioresample ! audio/x-raw,channels=2,rate=44100 ! queue ! voaacenc ! audio/mpeg,mpegversion=4,stream-format=raw ! aacparse ! mq.sink_1  mq.src_1 ! audio/mpeg,mpegversion=4,stream-format=raw ! mux.", NULL);
#endif  

    /* Start playing */
    //gst_element_set_state (pipeline, GST_STATE_PLAYING);
    gst_element_set_state (pipeline, GST_STATE_PAUSED);

    bus = gst_element_get_bus (pipeline);

    do{
      msg = gst_bus_timed_pop_filtered (bus, 1 * GST_SECOND, 
                GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);
        if(msg != NULL){
            GError *err;
            gchar *debug_info;
            switch(GST_MESSAGE_TYPE(msg)){
                case GST_MESSAGE_ERROR:
                    gst_message_parse_error (msg, &err, &debug_info);
                    g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
                    g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
                    g_clear_error (&err);
                    g_free (debug_info);
                    break;
                case GST_MESSAGE_EOS:
                    g_print ("End-Of-Stream reached.\n");
                    goto _exit;
                    break;
                case GST_MESSAGE_DURATION:
                    g_print ("The duration has changed.\n");
                    break;
                case GST_MESSAGE_STATE_CHANGED:{
                        GstState old_state, new_state, pending_state;
                        gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
                        if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
                            g_print ("Pipeline state changed from %s to %s:\n",
                                gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));

                            if (new_state == GST_STATE_PLAYING) {
                                g_printerr("[%s:%d]: do nothing now.\n", __FILE__, __LINE__);
                            }else if (new_state == GST_STATE_PAUSED){
                                // do seek
                                if(isSeek == 0){
                                    // show seek info
                                    GstQuery *query;
                                    gint64 start, end;
                                    gboolean seek_enabled;
                                    query = gst_query_new_seeking (GST_FORMAT_TIME);
                                    if (gst_element_query (pipeline, query)) {
                                        gst_query_parse_seeking (query, NULL, &seek_enabled, &start, &end);
                                        if (seek_enabled) {
                                            g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                                                GST_TIME_ARGS (start), GST_TIME_ARGS (end));
                                        } else {
                                            g_print ("Seeking is DISABLED for this stream.\n");
                                        }
                                    }else {
                                        g_printerr ("Seeking query failed.");
                                    }
                                    gst_query_unref (query);

                                    // do seek
                                    isSeek = 1;
                                    g_printerr("[%s:%d]start to seek \n", __FILE__, __LINE__);  
                                    ret = gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
                                              //GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 
                                              GST_SEEK_FLAG_KEY_UNIT,
                                              //GST_SEEK_FLAG_ACCURATE,
                                              //GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
                                              //GST_SEEK_FLAG_FLUSH,
                                              //GST_SEEK_FLAG_NONE,
                                              7280 * GST_SECOND);
                                    g_printerr("[%s:%d]end to seek \n", __FILE__, __LINE__);
                                    if(ret == TRUE){
                                        g_printerr("[%s:%d]ret=true\n", __FILE__, __LINE__);
                                    }else{
                                        g_printerr("[%s:%d]ret=false\n", __FILE__, __LINE__);
                                    }
                                    gst_element_set_state (pipeline, GST_STATE_PLAYING);
                                }                           
                            }
                        }
                    }
                default:
                    //g_printerr ("Unexpected message received.\n");
                    break;
            }
            gst_message_unref (msg);
        }else{
            GstFormat fmt = GST_FORMAT_TIME;
            gint64 current = -1;
            gint64 duration;

            if (!gst_element_query_position (pipeline, fmt, &current)) {
                g_printerr ("Could not query current position.\n");
            }

            if (!GST_CLOCK_TIME_IS_VALID (duration)) {
                if (!gst_element_query_duration (pipeline, fmt, &duration)) {
                    g_printerr ("Could not query current duration.\n");
                }
            }

            g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\n",
            GST_TIME_ARGS (current), GST_TIME_ARGS (duration));

        }
    }while(1);  

_exit:   
    /* Free resources */
    if (msg != NULL)
    gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}
Tany
  • 33
  • 5
  • I am guessing you know this, but just in case... subtitles are usually in a separate stream with timecodes linking them to the video and audio streams, rather than being burnt into the video itself. This allows easy switching between languages as one advantage, and also incurs less processing overhead. – Mick Feb 16 '17 at 12:15
  • Very thanks for your reply, I know that subtitles are usually in a separate stream, but i need to overlay hard-subtitle on video on some special situation. – Tany Feb 17 '17 at 03:38

0 Answers0