Quantcast

The delay difference of live video stream between "gst-launch-1.0 command" and "appsink callback"

classic Classic list List threaded Threaded
1 message Options
Reply | Threaded
Open this post in threaded view
|  
Report Content as Inappropriate

The delay difference of live video stream between "gst-launch-1.0 command" and "appsink callback"

sulli_xue
This post has NOT been accepted by the mailing list yet.
Hello everyone, I have encountered a confusing problem on my Jetson TX1 (GStreamer 1.8, Qt 5.5, OpenCV 3.2 built with GStreamer support).
First,I tested "gst-launch-1.0 rtspsrc location=rtsp://admin:admin12345@192.168.1.64:554/h264/ch33/main/av_stream latency=0 ! decodebin ! videoconvert ! xvimagesink sync=false" in command window and I got real-time video of my IP camera.
However, when I tried to use GStreamer in a Qt project for OpenCV display, I tested the following code, which uses an appsink callback. The result is that the video is not real-time and the delay accumulates over time, which is unacceptable for my real-time project. Could someone help me? Best wishes to you all!

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <stdlib.h>
#include<QTime>
#include<QDebug>

#include "opencv2/opencv.hpp"

using namespace cv;

/* Caps filter for the appsink. NOTE: the field is "framerate" — the
 * original "fFrameRate" is not a valid GStreamer caps field and would
 * make these caps never match. */
#define CAPS "video/x-raw,format=BGR,framerate=25/1,width=1280,height=720"

// TODO: use synchronized deque (frameQueue is shared, unsynchronized state)
GMainLoop *loop;
std::deque<Mat> frameQueue;
int live_flag = 0;   /* set when PAUSED returns NO_PREROLL (live source) */
int quit_flag = 0;   /* set on EOS / key press; checked nowhere yet — TODO confirm intended use */
int sum_time = 0;    /* last per-frame callback duration in ms (QTime) */

/* Preroll callback: the appsink has a preroll sample available.
 * We only log the event; actual frames are consumed in new_sample(). */
GstFlowReturn new_preroll(GstAppSink *appsink, gpointer data)
{
    g_print("Got preroll!\n");

    return GST_FLOW_OK;
}

GstFlowReturn new_sample(GstAppSink *appsink, gpointer data)
{
    QTime time;
    time.start();
    static int framecount = 0;
    framecount++;

    static int width=0, height=0 ;

    GstSample *sample = gst_app_sink_pull_sample(appsink);
    GstCaps *caps = gst_sample_get_caps(sample);
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    static GstStructure *s;
    const GstStructure *info = gst_sample_get_info(sample);
    // ---- get width and height
    if(framecount==1)
    {
        if(!caps)
        {
            g_print("Could not get image info from filter caps");
            exit(-11);
        }

        s = gst_caps_get_structure(caps,0);
        gboolean res = gst_structure_get_int(s, "width", &width);
        res |= gst_structure_get_int(s, "height", &height);
        if(!res)
        {
            g_print("Could not get image width and height from filter caps");
            exit(-12);
        }
        g_print("Image size: %d\t%d\n",width,height);
    }


    // ---- Read frame and convert to opencv format ---------------
    GstMapInfo map;
    gst_buffer_map (buffer, &map, GST_MAP_READ);

    // convert gstreamer data to OpenCV Mat, you could actually
    // resolve height / width from caps...

    Mat frame(Size(width, height), CV_8UC3, (char*)map.data, Mat::AUTO_STEP);

        // this lags pretty badly even when grabbing frames from webcam
        //Mat edges;
        //cvtColor(frame, edges, CV_RGB2GRAY);
        //GaussianBlur(edges, edges, Size(7,7), 1.5, 1.5);
        //Canny(edges, edges, 0, 30, 3);
        imshow("stream", frame);

        //char key = cv::waitKey(10);
        //if(key!=-1) quit_flag = 1;


    gst_buffer_unmap(buffer, &map);

    // ------------------------------------------------------------

    // print dot every 30 frames
    if (framecount%30 == 0) {
    g_print (".");
    }

    // show caps on first frame
    if (framecount == 1) {
    g_print ("%s\n", gst_caps_to_string(caps));
    }

    gst_sample_unref (sample);
    sum_time =time.elapsed();
    qDebug()<<"time:"<<sum_time<<"\n";
    return GST_FLOW_OK;
}

static gboolean my_bus_callback (GstBus *bus, GstMessage *message, gpointer data)
{
    g_print ("Got %s message from %s\n", GST_MESSAGE_TYPE_NAME (message), GST_OBJECT_NAME (message->src));
    switch (GST_MESSAGE_TYPE (message))
    {
            case GST_MESSAGE_ERROR:
            {
                    GError *err;
                    gchar *debug;

                    gst_message_parse_error (message, &err, &debug);
                    g_print ("Error from %s: %s\n", GST_OBJECT_NAME (message->src), err->message);
                    g_error_free (err);
                    g_free (debug);
                    break;
            }
            case GST_MESSAGE_EOS:
                    /* end-of-stream */
                    quit_flag = 1;
                    break;
            case GST_MESSAGE_STATE_CHANGED:
                    GstState oldstate, newstate;
                    gst_message_parse_state_changed(message, &oldstate, &newstate, NULL);
                    g_print ("Element %s changed state from %s to %s.\n",
                    GST_OBJECT_NAME (message->src),
                            gst_element_state_get_name (oldstate),
                            gst_element_state_get_name (newstate));
                    break;
            default:
                    /* unhandled message */
                    break;
    }
    /* we want to be notified again the next time there is a message
    * on the bus, so returning TRUE (FALSE means we want to stop watching
    * for messages on the bus and our callback should not be called again)
    */
    return TRUE;
}

int main (int argc, char *argv[])
{
    GError *error = NULL;

    GstElement *pipeline, *sink;
    GstStateChangeReturn state_ret;

    GstSample *sample;

    gst_init (&argc, &argv);

        gchar *descr = g_strdup(
        "rtspsrc location=rtsp://admin:admin12345@192.168.1.64:554/h264/ch33/main/av_stream latency=0 ! "
        "decodebin ! "
        "videoconvert ! "
        "appsink name=sink sync=false"
    );

//    gchar *descr = g_strdup(
//              "rtspsrc location=\"rtsp://admin:admin12345@192.168.1.64:554/h264/ch33/main/av_stream\" latency=0 ! "
//                    "decodebin ! "
//                    "videoconvert ! "
//                    "xvimagesink name=sink sync=true"
//                );

    pipeline = gst_parse_launch (descr, &error);

    if (error != NULL)
    {
        g_print ("could not construct pipeline: %s\n", error->message);
        g_error_free (error);
        exit (-1);
    }

    /* get sink */
    sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

    /*set to pause*/
    state_ret = gst_element_set_state(pipeline, GST_STATE_PAUSED);

    switch(state_ret)
    {
        case GST_STATE_CHANGE_FAILURE:
            g_print ("failed to play the file\n");
            exit (-2);
        case GST_STATE_CHANGE_NO_PREROLL:
            /* for live sources, we need to set the pipeline to PLAYING before we can
            * receive a buffer. */
            g_print ("live source detected\n");
            live_flag = 1;
            break;
        default:
            break;
    }

    gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
    gst_app_sink_set_drop((GstAppSink*)sink, true);
    gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
    GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
    gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL);

    GstBus *bus;
    guint bus_watch_id;
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL);
    gst_object_unref (bus);

        gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

    namedWindow("stream",1);

    loop = g_main_loop_new(NULL,false);
        g_main_loop_run(loop);

    cv::destroyWindow("stream");
    g_print ("Going to end of main!\n");
    gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
    gst_object_unref (GST_OBJECT (pipeline));

    return 0;
}

Loading...