how to capture sequence no. and time stamp of RTP/UDP packets


how to capture sequence no. and time stamp of RTP/UDP packets

raheeb
Hello,

I am multicasting a video stream using RTP (rtph264pay) and UDP (udpsink) from a source node. At the client end the video stream is decoded and displayed as it streams. I have coded this using GStreamer in C/C++. I now want to:

1. At the source, capture the sequence number, packet size (in bytes) and timestamp of each packet of the video stream being transmitted over the network.

2. At the receiver, capture the sequence number and timestamp of the received packets so as to measure delay and packet loss.

It would be great if someone could suggest a way to implement this in C/C++. I do not want to use GST debug since I have to do some processing with the video packets. Here is the code for the server and client.


SERVER 

#include <gst/gst.h>

static gboolean
bus_call (GstBus *bus,
    GstMessage *msg,
    gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of video stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void
on_pad_added (GstElement *element,
    GstPad *pad,
    gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;
  g_print ("Dynamic pad created, linking demuxer/decoder\n");
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

typedef struct _CustomData {
  GMainLoop *loop;
  GstElement *pipeline, *source, *demuxer, *decoder, *aratio, *encoder,
      *rtp, *usink, *que, *videoconvert, *videoscale, *videoscale_capsfilter;
  GstBus *bus;
  guint bus_watch_id;
  guint bitrate;
  GstCaps *caps, *videoscalecaps;
} CustomData;

int
main (int argc,
    char *argv[])
{
  CustomData data;

  /* Initialization */
  gst_init (&argc, &argv);

  data.loop = g_main_loop_new (NULL, FALSE);

  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  data.videoscalecaps = gst_caps_from_string ("video/x-raw, width=1024, height=768");
  data.pipeline = gst_pipeline_new ("video-send");
  data.source = gst_element_factory_make ("filesrc", "file-source");
  g_assert (data.source);
  data.demuxer = gst_element_factory_make ("qtdemux", "demuxer");
  g_assert (data.demuxer);
  data.que = gst_element_factory_make ("queue", "queue");
  g_assert (data.que);
  data.decoder = gst_element_factory_make ("avdec_h264", "decoder");
  g_assert (data.decoder);
  data.videoscale = gst_element_factory_make ("videoscale", "scale");
  g_assert (data.videoscale);
  data.videoscale_capsfilter = gst_element_factory_make ("capsfilter", "videoscale_capsfilter");
  g_assert (data.videoscale_capsfilter);
  data.aratio = gst_element_factory_make ("aspectratiocrop", "aratio");
  g_assert (data.aratio);
  data.videoconvert = gst_element_factory_make ("videoconvert", "video-convert");
  g_assert (data.videoconvert);
  data.encoder = gst_element_factory_make ("x264enc", "encoder");
  g_assert (data.encoder);
  data.rtp = gst_element_factory_make ("rtph264pay", "rtp");
  g_assert (data.rtp);
  data.usink = gst_element_factory_make ("udpsink", "udp_sink");
  g_assert (data.usink);

  g_object_set (G_OBJECT (data.source), "location", argv[1], NULL);
  g_object_set (G_OBJECT (data.source), "do-timestamp", TRUE, NULL);
  g_object_set (G_OBJECT (data.aratio), "aspect-ratio", 4, 3, NULL);
  g_object_set (G_OBJECT (data.encoder), "b-adapt", TRUE, NULL);
  g_object_set (G_OBJECT (data.usink), "host", "224.0.0.0", NULL);
  g_object_set (G_OBJECT (data.usink), "port", 5007, NULL);
  g_object_set (G_OBJECT (data.usink), "auto-multicast", TRUE, NULL);
  g_object_set (G_OBJECT (data.videoscale_capsfilter), "caps", data.videoscalecaps, NULL);

  data.bus = gst_pipeline_get_bus (GST_PIPELINE (data.pipeline));
  data.bus_watch_id = gst_bus_add_watch (data.bus, bus_call, data.loop);
  gst_object_unref (data.bus);

  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.demuxer,
      data.decoder, data.videoscale, data.videoscale_capsfilter, data.aratio,
      data.videoconvert, data.encoder, data.rtp, data.usink, NULL);

  if (!gst_element_link (data.source, data.demuxer)) {
    g_printerr ("Failed to link filesrc to qtdemux.\n");
  }

  if (!gst_element_link_many (data.decoder, data.videoscale,
          data.videoscale_capsfilter, data.aratio, data.videoconvert,
          data.encoder, data.rtp, data.usink, NULL)) {
    g_printerr ("Failed to link the decode/encode chain.\n");
  }

  /* the demuxer's video pad appears at runtime, so link it in a callback */
  g_signal_connect (data.demuxer, "pad-added", G_CALLBACK (on_pad_added), data.decoder);

  g_print ("Now playing: %s\n", argv[1]);

  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  g_main_loop_run (data.loop);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (data.pipeline));
  g_source_remove (data.bus_watch_id);
  g_main_loop_unref (data.loop);

  return 0;
}


CLIENT

#include <gst/gst.h>

static gboolean
bus_call (GstBus *bus,
    GstMessage *msg,
    gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

typedef struct _CustomData {
  GstElement *pipeline, *source, *rtp, *decoder, *sink,
      *videoconvert, *videoscale, *que, *filter;
  GstState state;
} CustomData;

int
main (int argc, char *argv[])
{
  CustomData data;
  GMainLoop *loop;
  GstCaps *caps, *filtercaps;
  GstBus *bus;

  gst_init (&argc, &argv);

  caps = gst_caps_from_string ("application/x-rtp, media=(string)video, "
      "clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96");

  loop = g_main_loop_new (NULL, FALSE);

  if (argc != 1) {
    g_printerr ("Usage: %s\n", argv[0]);
    return -1;
  }

  data.pipeline = gst_pipeline_new ("video-receive");
  data.source = gst_element_factory_make ("udpsrc", "udp-source");
  data.videoscale = gst_element_factory_make ("videoscale", "video-scale");
  data.videoconvert = gst_element_factory_make ("videoconvert", "video-convert");
  data.sink = gst_element_factory_make ("xvimagesink", "video-output");
  data.rtp = gst_element_factory_make ("rtph264depay", "rtp");
  data.decoder = gst_element_factory_make ("avdec_h264", "decoder");
  data.filter = gst_element_factory_make ("capsfilter", "filter");
  data.que = gst_element_factory_make ("queue", "queue");

  filtercaps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "I420",
      "width", G_TYPE_INT, 200,
      "height", G_TYPE_INT, 200, NULL);

  if (!data.pipeline || !data.source || !data.rtp || !data.decoder || !data.sink) {
    g_printerr ("One element could not be created.\n");
    return -1;
  }

  g_object_set (G_OBJECT (data.filter), "caps", filtercaps, NULL);

  bus = gst_element_get_bus (data.pipeline);
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  g_object_set (G_OBJECT (data.source), "multicast-group", "224.0.0.0", NULL);
  g_object_set (G_OBJECT (data.source), "port", 5007, NULL);
  g_object_set (G_OBJECT (data.source), "caps", caps, NULL);
  g_object_set (G_OBJECT (data.source), "do-timestamp", TRUE, NULL);
  g_object_set (G_OBJECT (data.sink), "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.rtp,
      data.decoder, data.que, data.sink, NULL);

  /* udpsrc ! rtph264depay ! avdec_h264 ! queue ! xvimagesink */
  gst_element_link_many (data.source, data.rtp, data.decoder, data.que,
      data.sink, NULL);

  g_print ("Now playing...\n");
  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  g_print ("Running...\n");
  g_main_loop_run (loop);

  g_print ("Deleting pipeline\n");
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (data.pipeline));
  g_main_loop_unref (loop);

  return 0;
}




Re: how to capture sequence no. and time stamp of RTP/UDP packets

Sebastian Dröge-3
On Thu, 2015-04-30 at 17:39 +0000, Raheeb Muzaffar wrote:
> Hello,
> I am multicasting a video stream using RTP (rtph264pay) and UDP
> (udpsink) from a source node. At the client end the video stream is
> decoded and displayed as it streams. I have coded this using GStreamer
> in C/C++. I now want to:
> 1. At the source, capture the sequence number, packet size (in bytes)
> and timestamp of each packet being transmitted over the network.
> 2. At the receiver, capture the sequence number and timestamp of the
> received packets so as to measure delay and packet loss.
> I do not want to use GST debug since I have to do some processing with
> the video packets.

Why don't you want to use GStreamer for that?

But in any case, if you don't, you'll have to parse the RTP packets
yourself. See https://tools.ietf.org/html/rfc3550 section 5 for the
packet format.
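For illustration, a minimal sketch of pulling the two fields out of a
raw packet per section 5.1 (assuming you read the packets from your own
UDP socket; parse_rtp_header is a made-up helper, not a library call):

#include <stddef.h>
#include <stdint.h>

typedef struct {
  uint16_t seq;        /* RTP sequence number */
  uint32_t timestamp;  /* RTP media timestamp */
} rtp_info;

/* Returns 0 on success, -1 if the packet is too short or not RTP v2. */
static int
parse_rtp_header (const uint8_t *pkt, size_t len, rtp_info *out)
{
  if (len < 12)            /* the fixed RTP header is 12 bytes */
    return -1;
  if ((pkt[0] >> 6) != 2)  /* version field must be 2 */
    return -1;
  /* network byte order: seqnum in bytes 2-3, timestamp in bytes 4-7 */
  out->seq = ((uint16_t) pkt[2] << 8) | pkt[3];
  out->timestamp = ((uint32_t) pkt[4] << 24) | ((uint32_t) pkt[5] << 16)
      | ((uint32_t) pkt[6] << 8) | pkt[7];
  return 0;
}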

If you use GStreamer, you could use the gst_rtp_buffer_map() function
from libgstrtp.

--
Sebastian Dröge, Centricular Ltd · http://www.centricular.com


Re: how to capture sequence no. and time stamp of RTP/UDP packets

raheeb
Thanks Sebastian,

Can you give an example of how to use gst_rtp_buffer_map() with my pipeline code?
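
For reference, a rough sketch of what that could look like (untested; it assumes GStreamer 1.x, linking against gstreamer-rtp-1.0, and the rtph264pay element stored in data.rtp as in the server code above): attach a buffer probe to the payloader's src pad and map each outgoing buffer as RTP.

#include <gst/gst.h>
#include <gst/rtp/gstrtpbuffer.h>

static GstPadProbeReturn
rtp_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

  if (gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) {
    /* sequence number, RTP timestamp and total packet size in bytes */
    g_print ("seq=%u rtptime=%u size=%" G_GSIZE_FORMAT "\n",
        gst_rtp_buffer_get_seq (&rtp),
        gst_rtp_buffer_get_timestamp (&rtp),
        gst_buffer_get_size (buffer));
    gst_rtp_buffer_unmap (&rtp);
  }
  return GST_PAD_PROBE_OK;
}

/* after building the pipeline, before setting it to PLAYING: */
GstPad *srcpad = gst_element_get_static_pad (data.rtp, "src");
gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER,
    rtp_probe_cb, NULL, NULL);
gst_object_unref (srcpad);

The same probe on udpsrc's src pad (or rtph264depay's sink pad) at the receiver would give the sequence numbers and timestamps of arriving packets, so delay and loss can be computed by comparing the two logs.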





Re: how to capture sequence no. and time stamp of RTP/UDP packets

Chuck Crisler-2
In reply to this post by Sebastian Dröge-3
If you are trying to determine packet loss, look at the RTCP support. That is one thing that it does.
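
For example (a hedged sketch, not code from this thread: the pipelines posted here use bare udpsink/udpsrc, so this assumes you switch to an rtpbin-based pipeline), rtpbin exposes the RTCP-derived statistics through its internal session object:

/* rtpbin is the GstElement made with gst_element_factory_make ("rtpbin", ...);
 * 0 is the session id */
GObject *session = NULL;
GstStructure *stats = NULL;
gchar *str;

g_signal_emit_by_name (rtpbin, "get-internal-session", 0, &session);
g_object_get (session, "stats", &stats, NULL);
str = gst_structure_to_string (stats);
g_print ("RTP session stats: %s\n", str);  /* includes loss/jitter fields */
g_free (str);
gst_structure_free (stats);
g_object_unref (session);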

On Sat, May 2, 2015 at 3:58 AM, Sebastian Dröge <[hidden email]> wrote:
> If you use GStreamer, you could use the gst_rtp_buffer_map() function
> from libgstrtp.





--
Chuck Crisler
Mutualink, Inc.
3 Lan Dr.
Westford, MA 01886
Direct: (978) 490-3006 Ext: 118
Web: www.mutualink.net


how to display video on server side

gagankumarnigam
Sir,
I am new to GStreamer coding. What you have done in your server-client code is exactly what I need. I want to know how to display video after capturing it from a webcam while simultaneously streaming it to another system (i.e. display it on the server side in parallel with the client side).

Thanks and regards


Re: how to display video on server side

Nicolas Dufresne-3
Hello,

it would be nice to create a new thread instead of replying to an
existing one next time.

On Friday, 08 May 2015 at 22:50 -0700, gagankumarnigam wrote:
> Sir,
> I am new to GStreamer coding. What you have done in your server-client
> code is exactly what I need. I want to know how to display video after
> capturing it from a webcam while simultaneously streaming it to
> another system (display on the server side in parallel with the
> client side).

This can be achieved using a tee element. Here's an example pipeline:

gst-launch-1.0 \
  v4l2src ! tee name=t \
    t. ! queue ! videoconvert ! autovideosink \
    t. ! queue ! x264enc tune=zerolatency ! rtph264pay ! udpsink port=1234 host=127.0.0.1

Note that in some cases you may not want the transmission and the
preview to be synchronised. If that is your case, you can add sync=0 to
autovideosink (or the sink of your choice). Note this is just an
example (especially the network part ;-P).

cheers,
Nicolas



Re: how to display video on server side

gagankumarnigam
Thanks for the great help.

But I want some C code for it. Could you describe the same thing with a small C example, so that I understand it better?

Thanks with regards
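
For reference, a rough C equivalent of the gst-launch pipeline Nicolas posted above (an untested sketch, not code from this thread): the key point is requesting one tee src pad per branch.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *src, *tee, *q1, *conv, *vsink, *q2, *enc, *pay, *usink;
  GstPad *teepad, *qpad;
  GMainLoop *loop;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  pipeline = gst_pipeline_new ("preview-and-stream");
  src   = gst_element_factory_make ("v4l2src", NULL);
  tee   = gst_element_factory_make ("tee", NULL);
  q1    = gst_element_factory_make ("queue", NULL);
  conv  = gst_element_factory_make ("videoconvert", NULL);
  vsink = gst_element_factory_make ("autovideosink", NULL);
  q2    = gst_element_factory_make ("queue", NULL);
  enc   = gst_element_factory_make ("x264enc", NULL);
  pay   = gst_element_factory_make ("rtph264pay", NULL);
  usink = gst_element_factory_make ("udpsink", NULL);

  gst_util_set_object_arg (G_OBJECT (enc), "tune", "zerolatency");
  g_object_set (usink, "host", "127.0.0.1", "port", 1234, NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, tee, q1, conv, vsink,
      q2, enc, pay, usink, NULL);
  gst_element_link (src, tee);
  gst_element_link_many (q1, conv, vsink, NULL);           /* preview branch */
  gst_element_link_many (q2, enc, pay, usink, NULL);       /* network branch */

  /* request one tee src pad per branch and link it to that branch's queue */
  teepad = gst_element_get_request_pad (tee, "src_%u");
  qpad = gst_element_get_static_pad (q1, "sink");
  gst_pad_link (teepad, qpad);
  gst_object_unref (qpad);
  gst_object_unref (teepad);

  teepad = gst_element_get_request_pad (tee, "src_%u");
  qpad = gst_element_get_static_pad (q2, "sink");
  gst_pad_link (teepad, qpad);
  gst_object_unref (qpad);
  gst_object_unref (teepad);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
  return 0;
}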


how to display video at client side captured from udpsrc

gagankumarnigam
In reply to this post by Nicolas Dufresne-3
Hello,

I tried streaming video using RTSP and UDP (udpsink) from a v4l2 video source. At the client my program runs fine, but it doesn't display video. Here is my code for the server and client. Can somebody tell me what I am doing wrong at the client side?
  SERVER.c


gboolean bus_call (GstBus *bus, GstMessage *msg, void *data); /* defined below, called before its definition */

int BroadcastVideo()
{
    GMainLoop *loop;
    gst_init(NULL,NULL);
   
    loop = g_main_loop_new (NULL, FALSE);
   
    GstElement  *pipeline;  /* used below, so it must not stay commented out */
    GstElement *vsource, *vtee, *vqueue, *tover, *xvsink, *evqueue, *vencoder, *muxer, *filesink;
    GstCaps *filtercaps;
    GstBin      *recording;
    GstBus      *bus;
    GstPad      *srcpad,*sinkpad;
   gint width, height, num, denom;
   const GstStructure *str;

  // Create gstreamer elements
    pipeline   = gst_pipeline_new ("Live Recording");
    vsource    = gst_element_factory_make ("v4l2src",     "viewing-file-source");
    vtee       = gst_element_factory_make ("tee",              "viewing-tee");
    vqueue     = gst_element_factory_make ("queue2",           "viewing-queue");
    tover      = gst_element_factory_make ("timeoverlay",      "viewing-overlay");
    xvsink     = gst_element_factory_make ("xvimagesink",      "viewing-xvsink");

printf("4\n");
/*
    recording  = GST_BIN(gst_bin_new("recording-bin"));
    evqueue    = gst_element_factory_make ("queue2",           "encoding-queue");
    vencoder   = gst_element_factory_make ("ffenc_mpeg4",      "encoding-encoder");
    muxer      = gst_element_factory_make ("avimux",           "encoding-muxer"); //mp4mux
    filesink   = gst_element_factory_make ("filesink",         "encoding-filesink");
*/
   GstElement  *filter, *vrate, *encoder, *conv, *sink;

 
   recording  = GST_BIN(gst_bin_new("recording-bin"));
   evqueue    = gst_element_factory_make ("queue2","encoding-queue");
   vrate = gst_element_factory_make ("videorate", "video-rate");
   filter = gst_element_factory_make ("capsfilter", "filter");
   conv = gst_element_factory_make ("ffmpegcolorspace","converter");
   vencoder = gst_element_factory_make ("ffenc_mpeg4","mpeg-decoder");
   sink = gst_element_factory_make ("udpsink","audio-output");

gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), GDK_WINDOW_XID(video_window->window));

   if(!pipeline || !vsource || !xvsink || !tover )
    {
        g_print("Unable to create all necessary elements\n");
        return -1;
    }

   filtercaps = gst_caps_new_simple ("video/x-raw-yuv","width", G_TYPE_INT, 640,"height", G_TYPE_INT, 480,"framerate", GST_TYPE_FRACTION, 30, 1, NULL);
   g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
   /* filtercaps is still read below, so it is unreffed there, not here */


   g_object_set (G_OBJECT (vencoder), "bitrate" , 384 ,  NULL);
   g_object_set (G_OBJECT (sink), "host" , "192.168.1.25" ,  NULL);
   //g_object_set (G_OBJECT (sink), "host" , "127.0.0.1" ,  NULL);
   g_object_set (G_OBJECT (sink), "port" , 8999 ,  NULL);
   g_object_set (G_OBJECT (sink), "async" , FALSE ,  NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

   /* g_object_set(G_OBJECT(vsource), "num-buffers",300, NULL);
    g_object_set(G_OBJECT(filesink),"location","output.avi", NULL);    
    g_object_set (G_OBJECT (tover), "halign", "right", NULL);
    g_object_set (G_OBJECT (tover), "valign", "top", NULL);
    g_object_set (G_OBJECT (tover), "shaded-background", TRUE, NULL);
*/
  /* create the recording bin */
 
  //  gst_bin_add_many (recording, evqueue, vencoder, muxer, filesink, NULL);
      gst_bin_add_many (recording, evqueue,vrate,filter,conv,vencoder, sink, NULL);
 

    sinkpad       = gst_element_get_static_pad(evqueue,"sink");
    GstPad *ghost = gst_ghost_pad_new("vsink",sinkpad);

    if(ghost == NULL)
    {
        g_error("Unable to create ghostpad!\n");
    }

    gst_element_add_pad(GST_ELEMENT(recording),ghost);
    gst_object_unref(GST_OBJECT(sinkpad));
   // gst_element_link_many(evqueue,vencoder,muxer,filesink,NULL);
    gst_element_link_many(evqueue,vrate,filter,conv,vencoder, sink,NULL);


    gst_bin_add_many (GST_BIN (pipeline), vsource, vtee, vqueue, tover, xvsink, recording, NULL);

 
    gst_element_link_many(vsource,tover,vtee,NULL);
    srcpad  = gst_element_get_request_pad(vtee,"src0");
    sinkpad = gst_element_get_static_pad(vqueue,"sink");
    gst_pad_link(srcpad,sinkpad);
    gst_element_link(vqueue,xvsink);

   
    srcpad  = gst_element_get_request_pad(vtee,"src1");
    sinkpad = gst_element_get_static_pad(GST_ELEMENT(recording),"vsink");
    gst_pad_link(srcpad,sinkpad);
       
     g_print ("Running...\n");
    gst_element_set_state(pipeline,GST_STATE_PLAYING);

     str = gst_caps_get_structure (filtercaps, 0);
   if (!gst_structure_get_int (str, "width", &width) || !gst_structure_get_int (str, "height", &height) ||
       !gst_structure_get_fraction (str, "framerate", &num, &denom))
         g_print ("No width/height available\n");
   else
         g_print ("The video size of this set of capabilities is %dx%d and the frame rate is %d/%d\n", width, height, num, denom);
   gst_caps_unref (filtercaps);


 
    g_main_loop_run (loop);

    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));

    return 0;
}
gboolean bus_call(GstBus *bus, GstMessage *msg, void *data)
{
    gchar           *debug;
    GError          *err;
    GMainLoop       *loop = (GMainLoop*)data;

    switch (GST_MESSAGE_TYPE(msg))
    {
        case GST_MESSAGE_APPLICATION:
            g_print("APP received on OBJ NAME %s\n",GST_OBJECT_NAME(msg->src));
            break;
        case GST_MESSAGE_EOS:
            g_print("EOS received on OBJ NAME %s\n",GST_OBJECT_NAME(msg->src));
            g_main_loop_quit (loop);
            break;
        case GST_MESSAGE_ERROR:
            gst_message_parse_error(msg, &err, &debug);
            g_free(debug);
            g_print("BUS CALL %s\n", err->message);
            g_error_free(err);
            g_main_loop_quit (loop);
            break;
        default:
            break;
    }
    return TRUE;
}  


client.c

#include <stdlib.h>
#include <gst/gst.h>

#define VIDEO_CAPS "application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264"

gboolean bus_call(GstBus *bus, GstMessage *msg, void *data)
{
    gchar           *debug;
    GError          *err;
    GMainLoop       *loop = (GMainLoop*)data;

    switch (GST_MESSAGE_TYPE(msg))
    {
        case GST_MESSAGE_APPLICATION:
            g_print("APP received on OBJ NAME %s\n",GST_OBJECT_NAME(msg->src));
            break;
        case GST_MESSAGE_EOS:
            g_print("EOS received on OBJ NAME %s\n",GST_OBJECT_NAME(msg->src));
            g_main_loop_quit (loop);
            break;
        case GST_MESSAGE_ERROR:
            gst_message_parse_error(msg, &err, &debug);
            g_free(debug);
            g_print("BUS CALL %s\n", err->message);
            g_error_free(err);
            g_main_loop_quit (loop);
            break;
        default:
            break;
    }
    return TRUE;
}

int main(int argc, char* argv[])
{
    GMainLoop *loop;
    gst_init(&argc,&argv);
   
    loop = g_main_loop_new (NULL, FALSE);
   
    GstElement  *pipeline, *rtpsrc, *vtee, *vqueue, *tover, *xvsink, *evqueue, *videodec,*videodepay,*muxer, *filesink, *videosink;
    GstCaps *filtercaps;
    GstBin      *recording;
    GstBus      *bus;
    GstPad      *srcpad,*sinkpad;
   gint width, height, num, denom;
   const GstStructure *str;

  // Create gstreamer elements
    pipeline   = gst_pipeline_new ("Live Recording");
    //vsource    = gst_element_factory_make ("v4l2src",     "viewing-file-source");
    rtpsrc     = gst_element_factory_make ("udpsrc", "UDP source");
   // rtpsrc     = gst_element_factory_make ("v4l2src", "source");
    vtee       = gst_element_factory_make ("tee","viewing-tee");
   // videodepay = gst_element_factory_make ("rtph264depay", "videodepay");
  //  g_assert (videodepay);
    vqueue     = gst_element_factory_make ("queue2",           "viewing-queue");
    tover      = gst_element_factory_make ("timeoverlay",      "viewing-overlay");
    xvsink     = gst_element_factory_make ("xvimagesink",      "viewing-xvsink");

printf("4\n");
/*
    recording  = GST_BIN(gst_bin_new("recording-bin"));
    evqueue    = gst_element_factory_make ("queue2",           "encoding-queue");
    videodec   = gst_element_factory_make ("ffenc_mpeg4",      "encoding-encoder");
    muxer      = gst_element_factory_make ("avimux",           "encoding-muxer"); //mp4mux
    filesink   = gst_element_factory_make ("filesink",         "encoding-filesink");
*/
   GstElement  *filter, *vrate, *encoder, *conv, *sink;

 
   recording  = GST_BIN(gst_bin_new("recording-bin"));
   evqueue    = gst_element_factory_make ("queue2","encoding-queue");
   vrate = gst_element_factory_make ("videorate", "video-rate");
   filter = gst_element_factory_make ("capsfilter", "filter");
   videodepay = gst_element_factory_make ("rtph264depay", "videodepay");
   conv = gst_element_factory_make ("ffmpegcolorspace","converter");
   //videodec = gst_element_factory_make ("ffenc_mpeg4","mpeg-decoder");
   videodec = gst_element_factory_make ("ffdec_h264", "videodec");
   videosink = gst_element_factory_make ("autovideosink", "videosink");
   //sink = gst_element_factory_make ("udpsink","audio-output");

   if(!pipeline || !rtpsrc || !xvsink || !tover )
    {
        g_print("Unable to create all necessary elements\n");
        return -1;
    }

  // filtercaps = gst_caps_new_simple ("video/x-raw-yuv","width", G_TYPE_INT, 640,"height", G_TYPE_INT, 480,"framerate", GST_TYPE_FRACTION, 30, 1, NULL);
   filtercaps = gst_caps_from_string (VIDEO_CAPS);
   g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
   gst_caps_unref (filtercaps);


 
   g_object_set (G_OBJECT (rtpsrc), "port" , 8999 ,  NULL);
 

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

 
  //  gst_bin_add_many (recording, evqueue, videodec, muxer, filesink, NULL);
    gst_bin_add_many (recording, evqueue,vrate,filter,videodepay,conv,videodec, videosink, NULL);
    sinkpad       = gst_element_get_static_pad(evqueue,"sink");

    GstPad *ghost = gst_ghost_pad_new("vsink",sinkpad);

    if(ghost == NULL)
    {
        g_error("Unable to create ghostpad!\n");
    }

    gst_element_add_pad(GST_ELEMENT(recording),ghost);
    gst_object_unref(GST_OBJECT(sinkpad));
   // gst_element_link_many(evqueue,videodec,muxer,filesink,NULL);
   // gst_element_link_many(evqueue,vrate,filter,conv,videodec, videosink,NULL);
   gst_element_link_many(evqueue,vrate,filter,videodepay,conv,videodec, videosink,NULL);
   gst_bin_add_many (GST_BIN (pipeline), rtpsrc, vtee, vqueue, tover, xvsink, recording, NULL);

   
    gst_element_link_many(rtpsrc,tover,vtee,NULL);
    srcpad  = gst_element_get_request_pad(vtee,"src0");
    sinkpad = gst_element_get_pad(vqueue,"sink");
    gst_pad_link(srcpad,sinkpad);    
    gst_element_link(vqueue,xvsink);

   
    srcpad  = gst_element_get_request_pad(vtee,"src1");
    sinkpad = gst_element_get_pad(GST_ELEMENT(recording),"vsink");
    gst_pad_link(srcpad,sinkpad);
       
    g_print ("Running...\n");
    gst_element_set_state(pipeline,GST_STATE_PLAYING);

 
    g_main_loop_run (loop);

   
    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));

    return 0;
}









Re: how to display video at client side captured from udpsrc

Nicolas Dufresne-3
On Monday, 15 June 2015 at 00:13 -0700, gagankumarnigam wrote:
> I tried streaming video using RTSP and UDP (udpsink) from a v4l2 video
> source. At the client my program runs fine, but it doesn't display
> video. Can somebody tell me what I am doing wrong at the client side?

It seems you are using the GStreamer 0.10 series, which reached end of
life a few years ago. Have you tried a recent version of GStreamer
(e.g. 1.4.5)?

Nicolas

p.s. I think you meant to use queue, not queue2, and you missed the
queue-size property on v4l2src

Re: how to display video at client side captured from udpsrc

gagankumarnigam
Hi Nicolas,

I am forced to use gstreamer-0.10.29 on RHEL 6.5 (I tried a recent GStreamer 1.0 version, but every time my kernel panicked, so I use the version already installed on RHEL 6.5; that is also my project requirement).

As per your suggestion I changed queue2 to queue, but I get the same result: no display. However, on the server side I tried the command line (gst-launch udpsrc port=8999 ! mpegvideoparse ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink) and I got a display. I could also use this command line with gst_parse_launch(), but that is not my requirement.

I request you to please help me fix my code so it displays video at the client side, since the server side works properly (displaying video while simultaneously sending the same video over UDP (RTSP)).

Thanks with regards

Re: how to display video at client side captured from udpsrc

gagankumarnigam
In reply to this post by Nicolas Dufresne-3
Can you please tell me how to use the queue-size property on v4l2src in my code?

Re: how to display video at client side captured from udpsrc

Nicolas Dufresne-3
On Thursday, 25 June 2015 at 20:54 -0700, gagankumarnigam wrote:
> Can you please tell me how to use the queue-size property on v4l2src
> in my code?

This property does not exist anymore in GStreamer. You must be using a
very old and unmaintained version.

Properties are set using the g_object_set() method, e.g.:

  g_object_set (v4l2src, "queue-size", 16, NULL);

Don't forget the sentinel (the NULL param). You can set multiple
properties in the same call: g_object_set (obj, "prop1", value1,
"prop2", value2, ..., NULL).

Nicolas

Re: how to display video at client side captured from udpsrc

gagankumarnigam
Hi Nicolas,

As per your suggestion I set the queue size for v4l2src (g_object_set (v4l2src, "queue-size", 16, NULL)) but got a warning:

value "16" of type 'guint' is invalid or out of range for property 'queue-size' of type 'guint'.

Anyway, I now get video on the client side with a delay of 5 seconds (as I changed the frame rate on the sender side to 30/5). The delay is okay for me, but the video does not play smoothly on the client: it displays slowly, as if in slow motion.

Can you please help me resolve this problem?