GStreamer RTP receive stream in C++

Hi, I'm trying to run an RTP receive pipeline in C++. When I use the line
gst-launch-1.0 -v udpsrc port=5000 caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false
I can get the stream from the command line.
The C++ code also works when I use gst_parse_launch:
pipeline = gst_parse_launch("udpsrc port=5000 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false", NULL);
Here is the question: when I build the pipeline element by element in C++, I think I'm getting a linking error because of dynamic ("sometimes") pads. Here is what I tried.
To make debugging easier, I tried to link the elements one by one, like this:
int res = gst_element_link(udp, depay);
However, it seems I can't link udpsrc to rtph264depay, or decodebin to videoconvert.
Can you help me find where my mistake is?
#include <gst/gst.h>
#include <iostream>

static void pad_added(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    std::cout << "A new pad is created: " << name << "\n";
    g_free(name);
}

int main()
{
    GstElement* pipeline, * conv, * dec, * depay, * udp, * videosink;
    GstCaps* udpcaps;
    GMainLoop* loop;

    // init GStreamer
    gst_init(NULL, NULL);
    loop = g_main_loop_new(NULL, FALSE);

    //pipeline = gst_parse_launch("udpsrc port=5000 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false", NULL);

    // setup pipeline
    pipeline = gst_pipeline_new("pipeline");
    conv = gst_element_factory_make("videoconvert", "conv");
    videosink = gst_element_factory_make("autovideosink", "videosink");
    //enc = gst_element_factory_make("x264enc", "enc");
    depay = gst_element_factory_make("rtph264depay", "depay");
    dec = gst_element_factory_make("decodebin", "decode");
    //g_object_set(G_OBJECT(depay), "config-interval", 1, NULL);
    udp = gst_element_factory_make("udpsrc", "udp");
    //g_object_set(G_OBJECT(udp), "address", "127.0.0.1", NULL);
    g_object_set(G_OBJECT(udp), "port", 5000, NULL);

    udpcaps = gst_caps_new_simple("application/x-rtp",
        "media", G_TYPE_STRING, "video",
        "clock-rate", G_TYPE_INT, 90000,  // 90 kHz RTP clock for H.264
        "encoding-name", G_TYPE_STRING, "H264",
        "payload", G_TYPE_INT, 96,
        NULL);
    g_object_set(G_OBJECT(udp), "caps", udpcaps, NULL);
    gst_caps_unref(udpcaps);

    gst_bin_add_many(GST_BIN(pipeline), udp, depay, dec, conv, videosink, NULL);
    // g_signal_connect(udp, "pad-added", G_CALLBACK(pad_added), depay);
    // int res2 = gst_element_link(dec, conv);
    int res = gst_element_link(conv, videosink);
    //int res = gst_element_link_many(udp, depay, dec, conv, videosink, NULL);
    if (res != TRUE)
    {
        std::cout << "Error!\n" << res << "\n";
        // return -1;
    }

    // play
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_main_loop_run(loop);

    // clean up
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(pipeline));
    g_main_loop_unref(loop);
    return 0;
}
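For what it's worth, decodebin only creates its source pad at runtime, once it has detected the stream type, so decodebin ! videoconvert cannot be linked before the pipeline runs; that link has to be made from a pad-added callback. udpsrc ! rtph264depay ! decodebin use always pads and can be linked up front, but note that rtph264depay's sink pad requires clock-rate=(int)90000, so a mistyped clock rate in the udpsrc caps (e.g. 9000) makes that link fail too. A minimal sketch of the usual pattern (the callback name and wiring are illustrative):

// Called when decodebin exposes its source pad at runtime.
static void on_decodebin_pad_added(GstElement* decodebin, GstPad* pad, gpointer data)
{
    GstElement* conv = GST_ELEMENT(data);
    GstPad* sinkpad = gst_element_get_static_pad(conv, "sink");
    if (!gst_pad_is_linked(sinkpad))
        gst_pad_link(pad, sinkpad);  // complete decodebin -> videoconvert here
    gst_object_unref(sinkpad);
}

// In main(), after gst_bin_add_many():
gst_element_link(udp, depay);        // always pads: can be linked up front
gst_element_link(depay, dec);
gst_element_link(conv, videosink);
// decodebin's src pad does not exist yet; link it from the callback instead
g_signal_connect(dec, "pad-added", G_CALLBACK(on_decodebin_pad_added), conv);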

Related

GStreamer bitrate does not work the same when implemented in C++ compared to command line gst-launch-1.0 (x264enc & omxh264enc)

I encountered different behavior with a GStreamer pipeline when implemented in C++ compared to gst-launch-1.0 execution on the command line; the problem is with the bitrate property.
The same problem described below occurs in both implementations (C++ and command-line execution) with the omxh264enc encoder as well, with its control-rate property set to 2 (CBR mode).
The pipeline used on the command line was:
gst-launch-1.0 ximagesrc ! autovideoconvert ! x264enc bitrate=800 pass=0 ! video/x-h264, stream-format=byte-stream ! h264parse ! mpegtsmux ! udpsink host=127.0.0.1 port=1234 sync=false
The C++ implementation is:
GstElement* pipeline;
GstElement* appsrc;
GstElement* videoConvert;
GstElement* encoder;
GstElement* encoderCapsFilter;
GstElement* parser;
GstElement* tsmux;
GstElement* udpsink;

pipeline = gst_pipeline_new("pipeline");
appsrc = gst_element_factory_make("appsrc", "source");
videoConvert = gst_element_factory_make("autovideoconvert", "my_video_convertor");
encoder = gst_element_factory_make("x264enc", "my_encoder");
encoderCapsFilter = gst_element_factory_make("capsfilter", "my_caps");
parser = gst_element_factory_make("h264parse", "my_parser");
tsmux = gst_element_factory_make("mpegtsmux", "my_muxer");
udpsink = gst_element_factory_make("udpsink", "my_udpsink");

/* Configure appsrc (width/height are defined elsewhere) */
g_object_set(G_OBJECT(appsrc), "caps", gst_caps_new_simple("video/x-raw",
    "format", G_TYPE_STRING, "I420",
    "width", G_TYPE_INT, width,
    "height", G_TYPE_INT, height,
    "framerate", GST_TYPE_FRACTION, 25, 1, NULL), NULL);
g_object_set(G_OBJECT(appsrc), "is-live", TRUE, NULL);

/* Configure encoder */
g_object_set(G_OBJECT(encoder), "bitrate", 800, NULL);
g_object_set(G_OBJECT(encoder), "pass", 0, NULL);

/* Configure encoder caps */
g_object_set(G_OBJECT(encoderCapsFilter), "caps", gst_caps_from_string("video/x-h264, stream-format=byte-stream"), NULL);

/* Configure udpsink */
g_object_set(G_OBJECT(udpsink), "host", "127.0.0.1", NULL);
g_object_set(G_OBJECT(udpsink), "port", 1234, NULL);
g_object_set(G_OBJECT(udpsink), "sync", FALSE, NULL);

// add
gst_bin_add_many(GST_BIN(pipeline),
    appsrc, videoConvert, encoder, encoderCapsFilter, parser, tsmux, udpsink,
    NULL);

// link
if (!gst_element_link_many(appsrc, videoConvert, encoder, encoderCapsFilter,
        parser, tsmux, udpsink, NULL))
{
    g_printerr("Elements could not be linked\n");
}
The bitrate is set to 800 kbps, and when testing the pipeline from the command line, Wireshark measures around 800-850 kbps, which is good.
When testing the same pipeline in C++ (using appsrc instead of ximagesrc), the measured bitrate is different and higher (around 1200-1300 kbps).
What is missing to reach the same bitrate result as the command-line execution?
Is there more configuration to be done on the GStreamer elements when implemented in C++?
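One thing worth ruling out (an assumption on my part, since the question does not show how buffers are pushed): x264enc's rate control works from buffer timestamps, so if appsrc pushes frames without time-format timestamps, the encoder may see an effective framerate different from the 25 fps declared in the caps and overshoot the target bitrate. A minimal sketch of live appsrc timestamp configuration:

/* Hypothetical extra appsrc configuration: operate in time format and
 * timestamp buffers as they are pushed, so the encoder sees the real
 * frame cadence rather than whatever rate the application pushes at. */
g_object_set(G_OBJECT(appsrc),
             "format", GST_FORMAT_TIME,
             "do-timestamp", TRUE,
             NULL);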

Latency increase with the time on display + record pipeline

Below is my pipeline to display and record my stream coming from a UDP source. The problem is that latency increases over time (it starts with no latency) on both my live view and my recording. However, if I just display the stream, there is no latency.
Does anyone have an idea of where the problem could come from?
pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! tee name = t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox !filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
Thanks,
EDIT:
All my save and display code:
void MainWindow::SaveVideo()
{
    std::string strPathVideo = m_VideoPath + CreateFileName("mkv");
    GError* error = NULL;
    GstElement* source;
    GstElement* filesink;
    GstElement* matrox;
    GstElement* clocktime;
    //GstElement* compression;
    GstElement* textoverlay;
    GstElement* sink;
    GstPad* padsink;
    GstCaps* caps = gst_caps_new_simple("application/x-rtp",
        "media", G_TYPE_STRING, "video",
        "payload", G_TYPE_INT, 96,
        "encoding-name", G_TYPE_STRING, "H264",
        NULL);

    (*ptrstats).pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! textoverlay halignment=center valignment=top name=text ! tee name=t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox ! filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
    if (!(*ptrstats).pipeline) {
        outfile << "Save : " << error->message << "\n";
        exit(1);
    }

    sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
    filesink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "myFile");
    g_object_set(filesink, "location", strPathVideo.c_str(), NULL);
    //compression = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "compression");
    //g_object_set(G_OBJECT(compression), "bitrate", m_intcompression, NULL);
    matrox = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "matrox");
    g_object_set(G_OBJECT(matrox), "offset-to-zero", TRUE, NULL);
    source = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "source");
    g_object_set(G_OBJECT(source), "caps", caps, NULL);
    g_object_set(G_OBJECT(source), "port", m_port, NULL);
    textoverlay = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "text");
    g_object_set(G_OBJECT(textoverlay), "text", m_text.ToStdString().c_str(), NULL);

    padsink = gst_element_get_static_pad(sink, "sink");
    gst_pad_add_probe(padsink, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)buffer_sink, ptrstats, NULL);
    gst_object_unref(padsink);

    (*ptrstats).bus = gst_element_get_bus(GST_ELEMENT((*ptrstats).pipeline));

#ifdef __WXGTK__
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), m_xid);
#elif defined __WXMSW__
    WXWidget hwnd = (*ptrstats).m_renderWindow->GetHandle();
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink),
        reinterpret_cast<guintptr>(hwnd));
#endif

    PlayHelper();
}

void MainWindow::PlayHelper()
{
    GstStateChangeReturn ret =
        gst_element_set_state((*ptrstats).pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        outfile << "Playhelper : Unable to set the pipeline to the playing state.\n";
        wxLogWarning("Unable to set the pipeline to the playing state.");
        gst_object_unref((*ptrstats).pipeline);
        (*ptrstats).pipeline = NULL;
    }
}
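A guess worth testing, since the cause is not visible in the code above: if the avenc_mpeg4 branch runs slower than real time, its queue fills up and back-pressure through the tee delays the display branch a little more on every frame. Bounding the display queue and letting it drop old buffers is one way to check this; the sketch below assumes the launch string is changed to name that queue ("queue name=q_disp"), which is not in the original pipeline:

// Hypothetical: requires "queue name=q_disp" in the launch string above.
GstElement* q_disp = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "q_disp");
g_object_set(G_OBJECT(q_disp),
             "leaky", 2,                  // 2 = leak downstream (drop old buffers)
             "max-size-buffers", 5,       // keep the display queue short
             "max-size-bytes", 0,         // disable the other limits
             "max-size-time", (guint64)0,
             NULL);
gst_object_unref(q_disp);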

gstreamer rtsp tee appsink can't emit signal new-sample

I am using GStreamer to play and process an RTSP stream:
rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink
and I wrote the C++ code like this:
#include <gst/gst.h>

void printIt(GList* p) {
    if (!p) {
        g_print("p null\n");
        return;
    }
    while (p) {
        GstPad* pad = (GstPad*)p->data;
        g_print("[%s]", GST_OBJECT_NAME(pad));
        p = p->next;
    }
    g_print("\n");
}

GstFlowReturn new_sample_cb(GstElement* appsink, gpointer udata) {
    g_print("new-sample cb\n");
    return GST_FLOW_OK;
}

GstFlowReturn new_preroll_cb(GstElement* appsink, gpointer udata) {
    g_print("new_preroll_cb cb\n");
    return GST_FLOW_OK;
}

int
main(int argc, char* argv[]) {
    GstElement* pipeline;
    GstBus* bus;
    GstMessage* msg;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);

    /* Build the pipeline */
    pipeline = gst_parse_launch("rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink", NULL);

    GstElement* appsink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
    printIt(appsink->pads);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(new_sample_cb), pipeline);
    g_print("sig conn new-sample\n");
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(new_preroll_cb), pipeline);
    g_print("sig conn new-preroll\n");

    /* Start playing */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
When I compile and run it, video shows up in the autovideosink, but the appsink's new-sample signal is never called back. What should I do if I want to process a frame in the appsink?
Thanks.
By default appsink favors callbacks over signals for performance reasons (but I wouldn't consider your use case a performance problem). For appsink to emit signals, you will need to set its emit-signals property to true; it defaults to false.
P.S. Apart from the above, I think you will need a GMainLoop for event processing, as demonstrated in the GStreamer examples.
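A minimal sketch of that change, applied to the code above:

// Make the appsink actually emit "new-sample"/"new-preroll";
// without this, the two callbacks above are never invoked.
g_object_set(G_OBJECT(appsink), "emit-signals", TRUE, NULL);

// Inside new_sample_cb, the frame can then be pulled and released
// via the "pull-sample" action signal:
GstSample* sample = NULL;
g_signal_emit_by_name(appsink, "pull-sample", &sample);
if (sample) {
    GstBuffer* buffer = gst_sample_get_buffer(sample);  // the actual frame data
    // ... process buffer ...
    gst_sample_unref(sample);
}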

GStreamer - Pipeline how to connect filesrc to qmlglsink

I'm new to the world of GStreamer, so I can't figure out how it works and how to pair all the GstElements.
I want to merge video (mp4 for example, or any other video format) with QML (from Qt) as an overlay.
This example works perfectly fine:
GstElement *pipeline = gst_pipeline_new(NULL);
GstElement *src = gst_element_factory_make("videotestsrc", NULL);
GstElement *glupload = gst_element_factory_make("glupload", NULL);
GstElement *sink = gst_element_factory_make("qmlglsink", NULL);
g_assert(src && glupload && sink);
gst_bin_add_many(GST_BIN(pipeline), src, glupload, sink, NULL);
gst_element_link_many(src, glupload, sink, NULL);
But that example uses videotestsrc as the source; I would prefer to use something like filesrc. I tried this code:
GstElement *pipeline = gst_pipeline_new(NULL);
GstElement *src = gst_element_factory_make("filesrc", "file-source");
GstElement *parser = gst_element_factory_make("h264parse", NULL);
GstElement *decoder = gst_element_factory_make("avdec_h264", NULL);
GstElement *colors = gst_element_factory_make("glcolorconvert", NULL);
GstElement *glupload = gst_element_factory_make("glupload", NULL);
GstElement *sink = gst_element_factory_make("qmlglsink", NULL);
g_assert(src && parser && decoder && colors && glupload && sink);
g_object_set(G_OBJECT(src), "location", "file:///home/test.mp4", NULL);
gst_bin_add_many(GST_BIN(pipeline), src, parser, decoder, glupload, colors, sink, NULL);
gst_element_link_many(src, parser, decoder, glupload, colors, sink, NULL);
It compiles, but the output is just a black screen.
Since I'm not sure how the GStreamer pipeline works, here is what I tried: first, read the file from disk with filesrc, then parse it with h264parse and decode it with avdec_h264. Then forward that (I guess raw, uncompressed data) to glupload, and fix the colors with glcolorconvert, since qmlglsink uses RGBA and avdec_h264 outputs I420. After the colors are adjusted, forward it to qmlglsink to be displayed in QML.
I'm missing something, and I don't know how to pair the GstElements; as I said, I need to pair filesrc (any video format) with qmlglsink.
You can try it like below:
MediaPlayer {
    id: playVideo
    source: "gst-pipeline: filesrc location=/home/root/skim-debris.mp4 ! qtdemux ! avdec_h264 ! qtvideosink"
    autoLoad: true
    autoPlay: true
    playbackRate: 1.0
    loops: 10
}
VideoOutput {
    anchors.fill: parent
    source: playVideo
}
It is easier to use a bin or one of GStreamer's auto-pluggers.
But the main issue here is that you are treating an MP4 file as a raw H.264 stream. That cannot work; you need to demux the media streams from your container.
E.g. the pipeline should be something like this:
gst-launch-1.0 filesrc location=/home/test.mp4 ! qtdemux ! \
h264parse ! avdec_h264 ! glupload ! glcolorconvert ! qmlglsink
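Translating that launch line back into element-by-element code needs one extra step: qtdemux only exposes its video pad after it has parsed the file, so the demuxer must be linked from a pad-added callback. A minimal sketch on top of the question's code (the callback name and wiring are illustrative):

// qtdemux creates its pads at runtime; link demux -> h264parse here.
static void on_demux_pad_added(GstElement* demux, GstPad* pad, gpointer data)
{
    GstElement* parser = GST_ELEMENT(data);
    GstPad* sinkpad = gst_element_get_static_pad(parser, "sink");
    if (!gst_pad_is_linked(sinkpad))
        gst_pad_link(pad, sinkpad);  // audio pads will simply fail to link here
    gst_object_unref(sinkpad);
}

// ... in the setup code:
GstElement *demux = gst_element_factory_make("qtdemux", NULL);
g_object_set(G_OBJECT(src), "location", "/home/test.mp4", NULL);  // plain path, not a file:// URI
gst_bin_add_many(GST_BIN(pipeline), src, demux, parser, decoder, glupload, colors, sink, NULL);
gst_element_link(src, demux);                            // static pads
gst_element_link_many(parser, decoder, glupload, colors, sink, NULL);
g_signal_connect(demux, "pad-added", G_CALLBACK(on_demux_pad_added), parser);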

gst_parse_launch differs output from command line gst_launch

I have a pipeline like this, which works with gst-launch-1.0
gst-launch-1.0 v4l2src device='/dev/video0' ! 'video/x-raw,format=(string)YUY2,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw,format=(string)NV12,width=(int)640,height=(int)480' ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink sync=false -v
In my C application below, which executes the same pipeline description, no window is displayed.
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline =
      gst_parse_launch
      ("v4l2src device='/dev/video0' ! 'video/x-raw,format=(string)YUY2,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw,format=(string)NV12,width=(int)640,height=(int)480' ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink sync=false -v",
      NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
That's why you should check for errors. Your pipeline description has a -v at the end, which is an option to the gst-launch-1.0 application but is not part of a valid pipeline description.
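For reference, a minimal sketch of such a check, passing a GError to gst_parse_launch instead of NULL:

GError *error = NULL;
pipeline = gst_parse_launch ("...same pipeline description...", &error);
if (error != NULL) {
  /* gst_parse_launch() can return a partial pipeline even on error,
   * so check the GError, not just the return value */
  g_printerr ("Parse error: %s\n", error->message);
  g_error_free (error);
  return -1;
}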
I figured out the mistake in the above code: the caps in the pipeline description must not be quoted. With the quotes removed (and the -v dropped, as noted above), the rest works fine.
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline =
      gst_parse_launch
      ("v4l2src ! video/x-raw,format=(string)YUY2 ! nvvidconv ! video/x-raw(memory:NVMM),format=(string)NV12 ! nvvidconv ! video/x-raw,format=(string)NV12 ! nvvideoconvert ! video/x-raw(memory:NVMM),format=(string)NV12 ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink", NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
The above code works for me on an NVIDIA Xavier!