gst_parse_launch output differs from command-line gst-launch-1.0 - GStreamer

I have the following pipeline, which works with gst-launch-1.0:
gst-launch-1.0 v4l2src device='/dev/video0' ! 'video/x-raw,format=(string)YUY2,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw,format=(string)NV12,width=(int)640,height=(int)480' ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink sync=false -v
In my C application below, which executes the same pipeline string, no window is displayed.
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline =
      gst_parse_launch
      ("v4l2src device='/dev/video0' ! 'video/x-raw,format=(string)YUY2,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw,format=(string)NV12,width=(int)640,height=(int)480' ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink sync=false -v",
      NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

That's why you should check for errors. Your pipeline description has a -v at the end, which is an option of the gst-launch-1.0 application but is not part of a valid pipeline description.
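For reference, a minimal sketch of how the parse result could be checked (same API as the code above; only the GError handling is added, and description stands for the pipeline string):

GError *error = NULL;
GstElement *pipeline;

/* gst_parse_launch() reports parse problems through the GError */
pipeline = gst_parse_launch (description, &error);
if (error != NULL) {
  g_printerr ("Could not build pipeline: %s\n", error->message);
  g_clear_error (&error);
  return -1;
}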

I figured out the mistake in the above code: inside the gst_parse_launch string the caps filters must not carry quotes (the shell consumes those on the command line). With the quotes and the -v removed, the rest works fine.
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline: no quotes around the caps, no -v */
  pipeline =
      gst_parse_launch
      ("v4l2src ! video/x-raw,format=(string)YUY2 ! nvvidconv ! video/x-raw(memory:NVMM),format=(string)NV12 ! nvvidconv ! video/x-raw,format=(string)NV12 ! nvvideoconvert ! video/x-raw(memory:NVMM),format=(string)NV12 ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink",
      NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
The above code works for me on an NVIDIA Xavier!

Related

C++ GStreamer RTP receive stream

Hi, I'm trying to run an RTP receive pipeline in C++. When I use the line gst-launch-1.0 -v udpsrc port=5000 caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false I can receive the stream from the command line.
The C++ code also works when I use gst_parse_launch: pipeline = gst_parse_launch("udpsrc port=5000 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false", NULL);
The problem is that when I build the pipeline element by element in C++, I think I am getting a linking error because of a sometimes pad. Here is what I tried.
To debug more easily, I tried to link the elements one by one, like this:
int res = gst_element_link(udp, depay);
However, it seems I can't link udpsrc to rtph264depay, or decodebin to videoconvert.
Can you help me find where my mistake is?
#include <gst/gst.h>
#include <iostream>

static void pad_added(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* name = gst_pad_get_name(pad);
    std::cout << "A new pad is created:\n" << name;
    g_free(name);
}

int main()
{
    GstElement* pipeline, * conv, * dec, * depay, * udp, * videosink;
    GstCaps* udpcaps;
    GMainLoop* loop;

    // init GStreamer
    gst_init(NULL, NULL);
    loop = g_main_loop_new(NULL, FALSE);

    //pipeline = gst_parse_launch("udpsrc port=5000 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false", NULL);

    // setup pipeline
    pipeline = gst_pipeline_new("pipeline");
    conv = gst_element_factory_make("videoconvert", "conv");
    videosink = gst_element_factory_make("autovideosink", "videosink");
    //enc = gst_element_factory_make("x264enc", "enc");
    depay = gst_element_factory_make("rtph264depay", "depay");
    dec = gst_element_factory_make("decodebin", "decode");
    //g_object_set(G_OBJECT(depay), "config-interval", 1, NULL);
    udp = gst_element_factory_make("udpsrc", "udp");
    //g_object_set(G_OBJECT(udp), "address", "127.0.0.1", NULL);
    g_object_set(G_OBJECT(udp), "port", 5000, NULL);

    udpcaps = gst_caps_new_simple("application/x-rtp",
        "media", G_TYPE_STRING, "video",
        "clock-rate", G_TYPE_INT, 90000,
        "encoding-name", G_TYPE_STRING, "H264",
        "payload", G_TYPE_INT, 96,
        NULL);
    g_object_set(G_OBJECT(udp), "caps", udpcaps, NULL);
    gst_caps_unref(udpcaps);

    gst_bin_add_many(GST_BIN(pipeline), udp, depay, dec, conv, videosink, NULL);
    // g_signal_connect(udp, "pad-added", G_CALLBACK(pad_added), depay);
    // int res2 = gst_element_link(dec, conv);
    int res = gst_element_link(conv, videosink);
    //int res = gst_element_link_many(udp, depay, dec, conv, videosink, NULL);
    if (res != TRUE)
    {
        std::cout << "Error!\n" << res << "\n";
        // return -1;
    }

    // play
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_main_loop_run(loop);

    // clean up
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(pipeline));
    g_main_loop_unref(loop);
    return 0;
}
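For reference on the pad question: udpsrc and rtph264depay both expose always pads, so those links can be made up front, but decodebin creates its source pad only after it has detected the stream type, so the decodebin -> videoconvert link has to be made from a pad-added callback on the decodebin (not on udpsrc, which never emits pad-added). A minimal sketch, reusing the element variable names from the code above:

/* Called when decodebin creates its source pad; data is the videoconvert element */
static void decode_pad_added(GstElement* decodebin, GstPad* pad, gpointer data)
{
    GstElement* conv = GST_ELEMENT(data);
    GstPad* sinkpad = gst_element_get_static_pad(conv, "sink");
    if (!gst_pad_is_linked(sinkpad))
        gst_pad_link(pad, sinkpad);
    gst_object_unref(sinkpad);
}

/* in main(), after gst_bin_add_many(): */
gst_element_link(udp, depay);        // always pads: linkable up front
gst_element_link(depay, dec);        // always pads: linkable up front
gst_element_link(conv, videosink);   // always pads: linkable up front
g_signal_connect(dec, "pad-added", G_CALLBACK(decode_pad_added), conv);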

Latency increases over time in a display + record pipeline

Below is my pipeline to display and record a stream coming from a UDP source. The problem is that latency builds up over time (it starts with no latency) both on my displayed stream and in my recording. However, if I only display the stream, there is no latency.
Does anyone have an idea of where the problem could come from?
pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! tee name=t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox ! filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
Thanks,
EDIT:
All my save and display code:
void MainWindow::SaveVideo()
{
    std::string strPathVideo = m_VideoPath + CreateFileName("mkv");
    GError* error = NULL;
    GstElement* source;
    GstElement* filesink;
    GstElement* matrox;
    //GstElement* compression;
    GstElement* textoverlay;
    GstElement* sink;
    GstPad* padsink;

    GstCaps* caps = gst_caps_new_simple("application/x-rtp",
        "media", G_TYPE_STRING, "video",
        "payload", G_TYPE_INT, 96,
        "encoding-name", G_TYPE_STRING, "H264",
        NULL);

    (*ptrstats).pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! textoverlay halignment=center valignment=top name=text ! tee name=t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox ! filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
    if (!(*ptrstats).pipeline) {
        outfile << "Save : " << error->message << "\n";
        exit(1);
    }

    sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
    filesink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "myFile");
    g_object_set(filesink, "location", strPathVideo.c_str(), NULL);
    //compression = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "compression");
    //g_object_set(G_OBJECT(compression), "bitrate", m_intcompression, NULL);
    matrox = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "matrox");
    g_object_set(G_OBJECT(matrox), "offset-to-zero", true, NULL);
    source = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "source");
    g_object_set(G_OBJECT(source), "caps", caps, NULL);
    g_object_set(G_OBJECT(source), "port", m_port, NULL);
    textoverlay = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "text");
    g_object_set(G_OBJECT(textoverlay), "text", m_text.ToStdString(), NULL);

    padsink = gst_element_get_static_pad(sink, "sink");
    gst_pad_add_probe(padsink, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)buffer_sink, ptrstats, NULL);
    gst_object_unref(padsink);

    (*ptrstats).bus = gst_element_get_bus(GST_ELEMENT((*ptrstats).pipeline));

#ifdef __WXGTK__
    sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), m_xid);
#elif defined __WXMSW__
    WXWidget hwnd = (*ptrstats).m_renderWindow->GetHandle();
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink),
        reinterpret_cast<guintptr>(hwnd));
#endif

    PlayHelper();
}

void MainWindow::PlayHelper()
{
    GstStateChangeReturn ret =
        gst_element_set_state((*ptrstats).pipeline, GST_STATE_PLAYING);

    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        outfile << "Playhelper : Unable to set the pipeline to the playing state.\n";
        wxLogWarning("Unable to set the pipeline to the playing state.");
        gst_object_unref((*ptrstats).pipeline);
        (*ptrstats).pipeline = NULL;
    }
}

gstreamer rtsp tee appsink can't emit signal new-sample

I am using GStreamer to play and process an RTSP stream:
rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink
and I wrote the C++ code like this:
#include <gst/gst.h>

void printIt(GList *p) {
  if (!p) {
    g_print("p null\n");
    return;
  }
  while (p) {
    GstPad *pad = (GstPad *) p->data;
    g_print("[%s]", pad->object.name);
    p = p->next;
  }
  g_print("\n");
}

GstFlowReturn new_sample_cb (GstElement *appsink, gpointer udata) {
  g_print("new-sample cb\n");
  return GST_FLOW_OK;
}

GstFlowReturn new_preroll_cb (GstElement *appsink, gpointer udata) {
  g_print("new_preroll_cb cb\n");
  return GST_FLOW_OK;
}

int
main (int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline = gst_parse_launch("rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink", NULL);

  GstElement *appsink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
  printIt(appsink->pads);

  g_signal_connect(appsink, "new-sample", G_CALLBACK(new_sample_cb), pipeline);
  g_print("sig conn new-sample\n");
  g_signal_connect(appsink, "new-preroll", G_CALLBACK(new_preroll_cb), pipeline);
  g_print("sig conn new-preroll\n");

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg =
      gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
When I compile and run it, the video shows up in autovideosink, but the appsink's new-sample signal is never called back. What should I do if I want to process a frame in the appsink?
Thanks.
By default appsink favors callbacks over signals for performance reasons (though I wouldn't consider your use case a performance problem). For appsink to emit signals you will need to set its emit-signals property to true; it defaults to false.
P.S. Apart from the above, I think you will need a GMainLoop for event processing, as demonstrated in the GStreamer examples.
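A minimal sketch of both suggestions applied to the code above (the pull-sample action signal keeps it header-free; gst_app_sink_pull_sample() from gst/app/gstappsink.h would work just as well):

/* Enable signal emission before PLAYING; without it, new-sample never fires */
g_object_set(G_OBJECT(appsink), "emit-signals", TRUE, NULL);
g_signal_connect(appsink, "new-sample", G_CALLBACK(new_sample_cb), pipeline);

/* In the callback, pull the sample so appsink's internal queue drains */
GstFlowReturn new_sample_cb (GstElement *appsink, gpointer udata) {
  GstSample *sample = NULL;
  g_signal_emit_by_name(appsink, "pull-sample", &sample);
  if (sample != NULL) {
    /* ... process the frame here ... */
    gst_sample_unref(sample);
  }
  return GST_FLOW_OK;
}

/* A GMainLoop then drives signal dispatching while the pipeline runs */
GMainLoop *loop = g_main_loop_new(NULL, FALSE);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);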

GStreamer audiomixer: converting a command line to code

I want to use audiomixer in my application, which receives audio from different sources and should play it all together through the speaker.
My final application should do something like this command:
gst-launch-1.0 audiomixer name=mix ! autoaudiosink autoaudiosrc ! \
audioconvert ! mix. udpsrc port=5001 caps="application/x-rtp" ! queue !\
rtppcmudepay ! mulawdec ! audioconvert ! audioresample ! mix.
I already wrote code using tee and queues and know how to work with them, based on this code, but I don't know how to use the mixer in my code.
So for simplicity, I just want to write code that works the way this command does:
gst-launch-1.0 audiotestsrc freq=100 ! audiomixer name=mix ! audioconvert ! autoaudiosink autoaudiosrc ! mix.
I didn't find any useful example for reaching this goal. How can I write C code to do this?
For the second part:
gst-launch-1.0 audiotestsrc freq=100 ! audiomixer name=mix ! audioconvert ! autoaudiosink autoaudiosrc ! mix.
this code works:
#include <gst/gst.h>

static GMainLoop *loop;

int bus_callback (GstBus *bus, GstMessage *message, gpointer data)
{
  g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR: {
      GError *err;
      gchar *debug;

      gst_message_parse_error (message, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      g_main_loop_quit (loop);
      break;
    default:
      /* unhandled message */
      break;
  }
  /* we want to be notified again the next time there is a message
   * on the bus, so returning TRUE (FALSE means we want to stop watching
   * for messages on the bus and our callback should not be called again)
   */
  return TRUE;
}

int main (int argc, char *argv[])
{
  /* Initialize GStreamer */
  gst_init (nullptr, nullptr);

  GstElement *pipeline, *src1, *src2, *sink, *convert1, *convert2, *audiomixer;
  GstPad *conv_pad1, *conv_pad2, *mixer1_sinkpad, *mixer2_sinkpad;
  static GstBus *bus;
  static guint bus_watch_id;

  pipeline = gst_pipeline_new ("pipeline");
  audiomixer = gst_element_factory_make ("adder", "mixer");
  sink = gst_element_factory_make ("autoaudiosink", "sink");
  src1 = gst_element_factory_make ("audiotestsrc", "src1");
  convert1 = gst_element_factory_make ("audioconvert", "convert1");
  src2 = gst_element_factory_make ("autoaudiosrc", "src2");
  convert2 = gst_element_factory_make ("audioconvert", "convert2");
  //g_object_set (sink, "async-handling", TRUE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), audiomixer, sink, NULL);
  gst_bin_add_many (GST_BIN (pipeline), src1, convert1, NULL);
  gst_bin_add_many (GST_BIN (pipeline), src2, convert2, NULL);
  gst_element_link (src1, convert1);
  gst_element_link (src2, convert2);
  gst_element_link (audiomixer, sink);

  /* each source branch gets its own request pad on the mixer */
  conv_pad1 = gst_element_get_static_pad (convert1, "src");
  mixer1_sinkpad = gst_element_get_request_pad (audiomixer, "sink_%u");
  gst_pad_link (conv_pad1, mixer1_sinkpad);
  g_object_unref (mixer1_sinkpad);

  conv_pad2 = gst_element_get_static_pad (convert2, "src");
  mixer2_sinkpad = gst_element_get_request_pad (audiomixer, "sink_%u");
  gst_pad_link (conv_pad2, mixer2_sinkpad);
  g_object_unref (mixer2_sinkpad);

  /* adds a watch for new message on our pipeline's message bus to
   * the default GLib main context, which is the main context that our
   * GLib main loop is attached to below
   */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_callback, NULL);
  gst_object_unref (bus);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  g_object_unref (conv_pad1);
  g_object_unref (conv_pad2);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_source_remove (bus_watch_id);
  return 0;
}
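For the first part (the full command with the UDP branch), the same request-pad pattern extends naturally. A hedged sketch of just that extra branch, assuming the pipeline and audiomixer variables from the code above (the variable names here are illustrative):

/* udpsrc port=5001 caps="application/x-rtp" ! queue ! rtppcmudepay ! mulawdec
 * ! audioconvert ! audioresample ! mix. */
GstElement *udp = gst_element_factory_make ("udpsrc", "udp");
GstElement *queue = gst_element_factory_make ("queue", "queue");
GstElement *depay = gst_element_factory_make ("rtppcmudepay", "depay");
GstElement *mudec = gst_element_factory_make ("mulawdec", "mudec");
GstElement *convert3 = gst_element_factory_make ("audioconvert", "convert3");
GstElement *resample = gst_element_factory_make ("audioresample", "resample");

GstCaps *rtpcaps = gst_caps_new_empty_simple ("application/x-rtp");
g_object_set (udp, "port", 5001, "caps", rtpcaps, NULL);
gst_caps_unref (rtpcaps);

gst_bin_add_many (GST_BIN (pipeline), udp, queue, depay, mudec, convert3, resample, NULL);
gst_element_link_many (udp, queue, depay, mudec, convert3, resample, NULL);

/* request another sink pad on the mixer for this branch */
GstPad *branch_src = gst_element_get_static_pad (resample, "src");
GstPad *mixer3_sinkpad = gst_element_get_request_pad (audiomixer, "sink_%u");
gst_pad_link (branch_src, mixer3_sinkpad);
g_object_unref (branch_src);
g_object_unref (mixer3_sinkpad);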

gst_element_link fails in a second thread

I have a problem with linking elements in a second thread.
I create two threads, and in each one I create a pipeline that stores an IP camera stream to a file.
When the second thread tries to link the stream elements (h264parse -> matroskamux), I receive a segmentation fault.
GStreamer prints some errors:
(Cam_recorder:5529): GLib-GObject-WARNING **: cannot register existing type 'GstMatroskamuxPad'
(Cam_recorder:5529): GLib-GObject-CRITICAL **: g_object_new: assertion 'G_TYPE_IS_OBJECT (object_type)' failed
When I run the same code with a small delay (500 ms) added between the threads' startup, both threads create their pipelines correctly and the program works.
Can anyone help me?
EDIT:
My code:
void Camera::Thread_function(){
    GstElement *pipeline = NULL;
    GstElement *temp_ele;
    GstBus *bus = NULL;
    GstMessage *msg = NULL;
    GError *error = NULL;
    STRING text;

    DEBUG << "Starting camera:" << name << END;
    text = stream_uri;
    DEBUG << text << END;

    pipeline = gst_parse_launch(text.c_str(), &error);
    if (error != NULL){
        CRITICAL << "Parse error: " << error->message << END;
        g_error_free(error);
        error = NULL;
        goto STOP;
    }
    if (pipeline == NULL){
        CRITICAL << "Pipeline is NULL" << END;
        goto STOP;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    loop = g_main_loop_new(NULL, FALSE);
    gst_bus_add_watch(GST_ELEMENT_BUS(pipeline), bus_cb, loop);
    g_timeout_add_seconds(1, CAM_REC::timeout_cb, this);
    g_main_loop_run(loop);
    g_main_loop_unref(loop);

STOP:
    if (msg != NULL){
        gst_message_unref(msg);
        msg = NULL;
    }
    if (bus != NULL){
        gst_object_unref(bus);
        bus = NULL;
    }
    if (pipeline){
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        pipeline = NULL;
    }
    INFO << "Cam " << name << " end" << END;
}
Pipelines:
rtspsrc name=kam3_stream location=\"rtsp://192.168.0.107/stream1\" ! rtph264depay name=kam3_deplay ! h264parse name=kam3_parse ! matroskamux name=kam3_mux ! filesink location=x.mkv name=kamera3_file_sink
rtspsrc name=kam4_stream location=\"rtsp://admin:admin#192.168.0.108/\" ! rtph264depay name=kam4_deplay ! h264parse name=kam4_parse ! matroskamux name=kam4_mux ! filesink location=x2.mkv name=kamera4_file_sink
The program crashes in gst_parse_launch().
Thanks
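The fact that a 500 ms delay helps points to a race while both threads register element types inside gst_parse_launch() at the same time; the "cannot register existing type 'GstMatroskamuxPad'" warning is the classic symptom. Two things usually avoid it: make sure gst_init() runs once before any thread starts, and serialize pipeline construction. A minimal sketch of the latter (helper and lock names are hypothetical):

static GMutex construct_lock;   /* shared by all camera threads */

static GstElement *create_pipeline_serialized(const gchar *description, GError **error)
{
    GstElement *pipeline;

    /* Only construction needs the lock; the pipelines can run in
     * parallel once they are built. */
    g_mutex_lock(&construct_lock);
    pipeline = gst_parse_launch(description, error);
    g_mutex_unlock(&construct_lock);
    return pipeline;
}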