Code gets stuck in gst_app_sink_pull_sample, GStreamer - C++

I want to write every frame from an RTSP stream to a file using GStreamer. I tried the following code, but it never gets past gst_app_sink_pull_sample and just hangs.
pipeline = gst_parse_launch("playbin uri=rtsp://IP/ ! jpegenc ! appsink name = sink", NULL);
bus = gst_element_get_bus(pipeline);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
appsink = (GstAppSink*)sink; //tried appsink = GST_APP_SINK(sink); also
sample = gst_app_sink_pull_sample(appsink);
GstBuffer* buffer = gst_sample_get_buffer(sample);
GstMapInfo map;
gst_buffer_map(buffer, &map, GST_MAP_READ);
Mat frame(Size(320, 240), CV_8UC3, (char*)map.data, Mat::AUTO_STEP); // if it's a 320x240 image
imwrite("XYZ.jpg", frame);
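For comparison, here is a minimal sketch of a version where the appsink actually terminates a linked chain, so the pull can return. playbin is a self-contained bin with no output pads, so the "playbin ... ! jpegenc ! appsink" string above never feeds the appsink; note also that after jpegenc the mapped data is a compressed JPEG image, not raw 320x240 pixels, so it can be written to disk directly. The RTSP URL stays the placeholder from the question:

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

int main(int argc, char *argv[]) {
    gst_init(&argc, &argv);

    GError *error = NULL;
    GstElement *pipeline = gst_parse_launch(
        "rtspsrc location=rtsp://IP/ ! decodebin ! videoconvert ! jpegenc ! appsink name=sink",
        &error);
    if (!pipeline) {
        g_printerr("Parse error: %s\n", error->message);
        return -1;
    }

    GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    // Blocks until one sample is available (or the pipeline stops).
    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if (sample) {
        GstBuffer *buffer = gst_sample_get_buffer(sample);
        GstMapInfo map;
        if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
            // map.data is one complete JPEG image; no OpenCV needed to save it.
            g_file_set_contents("XYZ.jpg", (const gchar *)map.data, map.size, NULL);
            gst_buffer_unmap(buffer, &map);
        }
        gst_sample_unref(sample);
    }

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(sink);
    gst_object_unref(pipeline);
    return 0;
}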

Related

How to link v4l2src to capsfilter in c for gstreamer-1.14

I have a GStreamer media pipeline, shown below, which I am trying to convert into C code. The command line works fine.
gst-launch-1.0 v4l2src device=/dev/video1 ! capsfilter caps=video/x-raw,width=1280,height=720,format=UYVY ! queue ! videoconvert ! queue ! capsfilter caps=video/x-raw,format=NV12,width=1280,height=720,pixel-aspect-ratio=1/1 ! v4l2h264enc extra-controls="controls,h264_level=12,h264_profile=1" ! h264parse ! autovideosink
I have written the code and it compiles successfully. When I execute it, however, the v4l2src element fails to link to the capsfilter. I have searched the internet and was unable to fix the problem. Can someone point out what I am doing wrong?
The code snippet is below:
/* Create the gstreamer elements */
source = gst_element_factory_make ("v4l2src", "source");
capsfilter = gst_element_factory_make ("capsfilter", "Caps-Filter");
capsfilter2 = gst_element_factory_make ("capsfilter", "caps-filter2");
video_convert = gst_element_factory_make ("videoconvert", "Video Convert");
queue1 = gst_element_factory_make ("queue", "Encoded Video Queue 1");
queue2 = gst_element_factory_make ("queue", "Encoded Video Queue 2");
encoder = gst_element_factory_make ("v4l2h264enc", "HW Accelerated Encoder");
H264_pay = gst_element_factory_make ("h264parse", "Payload-encode H264 video into RTP packets");
sink = gst_element_factory_make("autovideosink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if(!source || !capsfilter || !capsfilter2 || !video_convert || !queue1 || !queue2 || !encoder || !H264_pay || !sink)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set Source element properties */
g_object_set (G_OBJECT(source), "device", "/dev/video1", NULL);
GstCaps* filtercaps = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)UYUY");
GstCaps* vconvertfilter = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)NV12,pixel-aspect-ratio=1/1");
GstStructure *test = gst_structure_new_from_string("controls,h264_level=12,h264_profile=1");
g_object_set(G_OBJECT(capsfilter), "caps", filtercaps,NULL);
g_object_set(G_OBJECT(capsfilter2), "caps", vconvertfilter, NULL);
g_object_set (G_OBJECT(encoder), "extra-controls", test, NULL);
gst_caps_unref(filtercaps);
gst_caps_unref(vconvertfilter);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline),
source, capsfilter,
queue1, video_convert, queue2,
capsfilter2, encoder,
H264_pay, sink, NULL);
if(!gst_element_link(source, capsfilter))
{
g_printerr("Unable to link Source to filter. check your caps. \n");
gst_object_unref (pipeline);
}
if (gst_element_link_many (capsfilter, queue1, video_convert, NULL) != TRUE)
{
g_printerr("Capsfilter could not be linked to queue1. \n");
gst_object_unref (pipeline);
}
if (gst_element_link_many (video_convert, queue2, capsfilter2, encoder, H264_pay, NULL) != TRUE)
{
g_printerr("video_convert could not be linked to queue2. \n");
gst_object_unref (pipeline);
}
if(gst_element_link_many (H264_pay, sink, NULL) != TRUE)
{
g_printerr("parse could not link to sink.\n");
gst_object_unref (pipeline);
}
I get the following error:
Unable to link Source to filter. check your caps.
Can somebody help me correct the mistake?
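One thing worth checking, from a side-by-side reading of the command line and the code: the working gst-launch-1.0 line uses format=UYVY, while the caps string built in the code reads format=(string)UYUY. Caps naming a format that v4l2src cannot produce would make exactly this first link fail. A corrected construction would look like the sketch below (same caps as the command line; everything else in the snippet is assumed to stay as posted):

/* Note UYVY, not UYUY, to match the working command line. */
GstCaps *filtercaps = gst_caps_from_string(
    "video/x-raw,width=1280,height=720,format=(string)UYVY");
g_object_set(G_OBJECT(capsfilter), "caps", filtercaps, NULL);
gst_caps_unref(filtercaps);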

GStreamer - Pipeline how to connect filesrc to qmlglsink

I'm new to the world of GStreamer, so I can't figure out how it works and how to connect all the GstElements.
I want to merge video (MP4, for example, or any other video format) with QML (from Qt) as an overlay.
This example works perfectly fine.
GstElement *pipeline = gst_pipeline_new(NULL);
GstElement *src = gst_element_factory_make("videotestsrc",NULL);
GstElement *glupload = gst_element_factory_make("glupload",NULL);
GstElement *sink = gst_element_factory_make("qmlglsink", NULL);
g_assert(src && glupload && sink);
gst_bin_add_many(GST_BIN(pipeline), src, glupload, sink, NULL);
gst_element_link_many(src, glupload, sink, NULL);
But that example uses videotestsrc as Source, I would prefer to use something like filesrc.
I tried this code:
GstElement *pipeline = gst_pipeline_new (NULL);
GstElement *src = gst_element_factory_make ("filesrc", "file-source");
GstElement *parser = gst_element_factory_make("h264parse",NULL);
GstElement *decoder = gst_element_factory_make("avdec_h264",NULL);
GstElement *colors = gst_element_factory_make("glcolorconvert",NULL);
GstElement *glupload = gst_element_factory_make ("glupload", NULL);
GstElement *sink = gst_element_factory_make ("qmlglsink", NULL);
g_assert (src && parser && decoder && colors && glupload && sink);
g_object_set (G_OBJECT (src), "location", "file:///home/test.mp4", NULL);
gst_bin_add_many (GST_BIN (pipeline), src, parser, decoder, glupload, colors, sink, NULL);
gst_element_link_many (src, parser, decoder, glupload, colors, sink, NULL);
It compiles, but the output is just a black screen.
Since I'm not sure how the GStreamer pipeline works, here is what I tried.
First, read the file from disk with filesrc, then parse it with h264parse and decode it with avdec_h264. Then forward that (I guess raw uncompressed data) to glupload and fix the colors with glcolorconvert, since qmlglsink expects RGBA while avdec_h264 outputs I420. After the colors are adjusted, forward it to qmlglsink to be displayed in QML.
I'm missing something, and I don't know how to pair the GstElements. As I said, I need to connect filesrc (any video format) to qmlglsink.
You can try something like this:
MediaPlayer {
    id: playVideo
    source: "gst-pipeline: filesrc location=/home/root/skim-debris.mp4 ! qtdemux ! avdec_h264 ! qtvideosink"
    autoLoad: true
    autoPlay: true
    playbackRate: 1.0
    loops: 10
}
VideoOutput {
    anchors.fill: parent
    source: playVideo
}
It is easier to use a bin or one of GStreamer's auto-pluggers.
But the main issue here is that you are treating an MP4 file as a raw H.264 stream. That cannot work: you need to demux the media streams out of the container first.
E.g. the pipeline should be something like this:
gst-launch-1.0 filesrc location=/home/test.mp4 ! qtdemux ! \
h264parse ! avdec_h264 ! glupload ! glcolorconvert ! qmlglsink
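Doing the same thing programmatically needs one extra step that the gst-launch line hides: qtdemux creates its source pads dynamically, so it cannot be linked at construction time and needs a pad-added callback. A minimal sketch under those assumptions (note also that filesrc's location property takes a plain filesystem path, not a file:// URI):

#include <gst/gst.h>

/* Link qtdemux's dynamically created video pad to the parser. */
static void on_pad_added(GstElement *demux, GstPad *new_pad, gpointer user_data)
{
    GstElement *parser = GST_ELEMENT(user_data);
    GstPad *sinkpad = gst_element_get_static_pad(parser, "sink");
    GstCaps *caps = gst_pad_get_current_caps(new_pad);
    if (caps && !gst_pad_is_linked(sinkpad)) {
        const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
        if (g_str_has_prefix(name, "video/x-h264")) /* skip audio pads */
            gst_pad_link(new_pad, sinkpad);
    }
    if (caps)
        gst_caps_unref(caps);
    gst_object_unref(sinkpad);
}

/* Pipeline construction (e.g. in main, after gst_init): */
GstElement *pipeline = gst_pipeline_new(NULL);
GstElement *src      = gst_element_factory_make("filesrc", NULL);
GstElement *demux    = gst_element_factory_make("qtdemux", NULL);
GstElement *parser   = gst_element_factory_make("h264parse", NULL);
GstElement *decoder  = gst_element_factory_make("avdec_h264", NULL);
GstElement *glupload = gst_element_factory_make("glupload", NULL);
GstElement *colors   = gst_element_factory_make("glcolorconvert", NULL);
GstElement *sink     = gst_element_factory_make("qmlglsink", NULL);

g_object_set(G_OBJECT(src), "location", "/home/test.mp4", NULL); /* path, not URI */
gst_bin_add_many(GST_BIN(pipeline), src, demux, parser, decoder, glupload, colors, sink, NULL);
gst_element_link(src, demux);                       /* static pads link immediately */
gst_element_link_many(parser, decoder, glupload, colors, sink, NULL);
/* qtdemux pads appear only once the stream is parsed, so link them on demand. */
g_signal_connect(demux, "pad-added", G_CALLBACK(on_pad_added), parser);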

Get frames from RTSP stream using GStreamer 1.0 in C++

I am trying to get RTSP video frames using GStreamer 1.0 and the Qt libs. I have a problem getting frames from the appsink, because somehow my callback function is never invoked.
FrameFlow::FrameFlow()
{
pipeline_ = gst_parse_launch ("rtspsrc location=rtsp://admin:rce#192.168.88.240:554 ! decodebin ! appsink name=sink", nullptr);
sink_ = gst_bin_get_by_name(GST_BIN(pipeline_), "sink");
gst_app_sink_set_emit_signals(GST_APP_SINK(sink_), TRUE);
g_signal_connect(sink_, "new-sample", G_CALLBACK(newSample(GST_APP_SINK(sink_), (gpointer) this)), (gpointer)this);
gst_element_set_state(pipeline_, GST_STATE_PLAYING);
}
GstFlowReturn FrameFlow::newSample(GstAppSink *sink, gpointer gSelf)
{
GstSample* sample = NULL;
GstMapInfo bufferInfo;
FrameFlow* self = static_cast<FrameFlow* >(gSelf);
sample = gst_app_sink_pull_sample(GST_APP_SINK(sink_));
if(sample != NULL)
{
buffer_ = gst_sample_get_buffer(sample);
if(buffer_ != NULL)
{
gst_buffer_map(buffer_, &bufferInfo, GST_MAP_READ);
self->mutex_.lock();
self->image_ = QImage(bufferInfo.data, 320, 180, QImage::Format_RGB888);
self->mutex_.unlock();
gst_buffer_unmap(buffer_, &bufferInfo);
}
gst_sample_unref(sample);
}
return GST_FLOW_OK;
}
I am trying to register newSample() as the callback, but when I debug, that function is never called even once, yet memory keeps leaking (I guess the pipeline itself works, because when I comment out gst_element_set_state(pipeline_, GST_STATE_PLAYING); it no longer leaks).
Where is my mistake?
Thank you for your help!
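For what it is worth, the g_signal_connect line above calls newSample immediately and hands its return value to G_CALLBACK, rather than passing the function itself; and a non-static member function cannot serve as a C callback in any case. A minimal sketch of the usual pattern, with newSample made a static member (the class layout is assumed from the snippet; the videoconvert ! video/x-raw,format=RGB stage is also my assumption, since QImage::Format_RGB888 wants packed RGB while decodebin alone will typically deliver I420):

// In the header: static GstFlowReturn newSample(GstAppSink *sink, gpointer gSelf);

FrameFlow::FrameFlow()
{
    pipeline_ = gst_parse_launch(
        "rtspsrc location=rtsp://admin:rce#192.168.88.240:554 ! decodebin "
        "! videoconvert ! video/x-raw,format=RGB ! appsink name=sink", nullptr);
    sink_ = gst_bin_get_by_name(GST_BIN(pipeline_), "sink");
    gst_app_sink_set_emit_signals(GST_APP_SINK(sink_), TRUE);
    // Pass the function pointer itself; do not call it here.
    g_signal_connect(sink_, "new-sample", G_CALLBACK(&FrameFlow::newSample), this);
    gst_element_set_state(pipeline_, GST_STATE_PLAYING);
}

GstFlowReturn FrameFlow::newSample(GstAppSink *sink, gpointer gSelf)
{
    FrameFlow *self = static_cast<FrameFlow *>(gSelf);
    GstSample *sample = gst_app_sink_pull_sample(sink); // use the argument, not sink_
    if (sample != NULL) {
        GstBuffer *buffer = gst_sample_get_buffer(sample);
        GstMapInfo info;
        if (buffer != NULL && gst_buffer_map(buffer, &info, GST_MAP_READ)) {
            self->mutex_.lock();
            // copy() detaches the QImage from GStreamer's memory before unmap.
            self->image_ = QImage(info.data, 320, 180, QImage::Format_RGB888).copy();
            self->mutex_.unlock();
            gst_buffer_unmap(buffer, &info);
        }
        gst_sample_unref(sample);
    }
    return GST_FLOW_OK;
}

This would also explain the leak: with emit-signals enabled but no working handler connected, nothing ever pulls, and appsink's internal queue grows without bound.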

GStreamer Two Pipelines Synchronization

I have developed an application which takes data from a file, adds metadata to each frame, and transfers the data to a client through UDP over RTP.
At the receiver end, the client removes that metadata and has to play the video.
For that I used, at the server end:
pipeline1 :: gst-launch-1.0 filesrc ! videoparse ! appsink
at appsink I add the metadata and push the buffer to appsrc.
pipeline2 :: gst-launch-1.0 appsrc ! rtpgstpay ! udpsink
At the receiver's end:
pipeline1 :: gst-launch-1.0 udpsrc ! rtpgstdepay ! appsink
at appsink I remove the metadata and push the buffer to appsrc.
pipeline2 :: gst-launch-1.0 appsrc ! videoparse ! autovideoconvert ! autovideosink
The problem is that at the receiver's end I am not getting all the frames, and the video does not play properly: only one frame plays, then it stops, then again a single frame plays.
Can anyone provide a solution or suggestion?
/* Code at server end, processing a frame. */
/* This is in a function which is called by g_timeout_add_seconds(0, new_sample, NULL); */
g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);
buf = gst_buffer_new_wrapped(&header, sizeof(header)); // header is Structure
FrameBuffer = gst_sample_get_buffer(sample);
buffer = gst_buffer_append(buf, FrameBuffer);
g_signal_emit_by_name (appsrc2, "push-buffer", buffer, &ret);
if(ret != GST_FLOW_OK)
{
g_printerr("Failed to Push Buffer");
return FALSE;
}
g_print("Successfully Pushed..\n");
/* Above code is for processing frame at server end. */
/* Code For Processing Frame At Receiver END */
// This is in the function new_sample, which is called by g_timeout_add_seconds(0, new_sample, NULL);
if(!gst_app_sink_is_eos ((GstAppSink *)sink))
{
GstSample *sample = NULL;
g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);
buf = gst_sample_get_buffer(sample);
gst_buffer_extract(buf, 0, temp, 8);
if(frameid != temp->FrameID)
{
gst_element_set_state(pipeline2, GST_STATE_PLAYING);
g_print("Frame Header :: %d , Frame ID :: %d\n", temp->FrameHeader, temp->FrameID);
gint size;
size = gst_buffer_get_size(buf);
buffer = gst_buffer_copy_region(buf, GST_BUFFER_OFFSET_NONE, 8, size-8);
g_print("Size :: -- %d\n",size);
g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
if(ret != GST_FLOW_OK)
{
g_printerr("Failed to Push Buffer");
return FALSE;
}
g_print("Successfully Pushed..\n");

GStreamer add probe with playbin

The following code adds a callback that runs each time a frame is displayed, and it works well:
pipeline = gst_parse_launch("filesrc location=/path ! decodebin ! autovideosink", &error);
video_sink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_VIDEO_OVERLAY);
GstPad *pad = gst_element_get_static_pad(video_sink, "sink");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)cb_have_data, data, NULL);
The following code adds the same callback, but it is never called:
pipeline = gst_parse_launch("playbin uri=file:///path", &error);
video_sink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_VIDEO_OVERLAY);
GstPad *pad = gst_element_get_static_pad(video_sink, "sink");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)cb_have_data, data, NULL);
Any idea why, and how to fix it?
playbin has no input pads and no output pads, so you can't put a probe on it, since a probe has to go on a pad.
However, there is a get-video-pad action signal you can run on playbin, and you can attach a probe to the pad it returns.
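A minimal sketch of that suggestion, reusing cb_have_data and data from the question (the 0 is the video stream index; the pad only exists once playbin knows its streams, so this is typically done after preroll, e.g. on an ASYNC_DONE bus message):

GstPad *pad = NULL;
/* Ask playbin for the pad of video stream 0 via the action signal. */
g_signal_emit_by_name(pipeline, "get-video-pad", 0, &pad);
if (pad) {
    gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
                      (GstPadProbeCallback)cb_have_data, data, NULL);
    gst_object_unref(pad);
}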