GStreamer Two Pipelines Synchronization

I have developed an application which takes data from a file, adds metadata to each frame, and transfers the data to a client over RTP/UDP.
At the receiver end the client removes that metadata and has to play the video.
For that I used, at the server end:
pipeline1 :: gst-launch-1.0 filesrc ! videoparse ! appsink
At the appsink I add the metadata and push the buffer to the appsrc of the second pipeline.
pipeline2 :: gst-launch-1.0 appsrc ! rtpgstpay ! udpsink
At the receiver end:
pipeline1 :: gst-launch-1.0 udpsrc ! rtpgstdepay ! appsink
At the appsink I remove the metadata and push the buffer to the appsrc of the second pipeline.
pipeline2 :: gst-launch-1.0 appsrc ! videoparse ! autovideoconvert ! autovideosink
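For concreteness: videoparse cannot guess the raw format from a bare file, so my real launch lines carry explicit parameters. A sketch of the two server pipelines with placeholder values (the file name, 640x480 I420 at 30 fps, host, and port are illustrative, not my actual settings):

gst-launch-1.0 filesrc location=input.yuv ! videoparse width=640 height=480 format=i420 framerate=30/1 ! appsink name=sink
gst-launch-1.0 appsrc name=src ! rtpgstpay ! udpsink host=127.0.0.1 port=5000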
The problem is that at the receiver end I am not getting all the frames, and the video does not play properly: a single frame plays, playback stops, and then again only a single frame plays.
Can anyone provide a solution or suggestion?
/* Code at the server end for processing a frame */
/* This is in a function which is called by g_timeout_add_seconds(0, new_sample, NULL); */
g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);
if (sample == NULL)
    return FALSE;
/* Copy the header into a fresh buffer. gst_buffer_new_wrapped() takes
 * ownership of the memory it is given, so it must not wrap a stack
 * variable such as &header (header is a structure). */
buf = gst_buffer_new_allocate(NULL, sizeof(header), NULL);
gst_buffer_fill(buf, 0, &header, sizeof(header));
FrameBuffer = gst_sample_get_buffer(sample);
/* gst_buffer_append() takes ownership of both of its arguments, but
 * FrameBuffer is still owned by the sample, so append a copy of it. */
buffer = gst_buffer_append(buf, gst_buffer_copy(FrameBuffer));
g_signal_emit_by_name(appsrc2, "push-buffer", buffer, &ret);
gst_buffer_unref(buffer);   /* the action signal does not take ownership */
gst_sample_unref(sample);
if (ret != GST_FLOW_OK)
{
    g_printerr("Failed to push buffer\n");
    return FALSE;
}
g_print("Successfully pushed.\n");
/* The code above processes a frame at the server end. */
/* Code for processing a frame at the receiver end */
// This is in the function new_sample, which is called by g_timeout_add_seconds(0, new_sample, NULL);
if (!gst_app_sink_is_eos((GstAppSink *)sink))
{
    GstSample *sample = NULL;
    g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);
    if (sample == NULL)
        return FALSE;
    buf = gst_sample_get_buffer(sample);
    gst_buffer_extract(buf, 0, temp, 8);   /* read the 8-byte metadata header */
    if (frameid != temp->FrameID)
    {
        gst_element_set_state(pipeline2, GST_STATE_PLAYING);
        g_print("Frame Header :: %d , Frame ID :: %d\n", temp->FrameHeader, temp->FrameID);
        gsize size = gst_buffer_get_size(buf);
        /* Strip the 8-byte header. The second argument must be a
         * GstBufferCopyFlags value, not GST_BUFFER_OFFSET_NONE. */
        buffer = gst_buffer_copy_region(buf, GST_BUFFER_COPY_ALL, 8, size - 8);
        g_print("Size :: %" G_GSIZE_FORMAT "\n", size);
        g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref(buffer);   /* the action signal does not take ownership */
        if (ret != GST_FLOW_OK)
        {
            g_printerr("Failed to push buffer\n");
            gst_sample_unref(sample);
            return FALSE;
        }
        g_print("Successfully pushed.\n");
    }
    gst_sample_unref(sample);
}

Related

Code is stuck in gst_app_sink_pull_sample gStreamer

I want to write all frames from an RTSP source to files using gStreamer. I tried the following code, but it never gets past gst_app_sink_pull_sample and gets stuck.
pipeline = gst_parse_launch("playbin uri=rtsp://IP/ ! jpegenc ! appsink name = sink", NULL);
bus = gst_element_get_bus(pipeline);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
appsink = (GstAppSink*)sink; //tried appsink = GST_APP_SINK(sink); also
sample = gst_app_sink_pull_sample(appsink);
GstBuffer* buffer = gst_sample_get_buffer(sample);
GstMapInfo map;
gst_buffer_map(buffer, &map, GST_MAP_READ);
Mat frame(Size(320, 240), CV_8UC3, (char*)map.data, Mat::AUTO_STEP); //if its a 320x240 size image
imwrite("XYZ.jpg", frame);
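A note on the likely cause of the hang: playbin is a complete, self-contained pipeline, so a description like playbin uri=... ! jpegenc ! appsink cannot actually link the extra elements into it, and the appsink never receives a sample. A sketch of the usual alternative, handing the JPEG branch to playbin through its video-sink property (the URI is a placeholder):

GstElement *pb  = gst_element_factory_make("playbin", NULL);
GstElement *bin = gst_parse_bin_from_description("jpegenc ! appsink name=sink", TRUE, NULL);
g_object_set(pb, "uri", "rtsp://IP/", "video-sink", bin, NULL);
/* gst_bin_get_by_name() searches recursively, so "sink" is still found: */
GstElement *sink = gst_bin_get_by_name(GST_BIN(pb), "sink");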

c++ GStreamer RTP Receive Stream

Hi, I'm trying to run an RTP receive pipeline in C++. When I use gst-launch-1.0 -v udpsrc port=5000 caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false I can get the stream from the command line.
The code also works in C++ when I use gst_parse_launch with the same description (without the gst-launch-1.0 -v prefix and with the inner quotes escaped, as in the commented-out line in the code below).
Here is the question: when I try to build the pipeline element by element in C++, I think I am getting linking errors because of sometimes pads. Here is what I tried.
To debug more easily, I tried to link the elements one by one, like this:
int res = gst_element_link(udp, depay);
However, it seems I cannot link udpsrc to rtph264depay, or decodebin to videoconvert.
Can you help me find where my mistake is?
#include <gst/gst.h>
#include <iostream>
static void pad_added(GstElement* element, GstPad* pad, gpointer data)
{
gchar* name;
name = gst_pad_get_name(pad);
std::cout<<"A new pad is created:\n"<< name;
g_free(name);
}
int main()
{
GstElement* pipeline, * conv, * dec, * depay, * udp,*videosink;
GstCaps* udpcaps;
GMainLoop* loop;
// init GStreamer
gst_init(NULL, NULL);
loop = g_main_loop_new(NULL, FALSE);
//pipeline = gst_parse_launch("gst-launch-1.0 -v udpsrc port=5000 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false",NULL);
// setup pipeline
pipeline = gst_pipeline_new("pipeline");
conv = gst_element_factory_make("videoconvert", "conv");
videosink = gst_element_factory_make("autovideosink", "videosink");
//enc = gst_element_factory_make("x264enc", "enc");
depay = gst_element_factory_make("rtph264depay", "depay");
dec = gst_element_factory_make("decodebin", "decode");
//g_object_set(G_OBJECT(depay), "config-interval", 1, NULL);
udp = gst_element_factory_make("udpsrc", "udp");
//g_object_set(G_OBJECT(udp), "address", "127.0.0.1", NULL);
g_object_set(G_OBJECT(udp), "port", 5000, NULL);
udpcaps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"clock-rate", G_TYPE_INT, 9000,
"encoding-name", G_TYPE_STRING, "H264",
"payload", G_TYPE_INT, 96,
NULL);
g_object_set(G_OBJECT(udp), "caps", udpcaps , NULL);
gst_caps_unref(udpcaps);
gst_bin_add_many(GST_BIN(pipeline), udp, depay, dec, conv, videosink, NULL);
// g_signal_connect(udp, "pad-added", G_CALLBACK(pad_added), depay);
// int res2= gst_element_link(dec, conv);
int res = gst_element_link(conv, videosink);
//int res = gst_element_link_many(udp,depay, dec, conv, videosink, NULL);
if (res!=TRUE)
{
std::cout << "Error!\n"<<res<<"\n";
// return -1;
}
// play
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);
// clean up
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
g_main_loop_unref(loop);
return 0;
}
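A hedged note on the two failing links: udpsrc and rtph264depay both have always pads, so that link should succeed once the caps are set, but decodebin only creates its source pad after it has identified the stream, so gst_element_link(dec, conv) must fail at construction time. A sketch of the usual pattern, reusing the element names from the code above: link statically up to decodebin and after it, then bridge the two halves from a pad-added callback.

static void decodebin_pad_added(GstElement *dec, GstPad *pad, gpointer data)
{
    GstElement *conv = (GstElement *) data;
    GstPad *sinkpad = gst_element_get_static_pad(conv, "sink");
    if (!gst_pad_is_linked(sinkpad))
        gst_pad_link(pad, sinkpad);        /* bridge decodebin -> videoconvert */
    gst_object_unref(sinkpad);
}

/* during setup, instead of linking everything with gst_element_link_many(): */
gst_element_link_many(udp, depay, dec, NULL);   /* static links up to decodebin */
gst_element_link(conv, videosink);              /* static links after it */
g_signal_connect(dec, "pad-added", G_CALLBACK(decodebin_pad_added), conv);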

Latency increase with the time on display + record pipeline

Below is my pipeline to display and record a stream coming from a UDP source. The problem is that latency increases over time (it starts with no latency) on both my stream and my recording. However, if I only display the stream, there is no latency.
Does anyone have an idea of where the problem might come from?
pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! tee name = t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox !filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
Thanks,
EDIT:
Here is all my save and display code:
void MainWindow::SaveVideo()
{
std::string strPathVideo = m_VideoPath + CreateFileName("mkv");
GError* error = NULL;
GstElement* source;
GstElement* filesink;
GstElement* matrox;
GstElement* clocktime;
//GstElement* compression;
GstElement* textoverlay;
GstElement* sink;
GstPad* padsink;
GstCaps* caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"payload", G_TYPE_INT, 96,
"encoding-name", G_TYPE_STRING, "H264",
NULL);
(*ptrstats).pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! textoverlay halignment=center valignment=top name=text ! tee name=t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox ! filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
if (!(*ptrstats).pipeline) {
outfile << "Save : ", error->message ,"\n";
exit(1);
}
sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
filesink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "myFile");
g_object_set(filesink, "location", strPathVideo.c_str(), NULL);
//compression = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "compression");
//g_object_set(G_OBJECT(compression), "bitrate", m_intcompression, NULL);
matrox = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "matrox");
g_object_set(G_OBJECT(matrox), "offset-to-zero", true, NULL);
source = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "source");
g_object_set(G_OBJECT(source), "caps", caps, NULL);
g_object_set(G_OBJECT(source), "port", m_port, NULL);
textoverlay = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "text");
g_object_set(G_OBJECT(textoverlay), "text", m_text.ToStdString(), NULL);
padsink = gst_element_get_static_pad(sink, "sink");
gst_pad_add_probe(padsink, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)buffer_sink, ptrstats, NULL);
gst_object_unref(padsink);
(*ptrstats).bus = gst_element_get_bus(GST_ELEMENT((*ptrstats).pipeline));
#ifdef __WXGTK__
sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), m_xid);
#elif defined __WXMSW__
WXWidget hwnd = (*ptrstats).m_renderWindow->GetHandle();
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink),
reinterpret_cast<guintptr>(hwnd));
#endif
PlayHelper();
}
void MainWindow::PlayHelper()
{
GstStateChangeReturn ret =
gst_element_set_state((*ptrstats).pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
outfile << "Playhelper : Unable to set the pipeline to the playing state.\n";
wxLogWarning("Unable to set the pipeline to the playing state.");
gst_object_unref((*ptrstats).pipeline);
(*ptrstats).pipeline = NULL;
}
}
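One way to test where the growing latency comes from: if the avenc_mpeg4 branch cannot keep up with the incoming stream, buffers pile up behind the tee and the delay grows without bound. As a purely diagnostic sketch (not a fix), bounded leaky queues on both branches show whether the recording branch is back-pressuring the display; element names are the ones from the pipeline above:

/* Diagnostic only: if latency stops growing with leaky queues, the
 * encoder/muxer branch is the bottleneck. */
pipeline = gst_parse_launch(
    "udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 "
    "! tee name=t "
    "t. ! queue max-size-buffers=5 leaky=downstream ! avenc_mpeg4 bitrate=10000000 "
    "! matroskamux name=matrox ! filesink name=myFile "
    "t. ! queue max-size-buffers=5 leaky=downstream ! videoconvert "
    "! d3dvideosink name=mysink sync=false", &error);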

gstreamer rtsp tee appsink can't emit signal new-sample

I am using GStreamer to play an RTSP stream and to process its frames.
rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink
and I wrote the C++ code like this:
#include <gst/gst.h>
void printIt(GList *p) {
if(!p) {
g_print("p null\n");
return ;
}
while(p) {
GstPad *pad = (GstPad*)p->data;
g_print("[%s]", pad->object.name);
p = p->next;
}
g_print("\n");
}
GstFlowReturn new_sample_cb (GstElement * appsink, gpointer udata) {
g_print("new-sample cb\n");
return GST_FLOW_OK;
}
GstFlowReturn new_preroll_cb (GstElement* appsink, gpointer udata) {
g_print("new_preroll_cb cb\n");
return GST_FLOW_OK;
}
int
main (int argc, char *argv[]) {
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Build the pipeline */
pipeline = gst_parse_launch("rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink", NULL);
GstElement *appsink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
printIt(appsink->pads);
g_signal_connect(appsink, "new-sample", G_CALLBACK(new_sample_cb), pipeline);
g_print("sig conn new-sample\n");
g_signal_connect(appsink, "new-preroll", G_CALLBACK(new_preroll_cb), pipeline);
g_print("sig conn new-preroll\n");
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
When I compile and run it, video shows up in the autovideosink, but the appsink's new-sample signal is never called back. What should I do if I want to process a frame in the appsink?
Thanks.
By default appsink favors callbacks over signals for performance reasons (though I wouldn't consider your use case a performance problem). For appsink to emit signals you will need to set its emit-signals property to true; it defaults to false.
P.S. Apart from the above, I think you will need a GMainLoop for the event processing, as demonstrated in the GStreamer examples.
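Concretely, with the variable names from the question's code, the fix is one property set before the pipeline goes to PLAYING; a minimal sketch:

/* new-sample / new-preroll are only emitted when emit-signals is enabled */
g_object_set(appsink, "emit-signals", TRUE, NULL);

For the GMainLoop point: creating a loop with g_main_loop_new(NULL, FALSE) and calling g_main_loop_run(loop) in place of the blocking gst_bus_timed_pop_filtered() call matches the pattern in the GStreamer examples.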

muxing jpeg to mkv using gstreamer

Situation:
When I try to mux JPEGs into an MKV file I get a zero-sized file. I must put encode and decode elements between the parser and the muxer to get correct output. When I mux an H264 video with the same code I get a correct video file, so the time settings on the buffers should be OK (the duration and pts parameters). In any case, even with bad buffer settings the file size is not zero.
matroskamux only requires the "width" and "height" capabilities on its sink pad for "image/jpeg", but it looks like that is not sufficient. jpegparse supplies correct values, and the program does not work after setting these capabilities manually either.
Example of pipeline:
This pipeline doesn't work
appsrc ! "image/jpeg" ! jpegparse ! matroskamux ! filesink location=mjpeg.mkv
But this works
appsrc ! "image/jpeg" ! jpegparse ! avdec_mjpeg ! x264enc ! matroskamux ! filesink location=mjpeg.mkv
Example of code:
Working code, but with reencoding
app = new _App();
app->src = (GstAppSrc*)gst_element_factory_make ("appsrc", "source");
if(IsH264Frame(codecType))
app->parser = gst_element_factory_make("h264parse", "parser");
else if(codecType == IMAGE_MJPEG_FRAME)
app->parser = gst_element_factory_make("jpegparse", "parser");
//additional code
app->decoder = gst_element_factory_make("avdec_mjpeg", "decoder");
app->encoder = gst_element_factory_make("x264enc", "encoder");
app->muxer = gst_element_factory_make("matroskamux", "muxer");
app->sink = (GstAppSink*)gst_element_factory_make ("filesink", "sink");
if (!app->pipeline || !app->src || !app->decoder || !app->encoder || !app->muxer || !app->sink || !app->parser)
return;
app->bus = gst_pipeline_get_bus (GST_PIPELINE (app->pipeline));
g_assert(app->bus);
gst_bus_add_watch (app->bus, (GstBusFunc) BusMessage, this);
gst_bin_add_many (GST_BIN (app->pipeline), (GstElement*)app->src, app->decoder, app->encoder, app->muxer, app->sink, app->parser, NULL);
/* SETUP ELEMENTS */
g_object_set(app->src,
"stream-type", 0,
"format", GST_FORMAT_BUFFERS,
"is-live", true,
"block", true,
NULL);
if(IsH264Frame(codecType)){
g_object_set(app->src, "caps", gst_caps_new_simple("video/x-h264",
NULL), NULL);
} else if(codecType == IMAGE_MJPEG_FRAME) {
g_object_set(app->src, "caps", gst_caps_new_simple("image/jpeg",
"framerate",GST_TYPE_FRACTION,(int)framerate,1,
NULL), NULL);
//additional code
g_object_set(app->decoder, "caps", gst_caps_new_simple("video/x-raw",
NULL), NULL);
g_object_set(app->encoder, "caps", gst_caps_new_simple("video/x-h264",
NULL), NULL);
}
g_signal_connect(app->src, "need-data", G_CALLBACK(StartFeed), this);
g_signal_connect(app->src, "enough-data", G_CALLBACK(StopFeed), this);
g_object_set (app->sink,
"location", GenerateFileName().c_str(),
"buffer-mode", 0,
NULL);
/* LINKING */
GstPad *padEncSrc, *padMuxSink, *parserSrc;   /* padEncSrc was used below but never declared */
GstPadTemplate *mux_sink_pad_template;
mux_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (app->muxer), "video_%u");
padMuxSink = gst_element_request_pad (app->muxer, mux_sink_pad_template, NULL, NULL);
parserSrc = gst_element_get_static_pad (app->parser, "src");
padEncSrc = gst_element_get_static_pad (app->encoder, "src");
if(!gst_element_link( (GstElement*)app->src, app->parser))
return;
if(IsH264Frame(codecType)){
if(gst_pad_link (parserSrc, padMuxSink) != GST_PAD_LINK_OK)
return;
} else if(codecType == IMAGE_MJPEG_FRAME){
//additional code
if(!gst_element_link( app->parser, app->decoder))
return;
if(!gst_element_link( app->decoder, app->encoder))
return;
if(gst_pad_link (padEncSrc, padMuxSink) != GST_PAD_LINK_OK)   /* the encoder's src pad feeds the muxer */
return;
}
if(!gst_element_link( app->muxer, (GstElement*)app->sink))
return;
/* PLAY */
GstStateChangeReturn ret = gst_element_set_state (app->pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
gst_object_unref (app->pipeline);
return;
}
Question:
What am I doing wrong? Any ideas for solving this problem?
I solved this problem by changing the appsrc property "format" from GST_FORMAT_BUFFERS to GST_FORMAT_TIME. Correct timestamps on the buffers alone are not enough.
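For reference, that is a one-property change in the g_object_set() call from the code above; a minimal sketch:

/* Per the answer above: appsrc must operate in time format for matroskamux;
 * correct PTS/duration on the buffers alone are not enough. */
g_object_set(app->src,
    "stream-type", 0,
    "format", GST_FORMAT_TIME,   /* was GST_FORMAT_BUFFERS */
    "is-live", true,
    "block", true,
    NULL);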