Below is my pipeline to display and record my stream coming from a UDP source. The problem is that latency increases over time (starting from zero latency) on both my displayed stream and my recording. However, if I only display the stream, there is no latency.
Does anyone have an idea of where the problem could come from?
pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! tee name = t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox !filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
Thanks,
EDIT :
All my save and display code :
void MainWindow::SaveVideo()
{
std::string strPathVideo = m_VideoPath + CreateFileName("mkv");
GError* error = NULL;
GstElement* source;
GstElement* filesink;
GstElement* matrox;
GstElement* clocktime;
//GstElement* compression;
GstElement* textoverlay;
GstElement* sink;
GstPad* padsink;
GstCaps* caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"payload", G_TYPE_INT, 96,
"encoding-name", G_TYPE_STRING, "H264",
NULL);
(*ptrstats).pipeline = gst_parse_launch("udpsrc name=source ! rtpjitterbuffer mode=0 ! rtph264depay ! h264parse ! avdec_h264 ! textoverlay halignment=center valignment=top name=text ! tee name = t ! queue ! avenc_mpeg4 bitrate=10000000 ! matroskamux name=matrox !filesink name=myFile t. ! queue ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
textoverlay = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "text");
g_object_set(G_OBJECT(textoverlay), "text", m_text.ToStdString(), NULL);
}
if (!(*ptrstats).pipeline) {
outfile << "Save : ", error->message ,"\n";
exit(1);
}
sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
filesink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "myFile");
g_object_set(filesink, "location", strPathVideo.c_str(), NULL);
//compression = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "compression");
//g_object_set(G_OBJECT(compression), "bitrate", m_intcompression, NULL);
matrox = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "matrox");
g_object_set(G_OBJECT(matrox), "offset-to-zero", true, NULL);
source = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "source");
g_object_set(G_OBJECT(source), "caps", caps, NULL);
g_object_set(G_OBJECT(source), "port", m_port, NULL);
textoverlay = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "text");
g_object_set(G_OBJECT(textoverlay), "text", m_text.ToStdString(), NULL);
padsink = gst_element_get_static_pad(sink, "sink");
gst_pad_add_probe(padsink, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)buffer_sink, ptrstats, NULL);
gst_object_unref(padsink);
(*ptrstats).bus = gst_element_get_bus(GST_ELEMENT((*ptrstats).pipeline));
#ifdef __WXGTK__
GstElement* sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), m_xid);
#elif defined __WXMSW__
WXWidget hwnd = (*ptrstats).m_renderWindow->GetHandle();
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink),
reinterpret_cast<guintptr>(hwnd));
#endif
PlayHelper();
}
void MainWindow::PlayHelper()
{
GstStateChangeReturn ret =
gst_element_set_state((*ptrstats).pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
outfile << "Playhelper : Unable to set the pipeline to the playing state.\n";
wxLogWarning("Unable to set the pipeline to the playing state.");
gst_object_unref((*ptrstats).pipeline);
(*ptrstats).pipeline = NULL;
}
}
Related
I encountered different behavior with a gstreamer pipeline when implemented in C++ compared to gst-launch-1.0 execution in the command line - the problem is with the bitrate property.
The same problem as described below occurs in both implementations (C++ & command line execution) with the omxh264enc encoder as well, with the control-rate property set to 2 (CBR mode).
The pipeline which used in command line was:
gst-launch-1.0 ximagesrc ! autovideoconvert ! x264enc bitrate=800 pass=0 ! video/x-h264, stream-format=byte-stream ! h264parse ! mpegtsmux ! udpsink host=127.0.0.1 port=1234 sync=false
The C++ implementation is:
GstElement* pipeline;
GstElement* appsrc;
GstElement* videoConvert;
GstElement* encoder;
GstElement* encoderCapsFilter;
GstElement* parser;
GstElement* tsmux;
GstElement* udpsink;
pipeline = gst_pipeline_new ("pipeline");
appsrc = gst_element_factory_make ("appsrc", "source");
videoConvert = gst_element_factory_make ("autovideoconvert", "my_video_convertor");
encoder = gst_element_factory_make ("x264enc", "my_encoder");
encoderCapsFilter = gst_element_factory_make("capsfilter", "my_caps");
parser = gst_element_factory_make ("h264parse", "my_parser");
tsmux = gst_element_factory_make ("mpegtsmux", "my_muxer");
udpsink = gst_element_factory_make ("udpsink", "my_udpsink");
/*Configure appsrc*/
g_object_set (G_OBJECT (appsrc), "caps", gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, 25, 1, NULL), NULL);
g_object_set(G_OBJECT(appsrc), "is-live" , true, NULL);
/*Configure videoConvert*/
/*Configure encoder*/
g_object_set(G_OBJECT(encoder), "bitrate" , 800, NULL);
g_object_set(G_OBJECT(encoder), "pass" , 0, NULL);
/*Configure encoder caps*/
g_object_set(G_OBJECT (encoderCapsFilter), "caps", gst_caps_from_string("video/x-h264, stream-format=byte-stream"), NULL);
/*Configure h264parse*/
/*Configure mpegtsmux*/
/*Configure udpsink*/
g_object_set(G_OBJECT(udpsink), "host" , "127.0.0.1", NULL);
g_object_set(G_OBJECT(udpsink), "port" , 1234, NULL);
g_object_set(G_OBJECT(udpsink), "sync" , false, NULL);
// add
gst_bin_add_many(GST_BIN(pipeline),
appsrc,
videoConvert,
encoder,
encoderCapsFilter,
parser,
tsmux,
udpsink,
NULL);
// link
if (!gst_element_link_many(appsrc,
videoConvert,
encoder,
encoderCapsFilter,
parser,
tsmux,
udpsink,
NULL))
{
g_printerr("Elements could not be linked");
}
bitrate is set to 800kbps and when testing this pipeline from command line with Wireshark the baudrate results around 800-850kbps which is good,
when tested the same pipeline in C++ (to use appsrc instead of ximagesrc) the baudrate results in different and higher bitrate (around 1200-1300kbps).
What is missing to reach the same bitrate result when executed through command line?
Is there more configuration to be done into the gst elements when implemented in C++?
Hi I'm trying run the rtp receiving string in c++. When I use gst-launch-1.0 -v udpsrc port=5000 caps = "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false line i can get stream using cmd.
Code in the c++ also works when i use parse launch pipeline = gst_parse_launch("gst-launch-1.0 -v udpsrc port=5000 caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false",NULL);
Here is the question: when I try to write the code line by line in C++, I think I'm getting a linking error because of "sometimes" pads. Here is what I tried.
To easily debug the code tried to link elements one by one like this
int res = gst_element_link(udp, depay);
However, it seems like I can't link udpsrc to rtph264depay, or decodebin to videoconvert.
Can you help me find where I am making a mistake?
#include <gst/gst.h>
#include <iostream>
// Diagnostic "pad-added" handler: reports the name of every pad that appears.
static void pad_added(GstElement* element, GstPad* pad, gpointer data)
{
    gchar* pad_name = gst_pad_get_name(pad);
    std::cout << "A new pad is created:\n" << pad_name;
    g_free(pad_name); // gst_pad_get_name returns a copy owned by the caller
}
int main()
{
GstElement* pipeline, * conv, * dec, * depay, * udp,*videosink;
GstCaps* udpcaps;
GMainLoop* loop;
// init GStreamer
gst_init(NULL, NULL);
loop = g_main_loop_new(NULL, FALSE);
//pipeline = gst_parse_launch("gst-launch-1.0 -v udpsrc port=5000 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! autovideosink sync=false",NULL);
// setup pipeline
pipeline = gst_pipeline_new("pipeline");
conv = gst_element_factory_make("videoconvert", "conv");
videosink = gst_element_factory_make("autovideosink", "videosink");
//enc = gst_element_factory_make("x264enc", "enc");
depay = gst_element_factory_make("rtph264depay", "depay");
dec = gst_element_factory_make("decodebin", "decode");
//g_object_set(G_OBJECT(depay), "config-interval", 1, NULL);
udp = gst_element_factory_make("udpsrc", "udp");
//g_object_set(G_OBJECT(udp), "address", "127.0.0.1", NULL);
g_object_set(G_OBJECT(udp), "port", 5000, NULL);
udpcaps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"clock-rate", G_TYPE_INT, 9000,
"encoding-name", G_TYPE_STRING, "H264",
"payload", G_TYPE_INT, 96,
NULL);
g_object_set(G_OBJECT(udp), "caps", udpcaps , NULL);
gst_caps_unref(udpcaps);
gst_bin_add_many(GST_BIN(pipeline), udp, depay, dec, conv, videosink, NULL);
// g_signal_connect(udp, "pad-added", G_CALLBACK(pad_added), depay);
// int res2= gst_element_link(dec, conv);
int res = gst_element_link(conv, videosink);
//int res = gst_element_link_many(udp,depay, dec, conv, videosink, NULL);
if (res!=TRUE)
{
std::cout << "Error!\n"<<res<<"\n";
// return -1;
}
// play
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);
// clean up
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
g_main_loop_unref(loop);
return 0;
}
I have a gstreamer media pipeline as shown below which I am trying to convert into a c code. The command line works fine.
gst-launch-1.0 v4l2src device=/dev/video1 ! capsfilter caps=video/x-raw,width=1280,height=720,format=UYVY ! queue ! videoconvert ! queue ! capsfilter caps=video/x-raw,format=NV12,width=1280,height=720,pixel-aspect-ratio=1/1 ! v4l2h264enc extra-controls="controls,h264_level=12,h264_profile=1" ! h264parse ! autovideosink
I have written the code and compilation is successful. When I execute the code, the videosrc element is unable to link to the capsfilter. I have searched the internet and was unsuccessful in rectifying the problem. Can someone help me correct what I am doing wrong?
The code snippet is below:
/* Create the gstreamer elements */
source = gst_element_factory_make ("v4l2src", "source");
capsfilter = gst_element_factory_make ("capsfilter", "Caps-Filter");
capsfilter2 = gst_element_factory_make ("capsfilter", "caps-filter2");
video_convert = gst_element_factory_make ("videoconvert", "Video Convert");
queue1 = gst_element_factory_make ("queue", "Encoded Video Queue 1");
queue2 = gst_element_factory_make ("queue", "Encoded Video Queue 2");
encoder = gst_element_factory_make ("v4l2h264enc", "HW Accelerated Encoder");
H264_pay = gst_element_factory_make ("h264parse", "Payload-encode H264 video into RTP packets");
sink = gst_element_factory_make("autovideosink", "sink");

/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");

// The original `if` had no body, so the following g_object_set silently
// became its body. Report the failure explicitly instead.
if(!source || !capsfilter || !capsfilter2 || !video_convert || !queue1 || !queue2 || !encoder || !H264_pay || !sink)
{
    g_printerr("Not all elements could be created.\n");
    // TODO(review): abort here (return / goto cleanup as appropriate for the
    // enclosing function) instead of continuing with NULL elements.
}

/* Set Source element properties */
g_object_set (G_OBJECT(source), "device", "/dev/video1", NULL);

// Format fixed from "UYUY" (not a valid video format, so caps negotiation
// with v4l2src failed) to "UYVY", matching the working command line.
GstCaps* filtercaps = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)UYVY");
GstCaps* vconvertfilter = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)NV12,pixel-aspect-ratio=1/1");
GstStructure *test = gst_structure_new_from_string("controls,h264_level=12,h264_profile=1");
g_object_set(G_OBJECT(capsfilter), "caps", filtercaps,NULL);
g_object_set(G_OBJECT(capsfilter2), "caps", vconvertfilter, NULL);
g_object_set (G_OBJECT(encoder), "extra-controls", test, NULL);
gst_caps_unref(filtercaps);
gst_caps_unref(vconvertfilter);
gst_structure_free(test); // boxed property: g_object_set stored a copy

/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline),
                  source, capsfilter,
                  queue1, video_convert, queue2,
                  capsfilter2, encoder,
                  H264_pay, sink, NULL);
if(!gst_element_link(source, capsfilter))
{
    g_printerr("Unable to link Source to filter. check your caps. \n");
    gst_object_unref (pipeline);
}
if (gst_element_link_many (capsfilter, queue1, video_convert, NULL) != TRUE)
{
    g_printerr("Capsfilter could not be linked to queue1. \n");
    gst_object_unref (pipeline);
}
if (gst_element_link_many (video_convert, queue2, capsfilter2, encoder, H264_pay, NULL) != TRUE)
{
    g_printerr("video_convert could not be linked to queue2. \n");
    gst_object_unref (pipeline);
}
if(gst_element_link_many (H264_pay, sink, NULL) != TRUE)
{
    g_printerr("parse could not link to sink.\n");
    gst_object_unref (pipeline);
}
I get the error as below;
Unable to link Source to filter. check your caps.
Can somebody help me correct the mistake?
I want to create a pipeline that takes a rtsp stream in input and output jpeg images of different resolutions. While the pipeline is playing, I would like to block a certain branch but I don't manage to block the element.
The command line looks like this:
gst-launch-1.0 -v rtspsrc location="rtsp://ip:port/live.sdp" ! rtph264depay ! h264parse ! avdec_h264 ! videorate ! video/x-raw,framerate=5/1 ! tee name=t ! queue ! videoscale ! video/x-raw,width=320,height=240 ! jpegenc ! multifilesink location=snapshot320-%05d.jpg t. ! queue ! videoscale ! video/x-raw,width=1280,height=720 ! jpegenc ! multifilesink location=snapshot1280-%05d.jpg
I want to be able to block the data from passing through a branch but I can't manage to get it working with the tee element.
I've seen that the function gst_pad_add_probe allows to block a pad of an element.
This is what I did:
1) Get the pads:
// NOTE(review): a tee's outputs are REQUEST pads named "src_%u"; it has no
// static pad called "src", so this call returns NULL. Fetch the already
// requested pad (e.g. "src_0") or probe the downstream queue's pads instead.
srcpad = gst_element_get_static_pad(tee, "src");
// The tee's sink pad is static, so this one does work.
sinkpad = gst_element_get_static_pad(tee, "sink");
2) Add the probe:
gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_IDLE, &GstProbeCallback, this, NULL)
3) Flush the data:
gst_pad_send_event (sinkpad, gst_event_new_eos ());
4) Unref the pads
gst_object_unref (sinkpad);
gst_object_unref (srcpad);
5) Set the pipeline in playing state:
gst_element_set_state(this->pipeline, GST_STATE_PLAYING)
This is the callback given to gst_pad_add_probe:
// Pad probe handler: trace that the probe fired, then discard the data item
// that reached the probed pad.
static GstPadProbeReturn
GstProbeCallback(GstPad* /*pad*/, GstPadProbeInfo* /*info*/, gpointer /*user_data*/) {
  std::cout << "probe callback" << std::endl;
  return GST_PAD_PROBE_DROP;
}
[Update]
If I set the probe on the queue which is right after the tee element, all my branches get blocked.
More code bellow:
// Build the pipeline skeleton: rtspsrc -> depay -> parse -> decode ->
// videorate -> capsfilter, fanned out by a tee into one scale/JPEG branch
// per configured resolution.
this->pipeline = gst_pipeline_new(NULL);
if (this->pipeline == NULL) {
LOG_ERR("Failed to create the pipeline", "image_configuration");
return NULL;
}
// Trunk elements (shared by all branches).
this->elements.tree.src = gst_element_factory_make("rtspsrc", NULL);
this->elements.tree.depay = gst_element_factory_make("rtph264depay", NULL);
this->elements.tree.parse = gst_element_factory_make("h264parse", NULL);
this->elements.tree.dec = gst_element_factory_make("avdec_h264", NULL);
this->elements.tree.rate = gst_element_factory_make("videorate", NULL);
this->elements.tree.ratefilter = gst_element_factory_make("capsfilter", NULL);
this->elements.tree.tee = gst_element_factory_make("tee", NULL);
// One queue/scale/caps/jpegenc/sink chain per output resolution.
// NOTE(review): "redissink" is a custom element — its pad layout is not
// visible here; verify it exposes what the branch linking below expects.
for (auto& branch : this->elements.branches) {
branch.queue = gst_element_factory_make("queue", NULL);
branch.scale = gst_element_factory_make("videoscale", NULL);
branch.scalecaps = gst_element_factory_make("capsfilter", NULL);
branch.enc = gst_element_factory_make("jpegenc", NULL);
branch.sink = gst_element_factory_make("redissink", NULL);
branch.fakesink = gst_element_factory_make("fakesink", NULL);
if (not(branch.queue && branch.scale && branch.scalecaps && branch.sink && branch.enc &&
branch.fakesink)) {
LOG_ERR("Failed to create elements", "image_configuration");
return NULL;
}
}
// Bail out if any trunk element failed to be created.
if (!this->pipeline || !this->elements.tree.src || !this->elements.tree.depay ||
!this->elements.tree.parse || !this->elements.tree.dec || !this->elements.tree.rate ||
!this->elements.tree.ratefilter || !this->elements.tree.tee) {
LOG_ERR("Failed to create elements", "image_configuration");
return NULL;
}
// Configure the trunk: framerate caps on the capsfilter, RTSP location and
// jitterbuffer latency on the source.
this->set_rate_caps(this->elements.tree.ratefilter);
g_object_set(
this->elements.tree.src, "location", this->loc_in.c_str(), "latency", this->latency, NULL);
// Configure each branch: output resolution caps, JPEG quality, and the
// custom redissink's callback/identification properties.
for (auto& branch : this->elements.branches) {
this->set_scale_caps(branch.scalecaps, branch.resolution);
g_object_set(branch.enc, "quality", 50, NULL);
g_object_set(branch.sink, "func", &send_event, NULL);
g_object_set(branch.sink, "camera_id", this->camera_id, NULL);
g_object_set(branch.sink, "is_init", TRUE, NULL);
}
// Add the trunk elements to the pipeline.
gst_bin_add_many(GST_BIN(this->pipeline),
this->elements.tree.src,
this->elements.tree.depay,
this->elements.tree.parse,
this->elements.tree.dec,
this->elements.tree.rate,
this->elements.tree.ratefilter,
this->elements.tree.tee,
NULL);
// Add every branch's elements as well.
for (const auto& branch : this->elements.branches) {
gst_bin_add_many(GST_BIN(this->pipeline),
branch.queue,
branch.scale,
branch.scalecaps,
branch.enc,
branch.sink,
branch.fakesink,
NULL);
}
// Link the trunk. rtspsrc is NOT linked here: its src pad only appears once
// the stream is negotiated, hence the "pad-added" handler below.
if (!gst_element_link_many(this->elements.tree.depay,
this->elements.tree.parse,
this->elements.tree.dec,
this->elements.tree.rate,
this->elements.tree.ratefilter,
this->elements.tree.tee,
NULL)) {
LOG_ERR("Failed to link elements", "image_configuration");
return NULL;
}
g_signal_connect(
this->elements.tree.src, "pad-added", G_CALLBACK(on_pad_added), &this->elements);
// Link tee -> each branch (gst_element_link_many requests a tee src pad).
// NOTE(review): linking branch.sink -> branch.fakesink only works if the
// custom redissink exposes a src pad (ordinary sinks do not) — confirm.
for (const auto& branch : this->elements.branches) {
if (!gst_element_link_many(this->elements.tree.tee,
branch.queue,
branch.scale,
branch.scalecaps,
branch.enc,
branch.sink,
branch.fakesink,
NULL)) {
LOG_ERR("Failed to link elements", "image_configuration");
return NULL;
}
}
if (not this->launch_pipeline()) return NULL;
// Wait for a key press before installing the blocking probe (debug aid).
getchar();
std::cout << "Add probe" << std::endl;
GstPad* srcpad;
GstPad* sinkpad;
// Probe the first branch's QUEUE pads (probing the tee itself blocked all
// branches, per the question's update).
srcpad = gst_element_get_static_pad(this->elements.branches[0].queue, "src");
sinkpad = gst_element_get_static_pad(this->elements.branches[0].queue, "sink");
// Block the queue's src pad, then push EOS into its sink pad to flush the
// data already buffered in this branch.
this->elements.branches[0].probe_id =
gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BLOCK, &GstProbeCallback, this, NULL);
gst_pad_send_event (sinkpad, gst_event_new_eos ());
gst_object_unref (sinkpad);
gst_object_unref (srcpad);
return this->pipeline;
Any help will be appreciated
Situation:
When I'm trying to mux jpegs to mkv file I'll get a zero sized file. I must put encode and decode elements between parser and muxer for correct output. When I'm muxing a h264 video with same code I'll get correct video file, that means the time setting of buffers should be OK(duration and pts parameter). Anyway after bad buffer settings is size of file not zero.
Matroskamux requires, on its sink pad for "image/jpeg", only the "width" and "height" capabilities, but it looks like this is not sufficient. Jpegparse is giving correct values, and the program does not work after manually setting these capabilities either.
Example of pipeline:
This pipeline doesn't work
appsrc ! "image/jpeg" ! jpegparse ! matroskamux ! filesink location=mjpeg.mkv
But this works
appsrc ! "image/jpeg" ! jpegparse ! avdec_mjpeg ! x264enc ! matroskamux ! filesink location=mjpeg.mkv
Example of code:
Working code, but with reencoding
app = new _App();
app->src = (GstAppSrc*)gst_element_factory_make ("appsrc", "source");
if(IsH264Frame(codecType))
app->parser = gst_element_factory_make("h264parse", "parser");
else if(codecType == IMAGE_MJPEG_FRAME)
app->parser = gst_element_factory_make("jpegparse", "parser");
//additional code
app->decoder = gst_element_factory_make("avdec_mjpeg", "decoder");
app->encoder = gst_element_factory_make("x264enc", "encoder");
app->muxer = gst_element_factory_make("matroskamux", "muxer");
app->sink = (GstAppSink*)gst_element_factory_make ("filesink", "sink");
if (!app->pipeline || !app->src || !app->decoder || !app->encoder || !app->muxer || !app->sink || !app->parser)
return;
app->bus = gst_pipeline_get_bus (GST_PIPELINE (app->pipeline));
g_assert(app->bus);
gst_bus_add_watch (app->bus, (GstBusFunc) BusMessage, this);
gst_bin_add_many (GST_BIN (app->pipeline), (GstElement*)app->src, app->decoder, app->encoder, app->muxer, app->sink, app->parser
,NULL);
/* SETUP ELEMENTS */
g_object_set(app->src,
"stream-type", 0,
"format", GST_FORMAT_BUFFERS,
"is-live", true,
"block", true,
NULL);
if(IsH264Frame(codecType)){
g_object_set(app->src, "caps", gst_caps_new_simple("video/x-h264",
NULL), NULL);
} else if(codecType == IMAGE_MJPEG_FRAME) {
g_object_set(app->src, "caps", gst_caps_new_simple("image/jpeg",
"framerate",GST_TYPE_FRACTION,(int)framerate,1,
NULL), NULL);
//additional code
g_object_set(app->decoder, "caps", gst_caps_new_simple("video/x-raw",
NULL), NULL);
g_object_set(app->encoder, "caps", gst_caps_new_simple("video/x-h264",
NULL), NULL);
}
g_signal_connect(app->src, "need-data", G_CALLBACK(StartFeed), this);
g_signal_connect(app->src, "enough-data", G_CALLBACK(StopFeed), this);
g_object_set (app->sink,
"location", GenerateFileName().c_str(),
"buffer-mode", 0,
NULL);
/* LINKING */
GstPad *padDecSrc, *padMuxSink, *parserSrc,
GstPadTemplate *mux_sink_pad_template;
mux_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (app->muxer), "video_%u");
padMuxSink = gst_element_request_pad (app->muxer, mux_sink_pad_template, NULL, NULL);
parserSrc = gst_element_get_static_pad (app->parser, "src");
padEncSrc = gst_element_get_static_pad (app->encoder, "src");
if(!gst_element_link( (GstElement*)app->src, app->parser))
return;
if(IsH264Frame(codecType)){
if(gst_pad_link (parserSrc, padMuxSink) != GST_PAD_LINK_OK)
return;
} else if(codecType == IMAGE_MJPEG_FRAME){
//additional code
if(!gst_element_link( app->parser, app->decoder))
return;
if(!gst_element_link( app->decoder, app->encoder))
return;
if(gst_pad_link (padDecSrc, padMuxSink) != GST_PAD_LINK_OK)
return;
}
if(!gst_element_link( app->muxer, (GstElement*)app->sink))
return;
/* PLAY */
GstStateChangeReturn ret = gst_element_set_state (app->pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
gst_object_unref (app->pipeline);
return;
}
Question:
What I am doing wrong? Any ideas to solve this problem?
I solved this problem by changing the appsrc property "format" from GST_FORMAT_BUFFERS to GST_FORMAT_TIME. Correct timestamps on the buffers alone are not enough.