Restart gst pipeline - gstreamer

For some reason, when I try to restart a working GStreamer pipeline that reads and decodes an mp4 or mkv file, I get a "not-linked" failure at the second state change to PLAYING. The sequence of actions looks like:
1. create pipeline (filesrc ! decodebin ! fakesink)
2. change state to PLAYING
3. wait for state change (gst_element_get_state)
4. stop pipeline (NULL)
5. restart pipeline by changing state to PLAYING again
6. wait for state change (gst_element_get_state)
So, at step 6 I get the following error: streaming stopped, reason not-linked (-1). Whether it is qtdemux or matroskademux doesn't matter; each time I get the not-linked error. But why? This pipeline works, it was already working. Why does it fail?
Minimal example:
#include "gst/gstclock.h"
#include "gst/gstelement.h"
#include "gst/gstparse.h"
#include <cstdlib>
#include <gst/gst.h>
#include <stdio.h>
#include <string>
using namespace std::string_literals;
int main(int argc, char *argv[]) {
gst_init(&argc, &argv);
if (argc != 2) {
fprintf(stderr, "set filename as first argument\n");
return EXIT_FAILURE;
}
// clang-format off
std::string pipeline_str = "filesrc location="s + argv[1] + " ! "
"decodebin ! "
"fakesink"
;
// clang-format on
GError *gerr = nullptr;
GstElement *pipeline = gst_parse_launch(pipeline_str.c_str(), &gerr);
if (pipeline == nullptr) {
fprintf(stderr, "can not parse pipeline: %s\n", gerr->message);
return EXIT_FAILURE;
}
// start pipeline
fprintf(stderr, "start pipeline\n");
GstStateChangeReturn result;
GstState state;
if (gst_element_set_state(pipeline, GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
fprintf(stderr, "set: can not start pipeline\n");
return EXIT_FAILURE;
}
if (gst_element_get_state(pipeline, &state, nullptr, GST_CLOCK_TIME_NONE) ==
GST_STATE_CHANGE_FAILURE) {
fprintf(stderr, "get: can not start pipeline\n");
return EXIT_FAILURE;
}
fprintf(stderr, "current pipeline state: %d\n", state);
// stop pipeline
fprintf(stderr, "stop pipeline\n");
if (gst_element_set_state(pipeline, GST_STATE_NULL) ==
GST_STATE_CHANGE_FAILURE) {
fprintf(stderr, "set: can not stop pipeline\n");
return EXIT_FAILURE;
}
if (gst_element_get_state(pipeline, &state, nullptr, GST_CLOCK_TIME_NONE) ==
GST_STATE_CHANGE_FAILURE) {
fprintf(stderr, "get:can not stop pipeline\n");
return EXIT_FAILURE;
}
fprintf(stderr, "current pipeline state: %d\n", state);
// restart pipeline
fprintf(stderr, "restart pipeline\n");
if (gst_element_set_state(pipeline, GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
fprintf(stderr, "set: can not restart pipeline\n");
return EXIT_FAILURE;
}
if (gst_element_get_state(pipeline, &state, nullptr, GST_CLOCK_TIME_NONE) ==
GST_STATE_CHANGE_FAILURE) {
fprintf(stderr, "get: can not restart pipeline\n");
return EXIT_FAILURE;
}
fprintf(stderr, "current pipeline state: %d\n", state);
return EXIT_SUCCESS;
}
Program output with GST_DEBUG=2:
start pipeline
0:00:00.010198214 22522 0x55e7aa2e8b60 WARN basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<filesrc0> pad not activated yet
0:00:00.013832890 22522 0x7f4b6007ba80 WARN qtdemux qtdemux_types.c:233:qtdemux_type_get: unknown QuickTime node type pasp
0:00:00.013857586 22522 0x7f4b6007ba80 WARN qtdemux qtdemux.c:3031:qtdemux_parse_trex:<qtdemux0> failed to find fragment defaults for stream 1
0:00:00.016950917 22522 0x7f4b6007ba80 WARN vaapi gstvaapiutils.c:77:gst_vaapi_warning: va_getDriverName() failed with unknown libva error, driver_name=(null)
0:00:00.065998280 22522 0x7f4b6007ba80 WARN vaapi gstvaapiutils.c:77:gst_vaapi_warning: va_getDriverName() failed with unknown libva error, driver_name=(null)
current pipeline state: 4
stop pipeline
current pipeline state: 1
restart pipeline
0:00:00.085806957 22522 0x55e7aa2e8b60 WARN basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<filesrc0> pad not activated yet
0:00:00.086340274 22522 0x7f4b58007d40 WARN qtdemux qtdemux_types.c:233:qtdemux_type_get: unknown QuickTime node type pasp
0:00:00.086362061 22522 0x7f4b58007d40 WARN qtdemux qtdemux.c:3031:qtdemux_parse_trex:<qtdemux1> failed to find fragment defaults for stream 1
0:00:00.103089409 22522 0x7f4b58007d40 WARN vaapi gstvaapiutils.c:77:gst_vaapi_warning: va_getDriverName() failed with unknown libva error, driver_name=(null)
0:00:00.119275330 22522 0x7f4b58007d40 WARN qtdemux qtdemux.c:6073:gst_qtdemux_loop:<qtdemux1> error: Internal data stream error.
0:00:00.119296247 22522 0x7f4b58007d40 WARN qtdemux qtdemux.c:6073:gst_qtdemux_loop:<qtdemux1> error: streaming stopped, reason not-linked (-1)
get: can not restart pipeline
0:00:00.119853982 22522 0x55e7aa2e8280 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<vaapi-queue> error: Internal data stream error.
0:00:00.119865713 22522 0x55e7aa2e8280 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<vaapi-queue> error: streaming stopped, reason not-linked (-1)

Related

gstreamer rtsp tee appsink can't emit signal new-sample

I am using GStreamer to play and process an RTSP stream:
rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink
And I wrote C++ code like this:
#include <gst/gst.h>

void printIt(GList *p) {
  if (!p) {
    g_print("p null\n");
    return;
  }
  while (p) {
    GstPad *pad = (GstPad *)p->data;
    g_print("[%s]", pad->object.name);
    p = p->next;
  }
  g_print("\n");
}

GstFlowReturn new_sample_cb(GstElement *appsink, gpointer udata) {
  g_print("new-sample cb\n");
  return GST_FLOW_OK;
}

GstFlowReturn new_preroll_cb(GstElement *appsink, gpointer udata) {
  g_print("new_preroll_cb cb\n");
  return GST_FLOW_OK;
}

int main(int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init(&argc, &argv);

  /* Build the pipeline */
  pipeline = gst_parse_launch("rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink", NULL);

  GstElement *appsink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
  printIt(appsink->pads);
  g_signal_connect(appsink, "new-sample", G_CALLBACK(new_sample_cb), pipeline);
  g_print("sig conn new-sample\n");
  g_signal_connect(appsink, "new-preroll", G_CALLBACK(new_preroll_cb), pipeline);
  g_print("sig conn new-preroll\n");

  /* Start playing */
  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus(pipeline);
  msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
                                   GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Free resources */
  if (msg != NULL)
    gst_message_unref(msg);
  gst_object_unref(bus);
  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);
  return 0;
}
When I compile and run it, video is output to the autovideosink, but the appsink's new-sample signal is never invoked. What should I do if I want to process a frame in the appsink?
Thanks.
By default, appsink favors using callbacks instead of signals for performance reasons (though I wouldn't consider your use case a performance problem). For appsink to emit signals, you will need to set its emit-signals property to TRUE; it defaults to FALSE.
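A minimal sketch of that change, right after fetching the element:

GstElement *appsink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
g_object_set(appsink, "emit-signals", TRUE, NULL); /* enable before connecting signals */
/* or, equivalently, inline in the launch string:
   "... ! appsink name=mysink emit-signals=true" */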
P.S. Apart from the above, I think you will need a GMainLoop for event processing, as demonstrated in the GStreamer examples.
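A hedged sketch of that GMainLoop approach, replacing the blocking gst_bus_timed_pop_filtered() call (bus_cb here is a hypothetical callback that quits the loop on ERROR/EOS):

GMainLoop *loop = g_main_loop_new(NULL, FALSE);
GstBus *bus = gst_element_get_bus(pipeline);
gst_bus_add_watch(bus, bus_cb, loop); /* bus_cb: g_main_loop_quit(loop) on ERROR/EOS */
gst_object_unref(bus);
g_main_loop_run(loop);
g_main_loop_unref(loop);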

In gstreamer can't remove tee section after EOS

I am trying to create a webcam on an embedded device and learn the GStreamer C implementation at the same time. I have dealt with GStreamer launch pipelines for a while, so I am already somewhat familiar with GStreamer.
My end goal is to eventually have a pipeline that will dynamically stream video, record video, and save pictures, all from external commands. I've started small with my implementation, and right now I'm focusing on being able to take a picture in one branch of a tee while the other branch is still flowing data. The other branch is just a fakesink right now, but eventually it will be an H.264 encoder with a muxer, saving videos with audio.
Here is a simple view of my pipeline:
v4l2src ! capsfilter ! tee ! queue ! fakesink tee. ! queue ! videoconvert ! pngenc ! filesink
My idea was to dynamically add the picture portion of the pipeline while it is running.
The flow of my program goes like this:
picture event is triggered (currently a simple timer) -> add blocking probe on tee -> add picture pipeline and link it to the tee -> set it to PLAYING -> set blocking probe on the filesink to verify it has received data -> send EOS down the pipeline starting at the videoconvert -> set blocking probe on the tee pad linked to the picture pipeline -> set the picture pipeline to NULL and remove it and the tee pad
When the program executes, the EOS probe on the tee pad for the picture pipeline is never called; instead the whole pipeline goes to EOS, I get an internal data stream error, and no picture.
I want to make sure the filesink gets only one buffer, as I can't stop the v4l2src stream or give it num-buffers=1. I guess my problem right now is: how do I verify that the filesink gets only one buffer? Which pad should I send the EOS event on in order for it to properly save the picture? And lastly, how do I make sure only this one branch sees the EOS?
I've pored over all of the GStreamer tutorials and SO questions, but most are either unanswered or haven't helped my situation.
Here is my code:
#include <QDebug>
#include <QTimer>

#include "gstpipeline.hpp"
#include "gsttypes.hpp"

using namespace INSP_GST_TYPES;

gstpipeline::gstpipeline()
    : mV4l2Src(NULL)
    , mEncoder(NULL)
    , mPngEncoder(NULL)
    , mVideoFileSink(NULL)
    , mPictureFileSink(NULL)
    , mRawCapsFilter(NULL)
    , mEncodedCapsFilter(NULL)
    , mEncoderVideoConvert(NULL)
    , mPngVideoConvert(NULL)
    , mEncoderQueue(NULL)
    , mMatroskaMux(NULL)
    , mPipeline(NULL)
{
}

void gstpipeline::init()
{
    mV4l2Src = gst_element_factory_make("v4l2src", V4L2SOURCE_NAME);
    mRawCapsFilter = gst_element_factory_make("capsfilter", RAW_CAPS_NAME);
    mRawFakesinkQueue = gst_element_factory_make("queue", RAW_FAKESINK_QUEUE_NAME);
    mRawFakeSink = gst_element_factory_make("fakesink", RAW_FAKESINK_NAME);
    mRawTee = gst_element_factory_make("tee", RAW_TEE_NAME);
    mPipeline = gst_pipeline_new(PIPELINE_NAME);
    mRawCaps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "NV12",
                                   "width", G_TYPE_INT, 1280,
                                   "height", G_TYPE_INT, 720,
                                   "framerate", GST_TYPE_FRACTION, 30, 1,
                                   NULL);
    g_object_set(mRawCapsFilter, "caps", mRawCaps, NULL);

    if(!mPipeline || !mV4l2Src || !mRawCapsFilter || !mRawTee || !mRawFakesinkQueue || !mRawFakeSink)
    {
        qCritical() << "Failed to create main gst elements";
        return;
    }
    else
    {
        qWarning() << "created the initial pipeline";
    }

    linkRawPipeline();
}
void gstpipeline::linkRawPipeline()
{
    gst_bin_add_many(GST_BIN(mPipeline), mV4l2Src, mRawCapsFilter, mRawTee, mRawFakesinkQueue, mRawFakeSink, NULL);
    g_object_set(mPipeline, "message-forward", TRUE, NULL);

    if(gst_element_link_many(mV4l2Src, mRawCapsFilter, mRawTee, NULL) != TRUE)
    {
        qCritical() << "Failed to link raw pipeline";
        return;
    }
    if(gst_element_link_many(mRawFakesinkQueue, mRawFakeSink, NULL) != TRUE)
    {
        qCritical() << "Failed to link fakesink pipeline";
        return;
    }

    /* Manually link the tee, which has "Request" pads */
    GstPad* tee_fakesink_pad = gst_element_get_request_pad(mRawTee, "src_%u");
    qWarning("Obtained request pad %s for fakesink branch.", gst_pad_get_name(tee_fakesink_pad));
    GstPad* raw_queue_pad = gst_element_get_static_pad(mRawFakesinkQueue, "sink");
    if(gst_pad_link(tee_fakesink_pad, raw_queue_pad) != GST_PAD_LINK_OK)
    {
        qCritical("raw tee could not be linked.");
    }
    gst_object_unref(tee_fakesink_pad);
    gst_object_unref(raw_queue_pad);

    if(gst_element_set_state(mPipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
    {
        qCritical() << "Unable to set the pipeline to the playing state";
        gst_object_unref(mPipeline);
    }
    else
    {
        qWarning() << "set pipeline to playing";
        GMainLoop* loop = g_main_loop_new(NULL, FALSE);
        gst_bus_add_watch(GST_ELEMENT_BUS(mPipeline), sMainBusCallback, loop);
        QTimer::singleShot(1000, this, SLOT(onBusTimeoutExpired()));
    }
}
void gstpipeline::onBusTimeoutExpired()
{
    blockRawPipeline();
}

void gstpipeline::blockRawPipeline()
{
    qWarning() << "Blocking raw pipeline";
    GstPad* srcpad = gst_element_get_static_pad(mRawFakesinkQueue, SRC_PAD);
    gst_pad_add_probe(srcpad,
                      (GstPadProbeType)(GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_IDLE),
                      sRawFakesinkQueueBlockedCallback, NULL, NULL);
    g_object_unref(srcpad);
    qWarning() << "added fakesink queue probe";
}
GstPadProbeReturn gstpipeline::sRawFakesinkQueueBlockedCallback(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    gst_pad_remove_probe(pad, GST_PAD_PROBE_INFO_ID(info));

    // create the picturesink pipeline and link it to a new src pad on the raw tee
    mPictureQueue = gst_element_factory_make("queue", RAW_PICTURE_QUEUE_NAME);
    mPngEncoder = gst_element_factory_make("pngenc", PNG_ENC_NAME);
    mPictureFileSink = gst_element_factory_make("filesink", PICTURESINK_NAME);
    mPngVideoConvert = gst_element_factory_make("videoconvert", VIDEOCONVERT_PNG_NAME);
    if(!mPngEncoder || !mPictureFileSink || !mPngVideoConvert)
    {
        qCritical() << "failed to make picturesink elements";
    }
    g_object_set(G_OBJECT(mPictureFileSink), "location", "/mnt/userdata/pipelinetest.png", NULL);

    gst_bin_add_many(GST_BIN(mPipeline), mPictureQueue, mPngVideoConvert,
                     mPngEncoder, mPictureFileSink, NULL);
    if(gst_element_link_many(mPictureQueue, mPngVideoConvert, mPngEncoder, mPictureFileSink, NULL) != TRUE)
    {
        qCritical() << "failed to link picture pipeline";
    }

    GstPad* tee_picturesink_pad = gst_element_get_request_pad(mRawTee, "src_%u");
    qWarning("Obtained request pad %s for picturesink branch.", gst_pad_get_name(tee_picturesink_pad));
    GstPad* raw_picture_queue_pad = gst_element_get_static_pad(mPictureQueue, "sink");
    if(gst_pad_link(tee_picturesink_pad, raw_picture_queue_pad) != GST_PAD_LINK_OK)
    {
        qCritical("picture tee could not be linked.");
    }

    gst_element_sync_state_with_parent(mPictureQueue);
    gst_element_sync_state_with_parent(mPngVideoConvert);
    gst_element_sync_state_with_parent(mPngEncoder);
    gst_element_sync_state_with_parent(mPictureFileSink);
    qWarning() << "done adding picturesink";

    // set a data block to see when the filesink gets data so we can send an EOS
    GstPad* sinkpad = gst_element_get_static_pad(mPictureFileSink, SINK_PAD);
    gst_pad_add_probe(sinkpad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM),
                      sPictureSinkDownstreamBlockProbe, NULL, NULL);
    g_object_unref(sinkpad);

    return GST_PAD_PROBE_DROP;
}
GstPadProbeReturn gstpipeline::sPictureSinkDownstreamBlockProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    gst_pad_remove_probe(pad, GST_PAD_PROBE_INFO_ID(info));

    // this is a data-blocking pad probe on the picture filesink
    qWarning() << "setting the EOS event probe on the picturesink";
    gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
                      sPictureSinkEOSCallback, NULL, NULL);

    qWarning() << "sending eos through videoconvert";
    gst_element_send_event(mPngVideoConvert, gst_event_new_eos());
    qWarning() << "exiting pad probe";
    return GST_PAD_PROBE_PASS;
}
GstPadProbeReturn gstpipeline::sPictureSinkEOSCallback(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    gst_pad_remove_probe(pad, GST_PAD_PROBE_INFO_ID(info));
    if(GST_EVENT_TYPE(GST_PAD_PROBE_INFO_DATA(info)) == GST_EVENT_EOS)
    {
        qWarning() << "setting raw queue pad block";
        gst_pad_add_probe(pad, (GstPadProbeType)(GST_PAD_PROBE_TYPE_IDLE),
                          sRawQueueBlockedCallback, NULL, NULL);
    }
    else
    {
        qCritical() << "picturesink pad probe is NOT EOS";
    }
    return GST_PAD_PROBE_HANDLED;
}
GstPadProbeReturn gstpipeline::sRawQueueBlockedCallback(GstPad* pad, GstPadProbeInfo* info, gpointer user_data)
{
    if(GST_EVENT_TYPE(GST_PAD_PROBE_INFO_DATA(info)) == GST_EVENT_EOS)
    {
        gst_pad_remove_probe(pad, GST_PAD_PROBE_INFO_ID(info));

        gst_element_set_state(mPictureFileSink, GST_STATE_NULL);
        gst_element_set_state(mPngEncoder, GST_STATE_NULL);
        gst_element_set_state(mPngVideoConvert, GST_STATE_NULL);
        gst_element_set_state(mPictureQueue, GST_STATE_NULL);

        // unlink the picture pipeline from the src pad of the raw tee and remove that pad
        GstPad* tee_picturesink_pad = gst_element_get_static_pad(mRawTee, "src_1");
        qWarning("Obtained request pad %s for picturesink branch.", gst_pad_get_name(tee_picturesink_pad));
        GstPad* raw_picture_queue_pad = gst_element_get_static_pad(mPictureQueue, "sink");
        if(gst_pad_unlink(tee_picturesink_pad, raw_picture_queue_pad) != TRUE)
        {
            qCritical("picture tee could not be unlinked.");
        }
        if(gst_element_remove_pad(mRawTee, tee_picturesink_pad) != TRUE)
        {
            qCritical("could not remove raw tee pad");
        }
        g_object_unref(tee_picturesink_pad);
        g_object_unref(raw_picture_queue_pad);

        gst_bin_remove_many(GST_BIN(mPipeline), mPictureQueue, mPngVideoConvert, mPngEncoder, mPictureFileSink, NULL);
        qWarning() << "we have set the fakesink back up";
    }
    else
    {
        qCritical() << "picturesink pad probe is NOT EOS";
    }
    return GST_PAD_PROBE_PASS;
}
gboolean gstpipeline::sMainBusCallback(GstBus* bus, GstMessage* msg, gpointer user_data)
{
    GMainLoop* loop = (GMainLoop*)user_data;
    switch(GST_MESSAGE_TYPE(msg))
    {
    case GST_MESSAGE_ERROR:
    {
        GError* err = NULL;
        gchar* dbg;
        gst_message_parse_error(msg, &err, &dbg);
        gst_object_default_error(msg->src, err, dbg);
        g_clear_error(&err);
        g_free(dbg);
        g_main_loop_quit(loop);
        break;
    }
    case GST_MESSAGE_EOS:
        g_print("we reached EOS\n");
        g_main_loop_quit(loop);
        break;
    default:
        // g_print("msg: %s\n", GST_MESSAGE_TYPE_NAME(msg));
        break;
    }
    return TRUE; // keep the bus watch installed
}
So I managed to figure this out myself. Here are the steps I took to get this working (a condensed sketch of the pattern follows the list):
1. Add a blocking probe on the fakesink queue.
2. Add the picture pipeline.
3. Put a blocking data probe on the picture filesink.
4. Wait until a segment buffer reaches the filesink.
5. Put a blocking probe on the picture pipeline's queue.
6. In the queue blocking probe, send the EOS event and remove the picture pipeline.
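A hedged, condensed sketch of the unlink-a-tee-branch part of that pattern (the element pointers tee and picture_queue are illustrative; real code would pass them in through user_data):

static GstPadProbeReturn unlink_picture_branch_cb(GstPad *tee_src_pad, GstPadProbeInfo *info, gpointer user_data)
{
    /* The pad is idle, so no buffer is in flight: detach the branch. */
    GstPad *sinkpad = gst_element_get_static_pad(picture_queue, "sink");
    gst_pad_unlink(tee_src_pad, sinkpad);
    /* An EOS sent into the detached branch is seen only by that branch,
       so pngenc/filesink can finish the file while the fakesink branch
       keeps running untouched. */
    gst_pad_send_event(sinkpad, gst_event_new_eos());
    gst_object_unref(sinkpad);
    gst_element_release_request_pad(tee, tee_src_pad);
    gst_object_unref(tee_src_pad);
    /* Once the EOS reaches the filesink (watch for it with a downstream
       event probe there, or via the forwarded element message when
       message-forward=true), set the branch elements to GST_STATE_NULL
       and gst_bin_remove() them. */
    return GST_PAD_PROBE_REMOVE;
}

/* Trigger the teardown safely from an idle probe on the tee's request pad: */
gst_pad_add_probe(tee_src_pad, GST_PAD_PROBE_TYPE_IDLE, unlink_picture_branch_cb, NULL, NULL);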

How to implement GStreamer tee in C code

I have the following working pipeline. It has been tested using both the command-line tool gst-launch-1.0 and the function gst_parse_launch(), and works in both cases.
videotestsrc ! video/x-raw,width=640,height=480 ! videocrop left=80 right=80 ! tee name=t ! queue ! glupload ! glimagesink t. ! queue ! jpegenc ! avimux ! filesink location=output.avi
I've tried to set it up manually in code, but I'm now stuck on the following error (the application opens, but no video is displayed):
Error received from element videotestsrc0 : Internal data flow error.
Debugging information: gstbasesrc.c(2948): gst_base_src_loop ():
/GstPipeline:pipeline0/GstVideoTestSrc:videotestsrc0: streaming task
paused, reason not-negotiated (-4)
I'm using GStreamer in a Qt application, and the glimagesink links the video to a QML type. All code related to GStreamer is located in a class called GStreamer. The entire .cpp file is posted below, in case the issue is located somewhere I wouldn't guess. I apologize for the non-relevant code.
static gboolean busCallback(GstBus* bus, GstMessage* message, gpointer data);

GStreamer::GStreamer(QQuickItem* parent) : QQuickItem(parent)
{
    qDebug() << "Constructed GStreamer";
}

void GStreamer::createPipeline()
{
    qDebug() << "Creating pipeline";
    if(m_source.isEmpty()){
        qDebug() << "Error: Missing source property for GStreamer component";
        return;
    }
    if(m_videoItem.isEmpty()){
        qDebug() << "Error: Missing videoItem property for GStreamer component";
        return;
    }

    m_pipeline = gst_pipeline_new(NULL);
    m_sink = NULL;

    QByteArray ba = m_source.toLatin1();
    m_src = gst_element_factory_make(ba.data(), NULL);
    g_assert(m_src);

    m_filter = gst_element_factory_make("capsfilter", "filter");
    g_assert(m_filter);
    g_object_set(G_OBJECT(m_filter), "caps",
                 gst_caps_new_simple("video/x-raw",
                                     "width", G_TYPE_INT, 640,
                                     "height", G_TYPE_INT, 480,
                                     NULL),
                 NULL);
    m_convert = gst_element_factory_make("videoconvert", NULL);
    g_assert(m_convert);

    m_crop = gst_element_factory_make("videocrop", "crop");
    g_assert(m_crop);
    g_object_set(G_OBJECT(m_crop), "left", 80, "right", 80, NULL);

    // Tee
    m_tee = gst_element_factory_make("tee", "videotee");
    g_assert(m_tee);

    // Display queue
    m_displayQueue = gst_element_factory_make("queue", "displayQueue");
    g_assert(m_displayQueue);
    m_upload = gst_element_factory_make("glupload", NULL);
    g_assert(m_upload);
    m_sink = gst_element_factory_make("qmlglsink", NULL);
    g_assert(m_sink);

    // Record queue
    m_recordQueue = gst_element_factory_make("queue", "recordQueue");
    g_assert(m_recordQueue);
    m_encode = gst_element_factory_make("jpegenc", NULL);
    g_assert(m_encode);
    m_mux = gst_element_factory_make("avimux", NULL);
    g_assert(m_mux);
    m_filesink = gst_element_factory_make("filesink", NULL);
    g_assert(m_filesink);
    g_object_set(G_OBJECT(m_filesink), "location", "output.avi", NULL);

    gst_bin_add_many(GST_BIN(m_pipeline), m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL);
    gst_bin_add_many(GST_BIN(m_pipeline), m_tee, m_displayQueue, m_recordQueue, m_encode, m_mux, m_filesink, NULL);

    // If I only link this simple pipeline, it works fine
    /*
    if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL)){
        qDebug() << "Unable to link source";
    }
    */
    if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_tee, NULL)){
        qDebug() << "Unable to link source";
    }
    if(!gst_element_link_many(m_displayQueue, m_upload, m_sink, NULL)){
        qDebug() << "Unable to link display queue";
    }
    if(!gst_element_link_many(m_recordQueue, m_encode, m_mux, m_filesink, NULL)){
        qDebug() << "Unable to link record queue";
    }

    GstPad* teeDisplayPad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad* queueDisplayPad = gst_element_get_static_pad(m_displayQueue, "sink");
    GstPad* teeRecordPad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad* queueRecordPad = gst_element_get_static_pad(m_recordQueue, "sink");
    if(gst_pad_link(teeDisplayPad, queueDisplayPad) != GST_PAD_LINK_OK){
        qDebug() << "Unable to link display tee";
    }
    if(gst_pad_link(teeRecordPad, queueRecordPad) != GST_PAD_LINK_OK){
        qDebug() << "Unable to link record tee";
    }
    //gst_object_unref(teeDisplayPad);
    gst_object_unref(queueDisplayPad);
    //gst_object_unref(teeRecordPad);
    gst_object_unref(queueRecordPad);

    QQuickItem* videoItem = window()->findChild<QQuickItem*>(m_videoItem);
    g_object_set(m_sink, "widget", videoItem, NULL);

    // This will call gst_element_set_state(m_pipeline, GST_STATE_PLAYING) when the window is ready
    window()->scheduleRenderJob(new SetPlaying(m_pipeline), QQuickWindow::BeforeSynchronizingStage);

    // Create the loop before installing the bus watch that receives it
    m_loop = g_main_loop_new(NULL, false);
    m_bus = gst_element_get_bus(m_pipeline);
    gst_bus_add_watch(m_bus, busCallback, m_loop);
    gst_object_unref(m_bus);
    g_main_loop_run(m_loop);
}
static gboolean busCallback(GstBus* bus, GstMessage* message, gpointer data){
    qDebug() << "Callback function reached";
    switch(GST_MESSAGE_TYPE(message)){
    case GST_MESSAGE_ERROR:
    {
        GError* error;
        gchar* debugInfo;
        gst_message_parse_error(message, &error, &debugInfo);
        qDebug() << "Error received from element" << GST_OBJECT_NAME(message->src) << ":" << error->message;
        qDebug() << "Debugging information:" << (debugInfo ? debugInfo : "none");
        g_clear_error(&error);
        g_free(debugInfo);
        g_main_loop_quit(static_cast<GMainLoop*>(data));
        break;
    }
    case GST_MESSAGE_EOS:
        qDebug() << "End-Of-Stream reached.";
        g_main_loop_quit(static_cast<GMainLoop*>(data));
        break;
    default:
        qDebug() << "Unexpected message received.";
        break;
    }
    return true;
}
/**
 * The rest of the code is probably not relevant. It contains
 * only the destructor and some getters and setters.
 **/

GStreamer::~GStreamer()
{
    gst_object_unref(m_bus);
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(m_pipeline);
}

QString GStreamer::source() const
{
    return m_source;
}

void GStreamer::setSource(const QString& source)
{
    if(source != m_source){
        m_source = source;
    }
}

QString GStreamer::videoItem() const
{
    return m_videoItem;
}

void GStreamer::setVideoItem(const QString& videoItem)
{
    if(videoItem != m_videoItem){
        m_videoItem = videoItem;
    }
}
All member variables are defined in the .h file.
If I don't add the tee element to the bin and link it into the pipeline, the video shows up on the screen as expected. So I guess I'm messing up the pads on the tee element.
I've been following the tutorials in the GStreamer documentation, so I don't understand why it's not working.
Hope someone can help.
OK, so the difference between the gst-launch line provided and the application code is the use of the qmlglsink element in place of glimagesink.
The problem is that qmlglsink only accepts RGBA-formatted video buffers, while the jpegenc in the other branch of the tee does not accept RGBA-formatted video buffers. This leads to a negotiation problem, as there is no common format supported by both branches of the tee.
The fix is to add a videoconvert element before jpegenc, or a glcolorconvert element before qmlglsink, so that both branches of the tee can negotiate to the same video format; a sketch follows.
Side note: glimagesink internally contains glupload ! glcolorconvert ! actual-sink, so it is already converting video formats.
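A minimal sketch of the first option (m_recordConvert is a hypothetical new member, following the naming already used in the question):

m_recordConvert = gst_element_factory_make("videoconvert", NULL);
g_assert(m_recordConvert);
gst_bin_add(GST_BIN(m_pipeline), m_recordConvert);
// The record branch becomes: queue ! videoconvert ! jpegenc ! avimux ! filesink
if(!gst_element_link_many(m_recordQueue, m_recordConvert, m_encode, m_mux, m_filesink, NULL)){
    qDebug() << "Unable to link record queue";
}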

gst_element_link failed in second thread

I have a problem with linking elements in a second thread.
I create two threads, in each of which I create a pipeline to store an IP camera stream to a file.
When the second thread tries to link the stream elements (h264parse -> matroskamux), I receive a segmentation fault. GStreamer prints some errors:
(Cam_recorder:5529): GLib-GObject-WARNING **: cannot register existing type 'GstMatroskamuxPad'
(Cam_recorder:5529): GLib-GObject-CRITICAL **: g_object_new: assertion 'G_TYPE_IS_OBJECT (object_type)' failed
When I run the same code with some delay (500 ms) added between the threads' execution, both threads create their pipelines correctly and the program works.
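A sketch of making that serialization explicit, assuming the race is in concurrent pipeline construction (creation_lock is an illustrative name):

static GMutex creation_lock;

/* inside Camera::Thread_function() */
g_mutex_lock(&creation_lock);
pipeline = gst_parse_launch(text.c_str(), &error);
g_mutex_unlock(&creation_lock);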
Can anyone help me?
EDIT:
My code:
void Camera::Thread_function(){
    GstElement* pipeline = NULL;
    GstBus* bus = NULL;
    GstMessage* msg = NULL;
    GError* error = NULL;
    STRING text;

    DEBUG << "Starting camera:" << name << END;
    text = stream_uri;
    DEBUG << text << END;

    pipeline = gst_parse_launch(text.c_str(), &error);
    if(error != NULL){
        CRITICAL << "Parse error: " << error->message << END;
        g_error_free(error);
        error = NULL;
        goto STOP;
    }
    if(pipeline == NULL){
        CRITICAL << "Pipeline is NULL" << END;
        goto STOP;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    loop = g_main_loop_new(NULL, FALSE);
    gst_bus_add_watch(GST_ELEMENT_BUS(pipeline), bus_cb, loop);
    g_timeout_add_seconds(1, CAM_REC::timeout_cb, this);
    g_main_loop_run(loop);
    g_main_loop_unref(loop);

STOP:
    if(msg != NULL){
        gst_message_unref(msg);
        msg = NULL;
    }
    if(bus != NULL){
        gst_object_unref(bus);
        bus = NULL;
    }
    if(pipeline){
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        pipeline = NULL;
    }
    INFO << "Cam " << name << " end" << END;
}
Pipelines:
rtspsrc name=kam3_stream location=\"rtsp://192.168.0.107/stream1\" ! rtph264depay name=kam3_deplay ! h264parse name=kam3_parse ! matroskamux name=kam3_mux ! filesink location=x.mkv name=kamera3_file_sink
rtspsrc name=kam4_stream location=\"rtsp://admin:admin#192.168.0.108/\" ! rtph264depay name=kam4_deplay ! h264parse name=kam4_parse ! matroskamux name=kam4_mux ! filesink location=x2.mkv name=kamera4_file_sink
The program crashes in the gst_parse_launch() function.
Thanks

GStreamer Two Pipelines Synchronization

I have developed an application which takes data from a file, adds metadata to each frame, and transfers that data to a client through UDP over RTP.
At the receiving end, the client removes the metadata and has to play the video.
For that I used, at the server end:
pipeline1 :: gst-launch-1.0 filesrc ! videoparse ! appsink
At the appsink I add metadata and push the buffer to appsrc.
pipeline2 :: gst-launch-1.0 appsrc ! rtpgstpay ! udpsink
At the receiver's end:
pipeline1 :: gst-launch-1.0 udpsrc ! rtpgstdepay ! appsink
At the appsink I remove the metadata and push the buffer to appsrc.
pipeline2 :: gst-launch-1.0 appsrc ! videoparse ! autovideoconvert ! autovideosink
The problem is that at the receiver's end I am not getting all the frames, and the video is not playing properly: only one frame plays, then it stops, then it plays only a single frame again.
Can anyone provide a solution or suggestion?
/* Code at the server end: processing a frame. */
/* This is in a function which is called by g_timeout_add_seconds(0, new_sample, NULL); */
g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);
buf = gst_buffer_new_wrapped(&header, sizeof(header)); /* header is a structure */
FrameBuffer = gst_sample_get_buffer(sample);
buffer = gst_buffer_append(buf, FrameBuffer);
g_signal_emit_by_name(appsrc2, "push-buffer", buffer, &ret);
if(ret != GST_FLOW_OK)
{
    g_printerr("Failed to Push Buffer");
    return FALSE;
}
g_print("Successfully Pushed..\n");
/* The above code is for processing the frame at the server end. */

/* Code for processing the frame at the receiver end. */
/* This is in the function new_sample, which is called by g_timeout_add_seconds(0, new_sample, NULL); */
if(!gst_app_sink_is_eos((GstAppSink *)sink))
{
    GstSample *sample = NULL;
    g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);
    buf = gst_sample_get_buffer(sample);
    gst_buffer_extract(buf, 0, temp, 8);
    if(frameid != temp->FrameID)
    {
        gst_element_set_state(pipeline2, GST_STATE_PLAYING);
        g_print("Frame Header :: %d , Frame ID :: %d\n", temp->FrameHeader, temp->FrameID);
        gint size;
        size = gst_buffer_get_size(buf);
        buffer = gst_buffer_copy_region(buf, GST_BUFFER_OFFSET_NONE, 8, size - 8);
        g_print("Size :: -- %d\n", size);
        g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
        if(ret != GST_FLOW_OK)
        {
            g_printerr("Failed to Push Buffer");
            return FALSE;
        }
        g_print("Successfully Pushed..\n");