In GStreamer can't remove tee section after EOS - C++

I am trying to create a webcam on an embedded device and learn gstreamer c implementation at the same time. i have dealt with gstreamer launch pipelines for a while so i am somewhat familiar already with gstreamer.
my end goal is to eventually have a pipeline that will dynamically stream video, record video and save pictures all from external commands. I've started small with my implementation and right now I'm focusing on being able to take a picture in one branch of a tee while the other branch is still flowing data. the other branch is just a fakesink right now but eventually it will be an h264 encoder with mux and audio saving videos.
here is a simple view of my pipeline:
v4l2src ! capsfilter ! tee ! queue ! fakesink tee. ! queue ! videoconvert ! pngenc ! filesink
my idea was to dynamically add the picture portion of the pipeline while its running.
the flow of my program goes like this:
picture event is triggered (currently a simple timer)-> add blocking probe on tee -> add picture pipeline and link it to tee -> set to playing -> set blocking probe on filesink to verify it has received data -> send EOS down the pipeline starting at the videoconvert -> set blocking probe on tee pad linked to picture pipeline -> set the picture pipeline to null and remove it and the tee pad
when the program executes, the eos probe on the tee pad for the picture pipeline is never called and instead the whole pipeline goes to EOS and i get an internal data stream error and no picture.
i want to make sure the filesink only gets 1 buffer as i cant stop the v4l2src stream or give it a num-buffers=1. i guess my problem right now is: how do i verify the filesink gets only one buffer? which pad should i send the EOS event on in order for it to properly save the picture? and lastly, how do i make sure only this one branch sees the EOS?
I've pored over all of the GStreamer tutorials and SO questions but most are either not answered or haven't helped my situation.
here is my code:
#include <QDebug>
#include <QTimer>
#include "gstpipeline.hpp"
#include "gsttypes.hpp"
using namespace INSP_GST_TYPES;
gstpipeline::gstpipeline()
    : mV4l2Src(nullptr)
    , mEncoder(nullptr)
    , mPngEncoder(nullptr)
    , mVideoFileSink(nullptr)
    , mPictureFileSink(nullptr)
    , mRawCapsFilter(nullptr)
    , mEncodedCapsFilter(nullptr)
    , mEncoderVideoConvert(nullptr)
    , mPngVideoConvert(nullptr)
    , mEncoderQueue(nullptr)
    , mMatroskaMux(nullptr)
    , mPipeline(nullptr)
{
    // Every element pointer starts out null; init() creates and wires them.
}
void gstpipeline::init()
{
    // Build the permanent part of the pipeline:
    //   v4l2src ! capsfilter ! tee ! queue ! fakesink
    // The picture branch is attached dynamically later
    // (see sRawFakesinkQueueBlockedCallback).
    mV4l2Src = gst_element_factory_make("v4l2src", V4L2SOURCE_NAME);
    mRawCapsFilter = gst_element_factory_make("capsfilter", RAW_CAPS_NAME);
    mRawFakesinkQueue = gst_element_factory_make("queue", RAW_FAKESINK_QUEUE_NAME);
    mRawFakeSink = gst_element_factory_make("fakesink", RAW_FAKESINK_NAME);
    mRawTee = gst_element_factory_make("tee", RAW_TEE_NAME);
    mPipeline = gst_pipeline_new(PIPELINE_NAME);

    // BUG FIX: verify creation BEFORE configuring anything.  The original
    // called g_object_set() on mRawCapsFilter first, which raises a GLib
    // critical (and is silently ignored) when the capsfilter is NULL.
    if(!mPipeline || !mV4l2Src || !mRawCapsFilter || !mRawTee || !mRawFakesinkQueue || !mRawFakeSink)
    {
        qCritical() << "Failed to create main gst elements";
        return;
    }

    // Fixed camera format: 1280x720 NV12 @ 30 fps.
    mRawCaps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "NV12",
                                   "width", G_TYPE_INT, 1280,
                                   "height", G_TYPE_INT, 720,
                                   "framerate", GST_TYPE_FRACTION, 30, 1,
                                   NULL);
    // The capsfilter takes its own reference; mRawCaps keeps ours.
    g_object_set(mRawCapsFilter, "caps", mRawCaps, NULL);

    qWarning() << "created the initial pipeline";
    linkRawPipeline();
}
void gstpipeline::linkRawPipeline()
{
gst_bin_add_many(GST_BIN(mPipeline), mV4l2Src, mRawCapsFilter, mRawTee, mRawFakesinkQueue, mRawFakeSink, NULL);
g_object_set(mPipeline, "message-forward", TRUE, NULL);
if(gst_element_link_many(mV4l2Src, mRawCapsFilter, mRawTee, NULL) != TRUE)
{
qCritical() << "Failed to link raw pipeline";
return;
}
if(gst_element_link_many(mRawFakesinkQueue, mRawFakeSink, NULL) != TRUE)
{
qCritical() << "Failed to link fakesink pipeline";
return;
}
/* Manually link the Tee, which has "Request" pads */
GstPad* tee_fakesink_pad = gst_element_get_request_pad (mRawTee, "src_%u");
qWarning ("Obtained request pad %s for fakesink branch.", gst_pad_get_name (tee_fakesink_pad));
GstPad* raw_queue_pad = gst_element_get_static_pad (mRawFakesinkQueue, "sink");
if (gst_pad_link (tee_fakesink_pad, raw_queue_pad) != GST_PAD_LINK_OK)
{
qCritical ("raw Tee could not be linked.");
}
gst_object_unref(tee_fakesink_pad);
gst_object_unref(raw_queue_pad);
if (gst_element_set_state (mPipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
{
qCritical() << "Unable to set the pipeline to the ready state";
gst_object_unref (mPipeline);
}
else
{
qWarning() << "set pipeline to playing";
GMainLoop* loop = g_main_loop_new (NULL, FALSE);
gst_bus_add_watch (GST_ELEMENT_BUS (mPipeline), sMainBusCallback, loop);
QTimer::singleShot(1000, this, SLOT(onBusTimeoutExpired()));
}
}
// Qt slot fired by the single-shot timer armed in linkRawPipeline().
// Stands in for a real "take picture" event and starts the capture sequence
// by blocking the running fakesink branch.
void gstpipeline::onBusTimeoutExpired()
{
blockRawPipeline();
}
void gstpipeline::blockRawPipeline()
{
qWarning() << "Blocking raw pipeline";
GstPad* srcpad = gst_element_get_static_pad(mRawFakesinkQueue, SRC_PAD);
gst_pad_add_probe(srcpad,
(GstPadProbeType)(GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_IDLE),
sRawFakesinkQueueBlockedCallback, NULL, NULL);
g_object_unref(srcpad);
qWarning() << "added fakesink queue probe";
}
GstPadProbeReturn gstpipeline::sRawFakesinkQueueBlockedCallback(GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
    // The fakesink branch is blocked: build the picture branch
    // (queue ! videoconvert ! pngenc ! filesink), attach it to a fresh tee
    // request pad and bring it up to the pipeline's state.
    gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));

    mPictureQueue = gst_element_factory_make("queue", RAW_PICTURE_QUEUE_NAME);
    mPngEncoder = gst_element_factory_make("pngenc", PNG_ENC_NAME);
    mPictureFileSink = gst_element_factory_make("filesink", PICTURESINK_NAME);
    mPngVideoConvert = gst_element_factory_make("videoconvert", VIDEOCONVERT_PNG_NAME);
    if(!mPngEncoder || !mPictureFileSink || !mPngVideoConvert)
    {
        qCritical() << "failed to make picturesink elements";
        // BUG FIX: the original fell through and dereferenced NULL elements.
        return GST_PAD_PROBE_OK;
    }
    g_object_set(G_OBJECT (mPictureFileSink), "location", "/mnt/userdata/pipelinetest.png", NULL);

    gst_bin_add_many (GST_BIN (mPipeline), mPictureQueue, mPngVideoConvert,
                      mPngEncoder, mPictureFileSink, NULL);
    if(gst_element_link_many(mPictureQueue, mPngVideoConvert, mPngEncoder, mPictureFileSink, NULL) != TRUE)
    {
        qCritical() << "failed to link picture pipeline";
    }

    GstPad* tee_picturesink_pad = gst_element_get_request_pad (mRawTee, "src_%u");
    // BUG FIX: gst_pad_get_name() returns a copy that must be freed.
    gchar* pad_name = gst_pad_get_name(tee_picturesink_pad);
    qWarning ("Obtained request pad %s for picturesink branch.", pad_name);
    g_free(pad_name);
    GstPad* raw_picture_queue_pad = gst_element_get_static_pad (mPictureQueue, "sink");
    if (gst_pad_link (tee_picturesink_pad, raw_picture_queue_pad) != GST_PAD_LINK_OK)
    {
        qCritical ("picture Tee could not be linked.");
    }
    // BUG FIX: both pad references were leaked in the original code.
    gst_object_unref(tee_picturesink_pad);
    gst_object_unref(raw_picture_queue_pad);

    // The branch joins a PLAYING pipeline; sync each element's state.
    gst_element_sync_state_with_parent(mPictureQueue);
    gst_element_sync_state_with_parent(mPngVideoConvert);
    gst_element_sync_state_with_parent(mPngEncoder);
    gst_element_sync_state_with_parent(mPictureFileSink);
    qWarning() << "done adding picturesink";

    // Data-blocking probe on the filesink SINK pad (the original variable was
    // misleadingly named srcpad): fires once the first item reaches the
    // filesink, proving the branch carries data.
    GstPad* sinkpad = gst_element_get_static_pad(mPictureFileSink, SINK_PAD);
    gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
                      sPictureSinkDownstreamBlockProbe, NULL, NULL);
    gst_object_unref(sinkpad);

    // DROP discards the item that was held while blocked; the probe has
    // already been removed, so subsequent data flows normally.
    return GST_PAD_PROBE_DROP;
}
GstPadProbeReturn gstpipeline::sPictureSinkDownstreamBlockProbe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    // Data-blocking probe on the picture filesink's sink pad: the first item
    // has arrived, so the PNG branch demonstrably receives data.
    gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));

    qWarning() << "setting the EOS event probe on the picturesink";
    // Watch the same filesink pad for the EOS event pushed below.
    // BUG FIX: the original also fetched mPictureQueue's src pad here and
    // never used it; that dead code has been removed.
    gst_pad_add_probe(pad,
                      (GstPadProbeType)(GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
                      sPictureSinkEOSCallback, NULL, NULL);

    qWarning() << "sending eos through videoconvert";
    // Send EOS into the picture branch only, so the filesink finalises the
    // PNG file without ending the rest of the pipeline.
    gst_element_send_event(mPngVideoConvert, gst_event_new_eos());
    qWarning() << "exiting pad probe";
    return GST_PAD_PROBE_PASS;
}
GstPadProbeReturn gstpipeline::sPictureSinkEOSCallback(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    // Fires for downstream events on the picture filesink's sink pad; only
    // the EOS that marks the PNG as fully written matters here.
    if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_DATA (info)) == GST_EVENT_EOS)
    {
        // BUG FIX: the original removed the probe unconditionally BEFORE
        // checking the event type, so the very first non-EOS downstream event
        // destroyed the probe and the EOS could never be observed.  Remove it
        // only once EOS has actually been seen.
        gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
        qWarning() << "setting raw queue pad block";
        // Once the pad is idle the branch can be torn down safely.
        // (Dead fetch of mPictureQueue's src pad removed.)
        gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_IDLE,
                          sRawQueueBlockedCallback, NULL, NULL);
    }
    else
    {
        qCritical() << "picturesink pad probe is NOT EOS";
    }
    return GST_PAD_PROBE_HANDLED;
}
GstPadProbeReturn gstpipeline::sRawQueueBlockedCallback(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    // Installed as an IDLE probe (see sPictureSinkEOSCallback).
    // BUG FIX: idle probes deliver no data item, so GST_PAD_PROBE_INFO_DATA()
    // is NULL and the original's GST_EVENT_TYPE(NULL) dereferenced a null
    // pointer.  Treat an idle callback (NULL event) as the go-ahead, and
    // still accept an explicit EOS event for robustness.
    GstEvent* event = GST_PAD_PROBE_INFO_EVENT (info);
    if (event == NULL || GST_EVENT_TYPE (event) == GST_EVENT_EOS)
    {
        gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
        // Take the branch down before unlinking and removing it.
        // NOTE(review): this runs from a GStreamer thread; setting element
        // states to NULL from here is discouraged — confirm this is safe in
        // this application (the docs suggest deferring to the main thread).
        gst_element_set_state(mPictureFileSink, GST_STATE_NULL);
        gst_element_set_state(mPngEncoder, GST_STATE_NULL);
        gst_element_set_state(mPngVideoConvert, GST_STATE_NULL);
        gst_element_set_state(mPictureQueue, GST_STATE_NULL);

        // Unlink the picture branch from the tee and drop the request pad.
        // NOTE(review): "src_1" is hard-coded; if the tee ever hands out a
        // different pad number this lookup fails — keep the pointer returned
        // by gst_element_get_request_pad() instead.
        GstPad* tee_picturesink_pad = gst_element_get_static_pad(mRawTee, "src_1");
        gchar* pad_name = gst_pad_get_name(tee_picturesink_pad);   // BUG FIX: was leaked
        qWarning ("Obtained request pad %s for picturesink branch.", pad_name);
        g_free(pad_name);
        GstPad* raw_picture_queue_pad = gst_element_get_static_pad (mPictureQueue, "sink");
        // BUG FIX: gst_pad_unlink() returns a gboolean (TRUE on success), not
        // a GstPadLinkReturn.  Comparing it with GST_PAD_LINK_OK (0) made the
        // original report an error exactly when the unlink SUCCEEDED.
        if (!gst_pad_unlink (tee_picturesink_pad, raw_picture_queue_pad))
        {
            qCritical ("picture Tee could not be unlinked.");
        }
        if(gst_element_remove_pad(mRawTee, tee_picturesink_pad) != TRUE)
        {
            qCritical("could not remove raw tee pad");
        }
        gst_object_unref(tee_picturesink_pad);
        gst_object_unref(raw_picture_queue_pad);
        // gst_bin_remove_many() drops the bin's references to the elements.
        gst_bin_remove_many(GST_BIN(mPipeline), mPictureQueue, mPngVideoConvert, mPngEncoder, mPictureFileSink, NULL);
        qWarning() << "we have set the fakesink back up";
    }
    else
    {
        qCritical() << "picturesink pad probe is NOT EOS";
    }
    return GST_PAD_PROBE_PASS;
}
// Bus watch for the whole pipeline.  user_data is the GMainLoop created in
// linkRawPipeline(); errors and EOS quit that loop.
gboolean gstpipeline::sMainBusCallback (GstBus*bus, GstMessage *msg, gpointer user_data)
{
    GMainLoop *loop = (GMainLoop*)user_data;
    switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
    {
        GError *err = NULL;
        gchar *dbg;
        gst_message_parse_error (msg, &err, &dbg);
        // Print the error the same way GStreamer's default handler would.
        gst_object_default_error (msg->src, err, dbg);
        g_clear_error (&err);
        g_free (dbg);
        g_main_loop_quit (loop);
    }
    break;
    case GST_MESSAGE_EOS:
        g_print ("we reached EOS\n");
        g_main_loop_quit (loop);
        break;
    default:
        // g_print ("msg: %s\n", GST_MESSAGE_TYPE_NAME(msg));
        break;
    }
    // BUG FIX: the original function had no return statement at all, which is
    // undefined behaviour for a non-void function.  TRUE keeps the bus watch
    // installed.
    return TRUE;
}

so i managed to figure this out myself. here are the steps i took to get this working:
1. blocking probe on the fakesink queue
2. add the picture pipeline
3. put a blocking data probe on the picture files sink
4. wait until a segment buffer reaches the filesink
5. put a blocking probe on the picture pipeline's queue
6. in the queue blocking probe, send eos event and remove the picture pipeline

Related

pad creation and some mistakes on gst_parse_launch

I am a newbie to gstreamer and I would like to know if we have to create source and sink pads for convert like video convert in a pipeline. I have a pipeline like this
gst-launch-1.0 v4l2src ! video/x-raw,format=YUY2 ! videoconvert ! xvimagesink
I am trying to create a simple c application to understand the creation of pads and would like to know if video convert has a source pad and sink pad too. I am creating a source and sink pad for the filter.
EDIT:
yeah well you see, I tried following the dynamic pipelines example and wrote the code below
#include <gst/gst.h>
// easier to pass them as callbacks
/* Bundle of all pipeline elements so a single pointer can be handed to
 * GStreamer callbacks (e.g. the pad-added handler below). */
typedef struct _CustomData{
GstElement *pipeline;
GstElement *source;   /* v4l2src */
GstElement *convert;  /* videoconvert */
GstElement *sink;     /* xvimagesink */
}CustomData;
// callback function
// here src is the v4l2src, newpad is gstpad that has just been added to src element. This is usually the pad to which we want to lnk
// data is the pointer we provided when attaching to the signal.
/* Callback for the "pad-added" signal on the source element: links the newly
 * created source pad to the videoconvert sink pad if it carries raw video.
 * (Note: v4l2src has a static src pad, so with that source this callback is
 * never invoked — see the answer below the code.) */
static void pad_added_handler(GstElement *src, GstPad *new_pad,CustomData *data)
{
    g_print("In pad handler\n");
    GstPad *sink_pad = gst_element_get_static_pad(data->convert, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    /* BUG FIX: the original only printed a message and then fell through and
     * tried to link the already-linked pad anyway. */
    if(gst_pad_is_linked(sink_pad))
    {
        g_print("we are linked. ignoring\n");
        gst_object_unref(sink_pad);
        return;
    }

    /* Inspect the capabilities of the freshly added pad. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);
    /* BUG FIX: the structure NAME is just "video/x-raw" — fields such as
     * format live beside it, not inside the name — so the original prefix
     * test against "video/x-raw,format=(string)YUY2" could never match, and
     * its message (copied from an audio tutorial) was printed for every pad. */
    if(!g_str_has_prefix(new_pad_type, "video/x-raw"))
    {
        g_print("It has type '%s' which is not raw video. Ignoring.\n", new_pad_type);
        if(new_pad_caps != NULL)
            gst_caps_unref(new_pad_caps);
        gst_object_unref(sink_pad);
        return;
    }

    /* gst_pad_link links source to sink; both pads must belong to elements in
     * the same pipeline. */
    ret = gst_pad_link(new_pad, sink_pad);
    if(GST_PAD_LINK_FAILED(ret))
    {
        g_print("Linking of type '%s' failed.\n", new_pad_type);
    }
    if(new_pad_caps !=NULL)
    {
        gst_caps_unref(new_pad_caps);
    }
    gst_object_unref(sink_pad);
}
/* Demonstration main: builds v4l2src (left unlinked) -> videoconvert ->
 * xvimagesink and relies on a "pad-added" callback to link the source.
 * NOTE(review): v4l2src has a static ("always") src pad, so "pad-added" never
 * fires and the source stays unlinked — this is exactly what produces the
 * "streaming stopped, reason not-linked (-1)" error shown in the output, as
 * the answer further down explains. */
int main(int argc, char *argv[])
{
GMainLoop *loop; /* NOTE(review): declared but never created or run — unused */
CustomData data;
GstBus *bus;
GstMessage *msg;
gboolean terminate = FALSE;
gst_init(&argc, &argv);
// loop = g_main_loop_new(NULL, FALSE);
// create the elements
data.source = gst_element_factory_make("v4l2src", "source");
data.convert = gst_element_factory_make("videoconvert", "convert");
data.sink = gst_element_factory_make("xvimagesink", "sink");
data.pipeline = gst_pipeline_new("new-pipeline");
if(!data.pipeline || !data.source || !data.convert || !data.sink)
{
g_printerr("Not all elements could be created\n");
return -1;
}
//we did not link source at this point of time, we will do it later
gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert, data.sink, NULL);
// we link convert element to sink, do not link them with source. we dont have source pads here. so we just have videoconvert->sink unlinked
// gst_element_link(data.source, data.convert);
if(!gst_element_link( data.convert,data.sink))
{
g_printerr("elements could not be linked\n");
gst_object_unref(data.pipeline);
return -1;
}
// we set the device source
//g_object_set(source, "device", "/dev/video0", NULL);
//connect to pad added signal.
// we want to attach pad added signal to source element. to do so, we are using g_signal_connect and provide callback function and datapointer.
// when source element has enough information to start producing data, it will create source pads and trigger the pad added signal. at this point, our callback is called
// NOTE(review): this only works for elements with DYNAMIC pads; v4l2src's
// src pad is static, so this signal never fires for it.
g_print("before signal connect\n");
gint id= g_signal_connect(G_OBJECT(data.source), "pad-added", G_CALLBACK(pad_added_handler), &data );
g_print("after signal connect with id = %d\n", id);
//g_signal_connect(G_OBJECT(data.source), "pad-added", G_CALLBACK(handler), &data);
// gst_element_link(data.source, data.convert);
GstStateChangeReturn ret;
ret =gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
// g_main_loop_run(loop);
/* Listen to the bus */
/* Blocking message loop: exits on ERROR or EOS; state-changed messages from
 * the pipeline itself are printed for tracing. */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state(data.pipeline, GST_STATE_NULL);
gst_object_unref(data.pipeline);
return 0;
}
and it gave me an error
before signal connect
after signal connect with id = 1
Pipeline state changed from NULL to READY:
Pipeline state changed from READY to PAUSED:
Error received from element source: Internal data stream error.
Debugging information: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:new-pipeline/GstV4l2Src:source:
streaming stopped, reason not-linked (-1)
(The above code works if I write gst_elements_link(data.source, data.convert) after the element link statement for convert and sink)
So I tried the normal way in which I just added and linked all the elements together and it began to work without use of the pads.
#include <gst/gst.h>
/* Minimal working version: v4l2src ! nvvidconv ! xvimagesink.  All three
 * elements have "always" pads, so gst_element_link_many() is sufficient and
 * no pad-added callback is needed. */
int main(int argc, char *argv[])
{
    GstElement *pipeline, *source, *convert, *sink;
    GstBus *bus;
    GstMessage *msg;
    gst_init(&argc, &argv);
    source = gst_element_factory_make("v4l2src", "source");
    convert = gst_element_factory_make("nvvidconv", "convert");
    sink = gst_element_factory_make("xvimagesink", "sink");
    pipeline = gst_pipeline_new("pipe");
    /* Robustness: the original never checked element creation and would have
     * emitted criticals inside gst_bin_add_many() on a missing plugin. */
    if (!pipeline || !source || !convert || !sink) {
        g_printerr ("Not all elements could be created\n");
        return -1;
    }
    gst_bin_add_many(GST_BIN(pipeline), source,convert,sink, NULL);
    /* Robustness: report link failures instead of silently ignoring them. */
    if (!gst_element_link_many(source,convert,sink,NULL)) {
        g_printerr ("Elements could not be linked\n");
        gst_object_unref(pipeline);
        return -1;
    }
    gst_element_set_state(pipeline,GST_STATE_PLAYING);
    bus = gst_element_get_bus (pipeline);
    /* Block until an error or end-of-stream arrives. */
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    /* Parse message */
    if (msg != NULL) {
        GError *err;
        gchar *debug_info;
        switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
            gst_message_parse_error (msg, &err, &debug_info);
            g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
            g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
            g_clear_error (&err);
            g_free (debug_info);
            break;
        case GST_MESSAGE_EOS:
            g_print ("End-Of-Stream reached.\n");
            break;
        default:
            /* We should not reach here because we only asked for ERRORs and EOS */
            g_printerr ("Unexpected message received.\n");
            break;
        }
        gst_message_unref (msg);
    }
    /* Free resources */
    gst_object_unref(bus);
    gst_element_set_state(pipeline,GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
But, inorder to fully grasp the knowledge of pads, I wanted to execute simpler pipelines with pads.
I just don't fully understand the usage of pads and link them and all.
EDIT2:
Ultimately I want to write application for pipeline like this which works on command line perfectly well,
gst-launch-1.0 v4l2src device='/dev/video0' ! 'video/x-raw,format=(string)YUY2,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! nvvidconv ! 'video/x-raw,format=(string)NV12,width=(int)640,height=(int)480' ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=(string)NV12,width=(int)640,height=(int)480' ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=640 height=480 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=640 height=480 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink sync=false -v
But as I don't understand the usage of both pads and bins, I am unable to implement them in my above pipeline. However, I tried this,
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include "gstnvdsmeta.h"
#define MAX_DISPLAY_LEN 64
#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2
gint frame_number = 0;
gchar pgie_classes_str[4][32] = { "Vehicle", "TwoWheeler", "Person",
"Roadsign"
};
/* Buffer probe on the OSD sink pad: walks the DeepStream batch metadata of
 * each buffer, counts Vehicle/Person detections, and attaches an on-screen
 * text label showing the counts.
 * NOTE(review): assumes gst_buffer_get_nvds_batch_meta() returns non-NULL,
 * i.e. an upstream nvstreammux attached NvDs batch metadata — confirm before
 * reuse, as batch_meta is dereferenced without a check. */
static GstPadProbeReturn osd_sink_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
GstBuffer *buf=(GstBuffer *)info->data;
guint num_rects =0;
NvDsObjectMeta *obj_meta = NULL;
guint vehicle_count = 0;
guint person_count = 0;
NvDsMetaList * l_frame = NULL;
NvDsMetaList * l_obj = NULL;
NvDsDisplayMeta *display_meta = NULL;
NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);
/* One frame per source in the batch. */
for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
l_frame = l_frame->next) {
NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
int offset = 0;
/* Tally detected objects by class id. */
for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
l_obj = l_obj->next) {
obj_meta = (NvDsObjectMeta *) (l_obj->data);
if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
vehicle_count++;
num_rects++;
}
if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
person_count++;
num_rects++;
}
}
/* Build one text label per frame from the display-meta pool. */
display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
NvOSD_TextParams *txt_params = &display_meta->text_params[0];
display_meta->num_labels = 1;
txt_params->display_text = g_malloc0 (MAX_DISPLAY_LEN);
offset = snprintf(txt_params->display_text, MAX_DISPLAY_LEN, "Person = %d ", person_count);
/* NOTE(review): this writes at display_text + offset but still passes
 * MAX_DISPLAY_LEN as the size, which can run past the g_malloc0(MAX_DISPLAY_LEN)
 * buffer when offset > 0 — should be MAX_DISPLAY_LEN - offset.  Verify. */
offset = snprintf(txt_params->display_text + offset , MAX_DISPLAY_LEN, "Vehicle = %d ", vehicle_count);
/* Now set the offsets where the string should appear */
txt_params->x_offset = 10;
txt_params->y_offset = 12;
/* Font , font-color and font-size */
txt_params->font_params.font_name = "Serif";
txt_params->font_params.font_size = 10;
txt_params->font_params.font_color.red = 1.0;
txt_params->font_params.font_color.green = 1.0;
txt_params->font_params.font_color.blue = 1.0;
txt_params->font_params.font_color.alpha = 1.0;
/* Text background color */
txt_params->set_bg_clr = 1;
txt_params->text_bg_clr.red = 0.0;
txt_params->text_bg_clr.green = 0.0;
txt_params->text_bg_clr.blue = 0.0;
txt_params->text_bg_clr.alpha = 1.0;
nvds_add_display_meta_to_frame(frame_meta, display_meta);
}
/* frame_number is a file-scope counter incremented once per buffer. */
g_print ("Frame Number = %d Number of objects = %d "
"Vehicle Count = %d Person Count = %d\n",
frame_number, num_rects, vehicle_count, person_count);
frame_number++;
return GST_PAD_PROBE_OK;
}
int main(int argc, char *argv[])
{
GstElement *pipeline, *source, *filter1, *convert,*filter2, *filter3, *vidconv, *filter4, *mux, *infer, *tiler, *osd, *transform , *sink, *bin, *convert2 , *vidconv2;
GMainLoop *loop;
GstCaps *caps1, *caps2, *caps3, *caps4;
GstPad *osd_sink_pad =NULL, *srcpad, *sinkpad;
loop = g_main_loop_new(NULL,FALSE);
gst_init(&argc, &argv);
pipeline = gst_pipeline_new("nv_pipeline");
gchar *string1 = "video/x-raw(memory:NVMM),format=(string)NV12";
source = gst_element_factory_make("v4l2src", "source");
filter1 = gst_element_factory_make("capsfilter", "filter1");
convert = gst_element_factory_make("nvvidconv", "convert");
filter2 = gst_element_factory_make("capsfilter", "filter2");
filter3 = gst_element_factory_make("capsfilter", "filter3");
filter4 = gst_element_factory_make("capsfilter", "filter4");
vidconv = gst_element_factory_make("nvvideoconvert", "vidconv");
mux = gst_element_factory_make("nvstreammux", "mux");
infer = gst_element_factory_make("nvinfer", "infer");
tiler = gst_element_factory_make("nvmultistreamtiler", "tiler");
osd = gst_element_factory_make("nvosd", "osd");
transform = gst_element_factory_make("nvegltransform", "transform");
sink = gst_element_factory_make("nveglglessink", "sink");
convert2 = gst_element_factory_make("nvvidconv", "convert2");
vidconv2 = gst_element_factory_make("nvvideoconvert", "vidconv2");
gst_bin_add_many(GST_BIN(pipeline), source,filter1,convert,filter2, convert2,filter3,vidconv, filter4,mux,infer, tiler,vidconv2, osd,transform,sink,NULL);
gst_element_link_many(source,filter1,convert,filter2, convert2,filter3, vidconv, filter4,mux,infer, tiler,vidconv2, osd,transform,sink,NULL);
osd_sink_pad = gst_element_get_static_pad(osd, "sink");
gst_pad_add_probe(osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER, osd_sink_pad_buffer_probe, NULL, NULL);
caps1 = gst_caps_new_simple("video/x-raw", "format",G_TYPE_STRING,"YUY2",NULL);
caps2 = gst_caps_from_string(string1);
caps3 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING,"NV12", NULL);
caps4 = gst_caps_from_string(string1);
g_object_set(G_OBJECT(filter1), "caps", caps1, NULL);
g_object_set(G_OBJECT(filter2), "caps", caps2, NULL);
g_object_set(G_OBJECT(filter3), "caps", caps3, NULL);
g_object_set(G_OBJECT(filter4), "caps", caps4, NULL);
g_object_set(G_OBJECT(mux), "live-source", 1, "name", "mux", "batch-size", 1, "width", 1280, "height", 720, NULL);
g_object_set(G_OBJECT(infer), "config-file-path","/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt",NULL);
g_object_set(G_OBJECT(infer), "batch-size", 1, NULL);
g_object_set(G_OBJECT(tiler), "rows", 1, "columns", 1, "width", 1280, "height", 720, NULL);
gst_caps_unref(caps1);
gst_caps_unref(caps2);
gst_caps_unref(caps3);
gst_caps_unref(caps4);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Running ...\n");
g_main_loop_run(loop);
gst_element_set_state(pipeline,GST_STATE_NULL);
gst_object_unref(pipeline);
return 0;
}
which gives the exact same output as the command line gst-launch-1.0 like,
(deep_pads:15648): GLib-GObject-WARNING **: 11:29:18.761: cannot register existing type 'GstInterpolationMethod'
(deep_pads:15648): GLib-GObject-CRITICAL **: 11:29:18.761: g_param_spec_enum: assertion 'G_TYPE_IS_ENUM (enum_type)' failed
(deep_pads:15648): GLib-GObject-CRITICAL **: 11:29:18.761: validate_pspec_to_install: assertion 'G_IS_PARAM_SPEC (pspec)' failed
(deep_pads:15648): GStreamer-CRITICAL **: 11:29:18.814: gst_element_get_static_pad: assertion 'GST_IS_ELEMENT (element)' failed
(deep_pads:15648): GStreamer-CRITICAL **: 11:29:18.814: gst_pad_add_probe: assertion 'GST_IS_PAD (pad)' failed
0:00:00.843318172 15648 0x5591be52c0 INFO nvinfer gstnvinfer.cpp:519:gst_nvinfer_logger:<infer> NvDsInferContext[UID 1]:initialize(): Trying to create engine from model files
0:00:20.693301580 15648 0x5591be52c0 INFO nvinfer gstnvinfer.cpp:519:gst_nvinfer_logger:<infer> NvDsInferContext[UID 1]:generateTRTModel(): Storing the serialized cuda engine to file at /opt/nvidia/deepstream/deepstream-4.0/samples/models/Primary_Detector/resnet10.caffemodel_b1_int8.engine
except It doesn't display an output window from the above c application and doesn't display the rest.
Your first example fails because data.source element is actually never linked to data.convert element. Since both elements have static pads you need to "manually" create them and link them before setting pipeline to GST_STATE_PLAYING:
GstPad *source_pad = gst_element_get_static_pad(data.source, "src");
GstPad *sink_pad = gst_element_get_static_pad(data.convert, "sink");
ret = gst_pad_link(source_pad, sink_pad);
You probably expected that static source pad of your data.source element will somehow be automatically created so you registered
g_signal_connect(G_OBJECT(data.source), "pad-added", G_CALLBACK(pad_added_handler), &data );
But, as you can see from your debug, pad_added_handler was never called because g_signal_connect can be registered and will be called for elements that have dynamic pads. For example, demultiplexer tsdemux will dynamically create its source pads during discovery of elementary streams so, in that case, registration of pad-added callback would be necessary.
The first step in your learning curve would be to understand fundamental differences between "static" (mandatory, always exists, "manually" created), dynamic (exists sometimes, automatically created by element) and request (optional, "manually" created when needed) gstreamer pads. After that everything will be much easier for you.

How to implement GStreamer tee in C code

I have the following working pipeline. It has been tested using both command-line tool gst-launch-1.0 and function gst_parse_launch(), and works in both cases.
videotestsrc ! video/x-raw,width=640,height=480 ! videocrop left=80 right=80 ! tee name=t ! queue ! glupload ! glimagesink t. ! queue ! jpegenc ! avimux ! filesink location=output.avi
I've tried to set it up manually in code, but I'm now stuck on the following error (the application opens, but no video is displayed):
Error received from element videotestsrc0 : Internal data flow error.
Debugging information: gstbasesrc.c(2948): gst_base_src_loop ():
/GstPipeline:pipeline0/GstVideoTestSrc:videotestsrc0: streaming task
paused, reason not-negotiated (-4)
I'm using GStreamer in a Qt application and the glimagesink links the video to a QML type. All code related to GStreamer is located in a GStreamer class called GStreamer. The entire cpp file is posted below, in case the issue is located somewhere I wouldn't guess. I apologize for non-relevant code.
static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data);
// QQuickItem-based wrapper around the pipeline; construction only logs —
// the actual pipeline is built in createPipeline() so that the QML
// properties (source, videoItem) are set first.
GStreamer::GStreamer(QQuickItem *parent) : QQuickItem(parent)
{
qDebug() << "Constructed GSteamer";
}
void GStreamer::createPipeline()
{
qDebug() << "Creating pipeline";
if(m_source.isEmpty()){
qDebug() << "Error: Missing source property for GStreamer component";
return;
}
if(m_videoItem.isEmpty()){
qDebug() << "Error: Missing videoItem property for GStreamer component";
return;
}
m_pipeline = gst_pipeline_new(NULL);
m_sink = NULL;
QByteArray ba = m_source.toLatin1();
m_src = gst_element_factory_make(ba.data(), NULL);
g_assert(m_src);
m_filter = gst_element_factory_make("capsfilter", "filter");
g_assert(m_filter);
g_object_set(G_OBJECT (m_filter), "caps", gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
NULL),
NULL);
m_convert = gst_element_factory_make("videoconvert", NULL);
g_assert(m_convert);
m_crop = gst_element_factory_make("videocrop", "crop");
g_assert(m_crop);
// Crop 80 px off the left and right of each frame before the tee.
g_object_set(G_OBJECT (m_crop), "left", 80, "right", 80, NULL);
// Tee
m_tee = gst_element_factory_make("tee", "videotee");
g_assert(m_tee);
// Display queue
m_displayQueue = gst_element_factory_make("queue", "displayQueue");
g_assert(m_displayQueue);
m_upload = gst_element_factory_make("glupload", NULL);
g_assert(m_upload);
// NOTE(review): qmlglsink only accepts RGBA buffers, while jpegenc on the
// record branch does not; with no common format the tee cannot negotiate.
// Adding a videoconvert before jpegenc (or glcolorconvert before the sink)
// resolves this — see the accepted answer above.
m_sink = gst_element_factory_make("qmlglsink", NULL);
g_assert(m_sink);
// Record queue
m_recordQueue = gst_element_factory_make("queue", "recordQueue");
g_assert(m_recordQueue);
m_encode = gst_element_factory_make("jpegenc", NULL);
g_assert(m_encode);
m_mux = gst_element_factory_make("avimux", NULL);
g_assert(m_mux);
m_filesink = gst_element_factory_make("filesink", NULL);
g_assert(m_filesink);
g_object_set(G_OBJECT(m_filesink), "location", "output.avi", NULL);
// The bin takes ownership of the elements added here.
gst_bin_add_many(GST_BIN (m_pipeline), m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL);
gst_bin_add_many(GST_BIN(m_pipeline), m_tee, m_displayQueue, m_recordQueue, m_encode, m_mux, m_filesink, NULL);
// If I only link this simple pipeline, it works fine
/*
if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL)){
qDebug() << "Unable to link source";
}
*/
// Static links: src -> filter -> convert -> crop -> tee, plus the two
// branches downstream of their queues.  The tee src pads are request pads
// and are linked manually below.
if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_tee, NULL)){
qDebug() << "Unable to link source";
}
if(!gst_element_link_many(m_displayQueue, m_upload, m_sink, NULL)){
qDebug() << "Unable to link display queue";
}
if(!gst_element_link_many(m_recordQueue, m_encode, m_mux, m_filesink, NULL)){
qDebug() << "Unable to link record queue";
}
// NOTE(review): request pads obtained here must eventually be released with
// gst_element_release_request_pad() (and then unreffed) during teardown.
GstPad *teeDisplayPad = gst_element_get_request_pad(m_tee, "src_%u");
GstPad *queueDisplayPad = gst_element_get_static_pad(m_displayQueue, "sink");
GstPad *teeRecordPad = gst_element_get_request_pad(m_tee, "src_%u");
GstPad *queueRecordPad = gst_element_get_static_pad(m_recordQueue, "sink");
if(gst_pad_link(teeDisplayPad, queueDisplayPad) != GST_PAD_LINK_OK){
qDebug() << "Unable to link display tee";
}
if(gst_pad_link(teeRecordPad, queueRecordPad) != GST_PAD_LINK_OK){
qDebug() << "Unable to link record tee";
}
//gst_object_unref(teeDisplayPad);
gst_object_unref(queueDisplayPad);
//gst_object_unref(teeRecordPad);
gst_object_unref(queueRecordPad);
// Hand the QML video item to qmlglsink so it can render into it.
QQuickItem *videoItem = window()->findChild<QQuickItem *> (m_videoItem);
g_object_set(m_sink, "widget", videoItem, NULL);
// This will call gst_element_set_state(m_pipeline, GST_STATE_PLAYING) when the window is ready
window()->scheduleRenderJob (new SetPlaying (m_pipeline), QQuickWindow::BeforeSynchronizingStage);
m_bus = gst_element_get_bus(m_pipeline);
// NOTE(review): m_loop is passed as user data here but is only created two
// lines below, so busCallback receives the *old* (likely null) value —
// create the GMainLoop before installing the watch.
gst_bus_add_watch(m_bus, busCallback, m_loop);
// NOTE(review): m_bus is unreffed here, yet the destructor unrefs it again
// — that is a double unref; drop one of the two.
gst_object_unref(m_bus);
m_loop = g_main_loop_new(NULL, false);
g_main_loop_run(m_loop);
}
/**
 * Bus watch for the pipeline: logs every message and quits the main loop
 * when a fatal ERROR or EOS message arrives.
 *
 * @param bus     the pipeline bus (unused beyond the watch signature)
 * @param message the bus message to inspect
 * @param data    the GMainLoop to stop on ERROR/EOS
 * @return true so the watch stays installed
 */
static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data)
{
    qDebug() << "Callback function reached";
    GMainLoop *loop = static_cast<GMainLoop *>(data);
    const GstMessageType msgType = GST_MESSAGE_TYPE(message);
    if (msgType == GST_MESSAGE_ERROR) {
        GError *error = NULL;
        gchar *debugInfo = NULL;
        gst_message_parse_error(message, &error, &debugInfo);
        qDebug() << "Error received from element" << GST_OBJECT_NAME(message->src) << ":" << error->message;
        qDebug() << "Debugging information:" << (debugInfo ? debugInfo : "none");
        //g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (m_message->src), error->message);
        //g_printerr ("Debugging information: %s\n", debugInfo ? debugInfo : "none");
        g_clear_error (&error);
        g_free (debugInfo);
        g_main_loop_quit(loop);
    } else if (msgType == GST_MESSAGE_EOS) {
        qDebug() << "End-Of-Stream reached.";
        g_main_loop_quit(loop);
    } else {
        qDebug() << "Unexpected message received.";
    }
    return true;
}
/**
The rest of the code is probably not relevant. It contains
only destructor and some getters and setters.
**/
/**
 * Destructor: shuts the pipeline down and releases it.
 *
 * Fix: the original called gst_object_unref(m_bus) here, but m_bus was
 * already unreffed in the constructor right after gst_bus_add_watch(),
 * so this was a double unref of a dangling reference.  The pipeline is
 * driven to GST_STATE_NULL before its last reference is dropped, as
 * required by GStreamer.
 */
GStreamer::~GStreamer()
{
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(m_pipeline);
}
// Returns the currently configured source string (set via setSource()).
QString GStreamer::source() const
{
return m_source;
}
/// Stores a new source string; does nothing when the value is unchanged.
void GStreamer::setSource(const QString &source)
{
    if (m_source == source)
        return;
    m_source = source;
}
// Returns the object name of the QML item the video sink renders into.
QString GStreamer::videoItem() const
{
return m_videoItem;
}
/// Stores the name of the QML video item; no-op when unchanged.
void GStreamer::setVideoItem(const QString &videoItem)
{
    if (m_videoItem == videoItem)
        return;
    m_videoItem = videoItem;
}
All member variables are defined in the .h file.
If I don't add the tee element to the bin and link it in the pipeline, the video shows up on the screen as expected. So I guess I'm messing up the pads on the tee element.
I've been following the tutorials in GStreamers documentation, so I don't understand why it's not working.
Hope someone can help.
Ok, so the difference between the gst-launch line provided and the application code is the use of the qmlglsink element in place of glimagesink.
The problem is that qmlglsink only accepts RGBA formatted video buffers, however the jpegenc in the other branch of the tee does not accept RGBA formatted video buffers. This leads to a negotiation problem, as there is no common format supported by both branches of the tee.
The fix is to add a videoconvert element before jpegenc or a glcolorconvert element before qmlglsink so that both branches of the tee can negotiate to the same video format.
Side note: glimagesink contains a glupload ! glcolorconvert ! actual-sink internally so is converting video formats already.

Issue with Gstreamer under Qt in c++ : flow error

I'm a beginner in Gstreamer, I need to get the frame from an UDP camera and convert it to an cv::Mat(OpenCV).
I run my camera stream like this :
gst-launch-1.0 v4l2src device=/dev/video0 ! \
h264parse ! rtph264pay ! udpsink host=XXX.XXX.XXX.XXX port=5000
And in another terminal I can get the stream like this (and it works):
gst-launch-1.0 udpsrc port=5000 caps='application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264' ! \
rtph264depay ! avdec_h264 ! autovideosink
So in my C++ code here is my C++ code :
/* Appsink preroll callback: announces that the first (preroll) sample has
 * arrived and tells the pipeline to keep flowing.  The sample itself is
 * not pulled here. */
GstFlowReturn
new_preroll(GstAppSink * /*appsink*/, gpointer /*data*/) {
    g_print ("Got preroll!\n");
    return GST_FLOW_OK;
}
/**
 * Appsink "new-sample" callback: pulls one decoded frame, wraps it in a
 * cv::Mat and queues a copy for the display loop in main().
 *
 * Fixes vs. the original:
 *  - the Mat constructor used here only *wraps* map.data; the original
 *    queued that view and then unmapped/unreffed the buffer, so every
 *    queued frame pointed at freed memory (use-after-free).  A deep copy
 *    (clone()) is queued instead.
 *  - the string returned by gst_caps_to_string() is now g_free()d.
 *  - unused locals (`info`, `frameSize`) removed.
 */
GstFlowReturn
new_sample(GstAppSink *appsink, gpointer data) {
    static int framecount = 0;
    framecount++;
    GstSample *sample = gst_app_sink_pull_sample(appsink);
    GstCaps *caps = gst_sample_get_caps(sample);
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    // ---- Read frame and convert to opencv format ---------------
    GstMapInfo map;
    gst_buffer_map (buffer, &map, GST_MAP_READ);
    // TODO: resolve height / width from caps instead of hard-coding 320x240
    Mat frame(Size(320, 240), CV_8UC3, (char*)map.data, Mat::AUTO_STEP);
    // TODO: synchronize this....
    frameQueue.push_back(frame.clone()); // deep copy — must outlive the unmap below
    gst_buffer_unmap(buffer, &map);
    // ------------------------------------------------------------
    // print dot every 30 frames
    if (framecount % 30 == 0) {
        g_print (".");
    }
    // show caps on first frame
    if (framecount == 1) {
        gchar *capsStr = gst_caps_to_string(caps);
        g_print ("%s\n", capsStr);
        g_free (capsStr); // was leaked in the original
    }
    gst_sample_unref (sample);
    return GST_FLOW_OK;
}
/* Bus watch: logs every message type; on ERROR it prints the message and
 * frees the parsed details.  Returning TRUE keeps the watch installed so
 * we are notified of further messages. */
static gboolean
my_bus_callback (GstBus *bus, GstMessage *message, gpointer data) {
    g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
    const GstMessageType msgType = GST_MESSAGE_TYPE (message);
    if (msgType == GST_MESSAGE_ERROR) {
        GError *err = NULL;
        gchar *debug = NULL;
        gst_message_parse_error (message, &err, &debug);
        g_print ("Error: %s\n", err->message);
        g_error_free (err);
        g_free (debug);
    } else if (msgType == GST_MESSAGE_EOS) {
        /* end-of-stream: nothing to do */
    } else {
        /* unhandled message */
    }
    return TRUE;
}
int
main (int argc, char *argv[])
{
GError *error = NULL;
gst_init (&argc, &argv);
gchar *descr = g_strdup(
"udpsrc port=5000 ! "
"caps=application/x-rtp, media=(string)video, clock-rate=(int)9000, encoding-name=(string)H264 ! "
"rtph264depay ! "
"avdec_h264 ! "
"videoconvert ! "
"appsink name=sink "
);
GstElement *pipeline = gst_parse_launch (descr, &error);
if (pipeline== NULL) {
g_print ("could not construct pipeline: %s\n", error->message);
g_error_free (error);
exit (-1);
}
/* get sink */
GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
gst_app_sink_set_drop((GstAppSink*)sink, true);
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL);
GstBus *bus;
guint bus_watch_id;
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL);
gst_object_unref (bus);
GstStateChangeReturn test=gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
qDebug() <<test<< " this is the test";
namedWindow("edges",1);
while(1) {
g_main_iteration(false);
// TODO: synchronize...
if (frameQueue.size() >10) {
// this lags pretty badly even when grabbing frames from webcam
Mat frame = frameQueue.front();
imshow("edges", frame);
cv::waitKey(30);
frameQueue.clear();
}
}
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
And I get the ERROR :
Error: Internal data flow error.
I think it is from my declaration of my pipeline, but I can't find what's wrong with it.
Any suggestions?
You have a ! after udpsrc port=5000. That one is not present in your original pipeline. I haven't checked any further, maybe print out the pipeline and double check if its the desired one.

Timestamping error/or comptuer too slow with gstreamer/gstbasesink in Qt

I am building a simple video player in Qt using gstreamer-1.0. When I run it from Qt, or the .exe on my pc, everything runs and works ok. But when I try it on another pc, it plays for a few seconds, then skips some seconds/minutes, and so on. I guess the problem is with sync; I have tried setting the d3dvideosink property sync=false, but it is the same. I have read many similar threads but none seems to help.
A lot of buffers are being dropped.
Additional debug info:
gstbasesink.c(2846): gst_base_sink_is_too_late ():
There may be a timestamping problem, or this computer is too slow.
I have tried setting different properties, but none helped. I have seen the following threads, but still the same problem:
Thread 1
Thread 2
Thread 3
On Thread 3 there is a suggestion setting "do-timestamp" property on appsrc to TRUE, but I use uridecodebin as source that has not a "do-timestamp" property.
My pipeline is as follows:
uridecodebin ! audioconvert ! volume ! autoaudiosink ! videoconvert ! gamma ! d3dvideosink
Thanks in Advance!
Here is some code from the elements creation/linking. Please comment if you need any other code.
// Create the elements
data.source = gst_element_factory_make ( "uridecodebin", "source" );
data.audio_convert = gst_element_factory_make ( "audioconvert", "audio_convert" );
data.volume = gst_element_factory_make ( "volume", "volume");
data.audio_sink = gst_element_factory_make ( "autoaudiosink", "audio_sink" );
data.video_convert = gst_element_factory_make ( "videoconvert", "video_convert" );
data.filter = gst_element_factory_make ( "gamma", "filter");
data.video_sink = gst_element_factory_make ( "d3dvideosink", "video_sink" );
// Create the empty pipeline
data.pipeline = gst_pipeline_new ("test-pipeline");
// NOTE(review): the '}' at the end of the g_printerr line below closes the
// if() early, making the following `return ;` unconditional — the braces
// look mangled in this paste; verify against the real source file.
if (!data.pipeline || !data.source || !data.audio_convert || !data.volume || !data.audio_sink
|| !data.video_convert || !data.filter || !data.video_sink ) {
g_printerr ("Not all elements could be created.\n");}
return ;
}
// Build the pipeline. Note that we are NOT linking the source at this point. We will do it later.
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.audio_convert , data.volume, data.audio_sink,
data.video_convert, data.filter, data.video_sink, NULL);
// Static links for the audio branch: audioconvert -> volume -> autoaudiosink.
if (!gst_element_link (data.audio_convert, data.volume)) {
g_printerr ("Elements AUDIO_CONVERT - VOLUME could not be linked.\n");
gst_object_unref (data.pipeline);
return ;
}
if (!gst_element_link (data.volume, data.audio_sink)) {
g_printerr ("Elements VOLUME - AUDIO_SINK could not be linked.\n");
gst_object_unref (data.pipeline);
return ;
}
// Static links for the video branch: videoconvert -> gamma -> d3dvideosink.
if (!gst_element_link(data.video_convert, data.filter)) {
g_printerr("Elements VIDEO_CONVERT - FILTER could not be linked.\n");
gst_object_unref(data.pipeline);
return ;
}
if (!gst_element_link(data.filter, data.video_sink)) {
g_printerr("Elements FILTER - VIDEO_SINK could not be linked.\n");
gst_object_unref(data.pipeline);
return ;
}
When I open video:
// Set the URI to play
// NOTE(review): "file:///"+filename yields four slashes if filename is
// already absolute with a leading '/' — confirm the expected input.
QString filePath = "file:///"+filename;
QByteArray ba = filePath.toLatin1();
// c_filePath points into `ba`; it stays valid only while `ba` lives.
const char *c_filePath = ba.data();
// Reset the pipeline before configuring the new URI.
ret = gst_element_set_state (data.pipeline, GST_STATE_NULL);
gint64 max_lateness = 2000000; //2 milli sec
g_object_set (data.source, "uri", c_filePath, NULL);
// I have tried setting the following properties, but none helped
// g_object_set (data.source, "do-timestamp", true, NULL);
// g_object_set( data.video_sink, "sync", false, NULL);
// g_object_set( data.video_sink, "max-lateness", max_lateness, NULL);
// NOTE(review): this prints the *address* of c_filePath, not the string.
qDebug() << &c_filePath;
// Link video_sink with playingWidget->winId()
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data.video_sink), xwinid);
// Connect to the pad-added signal
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data) ;
// Start playing
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
// NOTE(review): the '}' at the end of the singleShot line closes this if();
// also QTimer::singleShot is asynchronous, so execution continues into the
// lines below even on failure — an early return here is probably intended.
if (ret == GST_STATE_CHANGE_FAILURE) {
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
// Exit application
QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));}
data.playing = TRUE;
data.rate = 1.0;
// Iterate - gets the position and length every 200 msec
g_print ("Running...\n");
emit setMsg( "Running...\n" );
currFileName = filename;
timer->start(500);
Pad_added_handler:
// uridecodebin "pad-added" handler: links each newly exposed pad to the
// matching branch — audio/x-raw pads to data->audio_queue, video/x-raw
// pads to data->video_queue.  All other pad types are ignored.
void gst_pipeline::pad_added_handler(GstElement *src, GstPad *new_pad, CustomData *data)
{
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
// Both sink pads are fetched up-front and unreffed at the end.
GstPad *sink_pad_audio = gst_element_get_static_pad (data->audio_queue, "sink");
GstPad *sink_pad_video = gst_element_get_static_pad (data->video_queue, "sink");
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
// If our audio converter is already linked, we have nothing to do here
// NOTE(review): the `goto exit` is commented out, so an already-linked pad
// only logs and then falls through to a second gst_pad_link() attempt.
if (gst_pad_is_linked (sink_pad_audio))
{
g_print (" We have already linked sink_pad_audio. Ignoring.\n");
// goto exit;
}
// If our video converter is already linked, we have nothing to do here
// NOTE(review): same here — without the goto this check has no effect.
if (gst_pad_is_linked (sink_pad_video))
{
g_print (" We have already linked sink_pad_video. Ignoring.\n");
// goto exit;
}
// Check the new pad's type
// NOTE(review): gst_pad_get_current_caps() can return NULL before caps are
// negotiated; gst_caps_get_structure() would then crash — worth guarding.
new_pad_caps = gst_pad_get_current_caps (new_pad); //gst_pad_get_caps
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (g_str_has_prefix (new_pad_type, "audio/x-raw"))
{
// Attempt the link
ret = gst_pad_link (new_pad, sink_pad_audio);
if (GST_PAD_LINK_FAILED (ret))
{ g_print (" Type is '%s' but link failed.\n", new_pad_type); }
else
{ g_print (" Link succeeded (type '%s').\n", new_pad_type); }
}
else if (g_str_has_prefix (new_pad_type, "video/x-raw"))
{
// Attempt the link
ret = gst_pad_link (new_pad, sink_pad_video);
if (GST_PAD_LINK_FAILED (ret))
{ g_print (" Type is '%s' but link failed.\n", new_pad_type); }
else
{ g_print (" Link succeeded (type '%s').\n", new_pad_type); }
}
else
{
g_print (" It has type '%s' which is not audio/x-raw OR video/x-raw. Ignoring.\n", new_pad_type);
goto exit;
}
exit:
// Unreference the new pad's caps, if we got them
if (new_pad_caps != NULL)
{ gst_caps_unref (new_pad_caps); g_print("EXIT"); msg_STRING2 += "EXIT\n" ; }
// Unreference the sink pad
gst_object_unref (sink_pad_audio);
gst_object_unref (sink_pad_video);
}

running the gstreamer pipeline (not able to get video and audio data in the callback)

I'm a newbie to gstreamer and I wanted to get the audio and video both buffers from a 3gp file and do some processing in the callback.
(I'm starting my pipeline into a separate thread, pipeline gives audio buffers in a callback AudioCallback and video buffers in VideoCallback.)
This is how my pipeline looks:
GstElement* audioQueue;//global variable , needed in on_pad_added (cant pass both while connecting demuxer to callback)
GstElement* videoQueue;//global variable , needed in on_pad_added (cant pass both while connecting demuxer to callback)
//static functions
/* Bus watch for the 3gp pipeline: quits the main loop on EOS, and on
 * ERROR after printing the parsed error message.  Returning true keeps
 * the watch installed. */
static gboolean
bus_call (GstBus* bus, GstMessage* msg, gpointer data)
{
    GMainLoop* loop = (GMainLoop*) data;
    const GstMessageType msgType = GST_MESSAGE_TYPE (msg);
    if (msgType == GST_MESSAGE_EOS)
    {
        g_main_loop_quit (loop);
    }
    else if (msgType == GST_MESSAGE_ERROR)
    {
        gchar *debug = NULL;
        GError *error = NULL;
        gst_message_parse_error (msg, &error, &debug);
        g_free (debug);
        g_printerr ("Error: %s\n", error->message);
        g_error_free (error);
        g_main_loop_quit (loop);
    }
    return true;
}
/* Links src_element's source pad to sink_element's sink pad, printing a
 * diagnostic when the (compatible-caps) link cannot be made. */
static void link_two_elements(GstElement* src_element, GstElement* sink_element)
{
    const gboolean linked = gst_element_link(src_element, sink_element);
    if (!linked)
        g_printerr ("Linking Error");
}
/**
 * qtdemux "pad-added" handler: reads the new pad's caps and links audio
 * pads to the global audioQueue and video pads to the global videoQueue.
 *
 * Fixes vs. the original:
 *  - the GstCaps returned by gst_pad_get_caps() are now unreffed (they
 *    were leaked on every pad).
 *  - the video branch now NULL-checks its sink pad like the audio branch.
 */
static void
on_pad_added (GstElement *element,
GstPad *pad,
gpointer data)
{
    GstCaps *caps;
    GstStructure *str;
    gchar *tex;
    GstPad* sinkpad;
    /* check media type */
    caps = gst_pad_get_caps (pad);
    str = gst_caps_get_structure (caps, 0);
    tex = (gchar*)gst_structure_get_name(str); /* owned by the structure — do not free */
    if (g_strrstr(tex, "audio"))
    {
        sinkpad = gst_element_get_static_pad (audioQueue, "sink");
        if (sinkpad)
        {
            gst_pad_link (pad, sinkpad);
            gst_object_unref (sinkpad);
        }
    }
    if (g_strrstr(tex, "video"))
    {
        sinkpad = gst_element_get_static_pad (videoQueue, "sink");
        if (sinkpad)
        {
            gst_pad_link (pad, sinkpad);
            gst_object_unref (sinkpad);
        }
    }
    gst_caps_unref (caps); /* was leaked in the original */
}
void runPipeline()
{
GMainLoop *loop;
GstElement *__pPipeline, *source, *demuxer, *audioDecoder, *audioConverter, *audioresample, /**audioQueue,*/ *audioSink, *videoDecoder, *videoSink, /**videoQueue,*/ *ffmpegcolorspace, *videoscale;
GstBus* bus;
//Initialisation
gst_init (null,null);
loop = g_main_loop_new (NULL, FALSE);
// Create gstreamer elements
__pPipeline = gst_pipeline_new("test_appsink");
source = gst_element_factory_make ("filesrc", "file-source");
demuxer = gst_element_factory_make("qtdemux", "demuxer");
//audioDecoder = gst_element_factory_make("ffdec_mp3", "audioDecoder");
audioDecoder = gst_element_factory_make("decodebin", "audioDecoder");
audioConverter = gst_element_factory_make("audioconvert", "audioConverter");
audioresample = gst_element_factory_make("audioresample", "audioresample");
audioSink = gst_element_factory_make("appsink", "audioSink");
audioQueue = gst_element_factory_make("queue2", "audioQueue");
//videoDecoder = gst_element_factory_make("ffdec_h264", "videoDecoder");
videoQueue = gst_element_factory_make("queue2", "videoQueue");
videoDecoder = gst_element_factory_make("decodebin ", "videoDecoder");
ffmpegcolorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace");
videoscale = gst_element_factory_make("videoscale", "videoscale");
videoSink = gst_element_factory_make("appsink", "videoSink");
//appsink = gst_element_factory_make("appsink", "sink-buffer");
if (!__pPipeline || !source || !demuxer || !audioDecoder || !audioConverter ||!audioresample || !audioSink || !videoSink || !audioQueue || !videoQueue || !videoDecoder || !ffmpegcolorspace || !videoscale )
{
//return -1;
}
//we set the input filename to the source element
g_object_set (G_OBJECT (source), "location", "/etc/20000101-161404.3gp", NULL);
//Make appsink emit the "new-preroll" and "new-buffer" signals.
gst_app_sink_set_emit_signals ((GstAppSink*) audioSink, TRUE);
gst_app_sink_set_emit_signals ((GstAppSink*) videoSink, TRUE);
//we add a message handler
bus = gst_pipeline_get_bus (GST_PIPELINE (__pPipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
//we add all elements into the pipeline
gst_bin_add_many (GST_BIN (__pPipeline),
source, demuxer, videoDecoder, audioDecoder, audioConverter, audioresample, audioSink, videoSink,
audioQueue, videoQueue, ffmpegcolorspace, videoscale, NULL);
//link source and demuxer seperately
link_two_elements(source, demuxer);
//link rest of the elements
int retValVideoLinking = (int)gst_element_link_many (videoQueue, videoDecoder, ffmpegcolorspace, videoscale, videoSink, NULL);
int retValAudioLinking = (int)gst_element_link_many (audioQueue, audioDecoder, audioConverter, audioresample, audioSink, NULL);
gulong sigConRet = g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), null);
_ArAudioIn audioInstance = _ArAudioIn::GetArAudioInstance();
g_signal_connect (videoSink, "new-buffer", G_CALLBACK (AudioCallback), null);//AudioCallback static API
g_signal_connect (audioSink, "new-buffer", G_CALLBACK (VideoCallback), null);//VideoCallback static API
//Set the pipeline to "playing" state
GstStateChangeReturn state = gst_element_set_state (__pPipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
return null;
}
I'm just getting a single video buffer in my VideoCallback, and in on_pad_added I'm getting a linking error for the audio pad:
GST_PAD_LINK_NOFORMAT = -4,
I'm trying to link the queue's sink pad to the pad received in on_pad_added; the same approach works for video but not for audio.
If anybody has any idea about this then please give me some pointers to get rid off this err and make this pipeline work.
It would be nice if you cleaned up your code before asking us to debug it. As a general piece of advice, check the return values and either log a warning or simply exit(1) to ensure that your pipeline setup works (e.g. in the pad_added handler). I'd also start with a normal video and audio sink to check that it plays.
Finally, it is usually a bad idea to pull out data from the pipleine. Perhaps you could tell what you want to do with the data once you have it in your callback, so that we can give better advice.