How to use the videomixer element in GStreamer?

I want to play two local video files using GStreamer, but I get a segmentation fault from inside libgstvideomixer.so. What is wrong with my code? The videomixer element is needed to play the two videos. Or should I use videobox for that?
gst-launch --no-fault filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! videomixer name=mix ! xvimagesink sync=false filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! mix.
static void p_gst_init(void)
{
    App *app = &s_app;
    GstBus *bus;
    GstElement *parse, *decoder, *queue;
    GstElement *parse2, *decoder2, *queue2;

    gst_init (NULL, NULL);

    /* create a mainloop to get messages */
    app->loop = g_main_loop_new (NULL, TRUE);
    app->playbin = gst_pipeline_new ("pipeline");

    /* first branch: filesrc ! queue ! typefind ! ffdec_h264 */
    app->appsrc = gst_element_factory_make ("filesrc", "disk_source");
    g_object_set (G_OBJECT (app->appsrc), "location", "/mnt/upan/test.264", NULL);
    queue = gst_element_factory_make ("queue", "queue");
    parse = gst_element_factory_make ("typefind", "parse");
    decoder = gst_element_factory_make ("ffdec_h264", "decoder");

    /* second branch: filesrc ! queue ! typefind ! ffdec_h264 */
    GstElement *filesrc2;
    filesrc2 = gst_element_factory_make ("filesrc", "disk_source2");
    /* was g_object_set(appsrc2, ...): appsrc2 does not exist, use filesrc2 */
    g_object_set (G_OBJECT (filesrc2), "location", "/mnt/upan/source.264", NULL);
    queue2 = gst_element_factory_make ("queue", "queue2");
    parse2 = gst_element_factory_make ("typefind", "parse2");
    decoder2 = gst_element_factory_make ("ffdec_h264", "decoder2");

    /*
    GstElement *videobox;
    videobox = gst_element_factory_make ("videobox", NULL);
    g_object_set (videobox, "alpha", 0, "border-alpha", 0, "bottom", 100, "left", 100, "right", 100, "top", 100, NULL);
    */

    GstElement *videomixer;
    videomixer = gst_element_factory_make ("videomixer", "videomixer");

    app->xvimagesink = gst_element_factory_make ("xvimagesink", "play_video");
    /* "sync" is the base-sink property that matches sync=false on the
       command line; "synchronous" is an unrelated X11 debugging property */
    g_object_set (G_OBJECT (app->xvimagesink), "sync", FALSE, NULL);

    gst_bin_add_many (GST_BIN (app->playbin), app->appsrc, queue, parse, decoder,
                      videomixer, app->xvimagesink, filesrc2, queue2, parse2, decoder2, NULL);

    if (gst_element_link_many (app->appsrc, queue, parse, decoder, videomixer, NULL))
        printf ("---------link element success-----------------\n");
    else
        printf ("---------link element failed-----------------\n");

    gst_element_link_many (filesrc2, queue2, parse2, decoder2, videomixer, NULL);
    gst_element_link_many (videomixer, app->xvimagesink, NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (app->playbin));
    gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);
    gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, app);

    /* note: "need-data" is an appsrc signal; it does not exist on the
       filesrc created above, so this connect has no effect here */
    g_signal_connect (app->appsrc, "need-data", G_CALLBACK (feed_data), app);

    return;
}
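One thing worth checking: with both decoders feeding videomixer at its default position, the two frames simply overlap. The commented-out videobox is the usual GStreamer 0.10-era way to offset one branch before the mixer. A minimal sketch under that assumption (the offsets are illustrative values, not from the post):

/* Sketch: shift the second branch right by 320 px before the mixer
 * (GStreamer 0.10 style; the -320 offset is an illustrative value). */
GstElement *box2 = gst_element_factory_make ("videobox", "box2");
/* negative left grows the canvas and moves the picture to the right;
 * border-alpha=0 keeps the added border transparent */
g_object_set (box2, "border-alpha", 0, "left", -320, NULL);

gst_bin_add (GST_BIN (app->playbin), box2);
/* second branch becomes: ... ! ffdec_h264 ! videobox ! videomixer */
gst_element_link_many (decoder2, box2, videomixer, NULL);

On GStreamer 1.x the same effect is normally achieved by setting the xpos/ypos properties on the mixer's request sink pads instead of inserting videobox.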

Related

How to do GStreamer streaming using GstElement and udpsrc in a QWidget

I am trying to embed an IP camera stream, received via udpsrc, in a QWidget. The pipeline below works:
gst-launch-1.0 udpsrc port=20000 ! application/x-rtp,encoding-name=JPEG,payload=26,width=640,height=460 ! rtpjpegdepay ! jpegparse ! jpegdec ! videoconvert ! videoscale ! ximagesink sync=false
When I try to embed it in a QWidget, it shows only a plain window. My code is as follows:
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <QApplication>
#include <QDebug>
#include <QTimer>
#include <QWidget>

int main(int argc, char *argv[])
{
    gst_init (&argc, &argv);
    QApplication app(argc, argv);
    app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit()));

    // the widget that will host the video; xwinid must exist before
    // the overlay call below
    QWidget *window = new QWidget();
    window->resize(700, 700);
    WId xwinid = window->winId();

    // prepare the pipeline
    GstElement *pipeline = gst_pipeline_new ("pipeline");
    GstElement *src = gst_element_factory_make ("udpsrc", NULL);
    GstCaps *caps = gst_caps_from_string ("application/x-rtp,encoding-name=JPEG,payload=26,width=640,height=460");
    g_object_set(G_OBJECT(src),
                 "port", 20000,
                 "caps", caps, NULL);
    GstElement *parser = gst_element_factory_make ("rtpjpegdepay", NULL);
    GstElement *mux = gst_element_factory_make ("jpegparse", NULL);
    GstElement *parse2 = gst_element_factory_make ("jpegdec", NULL);
    GstElement *dec = gst_element_factory_make ("videoconvert", NULL);
    GstElement *conv = gst_element_factory_make ("videoscale", NULL);
    GstElement *sink = gst_element_factory_make ("ximagesink", NULL);
    g_object_set(G_OBJECT(sink), "sync", FALSE, NULL);
    gst_bin_add_many (GST_BIN (pipeline), src, parser, mux, parse2, dec, conv, sink, NULL);
    // bug in the original post: only src and sink were linked, leaving the
    // depay/parse/decode/convert/scale elements dangling
    gst_element_link_many (src, parser, mux, parse2, dec, conv, sink, NULL);

    GstState state, pending;
    // this is the call to overlay the gstreamer output onto the Qt widget
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), (guintptr)xwinid);
    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_object_unref (bus);

    // playback is started by setting the pipeline to PLAYING
    GstStateChangeReturn sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    qDebug() << "####-1" << sret;
    if (sret == GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (pipeline);
        // exit application
        QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
    }
    gst_element_get_state (pipeline, &state, &pending, 10);
    qDebug() << state << pending;
    window->show();
    return app.exec();
}
I solved it by creating the pipeline with gst_parse_launch():
GstElement *pipeline_2= gst_parse_launch("udpsrc port=20000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegparse ! jpegdec ! videoconvert ! videoscale ! ximagesink name=mySink", NULL);
GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline_2), "mySink");
QWidget *window = new QWidget();
window->setWindowTitle("udpsrc video stream");
window->resize(700, 700);
WId xwinid = window->winId();
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), (guintptr)xwinid);
window->show();
GstStateChangeReturn sret = gst_element_set_state (pipeline_2, GST_STATE_PLAYING);
Hope this helps.
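A related pattern worth knowing (not part of the answer above): instead of setting the window handle up front, you can install a bus sync handler and react to the sink's prepare-window-handle message, which removes any timing dependency between widget creation and the sink. A minimal sketch, assuming the pipeline_2 and xwinid from the answer:

#include <gst/gst.h>
#include <gst/video/videooverlay.h>

/* Sketch: hand the window handle to the sink exactly when it asks
 * for it, via the prepare-window-handle message. */
static guintptr window_handle;   /* set from xwinid before PLAYING */

static GstBusSyncReply
bus_sync_handler (GstBus *bus, GstMessage *message, gpointer user_data)
{
    if (!gst_is_video_overlay_prepare_window_handle_message (message))
        return GST_BUS_PASS;

    gst_video_overlay_set_window_handle (
        GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)), window_handle);
    gst_message_unref (message);
    return GST_BUS_DROP;
}

/* installation, before gst_element_set_state (..., GST_STATE_PLAYING): */
window_handle = (guintptr) xwinid;
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline_2));
gst_bus_set_sync_handler (bus, bus_sync_handler, NULL, NULL);
gst_object_unref (bus);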

GStreamer RTSP application for audio and video

I was trying to develop an application for the pipeline:
gst-launch-1.0 rtspsrc location="rtsp://192.168.3.30:8554/rajvi" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink
Following is the code which I have implemented:
#include <gst/gst.h>

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    gchar *name;
    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    GstCaps *p_caps = gst_pad_get_pad_template_caps(pad);
    gchar *description = gst_caps_to_string(p_caps);
    g_free(description);
    gst_caps_unref(p_caps);   /* the caps were leaked in the original */
    GstElement *depay = GST_ELEMENT(data);
    if (gst_element_link_pads(element, name, depay, "sink") == 0)
    {
        g_print("cb_new_rtspsrc_pad : failed to link elements \n");
    }
    g_free(name);
}

int main(int argc, char *argv[])
{
    GstElement *source, *video, *pipeline, *audioDepay, *audioQueue, *videoQueue,
        *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
        *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *sinkpad, *ghost_sinkpad;
    gboolean link_ok;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "source");

    /* audio bin */
    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }   /* this closing brace was missing in the original, so everything
           below was dead code inside the error branch */

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);
    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), audioDepay);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
                     audioConvert, audioResample, audioSink, NULL);
    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Error linking fields ...1 \n");
        return 0;
    }

    video = gst_bin_new ("videobin");
    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay = gst_element_factory_make ("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                                     "width", G_TYPE_INT, 176,
                                     "height", G_TYPE_INT, 144,
                                     NULL);
    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add_many(GST_BIN(video), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
                     videoSink, NULL);   /* was "videosink", an undeclared variable */

    /* set property value */
    link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok) {
        g_warning ("Failed to link element1 and element2!");
    }

    /* the ghost pad is placed on videoConvert's sink, but the chain below
       enters the bin at videoQueue, which is one reason the bins fail to link */
    sinkpad = gst_element_get_static_pad (videoConvert, "sink");
    ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
    gst_pad_set_active (ghost_sinkpad, TRUE);
    gst_element_add_pad (video, ghost_sinkpad);

    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
    {
        g_printerr("Error linking fields... 2 \n");
        return 0;
    }
    gst_bin_add_many (GST_BIN(pipeline), video, NULL);

    /* Start playing */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
                                      (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}
I am getting an error when linking the pipeline -> audio -> video bins.
If you put the video and audio elements all together in the pipeline bin, then you can do it. Figure out what your caps are for the video and audio, and you should be able to link them.
#include <gst/gst.h>
#include <cstring>
#include <iostream>

// ----------------------------------
// pad-added signal
// ----------------------------------
static void onPadAdded(GstElement* element, GstPad* pad, gpointer user_data)
{
    gchar *name;
    GstCaps *p_caps;
    GstElement *nextElement = NULL;  // stays NULL if the pad is neither audio nor video
    GstElement *pipeline = (GstElement*)user_data;

    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    p_caps = gst_pad_get_pad_template_caps(pad);

    if (strstr(name, "[CAPS FOR VIDEO CONTAIN]") != NULL)
    {
        std::cout << std::endl << "------------------------ Video -------------------------------" << std::endl;
        // the depayer sinks are already linked from the queues in main,
        // so the new rtspsrc pad has to go to the queues
        nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-queue");
    }
    else if (strstr(name, "[CAPS FOR AUDIO CONTAIN]") != NULL)
    {
        std::cout << std::endl << "------------------------ Audio -------------------------------" << std::endl;
        nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-queue");
    }

    if (nextElement != NULL)
    {
        if (!gst_element_link_filtered(element, nextElement, p_caps))
        //if (!gst_element_link_pads_filtered(element, name, nextElement, "sink", p_caps))
        {
            std::cout << std::endl << "Failed to link src to sink" << std::endl;
        }
        gst_object_unref(nextElement);
    }

    g_free(name);
    gst_caps_unref(p_caps);
}

// ----------------------------------
// main
// ----------------------------------
int main(int argc, char *argv[])
{
    GstElement *source, *pipeline, *audioDepay, *audioQueue, *videoQueue,
        *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
        *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    gboolean link_ok;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);

    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make("rtspsrc", "source");

    /* audio chain */
    audioQueue = gst_element_factory_make("queue", "audio-queue");
    audioDepay = gst_element_factory_make("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make("audioconvert", "aconv");
    audioResample = gst_element_factory_make("audioresample", "audio-resample");
    audioSink = gst_element_factory_make("autoaudiosink", "audiosink");
    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }   /* this brace was missing in the original, which left the rest
           of main inside the error branch */

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);
    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), pipeline);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
                     audioConvert, audioResample, audioSink, NULL);
    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Error linking fields ...1 \n");
        return 0;
    }

    /* video chain */
    videoQueue = gst_element_factory_make("queue", "video-queue");
    videoDepay = gst_element_factory_make("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make("h264parse", "video-parser");
    videoDecode = gst_element_factory_make("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                                     "width", G_TYPE_INT, 176,
                                     "height", G_TYPE_INT, 144,
                                     NULL);
    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add_many(GST_BIN(pipeline), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
                     videoSink, NULL);   /* was "videosink", an undeclared variable */

    /* scale down to 176x144 between videoscale and the sink */
    link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
    gst_caps_unref(capsFilter);
    if (!link_ok) {
        g_warning("Failed to link videoscale and the video sink!");
    }

    /* queue ! depay ! parse ! decode ! convert ! scale
       (the videoconvert link was missing in the original) */
    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, NULL))
    {
        g_printerr("Error linking fields... 2 \n");
        return 0;
    }

    /* Start playing */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
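The bracketed placeholders above are for the reader to fill in. As a hedged pointer (not from the original answer): rtspsrc source pads carry application/x-rtp caps whose media field says audio or video, so one common way to fill the placeholders is to inspect the pad's current caps string rather than the pad name:

/* Sketch: decide audio vs. video from the pad's current caps.
 * rtspsrc pads carry application/x-rtp caps with a media field;
 * nextElement is the variable from onPadAdded above. */
GstCaps *caps = gst_pad_get_current_caps(pad);
gchar *desc = gst_caps_to_string(caps);

if (strstr(desc, "media=(string)video") != NULL)
    nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-queue");
else if (strstr(desc, "media=(string)audio") != NULL)
    nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-queue");

g_free(desc);
gst_caps_unref(caps);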

GStreamer error: GStreamer encountered a general stream error

I am working with GStreamer on iOS, but I am getting an error from the demuxer.
It says: "Error received from element matroskademux0: GStreamer encountered a general stream error".
Here is the source code.
GstBus *bus;
GSource *bus_source;
GstElement *source1, *clrspace1, *clrspace2, *clrspace, *videobox1, *sink, *source2, *mpegenc, *avimux;
GstElement *videomixer;
GstElement *decodebin1, *decodebin2;
GstElement *queue;

/* Build pipeline */
pipeline = gst_pipeline_new("pipelinecomposer");
source1 = gst_element_factory_make("filesrc", "source1");
source2 = gst_element_factory_make("filesrc", "source2");
videobox1 = gst_element_factory_make("videobox", "videobox");
videomixer = gst_element_factory_make("videomixer", "videomixer");
clrspace1 = gst_element_factory_make("videoconvert", "clrspace1");
clrspace2 = gst_element_factory_make("videoconvert", "clrspace2");
clrspace = gst_element_factory_make("videoconvert", "clrspace");
sink = gst_element_factory_make("filesink", "sink");
mpegenc = gst_element_factory_make("x264enc", "mpegenc");
avimux = gst_element_factory_make("mpegtsmux", "avimux");
decodebin1 = gst_element_factory_make("decodebin", "decodebin1");
decodebin2 = gst_element_factory_make("decodebin", "decodebin2");
queue = gst_element_factory_make("queue", "queue");

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *filePath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, @"1.avi"];
NSString *filePath_1 = [NSString stringWithFormat:@"%@", @"/Users/john/Movies/1.webm"];
NSString *filePath_2 = [NSString stringWithFormat:@"%@", @"/Users/john/Movies/2.webm"];

g_object_set (G_OBJECT (source1), "location", [filePath_1 UTF8String], NULL);
g_object_set (G_OBJECT (source2), "location", [filePath_2 UTF8String], NULL);
g_object_set (G_OBJECT (sink), "location", [filePath UTF8String], NULL);
g_object_set (videobox1, "border-alpha", 0, "top", 0, "left", 0, NULL);

gst_bin_add_many(GST_BIN (pipeline), source1, sink, videobox1, videomixer, clrspace1, clrspace2, clrspace, source2, mpegenc, avimux, decodebin1, decodebin2, nil);
gst_element_link_many(source1, decodebin1, clrspace1, videobox1, videomixer, clrspace, mpegenc, avimux, sink, nil);
gst_element_link_many(source2, decodebin2, clrspace2, videomixer, nil);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
Please help me.
I solved the problem. I had to link the demuxer through a dynamic pad ("pad-added") instead of linking it statically.
Here is the modified source code.
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
    GstPad *sinkpad;
    GstElement *decoder = (GstElement *) data;
    /* We can now link this pad with the vp8 decoder's sink pad */
    g_print ("Dynamic pad created, linking demuxer/decoder\n");
    sinkpad = gst_element_get_static_pad (decoder, "sink");
    gst_pad_link (pad, sinkpad);
    gst_object_unref (sinkpad);
}

/* Main method for the bus monitoring code */
-(void) app_function
{
    GstBus *bus;
    GSource *bus_source;
    GstElement *source1, *clrspace1, *sink, *mpegenc, *avimux;
    GstElement *matroskademux1, *vp8dec1;

    GST_DEBUG ("Creating pipeline");

    /* Create our own GLib Main Context and make it the default one */
    context = g_main_context_new ();
    g_main_context_push_thread_default(context);

    /* Build pipeline */
    pipeline = gst_pipeline_new("pipelinecomposer");
    source1 = gst_element_factory_make("filesrc", "source1");
    clrspace1 = gst_element_factory_make("videoconvert", "clrspace1");
    sink = gst_element_factory_make("filesink", "sink");
    mpegenc = gst_element_factory_make("x264enc", "mpegenc");
    avimux = gst_element_factory_make("mpegtsmux", "avimux");
    matroskademux1 = gst_element_factory_make("matroskademux", "matroskademux1");
    vp8dec1 = gst_element_factory_make("vp8dec", "vp8dec1");

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *filePath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, @"1.avi"];
    NSString *filePath_1 = [NSString stringWithFormat:@"%@", @"/Users/johanbasore1/Movies/1.webm"];

    g_object_set (G_OBJECT (source1), "location", [filePath_1 UTF8String], NULL);
    g_object_set (G_OBJECT (sink), "location", [filePath UTF8String], NULL);

    /* the demuxer's source pads appear at runtime, so link them in a callback */
    g_signal_connect(matroskademux1, "pad-added", G_CALLBACK(on_pad_added), vp8dec1);

    gst_bin_add_many(GST_BIN (pipeline), source1, matroskademux1, vp8dec1, sink, clrspace1, mpegenc, avimux, nil);
    gst_element_link(source1, matroskademux1);
    gst_element_link_many(vp8dec1, clrspace1, mpegenc, avimux, sink, nil);
}
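Note that app_function as posted never starts the pipeline. A hedged completion (assuming the pipeline and context variables used above) would set it to PLAYING and spin the GLib loop:

/* Sketch: start playback and run the main loop; assumes the
 * pipeline and context variables from app_function above. */
gst_element_set_state (pipeline, GST_STATE_PLAYING);

GMainLoop *main_loop = g_main_loop_new (context, FALSE);
g_main_loop_run (main_loop);

/* on shutdown */
g_main_loop_unref (main_loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
g_main_context_pop_thread_default (context);
g_main_context_unref (context);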

GStreamer UDP-source PCM playback

I have these gst-launch parameters that do what I want:
gst-launch-1.0.exe udpsrc port=22122 ! audio/x-raw,format=S16LE,rate=16000,channels=1 ! autoaudiosink
However, I cannot convert it into code. I'm trying the following:
GstElement *pipeline = gst_pipeline_new("audio-player");
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
guint bus_watch_id = gst_bus_add_watch(bus, bus_call, m_gstMainLoop);
gst_object_unref(bus);

GstElement *source = gst_element_factory_make("udpsrc", "udpsrc0");
GstElement *sink = gst_element_factory_make("autoaudiosink", "autoaudiosink0");
/* bug: port 7200 does not match the 22122 used on the command line */
g_object_set(G_OBJECT(source), "port", 7200, "buffer-size", 1000000, NULL);
gst_bin_add_many(GST_BIN(pipeline), source, sink, NULL);

GstCaps *caps = gst_caps_new_simple("audio/x-raw",
                                    "format", G_TYPE_STRING, "S16LE",
                                    /* bug: layout values are lower-case ("interleaved") */
                                    "layout", G_TYPE_STRING, "INTERLEAVED",
                                    "rate", G_TYPE_INT, 16000,
                                    "channels", G_TYPE_INT, 1,
                                    NULL);
gst_element_link_filtered(source, sink, caps);
gst_caps_unref(caps);

gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(m_gstMainLoop);
In the dot graph the two pipelines look almost alike, but not identical, and I can't figure out what I'm missing.
Not sure why, but it works if I leave out the layout field (which should be lower-case "interleaved" if given), and I had also entered the wrong port number (doh!).
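Putting both fixes together, a corrected version of the port and caps setup might look like this (a sketch matching the original command line, not verbatim from the poster):

/* Sketch: port and caps matching the working gst-launch line.
 * The layout field is optional; if given it must be lower-case. */
g_object_set(G_OBJECT(source), "port", 22122, "buffer-size", 1000000, NULL);

GstCaps *caps = gst_caps_new_simple("audio/x-raw",
                                    "format", G_TYPE_STRING, "S16LE",
                                    "layout", G_TYPE_STRING, "interleaved",
                                    "rate", G_TYPE_INT, 16000,
                                    "channels", G_TYPE_INT, 1,
                                    NULL);
gst_element_link_filtered(source, sink, caps);
gst_caps_unref(caps);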

GstCaps filter issue

I am trying to create a filter using caps:
GMainLoop *loop;
GstElement *pipeline, *source, *demuxer, *decoder, *typefind, *conv, *sink;
GstBus *bus;
guint bus_watch_id;

loop = g_main_loop_new(NULL, FALSE);
pipeline = gst_pipeline_new ("audio-player");
source = gst_element_factory_make ("filesrc", "file-source");
typefind = gst_element_factory_make ("typefind", "typefinder");
demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
conv = gst_element_factory_make ("audioconvert", "converter");
sink = gst_element_factory_make ("autoaudiosink", "audio-output");
if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
    return -1;
}
g_signal_connect (typefind, "have-type", G_CALLBACK (cb_typefound), loop);
g_object_set(G_OBJECT(source), "location", name_of_file, NULL);

bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);

gst_bin_add_many(GST_BIN(pipeline), source, typefind, demuxer, decoder, conv, sink, NULL);

GstCaps *my_caps = gst_caps_new_simple ("audio/x-raw", "rate", G_TYPE_INT, 384, NULL);
gboolean test = FALSE;
test = gst_element_link_filtered(source, typefind, my_caps);
test = gst_element_link(typefind, demuxer);
test = gst_element_link_many(decoder, conv, sink, NULL);
g_signal_connect(demuxer, "pad-added", G_CALLBACK(on_pad_added), decoder);
......
Debug: gsttypefindelement.c(1169): gst_type_find_element_loop (): /GstPipeline:audio-player/GstTypeFindElement:typefinder:
stream stopped, reason not-linked
The test variable is 1 after gst_element_link_filtered(source, typefind, my_caps), 0 after gst_element_link(typefind, demuxer), and 1 after gst_element_link_many(decoder, conv, sink, NULL).
If I simply change gst_element_link_filtered to gst_element_link, everything works and the file plays.
What am I doing wrong?
You seem to have also posted this to the gst-devel mailing list, and you have a reply there:
http://gstreamer-devel.966125.n4.nabble.com/GstCaps-filter-issue-td4672796.html
Setting a filter caps of raw audio between the filesrc and the demuxer is wrong when you want to play an Ogg file: at that point in the pipeline the data is still compressed Ogg, not raw audio. You should explain better what you want to do so that we can give more refined guidance.
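To make that concrete, a hedged sketch (not from the mailing-list reply): a rate constraint on raw audio belongs after decoding, and needs an audioresample in front of it to actually convert the rate. The resample element and the 48000 value here are illustrative additions, not from the question:

/* Sketch: constrain the raw-audio rate after decoding, not between
 * filesrc and the demuxer. Assumes the elements from the question,
 * plus a hypothetical audioresample element; 48000 is illustrative. */
GstElement *resample = gst_element_factory_make ("audioresample", "resample");
gst_bin_add (GST_BIN (pipeline), resample);

/* filesrc ! typefind ! oggdemux (compressed data: no raw caps here) */
gst_element_link (source, typefind);
gst_element_link (typefind, demuxer);

/* decoder ! audioconvert ! audioresample ! audio/x-raw,rate=48000 ! sink
   (the demuxer-to-decoder link still happens in the pad-added callback) */
GstCaps *rate_caps = gst_caps_new_simple ("audio/x-raw",
                                          "rate", G_TYPE_INT, 48000, NULL);
gst_element_link_many (decoder, conv, resample, NULL);
gst_element_link_filtered (resample, sink, rate_caps);
gst_caps_unref (rate_caps);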