How to link v4l2src to capsfilter in C for gstreamer-1.14

I have a gstreamer media pipeline, shown below, which I am trying to convert into C code. The command line works fine.
gst-launch-1.0 v4l2src device=/dev/video1 ! capsfilter caps=video/x-raw,width=1280,height=720,format=UYVY ! queue ! videoconvert ! queue ! capsfilter caps=video/x-raw,format=NV12,width=1280,height=720,pixel-aspect-ratio=1/1 ! v4l2h264enc extra-controls="controls,h264_level=12,h264_profile=1" ! h264parse ! autovideosink
I have written the code and it compiles successfully. When I execute the code, the v4l2src element is unable to link to the capsfilter. I have searched the internet but could not rectify the problem. Can someone point out what I am doing wrong?
The code snippet is below:
/* Create the gstreamer elements */
source = gst_element_factory_make ("v4l2src", "source");
capsfilter = gst_element_factory_make ("capsfilter", "Caps-Filter");
capsfilter2 = gst_element_factory_make ("capsfilter", "caps-filter2");
video_convert = gst_element_factory_make ("videoconvert", "Video Convert");
queue1 = gst_element_factory_make ("queue", "Encoded Video Queue 1");
queue2 = gst_element_factory_make ("queue", "Encoded Video Queue 2");
encoder = gst_element_factory_make ("v4l2h264enc", "HW Accelerated Encoder");
H264_pay = gst_element_factory_make ("h264parse", "Payload-encode H264 video into RTP packets");
sink = gst_element_factory_make("autovideosink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!source || !capsfilter || !capsfilter2 || !video_convert || !queue1 || !queue2 || !encoder || !H264_pay || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set Source element properties */
g_object_set (G_OBJECT(source), "device", "/dev/video1", NULL);
GstCaps* filtercaps = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)UYUY");
GstCaps* vconvertfilter = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)NV12,pixel-aspect-ratio=1/1");
GstStructure *test = gst_structure_new_from_string("controls,h264_level=12,h264_profile=1");
g_object_set(G_OBJECT(capsfilter), "caps", filtercaps,NULL);
g_object_set(G_OBJECT(capsfilter2), "caps", vconvertfilter, NULL);
g_object_set (G_OBJECT(encoder), "extra-controls", test, NULL);
gst_caps_unref(filtercaps);
gst_caps_unref(vconvertfilter);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline),
source, capsfilter,
queue1, video_convert, queue2,
capsfilter2, encoder,
H264_pay, sink, NULL);
if(!gst_element_link(source, capsfilter))
{
g_printerr("Unable to link Source to filter. check your caps. \n");
gst_object_unref (pipeline);
}
if (gst_element_link_many (capsfilter, queue1, video_convert, NULL) != TRUE)
{
g_printerr("Capsfilter could not be linked to queue1. \n");
gst_object_unref (pipeline);
}
if (gst_element_link_many (video_convert, queue2, capsfilter2, encoder, H264_pay, NULL) != TRUE)
{
g_printerr("video_convert could not be linked to queue2. \n");
gst_object_unref (pipeline);
}
if(gst_element_link_many (H264_pay, sink, NULL) != TRUE)
{
g_printerr("parse could not link to sink.\n");
gst_object_unref (pipeline);
}
I get the error below:
Unable to link Source to filter. check your caps.
Can somebody help me correct the mistake?
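One mismatch stands out when comparing the code with the working gst-launch line: the command uses format=UYVY, but the caps string in the code reads format=(string)UYUY. Since UYUY is not a raw video format v4l2src can offer, the intersection between the source pad caps and the filter caps is empty, and the link fails. A minimal sketch of the corrected caps, assuming the device really outputs UYVY as in the command line:
/* Build the same caps as the working command line (UYVY, not UYUY) */
GstCaps* filtercaps = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)UYVY");
g_object_set(G_OBJECT(capsfilter), "caps", filtercaps, NULL);
gst_caps_unref(filtercaps);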

Related

GStreamer - Pipeline how to connect filesrc to qmlglsink

I'm new to the world of GStreamer, so I can't figure out how it works and how to pair all the GstElements.
I want to merge video (MP4 for example, or any other video format) with QML (from Qt) as an overlay.
This example works perfectly fine.
GstElement *pipeline = gst_pipeline_new(NULL);
GstElement *src = gst_element_factory_make("videotestsrc",NULL);
GstElement *glupload = gst_element_factory_make("glupload",NULL);
GstElement *sink = gst_element_factory_make("qmlglsink",NULL);
g_assert(src && glupload && sink);
gst_bin_add_many(GST_BIN(pipeline), src, glupload, sink, NULL);
gst_element_link_many(src, glupload, sink, NULL);
But that example uses videotestsrc as the source; I would prefer to use something like filesrc.
I tried this code:
GstElement *pipeline = gst_pipeline_new (NULL);
GstElement *src = gst_element_factory_make ("filesrc", "file-source");
GstElement *parser = gst_element_factory_make("h264parse",NULL);
GstElement *decoder = gst_element_factory_make("avdec_h264",NULL);
GstElement *colors = gst_element_factory_make("glcolorconvert",NULL);
GstElement *glupload = gst_element_factory_make ("glupload", NULL);
GstElement *sink = gst_element_factory_make ("qmlglsink", NULL);
g_assert (src && parser && decoder && colors && glupload && sink);
g_object_set (G_OBJECT (src), "location", "file:///home/test.mp4", NULL);
gst_bin_add_many (GST_BIN (pipeline), src, parser, decoder, glupload, colors, sink, NULL);
gst_element_link_many (src, parser, decoder, glupload, colors, sink, NULL);
It compiles, but the output is just a black screen.
Since I'm not sure how the GStreamer pipeline works, here is what I tried.
First, get the file from disk with filesrc, then parse it with h264parse and decode it with avdec_h264. Then forward that (I guess raw uncompressed data) to glupload, and fix the colors with glcolorconvert, since qmlglsink uses RGBA while avdec_h264 outputs I420. After the colors are adjusted, forward it to qmlglsink to be displayed in QML.
I'm missing something, and I don't know how to pair the GstElements; as I said, I need to pair filesrc (any video format) with qmlglsink.
You can try something like below:
MediaPlayer{
id: playVideo
source: "gst-pipeline: filesrc location=/home/root/skim-debris.mp4 ! qtdemux ! avdec_h264 ! qtvideosink"
autoLoad: true
autoPlay: true
playbackRate: 1.0
loops: 10
}
VideoOutput {
anchors.fill: parent
source: playVideo
}
It is easier to use a bin or any of GStreamer's auto-pluggers.
But the main issue here is that you are treating an MP4 file as a raw H.264 stream. That cannot work; you need to demux the media streams out of your container.
E.g. the pipeline should be something like this:
gst-launch-1.0 filesrc location=/home/test.mp4 ! qtdemux ! \
h264parse ! avdec_h264 ! glupload ! glcolorconvert ! qmlglsink
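In C this pipeline needs one extra step: qtdemux creates its source pads dynamically, so it cannot be linked to h264parse with gst_element_link_many at construction time. A minimal sketch of the demuxing part, assuming the same elements as above (function and variable names are illustrative):
#include <gst/gst.h>

/* qtdemux only exposes its src pads after parsing the file,
 * so the demux -> parser link is made from the "pad-added" callback. */
static void on_pad_added (GstElement *demux, GstPad *pad, gpointer user_data)
{
  GstPad *sinkpad = gst_element_get_static_pad (GST_ELEMENT (user_data), "sink");
  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (pad, sinkpad);  /* the audio pad simply fails to link here */
  gst_object_unref (sinkpad);
}

static GstElement *build_pipeline (void)
{
  GstElement *pipeline = gst_pipeline_new (NULL);
  GstElement *src     = gst_element_factory_make ("filesrc", NULL);
  GstElement *demux   = gst_element_factory_make ("qtdemux", NULL);
  GstElement *parser  = gst_element_factory_make ("h264parse", NULL);
  GstElement *decoder = gst_element_factory_make ("avdec_h264", NULL);
  GstElement *upload  = gst_element_factory_make ("glupload", NULL);
  GstElement *colors  = gst_element_factory_make ("glcolorconvert", NULL);
  GstElement *sink    = gst_element_factory_make ("qmlglsink", NULL);

  g_object_set (src, "location", "/home/test.mp4", NULL);  /* plain path, not a file:// URI */
  gst_bin_add_many (GST_BIN (pipeline), src, demux, parser, decoder, upload, colors, sink, NULL);
  /* Static links on both sides of the demuxer... */
  gst_element_link (src, demux);
  gst_element_link_many (parser, decoder, upload, colors, sink, NULL);
  /* ...and the dynamic link once qtdemux exposes the video pad. */
  g_signal_connect (demux, "pad-added", G_CALLBACK (on_pad_added), parser);
  return pipeline;
}
Note also that filesrc takes a plain file path in its location property, not a file:// URI.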

How to change gstreamer Element property

I want to make my gstreamer app full screen.
waylandsink has a fullscreen property; how do I change it?
Or is there a way to go full screen without changing the waylandsink property?
When setting it from the command line:
$ gst-launch-1.0 filesrc location=/home/root/testpi.h264 ! decodebin ! vspmfilter ! waylandsink fullscreen=TRUE
When writing it in code:
#define INPUT_FILE "/home/root/testpi.h264"
GstElement *pipeline, *source, *parser, *decoder, *filter, *sink;
const gchar *input_file = INPUT_FILE;
gst_init(&argc, &argv);
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("video-play");
source = gst_element_factory_make ("filesrc", "file-source");
parser = gst_element_factory_make ("h264parse", "h264-parser");
decoder = gst_element_factory_make ("omxh264dec", "h264-decoder");
filter = gst_element_factory_make ("vspmfilter", "filter");
sink = gst_element_factory_make ("waylandsink", "video-output");
/* Set input video file for source element */
g_object_set (G_OBJECT (source), "location", input_file, NULL);
/* Set element property */
g_object_set(G_OBJECT(sink),"fullscreen",TRUE,NULL);
/* Add all elements into the pipeline */
/* pipeline---[ file-source + h264-parser + h264-decoder + filter + video-output ] */
gst_bin_add_many (GST_BIN (pipeline), source, parser, decoder, filter, sink, NULL);
/* Link the elements together */
/* file-source -> h264-parser -> h264-decoder -> filter -> video-output */
if (gst_element_link_many (source, parser, decoder, filter, sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Set the pipeline to "playing" state */
g_print ("Now playing: %s\n", input_file);
if (gst_element_set_state (pipeline,
GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
g_print ("Running...\n");
while(1)
{
;
}
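One more thing worth fixing while here: the empty while(1) loop busy-spins a CPU core and never services the bus, so errors from waylandsink are silently dropped. A sketch of the usual replacement with a GLib main loop (same pipeline variable as above; the callback name is illustrative):
static gboolean on_bus_message (GstBus *bus, GstMessage *msg, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
    case GST_MESSAGE_ERROR:
      g_main_loop_quit ((GMainLoop *) user_data);  /* stop on end-of-stream or error */
      break;
    default:
      break;
  }
  return TRUE;  /* keep watching the bus */
}

/* instead of while(1): */
GMainLoop *loop = g_main_loop_new (NULL, FALSE);
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, on_bus_message, loop);
gst_object_unref (bus);
g_main_loop_run (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);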

gstreamer audiomixer command to code converting

I want to use audiomixer in my application, which receives audio from different sources and should play them together through the speaker.
My final application should do something like this command:
gst-launch-1.0 audiomixer name=mix ! autoaudiosink autoaudiosrc ! \
audioconvert ! mix. udpsrc port=5001 caps="application/x-rtp" ! queue !\
rtppcmudepay ! mulawdec ! audioconvert ! audioresample ! mix.
I already wrote code that uses tee and queues, and I know how to work with tee and queues based on this code, but I don't know how to use the mixer in my code.
So, for simplicity, I just want to write code that works the way this command does:
gst-launch-1.0 audiotestsrc freq=100 ! audiomixer name=mix ! audioconvert ! autoaudiosink autoaudiosrc ! mix.
I didn't find any useful example for reaching this goal. How can I write C code to do this?
For the second part:
gst-launch-1.0 audiotestsrc freq=100 ! audiomixer name=mix ! audioconvert ! autoaudiosink autoaudiosrc ! mix.
this code works:
#include <gst/gst.h>
static GMainLoop *loop;
gboolean bus_callback (GstBus *bus, GstMessage *message, gpointer data)
{
g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error (message, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (debug);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_EOS:
/* end-of-stream */
g_main_loop_quit (loop);
break;
default:
/* unhandled message */
break;
}
/* we want to be notified again the next time there is a message
* on the bus, so returning TRUE (FALSE means we want to stop watching
* for messages on the bus and our callback should not be called again)
*/
return TRUE;
}
int main(int argc, char *argv[])
{
/* Initialize GStreamer */
gst_init (NULL, NULL);
GstElement *pipeline, *src1,*src2, *sink, *convert1,*convert2,*audiomixer;
GstPad *conv_pad1, *conv_pad2, *mixer1_sinkpad,*mixer2_sinkpad;
gint i;
static GstBus *bus;
static guint bus_watch_id;
pipeline = gst_pipeline_new ("pipeline");
audiomixer = gst_element_factory_make ("adder", "mixer"); /* "adder" mixes raw audio like audiomixer, but without live synchronization */
sink = gst_element_factory_make ("autoaudiosink", "sink");
src1 = gst_element_factory_make ("audiotestsrc", "src1");
convert1 = gst_element_factory_make ("audioconvert", "convert1");
src2 = gst_element_factory_make ("autoaudiosrc", "src2");
convert2 = gst_element_factory_make ("audioconvert", "convert2");
//g_object_set (sink, "async-handling", TRUE, NULL);
gst_bin_add_many (GST_BIN (pipeline), audiomixer, sink, NULL);
gst_bin_add_many (GST_BIN (pipeline), src1, convert1, NULL);
gst_bin_add_many (GST_BIN (pipeline), src2, convert2, NULL);
gst_element_link (src1, convert1);
gst_element_link (src2, convert2);
gst_element_link (audiomixer, sink);
conv_pad1= gst_element_get_static_pad (convert1, "src");
mixer1_sinkpad = gst_element_get_request_pad (audiomixer, "sink_%u");
gst_pad_link (conv_pad1, mixer1_sinkpad);
gst_object_unref (mixer1_sinkpad);
conv_pad2= gst_element_get_static_pad (convert2, "src");
mixer2_sinkpad = gst_element_get_request_pad (audiomixer, "sink_%u");
gst_pad_link (conv_pad2, mixer2_sinkpad);
gst_object_unref (mixer2_sinkpad);
/* adds a watch for new message on our pipeline’s message bus to
* the default GLib main context, which is the main context that our
* GLib main loop is attached to below
*/
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_callback, NULL);
gst_object_unref (bus);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (loop);
gst_object_unref (conv_pad1);
gst_object_unref (conv_pad2);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_source_remove (bus_watch_id);
gst_object_unref (pipeline);
return 0;
}
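For the first part of the question, the udpsrc branch from the original command line can be attached with exactly the same request-pad pattern. A sketch, assuming the element chain and port from the gst-launch line:
/* udpsrc ! queue ! rtppcmudepay ! mulawdec ! audioconvert ! audioresample ! mix. */
GstElement *udpsrc   = gst_element_factory_make ("udpsrc", "udpsrc");
GstElement *queue    = gst_element_factory_make ("queue", "queue");
GstElement *depay    = gst_element_factory_make ("rtppcmudepay", "depay");
GstElement *mulawdec = gst_element_factory_make ("mulawdec", "mulawdec");
GstElement *aconv    = gst_element_factory_make ("audioconvert", "convert3");
GstElement *resample = gst_element_factory_make ("audioresample", "resample");
GstCaps *rtpcaps = gst_caps_from_string ("application/x-rtp");
g_object_set (udpsrc, "port", 5001, "caps", rtpcaps, NULL);
gst_caps_unref (rtpcaps);
gst_bin_add_many (GST_BIN (pipeline), udpsrc, queue, depay, mulawdec, aconv, resample, NULL);
gst_element_link_many (udpsrc, queue, depay, mulawdec, aconv, resample, NULL);
/* request one more mixer sink pad, exactly like conv_pad1/conv_pad2 above */
GstPad *branch_pad = gst_element_get_static_pad (resample, "src");
GstPad *mixer3_sinkpad = gst_element_get_request_pad (audiomixer, "sink_%u");
gst_pad_link (branch_pad, mixer3_sinkpad);
gst_object_unref (branch_pad);
gst_object_unref (mixer3_sinkpad);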

How to use videomixer element for gstreamer?

I want to play two local video files using gstreamer, but I get a segmentation fault from inside libgstvideomixer.so. What's wrong with my code? The videomixer element is needed to play the two videos. Should I use videobox for that?
gst-launch --no-fault filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! videomixer name=mix ! xvimagesink sync=false filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! mix.
static void p_gst_init(void)
{
App *app = &s_app;
GError *error = NULL;
GstBus *bus;
GstElement *parse, *decoder, *queue;
GstElement *parse2, *decoder2, *queue2;
gst_init (NULL, NULL);
/* create a mainloop to get messages */
app->loop = g_main_loop_new (NULL, TRUE);
app->playbin = gst_pipeline_new ("pipeline");
app->appsrc = gst_element_factory_make ("filesrc", "disk_source");
g_object_set (G_OBJECT (app->appsrc), "location", "/mnt/upan/test.264", NULL);
queue = gst_element_factory_make ("queue", "queue");
parse = gst_element_factory_make ("typefind", "parse");
decoder = gst_element_factory_make ("ffdec_h264", "decoder");
GstElement *filesrc2;
filesrc2 = gst_element_factory_make ("filesrc", "disk_source2");
g_object_set (G_OBJECT (filesrc2), "location", "/mnt/upan/source.264", NULL);
queue2 = gst_element_factory_make ("queue", "queue2");
parse2 = gst_element_factory_make ("typefind", "parse2");
decoder2 = gst_element_factory_make ("ffdec_h264", "decoder2");
/*
GstElement * videobox;
videobox = gst_element_factory_make("videobox", NULL);
g_object_set (videobox, "alpha", 0, "border-alpha", 0, "bottom", 100, "left", 100, "right", 100, "top", 100, NULL);
*/
GstElement * videomixer;
videomixer = gst_element_factory_make("videomixer","videomixer");
app->xvimagesink = gst_element_factory_make ("xvimagesink", "play_video");
g_object_set (G_OBJECT (app->xvimagesink), "synchronous", FALSE, NULL);
gst_bin_add_many (GST_BIN (app->playbin), app->appsrc, queue, parse, decoder, videomixer, app->xvimagesink, filesrc2, queue2, parse2, decoder2, NULL);
if(gst_element_link_many (app->appsrc, queue, parse, decoder, videomixer, NULL))
{
printf("---------link element success-----------------\n");
}
else
printf("---------link element failed-----------------\n");
gst_element_link_many (filesrc2, queue2, parse2, decoder2, videomixer, NULL);
gst_element_link_many(videomixer, app->xvimagesink, NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (app->playbin));
gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)create_window, app);
g_signal_connect (app->appsrc, "need-data", G_CALLBACK (feed_data), app);
return ;
}
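videomixer uses request sink pads, so letting gst_element_link_many pick them for both branches leaves the pad selection implicit; requesting the pads explicitly makes the linking deterministic and easier to debug. A sketch for the two decoder branches, using the 0.10-era API to match ffdec_h264 (the template name is "sink_%d"; the pad position properties are optional and depend on your videomixer version):
/* request the two videomixer sink pads explicitly instead of
 * letting gst_element_link_many pick them */
GstPad *dec1_src = gst_element_get_static_pad (decoder, "src");
GstPad *dec2_src = gst_element_get_static_pad (decoder2, "src");
GstPad *mix_sink1 = gst_element_get_request_pad (videomixer, "sink_%d");
GstPad *mix_sink2 = gst_element_get_request_pad (videomixer, "sink_%d");
/* optional: offset the second video so the streams don't fully overlap */
g_object_set (mix_sink2, "xpos", 100, "ypos", 100, NULL);
if (gst_pad_link (dec1_src, mix_sink1) != GST_PAD_LINK_OK ||
    gst_pad_link (dec2_src, mix_sink2) != GST_PAD_LINK_OK)
  printf("---------pad link failed-----------------\n");
gst_object_unref (dec1_src);
gst_object_unref (dec2_src);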

muxing jpeg to mkv using gstreamer

Situation:
When I try to mux JPEGs into an MKV file I get a zero-sized file. I must put decode and encode elements between the parser and the muxer to get correct output. When I mux an H.264 video with the same code I get a correct video file, which means the time settings of the buffers should be OK (the duration and pts parameters). In any case, even with bad buffer settings the file size is not zero.
matroskamux requires only the "width" and "height" capabilities on its sink pad for "image/jpeg", but it looks like this is not sufficient. jpegparse is giving correct values, and the program does not work after setting these capabilities manually either.
Example of pipeline:
This pipeline doesn't work
appsrc ! "image/jpeg" ! jpegparse ! matroskamux ! filesink location=mjpeg.mkv
But this works
appsrc ! "image/jpeg" ! jpegparse ! avdec_mjpeg ! x264enc ! matroskamux ! filesink location=mjpeg.mkv
Example of code:
Working code, but with reencoding
app = new _App();
app->pipeline = gst_pipeline_new ("pipeline"); /* create the pipeline the elements are added to */
app->src = (GstAppSrc*)gst_element_factory_make ("appsrc", "source");
if(IsH264Frame(codecType))
app->parser = gst_element_factory_make("h264parse", "parser");
else if(codecType == IMAGE_MJPEG_FRAME)
app->parser = gst_element_factory_make("jpegparse", "parser");
//additional code
app->decoder = gst_element_factory_make("avdec_mjpeg", "decoder");
app->encoder = gst_element_factory_make("x264enc", "encoder");
app->muxer = gst_element_factory_make("matroskamux", "muxer");
app->sink = (GstAppSink*)gst_element_factory_make ("filesink", "sink");
if (!app->pipeline || !app->src || !app->decoder || !app->encoder || !app->muxer || !app->sink || !app->parser)
return;
app->bus = gst_pipeline_get_bus (GST_PIPELINE (app->pipeline));
g_assert(app->bus);
gst_bus_add_watch (app->bus, (GstBusFunc) BusMessage, this);
gst_bin_add_many (GST_BIN (app->pipeline), (GstElement*)app->src, app->decoder, app->encoder, app->muxer, app->sink, app->parser, NULL);
/* SETUP ELEMENTS */
g_object_set(app->src,
"stream-type", 0,
"format", GST_FORMAT_BUFFERS,
"is-live", true,
"block", true,
NULL);
if(IsH264Frame(codecType)){
g_object_set(app->src, "caps", gst_caps_new_simple("video/x-h264",
NULL), NULL);
} else if(codecType == IMAGE_MJPEG_FRAME) {
g_object_set(app->src, "caps", gst_caps_new_simple("image/jpeg",
"framerate",GST_TYPE_FRACTION,(int)framerate,1,
NULL), NULL);
//additional code
g_object_set(app->decoder, "caps", gst_caps_new_simple("video/x-raw",
NULL), NULL);
g_object_set(app->encoder, "caps", gst_caps_new_simple("video/x-h264",
NULL), NULL);
}
g_signal_connect(app->src, "need-data", G_CALLBACK(StartFeed), this);
g_signal_connect(app->src, "enough-data", G_CALLBACK(StopFeed), this);
g_object_set (app->sink,
"location", GenerateFileName().c_str(),
"buffer-mode", 0,
NULL);
/* LINKING */
GstPad *padEncSrc, *padMuxSink, *parserSrc;
GstPadTemplate *mux_sink_pad_template;
mux_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (app->muxer), "video_%u");
padMuxSink = gst_element_request_pad (app->muxer, mux_sink_pad_template, NULL, NULL);
parserSrc = gst_element_get_static_pad (app->parser, "src");
padEncSrc = gst_element_get_static_pad (app->encoder, "src");
if(!gst_element_link( (GstElement*)app->src, app->parser))
return;
if(IsH264Frame(codecType)){
if(gst_pad_link (parserSrc, padMuxSink) != GST_PAD_LINK_OK)
return;
} else if(codecType == IMAGE_MJPEG_FRAME){
//additional code
if(!gst_element_link( app->parser, app->decoder))
return;
if(!gst_element_link( app->decoder, app->encoder))
return;
if(gst_pad_link (padEncSrc, padMuxSink) != GST_PAD_LINK_OK)
return;
}
if(!gst_element_link( app->muxer, (GstElement*)app->sink))
return;
/* PLAY */
GstStateChangeReturn ret = gst_element_set_state (app->pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
gst_object_unref (app->pipeline);
return;
}
Question:
What am I doing wrong? Any ideas how to solve this problem?
I solved this problem by changing the appsrc property "format" from GST_FORMAT_BUFFERS to GST_FORMAT_TIME. Correct timestamps on the buffers are not enough.
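For reference, that is a one-line change in the appsrc setup from the question; the muxer needs a time-based segment, so correct pts/duration alone in buffer format is not sufficient:
/* appsrc must operate in GST_FORMAT_TIME rather than GST_FORMAT_BUFFERS */
g_object_set(app->src,
"stream-type", 0,
"format", GST_FORMAT_TIME, /* was GST_FORMAT_BUFFERS */
"is-live", true,
"block", true,
NULL);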