How to change a GStreamer element property

I want to make my GStreamer app full screen.
waylandsink has a fullscreen property; how do I change it?
Or is there a way to go full screen without changing the waylandsink property?

When setting it from the command line:
$ gst-launch-1.0 filesrc location=/home/root/testpi.h264 ! decodebin ! vspmfilter ! waylandsink fullscreen=TRUE
When setting it in code:
#include <gst/gst.h>

#define INPUT_FILE "/home/root/testpi.h264"

int main (int argc, char *argv[])
{
  GstElement *pipeline, *source, *parser, *decoder, *filter, *sink;
  GstBus *bus;
  GstMessage *msg;
  const gchar *input_file = INPUT_FILE;

  gst_init (&argc, &argv);

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("video-play");
  source  = gst_element_factory_make ("filesrc",     "file-source");
  parser  = gst_element_factory_make ("h264parse",   "h264-parser");
  decoder = gst_element_factory_make ("omxh264dec",  "h264-decoder");
  filter  = gst_element_factory_make ("vspmfilter",  "filter");
  sink    = gst_element_factory_make ("waylandsink", "video-output");

  /* Set input video file for source element */
  g_object_set (G_OBJECT (source), "location", input_file, NULL);

  /* Set element property */
  g_object_set (G_OBJECT (sink), "fullscreen", TRUE, NULL);

  /* Add all elements into the pipeline:
   * file-source -> h264-parser -> h264-decoder -> filter -> video-output */
  gst_bin_add_many (GST_BIN (pipeline), source, parser, decoder, filter, sink, NULL);

  /* Link the elements together */
  if (gst_element_link_many (source, parser, decoder, filter, sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", input_file);
  if (gst_element_set_state (pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  g_print ("Running...\n");

  /* Wait for an error or end-of-stream instead of busy-waiting */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
                                    GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
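As a general note on changing element properties (a hedged sketch, not specific to waylandsink): you can confirm a property exists with gst-inspect-1.0 and guard the g_object_set call, for example:

#include <gst/gst.h>

/* Sketch: set a boolean property only if the element actually exposes it.
 * (Check available properties with `gst-inspect-1.0 waylandsink`.) */
static void
set_bool_property (GstElement *element, const gchar *name, gboolean value)
{
  if (g_object_class_find_property (G_OBJECT_GET_CLASS (element), name))
    g_object_set (G_OBJECT (element), name, value, NULL);
  else
    g_printerr ("Element has no \"%s\" property\n", name);
}

/* Usage: set_bool_property (sink, "fullscreen", TRUE); */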

Related

How to link v4l2src to capsfilter in C for gstreamer-1.14

I have a GStreamer media pipeline, shown below, which I am trying to convert into C code. The command line works fine:
gst-launch-1.0 v4l2src device=/dev/video1 ! capsfilter caps=video/x-raw,width=1280,height=720,format=UYVY ! queue ! videoconvert ! queue ! capsfilter caps=video/x-raw,format=NV12,width=1280,height=720,pixel-aspect-ratio=1/1 ! v4l2h264enc extra-controls="controls,h264_level=12,h264_profile=1" ! h264parse ! autovideosink
I have written the code and it compiles successfully, but when I execute it, the v4l2src element fails to link to the capsfilter. I have searched the internet and was unable to fix the problem. Can someone point out what I am doing wrong?
The code snippet is below:
/* Create the gstreamer elements */
source = gst_element_factory_make ("v4l2src", "source");
capsfilter = gst_element_factory_make ("capsfilter", "Caps-Filter");
capsfilter2 = gst_element_factory_make ("capsfilter", "caps-filter2");
video_convert = gst_element_factory_make ("videoconvert", "Video Convert");
queue1 = gst_element_factory_make ("queue", "Encoded Video Queue 1");
queue2 = gst_element_factory_make ("queue", "Encoded Video Queue 2");
encoder = gst_element_factory_make ("v4l2h264enc", "HW Accelerated Encoder");
H264_pay = gst_element_factory_make ("h264parse", "Payload-encode H264 video into RTP packets");
sink = gst_element_factory_make("autovideosink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!source || !capsfilter || !capsfilter2 || !video_convert || !queue1 || !queue2 || !encoder || !H264_pay || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
}
/* Set Source element properties */
g_object_set (G_OBJECT (source), "device", "/dev/video1", NULL);
GstCaps* filtercaps = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)UYUY");
GstCaps* vconvertfilter = gst_caps_from_string("video/x-raw,width=1280,height=720,format=(string)NV12,pixel-aspect-ratio=1/1");
GstStructure *test = gst_structure_new_from_string("controls,h264_level=12,h264_profile=1");
g_object_set(G_OBJECT(capsfilter), "caps", filtercaps,NULL);
g_object_set(G_OBJECT(capsfilter2), "caps", vconvertfilter, NULL);
g_object_set (G_OBJECT(encoder), "extra-controls", test, NULL);
gst_caps_unref(filtercaps);
gst_caps_unref(vconvertfilter);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline),
source, capsfilter,
queue1, video_convert, queue2,
capsfilter2, encoder,
H264_pay, sink, NULL);
if(!gst_element_link(source, capsfilter))
{
g_printerr("Unable to link Source to filter. check your caps. \n");
gst_object_unref (pipeline);
}
if (gst_element_link_many (capsfilter, queue1, video_convert, NULL) != TRUE)
{
g_printerr("Capsfilter could not be linked to queue1. \n");
gst_object_unref (pipeline);
}
if (gst_element_link_many (video_convert, queue2, capsfilter2, encoder, H264_pay, NULL) != TRUE)
{
g_printerr("video_convert could not be linked to queue2. \n");
gst_object_unref (pipeline);
}
if(gst_element_link_many (H264_pay, sink, NULL) != TRUE)
{
g_printerr("parse could not link to sink.\n");
gst_object_unref (pipeline);
}
I get the error below:
Unable to link Source to filter. check your caps.
Can somebody help me correct the mistake?
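For comparison, here is a hedged sketch of building the first capsfilter's caps programmatically instead of from a string, mirroring the formats used in the working gst-launch-1.0 line above (which requests UYVY from the camera); the variable names are the ones from the snippet:

/* Sketch only: construct the camera caps with gst_caps_new_simple so each
 * field is type-checked, matching the working gst-launch line (UYVY, 1280x720). */
GstCaps *camera_caps = gst_caps_new_simple ("video/x-raw",
    "format", G_TYPE_STRING, "UYVY",
    "width",  G_TYPE_INT, 1280,
    "height", G_TYPE_INT, 720,
    NULL);
g_object_set (G_OBJECT (capsfilter), "caps", camera_caps, NULL);
gst_caps_unref (camera_caps);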

Why the GStreamer pipeline won't play if I add a udpsink element to it

I tried to send an RTP stream with GStreamer, but the pipeline won't play at all. When I simplified my code, I found that if the udpsink element is added to the pipeline, the pipeline is blocked and its state stays at READY.
My code:
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink, *udp, *convert;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("videotestsrc", "source");
convert = gst_element_factory_make ("videoconvert", "convert");
sink = gst_element_factory_make ("autovideosink", "sink");
udp = gst_element_factory_make ("udpsink", "udp");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, convert, /*udp,*/ NULL);
gst_element_link_many (source, convert, sink, NULL);
/* Modify the source's properties */
g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
bus = gst_element_get_bus (pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
case GST_MESSAGE_EOS:
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s\n", gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
// ...
}
As you can see, the pipeline works fine if the udpsink is not added. The same thing happens on the command line:
gst-launch-1.0 -v udpsink videotestsrc ! videoconvert ! autovideosink
The command above pops up a window and the video stops at the first frame.
I don't know what's wrong with my code. Can anyone help? Thanks!
A pipeline branch ends with a sink element (e.g. autovideosink); you cannot add anything after it. That is why autovideosink has no src pad and you cannot link it to udpsink. You need a second branch (running in its own thread) that feeds the udpsink; to create it you can use the tee and queue elements, as sketched below.
You can find more in GStreamer Basic tutorial 7: Multithreading and Pad Availability.
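To make that concrete, here is a minimal, hedged sketch of the tee/queue branching (error handling omitted; host and port are placeholders; an rtpvrawpay payloader is used on the network branch so raw frames fit into UDP packets):

#include <gst/gst.h>

int main (int argc, char *argv[])
{
  GstElement *pipeline, *source, *tee;
  GstElement *queue_disp, *conv_disp, *videosink;
  GstElement *queue_net, *conv_net, *rtppay, *udpsink;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline   = gst_pipeline_new ("tee-pipeline");
  source     = gst_element_factory_make ("videotestsrc",  "source");
  tee        = gst_element_factory_make ("tee",           "tee");
  queue_disp = gst_element_factory_make ("queue",         "queue-display");
  conv_disp  = gst_element_factory_make ("videoconvert",  "convert-display");
  videosink  = gst_element_factory_make ("autovideosink", "videosink");
  queue_net  = gst_element_factory_make ("queue",         "queue-network");
  conv_net   = gst_element_factory_make ("videoconvert",  "convert-network");
  rtppay     = gst_element_factory_make ("rtpvrawpay",    "rtppay");
  udpsink    = gst_element_factory_make ("udpsink",       "udpsink");

  /* Placeholder destination; adjust host/port for your setup */
  g_object_set (udpsink, "host", "127.0.0.1", "port", 5000, NULL);

  gst_bin_add_many (GST_BIN (pipeline), source, tee, queue_disp, conv_disp,
                    videosink, queue_net, conv_net, rtppay, udpsink, NULL);

  /* Branch point: the tee feeds both branches through its request pads
   * (gst_element_link requests them automatically). */
  gst_element_link (source, tee);
  gst_element_link_many (tee, queue_disp, conv_disp, videosink, NULL);
  gst_element_link_many (tee, queue_net, conv_net, rtppay, udpsink, NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Run until an error or end-of-stream */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
                                    GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}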

gstreamer 1.0 rtspsrc to rtph264depay cannot link

I am trying to connect to a Ubiquiti camera, and I can do so successfully with the following GStreamer command:
gst-launch-1.0 --gst-debug=4 rtspsrc location="rtsp://:554/live/ch00_0" ! rtph264depay ! h264parse ! openh264dec ! d3dvideosink
Looking at the debug output (level 4), it says it could not link the rtspsrc pads to rtph264depay (see the attached screenshot), but it still gets the stream fine and I can see the video. When I put this into a C project, it says it cannot link the source to rtph264depay. I looked around, and the advice is to use a dynamic pad with the following code:
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
/* We can now link this pad with the rtsp-decoder sink pad */
g_print ("Dynamic pad created, linking source/demuxer\n");
sinkpad = gst_element_get_static_pad (decoder, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
int main(int argc, char *argv[])
{
/* Initialize GStreamer */
gst_init(&argc,&argv);
/* Build Pipeline */
pipel.pipeline = gst_pipeline_new("My pipeline");
pipel.source = gst_element_factory_make ("rtspsrc","source");
g_object_set (G_OBJECT (pipel.source), "latency",2000,NULL);
pipel.rtppay = gst_element_factory_make( "rtph264depay", "depayl");
pipel.parse = gst_element_factory_make("h264parse","parse");
pipel.filter1 = gst_element_factory_make("capsfilter","filter");
pipel.decodebin = gst_element_factory_make ("openh264dec","decode");
pipel.sink = gst_element_factory_make("d3dvideosink","sink");
g_object_set (G_OBJECT (pipel.sink), "sync",FALSE,NULL);
//create_uri(url,url_size,ip_address,port);
g_object_set(GST_OBJECT(pipel.source),"location","rtsp://<IP>:554/live/ch00_0",NULL);
filtercaps = gst_caps_from_string("application/x-rtp");
g_object_set (G_OBJECT (pipel.filter1), "caps",filtercaps,NULL);
gst_caps_unref(filtercaps);
gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.source
,pipel.rtppay
,pipel.parse
,pipel.decodebin
,pipel.sink
,NULL);
if(!gst_element_link(pipel.source,pipel.rtppay))
printf("\nFailed source to rtppay\n");
if(!gst_element_link_many(pipel.parse,pipel.decodebin,pipel.sink,NULL))
printf("\nFailed to link parse to sink");
g_signal_connect(pipel.rtppay, "pad-added", G_CALLBACK(on_pad_added), pipel.parse);
}
static void cb_new_rtspsrc_pad(GstElement *element,GstPad*pad,gpointer data)
{
gchar *name;
GstCaps * p_caps;
gchar * description;
GstElement *p_rtph264depay;
name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name);
// here, you would setup a new pad link for the newly created pad
// sooo, now find that rtph264depay is needed and link them?
p_caps = gst_pad_get_pad_template_caps (pad);
description = gst_caps_to_string(p_caps);
printf("%s\n",p_caps,", ",description,"\n");
g_free(description);
p_rtph264depay = GST_ELEMENT(data);
// try to link the pads then ...
if(!gst_element_link_pads(element, name, p_rtph264depay, "sink"))
{
printf("Failed to link elements 3\n");
}
g_free(name);
}
/* ---------- Main --------------- */
int main(int argc, char *argv[])
{
/* Initialize GStreamer */
gst_init(&argc,&argv);
/* Build Pipeline */
pipel.pipeline = gst_pipeline_new("My pipeline");
creating_pipeline(ip_address,port);
pipel.source = gst_element_factory_make ("rtspsrc","source");
g_object_set (G_OBJECT (pipel.source), "latency",2000,NULL);
pipel.rtppay = gst_element_factory_make( "rtph264depay", "depayl");
pipel.parse = gst_element_factory_make("h264parse","parse");
pipel.filter1 = gst_element_factory_make("capsfilter","filter");
pipel.decodebin = gst_element_factory_make ("openh264dec","decode");
pipel.sink = gst_element_factory_make("d3dvideosink","sink");
g_object_set (G_OBJECT (pipel.sink), "sync",FALSE,NULL);
//create_uri(url,url_size,ip_address,port);
g_object_set(GST_OBJECT(pipel.source),"location","rtsp://<ip>:554/live/ch00_0",NULL);
filtercaps = gst_caps_from_string("application/x-rtp");
g_object_set (G_OBJECT (pipel.filter1), "caps",filtercaps,NULL);
gst_caps_unref(filtercaps);
gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.source
,pipel.rtppay
,NULL);
// listen for newly created pads
g_signal_connect(pipel.source, "pad-added", G_CALLBACK(cb_new_rtspsrc_pad),pipel.rtppay);
gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.parse,NULL);
if(!gst_element_link(pipel.rtppay,pipel.parse))
printf("\nNOPE\n");
gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.decodebin
,pipel.sink
,NULL);
if(!gst_element_link_many(pipel.parse,pipel.decodebin,pipel.sink,NULL))
printf("\nFailed to link parse to sink");
g_signal_connect(pipel.rtppay, "pad-added", G_CALLBACK(on_pad_added), pipel.parse);
}
It works now! The changes that made it work (the essential pattern is sketched below):
use cb_new_rtspsrc_pad to link the rtspsrc pad dynamically when it appears,
add the parse element to the bin,
link rtppay (the depayloader) to parse,
and add and link the remaining elements of the pipeline.
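For reference, here is a condensed, hedged sketch of that dynamic-linking pattern; it is a simplified version of the callback in the code above, reusing its element names and omitting error handling:

/* Condensed sketch of the pattern above: rtspsrc pads only appear after the
 * RTSP stream is negotiated, so link them from the "pad-added" callback. */
static void
cb_new_rtspsrc_pad (GstElement *element, GstPad *pad, gpointer data)
{
    GstElement *depay = GST_ELEMENT (data);            /* rtph264depay */
    GstPad *sinkpad = gst_element_get_static_pad (depay, "sink");

    if (!gst_pad_is_linked (sinkpad))
        gst_pad_link (pad, sinkpad);                   /* result ignored in this sketch */

    gst_object_unref (sinkpad);
}

/* After adding the elements to the pipeline:
 *   g_signal_connect (pipel.source, "pad-added",
 *                     G_CALLBACK (cb_new_rtspsrc_pad), pipel.rtppay);
 *   gst_element_link_many (pipel.rtppay, pipel.parse,
 *                          pipel.decodebin, pipel.sink, NULL);
 */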

GStreamer source code doesn't work

I have the following two pipelines: one sends audio over a UDP port and the other receives it on the same port on the receiver side:
gst-launch-1.0 -v alsasrc ! audioconvert ! audio/x-raw,channels=2,depth=16,width=16,rate=44100 ! rtpL16pay ! udpsink host=127.0.0.1 port=5000 //sender
and
gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L16, channels=(int)2, payload=(int)96" ! rtpL16depay ! audioconvert ! alsasink //receiver
Now I am trying to write source code using the GStreamer SDK that does the same thing. This is how far I have come:
#include <gst/gst.h>
#include <string.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *audiosink,*rtppay,*rtpdepay,*filter,*filter1,*conv,*conv1,*udpsink,*udpsrc,*receive_resample;
GstBus *bus;
GstMessage *msg;
GstCaps *filtercaps;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("alsasrc", "source");
conv= gst_element_factory_make ("audioconvert", "conv");
conv1= gst_element_factory_make ("audioconvert", "conv1");
filter=gst_element_factory_make("capsfilter","filter");
rtppay=gst_element_factory_make("rtpL16pay","rtppay");
rtpdepay=gst_element_factory_make("rtpL16depay","rtpdepay");
udpsink=gst_element_factory_make("udpsink","udpsink");
audiosink = gst_element_factory_make ("autoaudiosink", "audiosink");
receive_resample = gst_element_factory_make("audioresample", NULL);
udpsrc=gst_element_factory_make("udpsrc",NULL);
filter1=gst_element_factory_make("capsfilter","filter");
g_object_set(udpsrc,"port",5000,NULL);
g_object_set (G_OBJECT (udpsrc), "caps", gst_caps_from_string("application/x-rtp,media=audio,payload=96,clock-rate=44100,encoding-name=L16,channels=2"), NULL);
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !filter || !conv || !rtppay || !udpsink ) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
g_object_set(G_OBJECT(udpsink),"host","127.0.0.1",NULL);
g_object_set(G_OBJECT(udpsink),"port",5000,NULL);
filtercaps = gst_caps_new_simple ("audio/x-raw",
"channels", G_TYPE_INT, 2,
"width", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"rate", G_TYPE_INT, 44100,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
filtercaps = gst_caps_new_simple ("application/x-rtp",
"media",G_TYPE_STRING,"audio",
"clock-rate",G_TYPE_INT,44100,
"encoding-name",G_TYPE_STRING,"L16",
"channels", G_TYPE_INT, 2,
"payload",G_TYPE_INT,96,
NULL);
g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source,filter,conv,rtppay,udpsink, NULL);
if (gst_element_link_many (source,filter,conv,rtppay,udpsink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
gst_bin_add_many (GST_BIN (pipeline),udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL);
if (gst_element_link_many (udpsrc,rtpdepay,conv1,receive_resample,audiosink,NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
// g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
But somehow I don't receive any audio on the receiver, and I don't get errors of any kind.
Any ideas why this is happening?
Well, I figured it out. I don't know why, but when I split the source code into two separate programs, one containing everything up to the udpsink element and the other containing the remaining elements (udpsrc, rtpdepay, and audiosink), and compiled and ran them in two separate terminals, it worked. I still don't know why it is like this, but I am happy that it works.
The sender and receiver are supposed to be two different processes, which is why it works when you use two terminals.
In your code, you're putting two different pipelines into the same pipeline element and setting it to playing.
This is not supported; you need to create a separate pipeline for each:
pipeline1 = gst_pipeline_new ("src-pipeline");
pipeline2 = gst_pipeline_new ("sink-pipeline");
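A minimal sketch of that separation, reusing the element variables from the code in the question (error handling omitted): each chain gets its own pipeline, and each pipeline is set to PLAYING independently. Running them as two separate programs, as the asker did, amounts to the same thing.

/* Sketch: two independent pipelines instead of two chains in one pipeline.
 * The element variables are the ones created in the question's code. */
GstElement *send_pipeline = gst_pipeline_new ("src-pipeline");
GstElement *recv_pipeline = gst_pipeline_new ("sink-pipeline");

/* Sender chain: alsasrc -> caps -> audioconvert -> rtpL16pay -> udpsink */
gst_bin_add_many (GST_BIN (send_pipeline), source, filter, conv, rtppay, udpsink, NULL);
gst_element_link_many (source, filter, conv, rtppay, udpsink, NULL);

/* Receiver chain: udpsrc -> rtpL16depay -> audioconvert -> audioresample -> autoaudiosink */
gst_bin_add_many (GST_BIN (recv_pipeline), udpsrc, rtpdepay, conv1,
                  receive_resample, audiosink, NULL);
gst_element_link_many (udpsrc, rtpdepay, conv1, receive_resample, audiosink, NULL);

/* Each pipeline gets its own clock, bus and state */
gst_element_set_state (send_pipeline, GST_STATE_PLAYING);
gst_element_set_state (recv_pipeline, GST_STATE_PLAYING);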

Why is the gstreamer capsfilter blocking the pipeline

I am trying the basic pipeline below. If I run the pipeline without the caps, it passes straight through (assuming the same I/O format). Once I add the caps, the pre-roll starts but the video does not continue through to the sink output.
Am I coding this wrong?
Thanks,
Art
#include <gst/gst.h>
#include <glib.h>
static void
on_pad_added (GstElement *element,
GstPad *pad,
gpointer data)
{
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
g_print ("Dynamic pad created, linking out/in \n");
sinkpad = gst_element_get_static_pad (decoder, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
gboolean link_ok;
GstElement *pipeline, *source, *decoder, *ffcs, *vidsc, *capsfout, *sink;
GstBus *bus;
GstCaps *caps;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("video-player");
source = gst_element_factory_make ("filesrc", "file-source");
decoder = gst_element_factory_make ("decodebin2", "dec-bin2");
ffcs = gst_element_factory_make ("ffmpegcolorspace", "ffcs");
vidsc = gst_element_factory_make ("videoscale", "vidsc");
capsfout = gst_element_factory_make ("capsfilter", "capsfout");
sink = gst_element_factory_make ("filesink", "vidout");
if (!pipeline || !source || !decoder || !ffcs || !vidsc || !capsfout || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* we set the input/output filename to the source element */
g_object_set (G_OBJECT (source), "location", argv[1], NULL);
g_object_set (G_OBJECT (sink), "location", argv[2], NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bin_add_many (GST_BIN (pipeline),
source, decoder, ffcs, vidsc, capsfout, sink, NULL);
/* we link the elements together */
gst_element_link (source, decoder);
gst_element_link (decoder, ffcs);
gst_element_link (ffcs, vidsc);
caps = gst_caps_new_simple("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
"width", G_TYPE_INT, 384,
"height", G_TYPE_INT, 216,
"framerate", GST_TYPE_FRACTION, 25, 1,
NULL);
link_ok = gst_element_link_filtered(vidsc,sink,caps);
gst_caps_unref (caps);
if (!link_ok) {
g_warning ("Failed to link vidsc to sink!");
}else{
g_print("seems ok, no error reported?\n");
}
/* Set the pipeline to "playing" state*/
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
It may be late to answer, but you may have forgotten the dynamic pad link between some elements: you never connect the on_pad_added callback, so it is never called. I had the same problem.
Have you tried adding it since then? For example, this line dynamically links the two elements instead of using gst_element_link:
g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), ffcs);
For what reason do you need the capsfilter? Only constrain the properties you actually need. For example, you set a framerate but don't have a videorate element before the capsfilter. If you don't care about the framerate (e.g. you just want to force a size), remove the framerate from the caps you set on the capsfilter. A sketch combining both suggestions follows.
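A hedged sketch of both fixes, written against the same GStreamer 0.10-era API as the question and reusing its variables (availability of the videorate element is assumed): let decodebin2 link dynamically via pad-added, and put a videorate element in front of the framerate caps.

/* Sketch (0.10-era API, as in the question): link decodebin2 dynamically
 * and add a videorate element so the framerate caps can be satisfied. */
GstElement *videorate = gst_element_factory_make ("videorate", "rate");
gst_bin_add (GST_BIN (pipeline), videorate);

/* decodebin2 exposes its src pad only at runtime, so do NOT call
 * gst_element_link (decoder, ffcs); the callback links them instead. */
g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), ffcs);
gst_element_link (source, decoder);              /* filesrc -> decodebin2 */
gst_element_link_many (ffcs, vidsc, videorate, NULL);

/* Now the framerate in the caps has an element that can produce it */
caps = gst_caps_new_simple ("video/x-raw-yuv",
    "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
    "width", G_TYPE_INT, 384,
    "height", G_TYPE_INT, 216,
    "framerate", GST_TYPE_FRACTION, 25, 1,
    NULL);
link_ok = gst_element_link_filtered (videorate, sink, caps);
gst_caps_unref (caps);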