GStreamer tutorial 3 unable to link video pads - gstreamer

I'm trying to go through the exercise of GStreamer Tutorial 3:
https://gstreamer.freedesktop.org/documentation/tutorials/basic/dynamic-pipelines.html?gi-language=c#exercise
I'm working on Ubuntu 18.04.
I've added and linked autovideosink and videoconvert:
data.videosink = gst_element_factory_make("autovideosink", "videosink");
data.videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
And modified the callback:
if (g_str_has_prefix (new_pad_type, "audio/x-raw")) {
sink_pad = gst_element_get_static_pad (data->convert, "sink");
}
else if (g_str_has_prefix (new_pad_type, "video/x-raw")){
sink_pad = gst_element_get_static_pad (data->videoconvert, "sink");
}
else
{
g_print ("It has type '%s' which is not raw audio. Ignoring.\n",
new_pad_type);
goto exit;
}
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print ("We are already linked. Ignoring.\n");
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print ("Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print ("Link succeeded (type '%s').\n", new_pad_type);
}
However, I can't link the video pads:
Type is 'video/x-raw' but link failed.
I tried checking whether the pads can link, and that returns false.
Are there more settings, or more information about compatibility between pads, that I can check?
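To dig further, I can dump the caps of both pads inside the callback with something like this (a rough sketch; gst_pad_query_caps, gst_caps_to_string, and gst_caps_can_intersect are standard GStreamer 1.0 calls):
/* Debugging sketch: print the caps on both sides of the failed link */
GstCaps *src_caps = gst_pad_query_caps (new_pad, NULL);
GstCaps *snk_caps = gst_pad_query_caps (sink_pad, NULL);
gchar *src_str = gst_caps_to_string (src_caps);
gchar *snk_str = gst_caps_to_string (snk_caps);
g_print ("src pad caps: %s\n", src_str);
g_print ("sink pad caps: %s\n", snk_str);
g_print ("can intersect: %d\n", gst_caps_can_intersect (src_caps, snk_caps));
g_free (src_str);
g_free (snk_str);
gst_caps_unref (src_caps);
gst_caps_unref (snk_caps);
Running the program with the GST_DEBUG=3 environment variable set also prints the reason each link attempt fails.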

First of all, thank you: the correct parts of your code helped me fix my own troubles.
Actually, all your changes are correct; I ran them here. But there are other things to change too, so let's verify them:
-- First we have to verify that every element was created, and add them all to the bin:
if (!data.pipeline || !data.source || !data.convert || !data.sink || !data.videoconvert || !data.videosink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert, data.sink, data.videoconvert, data.videosink, NULL);
if (!gst_element_link (data.convert, data.sink)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
if (!gst_element_link (data.videoconvert, data.videosink)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
-- Then make sure that your callback function has these definitions before your modifications:
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
new_pad_caps = gst_pad_get_current_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
-- Also, the end of the callback has the part that releases memory (shown below).
Here this code is running; I hope I helped you. Also verify your internet connection, since the code streams from the internet.
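For reference, that release part at the end of the callback is the same as in the tutorial:
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);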

This is the solved code for GStreamer Tutorial 3
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *vconvert;
GstElement *resample;
GstElement *asink;
GstElement *vsink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("uridecodebin", "source");
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.vconvert = gst_element_factory_make ("videoconvert", "vc");
data.resample = gst_element_factory_make ("audioresample", "resample");
data.asink = gst_element_factory_make ("autoaudiosink", "asink");
data.vsink = gst_element_factory_make ("autovideosink", "vsink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.vconvert || !data.resample || !data.asink || !data.vsink)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert, data.vconvert, data.resample, data.asink, data.vsink, NULL);
if (!gst_element_link_many (data.convert, data.resample, data.asink, NULL)) {
g_printerr ("Audio elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
if (!gst_element_link_many (data.vconvert, data.vsink, NULL)) {
g_printerr ("Video elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the URI to play */
g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = NULL; /* initialized so the exit path can safely check it */
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* Check the new pad's type */
new_pad_caps = gst_pad_get_current_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (g_str_has_prefix (new_pad_type, "audio/x-raw")) // && !g_str_has_prefix (new_pad_type, "video/x-raw)")
{
g_print ("It has type '%s' Test it.\n", new_pad_type);
goto test_audio;
}
if (g_str_has_prefix (new_pad_type, "video/x-raw")) // && !g_str_has_prefix (new_pad_type, "video/x-raw)")
{
g_print ("It has type '%s' Test it.\n", new_pad_type);
goto test_video;
}
goto exit ;
GstPad *sink_pad;
test_audio:
sink_pad = gst_element_get_static_pad (data->convert, "sink");
if (gst_pad_is_linked (sink_pad)) {
g_print ("We are already linked. Ignoring.\n");
goto exit;
}
goto next_step;
test_video:
sink_pad = gst_element_get_static_pad (data->vconvert, "sink");
if (gst_pad_is_linked (sink_pad)) {
g_print ("We are already linked. Ignoring.\n");
goto exit;
}
next_step:
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print ("Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print ("Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
if (sink_pad != NULL)
gst_object_unref (sink_pad);
}
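To build the solved code, the usual tutorial command should work (assuming the file is saved as tutorial3.c and pkg-config plus the GStreamer 1.0 development packages are installed):
gcc tutorial3.c -o tutorial3 `pkg-config --cflags --libs gstreamer-1.0`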

Related

How to create gstreamer pipeline with parallel branches having different FPS using tee plugin

Hi, I want to create a GStreamer pipeline with two branches having different FPS. The C++ code I wrote is given below:
#include <iostream>
#include <string.h>
#include <gst/gst.h>
#include <gst/app/app.h>
using namespace std;
GstElement *src, *dbin, *conv, *tee, *mux, *parse, *pipeline;
GstElement *queue1,*videorate1, *conv1, *jenc1, *sink1;
GstElement *queue2,*videorate2, *conv2, *jenc2, *sink2;
GMainLoop *loop;
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
//Cpipeline *obj_pipeline = (Cpipeline*)user_data;
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_error (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_WARNING:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_warning (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
break;
}
case GST_MESSAGE_EOS:
g_print ("\nGot EOS\n");
g_main_loop_quit (loop);
break;
default:
break;
}
return TRUE;
}
static void pad_added_handler (GstElement *src, GstPad *new_pad, gpointer x)
{
GstPad *sink_pad = gst_element_get_static_pad (parse, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print ("We are already linked. Ignoring.\n");
goto exit;
}
new_pad_caps = gst_pad_get_current_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "video/x-h264")) {
g_print ("It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print ("Type is '%s' but link failed.\n", new_pad_type);
goto exit;
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
int main()
{
gst_init (NULL, NULL);
pipeline = gst_pipeline_new (NULL);
src = gst_element_factory_make ("filesrc", NULL);
mux = gst_element_factory_make("qtdemux",NULL);
parse = gst_element_factory_make("h264parse",NULL);
dbin = gst_element_factory_make ("nvv4l2decoder", NULL);
conv = gst_element_factory_make ("nvvideoconvert", NULL);
tee = gst_element_factory_make ("tee", NULL);
std::string url = "VD19_peoplewalking.mp4";
if (!pipeline || !src || !dbin || !conv || !tee || !mux || !parse) {
g_error ("Failed to create elements");
return -1;
}
g_object_set (src, "location", url.c_str(), NULL);
gst_bin_add_many (GST_BIN (pipeline), src, dbin, mux, parse, conv, tee, NULL);
if (!gst_element_link_many(src,mux,NULL) || !gst_element_link_many(parse,dbin,conv, tee,NULL) )//|| !gst_element_link_many (conv, tee, NULL))
{
g_error("Failed to link elements");
return -3;
}
g_signal_connect (mux, "pad-added", G_CALLBACK (pad_added_handler), NULL);
//First Branch creation
GstPadTemplate *templ;
templ =
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee),
"src_%u");
GstPad *teepad1 = gst_element_request_pad (tee, templ, NULL, NULL);
queue1 = gst_element_factory_make ("queue", NULL);
videorate1 = gst_element_factory_make("videorate",NULL);
conv1 = gst_element_factory_make ("nvvideoconvert", NULL);
//jenc = gst_element_factory_make ("jpegenc",NULL);
sink1 = gst_element_factory_make ("autovideosink", NULL);
//sink = gst_element_factory_make ("appsink", NULL);
g_object_set (G_OBJECT(videorate1), "rate", 1.0, NULL);
gst_bin_add_many (GST_BIN (pipeline), queue1, videorate1, conv1, sink1, NULL);
if (!gst_element_link_many ( queue1, conv1, videorate1, sink1, NULL))
{
g_error ("Failed to link elements");
}
GstPad *sinkpad = gst_element_get_static_pad ( queue1, "sink");
gst_pad_link ( teepad1, sinkpad);
gst_object_unref (sinkpad);
//First Branch creation ends
//Second Branch
GstPadTemplate *templ2;
templ2 =
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee),
"src_%u");
GstPad *teepad2 = gst_element_request_pad (tee, templ2, NULL, NULL);
queue2 = gst_element_factory_make ("queue", NULL);
videorate2 = gst_element_factory_make("videorate",NULL);
conv2 = gst_element_factory_make ("nvvideoconvert", NULL);
sink2 = gst_element_factory_make ("autovideosink", NULL);
g_object_set (G_OBJECT(videorate2), "rate", 0.5, NULL);
gst_bin_add_many (GST_BIN (pipeline), queue2, videorate2, conv2, sink2, NULL);
if (!gst_element_link_many ( queue2, conv2, videorate2, sink2, NULL))
{
g_error ("Failed to link elements");
}
GstPad *sinkpad2 = gst_element_get_static_pad ( queue2, "sink");
gst_pad_link ( teepad2, sinkpad2);
gst_object_unref (sinkpad2);
//Second branch creation ends
GstBus *bus;
loop = g_main_loop_new (NULL, FALSE);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message", G_CALLBACK (message_cb), NULL);
gst_object_unref (GST_OBJECT (bus));
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (pipeline);
}
From the command line I am able to run multiple branches with different FPS; please see the command below:
gst-launch-1.0 filesrc location=VD19_peoplewalking.mp4 ! qtdemux ! h264parse ! nvv4l2decoder ! tee name=t ! queue ! videorate ! "video/x-raw(ANY),framerate=1/1" ! nvvideoconvert ! autovideosink t. ! videorate ! "video/x-raw(ANY),framerate=30/1" ! nvvideoconvert ! autovideosink
I am able to run the C++ code, but the streams do not play as expected; both streams get stuck intermittently while the code runs.
Am I missing something?
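One visible difference from the working gst-launch line is that the C code never pins each branch's framerate with a capsfilter after videorate. A minimal, untested sketch of what the first branch's capsfilter could look like (element variable names are made up; the caps string mirrors the command above):
/* Sketch (untested): pin the first branch's framerate, mirroring the
gst-launch caps. Add capsfilter1 to the bin and link the branch as
queue1 -> videorate1 -> capsfilter1 -> conv1 -> sink1 */
GstElement *capsfilter1 = gst_element_factory_make ("capsfilter", NULL);
GstCaps *fps_caps = gst_caps_from_string ("video/x-raw,framerate=1/1");
g_object_set (capsfilter1, "caps", fps_caps, NULL);
gst_caps_unref (fps_caps);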

Gstreamer recording video with audio

I'm trying to record video from my webcam together with audio to a file, using GStreamer through the GLib library on my Ubuntu 16 machine.
I'm able to watch the video stream from the webcam with these code lines:
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink, *convert;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("v4l2src", "source");
sink = gst_element_factory_make ("autovideosink", "sink");
convert = gst_element_factory_make ("videoconvert", "convert");
//convert = gst_element_factory_make ("audioconvert", "convert");
//sink = gst_element_factory_make ("autoaudiosink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink || !convert) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* set the source */
g_object_set (source, "device", "/dev/video0", NULL);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, convert, NULL);
if (gst_element_link (convert, sink) != TRUE) {
g_printerr ("Elements could not be linked confert sink.\n");
gst_object_unref (pipeline);
return -1;
}
if (gst_element_link (source, convert) != TRUE) {
g_printerr ("Elements could not be linked source -convert.\n");
gst_object_unref (pipeline);
return -1;
}
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,(GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
and to capture audio from the microphone and listen to it through the speakers with these code lines:
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data){
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
/* Main function for audio pipeline initialization and looping streaming process */
gint
main (gint argc, gchar **argv) {
GMainLoop *loop;
GstElement *pipeline, *audio_source, *sink;
GstBus *bus;
guint bus_watch_id;
GstCaps *caps;
gboolean ret;
/* Initialization of gstreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Elements creation */
pipeline = gst_pipeline_new ("audio_stream");
audio_source = gst_element_factory_make ("alsasrc", "audio_source");
sink = gst_element_factory_make ("alsasink", "audio_sink");
// video_source = gst_element_factory_make ("v4l2src", "source");
// video_sink = gst_element_factory_make ("autovideosink", "sink");
// video_convert= gst_element_factory_make("videoconvert","convert");
if (!pipeline) {
g_printerr ("Audio: Pipeline couldn't be created\n");
return -1;
}
if (!audio_source) {
g_printerr ("Audio: alsasrc couldn't be created\n");
return -1;
}
if (!sink) {
g_printerr ("Audio: Output file couldn't be created\n");
return -1;
}
g_object_set (G_OBJECT (audio_source), "device", "hw:1,0", NULL);
g_object_set (G_OBJECT (sink), "device", "hw:1,0", NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
gst_bin_add_many (GST_BIN(pipeline), audio_source, sink, NULL);
caps = gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "S16LE", "layout", G_TYPE_STRING, "interleaved", "rate", G_TYPE_INT, (int)44100, "channels", G_TYPE_INT, (int)2, NULL);
ret = gst_element_link_filtered (audio_source, sink, caps);
if (!ret) {
g_print ("audio_source and sink couldn't be linked\n");
gst_caps_unref (caps);
return FALSE;
}
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_print ("streaming...\n");
g_main_loop_run (loop);
g_print ("Returned, stopping stream\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
What I really don't understand is how to get video from the webcam and audio from my ALSA hardware at the same time and save them into a file (such as .mp4, for example). Can anyone help me? I tried to find something useful, but there's nothing on the board. In addition, I would really appreciate knowing how to save just the video stream or just the audio stream in separate files.
UPDATE
I looked again at the tutorials and at the git link given by @nayana, and I tried to code something myself. I have two results:
#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>
static GMainLoop *loop;
static GstElement *pipeline;
static GstElement *muxer, *sink;
static GstElement *src_video, *encoder_video, *queue_video;
static GstElement *src_audio, *encoder_audio, *queue_audio;
static GstBus *bus;
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_error (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_WARNING:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_warning (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
break;
}
case GST_MESSAGE_EOS:{
g_print ("Got EOS\n");
g_main_loop_quit (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (pipeline);
exit(0);
break;
}
default:
break;
}
return TRUE;
}
void sigintHandler(int unused) {
g_print("You ctrl-c-ed! Sending EoS");
gst_element_send_event(pipeline, gst_event_new_eos());
}
int main(int argc, char *argv[])
{
signal(SIGINT, sigintHandler);
gst_init (&argc, &argv);
pipeline = gst_pipeline_new(NULL);
src_video = gst_element_factory_make("v4l2src", NULL);
encoder_video = gst_element_factory_make("x264enc", NULL);
queue_video = gst_element_factory_make("queue", NULL);
src_audio = gst_element_factory_make ("alsasrc", NULL);
encoder_audio = gst_element_factory_make("lamemp3enc", NULL);
queue_audio = gst_element_factory_make("queue", NULL);
muxer = gst_element_factory_make("mp4mux", NULL);
sink = gst_element_factory_make("filesink", NULL);
if (!pipeline || !src_video || !encoder_video || !src_audio || !encoder_audio
|| !queue_video || !queue_audio || !muxer || !sink) {
g_error("Failed to create elements");
return -1;
}
g_object_set(src_audio, "device", "hw:1,0", NULL);
g_object_set(sink, "location", "video_audio_test.mp4", NULL);
gst_bin_add_many(GST_BIN(pipeline), src_video, encoder_video, queue_video,
src_audio, encoder_audio, queue_audio, muxer, sink, NULL);
gst_element_link_many (src_video,encoder_video,queue_video, muxer,NULL);
gst_element_link_many (src_audio,encoder_audio,queue_audio, muxer,NULL);
if (!gst_element_link_many(muxer, sink, NULL)){
g_error("Failed to link elements");
return -2;
}
loop = g_main_loop_new(NULL, FALSE);
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(message_cb), NULL);
gst_object_unref(GST_OBJECT(bus));
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Starting loop");
g_main_loop_run(loop);
return 0;
}
With the code above I am able to record the video from the cam, but the audio is recorded for just one second at some random point during the recording, and it gives me this error:
ERROR: from element /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0: Can't record audio fast enough
Additional debug info:
gstaudiobasesrc.c(869): gst_audio_base_src_create (): /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0:
Dropped 206388 samples. This is most likely because downstream can't keep up and is consuming samples too slowly.
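Side note (not in the original post): x264enc's large default encoder latency is a common cause of alsasrc starving like this. One possible mitigation, untested here, is to ask x264enc for low-latency settings before building the pipeline:
/* possible mitigation (untested): request real-time encoding from x264enc */
gst_util_set_object_arg (G_OBJECT (encoder_video), "tune", "zerolatency");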
So I tried to add some settings and queues:
#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>
static GMainLoop *loop;
static GstElement *pipeline;
static GstElement *muxer, *sink;
static GstElement *src_video, *encoder_video, *queue_video, *rate_video, *scale_video, *capsfilter_video;
static GstElement *src_audio, *encoder_audio, *queue_audio, *queue_audio2, *capsfilter_audio, *rate_audio;
static GstBus *bus;
static GstCaps *caps;
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_error (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_WARNING:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_warning (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
break;
}
case GST_MESSAGE_EOS:{
g_print ("Got EOS\n");
g_main_loop_quit (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (pipeline);
exit(0);
break;
}
default:
break;
}
return TRUE;
}
void sigintHandler(int unused) {
g_print("You ctrl-c-ed! Sending EoS");
gst_element_send_event(pipeline, gst_event_new_eos());
}
int main(int argc, char *argv[])
{
signal(SIGINT, sigintHandler);
gst_init (&argc, &argv);
pipeline = gst_pipeline_new(NULL);
src_video = gst_element_factory_make("v4l2src", NULL);
rate_video = gst_element_factory_make ("videorate", NULL);
scale_video = gst_element_factory_make ("videoscale", NULL);
capsfilter_video = gst_element_factory_make ("capsfilter", NULL);
queue_video = gst_element_factory_make("queue", NULL);
encoder_video = gst_element_factory_make("x264enc", NULL);
src_audio = gst_element_factory_make ("alsasrc", NULL);
capsfilter_audio = gst_element_factory_make ("capsfilter", NULL);
queue_audio = gst_element_factory_make("queue", NULL);
rate_audio = gst_element_factory_make ("audiorate", NULL);
queue_audio2 = gst_element_factory_make("queue", NULL);
encoder_audio = gst_element_factory_make("lamemp3enc", NULL);
muxer = gst_element_factory_make("mp4mux", NULL);
sink = gst_element_factory_make("filesink", NULL);
if (!pipeline || !src_video || !rate_video || !scale_video || !capsfilter_video
|| !queue_video || !encoder_video || !src_audio || !capsfilter_audio
|| !queue_audio || !rate_audio || !queue_audio2 || !encoder_audio
|| !muxer || !sink) {
g_error("Failed to create elements");
return -1;
}
// Set up the pipeline
g_object_set(src_video, "device", "/dev/video0", NULL);
g_object_set(src_audio, "device", "hw:1,0", NULL);
g_object_set(sink, "location", "video_audio_test.mp4", NULL);
// video settings
caps = gst_caps_from_string("video/x-raw,format=(string)I420,width=480,height=384,framerate=(fraction)25/1");
g_object_set (G_OBJECT (capsfilter_video), "caps", caps, NULL);
gst_caps_unref (caps);
// audio settings
caps = gst_caps_from_string("audio/x-raw,rate=44100,channels=1");
g_object_set (G_OBJECT (capsfilter_audio), "caps", caps, NULL);
gst_caps_unref (caps);
// add all elements into the pipeline
gst_bin_add_many(GST_BIN(pipeline), src_video, rate_video, scale_video, capsfilter_video,
queue_video, encoder_video, src_audio, capsfilter_audio, queue_audio, rate_audio,
queue_audio2, encoder_audio, muxer, sink, NULL);
if (!gst_element_link_many (src_video,rate_video,scale_video, capsfilter_video,
queue_video, encoder_video, muxer,NULL))
{
g_error("Failed to link video elements");
return -2;
}
if (!gst_element_link_many (src_audio, capsfilter_audio, queue_audio, rate_audio,
queue_audio2, encoder_audio, muxer,NULL))
{
g_error("Failed to link audio elements");
return -2;
}
if (!gst_element_link_many(muxer, sink, NULL))
{
g_error("Failed to link elements");
return -2;
}
loop = g_main_loop_new(NULL, FALSE);
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(message_cb), NULL);
gst_object_unref(GST_OBJECT(bus));
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Starting loop");
g_main_loop_run(loop);
return 0;
}
This time the code doesn't record anything and gives me the following error:
ERROR: from element /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0: Internal data flow error.
Additional debug info:
gstbasesrc.c(2948): gst_base_src_loop (): /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0:
streaming task paused, reason not-negotiated (-4)
Can you advise me on how to fix the error?
Thanks in advance
What you need is a multiplexer: a GStreamer element that can merge two streams into one.
mp4, mkv, avi, etc. are just container formats which hold multiple "data streams", which can be audio, video, subtitles (not all formats support all of these).
I don't know about your use case, but you don't need C code for what you are doing. You can just use the gst-launch-1.0 tool, which has its own kind of GStreamer scripting language.
For simplicity I will use the debugging elements videotestsrc and audiotestsrc to simulate input (instead of an actual camera, etc.).
gst-launch-1.0 -e videotestsrc ! x264enc ! mp4mux name=mux ! filesink location="bla.mp4" audiotestsrc ! lamemp3enc ! mux.
videotestsrc --> x264enc -----\
                               >---> mp4mux ---> filesink
audiotestsrc --> lamemp3enc --/
Explanation:
videotestsrc generates raw video, which in GStreamer terms is called "video/x-raw".
However, mp4 cannot hold raw video, so we need to encode it with, for example, x264enc, which turns our data into "video/x-h264".
Then we can finally mux this into our mp4 with the mp4mux element.
When we take a look at the GStreamer docs using gst-inspect-1.0 mp4mux, we see that this element supports various formats, among which is video/x-h264.
We do the same thing with audio, using either faac for AAC or lamemp3enc for mp3.
With gst-launch-1.0 I did two tricks and one bonus trick:
The ability to have separate branches in one line. This is achieved by separating those branches with a space instead of !.
The ability to give an element an alias with name=mux and use it later by adding a dot right at the end of the name, like mux. (you can make up any name you like).
Writing EOS after hitting Ctrl+C to stop the recording. This is achieved with the -e parameter.
Finally, the output goes to filesink, which just writes anything you give it to a file.
Now for homework, you:
Use the elements you actually need: v4l2src, alsasrc
Add queue elements for buffering and thread separation (an untested sketch follows this list)
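For example, something like this (untested; device selection and encoder tuning omitted):
gst-launch-1.0 -e v4l2src ! videoconvert ! x264enc ! queue ! mp4mux name=mux ! filesink location=recording.mp4 alsasrc ! audioconvert ! lamemp3enc ! queue ! mux.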

Internal data flow error in gstreamer c code that uses "adder" element?

I want to convert my gst-launch command into C code. I am new to GStreamer coding. Can anyone help me?
command: gst-launch-0.10 uridecodebin uri=file:///media/afeb7785-7c21-45bf-b1b7-41d3263022f6/gst/bigcity.wav ! audioconvert ! volume volume='0.9' ! audioconvert ! adder name = m ! autoaudiosink uridecodebin uri=file:///media/afeb7785-7c21-45bf-b1b7-41d3263022f6/gst/tereliya.wav ! audioconvert ! volume volume='0.3' ! audioconvert ! m.
C code:
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void
on_pad_added (GstElement *element,
GstPad *pad,
gpointer data)
{
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
/* We can now link this pad with the vorbis-decoder sink pad */
g_print ("Dynamic pad created, linking \n");
sinkpad = gst_element_get_static_pad (decoder, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source1, *source2, *mixer, *conv, *conv2, *sink;
GstBus *bus;
guint bus_watch_id;
GstPad *adder_sinkpad;
GstPad *adder_sinkpad2;
GstPad *conv1_pad;
GstPad *conv2_pad;
gchar *pad1name;
gchar *pad2name;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
/* if (argc != 3) {
g_printerr ("Usage: %s \n", argv[0]);
return -1;
}*/
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("audio-player");
source1 = gst_element_factory_make ("uridecodebin", "uri-source1");
source2 = gst_element_factory_make ("uridecodebin", "uri-source2");
mixer = gst_element_factory_make ("adder", "audio-mix");
conv = gst_element_factory_make ("audioconvert", "conv");
conv2 = gst_element_factory_make ("audioconvert", "conv2");
sink = gst_element_factory_make ("alsasink", "audio-output");
if (!pipeline || !source1 || !source2 || !mixer || !conv || !conv2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (source1), "uri", "file:///home/baibhav/gst/shadowoftheday.wav", NULL);
g_object_set (G_OBJECT (source2), "uri", "file:///home/baibhav/gst/valentinesday.wav" , NULL);
g_object_set (G_OBJECT (mixer), "name", "mix", NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source1, conv, mixer, sink, source2, conv2, NULL);
/* we link the elements together */
int k,n;
if((k=gst_element_link (source1, conv)) !=0 ) {
g_print ("link1 error: %d\n",k);
g_print ("cannot link source1 with conv\n");
}
if((n=gst_element_link (source2, conv2)) != 0) {
g_print ("link2 error: %d\n",n);
g_print ("cannot link source2 with conv2\n");
}
if(gst_element_link (mixer, sink) != TRUE) {
g_print ("cannot link sink with mixer\n");
}
conv1_pad= gst_element_get_static_pad (conv, "src");
conv2_pad= gst_element_get_static_pad (conv2, "src");
adder_sinkpad = gst_element_get_request_pad (mixer, "sink%d");
pad1name = gst_pad_get_name (adder_sinkpad);
g_print ("pad1name: %s\n",pad1name );
adder_sinkpad2 = gst_element_get_request_pad (mixer, "sink%d");
pad2name = gst_pad_get_name (adder_sinkpad2);
g_print ("pad2name: %s\n",pad2name );
int i,j;
if((i=gst_pad_link (conv1_pad, adder_sinkpad)) != 0) {
g_print ("pad error: %d\n",i);
g_print ("cannot link conv1 with adder1\n");
}
if((j=gst_pad_link (conv2_pad, adder_sinkpad2))!= 0) {
g_print ("pad2 error: %d\n",j);
g_print ("cannot link conv2 with adder2\n");
}
// g_signal_connect (conv, "pad-added", G_CALLBACK (on_pad_added), mixer);
// g_signal_connect (conv2, "pad-added", G_CALLBACK (on_pad_added), mixer);
/* Set the pipeline to "playing" state*/
g_print ("Now playing\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
// gst_pad_unlink (conv2_pad, adder_sinkpad2);
// gst_pad_unlink ((conv1_pad, adder_sinkpad);
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
For your application, there is no need to add a signal handler for the pad-added signal, as long as the sources have static pads; note that the code below swaps uridecodebin for audiotestsrc, whose src pad is always available. You can simply write your application as follows:
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source1, *source2, *mixer, *conv, *conv2, *sink;
GstBus *bus;
guint bus_watch_id;
GstPad *adder_sinkpad;
GstPad *adder_sinkpad2;
GstPad *conv1_pad;
GstPad *conv2_pad;
gchar *pad1name;
gchar *pad2name;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
/* if (argc != 3) {
* g_printerr ("Usage: %s \n", argv[0]);
* return -1;
* }*/
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("audio-player");
source1 = gst_element_factory_make ("audiotestsrc", "uri-source1");
source2 = gst_element_factory_make ("audiotestsrc", "uri-source2");
mixer = gst_element_factory_make ("adder", "audio-mix");
conv = gst_element_factory_make ("audioconvert", "conv");
conv2 = gst_element_factory_make ("audioconvert", "conv2");
sink = gst_element_factory_make ("alsasink", "audio-output");
if (!pipeline || !source1 || !source2 || !mixer || !conv || !conv2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* Set up the pipeline */
/* audiotestsrc has no "uri" property, so there is no input filename to set;
the two sources simply generate test tones */
g_object_set (G_OBJECT (mixer), "name", "mix", NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source1, conv, mixer, sink, source2, conv2, NULL);
/* we link the elements together */
int k,n;
if((k=gst_element_link_many (source1, conv, mixer, NULL)) == 0) {
g_print ("link1 error: %d\n",k);
g_print ("cannot link source1 with conv\n");
}
if((n=gst_element_link_many (source2, conv2, mixer, NULL)) == 0) {
g_print ("link2 error: %d\n",n);
g_print ("cannot link source2 with conv2\n");
}
if(gst_element_link (mixer, sink) == 0 ) {
g_print ("cannot link sink with mixer\n");
}
/* Set the pipeline to "playing" state */
g_print ("Now playing\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
// gst_pad_unlink (conv2_pad, adder_sinkpad2);
// gst_pad_unlink ((conv1_pad, adder_sinkpad);
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
The code below works.
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void
on_pad_added (GstElement *element,
GstPad *pad,
gpointer data)
{
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
/* We can now link this pad with the vorbis-decoder sink pad */
g_print ("Dynamic pad created, linking \n");
sinkpad = gst_element_get_static_pad (decoder, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *source1, *source2, *mixer, *conv1, *conv2, *sink;
GstBus *bus;
guint bus_watch_id;
GstPad *adder_sinkpad1;
GstPad *adder_sinkpad2;
GstPad *vol1_pad, *vol2_pad;
gchar *pad1name;
gchar *pad2name;
GstElement *vol1, *vol2;
int n;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
if (argc != 3) {
g_printerr ("Usage: %s file1 file2\n", argv[0]);
return -1;
}
/* Create gstreamer elements */
pipeline = gst_pipeline_new ("audio-player");
source1 = gst_element_factory_make ("uridecodebin", "uri-source1");
source2 = gst_element_factory_make ("uridecodebin", "uri-source2");
mixer = gst_element_factory_make ("adder", "audio-mix");
conv1 = gst_element_factory_make ("audioconvert", "conv1");
conv2 = gst_element_factory_make ("audioconvert", "conv2");
vol1 = gst_element_factory_make("volume", "vol1");
vol2 = gst_element_factory_make("volume", "vol2");
sink = gst_element_factory_make ("alsasink", "audio-output");
if (!pipeline || !source1 || !source2 || !mixer || !conv1 || !conv2 || !vol1 || !vol2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (source1), "uri", argv[1], NULL);
g_object_set (G_OBJECT (source2), "uri", argv[2], NULL);
g_object_set (G_OBJECT (mixer), "name", "mix", NULL);
g_object_set(G_OBJECT (vol1), "volume", 0.9, NULL);
g_object_set(G_OBJECT (vol2), "volume", 0.3, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source1, conv1, vol1, mixer, sink, source2, conv2, vol2, NULL);
/* we link the elements together */
#if 0 /* Doesn't work: linking the uridecodebin sources directly fails because their src pads are created dynamically */
g_print("link elements directly \n");
if((n=gst_element_link (source1, conv1)) == 0 ) {
g_print ("link1 error: %d\n",n);
g_print ("cannot link source1 with conv1\n");
}
if((n=gst_element_link (source2, conv2)) == 0) {
g_print ("link2 error: %d\n",n);
g_print ("cannot link source2 with conv2\n");
}
#else
g_print("use dynamic pads to link elements \n");
g_signal_connect (source1, "pad-added", G_CALLBACK (on_pad_added), conv1);
g_signal_connect (source2, "pad-added", G_CALLBACK (on_pad_added), conv2);
#endif
if((n=gst_element_link (conv1, vol1)) == 0) {
g_print ("link error: %d\n",n);
g_print ("cannot link conv1 with vol1\n");
}
if((n=gst_element_link (conv2, vol2)) == 0) {
g_print ("link error: %d\n",n);
g_print ("cannot link conv2 with vol2\n");
}
if((n = gst_element_link (mixer, sink)) == 0) {
g_print ("cannot link sink with mixer\n");
}
vol1_pad= gst_element_get_static_pad (vol1, "src");
vol2_pad= gst_element_get_static_pad (vol2, "src");
adder_sinkpad1 = gst_element_get_request_pad (mixer, "sink_%u");
pad1name = gst_pad_get_name (adder_sinkpad1);
g_print ("pad1name: %s\n",pad1name );
adder_sinkpad2 = gst_element_get_request_pad (mixer, "sink_%u");
pad2name = gst_pad_get_name (adder_sinkpad2);
g_print ("pad2name: %s\n",pad2name );
int i,j;
if((i=gst_pad_link (vol1_pad, adder_sinkpad1)) != 0) {
g_print ("pad error: %d\n",i);
g_print ("cannot link conv1 with adder1\n");
}
if((j=gst_pad_link (vol2_pad, adder_sinkpad2))!= 0) {
g_print ("pad2 error: %d\n",j);
g_print ("cannot link conv2 with adder2\n");
}
/* Set the pipeline to "playing" state*/
g_print ("Now playing\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
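Since the program takes the two input URIs from the command line, a typical invocation looks like this (the binary name and paths are just examples):
./mixer file:///home/user/one.wav file:///home/user/two.wav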

Playing .AVI file with GStreamer SDK in Windows

I want to play an .AVI file using GStreamer in Windows 7. The GStreamer SDK was installed as given in this link. Then a GStreamer SDK project was created, and the following code was added to a C file as given in this link, with the suggested corrections. Project properties -> Configurations properties -> Debugging -> Working directory was changed to "$(GSTREAMER_SDK_ROOT_X86)\bin", and the same was added to the Windows PATH variable, as suggested in the installation link given above. When the code is run, it just exits without playing the video; the last few lines of the output are given below. Please note that I have installed the 32-bit GStreamer SDK on 64-bit Windows 7.
Code:
#include <stdio.h>
#include <gst/gst.h>
#include <glib.h>
//Function to process message on bus of pipeline
gboolean process_message(GstBus *bus, GstMessage *msg,gpointer data);
//Function to add pad dynamically for ogg demux
void dynamic_addpad(GstElement *element, GstPad *pad, gpointer data);
void dynamic_decodepad (GstElement* object, GstPad* arg0, gboolean arg1,gpointer user_data);
GstElement *source, *demuxer, *audio_decoder, *video_decoder, *audio_convertor,*video_convertor, *audio_sink,*video_sink,*audioqueue,*videoqueue;//*audio_demuxer, *video_demuxer,
int main(int argc,char* argv[]){
GstElement *pipeline;
GstBin *Bin;
GstBus *bus;
GMainLoop *Mainloop;
gst_init (&argc,&argv);
Mainloop = g_main_loop_new(NULL,FALSE);//NULL to use the current context and False to tell its not in running state
pipeline = gst_pipeline_new("PIPELINE");
Bin = GST_BIN(pipeline);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
source = gst_element_factory_make("filesrc","file-source");
g_object_set(G_OBJECT(source), "location", "file:///C:/Video.avi", NULL);
demuxer = gst_element_factory_make("avidemux","avi-demuxer");
audioqueue = gst_element_factory_make("queue","Queue for audio");
videoqueue = gst_element_factory_make("queue","Queue for video");
audio_decoder = gst_element_factory_make("decodebin","a_decodebin");
video_decoder = gst_element_factory_make("decodebin","decoderbin");//"Vorbis audio decoder","vorbis");
audio_convertor = gst_element_factory_make("audioconvert","audio convertor");//"Audio converter","audioconvert");
video_convertor = gst_element_factory_make("videoscale","video convertor");//"Audio converter","audioconvert");
audio_sink = gst_element_factory_make("autoaudiosink","Auto audio sink");
video_sink = gst_element_factory_make("xvimagesink","XV video sink ");
if(!source || !demuxer || !audioqueue || !videoqueue || !video_decoder ||!audio_convertor || ! video_convertor || !audio_sink || !video_sink ){
g_print("Could not not create element\n");
return 0;
}
gst_bin_add(Bin,source);
gst_bin_add_many(
Bin,
demuxer,
audioqueue,videoqueue,
audio_decoder,audio_convertor,
video_decoder,video_convertor,
audio_sink,video_sink,
NULL);
gst_element_link(source,demuxer);
gst_element_link_many(audioqueue,audio_decoder,NULL);
gst_element_link_many(audio_convertor,audio_sink,NULL);
gst_element_link_many(videoqueue,video_decoder,NULL);
gst_element_link_many(video_convertor,video_sink,NULL);
g_signal_connect(demuxer,"pad-added",G_CALLBACK(dynamic_addpad),NULL);//demuxer and decoder are passed as instance and data as pads of both the elements are linked in dynamic_addpad
g_signal_connect(audio_decoder,"new-decoded-pad",G_CALLBACK(dynamic_decodepad),NULL);
g_signal_connect(video_decoder,"new-decoded-pad",G_CALLBACK(dynamic_decodepad),NULL);//demuxer and decoder are passed as instance and data as pads of both the elements are linked in dynamic_addpad
gst_bus_add_watch(bus,process_message,Mainloop); //Mainloop is passed as user data as in the process_message actions are taken on the loop
g_object_unref(bus);
g_print("In playing state\n");
gst_element_set_state(pipeline, GST_STATE_PLAYING);//Pipeline is also a bin and bin is also an element at abstract level and hence gst_element_set_state call is used to set state of pipeline.
g_main_loop_run(Mainloop);
g_print("In playing state2\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_object_unref(G_OBJECT(pipeline));
}
//Function to process message on bus of pipeline
gboolean process_message(GstBus *bus, GstMessage *msg,gpointer data){
GError *error;
gchar *debug;
GMainLoop *loop = (GMainLoop *)data;
g_print(" In process message msg->type : %d\n",GST_MESSAGE_TYPE(msg));
switch(GST_MESSAGE_TYPE(msg)){
case GST_MESSAGE_UNKNOWN :
g_print("GST_MESSAGE_UNKNOWN \n");
break;
case GST_MESSAGE_EOS :
g_print("GST_MESSAGE_EOS \n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR :
g_print("GST_MESSAGE_ERROR \n");
gst_message_parse_error (msg, &error, &debug);
g_free(debug);
//if(!error)
{
g_print("GST_MESSAGE_ERROR message : %s \n",error->message);
}
g_main_loop_quit(loop);
break;
case GST_MESSAGE_WARNING :
g_print("GST_MESSAGE_WARNING \n");
break;
case GST_MESSAGE_INFO :
g_print("GST_MESSAGE_INFO \n");
break;
case GST_MESSAGE_TAG :
g_print("GST_MESSAGE_TAG \n");
break;
case GST_MESSAGE_BUFFERING:
g_print("GST_MESSAGE_BUFFERING \n");
break;
case GST_MESSAGE_STATE_CHANGED:
g_print("GST_MESSAGE_STATE_CHANGED \n");
break;
default :
g_print("default \n");
break;
}
return TRUE; //returns true always as it has to be always registered returning false will deregister the function
}
void dynamic_addpad(GstElement *element, GstPad *pad, gpointer data) {
char* pad_name = gst_pad_get_name(pad);
g_print(" In dynamic ADDING PAD %s\n", pad_name);
if (g_str_has_prefix(pad_name,"audio")) {
GstPad *audiodemuxsink = gst_element_get_static_pad(audioqueue,"sink");
gst_pad_link(pad,audiodemuxsink );
}
else if (g_str_has_prefix(pad_name,"video")) {
GstPad *videodemuxsink = gst_element_get_static_pad(videoqueue,"sink");
gst_pad_link(pad,videodemuxsink );
}
g_free (pad_name);
}
void dynamic_decodepad (GstElement* object, GstPad* pad, gboolean arg1,gpointer user_data) {
GstPad* videoconvertsink = gst_element_get_static_pad(video_convertor,"sink");
if (gst_pad_can_link(pad,videoconvertsink)) {
gst_pad_link(pad,videoconvertsink);
}
GstPad* audioconvertsink = gst_element_get_static_pad(audio_convertor,"sink");
if (gst_pad_can_link(pad,audioconvertsink)) {
gst_pad_link(pad,audioconvertsink);
}
}
Output:
The thread 'Win32 Thread' (0x19c4) has exited with code 0 (0x0).
The thread 'Win32 Thread' (0x2370) has exited with code 0 (0x0).
The thread 'Win32 Thread' (0x2040) has exited with code 0 (0x0).
The program '[5368] GstProject2.exe: Native' has exited with code 0 (0x0).
Finally, I was able to play the AVI file using the following code, which is based on this example on the GStreamer SDK website.
In command prompt:
Option 1:
gst-launch-0.10 filesrc location=C:\\Video.avi ! decodebin2 name=dec ! queue ! ffmpegcolorspace ! autovideosink dec. ! queue ! audioconvert ! audioresample ! autoaudiosink
Option 2:
gst-launch-0.10 filesrc location=C:\\Video.avi ! decodebin2 name=dec ! ffmpegcolorspace ! autovideosink dec. ! audioconvert ! audioresample ! autoaudiosink
Option 3:
gst-launch-0.10 uridecodebin uri=file:///C:/Video.avi name=dec ! ffmpegcolorspace ! autovideosink dec. ! audioconvert ! autoaudiosink
In Visual Studio:
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *audio_sink;
GstElement *colorspace;
GstElement *video_sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("uridecodebin", "source");
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
data.colorspace = gst_element_factory_make ("ffmpegcolorspace", "colorspace");
data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.audio_sink || !data.colorspace || !data.video_sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.audio_sink, data.colorspace, data.video_sink, NULL);
if (!(gst_element_link (data.convert, data.audio_sink) && gst_element_link (data.colorspace, data.video_sink))) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the URI to play */
g_object_set (data.source, "uri", "file:///C:/Video.avi", NULL);
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad_audio = gst_element_get_static_pad (data->convert, "sink");
GstPad *sink_pad_video = gst_element_get_static_pad (data->colorspace, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
///* If our converter is already linked, we have nothing to do here */
//if (gst_pad_is_linked (sink_pad)) {
// g_print (" We are already linked. Ignoring.\n");
// goto exit;
//}
/* Check the new pad's type */
new_pad_caps = gst_pad_get_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print (" It has type '%s' which is raw video. Connecting.\n", new_pad_type);
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad_video);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad_audio);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad_audio);
gst_object_unref (sink_pad_video);
}

How to convert a Gstreamer program to stream video via udp into a Qt program?

I have a GStreamer program that streams video from a UDP source. I need to implement the program in Qt. Please let me know how I can do it.
The program I'm using is shown below.
#include <gst/gst.h>
#include <stdio.h>
#include <stdlib.h>
GstElement *pipeline,
*source,
*decoder,
*video_sink,
*text,
*audio_sink;
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *err;
gst_message_parse_error (msg, &err, &debug);
g_free (debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void
new_pad (GstElement *element,
GstPad *pad,
gpointer data)
{
GstPad *sinkpad = NULL;
const gchar *mime;
GstCaps *caps;
// get capabilities
caps = gst_pad_get_caps (pad);
// get mime type
mime = gst_structure_get_name (gst_caps_get_structure (caps, 0));
g_print ("Dynamic pad %s:%s created with mime-type %s\n", GST_OBJECT_NAME (element), GST_OBJECT_NAME (pad), mime);
if (g_strrstr (mime, "video"))
{
g_print ("Linking video...\n");
sinkpad = gst_element_get_static_pad (text, "video_sink");
}
if (g_strrstr (mime, "audio"))
{
g_print ("Linking audio...\n");
sinkpad = gst_element_get_static_pad (audio_sink, "sink");
}
if(sinkpad!=NULL)
{
// link
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
gst_caps_unref (caps);
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstBus *bus;
// initialize GStreamer
gst_init (&argc, &argv);
printf("step 0\n");
loop = g_main_loop_new (NULL, FALSE);
/* check input arguments
if (argc != 2)
{
g_print ("Usage: %s <filename>\n", argv[0]);
return -1;
}*/
// argv[1]="http://192.168.0.247:1234/Documents/6.mpg";
//"udp://192.168.0.247:1234";
//"/home/quarkgluon/Documents/rajesh/gstreamer/Serenity.mp4";
printf("step 1\n");
// create elements
pipeline = gst_pipeline_new ("video-player");
source = gst_element_factory_make ("udpsrc", "source");
decoder = gst_element_factory_make ("decodebin2", "decoder");
text = gst_element_factory_make ("textoverlay", "text");
video_sink = gst_element_factory_make ("xvimagesink", "vsink");
audio_sink = gst_element_factory_make ("alsasink", "asink");
if (!pipeline || !source || !decoder || !video_sink || !text || !audio_sink)
{
g_print ("One element could not be created\n");
return -1;
}
// set filename property on the file source. Also add a message
// handler.
g_object_set (G_OBJECT (source),"port",1234, NULL);
// g_object_set (G_OBJECT (text), "text", "hello world awertyuiop!!!", NULL);
// g_object_set (G_OBJECT (text), "italic", 1, NULL);
//g_object_set (G_OBJECT (text), "bold", 1, NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
// put all elements in a bin
gst_bin_add_many (GST_BIN (pipeline), source, decoder, video_sink, text, audio_sink, NULL);
// link together - note that we cannot link the decoder and
// sink yet, because the decoder uses dynamic pads. For that,
// we set a pad-added signal handler.
gst_element_link (source, decoder);
gst_element_link (text, video_sink);
g_signal_connect (decoder, "pad-added", G_CALLBACK (new_pad), NULL);
printf("step 2\n");
// Now set to playing and iterate.
g_print ("Setting to PLAYING\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_print ("Running\n");
g_main_loop_run (loop);
// clean up nicely
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
Have a look at qt-gstreamer, the Qt bindings for GStreamer:
http://cgit.freedesktop.org/gstreamer/qt-gstreamer/