videoconvert element breaks udpsrc timeout messages - c++

This code prints "Timeout received from udpsrc" each second. The videoconvert element is commented out of the pipeline. If I uncomment it then the messages stop printing.
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
// Bag of pipeline elements plus the GLib main loop, shared with bus callbacks.
struct gstreamer_data {
GstElement* pipeline;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* video_converter;   // created but currently not added/linked (see main)
GstElement* fake_sink;
GMainLoop* main_loop;
};
/* Bus "message::error" handler: report the error details and stop the loop. */
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    (void) bus;
    GError* error = NULL;
    gchar* details = NULL;
    gst_message_parse_error(message, &error, &details);
    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), error->message);
    g_printerr("Debugging information: %s\n", details != NULL ? details : "none");
    g_free(details);
    g_clear_error(&error);
    g_main_loop_quit(data->main_loop);
}
/* Bus "message::element" handler: print a line whenever udpsrc posts its
 * GstUDPSrcTimeout message (no packet arrived within the timeout).
 * Fixes: check the message type BEFORE using its structure, guard against a
 * NULL structure, and silence unused-parameter warnings. */
static void element_callback(GstBus* bus, GstMessage* message, gpointer data) {
    (void) bus;
    (void) data;
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
        const GstStructure* st = gst_message_get_structure(message);
        /* st may be NULL for element messages without a structure. */
        if (st != NULL && gst_structure_has_name(st, "GstUDPSrcTimeout")) {
            printf("Timeout received from udpsrc\n");
        }
    }
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
gst_init(NULL, NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.fake_sink = gst_element_factory_make("fakesink", "fake_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.fake_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
I have tried setting the debug level higher, but I'm not seeing anything to account for it. Is there something special about the videoconvert element?

Maybe you were sending data to that udpsrc after all?
I have confirmed that when I un-comment the two lines, I no longer get the log messages as expected.

Related

Basler Pylon to GStreamer to multicast UDP

I am attempting to use GStreamer to send captured images from a Basler acA720-290gm GigE camera to multiple clients over UDP multicast. I have been able to acquire the images using the Pylon 7.1.0.25066 API and send them through a GStreamer pipeline consisting of appsrc, videoconvert and ximagesink. However, if I try to add elements to convert the stream to RTP and back, then I only get a single static image.
This code successfully displays the stream on Debian 11. I'm not sure if I can use something other than GRAY8 for the appsrc caps format or not.
Test.cpp
#include <pylon/PylonIncludes.h>
#include <gst/gst.h>
#include "unistd.h"
#include "pthread.h"
pthread_t thread_1;
pthread_t thread_2;
// Latest grabbed frame; written by pylon_thread, read by gstreamer_thread.
Pylon::CPylonImage image;
// NOTE(review): these flags are shared between threads with no atomics or
// locking — data race; consider std::atomic<bool>. TODO confirm.
bool image_valid = false;
bool gstreamer_thread_done = false;
// Camera acquisition thread: connects to the Basler camera at a fixed IP,
// grabs frames continuously, and copies each successful grab into the global
// `image`, then sets image_valid. On any Pylon exception it logs, sleeps 1 s,
// and retries. Exits when gstreamer_thread_done becomes true.
void* pylon_thread(void*) {
Pylon::CDeviceInfo device_info;
Pylon::CGrabResultPtr ptrGrabResult;
Pylon::PylonAutoInitTerm autoInitTerm;
device_info.SetIpAddress("192.168.1.109");
while (1) {
if (gstreamer_thread_done) { break; }
try {
Pylon::CInstantCamera camera(Pylon::CTlFactory::GetInstance().CreateDevice(device_info));
// Keep only the newest frame; older frames are dropped by Pylon.
camera.StartGrabbing(Pylon::GrabStrategy_LatestImageOnly);
while (camera.IsGrabbing()) {
if (gstreamer_thread_done) { break; }
// 5000 ms timeout; throws on timeout, caught below.
camera.RetrieveResult(5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException);
if (ptrGrabResult->GrabSucceeded()) {
// NOTE(review): `image` may be read concurrently by the GStreamer
// thread while being overwritten here — unsynchronized access.
image.CopyImage(ptrGrabResult);
image_valid = true;
}
else {
fprintf(stderr, "Error: %u %s\n", ptrGrabResult->GetErrorCode(), ptrGrabResult->GetErrorDescription().c_str());
}
}
}
catch (const Pylon::GenericException &e) {
fprintf(stderr, "An exception occurred.\n");
fprintf(stderr, "%s\n", e.GetDescription());
sleep(1);
}
}
pthread_exit(NULL);
}
// Pipeline elements plus feeding state shared with the appsrc callbacks.
struct gstreamer_data {
GstElement* appsrc1;
GstElement* videoconvert1;
GstElement* ximagesink1;
GstElement* pipeline;
GMainLoop* main_loop;
guint source_id;   // GLib idle-source id while feeding; 0 when not feeding
};
/* Idle-source callback: wrap the current Pylon frame in a GstBuffer (zero
 * copy, no destroy-notify) and push it into appsrc.
 *
 * Fixes: the "push-buffer" ACTION SIGNAL does not take ownership of the
 * buffer (unlike gst_app_src_push_buffer()), so the original code leaked one
 * buffer ref per frame — unref after emitting.  Also use the self-documenting
 * G_SOURCE_CONTINUE / G_SOURCE_REMOVE return values.
 *
 * NOTE(review): the wrapped memory aliases the global `image`, which the
 * camera thread overwrites concurrently — potential tearing; confirm. */
static gboolean push_data(gstreamer_data* data) {
    GstBuffer* buffer;
    GstFlowReturn ret;
    buffer = gst_buffer_new_wrapped_full(
        (GstMemoryFlags) GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
        (gpointer) image.GetBuffer(), image.GetImageSize(),
        0, image.GetImageSize(), NULL, NULL
    );
    g_signal_emit_by_name(data->appsrc1, "push-buffer", buffer, &ret);
    gst_buffer_unref(buffer);  /* signal variant keeps its own ref */
    if (ret != GST_FLOW_OK) {
        printf("Error\n");
        return G_SOURCE_REMOVE;
    }
    return G_SOURCE_CONTINUE;
}
/* appsrc "need-data" handler: install an idle source that pushes frames. */
static void start_feed(GstElement* source, guint size, gstreamer_data* data) {
    (void) source;
    (void) size;
    if (data->source_id != 0) {
        return;  /* already feeding */
    }
    data->source_id = g_idle_add((GSourceFunc) push_data, data);
}
/* appsrc "enough-data" handler: tear down the feeding idle source. */
static void stop_feed(GstElement* source, gstreamer_data* data) {
    (void) source;
    if (data->source_id == 0) {
        return;  /* not currently feeding */
    }
    g_source_remove(data->source_id);
    data->source_id = 0;
}
/* Bus "message::error" handler: log the error, then stop the main loop. */
static void error_cb(GstBus* bus, GstMessage* msg, gstreamer_data* data) {
    (void) bus;
    GError* error = NULL;
    gchar* details = NULL;
    gst_message_parse_error(msg, &error, &details);
    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), error->message);
    g_printerr ("Debugging information: %s\n", details != NULL ? details : "none");
    g_free(details);
    g_clear_error(&error);
    g_main_loop_quit(data->main_loop);
}
// Display thread: builds appsrc ! videoconvert ! ximagesink, wires the
// need-data/enough-data feeding callbacks, and runs a GLib main loop until
// an error quits it.  Signals the camera thread to stop on exit.
void* gstreamer_thread(void*) {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
data.source_id = 0;
gst_init(NULL, NULL);
data.appsrc1 = gst_element_factory_make("appsrc", "appsrc1");
// stream-type 0 = STREAM; live source in TIME format.
g_object_set(
G_OBJECT(data.appsrc1),
"stream-type", 0,
"format", GST_FORMAT_TIME,
"is-live", TRUE,
NULL
);
// Caps are taken from the first grabbed frame (main waits for image_valid
// before starting this thread, so width/height are meaningful here).
g_object_set(
G_OBJECT(data.appsrc1),
"caps", gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "GRAY8",
"width", G_TYPE_INT, image.GetWidth(),
"height", G_TYPE_INT, image.GetHeight(),
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL
),
NULL
);
g_signal_connect(data.appsrc1, "need-data", G_CALLBACK(start_feed), &data);
g_signal_connect(data.appsrc1, "enough-data", G_CALLBACK(stop_feed), &data);
data.videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
data.ximagesink1 = gst_element_factory_make("ximagesink", "ximagesink1");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.appsrc1 ||
!data.videoconvert1 ||
!data.ximagesink1
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many (
GST_BIN(data.pipeline),
data.appsrc1,
data.videoconvert1,
data.ximagesink1,
NULL
);
if (
gst_element_link_many (
data.appsrc1,
data.videoconvert1,
data.ximagesink1,
NULL
) != TRUE
)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
printf("Exiting.\n");
// Tell pylon_thread to stop grabbing (see race note on the flag's definition).
gstreamer_thread_done = true;
pthread_exit(NULL);
}
// Entry point: start the camera thread, wait for the first captured frame
// (the appsrc caps need its dimensions), then start the display thread and
// join both.
int main() {
int error;
error = pthread_create(&thread_1, NULL, pylon_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
// NOTE(review): busy-wait on a plain bool written by another thread — a
// data race the compiler may also optimize; consider std::atomic<bool> or a
// condition variable. TODO confirm.
while (image_valid == false) {
sleep(1);
}
error = pthread_create(&thread_2, NULL, gstreamer_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
pthread_join(thread_1, NULL);
pthread_join(thread_2, NULL);
return 0;
}
Makefile
# Makefile for Basler pylon sample program
.PHONY: all clean
# The program to build
NAME := Test
# Installation directories for pylon
PYLON_ROOT ?= /opt/pylon
# Build tools and flags
LD := $(CXX)
CPPFLAGS := $(shell $(PYLON_ROOT)/bin/pylon-config --cflags) $(shell pkg-config --cflags gstreamer-1.0) $(shell pkg-config --cflags opencv4) -DUSE_GIGE
CXXFLAGS := #e.g., CXXFLAGS=-g -O0 for debugging
LDFLAGS := $(shell $(PYLON_ROOT)/bin/pylon-config --libs-rpath)
LDLIBS := $(shell $(PYLON_ROOT)/bin/pylon-config --libs) $(shell pkg-config --libs gstreamer-1.0) -lopencv_core -lopencv_imgproc -lpthread
# Rules for building
all: $(NAME)
# Fix: the automatic variable for the target is $@, not $# (which make
# does not define; the original lines would fail or mangle the command).
$(NAME): $(NAME).o
	$(LD) $(LDFLAGS) -o $@ $^ $(LDLIBS)
$(NAME).o: $(NAME).cpp
	$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c -o $@ $<
clean:
	$(RM) $(NAME).o $(NAME)
This code displays just a single image:
Test.cpp
#include <pylon/PylonIncludes.h>
#include <gst/gst.h>
#include "unistd.h"
#include "pthread.h"
pthread_t thread_1;
pthread_t thread_2;
// Latest grabbed frame; written by pylon_thread, read by gstreamer_thread.
Pylon::CPylonImage image;
// NOTE(review): shared across threads without atomics or locking — data
// race; consider std::atomic<bool>. TODO confirm.
bool image_valid = false;
bool gstreamer_thread_done = false;
// Camera acquisition thread: grab frames from the Basler camera at a fixed
// IP and copy each successful grab into the global `image`.  Retries after
// any Pylon exception; exits when gstreamer_thread_done becomes true.
void* pylon_thread(void*) {
Pylon::CDeviceInfo device_info;
Pylon::CGrabResultPtr ptrGrabResult;
Pylon::PylonAutoInitTerm autoInitTerm;
device_info.SetIpAddress("192.168.1.109");
while (1) {
if (gstreamer_thread_done) { break; }
try {
Pylon::CInstantCamera camera(Pylon::CTlFactory::GetInstance().CreateDevice(device_info));
// Keep only the newest frame; stale frames are dropped.
camera.StartGrabbing(Pylon::GrabStrategy_LatestImageOnly);
while (camera.IsGrabbing()) {
if (gstreamer_thread_done) { break; }
camera.RetrieveResult(5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException);
if (ptrGrabResult->GrabSucceeded()) {
// NOTE(review): `image` may be read concurrently by the GStreamer
// thread while being overwritten here — unsynchronized access.
image.CopyImage(ptrGrabResult);
image_valid = true;
}
else {
fprintf(stderr, "Error: %u %s\n", ptrGrabResult->GetErrorCode(), ptrGrabResult->GetErrorDescription().c_str());
}
}
}
catch (const Pylon::GenericException &e) {
fprintf(stderr, "An exception occurred.\n");
fprintf(stderr, "%s\n", e.GetDescription());
sleep(1);
}
}
pthread_exit(NULL);
}
// Pipeline elements (including the RTP round-trip stages) plus feeding state.
struct gstreamer_data {
GstElement* appsrc1;
GstElement* videoconvert1;
GstElement* x264enc1;
GstElement* rtph264pay1;
GstElement* rtph264depay1;
GstElement* avdec_h2641;
GstElement* videoconvert2;
GstElement* ximagesink1;
GstElement* pipeline;
GMainLoop* main_loop;
guint source_id;   // GLib idle-source id while feeding; 0 when not feeding
};
// Idle-source callback: wrap the current Pylon frame in a GstBuffer (zero
// copy, no destroy-notify) and push it into appsrc via the action signal.
// NOTE(review): the "push-buffer" signal does not take buffer ownership, so
// this appears to leak one ref per frame — verify and unref if confirmed.
// NOTE(review): the wrapped memory aliases the global `image`, which the
// camera thread overwrites concurrently — potential tearing; confirm.
static gboolean push_data(gstreamer_data* data) {
GstBuffer* buffer;
GstFlowReturn ret;
buffer = gst_buffer_new_wrapped_full(
(GstMemoryFlags) GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS, (gpointer) image.GetBuffer(), image.GetImageSize(), 0, image.GetImageSize(), NULL, NULL
);
g_signal_emit_by_name(data->appsrc1, "push-buffer", buffer, &ret);
if (ret != GST_FLOW_OK) {
printf("Error\n");
// Returning false removes the idle source (data->source_id goes stale).
return false;
}
return true;
}
// appsrc "need-data" handler: install an idle source that pushes frames.
static void start_feed(GstElement* source, guint size, gstreamer_data* data) {
(void) source;
(void) size;
if (data->source_id == 0) {
g_print("Start feeding\n");
data->source_id = g_idle_add((GSourceFunc) push_data, data);
}
}
// appsrc "enough-data" handler: remove the feeding idle source.
static void stop_feed(GstElement* source, gstreamer_data* data) {
(void) source;
if (data->source_id != 0) {
g_print("Stop feeding\n");
g_source_remove(data->source_id);
data->source_id = 0;
}
}
// Bus "message::error" handler: log the error, then stop the main loop.
static void error_cb(GstBus* bus, GstMessage* msg, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
void* gstreamer_thread(void*) {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
data.source_id = 0;
gst_init(NULL, NULL);
data.appsrc1 = gst_element_factory_make("appsrc", "appsrc1");
g_object_set(
G_OBJECT(data.appsrc1),
"stream-type", 0,
"format", GST_FORMAT_TIME,
"is-live", TRUE,
NULL
);
g_object_set(
G_OBJECT(data.appsrc1),
"caps", gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "GRAY8",
"width", G_TYPE_INT, image.GetWidth(),
"height", G_TYPE_INT, image.GetHeight(),
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL
),
NULL
);
g_signal_connect(data.appsrc1, "need-data", G_CALLBACK(start_feed), &data);
g_signal_connect(data.appsrc1, "enough-data", G_CALLBACK(stop_feed), &data);
data.videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
data.x264enc1 = gst_element_factory_make("x264enc", "x264enc1");
data.rtph264pay1 = gst_element_factory_make("rtph264pay", "rtph264pay1");
data.rtph264depay1 = gst_element_factory_make("rtph264depay", "rtph264depay1");
data.avdec_h2641 = gst_element_factory_make("avdec_h264", "avdec_h2641");
data.videoconvert2 = gst_element_factory_make("videoconvert", "videoconvert2");
data.ximagesink1 = gst_element_factory_make("ximagesink", "ximagesink1");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.appsrc1 ||
!data.videoconvert1 ||
!data.x264enc1 ||
!data.rtph264pay1 ||
!data.rtph264depay1 ||
!data.avdec_h2641 ||
!data.videoconvert2 ||
!data.ximagesink1
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many (
GST_BIN(data.pipeline),
data.appsrc1,
data.videoconvert1,
data.x264enc1,
data.avdec_h2641,
data.videoconvert2,
data.ximagesink1,
NULL
);
if (
gst_element_link_many (
data.appsrc1,
data.videoconvert1,
data.x264enc1,
data.avdec_h2641,
data.videoconvert2,
data.ximagesink1,
NULL
) != TRUE
)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
printf("Exiting.\n");
gstreamer_thread_done = true;
pthread_exit(NULL);
}
// Entry point: start the camera thread, wait for the first captured frame
// (the appsrc caps need its dimensions), then start the display thread and
// join both.
int main() {
int error;
error = pthread_create(&thread_1, NULL, pylon_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
// NOTE(review): busy-wait on a plain bool written by another thread — data
// race; consider std::atomic<bool> or a condition variable. TODO confirm.
while (image_valid == false) {
sleep(1);
}
error = pthread_create(&thread_2, NULL, gstreamer_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
pthread_join(thread_1, NULL);
pthread_join(thread_2, NULL);
return 0;
}
Maybe instead of copying the image from ptrGrabResult to CPylonImage, try the alternate method of receiving buffers: the image.AttachGrabResultBuffer() method. It is used in the GUI_ImageWindow sample project of the Pylon SDK.

Dynamically link an audio source element into the GStreamer audiomixer element

I am trying to mix internal audio and microphone audio using gstreamer audiomixer element, and then mux the single stream with video data, so far I can do it only when the soundcard is already active.
I am using wasapisrc with the loopback=true property.
What I mean is that my code works when some song is already being played on the computer before I start my program.
What I want to achieve is for the internal sound source to link with the audiomixer element dynamically, but it just gives me an error and the program crashes. What I have done so far is put the sound-card source element in a separate bin from the main pipeline and add a data probe on the wasapisrc element; when there is sound from the audio source I try to link it to a queue and then to the audiomixer in the main pipeline.
Any help on how I can achieve dynamically linking and unlinking a source element into the audiomixer?
my code is below:
#include <gst/gst.h>
//#include "pch.h"
#include <windows.h>
#include <stdio.h>
// Global main loop and top-level pipeline, reachable from all callbacks.
GMainLoop* mainLoop;
GstElement *mainPipeline;
// Forward declarations for helpers defined further below.
GstPadLinkReturn link_to_mixer(GstPad* binPad, GstElement* mix);
GstPad* retrieve_ghost_pad(GstElement* bin, GstElement* elem);
// All pipeline elements, pads, and bus state in one bag, heap-allocated in
// main() and passed to callbacks.
typedef struct _elemStruct
{
GstElement *micSource, *micSourceQueue, *soundCardSrc, *soundCardSrcQueue, *micSrcRate, *micRateQueue, *soundCardRate, *soundCardRateQueue, *audioMixer, *audioMixerQueue;
GstElement* audioConverter, *audioConverterQueue, *audioEncoder, *audioEncoderQueue, *avMuxer, *gdiGrabber, *videoConverter, *x264encoder;
GstElement* muxerQueue, *fileSinker, *gdiGrabberQueue, *videoConverterQueue, *x264encoderQueue;
GstCaps *caps;
GstElement* message;
GstStateChangeReturn stateRet;
GstElement *micBin, *soundCardBin, *screenBin, *audioBin;
GstPad *micMixPad, *soundCardMixPad, *audioMuxPad, *videoMuxPad;
GstBus* mainBus;
GstStateChangeReturn ret;
GstMessage* msg;
guint bus_watch_id;
GstElement* soundCardTempSink;
}elemStruct;
/* Console control handler: beep and report the console event.  Returns TRUE
 * for events we fully handle (Ctrl-C, close) and FALSE to let the next
 * handler in the chain run. */
BOOL WINAPI CtrlHandler(DWORD fdwCtrlType)
{
    switch (fdwCtrlType)
    {
    case CTRL_C_EVENT:
        /* Handle the CTRL-C signal. */
        printf("Ctrl-C event\n\n");
        Beep(750, 300);
        return TRUE;

    case CTRL_CLOSE_EVENT:
        /* CTRL-CLOSE: confirm that the user wants to exit. */
        Beep(600, 200);
        printf("Ctrl-Close event\n\n");
        return TRUE;

    case CTRL_BREAK_EVENT:
        Beep(900, 200);
        printf("Ctrl-Break event\n\n");
        break;

    case CTRL_LOGOFF_EVENT:
        Beep(1000, 200);
        printf("Ctrl-Logoff event\n\n");
        break;

    case CTRL_SHUTDOWN_EVENT:
        Beep(750, 500);
        printf("Ctrl-Shutdown event\n\n");
        break;

    default:
        break;
    }
    /* Pass unhandled signals to the next handler. */
    return FALSE;
}
// Intended (unfinished) helper: once data is seen on the sound-card bin,
// expose its queue's src pad as a ghost pad so it can be linked to the
// audiomixer.  The final link call is still commented out.
// NOTE(review): gst_bin_get_by_name returns a new reference that is never
// unreffed here, and `mixpad` is unused — clean up before enabling.
void addsoundsrc_toMainline(GstPadProbeInfo* info, GstElement* bin)
{
// we got data , add pipeline to audiomixer
// add bin to audiomixer
// get bin src pad
// call retrieve ghostsrc function
//retrieve_ghost_pad()
GstElement* queue = gst_bin_get_by_name(GST_BIN(bin), "sound_card_source_queue");
GstPad* mixpad = retrieve_ghost_pad(bin, queue);
//link_to_mixer(mixpad, )
}
// Buffer probe on the sound-card source pad.  Currently only logs; the
// dynamic-link steps are commented out while being debugged.
// NOTE(review): main() passes `&mainStruct` (an elemStruct**) as user data,
// so the cast below would read garbage if the commented code were enabled —
// the caller should pass `mainStruct` instead.  TODO confirm.
GstPadProbeReturn soundCardProbe(GstPad* pad, GstPadProbeInfo* info, gpointer data)
{
//GstBuffer* buffer = gst_pad_probe_info_get_buffer(info);
GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
elemStruct* mainElem = (elemStruct*)data;
g_print("received data in the soundcard probe ");
//GstElement* bin = mainElem->soundCardBin;
//bool add = gst_bin_add(GST_BIN(mainElem->audioBin), mainElem->soundCardBin);
//gst_element_sync_state_with_parent(mainElem->soundCardBin);
//GstElement* queue = gst_bin_get_by_name((GST_BIN(bin)), "sound_card_source_queue");
//GstPad* mixpad = retrieve_ghost_pad(bin, mainElem->soundCardSrcQueue);
//GstPad* mixPad = gst_element_get_static_pad(mainElem->soundCardSrcQueue, "sink");
//link_to_mixer(mixPad, mainElem->audioMixer);
//addsoundsrc_toMainline(info, bin);
return GST_PAD_PROBE_PASS;
}
/* Apply common limits to a queue element.
 * Fix: "max-size-time" is a guint64 property; pass an explicitly-typed
 * 64-bit value through g_object_set's varargs rather than relying on the
 * literal's implementation-dependent type. */
void set_queue_property(GstElement* _queue)
{
    g_object_set(G_OBJECT(_queue),
                 "max-size-buffers", 1000,
                 "max-size-time", (guint64) 1000000000000,  /* 1000 s in ns */
                 NULL);
}
/* Link binPad to a compatible sink pad of the mixer.
 * Fix: gst_element_get_compatible_pad() can return NULL (e.g. no free
 * request pad); the original then crashed in gst_pad_get_name(NULL). */
GstPadLinkReturn link_to_mixer(GstPad* binPad, GstElement* mix)
{
    GstPad* mixerPad;
    gchar* binPadName, *mixerPadName;
    mixerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
    if (mixerPad == NULL) {
        g_printerr("link_to_mixer: no compatible pad available on mixer\n");
        return GST_PAD_LINK_REFUSED;
    }
    binPadName = gst_pad_get_name(binPad);
    mixerPadName = gst_pad_get_name(mixerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, mixerPad); // check if succesfull;
    g_print(" a new link is creatd with %s and %s pads\n", binPadName, mixerPadName);
    g_free(binPadName);
    g_free(mixerPadName);
    gst_object_unref(mixerPad);
    return retVal;
}
/* Link binPad to a requested "audio_%u" pad of the muxer.
 * Fixes: NULL-check the request pad, and do NOT release the request pad
 * immediately after linking — releasing removes the pad and destroys the
 * link just made.  The pad should be released at teardown instead. */
GstPadLinkReturn audio_link_to_muxer(GstPad* binPad, GstElement* mix)
{
    GstPad* muxerPad;
    gchar* binPadName, *muxerPadName;
    muxerPad = gst_element_get_request_pad(mix, "audio_%u");
    if (muxerPad == NULL) {
        g_printerr("audio_link_to_muxer: could not request audio pad\n");
        return GST_PAD_LINK_REFUSED;
    }
    binPadName = gst_pad_get_name(binPad);
    muxerPadName = gst_pad_get_name(muxerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad); // check if succesfull;
    g_print(" a new link is creatd with %s and %s pads\n", binPadName, muxerPadName);
    g_free(binPadName);
    g_free(muxerPadName);
    /* NOTE: release this request pad with gst_element_release_request_pad()
     * only when unlinking/shutting down, not while the link is in use. */
    return retVal;
}
/* Link binPad to a requested "video_%u" pad of the muxer.
 * Fixes: NULL-check the request pad, and do NOT release the request pad
 * immediately after linking — releasing removes the pad and destroys the
 * link just made.  The pad should be released at teardown instead. */
GstPadLinkReturn video_link_to_muxer(GstPad* binPad, GstElement* mix)
{
    GstPad* muxerPad;
    gchar* binPadName, *muxerPadName;
    muxerPad = gst_element_get_request_pad(mix, "video_%u");
    if (muxerPad == NULL) {
        g_printerr("video_link_to_muxer: could not request video pad\n");
        return GST_PAD_LINK_REFUSED;
    }
    binPadName = gst_pad_get_name(binPad);
    muxerPadName = gst_pad_get_name(muxerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad); // check if succesfull;
    g_print(" a new link is creatd with %s and %s pads\n", binPadName, muxerPadName);
    g_free(binPadName);
    g_free(muxerPadName);
    /* NOTE: release this request pad with gst_element_release_request_pad()
     * only when unlinking/shutting down, not while the link is in use. */
    return retVal;
}
/* Link binPad to a compatible sink pad of the MPEG-TS muxer.
 * Fix: gst_element_get_compatible_pad() can return NULL; the original then
 * crashed in gst_pad_get_name(NULL). */
GstPadLinkReturn link_to_mpeg_muxer(GstPad* binPad, GstElement* mix)
{
    GstPad* muxerPad;
    gchar* binPadName, *muxerPadName;
    muxerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
    if (muxerPad == NULL) {
        g_printerr("link_to_mpeg_muxer: no compatible pad available on muxer\n");
        return GST_PAD_LINK_REFUSED;
    }
    binPadName = gst_pad_get_name(binPad);
    muxerPadName = gst_pad_get_name(muxerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad); // check if succesfull;
    g_print(" a new link is creatd with %s and %s pads\n", binPadName, muxerPadName);
    g_free(binPadName);
    g_free(muxerPadName);
    gst_object_unref(muxerPad);
    return retVal;
}
/* Expose elem's "src" pad as a ghost pad on its containing bin.
 * Fix: a pad added to an element that is already running must be activated
 * first, otherwise data flow through the ghost pad fails. */
GstPad* retrieve_ghost_pad(GstElement* bin, GstElement* elem)
{
    GstPad* elemPad = gst_element_get_static_pad(elem, "src");
    GstPad* ghost = gst_ghost_pad_new("ghostsrc", elemPad);
    gst_pad_set_active(ghost, TRUE);  /* required when bin is not in NULL state */
    gst_element_add_pad(bin, ghost);
    gst_object_unref(elemPad);
    return ghost;
}
/* Pipeline bus watch: quit the loop on EOS or error, log state changes. */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    (void) bus;
    GMainLoop *loop = (GMainLoop *) data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;

    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *dbg = NULL;
        gst_message_parse_error(msg, &err, &dbg);
        g_free(dbg);
        g_printerr("Error: %s\n", err->message);
        g_error_free(err);
        g_main_loop_quit(loop);
        break;
    }

    case GST_MESSAGE_STATE_CHANGED: {
        GstState before, after;
        gst_message_parse_state_changed(msg, &before, &after, NULL);
        g_print("Element %s changed state from %s to %s.\n",
                GST_OBJECT_NAME(msg->src),
                gst_element_state_get_name(before),
                gst_element_state_get_name(after));
        break;
    }

    default:
        break;
    }
    return TRUE;
}
int main(int argc, char** argv)
{
//gst - launch - 1.0.exe wasapisrc loopback = true\
// ! audiorate ! queue ! mix. wasapisrc low-latency=true \
// ! audiorate ! queue ! mix. audiomixer name=mix ! queue ! audioconvert \
// ! queue ! avenc_aac ! queue ! muxer. gdiscreencapsrc ! videoconvert \
// ! x264enc ! mpegtsmux name = muxer !queue ! filesink location=muxin.mp4 sync=false
elemStruct* mainStruct = new elemStruct();;
if (!gst_init_check(&argc, &argv, NULL))
{
g_printerr("couldn't initialize gstreamer\n");
return -1;
}
mainLoop = g_main_loop_new(NULL, FALSE);
if ((mainPipeline = gst_pipeline_new("main_pipeline")) == NULL)
{
}
mainStruct->micSource = gst_element_factory_make("wasapisrc", "mic_source");
mainStruct->soundCardSrc = gst_element_factory_make("wasapisrc", "sound_card_source");
mainStruct->gdiGrabber = gst_element_factory_make("dx9screencapsrc", "dx9_screen_capture_source");
mainStruct->micSourceQueue = gst_element_factory_make("queue", "mic_source_queue_elem");
mainStruct->soundCardSrcQueue = gst_element_factory_make("queue", "sound_card_source_queue");
mainStruct->micSrcRate = gst_element_factory_make("audiorate", "mic_audio_rate_elem");
mainStruct->soundCardRate = gst_element_factory_make("audiorate", "soundCard_audiorate_elem");
mainStruct->micRateQueue = gst_element_factory_make("queue", "mic_audiorate_queue");
mainStruct->soundCardRateQueue = gst_element_factory_make("queue", "soundCard_audiorate_queue");
mainStruct->audioMixer = gst_element_factory_make("audiomixer", "audio_mixer_elem");
mainStruct->audioMixerQueue = gst_element_factory_make("queue", "audio_mixer_queue_elem");
mainStruct->soundCardTempSink = gst_element_factory_make("autoaudiosink", "soundcard_temp_sink_elem");
mainStruct->audioEncoder = gst_element_factory_make("avenc_aac", "audio_encoder_elem");
mainStruct->audioEncoderQueue = gst_element_factory_make("queue", "audio_encoder_queue_elem");
mainStruct->audioConverter = gst_element_factory_make("audioconvert", "audio_convert_elem");
mainStruct->audioConverterQueue = gst_element_factory_make("queue", "audio_convert_queue_elem");
mainStruct->gdiGrabberQueue = gst_element_factory_make("queue", "gdi_grabber_queue_elem");
mainStruct->gdiGrabber = gst_element_factory_make("dx9screencapsrc", "gdi_grabber_elem");
mainStruct->videoConverterQueue = gst_element_factory_make("queue", "videoconvert_queue_elem");
mainStruct->x264encoderQueue = gst_element_factory_make("queue", "x264encoder_queue_elem");
mainStruct->videoConverter = gst_element_factory_make("videoconvert", "videoconvert_elem");
mainStruct->x264encoder = gst_element_factory_make("x264enc", "x264enc_elem");
mainStruct->avMuxer = gst_element_factory_make("mpegtsmux", "mp4_muxer_elem");
//if ((avMuxer = gst_element_factory_make("mpegtsmux", "mp4_muxer_elem")) == NULL)
mainStruct->fileSinker = gst_element_factory_make("filesink", "filesink_elem");
// set up all the sources
g_object_set(G_OBJECT(mainStruct->micSource), "do-timestamp", true, NULL);
g_object_set(G_OBJECT(mainStruct->soundCardSrc), "do-timestamp", true, "loopback", true, NULL);
g_object_set(G_OBJECT(mainStruct->gdiGrabber), "do-timestamp", true, "cursor", true, NULL);
g_object_set(G_OBJECT(mainStruct->x264encoder), "pass", 17, NULL);
g_object_set(G_OBJECT(mainStruct->fileSinker), "location", "sani_1486.mp4", "sync", false, NULL);
// set up all the queues
set_queue_property(mainStruct->micSourceQueue);
set_queue_property(mainStruct->soundCardSrcQueue);
set_queue_property(mainStruct->audioMixerQueue);
set_queue_property(mainStruct->audioEncoderQueue);
set_queue_property(mainStruct->gdiGrabberQueue);
set_queue_property(mainStruct->videoConverterQueue);
set_queue_property(mainStruct->x264encoderQueue);
// add the src elements to each src bin
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->micSource, mainStruct->micSourceQueue, NULL);
mainStruct->soundCardBin = gst_bin_new("sound_card_bin");
gst_bin_add_many(GST_BIN(mainStruct->soundCardBin), mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue, NULL);
gst_element_link_many(mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue,NULL);
GstPad* soundSourceprober = gst_element_get_static_pad(mainStruct->soundCardSrc, "src");
gst_pad_add_probe(soundSourceprober, GST_PAD_PROBE_TYPE_BUFFER, soundCardProbe, &mainStruct, NULL);
gst_element_set_state(mainStruct->soundCardBin, GST_STATE_PLAYING);
// link elements in each source bin
gst_element_link(mainStruct->micSource, mainStruct->micSourceQueue);
//gst_element_link_many(mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue, NULL);
// put this two bin in audiobin, we will connect audiobin to screenBin later
gst_bin_add_many(GST_BIN(mainPipeline),mainStruct->audioMixer, mainStruct->audioMixerQueue, mainStruct->audioEncoder, mainStruct->audioEncoderQueue, NULL);
//GstStateChangeReturn ret = gst_element_set_state(mainStruct->soundCardSrc, GST_STATE_PLAYING);
//GstStateChangeReturn retu = gst_element_get_state(mainStruct->soundCardSrc);
mainStruct->micMixPad = gst_element_get_static_pad(mainStruct->micSourceQueue, "src");
link_to_mixer(mainStruct->micMixPad, mainStruct->audioMixer);
//mainStruct->soundCardMixPad = gst_element_get_static_pad(mainStruct->soundCardSrcQueue, "src");
//link_to_mixer(mainStruct->soundCardMixPad, mainStruct->audioMixer);
bool one_ = gst_element_link_many(mainStruct->audioMixer, mainStruct->audioMixerQueue, mainStruct->audioEncoder, mainStruct->audioEncoderQueue, NULL);
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->gdiGrabber, mainStruct->gdiGrabberQueue, mainStruct->videoConverterQueue, mainStruct->videoConverter, mainStruct->x264encoder, mainStruct->x264encoderQueue, NULL);
// so add this element , with main bin
gst_element_link_many(mainStruct->gdiGrabber, mainStruct->gdiGrabberQueue, mainStruct->videoConverter, mainStruct->videoConverterQueue, mainStruct->x264encoder, mainStruct->x264encoderQueue, NULL);
//link_to_mixer(videoMuxPad, avMuxer);
mainStruct->videoMuxPad = gst_element_get_static_pad(mainStruct->x264encoderQueue, "src");
mainStruct->audioMuxPad = gst_element_get_static_pad(mainStruct->audioEncoderQueue, "src");
// add all the bin and muxer and filesink to main pipeline bin
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->avMuxer, mainStruct->fileSinker, NULL);
link_to_mpeg_muxer(mainStruct->videoMuxPad, mainStruct->avMuxer);
link_to_mpeg_muxer(mainStruct->audioMuxPad, mainStruct->avMuxer);
gst_element_link(mainStruct->avMuxer, mainStruct->fileSinker);
//gst_element_link(videoMuxPad, avMuxer);
/* Start playing the pipeline */
mainStruct->ret = gst_element_set_state(mainPipeline, GST_STATE_PLAYING);
// TODO , deal with ret
mainStruct->mainBus = gst_element_get_bus(mainPipeline);
mainStruct->bus_watch_id = gst_bus_add_watch(mainStruct->mainBus, bus_call, mainLoop);
gst_object_unref(mainStruct->mainBus);
// msg = gst_bus_timed_pop_filtered(mainBus, GST_CLOCK_TIME_NONE, GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
g_main_loop_run(mainLoop);
gst_element_set_state(mainPipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(mainPipeline));
g_source_remove(mainStruct->bus_watch_id);
g_main_loop_unref(mainLoop);
//g_main_loop_quit(mainLoop);
return 0;
}

g_signal_connect must be called before QObject::connect?

I am creating an application that combines GStreamer and Qt. It appears that if I use QObject::connect to connect a signal to a slot before I use g_signal_connect to register a callback function to events on the GStreamer bus, then the g_signal_connect callback function is never called. If I reverse the order it is. Is this expected?
Example:
main.cpp
#include <QApplication>
#include <QPushButton>
#include "acquisitiontype.h"
int main(int argc, char *argv[]) {
QApplication app(argc, argv);
AcquisitionType acquisition("224.1.1.1", 5004);
QPushButton* button = new QPushButton("click me");
QObject::connect(button, SIGNAL(clicked()), &app, SLOT(quit()));
button->show();
return app.exec();
}
acquisitiontype.cpp
#include "acquisitiontype.h"
/* Bus "message::element" handler: print a line whenever udpsrc posts its
 * GstUDPSrcTimeout message.
 * Fix: check the message type BEFORE reading its structure, and guard
 * against a NULL structure. */
void AcquisitionType::udp_source_timeout_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data) {
    (void) bus;
    (void) user_data;
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
        const GstStructure* st = gst_message_get_structure(message);
        if (st != NULL && gst_structure_has_name(st, "GstUDPSrcTimeout")) {
            printf("callback called\n");
        }
    }
}
/* Bus "message::error" handler: reports the error and aborts the process. */
void AcquisitionType::bus_error_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data) {
(void) bus;
(void) user_data;
GError* error = NULL;
gchar* details = NULL;
gst_message_parse_error(message, &error, &details);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), error->message);
g_printerr("Debugging information: %s\n", details ? details : "none");
g_clear_error(&error);
g_free(details);
exit(-1);
}
/* Builds and starts a udpsrc -> fakesink pipeline listening on the given
 * multicast address/port, and connects bus handlers for errors and udpsrc
 * timeout messages. Exits the process on any setup failure. */
AcquisitionType::AcquisitionType(char const* address, gint port) {
GstStateChangeReturn ret;
GstBus* bus;
gst_init(NULL, NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
/* BUG FIX: "timeout" is a guint64 property. g_object_set() is variadic, so
 * a plain int literal is read as 64 bits of garbage; the literal must be
 * widened explicitly with G_GUINT64_CONSTANT. */
g_object_set(G_OBJECT(data.udp_source),
"address", address,
"port", port,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", G_GUINT64_CONSTANT(1000000000),
NULL);
data.sink = gst_element_factory_make("fakesink", "sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.udp_source ||
!data.sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.udp_source,
data.sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
/* Route bus messages as GObject signals so g_signal_connect works. */
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) bus_error_callback, &data);
g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) udp_source_timeout_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
}
/* Stops the pipeline, detaches the bus signal watch and drops all refs. */
AcquisitionType::~AcquisitionType() {
gst_element_set_state(data.pipeline, GST_STATE_NULL);
GstBus* bus = gst_element_get_bus(data.pipeline);
gst_bus_remove_signal_watch(bus);
gst_object_unref(bus);
gst_object_unref(data.pipeline);
}
acquisitiontype.h
#include <gst/gst.h>
#include <QObject>
class AcquisitionType;
/* Elements of the udpsrc -> fakesink capture pipeline, shared with the
 * static bus callbacks via their user_data pointer. */
struct gstreamer_data {
GstElement* pipeline;
GstElement* udp_source;
GstElement* sink;
};
/* Wraps a GStreamer UDP capture pipeline in a QObject; the constructor
 * builds and starts the pipeline, the destructor tears it down. */
class AcquisitionType : public QObject
{
Q_OBJECT
public:
AcquisitionType(char const* address, gint port);
~AcquisitionType();
private:
// Static so they can be used as plain C callbacks with g_signal_connect.
static void bus_error_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data);
static void udp_source_timeout_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data);
gstreamer_data data;
};
If this is run as is, then the callback is called. If AcquisitionType acquisition("224.1.1.1", 5004); is moved to after button->show() then it is not.
It seems that I needed to change "timeout", 1000000000, to "timeout", G_GUINT64_CONSTANT(1000000000),. The "timeout" property is a guint64, and g_object_set() is variadic, so an int literal is not promoted to 64 bits — the wrong value was being read from the argument list.

Display no signal image if video sender is closed

If I close the sender in this example, the video displayed by the receiver freezes. Is there a way to display a static no signal image instead, for example an all blue image, and have the video return when the sender restarts?
Sender
gst-launch-1.0 videotestsrc ! video/x-raw,format=GRAY8 ! videoconvert ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
Receiver
gst-launch-1.0 udpsrc port=5000 ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
EDIT
This code seems to come close, but for some reason if I add in the videotestsrc by uncommenting the commented out lines, the udpsrc no longer calls the timeout callback:
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
/* Shared state handed to every callback in this example. */
struct gstreamer_data {
GstElement* pipeline;
GstElement* no_signal_source;  // videotestsrc used as the "no signal" image
GstElement* udp_source;
GstElement* rtp_decoder;       // rtph264depay
GstElement* video_decoder;     // avdec_h264
GstElement* input_selector;    // switches between live feed and test image
GstElement* video_converter;
GstElement* video_sink;
gulong signal_handler_id;      // id of the currently connected bus handler
GMainLoop* main_loop;
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
/* One-shot buffer probe on udpsrc's src pad: fires on the first buffer
 * after the stream (re)starts, re-subscribes the bus timeout handler, and
 * removes itself so it runs once per "signal regained" transition. */
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
GstBus* bus;
printf("have data\n");
bus = gst_element_get_bus(user_data->pipeline);
/* Start listening for GstUDPSrcTimeout again now that data is flowing. */
user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
gst_object_unref(bus);
return GST_PAD_PROBE_REMOVE;
}
/* Bus "message::element" handler: on a udpsrc timeout, disconnect itself
 * (so timeouts are not reported repeatedly) and install a one-shot buffer
 * probe that fires when data resumes. */
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
const GstStructure* st = gst_message_get_structure(message);
GstPad* pad;
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("Timeout received from udpsrc\n");
g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
pad = gst_element_get_static_pad(data->udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
/* BUG FIX: gst_element_get_static_pad() returns a new reference that
 * was leaked here (the later revision of this code unrefs it). */
gst_object_unref(pad);
}
}
}
/* Bus "message::error" handler: report the error and stop the main loop. */
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* error = NULL;
gchar* details = NULL;
gst_message_parse_error(message, &error, &details);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), error->message);
g_printerr("Debugging information: %s\n", details ? details : "none");
g_clear_error(&error);
g_free(details);
g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.no_signal_source = gst_element_factory_make("videotestsrc", "no_signal_source");
g_object_set(G_OBJECT(data.no_signal_source),
"pattern", 6,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.no_signal_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.input_selector ||
!data.video_converter ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
//data.no_signal_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.input_selector,
data.video_converter,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_decoder, "src");
GstPad* sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, sink_1);
/*
GstPad* src_2 = gst_element_get_static_pad(data.no_signal_source, "src");
GstPad* sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, sink_2);
*/
g_object_set(G_OBJECT(data.input_selector),
"active-pad", sink_1,
NULL);
if (gst_element_link_many(
data.input_selector,
data.video_converter,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT
This code seems fine until I uncomment the selection of the active pad in the callbacks. Do I need to do something before I change the active pad, like stop the pipeline?
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
/* Shared state handed to every callback in this example. */
struct gstreamer_data {
GstElement* pipeline;
GstElement* video_source;    // videotestsrc fallback ("no signal" image)
GstElement* udp_source;
GstElement* rtp_decoder;     // rtph264depay
GstElement* video_decoder;   // avdec_h264
GstElement* video_converter;
GstElement* input_selector;  // switches between test image and live feed
GstPad* sink_1;              // selector request pad fed by video_source
GstPad* sink_2;              // selector request pad fed by video_converter
GstElement* video_sink;
gulong signal_handler_id;    // id of the currently connected bus handler
GMainLoop* main_loop;
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
/* One-shot buffer probe on udpsrc's src pad: fires when network data
 * (re)appears, re-subscribes the bus timeout handler, and removes itself.
 * The commented-out block is the active-pad switch under discussion in the
 * question (switching the selector to the live feed). */
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
GstBus* bus;
printf("have data\n");
/*
g_object_set(G_OBJECT(user_data->input_selector),
"active-pad", user_data->sink_2,
NULL);
*/
bus = gst_element_get_bus(user_data->pipeline);
/* Start listening for GstUDPSrcTimeout again now that data is flowing. */
user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
gst_object_unref(bus);
return GST_PAD_PROBE_REMOVE;
}
/* Bus "message::element" handler: on a udpsrc timeout, disconnect itself
 * and install a one-shot buffer probe that fires when data resumes. The
 * commented-out block is the switch back to the "no signal" test image. */
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
const GstStructure* st = gst_message_get_structure(message);
GstPad* pad;
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("no data\n");
/*
g_object_set(G_OBJECT(data->input_selector),
"active-pad", data->sink_1,
NULL);
*/
g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
pad = gst_element_get_static_pad(data->udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
gst_object_unref(pad);
}
}
}
/* Bus "message::error" handler: report the error and stop the main loop. */
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* error = NULL;
gchar* details = NULL;
gst_message_parse_error(message, &error, &details);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), error->message);
g_printerr("Debugging information: %s\n", details ? details : "none");
g_clear_error(&error);
g_free(details);
g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.video_source = gst_element_factory_make("videotestsrc", "video_source");
g_object_set(G_OBJECT(data.video_source),
"pattern", 6,
"is-live", true,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.video_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.input_selector ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.video_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
data.input_selector,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_source, "src");
data.sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, data.sink_1);
gst_object_unref(src_1);
GstPad* src_2 = gst_element_get_static_pad(data.video_converter, "src");
data.sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, data.sink_2);
gst_object_unref(src_2);
if (gst_element_link_many(
data.input_selector,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
gst_object_unref(pad);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT:
It seems fine all of a sudden. I don't understand. Is this suitable code? Can it be improved?
EDIT:
Setting a width and height on the videotestsrc in the sender seems to make it work. If I remove those, it breaks. Why?

Gstreamer recording video with audio

I'm trying to record on a file a video from my webcam along with audio using Gstreamer on my Ubuntu 16 machine through glib library.
I'm able to watch the video streaming from the webcam through these code lines
#include <gst/gst.h>
/* Displays the webcam: v4l2src -> videoconvert -> autovideosink, blocking
 * on the bus until an error or end-of-stream. */
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink, *convert;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements (the commented-out pair is the audio variant) */
source = gst_element_factory_make ("v4l2src", "source");
sink = gst_element_factory_make ("autovideosink", "sink");
convert =gst_element_factory_make("videoconvert","convert");
//convert = gst_element_factory_make ("audioconvert", "convert");
//sink = gst_element_factory_make ("autoaudiosink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink || !convert) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Select the capture device for the source */
g_object_set (source, "device", "/dev/video0", NULL);
/* Build the pipeline: links are made pairwise, downstream link first */
gst_bin_add_many (GST_BIN (pipeline), source, sink, convert, NULL);
if (gst_element_link (convert, sink) != TRUE) {
g_printerr ("Elements could not be linked confert sink.\n");
gst_object_unref (pipeline);
return -1;
}
if (gst_element_link (source, convert) != TRUE) {
g_printerr ("Elements could not be linked source -convert.\n");
gst_object_unref (pipeline);
return -1;
}
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait (blocking) until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,(GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
and to capture audio from microphone and listen it through the speakers using these code lines
#include <gst/gst.h>
#include <glib.h>
/* Bus watch: quits the supplied main loop on EOS or ERROR, printing
 * diagnostics for the latter. Returns TRUE to keep the watch installed. */
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
GstMessageType type = GST_MESSAGE_TYPE (msg);
if (type == GST_MESSAGE_EOS) {
g_print ("End of stream\n");
g_main_loop_quit (loop);
} else if (type == GST_MESSAGE_ERROR) {
GError *error;
gchar *debug;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
}
return TRUE;
}
/* Main function for audio pipeline initialization and looping streaming process */
/* Audio loopback: alsasrc -> (caps filter) -> alsasink, running until EOS
 * or error is seen on the bus. */
gint
main (gint argc, gchar **argv) {
GMainLoop *loop;
GstElement *pipeline, *audio_source, *sink;
GstBus *bus;
guint bus_watch_id;
GstCaps *caps;
gboolean ret;
/* Initialization of gstreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Elements creation */
pipeline = gst_pipeline_new ("audio_stream");
audio_source = gst_element_factory_make ("alsasrc", "audio_source");
sink = gst_element_factory_make ("alsasink", "audio_sink");
if (!pipeline) {
g_printerr ("Audio: Pipeline couldn't be created\n");
return -1;
}
if (!audio_source) {
g_printerr ("Audio: alsasrc couldn't be created\n");
return -1;
}
if (!sink) {
g_printerr ("Audio: Output file couldn't be created\n");
return -1;
}
g_object_set (G_OBJECT (audio_source), "device", "hw:1,0", NULL);
g_object_set (G_OBJECT (sink), "device", "hw:1,0", NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
gst_bin_add_many (GST_BIN(pipeline), audio_source, sink, NULL);
/* Constrain the link to interleaved S16LE stereo at 44.1 kHz. */
caps = gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "S16LE", "layout", G_TYPE_STRING, "interleaved", "rate", G_TYPE_INT, (int)44100, "channels", G_TYPE_INT, (int)2, NULL);
ret = gst_element_link_filtered (audio_source, sink, caps);
/* BUG FIX: gst_element_link_filtered() does not take ownership of caps;
 * the reference was previously leaked on the success path. */
gst_caps_unref (caps);
if (!ret) {
g_print ("audio_source and sink couldn't be linked\n");
/* BUG FIX: was `return FALSE` (0), which reported success to the shell. */
return -1;
}
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_print ("streaming...\n");
g_main_loop_run (loop);
g_print ("Returned, stopping stream\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
What I really don't understand is how to get video from the webcam and audio from my ALSA hardware at the same time and save them into a file (such as .mp4, for example). Can anyone help me? I tried to find something useful, but there's nothing on the board. In addition, I would also really appreciate knowing how to save just the video stream or just the audio stream in separate files.
UPDATE
I looked again at the tutorials and at the git link given by @nayana, and tried to code something myself. I have two results:
#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>
static GMainLoop *loop;
static GstElement *pipeline;
static GstElement *muxer, *sink;
static GstElement *src_video, *encoder_video, *queue_video;
static GstElement *src_audio, *encoder_audio, *queue_audio;
static GstBus *bus;
/* Bus watch for the recording pipeline: logs errors and warnings, and on
 * EOS tears the pipeline down so the muxer can finalize the file, then
 * exits the process. */
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_error (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_WARNING:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_warning (message, &err, &debug);
/* BUG FIX: this branch previously printed "ERROR:", mislabeling warnings. */
g_printerr ("WARNING: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
break;
}
case GST_MESSAGE_EOS:{
/* EOS (e.g. injected by the SIGINT handler): stop the pipeline so
 * mp4mux can write its index, then exit cleanly. */
g_print ("Got EOS\n");
g_main_loop_quit (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (pipeline);
exit(0);
break;
}
default:
break;
}
return TRUE;
}
/* SIGINT handler: asks the pipeline to finish cleanly by injecting EOS, so
 * the muxer can finalize the output file before the process exits.
 * NOTE(review): g_print and gst_element_send_event are not
 * async-signal-safe; consider g_unix_signal_add() instead — confirm. */
void sigintHandler(int unused) {
g_print("You ctrl-c-ed! Sending EoS");
gst_element_send_event(pipeline, gst_event_new_eos());
}
/* Records webcam video and ALSA audio into video_audio_test.mp4:
 * v4l2src -> x264enc -> queue -> mp4mux <- queue <- lamemp3enc <- alsasrc,
 * mp4mux -> filesink. Ctrl-C injects EOS to finalize the file. */
int main(int argc, char *argv[])
{
signal(SIGINT, sigintHandler);
gst_init (&argc, &argv);
pipeline = gst_pipeline_new(NULL);
src_video = gst_element_factory_make("v4l2src", NULL);
encoder_video = gst_element_factory_make("x264enc", NULL);
queue_video = gst_element_factory_make("queue", NULL);
src_audio = gst_element_factory_make ("alsasrc", NULL);
encoder_audio = gst_element_factory_make("lamemp3enc", NULL);
queue_audio = gst_element_factory_make("queue", NULL);
muxer = gst_element_factory_make("mp4mux", NULL);
sink = gst_element_factory_make("filesink", NULL);
if (!pipeline || !src_video || !encoder_video || !src_audio || !encoder_audio
|| !queue_video || !queue_audio || !muxer || !sink) {
g_error("Failed to create elements");
return -1;
}
g_object_set(src_audio, "device", "hw:1,0", NULL);
g_object_set(sink, "location", "video_audio_test.mp4", NULL);
gst_bin_add_many(GST_BIN(pipeline), src_video, encoder_video, queue_video,
src_audio, encoder_audio, queue_audio, muxer, sink, NULL);
/* BUG FIX: the return values of these two link calls were ignored, so a
 * failed branch link went unreported (only muxer->sink was checked). */
if (!gst_element_link_many (src_video,encoder_video,queue_video, muxer,NULL)){
g_error("Failed to link video elements");
return -2;
}
if (!gst_element_link_many (src_audio,encoder_audio,queue_audio, muxer,NULL)){
g_error("Failed to link audio elements");
return -2;
}
if (!gst_element_link_many(muxer, sink, NULL)){
g_error("Failed to link elements");
return -2;
}
loop = g_main_loop_new(NULL, FALSE);
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(message_cb), NULL);
gst_object_unref(GST_OBJECT(bus));
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Starting loop");
g_main_loop_run(loop);
return 0;
}
With this upon i am able to record the video from the cam, but the audio is recorded for just one second somewhere randomly during the recording and it gives me this error
ERROR: from element /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0: Can't record audio fast enough
Additional debug info:
gstaudiobasesrc.c(869): gst_audio_base_src_create (): /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0:
Dropped 206388 samples. This is most likely because downstream can't keep up and is consuming samples too slowly.
So i tried to add some setting and queues
#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>
static GMainLoop *loop;
static GstElement *pipeline;
static GstElement *muxer, *sink;
static GstElement *src_video, *encoder_video, *queue_video, *rate_video, *scale_video, *capsfilter_video;
static GstElement *src_audio, *encoder_audio, *queue_audio, *queue_audio2, *capsfilter_audio, *rate_audio;
static GstBus *bus;
static GstCaps *caps;
/* Bus watch for the recording pipeline: logs errors and warnings, and on
 * EOS tears the pipeline down so the muxer can finalize the file, then
 * exits the process. */
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_error (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_WARNING:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_warning (message, &err, &debug);
/* BUG FIX: this branch previously printed "ERROR:", mislabeling warnings. */
g_printerr ("WARNING: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
break;
}
case GST_MESSAGE_EOS:{
/* EOS (e.g. injected by the SIGINT handler): stop the pipeline so
 * mp4mux can write its index, then exit cleanly. */
g_print ("Got EOS\n");
g_main_loop_quit (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (pipeline);
exit(0);
break;
}
default:
break;
}
return TRUE;
}
/* SIGINT handler: asks the pipeline to finish cleanly by injecting EOS, so
 * the muxer can finalize the output file before the process exits.
 * NOTE(review): g_print and gst_element_send_event are not
 * async-signal-safe; consider g_unix_signal_add() instead — confirm. */
void sigintHandler(int unused) {
g_print("You ctrl-c-ed! Sending EoS");
gst_element_send_event(pipeline, gst_event_new_eos());
}
/* Second recording attempt: adds videorate/videoscale plus caps filters on
 * both branches, and extra queues around audiorate.
 * NOTE(review): the reported "not-negotiated" failure on alsasrc suggests
 * the device cannot produce audio/x-raw,rate=44100,channels=1 directly —
 * confirm the hardware caps / insert audioconvert+audioresample. */
int main(int argc, char *argv[])
{
signal(SIGINT, sigintHandler);
gst_init (&argc, &argv);
pipeline = gst_pipeline_new(NULL);
src_video = gst_element_factory_make("v4l2src", NULL);
rate_video = gst_element_factory_make ("videorate", NULL);
scale_video = gst_element_factory_make ("videoscale", NULL);
capsfilter_video = gst_element_factory_make ("capsfilter", NULL);
queue_video = gst_element_factory_make("queue", NULL);
encoder_video = gst_element_factory_make("x264enc", NULL);
src_audio = gst_element_factory_make ("alsasrc", NULL);
capsfilter_audio = gst_element_factory_make ("capsfilter", NULL);
queue_audio = gst_element_factory_make("queue", NULL);
rate_audio = gst_element_factory_make ("audiorate", NULL);
queue_audio2 = gst_element_factory_make("queue", NULL);
encoder_audio = gst_element_factory_make("lamemp3enc", NULL);
muxer = gst_element_factory_make("mp4mux", NULL);
sink = gst_element_factory_make("filesink", NULL);
if (!pipeline || !src_video || !rate_video || !scale_video || !capsfilter_video
|| !queue_video || !encoder_video || !src_audio || !capsfilter_audio
|| !queue_audio || !rate_audio || !queue_audio2 || !encoder_audio
|| !muxer || !sink) {
g_error("Failed to create elements");
return -1;
}
// Set up the pipeline
g_object_set(src_video, "device", "/dev/video0", NULL);
g_object_set(src_audio, "device", "hw:1,0", NULL);
g_object_set(sink, "location", "video_audio_test.mp4", NULL);
// video settings: force I420 480x384 @ 25 fps before the encoder
caps = gst_caps_from_string("video/x-raw,format=(string)I420,width=480,height=384,framerate=(fraction)25/1");
g_object_set (G_OBJECT (capsfilter_video), "caps", caps, NULL);
gst_caps_unref (caps);
// audio settings: force mono 44.1 kHz raw audio before the encoder
caps = gst_caps_from_string("audio/x-raw,rate=44100,channels=1");
g_object_set (G_OBJECT (capsfilter_audio), "caps", caps, NULL);
gst_caps_unref (caps);
// add all elements into the pipeline
gst_bin_add_many(GST_BIN(pipeline), src_video, rate_video, scale_video, capsfilter_video,
queue_video, encoder_video, src_audio, capsfilter_audio, queue_audio, rate_audio,
queue_audio2, encoder_audio, muxer, sink, NULL);
if (!gst_element_link_many (src_video,rate_video,scale_video, capsfilter_video,
queue_video, encoder_video, muxer,NULL))
{
g_error("Failed to link video elements");
return -2;
}
if (!gst_element_link_many (src_audio, capsfilter_audio, queue_audio, rate_audio,
queue_audio2, encoder_audio, muxer,NULL))
{
g_error("Failed to link audio elements");
return -2;
}
if (!gst_element_link_many(muxer, sink, NULL))
{
g_error("Failed to link elements");
return -2;
}
loop = g_main_loop_new(NULL, FALSE);
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(message_cb), NULL);
gst_object_unref(GST_OBJECT(bus));
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Starting loop");
g_main_loop_run(loop);
return 0;
}
This time the code doesn't record anything and gives me the following error:
ERROR: from element /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0: Internal data flow error.
Additional debug info:
gstbasesrc.c(2948): gst_base_src_loop (): /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0:
streaming task paused, reason not-negotiated (-4)
Can you address me to fix the error?
Thanks in advance
What you need is the multiplexer - such GStreamer element that can merge two streams into one.
mp4, mkv, avi.. are just a container formats which contains multiple "data streams", which can be audio, video, subtitles (not all formats support this).
I don't know about your use case, but you don't need C code for what you do. You can just use gst-launch-1.0 tool which has its own GStreamer kind-of-scripting language.
For simplicity I will use debugging elements videotestsrc and audiotestsrc for simulating input (instead of actual camera etc).
gst-launch-1.0 -e videotestsrc ! x264enc ! mp4mux name=mux ! filesink location="bla.mp4" audiotestsrc ! lamemp3enc ! mux.
videotestsrc --> x264enc -----\
>---> mp4mux ---> filesink
audiotestsrc --> lamemp3enc --/
Explanation:
Videotestsrc generates raw video which is in GStreamer terms called "video/x-raw".
However mp4 cannot hold raw video, so we need to encode it with for example x264enc which makes our data "video/x-h264".
Then we can finally mux this into our mp4 with mp4mux element.
When we take a look into GStreamer docs using gst-inspect-1.0 mp4mux we see that this element supports various formats amongst which there is also video/x-h264.
The same thing we do with audio with either faac for AAC format or lamemp3enc for mp3.
With gst-launch-1.0 I did two tricks and one bonus trick:
ability to have separate branches in one line. This is achieved by just separating those branches with space instead of !
ability to make an alias with name=mux and refer to it later by appending a dot to the name, as in mux. — you can choose any name you like for that element.
Write EOS after hitting ctrl+c to stop the recording. This is achieved with parameter -e
Finally the output goes to filesink which just writes anything you give it to file.
Now for a homework you:
Use your elements for what you need - v4l2, alsasrc
Add queue elements to add buffering and thread separation