Basler Pylon to GStreamer to multicast UDP - c++

I am attempting to use GStreamer to send captured images from a Basler acA720-290gm GigE camera to multiple clients over UDP multicast. I have been able to acquire the images using the Pylon 7.1.0.25066 API and send them through a GStreamer pipeline consisting of appsrc, videoconvert and ximagesink. However, if I try to add elements to convert the stream to RTP and back, then I only get a single static image.
This code successfully displays the stream on Debian 11. I'm not sure if I can use something other than GRAY8 for the appsrc caps format or not.
Test.cpp
#include <pylon/PylonIncludes.h>
#include <gst/gst.h>
#include "unistd.h"
#include "pthread.h"
pthread_t thread_1;
pthread_t thread_2;
Pylon::CPylonImage image;
bool image_valid = false;
bool gstreamer_thread_done = false;
// Camera acquisition thread: connects to the GigE camera at a fixed IP and
// keeps copying the newest grabbed frame into the global `image`.
// NOTE(review): `image`, `image_valid` and `gstreamer_thread_done` are plain
// globals shared across threads with no mutex/atomics -- this is a data race;
// consider std::atomic<bool> plus a mutex around `image`.
void* pylon_thread(void*) {
Pylon::CDeviceInfo device_info;
Pylon::CGrabResultPtr ptrGrabResult;
// Initializes the pylon runtime for the lifetime of this scope.
Pylon::PylonAutoInitTerm autoInitTerm;
device_info.SetIpAddress("192.168.1.109");
while (1) {
if (gstreamer_thread_done) { break; }
try {
// (Re)create the camera each pass so a transient failure is retried.
Pylon::CInstantCamera camera(Pylon::CTlFactory::GetInstance().CreateDevice(device_info));
camera.StartGrabbing(Pylon::GrabStrategy_LatestImageOnly);
while (camera.IsGrabbing()) {
if (gstreamer_thread_done) { break; }
// Blocks up to 5 s for a frame; throws on timeout (caught below).
camera.RetrieveResult(5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException);
if (ptrGrabResult->GrabSucceeded()) {
// Deep-copies the grab result; the GStreamer thread wraps this memory.
image.CopyImage(ptrGrabResult);
image_valid = true;
}
else {
fprintf(stderr, "Error: %u %s\n", ptrGrabResult->GetErrorCode(), ptrGrabResult->GetErrorDescription().c_str());
}
}
}
catch (const Pylon::GenericException &e) {
fprintf(stderr, "An exception occurred.\n");
fprintf(stderr, "%s\n", e.GetDescription());
// Back off before retrying the connection.
sleep(1);
}
}
pthread_exit(NULL);
}
// Shared state for the GStreamer thread: the pipeline elements plus the id of
// the idle source that feeds appsrc.
struct gstreamer_data {
GstElement* appsrc1;
GstElement* videoconvert1;
GstElement* ximagesink1;
GstElement* pipeline;
GMainLoop* main_loop;
// Non-zero while push_data() is scheduled via g_idle_add(); 0 when idle.
guint source_id;
};
// Idle-source callback: wraps the most recent camera frame in a GstBuffer and
// pushes it into appsrc. Returns TRUE to stay scheduled, FALSE to stop.
static gboolean push_data(gstreamer_data* data) {
    GstBuffer* buffer;
    GstFlowReturn ret;
    // NOTE(review): this wraps the live pixel memory of the global `image`
    // without copying while pylon_thread keeps overwriting it -- a data race.
    // Prefer copying the frame or guarding `image` with a mutex.
    buffer = gst_buffer_new_wrapped_full(
        (GstMemoryFlags) GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
        (gpointer) image.GetBuffer(), image.GetImageSize(),
        0, image.GetImageSize(), NULL, NULL
    );
    // The "push-buffer" action signal takes its own reference to the buffer;
    // we must drop ours afterwards, otherwise every frame is leaked (the
    // original never unreffed the buffer).
    g_signal_emit_by_name(data->appsrc1, "push-buffer", buffer, &ret);
    gst_buffer_unref(buffer);
    if (ret != GST_FLOW_OK) {
        printf("Error\n");
        return FALSE;
    }
    return TRUE;
}
/* appsrc "need-data" handler: begin pushing frames by scheduling push_data()
 * as an idle source, unless a feeder is already active. */
static void start_feed(GstElement* source, guint size, gstreamer_data* data) {
    (void) source;
    (void) size;
    if (data->source_id != 0) {
        return; /* already feeding */
    }
    data->source_id = g_idle_add((GSourceFunc) push_data, data);
}
/* appsrc "enough-data" handler: tear down the idle feeder if it is running. */
static void stop_feed(GstElement* source, gstreamer_data* data) {
    (void) source;
    if (data->source_id == 0) {
        return; /* nothing scheduled */
    }
    g_source_remove(data->source_id);
    data->source_id = 0;
}
// Bus "message::error" handler: print the error details and stop the loop.
static void error_cb(GstBus* bus, GstMessage* msg, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
// Quitting the loop lets gstreamer_thread() fall through to its cleanup.
g_main_loop_quit(data->main_loop);
}
// Builds appsrc -> videoconvert -> ximagesink, runs a GLib main loop, and
// signals pylon_thread to stop when the loop exits.
void* gstreamer_thread(void*) {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
data.source_id = 0;
gst_init(NULL, NULL);
data.appsrc1 = gst_element_factory_make("appsrc", "appsrc1");
// Live push source operating in time format; frames come from push_data().
g_object_set(
G_OBJECT(data.appsrc1),
"stream-type", 0,
"format", GST_FORMAT_TIME,
"is-live", TRUE,
NULL
);
// Caps are read from the first captured frame; main() guarantees `image`
// is populated before this thread starts. framerate 0/1 = variable rate.
g_object_set(
G_OBJECT(data.appsrc1),
"caps", gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "GRAY8",
"width", G_TYPE_INT, image.GetWidth(),
"height", G_TYPE_INT, image.GetHeight(),
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL
),
NULL
);
// appsrc flow control toggles the idle feeder on and off.
g_signal_connect(data.appsrc1, "need-data", G_CALLBACK(start_feed), &data);
g_signal_connect(data.appsrc1, "enough-data", G_CALLBACK(stop_feed), &data);
data.videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
data.ximagesink1 = gst_element_factory_make("ximagesink", "ximagesink1");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.appsrc1 ||
!data.videoconvert1 ||
!data.ximagesink1
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many (
GST_BIN(data.pipeline),
data.appsrc1,
data.videoconvert1,
data.ximagesink1,
NULL
);
if (
gst_element_link_many (
data.appsrc1,
data.videoconvert1,
data.ximagesink1,
NULL
) != TRUE
)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
// The signal watch holds its own bus reference, so unreffing here is safe;
// the watch itself is never removed (acceptable for a process-lifetime run).
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
printf("Exiting.\n");
// Tell pylon_thread to wind down (non-atomic flag; see note there).
gstreamer_thread_done = true;
pthread_exit(NULL);
}
// Spawns the camera thread, waits until the first frame has been captured
// (the appsrc caps are read from `image`), then starts the GStreamer thread.
int main() {
int error;
error = pthread_create(&thread_1, NULL, pylon_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
// NOTE(review): busy-wait on a plain bool written by another thread -- not
// synchronized; the compiler may legally cache it. Use std::atomic<bool>.
while (image_valid == false) {
sleep(1);
}
error = pthread_create(&thread_2, NULL, gstreamer_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
pthread_join(thread_1, NULL);
pthread_join(thread_2, NULL);
return 0;
}
Makefile
# Makefile for Basler pylon sample program
.PHONY: all clean
# The program to build
NAME := Test
# Installation directories for pylon
PYLON_ROOT ?= /opt/pylon
# Build tools and flags
LD := $(CXX)
CPPFLAGS := $(shell $(PYLON_ROOT)/bin/pylon-config --cflags) $(shell pkg-config --cflags gstreamer-1.0) $(shell pkg-config --cflags opencv4) -DUSE_GIGE
CXXFLAGS := #e.g., CXXFLAGS=-g -O0 for debugging
LDFLAGS := $(shell $(PYLON_ROOT)/bin/pylon-config --libs-rpath)
LDLIBS := $(shell $(PYLON_ROOT)/bin/pylon-config --libs) $(shell pkg-config --libs gstreamer-1.0) -lopencv_core -lopencv_imgproc -lpthread
# Rules for building
all: $(NAME)
# $@ is the target, $^ all prerequisites, $< the first prerequisite.
# (The original used "$#", which is not a make automatic variable and
# expands to nothing, breaking both link and compile commands.)
$(NAME): $(NAME).o
	$(LD) $(LDFLAGS) -o $@ $^ $(LDLIBS)
$(NAME).o: $(NAME).cpp
	$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c -o $@ $<
clean:
	$(RM) $(NAME).o $(NAME)
This code displays just a single image:
Test.cpp
#include <pylon/PylonIncludes.h>
#include <gst/gst.h>
#include "unistd.h"
#include "pthread.h"
pthread_t thread_1;
pthread_t thread_2;
Pylon::CPylonImage image;
bool image_valid = false;
bool gstreamer_thread_done = false;
// Same acquisition loop as the first listing: copy the newest frame from the
// GigE camera into the global `image` until the GStreamer thread finishes.
// NOTE(review): shares non-atomic globals across threads (data race).
void* pylon_thread(void*) {
Pylon::CDeviceInfo device_info;
Pylon::CGrabResultPtr ptrGrabResult;
Pylon::PylonAutoInitTerm autoInitTerm;
device_info.SetIpAddress("192.168.1.109");
while (1) {
if (gstreamer_thread_done) { break; }
try {
// (Re)create the camera each pass so transient failures retry cleanly.
Pylon::CInstantCamera camera(Pylon::CTlFactory::GetInstance().CreateDevice(device_info));
camera.StartGrabbing(Pylon::GrabStrategy_LatestImageOnly);
while (camera.IsGrabbing()) {
if (gstreamer_thread_done) { break; }
camera.RetrieveResult(5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException);
if (ptrGrabResult->GrabSucceeded()) {
image.CopyImage(ptrGrabResult);
image_valid = true;
}
else {
fprintf(stderr, "Error: %u %s\n", ptrGrabResult->GetErrorCode(), ptrGrabResult->GetErrorDescription().c_str());
}
}
}
catch (const Pylon::GenericException &e) {
fprintf(stderr, "An exception occurred.\n");
fprintf(stderr, "%s\n", e.GetDescription());
// Back off before retrying the connection.
sleep(1);
}
}
pthread_exit(NULL);
}
// Shared state for the RTP round-trip pipeline; source_id tracks the idle
// feeder (0 when idle).
struct gstreamer_data {
GstElement* appsrc1;
GstElement* videoconvert1;
GstElement* x264enc1;
// NOTE(review): rtph264pay1/rtph264depay1 are created in gstreamer_thread()
// below but never added to the pipeline or linked there.
GstElement* rtph264pay1;
GstElement* rtph264depay1;
GstElement* avdec_h2641;
GstElement* videoconvert2;
GstElement* ximagesink1;
GstElement* pipeline;
GMainLoop* main_loop;
guint source_id;
};
// Idle-source callback: wraps the most recent camera frame in a GstBuffer and
// pushes it into appsrc. Returns TRUE to stay scheduled, FALSE to stop.
static gboolean push_data(gstreamer_data* data) {
    GstBuffer* buffer;
    GstFlowReturn ret;
    // NOTE(review): wraps the live pixel memory of the global `image` while
    // pylon_thread keeps overwriting it -- a data race; copy or lock instead.
    buffer = gst_buffer_new_wrapped_full(
        (GstMemoryFlags) GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
        (gpointer) image.GetBuffer(), image.GetImageSize(),
        0, image.GetImageSize(), NULL, NULL
    );
    // "push-buffer" takes its own reference; drop ours to avoid leaking one
    // buffer per frame (missing in the original).
    g_signal_emit_by_name(data->appsrc1, "push-buffer", buffer, &ret);
    gst_buffer_unref(buffer);
    if (ret != GST_FLOW_OK) {
        printf("Error\n");
        return FALSE;
    }
    return TRUE;
}
// appsrc "need-data": schedule push_data() as an idle source if not already.
static void start_feed(GstElement* source, guint size, gstreamer_data* data) {
(void) source;
(void) size;
if (data->source_id == 0) {
g_print("Start feeding\n");
data->source_id = g_idle_add((GSourceFunc) push_data, data);
}
}
// appsrc "enough-data": cancel the idle feeder if one is scheduled.
static void stop_feed(GstElement* source, gstreamer_data* data) {
(void) source;
if (data->source_id != 0) {
g_print("Stop feeding\n");
g_source_remove(data->source_id);
data->source_id = 0;
}
}
// Bus "message::error" handler: print the error details and stop the loop.
static void error_cb(GstBus* bus, GstMessage* msg, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
// Quitting the loop lets gstreamer_thread() fall through to its cleanup.
g_main_loop_quit(data->main_loop);
}
// Builds and runs the round-trip pipeline:
//   appsrc -> videoconvert -> x264enc -> rtph264pay -> rtph264depay
//          -> avdec_h264 -> videoconvert -> ximagesink
// Fixes vs. the original (which showed only one static frame):
//  * rtph264pay1/rtph264depay1 were created but never added to the bin or
//    linked, so the RTP stage was dead code.
//  * appsrc buffers now get timestamps ("do-timestamp") -- with
//    format=GST_FORMAT_TIME and no PTS downstream cannot schedule frames.
//  * x264enc is tuned for zero latency; its default lookahead buffers many
//    frames before emitting anything, which also presents as a frozen image.
void* gstreamer_thread(void*) {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
data.source_id = 0;
gst_init(NULL, NULL);
data.appsrc1 = gst_element_factory_make("appsrc", "appsrc1");
g_object_set(
G_OBJECT(data.appsrc1),
"stream-type", 0,
"format", GST_FORMAT_TIME,
"is-live", TRUE,
"do-timestamp", TRUE, // stamp buffers with the pipeline clock
NULL
);
g_object_set(
G_OBJECT(data.appsrc1),
"caps", gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "GRAY8",
"width", G_TYPE_INT, image.GetWidth(),
"height", G_TYPE_INT, image.GetHeight(),
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL
),
NULL
);
g_signal_connect(data.appsrc1, "need-data", G_CALLBACK(start_feed), &data);
g_signal_connect(data.appsrc1, "enough-data", G_CALLBACK(stop_feed), &data);
data.videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
data.x264enc1 = gst_element_factory_make("x264enc", "x264enc1");
data.rtph264pay1 = gst_element_factory_make("rtph264pay", "rtph264pay1");
data.rtph264depay1 = gst_element_factory_make("rtph264depay", "rtph264depay1");
data.avdec_h2641 = gst_element_factory_make("avdec_h264", "avdec_h2641");
data.videoconvert2 = gst_element_factory_make("videoconvert", "videoconvert2");
data.ximagesink1 = gst_element_factory_make("ximagesink", "ximagesink1");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.appsrc1 ||
!data.videoconvert1 ||
!data.x264enc1 ||
!data.rtph264pay1 ||
!data.rtph264depay1 ||
!data.avdec_h2641 ||
!data.videoconvert2 ||
!data.ximagesink1
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
// tune=zerolatency (flag 0x4): no B-frames/lookahead, frames come out as
// they go in -- essential for a live appsrc feed.
g_object_set(G_OBJECT(data.x264enc1), "tune", 0x00000004, NULL);
gst_bin_add_many (
GST_BIN(data.pipeline),
data.appsrc1,
data.videoconvert1,
data.x264enc1,
data.rtph264pay1,
data.rtph264depay1,
data.avdec_h2641,
data.videoconvert2,
data.ximagesink1,
NULL
);
if (
gst_element_link_many (
data.appsrc1,
data.videoconvert1,
data.x264enc1,
data.rtph264pay1,
data.rtph264depay1,
data.avdec_h2641,
data.videoconvert2,
data.ximagesink1,
NULL
) != TRUE
)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
printf("Exiting.\n");
// Tell pylon_thread to wind down (non-atomic flag; see note there).
gstreamer_thread_done = true;
pthread_exit(NULL);
}
// Spawns the camera thread, waits until the first frame has been captured
// (the appsrc caps are read from `image`), then starts the GStreamer thread.
int main() {
int error;
error = pthread_create(&thread_1, NULL, pylon_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
// NOTE(review): busy-wait on a plain bool written by another thread -- not
// synchronized; use std::atomic<bool> (and a condition variable ideally).
while (image_valid == false) {
sleep(1);
}
error = pthread_create(&thread_2, NULL, gstreamer_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
pthread_join(thread_1, NULL);
pthread_join(thread_2, NULL);
return 0;
}

Maybe instead of copying the image from ptrGrabResult to CPylonImage, try the alternate method of receiving buffers: the image.AttachGrabResultBuffer() method. It is used in the GUI_ImageWindow sample project of the Pylon SDK.

Related

Dynamically link audio source element into gstreamer audiomixer element

I am trying to mix internal audio and microphone audio using gstreamer audiomixer element, and then mux the single stream with video data, so far I can do it only when the soundcard is already active.
I am using Waspisrc , waspisrc loopback=true property.
What I mean is my code works when there is already some song is being played in the computer and then i start my code, it works.
What I want to achieve is that the internal sound source can link with the audiomixer element dynamically, but currently it just gives me an error and the program crashes. What I have done so far is put the soundcard source element in a different bin from the main pipeline and add a data probe to the wasapisrc element; when audio data arrives, I try to link the source to a queue and then to the audiomixer in the main pipeline.
Any help on how I can achieve dynamically linking and unlinking a source element into audiomixer?
my code is below:
#include <gst/gst.h>
//#include "pch.h"
#include <windows.h>
#include <stdio.h>
GMainLoop* mainLoop;
GstElement *mainPipeline;
GstPadLinkReturn link_to_mixer(GstPad* binPad, GstElement* mix);
GstPad* retrieve_ghost_pad(GstElement* bin, GstElement* elem);
// All pipeline objects and bookkeeping for the screen/audio recorder, kept in
// one bag so a single pointer can be handed to callbacks.
typedef struct _elemStruct
{
// sources, per-branch queues, rate converters, and the audio mixer
GstElement *micSource, *micSourceQueue, *soundCardSrc, *soundCardSrcQueue, *micSrcRate, *micRateQueue, *soundCardRate, *soundCardRateQueue, *audioMixer, *audioMixerQueue;
// audio encode chain, muxer, and video capture/encode chain
GstElement* audioConverter, *audioConverterQueue, *audioEncoder, *audioEncoderQueue, *avMuxer, *gdiGrabber, *videoConverter, *x264encoder;
GstElement* muxerQueue, *fileSinker, *gdiGrabberQueue, *videoConverterQueue, *x264encoderQueue;
GstCaps *caps;
// NOTE(review): declared GstElement* but named like a GstMessage; unused in
// the visible code -- confirm intended type.
GstElement* message;
GstStateChangeReturn stateRet;
// container bins and the pads used for mixer/muxer linking
GstElement *micBin, *soundCardBin, *screenBin, *audioBin;
GstPad *micMixPad, *soundCardMixPad, *audioMuxPad, *videoMuxPad;
GstBus* mainBus;
GstStateChangeReturn ret;
GstMessage* msg;
guint bus_watch_id;
GstElement* soundCardTempSink;
}elemStruct;
/* Console control handler: beep + log per event. Returns TRUE when the event
 * is fully handled here, FALSE to let the next registered handler run. */
BOOL WINAPI CtrlHandler(DWORD fdwCtrlType)
{
    if (fdwCtrlType == CTRL_C_EVENT) {
        printf("Ctrl-C event\n\n");
        Beep(750, 300);
        return TRUE;
    }
    if (fdwCtrlType == CTRL_CLOSE_EVENT) {
        /* CTRL-CLOSE: confirm that the user wants to exit. */
        Beep(600, 200);
        printf("Ctrl-Close event\n\n");
        return TRUE;
    }
    if (fdwCtrlType == CTRL_BREAK_EVENT) {
        Beep(900, 200);
        printf("Ctrl-Break event\n\n");
        return FALSE;
    }
    if (fdwCtrlType == CTRL_LOGOFF_EVENT) {
        Beep(1000, 200);
        printf("Ctrl-Logoff event\n\n");
        return FALSE;
    }
    if (fdwCtrlType == CTRL_SHUTDOWN_EVENT) {
        Beep(750, 500);
        printf("Ctrl-Shutdown event\n\n");
        return FALSE;
    }
    /* Pass any other signal to the next handler. */
    return FALSE;
}
// Intended to attach the sound-card bin to the audiomixer once data flows.
// Still a stub: the ghost pad is created but never linked to a mixer.
// Fix vs. original: gst_bin_get_by_name() returns a new reference, which was
// leaked, and a NULL lookup result would have been passed on unchecked.
void addsoundsrc_toMainline(GstPadProbeInfo* info, GstElement* bin)
{
    (void) info;
    GstElement* queue = gst_bin_get_by_name(GST_BIN(bin), "sound_card_source_queue");
    if (queue == NULL) {
        g_printerr("sound_card_source_queue not found in bin\n");
        return;
    }
    GstPad* mixpad = retrieve_ghost_pad(bin, queue);
    (void) mixpad; // TODO: link_to_mixer(mixpad, <audioMixer>) once reachable here
    gst_object_unref(queue);
}
// Pad probe on the sound-card source: fires for every buffer. The dynamic
// re-linking experiments are commented out; currently it only logs and lets
// the buffer pass through.
GstPadProbeReturn soundCardProbe(GstPad* pad, GstPadProbeInfo* info, gpointer data)
{
//GstBuffer* buffer = gst_pad_probe_info_get_buffer(info);
GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
// NOTE(review): main() passes &mainStruct (an elemStruct**) as user data, so
// this cast yields a bogus pointer -- currently harmless only because
// mainElem is never dereferenced here.
elemStruct* mainElem = (elemStruct*)data;
g_print("received data in the soundcard probe ");
//GstElement* bin = mainElem->soundCardBin;
//bool add = gst_bin_add(GST_BIN(mainElem->audioBin), mainElem->soundCardBin);
//gst_element_sync_state_with_parent(mainElem->soundCardBin);
//GstElement* queue = gst_bin_get_by_name((GST_BIN(bin)), "sound_card_source_queue");
//GstPad* mixpad = retrieve_ghost_pad(bin, mainElem->soundCardSrcQueue);
//GstPad* mixPad = gst_element_get_static_pad(mainElem->soundCardSrcQueue, "sink");
//link_to_mixer(mixPad, mainElem->audioMixer);
//addsoundsrc_toMainline(info, bin);
return GST_PAD_PROBE_PASS;
}
/* Give a queue generous limits: up to 1000 buffers / 1000 s worth of data
 * (max-size-time is in nanoseconds). */
void set_queue_property(GstElement* _queue)
{
    g_object_set(G_OBJECT(_queue),
                 "max-size-buffers", 1000,
                 "max-size-time", 1000000000000,
                 NULL);
}
// Links binPad to a compatible sink pad on the mixer element.
// Fix vs. original: gst_element_get_compatible_pad() can return NULL (no free
// compatible pad); the original then crashed inside gst_pad_get_name(NULL).
GstPadLinkReturn link_to_mixer(GstPad* binPad, GstElement* mix)
{
    GstPad* mixerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
    if (mixerPad == NULL) {
        g_printerr("link_to_mixer: no compatible pad available on the mixer\n");
        return GST_PAD_LINK_REFUSED;
    }
    gchar* binPadName = gst_pad_get_name(binPad);
    gchar* mixerPadName = gst_pad_get_name(mixerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, mixerPad);
    if (GST_PAD_LINK_FAILED(retVal)) {
        g_printerr("link_to_mixer: linking %s to %s failed (%d)\n", binPadName, mixerPadName, retVal);
    } else {
        g_print(" a new link is created with %s and %s pads\n", binPadName, mixerPadName);
    }
    g_free(binPadName);
    g_free(mixerPadName);
    gst_object_unref(mixerPad);
    return retVal;
}
// Requests an "audio_%u" pad on the muxer and links binPad to it.
// Fixes vs. original: NULL-check the request pad, and do NOT call
// gst_element_release_request_pad() right after linking -- releasing unlinks
// and destroys the pad, silently undoing the link. Dropping our reference
// with gst_object_unref() is sufficient; the muxer keeps the pad alive.
GstPadLinkReturn audio_link_to_muxer(GstPad* binPad, GstElement* mix)
{
    GstPad* muxerPad = gst_element_get_request_pad(mix, "audio_%u");
    if (muxerPad == NULL) {
        g_printerr("audio_link_to_muxer: could not request an audio pad\n");
        return GST_PAD_LINK_REFUSED;
    }
    gchar* binPadName = gst_pad_get_name(binPad);
    gchar* muxerPadName = gst_pad_get_name(muxerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad);
    g_print(" a new link is created with %s and %s pads\n", binPadName, muxerPadName);
    g_free(binPadName);
    g_free(muxerPadName);
    gst_object_unref(muxerPad);
    return retVal;
}
// Requests a "video_%u" pad on the muxer and links binPad to it.
// Same fixes as audio_link_to_muxer: NULL-check the request pad and keep the
// pad alive (unref our reference instead of releasing the request pad, which
// would unlink it again).
GstPadLinkReturn video_link_to_muxer(GstPad* binPad, GstElement* mix)
{
    GstPad* muxerPad = gst_element_get_request_pad(mix, "video_%u");
    if (muxerPad == NULL) {
        g_printerr("video_link_to_muxer: could not request a video pad\n");
        return GST_PAD_LINK_REFUSED;
    }
    gchar* binPadName = gst_pad_get_name(binPad);
    gchar* muxerPadName = gst_pad_get_name(muxerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad);
    g_print(" a new link is created with %s and %s pads\n", binPadName, muxerPadName);
    g_free(binPadName);
    g_free(muxerPadName);
    gst_object_unref(muxerPad);
    return retVal;
}
// Links binPad to a compatible sink pad on the MPEG-TS muxer.
// Fix vs. original: gst_element_get_compatible_pad() may return NULL; the
// original then crashed inside gst_pad_get_name(NULL).
GstPadLinkReturn link_to_mpeg_muxer(GstPad* binPad, GstElement* mix)
{
    GstPad* muxerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
    if (muxerPad == NULL) {
        g_printerr("link_to_mpeg_muxer: no compatible pad available on the muxer\n");
        return GST_PAD_LINK_REFUSED;
    }
    gchar* binPadName = gst_pad_get_name(binPad);
    gchar* muxerPadName = gst_pad_get_name(muxerPad);
    GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad);
    if (GST_PAD_LINK_FAILED(retVal)) {
        g_printerr("link_to_mpeg_muxer: linking %s to %s failed (%d)\n", binPadName, muxerPadName, retVal);
    } else {
        g_print(" a new link is created with %s and %s pads\n", binPadName, muxerPadName);
    }
    g_free(binPadName);
    g_free(muxerPadName);
    gst_object_unref(muxerPad);
    return retVal;
}
// Wraps elem's static "src" pad in a ghost pad, adds it to bin, and returns
// the ghost pad (owned by the bin; may be NULL on failure).
// Fixes vs. original: NULL-check the static pad, and activate the ghost pad
// before adding -- pads added to an element that is not in NULL state must be
// activated explicitly or data flow through them fails.
GstPad* retrieve_ghost_pad(GstElement* bin, GstElement* elem)
{
    GstPad* elemPad = gst_element_get_static_pad(elem, "src");
    if (elemPad == NULL) {
        g_printerr("retrieve_ghost_pad: element has no static src pad\n");
        return NULL;
    }
    GstPad* ghost = gst_ghost_pad_new("ghostsrc", elemPad);
    gst_pad_set_active(ghost, TRUE);
    gst_element_add_pad(bin, ghost);
    gst_object_unref(elemPad);
    return ghost;
}
// GLib bus watch: logs EOS, errors and state changes; quits the main loop on
// EOS or error. `data` is the GMainLoop handed to gst_bus_add_watch().
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *)data;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
{
g_print("End of stream\n");
g_main_loop_quit(loop);
break;
}
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
case GST_MESSAGE_STATE_CHANGED:
{
// Logged for every element; useful when diagnosing stalled preroll.
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
g_print("Element %s changed state from %s to %s.\n",
GST_OBJECT_NAME(msg->src),
gst_element_state_get_name(old_state),
gst_element_state_get_name(new_state));
//if (new_state == GST_STATE_PAUSED)
//{
// gst_element_set_state(mainPipeline, GST_STATE_NULL);
//}
break;
}
break;
default:
break;
}
// Returning TRUE keeps this watch installed.
return TRUE;
}
int main(int argc, char** argv)
{
//gst - launch - 1.0.exe wasapisrc loopback = true\
// ! audiorate ! queue ! mix. wasapisrc low-latency=true \
// ! audiorate ! queue ! mix. audiomixer name=mix ! queue ! audioconvert \
// ! queue ! avenc_aac ! queue ! muxer. gdiscreencapsrc ! videoconvert \
// ! x264enc ! mpegtsmux name = muxer !queue ! filesink location=muxin.mp4 sync=false
elemStruct* mainStruct = new elemStruct();;
if (!gst_init_check(&argc, &argv, NULL))
{
g_printerr("couldn't initialize gstreamer\n");
return -1;
}
mainLoop = g_main_loop_new(NULL, FALSE);
if ((mainPipeline = gst_pipeline_new("main_pipeline")) == NULL)
{
}
mainStruct->micSource = gst_element_factory_make("wasapisrc", "mic_source");
mainStruct->soundCardSrc = gst_element_factory_make("wasapisrc", "sound_card_source");
mainStruct->gdiGrabber = gst_element_factory_make("dx9screencapsrc", "dx9_screen_capture_source");
mainStruct->micSourceQueue = gst_element_factory_make("queue", "mic_source_queue_elem");
mainStruct->soundCardSrcQueue = gst_element_factory_make("queue", "sound_card_source_queue");
mainStruct->micSrcRate = gst_element_factory_make("audiorate", "mic_audio_rate_elem");
mainStruct->soundCardRate = gst_element_factory_make("audiorate", "soundCard_audiorate_elem");
mainStruct->micRateQueue = gst_element_factory_make("queue", "mic_audiorate_queue");
mainStruct->soundCardRateQueue = gst_element_factory_make("queue", "soundCard_audiorate_queue");
mainStruct->audioMixer = gst_element_factory_make("audiomixer", "audio_mixer_elem");
mainStruct->audioMixerQueue = gst_element_factory_make("queue", "audio_mixer_queue_elem");
mainStruct->soundCardTempSink = gst_element_factory_make("autoaudiosink", "soundcard_temp_sink_elem");
mainStruct->audioEncoder = gst_element_factory_make("avenc_aac", "audio_encoder_elem");
mainStruct->audioEncoderQueue = gst_element_factory_make("queue", "audio_encoder_queue_elem");
mainStruct->audioConverter = gst_element_factory_make("audioconvert", "audio_convert_elem");
mainStruct->audioConverterQueue = gst_element_factory_make("queue", "audio_convert_queue_elem");
mainStruct->gdiGrabberQueue = gst_element_factory_make("queue", "gdi_grabber_queue_elem");
mainStruct->gdiGrabber = gst_element_factory_make("dx9screencapsrc", "gdi_grabber_elem");
mainStruct->videoConverterQueue = gst_element_factory_make("queue", "videoconvert_queue_elem");
mainStruct->x264encoderQueue = gst_element_factory_make("queue", "x264encoder_queue_elem");
mainStruct->videoConverter = gst_element_factory_make("videoconvert", "videoconvert_elem");
mainStruct->x264encoder = gst_element_factory_make("x264enc", "x264enc_elem");
mainStruct->avMuxer = gst_element_factory_make("mpegtsmux", "mp4_muxer_elem");
//if ((avMuxer = gst_element_factory_make("mpegtsmux", "mp4_muxer_elem")) == NULL)
mainStruct->fileSinker = gst_element_factory_make("filesink", "filesink_elem");
// set up all the sources
g_object_set(G_OBJECT(mainStruct->micSource), "do-timestamp", true, NULL);
g_object_set(G_OBJECT(mainStruct->soundCardSrc), "do-timestamp", true, "loopback", true, NULL);
g_object_set(G_OBJECT(mainStruct->gdiGrabber), "do-timestamp", true, "cursor", true, NULL);
g_object_set(G_OBJECT(mainStruct->x264encoder), "pass", 17, NULL);
g_object_set(G_OBJECT(mainStruct->fileSinker), "location", "sani_1486.mp4", "sync", false, NULL);
// set up all the queues
set_queue_property(mainStruct->micSourceQueue);
set_queue_property(mainStruct->soundCardSrcQueue);
set_queue_property(mainStruct->audioMixerQueue);
set_queue_property(mainStruct->audioEncoderQueue);
set_queue_property(mainStruct->gdiGrabberQueue);
set_queue_property(mainStruct->videoConverterQueue);
set_queue_property(mainStruct->x264encoderQueue);
// add the src elements to each src bin
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->micSource, mainStruct->micSourceQueue, NULL);
mainStruct->soundCardBin = gst_bin_new("sound_card_bin");
gst_bin_add_many(GST_BIN(mainStruct->soundCardBin), mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue, NULL);
gst_element_link_many(mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue,NULL);
GstPad* soundSourceprober = gst_element_get_static_pad(mainStruct->soundCardSrc, "src");
gst_pad_add_probe(soundSourceprober, GST_PAD_PROBE_TYPE_BUFFER, soundCardProbe, &mainStruct, NULL);
gst_element_set_state(mainStruct->soundCardBin, GST_STATE_PLAYING);
// link elements in each source bin
gst_element_link(mainStruct->micSource, mainStruct->micSourceQueue);
//gst_element_link_many(mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue, NULL);
// put this two bin in audiobin, we will connect audiobin to screenBin later
gst_bin_add_many(GST_BIN(mainPipeline),mainStruct->audioMixer, mainStruct->audioMixerQueue, mainStruct->audioEncoder, mainStruct->audioEncoderQueue, NULL);
//GstStateChangeReturn ret = gst_element_set_state(mainStruct->soundCardSrc, GST_STATE_PLAYING);
//GstStateChangeReturn retu = gst_element_get_state(mainStruct->soundCardSrc);
mainStruct->micMixPad = gst_element_get_static_pad(mainStruct->micSourceQueue, "src");
link_to_mixer(mainStruct->micMixPad, mainStruct->audioMixer);
//mainStruct->soundCardMixPad = gst_element_get_static_pad(mainStruct->soundCardSrcQueue, "src");
//link_to_mixer(mainStruct->soundCardMixPad, mainStruct->audioMixer);
bool one_ = gst_element_link_many(mainStruct->audioMixer, mainStruct->audioMixerQueue, mainStruct->audioEncoder, mainStruct->audioEncoderQueue, NULL);
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->gdiGrabber, mainStruct->gdiGrabberQueue, mainStruct->videoConverterQueue, mainStruct->videoConverter, mainStruct->x264encoder, mainStruct->x264encoderQueue, NULL);
// so add this element , with main bin
gst_element_link_many(mainStruct->gdiGrabber, mainStruct->gdiGrabberQueue, mainStruct->videoConverter, mainStruct->videoConverterQueue, mainStruct->x264encoder, mainStruct->x264encoderQueue, NULL);
//link_to_mixer(videoMuxPad, avMuxer);
mainStruct->videoMuxPad = gst_element_get_static_pad(mainStruct->x264encoderQueue, "src");
mainStruct->audioMuxPad = gst_element_get_static_pad(mainStruct->audioEncoderQueue, "src");
// add all the bin and muxer and filesink to main pipeline bin
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->avMuxer, mainStruct->fileSinker, NULL);
link_to_mpeg_muxer(mainStruct->videoMuxPad, mainStruct->avMuxer);
link_to_mpeg_muxer(mainStruct->audioMuxPad, mainStruct->avMuxer);
gst_element_link(mainStruct->avMuxer, mainStruct->fileSinker);
//gst_element_link(videoMuxPad, avMuxer);
/* Start playing the pipeline */
mainStruct->ret = gst_element_set_state(mainPipeline, GST_STATE_PLAYING);
// TODO , deal with ret
mainStruct->mainBus = gst_element_get_bus(mainPipeline);
mainStruct->bus_watch_id = gst_bus_add_watch(mainStruct->mainBus, bus_call, mainLoop);
gst_object_unref(mainStruct->mainBus);
// msg = gst_bus_timed_pop_filtered(mainBus, GST_CLOCK_TIME_NONE, GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
g_main_loop_run(mainLoop);
gst_element_set_state(mainPipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(mainPipeline));
g_source_remove(mainStruct->bus_watch_id);
g_main_loop_unref(mainLoop);
//g_main_loop_quit(mainLoop);
return 0;
}

g_signal_connect must be called before QObject::connect?

I am creating an application that combines GStreamer and Qt. It appears that if I use QObject::connect to connect a signal to a slot before I use g_signal_connect to register a callback function to events on the GStreamer bus, then the g_signal_connect callback function is never called. If I reverse the order it is. Is this expected?
Example:
main.cpp
#include <QApplication>
#include <QPushButton>
#include "acquisitiontype.h"
// Qt entry point: constructs the GStreamer-based acquisition object before
// the Qt event loop starts.
int main(int argc, char *argv[]) {
QApplication app(argc, argv);
// NOTE(review): per the question text, constructing this after
// QObject::connect / button->show() stops the GStreamer bus callbacks from
// firing -- the ordering of gst_init vs. Qt's GLib main-context setup
// matters here; confirm against the Qt GLib event dispatcher docs.
AcquisitionType acquisition("224.1.1.1", 5004);
QPushButton* button = new QPushButton("click me");
QObject::connect(button, SIGNAL(clicked()), &app, SLOT(quit()));
button->show();
return app.exec();
}
acquisitiontype.cpp
#include "acquisitiontype.h"
// Bus "message::element" handler: logs when udpsrc posts a receive timeout.
void AcquisitionType::udp_source_timeout_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data) {
    (void) bus;
    (void) user_data;
    // Check the message type before touching its structure: element messages
    // carry one, but other message types may not, in which case
    // gst_message_get_structure() returns NULL (the original fetched it
    // unconditionally and relied on the NULL never being used).
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
        const GstStructure* st = gst_message_get_structure(message);
        if (st != NULL && gst_structure_has_name(st, "GstUDPSrcTimeout")) {
            printf("callback called\n");
        }
    }
}
// Bus "message::error" handler: print the error details and abort.
void AcquisitionType::bus_error_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data) {
(void) bus;
(void) user_data;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
// NOTE(review): exit(-1) from a bus callback skips Qt/GStreamer cleanup.
exit(-1);
}
// Builds `udpsrc ! fakesink` listening on the given multicast address/port,
// attaches error/timeout callbacks via a bus signal watch, and starts it.
AcquisitionType::AcquisitionType(char const* address, gint port) {
    GstStateChangeReturn ret;
    GstBus* bus;
    gst_init(NULL, NULL);
    data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
    g_object_set(G_OBJECT(data.udp_source),
        "address", address,
        "port", port,
        "caps", gst_caps_new_empty_simple("application/x-rtp"),
        // FIX: "timeout" is a guint64 property. Through g_object_set's
        // varargs a plain int literal pushes only 32 bits while GObject reads
        // 64 -- undefined behavior (the bug noted at the end of the
        // question). Force a 64-bit constant.
        "timeout", G_GUINT64_CONSTANT(1000000000),
        NULL);
    data.sink = gst_element_factory_make("fakesink", "sink");
    data.pipeline = gst_pipeline_new("pipeline");
    if (
        !data.pipeline ||
        !data.udp_source ||
        !data.sink
    )
    {
        g_printerr("Not all elements could be created.\n");
        exit(-1);
    }
    gst_bin_add_many(
        GST_BIN(data.pipeline),
        data.udp_source,
        data.sink,
        NULL);
    if (gst_element_link_many(
        data.udp_source,
        data.sink,
        NULL) != TRUE)
    {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(data.pipeline);
        exit(-1);
    }
    // The signal watch keeps its own bus reference; it is removed again in
    // the destructor.
    bus = gst_element_get_bus(data.pipeline);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) bus_error_callback, &data);
    g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) udp_source_timeout_callback, &data);
    gst_object_unref(bus);
    ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.pipeline);
        exit(-1);
    }
}
// Stops the pipeline, detaches the bus signal watch added in the
// constructor, and releases the pipeline reference.
AcquisitionType::~AcquisitionType() {
GstBus* bus;
gst_element_set_state(data.pipeline, GST_STATE_NULL);
bus = gst_element_get_bus(data.pipeline);
gst_bus_remove_signal_watch(bus);
gst_object_unref(bus);
gst_object_unref(data.pipeline);
}
acquisitiontype.h
#include <gst/gst.h>
#include <QObject>
class AcquisitionType;
// Raw GStreamer handles owned by AcquisitionType; passed to bus callbacks.
struct gstreamer_data {
GstElement* pipeline;
GstElement* udp_source;
GstElement* sink;
};
// Qt wrapper around a `udpsrc ! fakesink` pipeline; bus errors and udpsrc
// timeout messages are delivered to the static callbacks below.
class AcquisitionType : public QObject
{
Q_OBJECT
public:
AcquisitionType(char const* address, gint port);
~AcquisitionType();
private:
// Bus "message::error": print details and exit.
static void bus_error_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data);
// Bus "message::element": report GstUDPSrcTimeout messages.
static void udp_source_timeout_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data);
gstreamer_data data;
};
If this is run as is, then the callback is called. If AcquisitionType acquisition("224.1.1.1", 5004); is moved to after button->show() then it is not.
It seems that I needed to change "timeout", 1000000000, to "timeout", G_GUINT64_CONSTANT(1000000000),.

videoconvert element breaks udpsrc timeout messages

This code prints "Timeout received from udpsrc" each second. The videoconvert element is commented out of the pipeline. If I uncomment it then the messages stop printing.
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
// Elements of the receive pipeline: udpsrc ! rtph264depay ! avdec_h264
// [! videoconvert] ! fakesink (videoconvert is commented out in main()).
struct gstreamer_data {
GstElement* pipeline;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* video_converter;
GstElement* fake_sink;
GMainLoop* main_loop;
};
// Bus "message::error" handler: print the error details and stop the loop.
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
/* Bus handler for "message::element": print a line whenever udpsrc
 * posts its "GstUDPSrcTimeout" message (no packet received within the
 * configured timeout).
 * Fix: gst_message_get_structure() can return NULL for some element
 * messages, so the structure must be NULL-checked before
 * gst_structure_has_name() is called on it. */
static void element_callback(GstBus* bus, GstMessage* message, gpointer data) {
    (void) bus;
    (void) data;
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
        const GstStructure* st = gst_message_get_structure(message);
        if (st != NULL && gst_structure_has_name(st, "GstUDPSrcTimeout")) {
            printf("Timeout received from udpsrc\n");
        }
    }
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
gst_init(NULL, NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.fake_sink = gst_element_factory_make("fakesink", "fake_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.fake_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
I have tried setting the debug level higher, but I'm not seeing anything to account for it. Is there something special about the videoconvert element?
Maybe you were sending data to that udpsrc after all?
I have confirmed that when I un-comment those two lines, I do get the log messages as expected.

Display no signal image if video sender is closed

If I close the sender in this example, the video displayed by the receiver freezes. Is there a way to display a static no signal image instead, for example an all blue image, and have the video return when the sender restarts?
Sender
gst-launch-1.0 videotestsrc ! video/x-raw,format=GRAY8 ! videoconvert ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
Receiver
gst-launch-1.0 udpsrc port=5000 ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
EDIT
This code seems to come close, but for some reason if I add in the videotestsrc by uncommenting the commented out lines, the udpsrc no longer calls the timeout callback:
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
// Shared state for the receiver pipeline and its callbacks.
struct gstreamer_data {
GstElement* pipeline; // top-level pipeline
GstElement* no_signal_source; // videotestsrc shown when no data arrives
GstElement* udp_source; // udpsrc receiving RTP/H.264
GstElement* rtp_decoder; // rtph264depay
GstElement* video_decoder; // avdec_h264
GstElement* input_selector; // switches between live and "no signal" feeds
GstElement* video_converter; // videoconvert
GstElement* video_sink; // autovideosink
gulong signal_handler_id; // id of the currently connected bus handler
GMainLoop* main_loop; // loop driving the bus signal watch
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
/* Pad probe fired on the first buffer leaving udpsrc: data is flowing
 * again, so re-arm the bus watch for timeout element messages and
 * remove this one-shot probe. */
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
    printf("have data\n");

    GstBus* message_bus = gst_element_get_bus(user_data->pipeline);
    user_data->signal_handler_id =
        g_signal_connect(G_OBJECT(message_bus), "message::element", (GCallback) element_callback, user_data);
    gst_object_unref(message_bus);

    return GST_PAD_PROBE_REMOVE;
}
/* Bus handler for "message::element": when udpsrc reports a receive
 * timeout, disconnect this handler and install a one-shot pad probe
 * that detects when data starts flowing again.
 * Fixes: the pad reference returned by gst_element_get_static_pad()
 * was leaked (the later revision of this code unrefs it), and the
 * structure is NULL-checked before gst_structure_has_name(). */
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    const GstStructure* st = gst_message_get_structure(message);
    GstPad* pad;
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
        if (st != NULL && gst_structure_has_name(st, "GstUDPSrcTimeout")) {
            printf("Timeout received from udpsrc\n");
            g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
            pad = gst_element_get_static_pad(data->udp_source, "src");
            gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
            /* gst_element_get_static_pad() returns a new reference;
             * release it — the probe stays attached to the pad. */
            gst_object_unref(pad);
        }
    }
}
/* Bus handler for "message::error": log the failure and quit the main
 * loop so the process can exit. */
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    (void) bus;

    GError* parsed_error = NULL;
    gchar* debug_text = NULL;
    gst_message_parse_error(message, &parsed_error, &debug_text);

    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), parsed_error->message);
    g_printerr("Debugging information: %s\n", debug_text != NULL ? debug_text : "none");

    g_clear_error(&parsed_error);
    g_free(debug_text);
    g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.no_signal_source = gst_element_factory_make("videotestsrc", "no_signal_source");
g_object_set(G_OBJECT(data.no_signal_source),
"pattern", 6,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.no_signal_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.input_selector ||
!data.video_converter ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
//data.no_signal_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.input_selector,
data.video_converter,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_decoder, "src");
GstPad* sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, sink_1);
/*
GstPad* src_2 = gst_element_get_static_pad(data.no_signal_source, "src");
GstPad* sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, sink_2);
*/
g_object_set(G_OBJECT(data.input_selector),
"active-pad", sink_1,
NULL);
if (gst_element_link_many(
data.input_selector,
data.video_converter,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT
This code seems fine until I uncomment the selection of the active pad in the callbacks. Do I need to do something before I change the active pad, like stop the pipeline?
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
// Shared state for the receiver pipeline with a switchable "no signal"
// branch. The two request pads of the input-selector are kept so the
// callbacks can change the active pad.
struct gstreamer_data {
GstElement* pipeline; // top-level pipeline
GstElement* video_source; // videotestsrc used as the "no signal" image
GstElement* udp_source; // udpsrc receiving RTP/H.264
GstElement* rtp_decoder; // rtph264depay
GstElement* video_decoder; // avdec_h264
GstElement* video_converter; // videoconvert feeding the selector
GstElement* input_selector; // selects between test pattern and live feed
GstPad* sink_1; // selector request pad fed by videotestsrc
GstPad* sink_2; // selector request pad fed by the decoded stream
GstElement* video_sink; // autovideosink
gulong signal_handler_id; // id of the currently connected bus handler
GMainLoop* main_loop; // loop driving the bus signal watch
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
/* One-shot pad probe: fires on the first buffer leaving udpsrc after a
 * timeout, meaning data is flowing again. Re-arms the bus watch for
 * timeout element messages and removes itself. The commented-out block
 * is the (questioned) switch back to the live feed via input-selector. */
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
GstBus* bus;
printf("have data\n");
/*
g_object_set(G_OBJECT(user_data->input_selector),
"active-pad", user_data->sink_2,
NULL);
*/
bus = gst_element_get_bus(user_data->pipeline);
user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
/* gst_element_get_bus() returned a new reference; release it. */
gst_object_unref(bus);
/* GST_PAD_PROBE_REMOVE makes this a one-shot probe. */
return GST_PAD_PROBE_REMOVE;
}
/* Bus handler for "message::element": on a udpsrc receive timeout,
 * disconnect this handler and install a one-shot probe that detects
 * when data resumes. The commented-out block is the (questioned)
 * switch to the "no signal" pad of the input-selector. */
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
const GstStructure* st = gst_message_get_structure(message);
GstPad* pad;
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
/* udpsrc posts "GstUDPSrcTimeout" when no packet arrives within the
 * configured timeout. */
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("no data\n");
/*
g_object_set(G_OBJECT(data->input_selector),
"active-pad", data->sink_1,
NULL);
*/
g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
pad = gst_element_get_static_pad(data->udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
/* Release the static-pad reference; the probe stays attached. */
gst_object_unref(pad);
}
}
}
/* Bus handler for "message::error": print the error and debug details,
 * then stop the main loop. */
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    (void) bus;

    GError* gst_error = NULL;
    gchar* diagnostics = NULL;
    gst_message_parse_error(message, &gst_error, &diagnostics);

    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), gst_error->message);
    g_printerr("Debugging information: %s\n", diagnostics != NULL ? diagnostics : "none");

    g_clear_error(&gst_error);
    g_free(diagnostics);
    g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.video_source = gst_element_factory_make("videotestsrc", "video_source");
g_object_set(G_OBJECT(data.video_source),
"pattern", 6,
"is-live", true,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.video_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.input_selector ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.video_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
data.input_selector,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_source, "src");
data.sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, data.sink_1);
gst_object_unref(src_1);
GstPad* src_2 = gst_element_get_static_pad(data.video_converter, "src");
data.sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, data.sink_2);
gst_object_unref(src_2);
if (gst_element_link_many(
data.input_selector,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
gst_object_unref(pad);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT:
It seems to work fine all of a sudden; I don't understand why. Is this suitable code? Can it be improved?
EDIT:
Setting a width and height on the videotestsrc in the sender seems to make it work. If I remove those, it breaks. Why?

gstreamer appsrc video streaming over the network

I'm trying to use gstreamer appsrc to play video stream over the network.
I found good examples here.
gstreamer appsrc test application
http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
Using examples above I can play a video in X Window using Xlib. When pipeline is set PLAYING state, then somehow "need-data" signal emitted and in the start_feed callback function data read from a video file are injected to the appsrc GstBuffer and play sample video.
I'm trying to get data from the network instead of a file, so I think simple echo server reads a video file exactly the same way above and send data to the client when connection is occurred. The client should get these data and put in the appsrc.
My question is how to put stream data to appsrc pipeline? Does anybody give any suggession or good reference?
Here's the working sample code using above links' examples.
// http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
// http://www.cs.odu.edu/~cs476/Xlib/xlines.c
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <stdio.h>
#include <unistd.h> // sleep()
#include <stdbool.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>
#include <gst/interfaces/xoverlay.h>
/* Chunk size for each read/push: one raw 640x480 RGB frame. */
#define BUFF_SIZE (640*480*3)//(1024)
#define BORDER_WIDTH 2
#define DEBUG printf
typedef unsigned int uint32;
typedef unsigned char uint8;
/* Application state shared between the GStreamer callbacks. */
typedef struct {
GstPipeline *pipeline; /* top-level pipeline */
GstAppSrc *src; /* appsrc fed from the input file */
GstElement *sink; /* unused */
GstElement *decoder; /* decodebin2 */
GstElement *ffmpeg; /* ffmpegcolorspace */
GstElement *videosink; /* autovideosink */
GMainLoop *loop; /* main loop */
guint sourceid; /* idle-source id while feeding, 0 otherwise */
FILE *file; /* input media file */
} gst_app_t;
static gst_app_t gst_app;
/* X11 windows: the overlay renders into child_window. */
static Window child_window = 0;
static Window window = 0;
/* Idle-source callback: read one chunk from the input file and push it
 * into appsrc. Returns TRUE to be scheduled again, FALSE to stop
 * feeding (after EOS or a push failure). Uses the GStreamer 0.10
 * buffer API. */
static gboolean read_data(gst_app_t *app)
{
GstBuffer *buffer;
guint8 *ptr;
gint size;
GstFlowReturn ret;
/* g_malloc() aborts on allocation failure, so ptr is never NULL here. */
ptr = g_malloc(BUFF_SIZE);
g_assert(ptr);
size = fread(ptr, 1, BUFF_SIZE, app->file);
if(size == 0){
/* Nothing left to read: signal end-of-stream and stop this source. */
ret = gst_app_src_end_of_stream(app->src);
DEBUG("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
/* Wrap the malloc'd memory in a 0.10 buffer; via MALLOCDATA the buffer
 * takes ownership of ptr and g_free()s it when unreffed. */
buffer = gst_buffer_new();
GST_BUFFER_MALLOCDATA(buffer) = ptr;
GST_BUFFER_SIZE(buffer) = size;
GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
/* Ownership of the buffer transfers to appsrc. */
ret = gst_app_src_push_buffer(app->src, buffer);
if(ret != GST_FLOW_OK){
DEBUG("push buffer returned %d for %d bytes \n", ret, size);
return FALSE;
}
if(size != BUFF_SIZE){
/* Short read: end of file reached after this partial chunk. */
ret = gst_app_src_end_of_stream(app->src);
DEBUG("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
return TRUE;
}
/* appsrc "need-data" handler: start feeding via an idle source, unless
 * one is already active. */
static void start_feed (GstElement * pipeline, guint size, gst_app_t *app)
{
    if (app->sourceid != 0) {
        return;
    }
    DEBUG ("start feeding\n");
    app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
}
/* appsrc "enough-data" handler: cancel the idle feeder if it is active. */
static void stop_feed (GstElement * pipeline, gst_app_t *app)
{
    if (app->sourceid == 0) {
        return;
    }
    DEBUG ("stop feeding\n");
    g_source_remove (app->sourceid);
    app->sourceid = 0;
}
/* decodebin2 "pad-added" handler: link newly exposed video pads to the
 * colorspace converter. Non-video pads are ignored.
 * Fix: gst_element_get_pad() is deprecated in GStreamer 0.10;
 * gst_element_get_static_pad() returns the same referenced pad for a
 * statically named pad. */
static void on_pad_added(GstElement *element, GstPad *pad)
{
    GstCaps *caps;
    GstStructure *str;
    gchar *name;
    GstPad *ffmpegsink;
    GstPadLinkReturn ret;
    DEBUG("pad added\n");
    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);
    g_assert(str);
    name = (gchar*)gst_structure_get_name(str);
    DEBUG("pad name %s\n", name);
    if(g_strrstr(name, "video")){
        ffmpegsink = gst_element_get_static_pad(gst_app.ffmpeg, "sink");
        g_assert(ffmpegsink);
        ret = gst_pad_link(pad, ffmpegsink);
        DEBUG("pad_link returned %d\n", ret);
        gst_object_unref(ffmpegsink);
    }
    gst_caps_unref(caps);
}
/* Bus watch: handle video-overlay setup, errors, warnings and EOS.
 * Always returns TRUE so the watch stays installed.
 * Fix: the ELEMENT case previously cast the source of *every* element
 * message to GstXOverlay; only the sink's "prepare-xwindow-id" message
 * guarantees the source implements the XOverlay interface. */
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer *ptr)
{
    gst_app_t *app = (gst_app_t*)ptr;
    switch(GST_MESSAGE_TYPE(message))
    {
    case GST_MESSAGE_ELEMENT: {
        const GstStructure *s = gst_message_get_structure(message);
        if (s != NULL && gst_structure_has_name(s, "prepare-xwindow-id")) {
            /* Tell the video sink to render into our child X window. */
            gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC(message)), child_window);
        }
    }
    break;
    case GST_MESSAGE_ERROR:
    {
        gchar *debug;
        GError *err;
        gst_message_parse_error(message, &err, &debug);
        DEBUG("Error %s\n", err->message);
        g_error_free(err);
        g_free(debug);
        g_main_loop_quit(app->loop);
    }
    break;
    case GST_MESSAGE_WARNING:
    {
        gchar *debug;
        GError *err;
        gchar *name;
        gst_message_parse_warning(message, &err, &debug);
        DEBUG("Warning %s\nDebug %s\n", err->message, debug);
        name = GST_MESSAGE_SRC_NAME(message);
        DEBUG("Name of src %s\n", name ? name : "nil");
        g_error_free(err);
        g_free(debug);
    }
    break;
    case GST_MESSAGE_EOS:
        DEBUG("End of stream\n");
        g_main_loop_quit(app->loop);
        break;
    case GST_MESSAGE_STATE_CHANGED:
        break;
    default:
        DEBUG("got message %s\n",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
        break;
    }
    return TRUE;
}
/* One-shot timeout callback: quit the main loop passed via user data.
 * Returning FALSE prevents rescheduling. */
static gboolean terminate_playback (GstElement * loop)
{
    GMainLoop *main_loop = (GMainLoop *)loop;
    DEBUG ("Terminating playback\n");
    g_main_loop_quit (main_loop);
    return FALSE;
}
/* Build appsrc ! decodebin2 ! ffmpegcolorspace ! autovideosink, feed it
 * from the file named in argv[1], and run until EOS or error.
 * Returns 1 on normal termination, negative on setup failure.
 * Fixes: gst_init() takes (int *argc, char **argv[]) — passing (int)0
 * where a pointer is expected is invalid; the media file is opened in
 * binary mode; the file and main loop are released on exit. */
int gstreamer_init(int argc, char *argv[])
{
    gst_app_t *app = &gst_app;
    GstBus *bus;
    GstStateChangeReturn state_ret;
    /* Binary mode: this is media data, not text. */
    app->file = fopen(argv[1], "rb");
    g_assert(app->file);
    /* initialization */
    gst_init(NULL, NULL);
    app->loop = g_main_loop_new(NULL, FALSE);
    /* create elements */
    app->pipeline = (GstPipeline *)gst_pipeline_new("my_pipeline");
    app->src = (GstAppSrc *)gst_element_factory_make("appsrc", "myappsrc");
    app->decoder = gst_element_factory_make("decodebin2", "mydecoder");
    app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg");
    app->videosink = gst_element_factory_make("autovideosink", "myvideosink");
    if (!app->videosink) {
        DEBUG ("output could not be found - check your install\n");
    }
    g_assert(app->src);
    g_assert(app->decoder);
    g_assert(app->ffmpeg);
    g_assert(app->videosink);
    bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
    gst_object_unref(bus);
    /* decodebin2 exposes its pads dynamically; link them on "pad-added". */
    g_signal_connect(app->decoder, "pad-added",
        G_CALLBACK(on_pad_added), app->ffmpeg);
    //gst_app_src_set_emit_signals(app->src, true);
    g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
    g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);
    gst_bin_add_many (GST_BIN (app->pipeline), (GstElement *)app->src,
        app->decoder, app->ffmpeg, app->videosink, NULL);
    /* link everything together (decoder->ffmpeg happens in on_pad_added) */
    if (!gst_element_link((GstElement *)app->src, app->decoder)) {
        DEBUG ("Failed to link one or more elements!\n");
        return -1;
    }
    if(!gst_element_link(app->ffmpeg, app->videosink)){
        DEBUG("failed to link ffmpeg and videosink");
        return -1;
    }
    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_PLAYING);
    if (state_ret == GST_STATE_CHANGE_FAILURE) {
        DEBUG("Failed to start up pipeline!\n");
        return 1;
    }
    DEBUG("set state returned %d\n", state_ret);
    //g_timeout_add (15000, (GSourceFunc) terminate_playback, app->loop);
    g_main_loop_run(app->loop);
    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_NULL);
    DEBUG("set state null returned %d\n", state_ret);
    gst_object_unref(app->pipeline);
    /* Release resources the original version leaked. */
    g_main_loop_unref(app->loop);
    fclose(app->file);
    return 1;
}
/*
* gst-launch filesrc location=test.avi ! decodebin2 ! ffmpegcolorspace ! autovideosink
*
* 1. dependency library install
* $ sudo apt-get install gstreamer0.10-plugins-bad
* $ sudo apt-get install gstreamer0.10-ffmpeg
*
* 2. compile
* $ gcc hello.c -o hello -lX11 `pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10` -lgstinterfaces-0.10
*
* 3. how to run program
* $ ./hello <video_file_name>
* $ GST_DEBUG=appsrc:5 ./hello ./hbo_dtc_sd.ts
*/
/* Create a parent window with a child video window, then hand control to
 * gstreamer_init(), which renders into the child via the X overlay.
 * Fixes: argc is validated before argv[1] is consumed downstream, the
 * unused locals (event, msg) are removed, and the GC is freed. */
int main(int argc, char *argv[])
{
    Display *disp;
    Window root;
    long fgcolor, bgcolor;
    GC gc;
    XGCValues gc_val;
    int screen;
    if (argc < 2) {
        fprintf(stderr, "Usage: %s <video_file_name>\n", argv[0]);
        exit(1);
    }
    disp = XOpenDisplay(NULL);
    if (disp == NULL) {
        fprintf(stderr, "Cannot open display\n");
        exit(1);
    }
    screen = DefaultScreen(disp);
    root = RootWindow(disp, screen);
    fgcolor = BlackPixel(disp, screen);
    bgcolor = WhitePixel(disp, screen);
    window = XCreateSimpleWindow(disp, root, 100, 100, 1000, 840, 1,
        fgcolor, bgcolor);
    child_window = XCreateSimpleWindow(disp, window, 100, 100, 800, 600, 1,
        fgcolor, bgcolor);
    gc_val.foreground = fgcolor;
    gc_val.background = bgcolor;
    gc = XCreateGC(disp, child_window, GCForeground|GCBackground, &gc_val);
    XSelectInput(disp, child_window, ExposureMask | KeyPressMask);
    g_warning("map xwindow");
    //XMapWindow(disp, window);
    XMapWindow(disp, window);
    XMapWindow(disp, child_window);
    XSync(disp, FALSE);
    //XDrawLine (disp, window, gc, 0, 0, 1000, 800);
    //XDrawLine (disp, child_window, gc, 0, 0, 800, 600);
    gstreamer_init(argc, argv);
    /* Tear down X resources (the GC was previously leaked). */
    XFreeGC( disp, gc );
    XDestroyWindow( disp, window );
    XDestroyWindow( disp, child_window );
    XCloseDisplay( disp );
    return 0;
}
You'll want to have at least one other thread (on each end) to handle communication over a socket (like TCP, or UDP if on a local network). This typically has a blocking call to wait for packets. To send data, you can form a gstreamer tee and queue, and then an appsrc to buffer/send data to a socket. To receive, you can pull the data from the socket to a buffer. Keep in mind the OS's socket buffer is relatively small and will drop packets if you don't pull from it fast enough, or push to one too fast. Hence the buffers.
On a NEED_DATA signal, you pull from that buffer to the pipeline using pushBuffer(). And on an ENOUGH_DATA signal, you can just keep buffering or dispose of it, whatever your application needs to do.