g_signal_connect must be called before QObject::connect? - c++

I am creating an application that combines GStreamer and Qt. It appears that if I use QObject::connect to connect a signal to a slot before I use g_signal_connect to register a callback function for events on the GStreamer bus, then the g_signal_connect callback function is never called. If I reverse the order, it is. Is this expected?
Example:
main.cpp
#include <QApplication>
#include <QPushButton>
#include "acquisitiontype.h"
int main(int argc, char *argv[]) {
QApplication app(argc, argv);
AcquisitionType acquisition("224.1.1.1", 5004);
QPushButton* button = new QPushButton("click me");
QObject::connect(button, SIGNAL(clicked()), &app, SLOT(quit()));
button->show();
return app.exec();
}
acquisitiontype.cpp
#include "acquisitiontype.h"
void AcquisitionType::udp_source_timeout_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data) {
(void) bus;
(void) user_data;
const GstStructure* st = gst_message_get_structure(message);
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("callback called\n");
}
}
}
void AcquisitionType::bus_error_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data) {
(void) bus;
(void) user_data;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
exit(-1);
}
AcquisitionType::AcquisitionType(char const* address, gint port) {
GstStateChangeReturn ret;
GstBus* bus;
gst_init(NULL, NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"address", address,
"port", port,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.sink = gst_element_factory_make("fakesink", "sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.udp_source ||
!data.sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.udp_source,
data.sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) bus_error_callback, &data);
g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) udp_source_timeout_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
}
AcquisitionType::~AcquisitionType() {
GstBus* bus;
gst_element_set_state(data.pipeline, GST_STATE_NULL);
bus = gst_element_get_bus(data.pipeline);
gst_bus_remove_signal_watch(bus);
gst_object_unref(bus);
gst_object_unref(data.pipeline);
}
acquisitiontype.h
#include <gst/gst.h>
#include <QObject>
class AcquisitionType;
struct gstreamer_data {
GstElement* pipeline;
GstElement* udp_source;
GstElement* sink;
};
class AcquisitionType : public QObject
{
Q_OBJECT
public:
AcquisitionType(char const* address, gint port);
~AcquisitionType();
private:
static void bus_error_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data);
static void udp_source_timeout_callback(GstBus* bus, GstMessage* message, gstreamer_data* user_data);
gstreamer_data data;
};
If this is run as is, then the callback is called. If AcquisitionType acquisition("224.1.1.1", 5004); is moved to after button->show() then it is not.

It seems that I needed to change "timeout", 1000000000, to "timeout", G_GUINT64_CONSTANT(1000000000),. The "timeout" property of udpsrc is a guint64, and g_object_set() is variadic, so a plain int literal is pushed onto the argument list with the wrong width; the missing upper 32 bits are read from whatever happens to sit next to it in memory. That is why an unrelated change, such as the order of the QObject::connect call, appeared to decide whether the timeout ever fired.
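For reference, the corrected call; since g_object_set() reads its varargs according to each property's type, the guint64 "timeout" value must be pushed as a 64-bit literal:

/* udpsrc's "timeout" is a guint64 in nanoseconds, so spell the literal
 * as 64-bit; a plain int would leave the upper half of the read garbage. */
g_object_set(G_OBJECT(data.udp_source),
             "address", address,
             "port", port,
             "caps", gst_caps_new_empty_simple("application/x-rtp"),
             "timeout", G_GUINT64_CONSTANT(1000000000), /* 1 second */
             NULL);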

Related

videoconvert element breaks udpsrc timeout messages

This code prints "Timeout received from udpsrc" each second. The videoconvert element is commented out of the pipeline. If I uncomment it then the messages stop printing.
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
struct gstreamer_data {
GstElement* pipeline;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* video_converter;
GstElement* fake_sink;
GMainLoop* main_loop;
};
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
static void element_callback(GstBus* bus, GstMessage* message, gpointer data) {
const GstStructure* st = gst_message_get_structure(message);
if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("Timeout received from udpsrc\n");
}
}
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
gst_init(NULL, NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.fake_sink = gst_element_factory_make("fakesink", "fake_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.fake_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
I have tried setting the debug level higher, but I'm not seeing anything to account for it. Is there something special about the videoconvert element?
Maybe you were sending data to that udpsrc after all?
I have confirmed that when the two lines are commented out I do get the log messages as expected.
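Note that this code has the same 64-bit varargs problem that turned out to be the answer to the question above: "timeout", 1000000000, passes a plain int where udpsrc's guint64 "timeout" property is expected, so whether the timeout ever fires can flip with unrelated pipeline edits such as adding videoconvert. A sketch of the fix:

g_object_set(G_OBJECT(data.udp_source),
             "port", 5000,
             "caps", gst_caps_new_empty_simple("application/x-rtp"),
             "timeout", G_GUINT64_CONSTANT(1000000000), /* guint64 nanoseconds */
             NULL);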

Display no signal image if video sender is closed

If I close the sender in this example, the video displayed by the receiver freezes. Is there a way to display a static no signal image instead, for example an all blue image, and have the video return when the sender restarts?
Sender
gst-launch-1.0 videotestsrc ! video/x-raw,format=GRAY8 ! videoconvert ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
Receiver
gst-launch-1.0 udpsrc port=5000 ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
EDIT
This code seems to come close, but for some reason if I add in the videotestsrc by uncommenting the commented out lines, the udpsrc no longer calls the timeout callback:
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
struct gstreamer_data {
GstElement* pipeline;
GstElement* no_signal_source;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* input_selector;
GstElement* video_converter;
GstElement* video_sink;
gulong signal_handler_id;
GMainLoop* main_loop;
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
GstBus* bus;
printf("have data\n");
bus = gst_element_get_bus(user_data->pipeline);
user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
gst_object_unref(bus);
return GST_PAD_PROBE_REMOVE;
}
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
const GstStructure* st = gst_message_get_structure(message);
GstPad* pad;
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("Timeout received from udpsrc\n");
g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
pad = gst_element_get_static_pad(data->udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
gst_object_unref(pad); /* drop the ref taken by gst_element_get_static_pad() */
}
}
}
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.no_signal_source = gst_element_factory_make("videotestsrc", "no_signal_source");
g_object_set(G_OBJECT(data.no_signal_source),
"pattern", 6,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.no_signal_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.input_selector ||
!data.video_converter ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
//data.no_signal_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.input_selector,
data.video_converter,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_decoder, "src");
GstPad* sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, sink_1);
/*
GstPad* src_2 = gst_element_get_static_pad(data.no_signal_source, "src");
GstPad* sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, sink_2);
*/
g_object_set(G_OBJECT(data.input_selector),
"active-pad", sink_1,
NULL);
if (gst_element_link_many(
data.input_selector,
data.video_converter,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
gst_object_unref(pad); /* drop the ref taken by gst_element_get_static_pad() */
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT
This code seems fine until I uncomment the selection of the active pad in the callbacks. Do I need to do something before I change the active pad, like stop the pipeline?
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
struct gstreamer_data {
GstElement* pipeline;
GstElement* video_source;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* video_converter;
GstElement* input_selector;
GstPad* sink_1;
GstPad* sink_2;
GstElement* video_sink;
gulong signal_handler_id;
GMainLoop* main_loop;
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
GstBus* bus;
printf("have data\n");
/*
g_object_set(G_OBJECT(user_data->input_selector),
"active-pad", user_data->sink_2,
NULL);
*/
bus = gst_element_get_bus(user_data->pipeline);
user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
gst_object_unref(bus);
return GST_PAD_PROBE_REMOVE;
}
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
const GstStructure* st = gst_message_get_structure(message);
GstPad* pad;
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("no data\n");
/*
g_object_set(G_OBJECT(data->input_selector),
"active-pad", data->sink_1,
NULL);
*/
g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
pad = gst_element_get_static_pad(data->udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
gst_object_unref(pad);
}
}
}
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.video_source = gst_element_factory_make("videotestsrc", "video_source");
g_object_set(G_OBJECT(data.video_source),
"pattern", 6,
"is-live", true,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.video_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.input_selector ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.video_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
data.input_selector,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_source, "src");
data.sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, data.sink_1);
gst_object_unref(src_1);
GstPad* src_2 = gst_element_get_static_pad(data.video_converter, "src");
data.sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, data.sink_2);
gst_object_unref(src_2);
if (gst_element_link_many(
data.input_selector,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
gst_object_unref(pad);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT:
It seems fine all of a sudden. I don't understand. Is this suitable code? Can it be improved?
EDIT:
Setting a width and height on the videotestsrc in the sender seems to make it work. If I remove those, it breaks. Why?
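As for whether the code can be improved, two observations. First, this listing still passes "timeout", 1000000000, as a plain int, the same 64-bit varargs problem fixed at the top of this page, which would explain why unrelated edits (adding the videotestsrc, changing the sender's caps) toggle the timeout callback. Second, pad probes run in a streaming thread rather than the main loop, so it may be safer to defer the active-pad switch to the GLib main loop instead of doing it inside have_data_callback. A sketch, where switch_to_live is a hypothetical helper, not part of the original code:

static gboolean switch_to_live(gstreamer_data* data) {
    /* runs once in the main loop; mirror it for the no-signal branch */
    g_object_set(G_OBJECT(data->input_selector),
                 "active-pad", data->sink_2,
                 NULL);
    return G_SOURCE_REMOVE; /* one-shot idle handler */
}

/* in have_data_callback, instead of calling g_object_set directly: */
/* g_idle_add((GSourceFunc) switch_to_live, user_data); */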

Opencv Mat as AppSrc to srtclientsink

I am trying to use an OpenCV Mat as an appsrc in my pipeline and push it via SRT to a local server, but no window opens to play the video stream.
My system is macOS 10.14 with GStreamer 1.15.
The pipeline consists of the following elements:
appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtclientsink
I want to get the srt stream and show it with the following command:
gst-launch-1.0 srtserversrc uri=srt://:8888 ! decodebin3 ! autovideosink
In the debug logs, it says:
GST_BUFFER gstbuffer.c:445:void _memory_add(GstBuffer *, gint, GstMemory *): buffer 0x7fd1aca38500, idx -1, mem 0x7fd1aca3a2b0
0:00:08.150919000 974 0x7fd1ac864b20 DEBUG tsdemux tsdemux.c:2980:gst_ts_demux_push_pending_data: Not enough information to push buffers yet, storing buffer
0:00:08.150931000 974 0x7fd1ac864b20 LOG tsdemux tsdemux.c:3098:gst_ts_demux_push_pending_data: Resetting to EMPTY, returning ok
0:00:08.150942000 974 0x7fd1ac864b20 LOG mpegtspacketizer mpegtspacketizer.c:689:mpegts_packetizer_flush_bytes: flushing 564 bytes from adapter
0:00:08.151214000 974 0x7fd1ac864b20 LOG adapter gstadapter.c:634:void gst_adapter_flush_unchecked(GstAdapter *, gsize): flushing 564 bytes
0:00:08.151234000 974 0x7fd1ac864b20 LOG adapter gstadapter.c:572:void gst_adapter_unmap(GstAdapter *): unmap memory buffer 0x7fd1aca383f0
0:00:08.151247000 974 0x7fd1ac864b20 LOG adapter gstadapter.c:655:void gst_adapter_flush_unchecked(GstAdapter *, gsize): flushing out head buffer
So I assume there is a problem with the demuxer, maybe because I only send video data and no audio data. But without the mpegtsmux in my code, I get the error that the payload size exceeds the 1316 bytes (seven 188-byte TS packets) allowed by the SRT protocol.
Here is the code:
main.cpp
#include <iostream>
#include <string>
#include <mutex>
#include <thread>
#include <time.h>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/types_c.h>
#include <opencv2/highgui/highgui.hpp>
#include <gstreamer-1.0/gst/gstelement.h>
#include <gstreamer-1.0/gst/gstpipeline.h>
#include <gstreamer-1.0/gst/gstutils.h>
#include <gstreamer-1.0/gst/app/gstappsrc.h>
#include <gstreamer-1.0/gst/base/gstbasesrc.h>
#include <gstreamer-1.0/gst/video/video.h>
#include <gstreamer-1.0/gst/gst.h>
#include <gstreamer-1.0/gst/check/gstbufferstraw.h>
#include <glib.h>
#define GST_CAT_DEFAULT appsrc_pipeline_debug
GST_DEBUG_CATEGORY(appsrc_pipeline_debug);
using namespace std;
/*
* bus: simple system for forwarding messages from streaming threads to app in own thread context
* pad:
* caps:
* signal:
* callback:
*
*/
static std::mutex m;
GMainLoop *loop;
typedef struct _App App;
struct _App {
GstElement *videoenc;
GstElement *appsrc;
GstElement *videoconvert;
GstElement *sink;
guint sourceid;
GstElement *mpegts;
};
App s_app;
int counter = 0;
static gboolean cb_need_data(App *app) {
static GstClockTime timestamp = 0;
GstBuffer *buffer;
guint buffersize;
GstFlowReturn ret;
GstMapInfo info;
counter++;
m.lock();
cv::Mat image_mat = cv::imread("./../data/squat.jpg");
cv::Mat resized_mat;
cv::resize(image_mat, resized_mat, cv::Size(640, 480));
buffersize = guint(resized_mat.cols * resized_mat.rows * resized_mat.channels());
buffer = gst_buffer_new_and_alloc(buffersize);
uchar *img_data = resized_mat.data; // copy the resized frame, not the original, to match buffersize
m.unlock();
if (gst_buffer_map(buffer, &info, (GstMapFlags) GST_MAP_WRITE)) {
memcpy(info.data, img_data, buffersize);
gst_buffer_unmap(buffer, &info);
} else {
g_print("error at memcpy");
}
// appsrc runs in GST_FORMAT_TIME, so stamp each buffer (25/1 matches the declared caps)
GST_BUFFER_PTS(buffer) = timestamp;
GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 25);
timestamp += GST_BUFFER_DURATION(buffer);
g_signal_emit_by_name(app->appsrc, "push-buffer", buffer, &ret);
if (ret != GST_FLOW_OK) {
g_print("Ops\n");
GST_DEBUG ("something wrong in cb_need_data");
g_main_loop_quit(loop);
}
gst_buffer_unref(buffer);
return TRUE;
}
static void start_feed(GstElement *pipeline, guint size, App *app) {
if (app->sourceid == 0) {
app->sourceid = g_timeout_add(67, (GSourceFunc) cb_need_data, app);
}
}
static void stop_feed(GstElement *pipeline, App *app) {
if (app->sourceid != 0) {
g_source_remove(app->sourceid);
app->sourceid = 0;
}
}
static gboolean bus_call(GstBus *bus, GstMessage *message, gpointer data) {
GError *err = nullptr;
gchar *dbg_info = nullptr;
GST_DEBUG ("got message %s", gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
gst_message_parse_error(message, &err, &dbg_info);
g_printerr("ERROR from element %s: %s\n",
GST_OBJECT_NAME (message->src), err->message);
g_printerr("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
g_error_free(err);
g_free(dbg_info);
g_main_loop_quit(loop);
break;
}
case GST_MESSAGE_EOS:
g_main_loop_quit(loop);
break;
default:
break;
}
return TRUE;
}
void startStream() {
App *app = &s_app;
GstCaps *caps2;
GstCaps *caps3;
GstBus *bus;
GstElement *pipeline;
gst_init(nullptr, nullptr);
loop = g_main_loop_new(nullptr, TRUE);
/*
* pipeline elements:
* appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtsink
*/
// create pipeline
pipeline = gst_pipeline_new("gstreamer-encoder");
if (!pipeline) {
g_print("Error creating pipeline");
}
// create appsrc element
app->appsrc = gst_element_factory_make("appsrc", "appsrc");
if (!app->appsrc) {
g_print("Error creating appsrc");
}
// create videoconvert element
app->videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
if (!app->videoconvert) {
g_print("Error creating videoconvert element");
}
// create videoencoder element
app->videoenc = gst_element_factory_make("x264enc", "encoder");
if (!app->videoenc) {
g_print("Error creating encoder");
}
app->mpegts = gst_element_factory_make("mpegtsmux", "mpegtsmux");
if (!app->mpegts) {
g_print("Error creating mpegtsmuxer");
}
app->sink = gst_element_factory_make("srtclientsink", "sink");
if (!app->sink) {
g_print("Error creating sink");
}
g_print("Elements are created\n");
g_object_set(G_OBJECT(app->sink), "uri", "srt://127.0.0.1:8888", nullptr);
g_object_set(G_OBJECT(app->sink), "msg-size", 1316, nullptr);
g_object_set(G_OBJECT(app->sink), "latency", 120, nullptr);
g_object_set(G_OBJECT(app->videoenc), "bitrate", 256, nullptr);
g_print("End of settings\n");
caps2 = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "RGB",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 25, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
nullptr);
gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps2);
g_object_set(G_OBJECT (app->appsrc), "stream-type", 0, "format", GST_FORMAT_TIME, nullptr);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
g_assert(bus);
gst_bus_add_watch(bus, (GstBusFunc) bus_call, app);
gst_bin_add_many(GST_BIN(pipeline), app->appsrc, app->videoconvert, app->videoenc,
app->mpegts, app->sink, nullptr);
g_print("Added all the elements to the pipeline\n");
int ok = FALSE;
// the muxer must be part of the link chain as well, to match the intended
// appsrc -> videoconvert -> encoder -> mpegtsmux -> srtclientsink pipeline
ok = gst_element_link_many(app->appsrc, app->videoconvert, app->videoenc,
app->mpegts, app->sink, nullptr);
if (ok)
g_print("Linked all elements together\n");
else
g_print("Linking error\n");
g_assert(app->appsrc);
g_assert(GST_IS_APP_SRC(app->appsrc));
g_signal_connect(app->appsrc, "need-data", G_CALLBACK(start_feed), app);
g_signal_connect(app->appsrc, "enough-data", G_CALLBACK(stop_feed), app);
g_print("Playing the video\n");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Running...\n");
g_main_loop_run(loop);
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(bus);
g_main_loop_unref(loop);
g_print("Deleting pipeline\n");
}
int main(int argc, char **argv) {
startStream();
return 0;
}
CMakeLists.txt
cmake_minimum_required(VERSION 3.13)
project(opencv_gstreamer)
set(CMAKE_CXX_STANDARD 14)
find_package(PkgConfig REQUIRED)
pkg_search_module(OPENCV opencv4 REQUIRED)
pkg_search_module(GSTREAMER gstreamer-1.0 REQUIRED)
pkg_search_module(APP_GSTREAMER gstreamer-app-1.0 REQUIRED)
pkg_search_module(SRT srt REQUIRED)
pkg_search_module(GLIB glib-2.0 REQUIRED)
include_directories(
${OPENCV_INCLUDE_DIRS}
${GSTREAMER_INCLUDE_DIRS}
${APP_GSTREAMER_INCLUDE_DIRS}
${GLIB_INCLUDE_DIRS}
${SRT_INCLUDE_DIRS})
link_directories(
${OPENCV_LIBRARY_DIRS}
${GSTREAMER_LIBRARY_DIRS}
${APP_GSTREAMER_LIBRARY_DIRS}
${GLIB_LIBRARY_DIRS}
${SRT_LIBRARY_DIRS})
link_libraries(
${OPENCV_LDFLAGS}
pthread
${GSTREAMER_LDFLAGS}
${APP_GSTREAMER_LDFLAGS}
${GLIB_LDFLAGS}
${SRT_LDFLAGS})
add_compile_options(
${OPENCV_CFLAGS}
${GSTREAMER_CFLAGS}
${APP_GSTREAMER_CFLAGS}
${GLIB_CFLAGS}
${SRT_CFLAGS})
add_executable(opencv_gstreamer src/main.cpp)
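Regarding the 1316-byte limit: that is seven 188-byte MPEG-TS packets, the conventional SRT/UDP payload size. mpegtsmux can be asked to emit buffers of exactly that alignment via its alignment property; a sketch (property name as in current gst-plugins-bad, worth verifying on GStreamer 1.15):

/* Emit 7 TS packets per buffer: 7 * 188 = 1316 bytes, the SRT payload ceiling. */
g_object_set(G_OBJECT(app->mpegts), "alignment", 7, nullptr);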

place external live video frames from non supported V4L file into Gstreamer Qt , C++ Qthreads

OS: Ubuntu 14.04
SDK: Qt
GStreamer: > 1.0
I am wondering how I would feed continuously captured frames from a camera without V4L support into GStreamer.
Actually my task is to grab frames from the camera and use only GStreamer to send them to a different computer via UDP. But at the moment, I just want to display them on my machine.
What I have done so far:
a) Implemented code in Qt for an IDS camera that captures frames and displays them in Qt as a live stream.
b) Separately, I have written (or rather copied) code that displays a live stream via GStreamer using a webcam that V4L supports.
Now, as I mentioned, I want to use GStreamer to display the captured frames in the Qt environment.
I have developed code in Qt 5.5 which uses multithreading to run separate threads for GStreamer, frame capture, and the GUI. The code has become quite long, but I will try my best to place minimal code here.
Issue: when I run the code with debug messages added, I can see frames continuously coming from the capture thread into the main thread, and the GStreamer pipeline starts successfully, but I only get the debug message from cb_need_data once and nothing after that.
Source code is shown below.
The streaming GStreamer class:
class StreamG : public QObject
{
Q_OBJECT
public:
explicit StreamG(QObject *parent = 0);
bool addLinkElements();
static void cb_need_data (GstElement *appsrc,
guint unused_size,
gpointer user_data);
static GMainLoop *loop;
static char* bufferFrame;
signals:
void sigFinish();
public slots:
void start();
void stop();
private:
GstElement *pipeline, *source, *sink, *convert;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
};
The streaming class implementation is below:
GMainLoop* StreamG::loop;
char* StreamG::bufferFrame = NULL; // this will take buffer frames from other function
void StreamG::cb_need_data (GstElement *appsrc,
guint unused_size,
gpointer user_data )
{
qDebug()<< " cb_need_data is called ...";
static GstClockTime timestamp = 0;
GstBuffer *buffer;
guint size;
GstFlowReturn ret;
guchar *data1;
GstMapInfo map;
data1 = (guchar *)bufferFrame;
size = 385*288*2;
if( data1 )
{
buffer = gst_buffer_new_allocate (NULL, size, NULL);
gst_buffer_map (buffer, &map, GST_MAP_WRITE);
memcpy( (guchar *)map.data, data1, gst_buffer_get_size( buffer ) );
GST_BUFFER_PTS (buffer) = timestamp;
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
timestamp += GST_BUFFER_DURATION (buffer);
g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
if (ret != GST_FLOW_OK)
{
// something wrong, stop pushing //
g_debug("push buffer returned %d for %d bytes \n", ret, size);
g_main_loop_quit (loop);
}
}
}
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
qDebug() <<" end of msg in gstreamer";
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
qDebug() <<" end of msg in gstreamer";
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
StreamG::StreamG(QObject *parent) : QObject(parent)
{
// Initialize GStreamer /
gst_init( NULL, NULL );
loop = g_main_loop_new( NULL, FALSE );
// Create the elements
source = gst_element_factory_make ("appsrc", "source");
sink = gst_element_factory_make ("autovideosink", "sink");
convert =gst_element_factory_make("videoconvert","convert");
g_assert( convert );
pipeline = gst_pipeline_new ("test-pipeline");
/* g_object_set (G_OBJECT (source), "caps",
gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "RGB",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 360,
"framerate", GST_TYPE_FRACTION, 1, 1,
NULL), NULL);*/
g_object_set (G_OBJECT (source), "caps",
gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "RGB",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 360, NULL), NULL);
}
void StreamG::start()
{
addLinkElements();
gst_element_set_state (pipeline, GST_STATE_PLAYING);
// Iterate
g_print ("Running...Gstreamer\n");
g_main_loop_run (loop);
// Out of the main loop, clean up nicely
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
}
void StreamG::stop()
{
g_print ("Deleting pipeline\n");
g_main_loop_quit(loop);
gst_object_unref(GST_OBJECT(pipeline));
gst_object_unref (bus);
g_main_loop_unref (loop);
emit sigFinish();
}
bool StreamG::addLinkElements()
{
if (!pipeline || !source || !sink || !convert )
{
g_printerr ("Not all elements could be created.\n");
return false;
}
// g_object_set (G_OBJECT ( source ), "device", "/dev/video0", NULL);
gst_bin_add_many( GST_BIN (pipeline), source , sink, convert, NULL );
if (gst_element_link (convert, sink) != TRUE)
{
g_printerr ("Elements could not be linked confert sink.\n");
gst_object_unref (pipeline);
return false;
}
if (gst_element_link (source, convert) != TRUE)
{
g_printerr ("Elements could not be linked source -convert.\n");
gst_object_unref (pipeline);
return false;
}
g_print("Linked all the Elements together\n");
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
g_object_set (G_OBJECT (source),
"stream-type", 0,
"format", GST_FORMAT_TIME, NULL);
g_signal_connect (source, "need-data", G_CALLBACK (cb_need_data), NULL);
return true;
}
Functions in the main widget (I have placed only the important member variables and functions):
class UEYEMain : public QWidget
{
Q_OBJECT
public:
int openCamera( bool bStartLive );
INT _GetImageID (char* pbuf);//
bool _AllocImages(); //function for IDS camera
void onLive(); // function for IDS camera
void transferLastFrameToGstream();
private slots:
void eventreceived (int event); // this is slot which receives frames and copied into StreamingG static varibale
private:
Ui::UEYEMain *ui;
.......
.......
StreamG* StreamingG;
QElapsedTimer m_Time;
QRgb m_table[256];
int m_nUpdateTicks;
QThread* threadForStream;
char *m_pLastBuffer;
EventThread *m_pEvFrame; // Another thread to recive frames
void ProcessFrame(); // function on receiving frames
void DrawImage (char *pBuffer); // this draw image to Qt widget , I use it for testing purpose
};
void UEYEMain::eventreceived (int event)
{
bool bUpdateCameraList = false;
switch (event)
{... some other cases
case IS_SET_EVENT_FRAME:
qDebug() << " new frame received";
if (!m_hCamera)
{
break;
}
ProcessFrame ();
break;
default:
break;
}
}
void UEYEMain::transferLastFrameToGstream()
{
//memcpy( StreamingG->bufferFrame, m_pLastBuffer, sizeof(m_pLastBuffer) );
if(m_pLastBuffer ) // just pointing buffer to streamG variable
{
StreamingG->bufferFrame = m_pLastBuffer;
}
}
void UEYEMain::ProcessFrame ()
{
INT dummy = 0;
char *pLast = NULL, *pMem = NULL;
qDebug() << " counter for frame recv -->" << countFrameDebug;
countFrameDebug++;
is_GetActSeqBuf (m_hCamera, &dummy, &pMem, &pLast);
m_pLastBuffer = pLast;
if (m_bReady)
{
m_bReady = FALSE;
update();
if (m_pLastBuffer )
{
int nTicks = 0;
// Frame rate limit ?
if (m_nUpdateTicks > 0)
{
nTicks = m_Time.elapsed();
bDraw = (nTicks >= m_nUpdateTicks) ? true : false;
}
if (bDraw)
{
nDisplayed++;
m_Time.restart();
transferLastFrameToGstream();
//DrawImage(m_pLastBuffer); // this func succesffully stream video on Qt widget
}
}
}
}
void UEYEMain::onLive()
{
INT nRet = 1;
time_t start;
static char str[64];
if (!m_bLive)
{
m_bLive = TRUE;
m_bReady = TRUE;
is_CaptureVideo (m_hCamera, IS_DONT_WAIT);
threadForStream->start();
}
}
The above function onLive() is called via the workThreadFinished slot, queued from another thread:
connect(m_workThread, SIGNAL(finished()), this, SLOT(workThreadFinished()), Qt::QueuedConnection);
The following is the output I get; I don't see StreamG::cb_need_data being called more than once.
no of camera detected : 1
started event 2 detection!// this thread acquire frames
started event 8 detection!
Linked all the Elements together // gst
Running...Gstreamer //gstreamer
cb_need_data is called ... // gstreamer
new frame received //
counter for frame recv --> 0
new frame received
counter for frame recv --> 1
new frame received
counter for frame recv --> 2
new frame received
........... and so on
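One plausible explanation, from the listings above: the appsrc caps declare RGB at 640x360 (640 * 360 * 3 = 691200 bytes per frame), while cb_need_data pushes size = 385*288*2 = 221760 bytes. If the pushed buffers don't match the negotiated caps (or overflow appsrc's internal queue), the pipeline never gets a usable frame and appsrc stops emitting need-data, which matches the output shown. A sketch of caps that describe the bytes actually pushed, assuming the IDS camera really delivers 16-bit 385x288 frames (format, size, and framerate here are guesses; adjust to the camera's real output):

g_object_set (G_OBJECT (source), "caps",
              gst_caps_new_simple ("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY16_LE",
                                   "width", G_TYPE_INT, 385,
                                   "height", G_TYPE_INT, 288,
                                   "framerate", GST_TYPE_FRACTION, 30, 1,
                                   NULL), NULL);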

gstreamer appsrc video streaming over the network

I'm trying to use GStreamer appsrc to play a video stream over the network.
I found good examples here.
gstreamer appsrc test application
http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
Using the examples above I can play a video in an X window using Xlib. When the pipeline is set to the PLAYING state, the "need-data" signal is emitted, and in the start_feed callback the data read from a video file is injected into the appsrc as GstBuffers, which plays the sample video.
I'm trying to get data from the network instead of a file, so I think a simple echo server could read a video file exactly the same way as above and send the data to the client when a connection occurs. The client would get this data and put it into the appsrc.
My question is: how do I put stream data into the appsrc pipeline? Can anybody give a suggestion or a good reference?
Here's working sample code based on the above links' examples.
// http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
// http://www.cs.odu.edu/~cs476/Xlib/xlines.c
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <stdio.h>
#include <unistd.h> // sleep()
#include <stdbool.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>
#include <gst/interfaces/xoverlay.h>
#define BUFF_SIZE (640*480*3)//(1024)
#define BORDER_WIDTH 2
#define DEBUG printf
typedef unsigned int uint32;
typedef unsigned char uint8;
typedef struct {
GstPipeline *pipeline;
GstAppSrc *src;
GstElement *sink;
GstElement *decoder;
GstElement *ffmpeg;
GstElement *videosink;
GMainLoop *loop;
guint sourceid;
FILE *file;
} gst_app_t;
static gst_app_t gst_app;
static Window child_window = 0;
static Window window = 0;
static gboolean read_data(gst_app_t *app)
{
GstBuffer *buffer;
guint8 *ptr;
gint size;
GstFlowReturn ret;
ptr = g_malloc(BUFF_SIZE);
g_assert(ptr);
size = fread(ptr, 1, BUFF_SIZE, app->file);
if(size == 0){
ret = gst_app_src_end_of_stream(app->src);
DEBUG("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
buffer = gst_buffer_new();
GST_BUFFER_MALLOCDATA(buffer) = ptr;
GST_BUFFER_SIZE(buffer) = size;
GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
ret = gst_app_src_push_buffer(app->src, buffer);
if(ret != GST_FLOW_OK){
DEBUG("push buffer returned %d for %d bytes \n", ret, size);
return FALSE;
}
if(size != BUFF_SIZE){
ret = gst_app_src_end_of_stream(app->src);
DEBUG("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
return TRUE;
}
static void start_feed (GstElement * pipeline, guint size, gst_app_t *app)
{
if (app->sourceid == 0) {
DEBUG ("start feeding\n");
app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
}
}
static void stop_feed (GstElement * pipeline, gst_app_t *app)
{
if (app->sourceid != 0) {
DEBUG ("stop feeding\n");
g_source_remove (app->sourceid);
app->sourceid = 0;
}
}
static void on_pad_added(GstElement *element, GstPad *pad)
{
GstCaps *caps;
GstStructure *str;
gchar *name;
GstPad *ffmpegsink;
GstPadLinkReturn ret;
DEBUG("pad added\n");
caps = gst_pad_get_caps(pad);
str = gst_caps_get_structure(caps, 0);
g_assert(str);
name = (gchar*)gst_structure_get_name(str);
DEBUG("pad name %s\n", name);
if(g_strrstr(name, "video")){
ffmpegsink = gst_element_get_pad(gst_app.ffmpeg, "sink");
g_assert(ffmpegsink);
ret = gst_pad_link(pad, ffmpegsink);
DEBUG("pad_link returned %d\n", ret);
gst_object_unref(ffmpegsink);
}
gst_caps_unref(caps);
}
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer *ptr)
{
gst_app_t *app = (gst_app_t*)ptr;
switch(GST_MESSAGE_TYPE(message))
{
case GST_MESSAGE_ELEMENT: {
gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC(message)), child_window);
}
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *err;
gst_message_parse_error(message, &err, &debug);
DEBUG("Error %s\n", err->message);
g_error_free(err);
g_free(debug);
g_main_loop_quit(app->loop);
}
break;
case GST_MESSAGE_WARNING:
{
gchar *debug;
GError *err;
gchar *name;
gst_message_parse_warning(message, &err, &debug);
DEBUG("Warning %s\nDebug %s\n", err->message, debug);
name = GST_MESSAGE_SRC_NAME(message);
DEBUG("Name of src %s\n", name ? name : "nil");
g_error_free(err);
g_free(debug);
}
break;
case GST_MESSAGE_EOS:
DEBUG("End of stream\n");
g_main_loop_quit(app->loop);
break;
case GST_MESSAGE_STATE_CHANGED:
break;
default:
DEBUG("got message %s\n", \
gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
break;
}
return TRUE;
}
static gboolean terminate_playback (GstElement * loop)
{
DEBUG ("Terminating playback\n");
g_main_loop_quit ((GMainLoop *)loop);
return FALSE;
}
int gstreamer_init(int argc, char *argv[])
{
gst_app_t *app = &gst_app;
GstBus *bus;
GstStateChangeReturn state_ret;
app->file = fopen(argv[1], "r");
g_assert(app->file);
/* initialization */
gst_init(NULL, NULL);
app->loop = g_main_loop_new(NULL, FALSE);
/* create elements */
app->pipeline = (GstPipeline *)gst_pipeline_new("my_pipeline");
app->src = (GstAppSrc *)gst_element_factory_make("appsrc", "myappsrc");
app->decoder = gst_element_factory_make("decodebin2", "mydecoder");
app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg");
app->videosink = gst_element_factory_make("autovideosink", "myvideosink");
if (!app->videosink) {
DEBUG ("output could not be found - check your install\n");
}
g_assert(app->src);
g_assert(app->decoder);
g_assert(app->ffmpeg);
g_assert(app->videosink);
bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
gst_object_unref(bus);
g_signal_connect(app->decoder, "pad-added",
G_CALLBACK(on_pad_added), app->ffmpeg);
//gst_app_src_set_emit_signals(app->src, true);
g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);
gst_bin_add_many (GST_BIN (app->pipeline), (GstElement *)app->src,
app->decoder, app->ffmpeg, app->videosink, NULL);
/* link everything together */
if (!gst_element_link((GstElement *)app->src, app->decoder)) {
DEBUG ("Failed to link one or more elements!\n");
return -1;
}
if(!gst_element_link(app->ffmpeg, app->videosink)){
DEBUG("failed to link ffmpeg and videosink");
return -1;
}
state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_PLAYING);
if (state_ret == GST_STATE_CHANGE_FAILURE) {
DEBUG("Failed to start up pipeline!\n");
return 1;
}
DEBUG("set state returned %d\n", state_ret);
//g_timeout_add (15000, (GSourceFunc) terminate_playback, app->loop);
g_main_loop_run(app->loop);
state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_NULL);
DEBUG("set state null returned %d\n", state_ret);
gst_object_unref(app->pipeline);
return 1;
}
/*
* gst-launch filesrc location=test.avi ! decodebin2 ! ffmpegcolorspace ! autovideosink
*
* 1. dependency library install
* $ sudo apt-get install gstreamer0.10-plugins-bad
* $ sudo apt-get install gstreamer0.10-ffmpeg
*
* 2. compile
* $ gcc hello.c -o hello -lX11 `pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10` -lgstinterfaces-0.10
*
* 3. how to run program
* $ ./hello <video_file_name>
* $ GST_DEBUG=appsrc:5 ./hello ./hbo_dtc_sd.ts
*/
int main(int argc, char *argv[])
{
Display *disp;
Window root;
long fgcolor, bgcolor;
GC gc;
XGCValues gc_val;
XEvent event;
char *msg = "Hello, World!";
int screen;
disp = XOpenDisplay(NULL);
if (disp == NULL) {
fprintf(stderr, "Cannot open display\n");
exit(1);
}
screen = DefaultScreen(disp);
root = RootWindow(disp, screen);
fgcolor = BlackPixel(disp, screen);
bgcolor = WhitePixel(disp, screen);
window = XCreateSimpleWindow(disp, root, 100, 100, 1000, 840, 1,
fgcolor, bgcolor);
child_window = XCreateSimpleWindow(disp, window, 100, 100, 800, 600, 1,
fgcolor, bgcolor);
gc_val.foreground = fgcolor;
gc_val.background = bgcolor;
gc = XCreateGC(disp, child_window, GCForeground|GCBackground, &gc_val);
XSelectInput(disp, child_window, ExposureMask | KeyPressMask);
g_warning("map xwindow");
//XMapWindow(disp, window);
XMapWindow(disp, window);
XMapWindow(disp, child_window);
XSync(disp, FALSE);
//XDrawLine (disp, window, gc, 0, 0, 1000, 800);
//XDrawLine (disp, child_window, gc, 0, 0, 800, 600);
gstreamer_init(argc, argv);
XDestroyWindow( disp, window );
XDestroyWindow( disp, child_window );
XCloseDisplay( disp );
return 0;
}
You'll want at least one other thread (on each end) to handle communication over a socket (TCP, or UDP if on a local network). This typically makes a blocking call to wait for packets. To send data, you can form a GStreamer tee and queue, and then an appsink to pull data out of the pipeline and write it to a socket. To receive, you can pull the data from the socket into a buffer. Keep in mind the OS's socket buffer is relatively small and will drop packets if you don't pull from it fast enough, or push to it too fast. Hence the buffers.
On a need-data signal, you push from that buffer into the pipeline using gst_app_src_push_buffer(). On an enough-data signal, you can keep buffering or dispose of the data, whatever your application needs to do.
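As a concrete sketch of the receive side, here is a socket-fed variant of read_data(). Note it uses the GStreamer 1.0 API (gst_buffer_new_wrapped), unlike the 0.10 code above, and assumes a connected TCP socket descriptor stored in a hypothetical app->sock field:

#include <sys/socket.h>

static gboolean read_socket_data(gst_app_t *app)
{
    guint8 *ptr = g_malloc(BUFF_SIZE);
    gssize size = recv(app->sock, ptr, BUFF_SIZE, 0); /* blocks until data arrives */

    if (size <= 0) { /* peer closed the connection, or error */
        g_free(ptr);
        gst_app_src_end_of_stream(app->src);
        return FALSE;
    }

    /* The wrapped buffer takes ownership of ptr and frees it with g_free(). */
    GstBuffer *buffer = gst_buffer_new_wrapped(ptr, size);
    if (gst_app_src_push_buffer(app->src, buffer) != GST_FLOW_OK)
        return FALSE; /* pipeline is flushing or errored: stop feeding */

    return TRUE;
}

As in the original, start_feed/stop_feed attach and remove this as an idle handler; since the blocking recv() would stall the main loop, in practice it is better to run the socket read on its own thread feeding a queue that the idle handler drains, as described above.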