If I close the sender in this example, the video displayed by the receiver freezes. Is there a way to display a static "no signal" image instead, for example an all-blue image, and have the video return when the sender restarts?
Sender
gst-launch-1.0 videotestsrc ! video/x-raw,format=GRAY8 ! videoconvert ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
Receiver
gst-launch-1.0 udpsrc port=5000 ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
EDIT
This code seems to come close, but for some reason, if I add the videotestsrc by uncommenting the commented-out lines, the udpsrc no longer calls the timeout callback:
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
struct gstreamer_data {
GstElement* pipeline;
GstElement* no_signal_source;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* input_selector;
GstElement* video_converter;
GstElement* video_sink;
gulong signal_handler_id;
GMainLoop* main_loop;
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
GstBus* bus;
printf("have data\n");
bus = gst_element_get_bus(user_data->pipeline);
user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
gst_object_unref(bus);
return GST_PAD_PROBE_REMOVE;
}
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
const GstStructure* st = gst_message_get_structure(message);
GstPad* pad;
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("Timeout received from udpsrc\n");
g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
pad = gst_element_get_static_pad(data->udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
gst_object_unref(pad);
}
}
}
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.no_signal_source = gst_element_factory_make("videotestsrc", "no_signal_source");
g_object_set(G_OBJECT(data.no_signal_source),
"pattern", 6,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.no_signal_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.input_selector ||
!data.video_converter ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
//data.no_signal_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.input_selector,
data.video_converter,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_decoder, "src");
GstPad* sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, sink_1);
/*
GstPad* src_2 = gst_element_get_static_pad(data.no_signal_source, "src");
GstPad* sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, sink_2);
*/
g_object_set(G_OBJECT(data.input_selector),
"active-pad", sink_1,
NULL);
if (gst_element_link_many(
data.input_selector,
data.video_converter,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
gst_object_unref(pad);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT
This code seems fine until I uncomment the selection of the active pad in the callbacks. Do I need to do something before I change the active pad, like stop the pipeline?
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
struct gstreamer_data {
GstElement* pipeline;
GstElement* video_source;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* video_converter;
GstElement* input_selector;
GstPad* sink_1;
GstPad* sink_2;
GstElement* video_sink;
gulong signal_handler_id;
GMainLoop* main_loop;
};
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
GstBus* bus;
printf("have data\n");
/*
g_object_set(G_OBJECT(user_data->input_selector),
"active-pad", user_data->sink_2,
NULL);
*/
bus = gst_element_get_bus(user_data->pipeline);
user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
gst_object_unref(bus);
return GST_PAD_PROBE_REMOVE;
}
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
const GstStructure* st = gst_message_get_structure(message);
GstPad* pad;
if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("no data\n");
/*
g_object_set(G_OBJECT(data->input_selector),
"active-pad", data->sink_1,
NULL);
*/
g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);
pad = gst_element_get_static_pad(data->udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
gst_object_unref(pad);
}
}
}
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
GstPad* pad;
gst_init(NULL, NULL);
data.video_source = gst_element_factory_make("videotestsrc", "video_source");
g_object_set(G_OBJECT(data.video_source),
"pattern", 6,
"is-live", true,
NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.input_selector = gst_element_factory_make("input-selector", "input_selector");
data.video_sink = gst_element_factory_make("autovideosink", "video_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.video_source ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.input_selector ||
!data.video_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.video_source,
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
data.input_selector,
data.video_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
data.video_converter,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
GstPad* src_1 = gst_element_get_static_pad(data.video_source, "src");
data.sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_1, data.sink_1);
gst_object_unref(src_1);
GstPad* src_2 = gst_element_get_static_pad(data.video_converter, "src");
data.sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
gst_pad_link(src_2, data.sink_2);
gst_object_unref(src_2);
if (gst_element_link_many(
data.input_selector,
data.video_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
pad = gst_element_get_static_pad(data.udp_source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
gst_object_unref(pad);
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
EDIT:
It seems fine all of a sudden. I don't understand. Is this suitable code? Can it be improved?
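One possible simplification, sketched here under the assumption that the struct and pads from the code above stay as they are: keep a single "message::element" handler connected for the whole run and leave the pad probe installed, so the two callbacks only flip the selector instead of connecting and disconnecting handlers each time. The signal_present flag is invented for this sketch.
/* Sketch only: one persistent bus watch plus one persistent pad probe.
 * signal_present is a hypothetical flag, not part of the original code. */
static gboolean signal_present = FALSE;
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo* info, gstreamer_data* data) {
    (void) pad; (void) info;
    if (!signal_present) {
        signal_present = TRUE;
        /* switch to the live branch (sink_2 in the code above) */
        g_object_set(G_OBJECT(data->input_selector), "active-pad", data->sink_2, NULL);
    }
    return GST_PAD_PROBE_OK; /* keep the probe installed instead of re-adding it */
}
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    (void) bus;
    const GstStructure* st = gst_message_get_structure(message);
    if (st && gst_structure_has_name(st, "GstUDPSrcTimeout") && signal_present) {
        signal_present = FALSE;
        /* fall back to the test pattern (sink_1 in the code above) */
        g_object_set(G_OBJECT(data->input_selector), "active-pad", data->sink_1, NULL);
    }
}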
EDIT:
Setting a width and height on the videotestsrc in the sender seems to make it work. If I remove those, it breaks. Why?
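A plausible explanation, not verified here: input-selector forwards the caps of whichever input is active, so if the test-pattern branch and the decoded UDP branch produce different resolutions, every switch forces downstream renegotiation, and the UDP branch's resolution is only known once H.264 data arrives. Pinning both branches to the same fixed caps sidesteps this; a sketch, where I420/640x480/30fps are arbitrary values that would have to match the sender:
/* Hypothetical: force the fallback branch to the same raw format the
 * decoded network branch produces, so switching never changes caps. */
GstElement* fallback_caps = gst_element_factory_make("capsfilter", "fallback_caps");
GstCaps* fixed = gst_caps_new_simple("video/x-raw",
    "format", G_TYPE_STRING, "I420",
    "width", G_TYPE_INT, 640,
    "height", G_TYPE_INT, 480,
    "framerate", GST_TYPE_FRACTION, 30, 1,
    NULL);
g_object_set(G_OBJECT(fallback_caps), "caps", fixed, NULL);
gst_caps_unref(fixed);
/* then link: video_source -> fallback_caps -> input_selector request pad */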
Related
Hi, I want to create a GStreamer pipeline with two branches having different FPS. The C++ code I wrote is given below:
#include <iostream>
#include <string.h>
#include <gst/gst.h>
#include <gst/app/app.h>
using namespace std;
GstElement *src, *dbin, *conv, *tee, *mux, *parse, *pipeline;
GstElement *queue1,*videorate1, *conv1, *jenc1, *sink1;
GstElement *queue2,*videorate2, *conv2, *jenc2, *sink2;
GMainLoop *loop;
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
//Cpipeline *obj_pipeline = (Cpipeline*)user_data;
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_error (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_WARNING:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_warning (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
break;
}
case GST_MESSAGE_EOS:
g_print ("\nGot EOS\n");
g_main_loop_quit (loop);
break;
default:
break;
}
return TRUE;
}
static void pad_added_handler (GstElement *src, GstPad *new_pad, gpointer x)
{
GstPad *sink_pad = gst_element_get_static_pad (parse, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print ("We are already linked. Ignoring.\n");
goto exit;
}
new_pad_caps = gst_pad_get_current_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "video/x-h264")) {
g_print ("It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print ("Type is '%s' but link failed.\n", new_pad_type);
goto exit;
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
int main()
{
gst_init (NULL, NULL);
pipeline = gst_pipeline_new (NULL);
src = gst_element_factory_make ("filesrc", NULL);
mux = gst_element_factory_make("qtdemux",NULL);
parse = gst_element_factory_make("h264parse",NULL);
dbin = gst_element_factory_make ("nvv4l2decoder", NULL);
conv = gst_element_factory_make ("nvvideoconvert", NULL);
tee = gst_element_factory_make ("tee", NULL);
std::string url = "VD19_peoplewalking.mp4";
if (!pipeline || !src || !dbin || !conv || !tee || !mux || !parse) {
g_error ("Failed to create elements");
return -1;
}
g_object_set (src, "location", url.c_str(), NULL);
gst_bin_add_many (GST_BIN (pipeline), src, dbin, mux, parse, conv, tee, NULL);
if (!gst_element_link_many(src,mux,NULL) || !gst_element_link_many(parse,dbin,conv, tee,NULL) )//|| !gst_element_link_many (conv, tee, NULL))
{
g_error("Failed to link elements");
return -3;
}
g_signal_connect (mux, "pad-added", G_CALLBACK (pad_added_handler), NULL);
//First Branch creation
GstPadTemplate *templ;
templ =
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee),
"src_%u");
GstPad *teepad1 = gst_element_request_pad (tee, templ, NULL, NULL);
queue1 = gst_element_factory_make ("queue", NULL);
videorate1 = gst_element_factory_make("videorate",NULL);
conv1 = gst_element_factory_make ("nvvideoconvert", NULL);
//jenc = gst_element_factory_make ("jpegenc",NULL);
sink1 = gst_element_factory_make ("autovideosink", NULL);
//sink = gst_element_factory_make ("appsink", NULL);
g_object_set (G_OBJECT(videorate1), "rate", 1.0, NULL);
gst_bin_add_many (GST_BIN (pipeline), queue1, videorate1, conv1, sink1, NULL);
if (!gst_element_link_many ( queue1, conv1, videorate1, sink1, NULL))
{
g_error ("Failed to link elements");
}
GstPad *sinkpad = gst_element_get_static_pad ( queue1, "sink");
gst_pad_link ( teepad1, sinkpad);
gst_object_unref (sinkpad);
//First Branch creation ends
//Second Branch creation
GstPadTemplate *templ2;
templ2 =
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee),
"src_%u");
GstPad *teepad2 = gst_element_request_pad (tee, templ2, NULL, NULL);
queue2 = gst_element_factory_make ("queue", NULL);
videorate2 = gst_element_factory_make("videorate",NULL);
conv2 = gst_element_factory_make ("nvvideoconvert", NULL);
sink2 = gst_element_factory_make ("autovideosink", NULL);
g_object_set (G_OBJECT(videorate2), "rate", 0.5, NULL);
gst_bin_add_many (GST_BIN (pipeline), queue2, videorate2, conv2, sink2, NULL);
if (!gst_element_link_many ( queue2, conv2, videorate2, sink2, NULL))
{
g_error ("Failed to link elements");
}
GstPad *sinkpad2 = gst_element_get_static_pad ( queue2, "sink");
gst_pad_link ( teepad2, sinkpad2);
gst_object_unref (sinkpad2);
//Second branch creation ends
GstBus *bus;
loop = g_main_loop_new (NULL, FALSE);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message", G_CALLBACK (message_cb), NULL);
gst_object_unref (GST_OBJECT (bus));
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (pipeline);
}
Through the command line I am able to run multiple branches with different FPS; please see the command below:
gst-launch-1.0 filesrc location=VD19_peoplewalking.mp4 ! qtdemux ! h264parse ! nvv4l2decoder ! tee name=t ! queue ! videorate ! "video/x-raw(ANY),framerate=1/1" ! nvvideoconvert ! autovideosink t. ! videorate ! "video/x-raw(ANY),framerate=30/1" ! nvvideoconvert ! autovideosink
I am able to run the C++ code, but the streams are not played as expected. Both streams get stuck partway through while the code is running.
Am I missing something?
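One difference worth noting between the launch line and the code: in the launch line the caps framerate=1/1 after videorate make it drop or duplicate buffers to reach that output rate, whereas the videorate "rate" property set in the code is a speed factor. A sketch of reproducing the launch-line behaviour for the first branch, reusing the variable names from the code above:
/* Mirror "videorate ! video/x-raw,framerate=1/1" from the launch line:
 * link the branch normally, but put framerate caps between videorate1 and sink1. */
GstCaps *fps_caps = gst_caps_new_simple ("video/x-raw",
    "framerate", GST_TYPE_FRACTION, 1, 1,
    NULL);
if (!gst_element_link_many (queue1, conv1, videorate1, NULL) ||
    !gst_element_link_filtered (videorate1, sink1, fps_caps))
    g_error ("Failed to link first branch with framerate caps");
gst_caps_unref (fps_caps);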
I have a USB camera. I have working terminal commands to record or display full-HD video and to save one 4K image. I would like to handle it all via a C++ app. If we concentrate on the video saving:
gst-launch-1.0 v4l2src device=/dev/video0 num-buffers=900 ! image/jpeg, width=1920, height=1080, io-mode=4 ! imxvpudec ! imxvpuenc_mjpeg ! avimux ! filesink location=/mnt/ssd/test.avi
will save 900 frames (i.e. 30 s) of video. I would like to have C++ code that records indefinitely (in the future maybe in hour-long segments) until I (the app) tell it to end.
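For reference, the usual way to stop such a recording on demand, sketched here independently of the exact pipeline, is to inject an EOS event, wait for the EOS message so the muxer can finalize the file, and only then tear the pipeline down:
/* Ask the pipeline to finish: EOS travels from the sources downstream,
 * letting avimux write a proper index before the file is closed. */
gst_element_send_event (pipeline, gst_event_new_eos ());
GstBus *bus = gst_element_get_bus (pipeline);
GstMessage *msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
    (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (msg != NULL)
    gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);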
I came up with
struct {
GstElement *pipeline_sink, *source, *appsink;
GstElement *pipeline_src, *appsrc, *decoder, *mux, *sink, *encoder;
} usbCam::mGstData;
int usbCam::gstInit(){
GstCaps *caps;
GstStateChangeReturn ret;
// Initialize GStreamer
if (!gst_is_initialized()) {
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
// Create the elements
mGstData.source = gst_element_factory_make ("v4l2src", "source");
g_object_set (mGstData.source, "device", "/dev/video0", NULL);
mGstData.pipeline_sink = gst_pipeline_new ("pipeline_sink");
caps = gst_caps_new_any();
gst_app_sink_set_caps(GST_APP_SINK(mGstData.appsink), caps);
gst_caps_unref (caps);
gst_app_sink_set_emit_signals(GST_APP_SINK(mGstData.appsink), true);
// Build the pipeline
gst_bin_add_many (GST_BIN (mGstData.pipeline_sink), mGstData.source, mGstData.appsink, NULL);
if (gst_element_link_many(mGstData.source, mGstData.appsink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (mGstData.pipeline_sink);
return -1;
}
return 0;
}
int usbCam::videoStart(){
GstCaps *caps;
GstStateChangeReturn ret;
if (!mGstData.pipeline_sink || !mGstData.source) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
mGstData.appsrc = gst_element_factory_make ("appsrc", "appsrc");
mGstData.decoder = gst_element_factory_make ("imxvpudec", "transform_enc");
mGstData.mux = gst_element_factory_make ("avimux", "avimux");
mGstData.sink = gst_element_factory_make ("filesink", "sink");
g_object_set (mGstData.sink, "location", "/mnt/ssd/videoTest.avi", NULL);
mGstData.pipeline_src = gst_pipeline_new ("pipeline_src");
if (!mGstData.pipeline_src || !mGstData.appsrc || !mGstData.decoder || !mGstData.mux || !mGstData.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
caps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"io-mode", G_TYPE_INT, 4,
NULL);
gst_app_src_set_caps(GST_APP_SRC(mGstData.appsrc), caps);
gst_caps_unref (caps);
gst_app_src_set_duration(GST_APP_SRC(mGstData.appsrc), GST_TIME_AS_MSECONDS(80));
gst_app_src_set_stream_type(GST_APP_SRC(mGstData.appsrc), GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency(GST_APP_SRC(mGstData.appsrc), -1, 0);
gst_bin_add_many (GST_BIN (mGstData.pipeline_src), mGstData.appsrc, mGstData.decoder, mGstData.sink, NULL);
if (gst_element_link_many(mGstData.appsrc, mGstData.decoder, mGstData.sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (mGstData.pipeline_src);
return -1;
}
ret = gst_element_set_state (mGstData.pipeline_src, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (mGstData.pipeline_src);
return -1;
}
return 0;
}
int usbCam::videoEnd(){
{
gst_app_src_end_of_stream(GST_APP_SRC(mGstData.appsrc));
usleep(500000);
gst_element_set_state (mGstData.pipeline_src, GST_STATE_NULL);
gst_object_unref (mGstData.pipeline_src);
return 0;
}
Now, this code runs. There is no error in the output, though there is one warning:
(GLib-GObject-WARNING **: 17:51:34.132: g_object_set_is_valid_property: object class 'GstSplitMuxSink' has no property named 'h}\x9fe h\xe6a_no_\xc1')
What actually bothers me is the output file. It is created, but it is an empty file of 0 bytes. Can anyone point me in the direction of the proper fix?
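One observation about the code above: gstInit configures mGstData.appsink without ever creating it with gst_element_factory_make, and nothing ever pushes buffers into the appsrc of pipeline_src, so a 0-byte file is what one would expect. A sketch of bridging the two pipelines, assuming the appsink is created like the other elements and that gst/app/app.h is available (both assumptions; neither appears in the original):
/* Hypothetical bridge: pull each sample from the capture pipeline's appsink
 * and push it into the recording pipeline's appsrc. */
static GstFlowReturn on_new_sample (GstAppSink *appsink, gpointer user_data) {
    GstAppSrc *appsrc = GST_APP_SRC (user_data);
    GstSample *sample = gst_app_sink_pull_sample (appsink);
    if (sample == NULL)
        return GST_FLOW_ERROR;
    GstFlowReturn ret = gst_app_src_push_sample (appsrc, sample); /* does not take ownership */
    gst_sample_unref (sample);
    return ret;
}
/* registered with:
 * g_signal_connect (appsink, "new-sample", G_CALLBACK (on_new_sample), appsrc); */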
Edit: Today I came up with two other attempts. The first one is not that different from the one already posted here. The second gives me a pipeline with the wrong parameters (a different FPS), and I am unable to stop it correctly so that the file has a proper EOF.
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
std::string command = "v4l2src device=/dev/video0 ! image/jpeg, width=1920, height=1080, io-mode=4 ! imxvpudec ! imxvpuenc_mjpeg ! avimux ! filesink location = /mnt/ssd/testPipeline.avi";
/* Build the pipeline */
pipeline =
gst_parse_launch
(command.c_str(),
NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GstMessageType(
GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
EDIT2:
OK now my code looks like this:
GstElement *pipeline;
GstElement *tee; //in the future I would like to save video and images AND stream or use this pipeline data internally.
void gstFail(const gchar* message){
g_printerr(message);
gst_object_unref (pipeline);
return;
}
void videoStart(std::string path){
if (!gst_is_initialized()) {
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
GstCaps *caps;
GstStateChangeReturn ret;
GstElement *source, *muxer, *sink;
source = gst_element_factory_make ("v4l2src", "source");
g_object_set (source, "device", mVideoDevice.toStdString().c_str(), NULL);
muxer = gst_element_factory_make ("avimux", "avimux");
tee = gst_element_factory_make("tee", "tee");
sink = gst_element_factory_make ("filesink", "sink");
g_object_set (sink, "location", path.c_str(), NULL);
pipeline = gst_pipeline_new ("pipeline_src");
if (!pipeline || !source || !muxer || !sink) {
g_printerr ("Not all elements could be created.\n");
return;
}
caps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"io-mode", G_TYPE_INT, 4,
"framerate", GST_TYPE_FRACTION, 30, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1,1,
"interlace-mode", G_TYPE_STRING, "progresive",
NULL);
gst_bin_add_many (GST_BIN (pipeline), source, muxer,tee, sink, NULL);
if (gst_element_link_filtered(source, muxer, caps) != TRUE) {
gst_caps_unref (caps);
gstFail("Elements could not be linked or caps set.\n");
return;
}
gst_caps_unref (caps);
if (gst_element_link_many(muxer,tee, sink, NULL) != TRUE) {
gstFail("Elements could not be linked or caps set.\n");
return;
}
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
gstFail("Unable to set the pipeline to the playing state.\n");
return;
}
return;
}
void videoEnd(void)
{
GstMessage *message = gst_message_new_eos(&pipeline->object);
gst_bus_post(pipeline->bus, message);
/* Free resources */
if (message != NULL)
gst_message_unref (message);
gst_element_change_state(pipeline, GST_STATE_CHANGE_PLAYING_TO_PAUSED);
gst_element_change_state(pipeline, GST_STATE_CHANGE_PAUSED_TO_READY);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
}
void takeImage(std::string path){
GstElement *sink = gst_element_factory_make("multifilesink", "multifilesink");
g_object_set (sink, "location", path.c_str(), NULL);
gst_bin_add_many (GST_BIN (pipeline), sink, NULL);
if (gst_element_link_many(tee, sink, NULL) != TRUE) {
gstFail("Elements could not be linked or caps set.\n");
return;
}
return;
}
This saves the video ALMOST OK (VLC does not display the correct length, but when I look at the video file's properties via Nautilus in Ubuntu the correct length is displayed, and the video is playable). It does not save the pictures.
OK, so here's how I solved it: my initial pipeline is split with a tee element into two sinks: the original sink that saves the video, and an appsink. In the callback function for the appsink I create a new pipeline and push the frame into it any time I want to save an image. Basically:
...
int saveSampleFromAppsinkJpeg( GstSample *sample){
if (!shouldSaveImage) {
return -2;
}
if (capturing){
return -3;
}
std::thread([=]{
capturing = true;
GstStateChangeReturn ret;
GstElement *appsrc = gst_element_factory_make ("appsrc", "appsrc");
GstElement *sink = gst_element_factory_make ("multifilesink", "sink");
g_object_set (sink, "location", "some/path", NULL);
GstElement *pipeline_img = gst_pipeline_new ("pipeline_img");
if (!pipeline_img || !appsrc || !sink) {
g_printerr ("Not all elements could be created.\n");
capturing = false;
return -1;
}
gst_app_src_set_caps(GST_APP_SRC(appsrc), caps);
gst_app_src_set_duration(GST_APP_SRC(appsrc), GST_TIME_AS_MSECONDS(80)); // TODO 80
gst_app_src_set_stream_type(GST_APP_SRC(appsrc), GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency(GST_APP_SRC(appsrc), -1, 0);
gst_bin_add_many (GST_BIN (pipeline_img), appsrc, sink, NULL);
if (gst_element_link_many(appsrc, sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline_img);
capturing = false;
return -1;
}
ret = gst_element_set_state (pipeline_img, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline_img);
capturing = false;
return -1;
}
//push the image in the pipeline
GstFlowReturn status = GstFlowReturn::GST_FLOW_OK;
status = gst_app_src_push_sample(GST_APP_SRC(appsrc), sample);
if (status != GstFlowReturn::GST_FLOW_OK) g_printerr ("Sample for saving image not pushed.\n");
status = gst_app_src_end_of_stream(GST_APP_SRC(appsrc));
if (status != GstFlowReturn::GST_FLOW_OK) g_printerr ("EOS for saving image not pushed.\n");
//end the pipeline
usleep(500000); // Important
GstMessage *message = gst_message_new_eos(&pipeline_img->object);
gst_bus_post(pipeline_img->bus, message);
/* Free resources */
if (message != NULL)
gst_message_unref (message);
gst_element_set_state (pipeline_img, GST_STATE_PAUSED);
gst_element_set_state (pipeline_img, GST_STATE_NULL);
gst_object_unref (pipeline_img);
shouldSaveImage = false;
capturing = false;
return 1;
}).detach();
return 1;
}
static GstFlowReturn new_sample_jpeg(GstElement * elt)
{
GstSample *sample;
GstBuffer *buffer;
GstMemory *memory;
GstFlowReturn ret = GST_FLOW_OK;
// get the sample from appsink
sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
buffer = gst_sample_get_buffer (sample);
if (buffer != NULL) {
memory = gst_buffer_get_memory (buffer, 0);
if (memory != NULL) {
//now all data are image data. If image wanted->image save!
if (wantToSave) saveSampleFromAppsinkJpeg(sample);
}
...
}
}
void startVideo(){
if (!gst_is_initialized()) {
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
GstStateChangeReturn ret;
GstElement *source, *muxer, *sink, *queue_rcr, *queue_app, *appsink;
source = gst_element_factory_make ("v4l2src", "source");
g_object_set (source, "device", "/dev/video1", NULL);
muxer = gst_element_factory_make ("avimux", "avimux");
tee = gst_element_factory_make("tee", "tee");
sink = gst_element_factory_make ("filesink", "sink");
queue_rcr = gst_element_factory_make ("queue", "record_queue");
queue_app = gst_element_factory_make ("queue", "app_queue");
appsink = gst_element_factory_make("appsink", "appsink");
g_object_set (sink, "location", path.toStdString().c_str(), NULL);
pipeline = gst_pipeline_new ("pipeline_src");
if (!pipeline || !source || !muxer || !sink || !queue_rcr || !appsink) {
g_printerr ("Not all elements could be created.\n");
return;
}
caps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"io-mode", G_TYPE_INT, 4,
"framerate", GST_TYPE_FRACTION, 30, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1,1,
"interlace-mode", G_TYPE_STRING, "progresive",
NULL);
gst_bin_add_many (GST_BIN (pipeline), source, muxer,tee, sink,queue_rcr, appsink, queue_app, NULL);
if (gst_element_link_filtered(source, tee, caps) != TRUE) {
//failhandling
}
if (gst_element_link_many(tee, queue_rcr, muxer, sink, NULL) != TRUE) {
//failhandling
}
if (gst_element_link_many(tee, queue_app, appsink, NULL) != TRUE) {
//failhandling
}
gst_app_sink_set_emit_signals(GST_APP_SINK(appsink), true);
g_signal_connect (appsink, "new-sample", G_CALLBACK (new_sample_jpeg), NULL);
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
//failhandling
}
// Start playing
recording = true;
return;
}
I am trying to mix internal audio and microphone audio using the GStreamer audiomixer element, and then mux the single stream with video data. So far I can only do it when the sound card is already active.
I am using wasapisrc with the loopback=true property.
What I mean is that my code works when some song is already being played on the computer before I start it.
What I want to achieve is for the internal sound source to link with the audiomixer element dynamically. Right now it just gives me an error and the program crashes. What I have done so far is put the sound card source element in a separate bin from the main pipeline and add a data probe to the wasapisrc element; when there is sound, I try to link the source to a queue and then to the audiomixer in the main pipeline.
Any help on how I can dynamically link and unlink the source element into the audiomixer?
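For reference, the usual dynamic-link pattern, sketched here under the assumption of a source bin exposing a ghost pad named "ghostsrc" as in the code below: add the bin to the same pipeline, request a mixer sink pad, link, and sync the bin's state last. Unlinking would go the other way, through a blocking pad probe, before releasing the request pad.
/* Minimal sketch: attach an already-built source bin to audiomixer at runtime. */
static void attach_to_mixer (GstElement *pipeline, GstElement *src_bin, GstElement *mixer)
{
    gst_bin_add (GST_BIN (pipeline), src_bin); /* both ends must live in the same pipeline */
    GstPad *srcpad = gst_element_get_static_pad (src_bin, "ghostsrc");
    GstPad *mixpad = gst_element_get_request_pad (mixer, "sink_%u");
    if (gst_pad_link (srcpad, mixpad) != GST_PAD_LINK_OK)
        g_printerr ("link to mixer failed\n");
    gst_object_unref (srcpad);
    gst_object_unref (mixpad);
    gst_element_sync_state_with_parent (src_bin); /* bring the bin up to the pipeline state last */
}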
My code is below:
#include <gst/gst.h>
//#include "pch.h"
#include <windows.h>
#include <stdio.h>
GMainLoop* mainLoop;
GstElement *mainPipeline;
GstPadLinkReturn link_to_mixer(GstPad* binPad, GstElement* mix);
GstPad* retrieve_ghost_pad(GstElement* bin, GstElement* elem);
typedef struct _elemStruct
{
GstElement *micSource, *micSourceQueue, *soundCardSrc, *soundCardSrcQueue, *micSrcRate, *micRateQueue, *soundCardRate, *soundCardRateQueue, *audioMixer, *audioMixerQueue;
GstElement* audioConverter, *audioConverterQueue, *audioEncoder, *audioEncoderQueue, *avMuxer, *gdiGrabber, *videoConverter, *x264encoder;
GstElement* muxerQueue, *fileSinker, *gdiGrabberQueue, *videoConverterQueue, *x264encoderQueue;
GstCaps *caps;
GstElement* message;
GstStateChangeReturn stateRet;
GstElement *micBin, *soundCardBin, *screenBin, *audioBin;
GstPad *micMixPad, *soundCardMixPad, *audioMuxPad, *videoMuxPad;
GstBus* mainBus;
GstStateChangeReturn ret;
GstMessage* msg;
guint bus_watch_id;
GstElement* soundCardTempSink;
}elemStruct;
BOOL WINAPI CtrlHandler(DWORD fdwCtrlType)
{
switch (fdwCtrlType)
{
// Handle the CTRL-C signal.
case CTRL_C_EVENT:
printf("Ctrl-C event\n\n");
Beep(750, 300);
return TRUE;
// CTRL-CLOSE: confirm that the user wants to exit.
case CTRL_CLOSE_EVENT:
Beep(600, 200);
printf("Ctrl-Close event\n\n");
return TRUE;
// Pass other signals to the next handler.
case CTRL_BREAK_EVENT:
Beep(900, 200);
printf("Ctrl-Break event\n\n");
return FALSE;
case CTRL_LOGOFF_EVENT:
Beep(1000, 200);
printf("Ctrl-Logoff event\n\n");
return FALSE;
case CTRL_SHUTDOWN_EVENT:
Beep(750, 500);
printf("Ctrl-Shutdown event\n\n");
return FALSE;
default:
return FALSE;
}
}
void addsoundsrc_toMainline(GstPadProbeInfo* info, GstElement* bin)
{
// we got data , add pipeline to audiomixer
// add bin to audiomixer
// get bin src pad
// call retrieve ghostsrc function
//retrieve_ghost_pad()
GstElement* queue = gst_bin_get_by_name(GST_BIN(bin), "sound_card_source_queue");
GstPad* mixpad = retrieve_ghost_pad(bin, queue);
//link_to_mixer(mixpad, )
}
GstPadProbeReturn soundCardProbe(GstPad* pad, GstPadProbeInfo* info, gpointer data)
{
//GstBuffer* buffer = gst_pad_probe_info_get_buffer(info);
GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
elemStruct* mainElem = (elemStruct*)data;
g_print("received data in the soundcard probe ");
//GstElement* bin = mainElem->soundCardBin;
//bool add = gst_bin_add(GST_BIN(mainElem->audioBin), mainElem->soundCardBin);
//gst_element_sync_state_with_parent(mainElem->soundCardBin);
//GstElement* queue = gst_bin_get_by_name((GST_BIN(bin)), "sound_card_source_queue");
//GstPad* mixpad = retrieve_ghost_pad(bin, mainElem->soundCardSrcQueue);
//GstPad* mixPad = gst_element_get_static_pad(mainElem->soundCardSrcQueue, "sink");
//link_to_mixer(mixPad, mainElem->audioMixer);
//addsoundsrc_toMainline(info, bin);
return GST_PAD_PROBE_PASS;
}
void set_queue_property(GstElement* _queue)
{
g_object_set(G_OBJECT(_queue), "max-size-buffers", 1000, "max-size-time", (guint64) 1000000000000, NULL); /* max-size-time is a guint64 */
}
GstPadLinkReturn link_to_mixer(GstPad* binPad, GstElement* mix)
{
GstPad* mixerPad;
gchar* binPadName, *mixerPadName;
mixerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
//mixerPad = gst_element_get_request_pad(mix, "sink_%u");
binPadName = gst_pad_get_name(binPad);
mixerPadName = gst_pad_get_name(mixerPad);
GstPadLinkReturn retVal = gst_pad_link(binPad, mixerPad); // check if successful
g_print(" a new link is created with %s and %s pads\n", binPadName, mixerPadName);
g_free(binPadName);
g_free(mixerPadName);
//gst_object_unref(binPad);
gst_object_unref(mixerPad);
//gst_element_release_request_pad(mix, mixerPad);
return retVal;
}
GstPadLinkReturn audio_link_to_muxer(GstPad* binPad, GstElement* mix)
{
GstPad* muxerPad;
gchar* binPadName, *muxerPadName;
//mixerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
muxerPad = gst_element_get_request_pad(mix, "audio_%u");
binPadName = gst_pad_get_name(binPad);
muxerPadName = gst_pad_get_name(muxerPad);
GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad); // check if successful
g_print(" a new link is created with %s and %s pads\n", binPadName, muxerPadName);
g_free(binPadName);
g_free(muxerPadName);
//gst_object_unref(binPad);
//gst_object_unref(mixerPad);
gst_element_release_request_pad(mix, muxerPad);
return retVal;
}
GstPadLinkReturn video_link_to_muxer(GstPad* binPad, GstElement* mix)
{
GstPad* muxerPad;
gchar* binPadName, *muxerPadName;
//mixerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
muxerPad = gst_element_get_request_pad(mix, "video_%u");
binPadName = gst_pad_get_name(binPad);
muxerPadName = gst_pad_get_name(muxerPad);
GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad); // check if successful
g_print(" a new link is created with %s and %s pads\n", binPadName, muxerPadName);
g_free(binPadName);
g_free(muxerPadName);
//gst_object_unref(binPad);
//gst_object_unref(mixerPad);
gst_element_release_request_pad(mix, muxerPad);
return retVal;
}
GstPadLinkReturn link_to_mpeg_muxer(GstPad* binPad, GstElement* mix)
{
GstPad* muxerPad;
gchar* binPadName, *muxerPadName;
muxerPad = gst_element_get_compatible_pad(mix, binPad, NULL);
//muxerPad = gst_element_get_request_pad(mix, "sink_%d");
binPadName = gst_pad_get_name(binPad);
muxerPadName = gst_pad_get_name(muxerPad);
GstPadLinkReturn retVal = gst_pad_link(binPad, muxerPad); // check if successful
g_print(" a new link is created with %s and %s pads\n", binPadName, muxerPadName);
g_free(binPadName);
g_free(muxerPadName);
//gst_object_unref(binPad);
gst_object_unref(muxerPad);
//gst_element_release_request_pad(mix, muxerPad);
return retVal;
}
GstPad* retrieve_ghost_pad(GstElement* bin, GstElement* elem)
{
GstPad* elemPad = gst_element_get_static_pad(elem, "src");
GstPad* ghost = gst_ghost_pad_new("ghostsrc", elemPad);
gst_element_add_pad(bin, ghost);
gst_object_unref(elemPad);
return ghost;
}
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *)data;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
{
g_print("End of stream\n");
g_main_loop_quit(loop);
break;
}
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
case GST_MESSAGE_STATE_CHANGED:
{
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
g_print("Element %s changed state from %s to %s.\n",
GST_OBJECT_NAME(msg->src),
gst_element_state_get_name(old_state),
gst_element_state_get_name(new_state));
//if (new_state == GST_STATE_PAUSED)
//{
// gst_element_set_state(mainPipeline, GST_STATE_NULL);
//}
break;
}
break;
default:
break;
}
return TRUE;
}
int main(int argc, char** argv)
{
//gst - launch - 1.0.exe wasapisrc loopback = true\
// ! audiorate ! queue ! mix. wasapisrc low-latency=true \
// ! audiorate ! queue ! mix. audiomixer name=mix ! queue ! audioconvert \
// ! queue ! avenc_aac ! queue ! muxer. gdiscreencapsrc ! videoconvert \
// ! x264enc ! mpegtsmux name = muxer !queue ! filesink location=muxin.mp4 sync=false
elemStruct* mainStruct = new elemStruct();
if (!gst_init_check(&argc, &argv, NULL))
{
g_printerr("couldn't initialize gstreamer\n");
return -1;
}
mainLoop = g_main_loop_new(NULL, FALSE);
if ((mainPipeline = gst_pipeline_new("main_pipeline")) == NULL)
{
}
mainStruct->micSource = gst_element_factory_make("wasapisrc", "mic_source");
mainStruct->soundCardSrc = gst_element_factory_make("wasapisrc", "sound_card_source");
mainStruct->gdiGrabber = gst_element_factory_make("dx9screencapsrc", "dx9_screen_capture_source");
mainStruct->micSourceQueue = gst_element_factory_make("queue", "mic_source_queue_elem");
mainStruct->soundCardSrcQueue = gst_element_factory_make("queue", "sound_card_source_queue");
mainStruct->micSrcRate = gst_element_factory_make("audiorate", "mic_audio_rate_elem");
mainStruct->soundCardRate = gst_element_factory_make("audiorate", "soundCard_audiorate_elem");
mainStruct->micRateQueue = gst_element_factory_make("queue", "mic_audiorate_queue");
mainStruct->soundCardRateQueue = gst_element_factory_make("queue", "soundCard_audiorate_queue");
mainStruct->audioMixer = gst_element_factory_make("audiomixer", "audio_mixer_elem");
mainStruct->audioMixerQueue = gst_element_factory_make("queue", "audio_mixer_queue_elem");
mainStruct->soundCardTempSink = gst_element_factory_make("autoaudiosink", "soundcard_temp_sink_elem");
mainStruct->audioEncoder = gst_element_factory_make("avenc_aac", "audio_encoder_elem");
mainStruct->audioEncoderQueue = gst_element_factory_make("queue", "audio_encoder_queue_elem");
mainStruct->audioConverter = gst_element_factory_make("audioconvert", "audio_convert_elem");
mainStruct->audioConverterQueue = gst_element_factory_make("queue", "audio_convert_queue_elem");
mainStruct->gdiGrabberQueue = gst_element_factory_make("queue", "gdi_grabber_queue_elem");
mainStruct->gdiGrabber = gst_element_factory_make("dx9screencapsrc", "gdi_grabber_elem");
mainStruct->videoConverterQueue = gst_element_factory_make("queue", "videoconvert_queue_elem");
mainStruct->x264encoderQueue = gst_element_factory_make("queue", "x264encoder_queue_elem");
mainStruct->videoConverter = gst_element_factory_make("videoconvert", "videoconvert_elem");
mainStruct->x264encoder = gst_element_factory_make("x264enc", "x264enc_elem");
mainStruct->avMuxer = gst_element_factory_make("mpegtsmux", "mp4_muxer_elem");
//if ((avMuxer = gst_element_factory_make("mpegtsmux", "mp4_muxer_elem")) == NULL)
mainStruct->fileSinker = gst_element_factory_make("filesink", "filesink_elem");
// set up all the sources
g_object_set(G_OBJECT(mainStruct->micSource), "do-timestamp", true, NULL);
g_object_set(G_OBJECT(mainStruct->soundCardSrc), "do-timestamp", true, "loopback", true, NULL);
g_object_set(G_OBJECT(mainStruct->gdiGrabber), "do-timestamp", true, "cursor", true, NULL);
g_object_set(G_OBJECT(mainStruct->x264encoder), "pass", 17, NULL);
g_object_set(G_OBJECT(mainStruct->fileSinker), "location", "sani_1486.mp4", "sync", false, NULL);
// set up all the queues
set_queue_property(mainStruct->micSourceQueue);
set_queue_property(mainStruct->soundCardSrcQueue);
set_queue_property(mainStruct->audioMixerQueue);
set_queue_property(mainStruct->audioEncoderQueue);
set_queue_property(mainStruct->gdiGrabberQueue);
set_queue_property(mainStruct->videoConverterQueue);
set_queue_property(mainStruct->x264encoderQueue);
// add the src elements to each src bin
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->micSource, mainStruct->micSourceQueue, NULL);
mainStruct->soundCardBin = gst_bin_new("sound_card_bin");
gst_bin_add_many(GST_BIN(mainStruct->soundCardBin), mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue, NULL);
gst_element_link_many(mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue,NULL);
GstPad* soundSourceprober = gst_element_get_static_pad(mainStruct->soundCardSrc, "src");
gst_pad_add_probe(soundSourceprober, GST_PAD_PROBE_TYPE_BUFFER, soundCardProbe, mainStruct, NULL); /* pass the struct pointer itself, not its address */
gst_element_set_state(mainStruct->soundCardBin, GST_STATE_PLAYING);
// link elements in each source bin
gst_element_link(mainStruct->micSource, mainStruct->micSourceQueue);
//gst_element_link_many(mainStruct->soundCardSrc, mainStruct->soundCardSrcQueue, NULL);
// put this two bin in audiobin, we will connect audiobin to screenBin later
gst_bin_add_many(GST_BIN(mainPipeline),mainStruct->audioMixer, mainStruct->audioMixerQueue, mainStruct->audioEncoder, mainStruct->audioEncoderQueue, NULL);
//GstStateChangeReturn ret = gst_element_set_state(mainStruct->soundCardSrc, GST_STATE_PLAYING);
//GstStateChangeReturn retu = gst_element_get_state(mainStruct->soundCardSrc);
mainStruct->micMixPad = gst_element_get_static_pad(mainStruct->micSourceQueue, "src");
link_to_mixer(mainStruct->micMixPad, mainStruct->audioMixer);
//mainStruct->soundCardMixPad = gst_element_get_static_pad(mainStruct->soundCardSrcQueue, "src");
//link_to_mixer(mainStruct->soundCardMixPad, mainStruct->audioMixer);
bool one_ = gst_element_link_many(mainStruct->audioMixer, mainStruct->audioMixerQueue, mainStruct->audioEncoder, mainStruct->audioEncoderQueue, NULL);
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->gdiGrabber, mainStruct->gdiGrabberQueue, mainStruct->videoConverterQueue, mainStruct->videoConverter, mainStruct->x264encoder, mainStruct->x264encoderQueue, NULL);
// so add this element , with main bin
gst_element_link_many(mainStruct->gdiGrabber, mainStruct->gdiGrabberQueue, mainStruct->videoConverter, mainStruct->videoConverterQueue, mainStruct->x264encoder, mainStruct->x264encoderQueue, NULL);
//link_to_mixer(videoMuxPad, avMuxer);
mainStruct->videoMuxPad = gst_element_get_static_pad(mainStruct->x264encoderQueue, "src");
mainStruct->audioMuxPad = gst_element_get_static_pad(mainStruct->audioEncoderQueue, "src");
// add all the bin and muxer and filesink to main pipeline bin
gst_bin_add_many(GST_BIN(mainPipeline), mainStruct->avMuxer, mainStruct->fileSinker, NULL);
link_to_mpeg_muxer(mainStruct->videoMuxPad, mainStruct->avMuxer);
link_to_mpeg_muxer(mainStruct->audioMuxPad, mainStruct->avMuxer);
gst_element_link(mainStruct->avMuxer, mainStruct->fileSinker);
//gst_element_link(videoMuxPad, avMuxer);
/* Start playing the pipeline */
mainStruct->ret = gst_element_set_state(mainPipeline, GST_STATE_PLAYING);
// TODO , deal with ret
mainStruct->mainBus = gst_element_get_bus(mainPipeline);
mainStruct->bus_watch_id = gst_bus_add_watch(mainStruct->mainBus, bus_call, mainLoop);
gst_object_unref(mainStruct->mainBus);
// msg = gst_bus_timed_pop_filtered(mainBus, GST_CLOCK_TIME_NONE, GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
g_main_loop_run(mainLoop);
gst_element_set_state(mainPipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(mainPipeline));
g_source_remove(mainStruct->bus_watch_id);
g_main_loop_unref(mainLoop);
//g_main_loop_quit(mainLoop);
return 0;
}
This code prints "Timeout received from udpsrc" each second. The videoconvert element is commented out of the pipeline. If I uncomment it then the messages stop printing.
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`
#include <gst/gst.h>
#include <cstdlib>
#include <cstdio>
struct gstreamer_data {
GstElement* pipeline;
GstElement* udp_source;
GstElement* rtp_decoder;
GstElement* video_decoder;
GstElement* video_converter;
GstElement* fake_sink;
GMainLoop* main_loop;
};
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
(void) bus;
GError* err;
gchar* debug_info;
gst_message_parse_error(message, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
}
static void element_callback(GstBus* bus, GstMessage* message, gpointer data) {
const GstStructure* st = gst_message_get_structure(message);
if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
printf("Timeout received from udpsrc\n");
}
}
}
int main() {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
gst_init(NULL, NULL);
data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
g_object_set(G_OBJECT(data.udp_source),
"port", 5000,
"caps", gst_caps_new_empty_simple("application/x-rtp"),
"timeout", 1000000000,
NULL);
data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");
data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");
data.video_converter = gst_element_factory_make("videoconvert", "video_converter");
data.fake_sink = gst_element_factory_make("fakesink", "fake_sink");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.udp_source ||
!data.rtp_decoder ||
!data.video_decoder ||
!data.video_converter ||
!data.fake_sink
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL);
if (gst_element_link_many(
data.udp_source,
data.rtp_decoder,
data.video_decoder,
//data.video_converter,
data.fake_sink,
NULL) != TRUE)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
return 0;
}
I have tried setting the debug level higher, but I'm not seeing anything to account for it. Is there something special about the videoconvert element?
Maybe you were sending data to that udpsrc after all?
I have confirmed that when I uncomment the two lines, I do get the log messages as expected.
I was trying to develop an application for the pipeline:
gst-launch-1.0 rtspsrc location="rtsp://192.168.3.30:8554/rajvi" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176, height=144 ! ximagesink
Following is the code which I have implemented:
#include <gst/gst.h>
static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
gchar *name;
name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name);
GstCaps * p_caps = gst_pad_get_pad_template_caps (pad);
gchar * description = gst_caps_to_string(p_caps);
g_free(description);
gst_caps_unref(p_caps);
GstElement *depay = GST_ELEMENT(data);
if(gst_element_link_pads(element, name, depay, "sink") == 0)
{
g_print("cb_new_rtspsrc_pad : failed to link elements \n");
}
g_free(name);
}
int main(int argc, char *argv[]) {
GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
*audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
GstPad *sinkpad,*ghost_sinkpad;
gboolean link_ok;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("rtsp-pipeline");
source = gst_element_factory_make ("rtspsrc", "source");
/*audio bin*/
audioQueue = gst_element_factory_make ("queue", "audio-queue");
audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make ("aacparse", "audio-parser");
audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make ("audioconvert", "aconv");
audioResample = gst_element_factory_make ("audioresample", "audio-resample");
audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
{
g_printerr("Cannot create audio elements \n");
return 0;
}
g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
g_object_set(source, "latency", 0, NULL);
g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), audioDepay);
gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
g_printerr("Error linking fields ...1 \n");
return 0;
}
video = gst_bin_new ("videobin");
videoQueue = gst_element_factory_make ("queue", "video-queue");
videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make ("h264parse", "video-parser");
videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
g_printerr("Cannot create video elements \n");
return 0;
}
gst_bin_add_many(GST_BIN(video),videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
videoSink, NULL);
/* set property value */
link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
sinkpad = gst_element_get_static_pad (videoConvert, "sink");
ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
gst_pad_set_active (ghost_sinkpad, TRUE);
gst_element_add_pad (video, ghost_sinkpad);
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
{
g_printerr("Error linking fields... 2 \n");
return 0;
}
gst_bin_add_many (GST_BIN(pipeline), video,NULL);
/* Start playing */
gst_element_set_state ( pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
I am getting an error when linking the audio and video bins into the pipeline.
If you put the video and audio all together in the pipeline bin then you can do it. Figure out what your caps are for the video and audio and you should be able to link them.
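To see what the caps actually are, you can print them from the pad-added callback and then match on a substring such as "video" or "audio"; the bracketed placeholders in the snippet below stand for those substrings. A small sketch:
/* Print a new pad's caps; for rtspsrc they typically look like
 * "application/x-rtp, media=(string)video, ..." (an example, not guaranteed). */
static void print_pad_caps (GstPad *pad)
{
    GstCaps *caps = gst_pad_get_current_caps (pad);
    if (caps == NULL)
        caps = gst_pad_query_caps (pad, NULL);
    gchar *s = gst_caps_to_string (caps);
    g_print ("new pad caps: %s\n", s);
    g_free (s);
    gst_caps_unref (caps);
}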
#include <gst/gst.h>
#include <iostream>
#include <cstring>
// ----------------------------------
// pad-added signal
// ----------------------------------
static void onPadAdded(GstElement* element, GstPad* pad, gpointer user_data)
{
gchar *name;
GstCaps * p_caps;
GstElement* nextElement = NULL; // initialize so the link step below is skipped if no branch matches
GstElement* pipeline = (GstElement*)user_data;
name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name);
p_caps = gst_pad_get_pad_template_caps(pad);
if (strstr(name, "[CAPS FOR VIDEO CONTAIN]") != NULL)
{
std::cout << std::endl << "------------------------ Video -------------------------------" << std::endl;
nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-depayer");
}
else if (strstr(name, "[CAPS FOR AUDIO CONTAIN]") != NULL)
{
std::cout << std::endl << "------------------------ Audio -------------------------------" << std::endl;
nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-depayer");
}
if (nextElement != NULL)
{
if (!gst_element_link_filtered(element, nextElement, p_caps))
//if (!gst_element_link_pads_filtered(element, name, nextElement, "sink", p_caps))
{
std::cout << std::endl << "Failed to link video element to src to sink" << std::endl;
}
gst_object_unref(nextElement);
}
g_free(name);
gst_caps_unref(p_caps);
}
// ----------------------------------
// main
// ----------------------------------
int main(int argc, char *argv[])
{
GstElement *source, *audio, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
*audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean link_ok;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init(&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("rtsp-pipeline");
source = gst_element_factory_make("rtspsrc", "source");
/*audio bin*/
audioQueue = gst_element_factory_make("queue", "audio-queue");
audioDepay = gst_element_factory_make("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make("aacparse", "audio-parser");
audioDecode = gst_element_factory_make("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make("audioconvert", "aconv");
audioResample = gst_element_factory_make("audioresample", "audio-resample");
audioSink = gst_element_factory_make("autoaudiosink", "audiosink");
if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
{
g_printerr("Cannot create audio elements \n");
return 0;
}
g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
g_object_set(source, "latency", 0, NULL);
g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), pipeline);
gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
g_printerr("Error linking fields ...1 \n");
return 0;
}
videoQueue = gst_element_factory_make("queue", "video-queue");
videoDepay = gst_element_factory_make("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make("h264parse", "video-parser");
videoDecode = gst_element_factory_make("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
g_printerr("Cannot create video elements \n");
return 0;
}
gst_bin_add_many(GST_BIN(pipeline), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
videoSink, NULL);
/* set property value */
link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
gst_caps_unref(capsFilter);
if (!link_ok) {
g_warning("Failed to link element1 and element2!");
}
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
{
g_printerr("Error linking fields... 2 \n");
return 0;
}
/* Start playing */
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus(pipeline);
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,(GstMessageType)( GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Free resources */
if (msg != NULL)
gst_message_unref(msg);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
return 0;
}