Gstreamer audiofirfilter - c++

I'm trying to use the element audiofirfilter in a gstreamer pipeline. For now without luck.
I searched the docs and the mailinglist for examples, but there is only one that, unfortunately, I can't compile due to some missing pieces (I'm on an embedded system).
My pipeline is
if (data.pipeline == NULL) {
data.pipeline = gst_pipeline_new ("pipeline");
data.fakesrc = gst_element_factory_make("fakesrc", NULL);
data.capsfilter = gst_element_factory_make("capsfilter", NULL);
data.audioconvert = gst_element_factory_make("audioconvert", NULL);
data.audiofirfilter = gst_element_factory_make("audiofirfilter", NULL);
data.alsasink = gst_element_factory_make("alsasink", NULL);
gst_bin_add_many (GST_BIN (data.pipeline), data.fakesrc, data.capsfilter, data.audioconvert, data.audiofirfilter, data.alsasink, NULL);
if (!gst_element_link_many (data.fakesrc, data.capsfilter, data.audioconvert, data.audiofirfilter, data.alsasink, NULL) ) {
qDebug() << "Error: not all elements could be linked!";
return;
}
GstCaps* caps = gst_caps_new_simple("audio/x-raw",
"format", G_TYPE_STRING, "S16LE",
"rate", G_TYPE_INT, SAMPLING_FREQUENCY,
"channels", G_TYPE_INT,2,
"layout", G_TYPE_STRING, "interleaved",
NULL);
g_object_set(G_OBJECT(data.capsfilter), "caps", caps, NULL);
g_object_set (G_OBJECT (data.fakesrc),
"sync", TRUE,
"signal-handoffs", TRUE,
"sizemax",BUFFER_SIZE,
"sizetype",2,NULL);
gdouble filter_kernel[16] = {1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
GValueArray *va;
va = g_value_array_new (1);
GValue v = { 0, };
g_value_init (&v, G_TYPE_DOUBLE);
for (int i = 0; i < 16; i++) {
g_value_set_double (&v, filter_kernel[i]);
g_value_array_append (va, &v);
g_value_reset (&v);
}
g_object_set (G_OBJECT (data.audiofirfilter), "kernel", va, NULL);
g_object_set (G_OBJECT (data.audiofirfilter), "latency", (gint64) (16 / 2), NULL);
g_value_array_free (va);
g_signal_connect (data.fakesrc, "handoff", G_CALLBACK (SourceHandoffCallback), /*&data*/ this);
GstBus *bus;
bus = gst_pipeline_get_bus (GST_PIPELINE(data.pipeline));
gst_object_unref (bus);
}
I only want to implement an unitary impulse response for now.
The pipeline won't play at all. It is a stereo pipeline.
Does anyone has a working example of an application of audiofirfilter that doesn't involve fft? the inverse-fft?
Thanks

I tried a simpler pipeline, and this works:
gst_init (NULL,NULL);
cFusionDrumsPlayerData data;
data.pipeline = gst_pipeline_new ("pipeline");
data.src = gst_element_factory_make("audiotestsrc", NULL);
data.audiofirfilter = gst_element_factory_make("audiofirfilter", NULL);
data.sink = gst_element_factory_make("autoaudiosink", NULL);
gst_bin_add_many (GST_BIN (data.pipeline), data.src, data.audiofirfilter, data.sink, NULL);
if (!gst_element_link_many (data.src, data.audiofirfilter, data.sink, NULL) ) {
return;
}
gdouble filter_kernel[16] = {0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
GValueArray *va;
va = g_value_array_new (1);
GValue v = { 0, };
g_value_init (&v, G_TYPE_DOUBLE);
for (int i = 0; i < 16; i++) {
g_value_set_double (&v, filter_kernel[i]);
g_value_array_append (va, &v);
g_value_reset (&v);
}
g_object_set (G_OBJECT (data.audiofirfilter), "kernel", va, NULL);
g_object_set (G_OBJECT (data.audiofirfilter), "latency", (gint64) (16 / 2), NULL);
g_value_array_free (va);
gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
I think that the problem can be the source (fakesrc)

Related

Properly close the pipeline to save image of udpsrc (currently my image is empty)

I would like to save one image of my updsrc. When the user click on a button the code bellow is running. But when I look at my image, it is empty. I try a lot of "way" to stop the pipeline but I think that I did not closed the pipeline properly.
Does anyone have any idea ?
GstElement* snappipe;
GError* error = NULL;
GstElement* source;
GstElement* filesink;
GstCaps* caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"payload", G_TYPE_INT, 96,
"encoding-name", G_TYPE_STRING, "H264",
NULL);
m_strPathNameSave += CreateFileName("png");
snappipe = gst_parse_launch("udpsrc name=source num-buffers=1 !rtph264depay !h264parse !avdec_h264 !autovideoconvert ! pngenc ! filesink name=mysink", &error);
if (!snappipe) {
g_print("Parse error: %s\n", error->message);
exit(1);
}
filesink = gst_bin_get_by_name(GST_BIN(snappipe), "mysink");
g_object_set(filesink, "location", m_strPathNameSave.c_str(), NULL);
source = gst_bin_get_by_name(GST_BIN(snappipe), "source");
g_object_set(G_OBJECT(source), "caps", caps, NULL);
g_object_set(G_OBJECT(source), "port", m_port, NULL);
gst_element_set_state(snappipe, GST_STATE_PLAYING);
GstBus* bus = gst_element_get_bus(snappipe);
gst_object_unref(bus);
Sleep(10000);
gst_element_set_state(snappipe, GST_STATE_NULL);
gst_object_unref(snappipe);
I solve the problem like that :
std::string strPathImage = "\\image.png";
GstCaps* caps;
GstSample* from_sample, * to_sample;
GError* err = NULL;
GstBuffer* buf;
GstMapInfo map_info;
g_object_get((*ptrstats).sink, "last-sample", &from_sample, NULL);
if (from_sample == NULL) {
GST_ERROR("Error getting last sample form sink");
return;
}
caps = gst_caps_from_string("image/png");
to_sample = gst_video_convert_sample(from_sample, caps, GST_CLOCK_TIME_NONE, &err);
gst_caps_unref(caps);
gst_sample_unref(from_sample);
if (to_sample == NULL && err) {
GST_ERROR("Error converting frame: %s", err->message);
g_error_free(err);
return;
}
buf = gst_sample_get_buffer(to_sample);
if (gst_buffer_map(buf, &map_info, GST_MAP_READ)) {
if (!g_file_set_contents(strPathImage.c_str(), (const char*)map_info.data,
map_info.size, &err)) {
GST_WARNING("Could not save thumbnail: %s", err->message);
g_error_free(err);
}
}
gst_sample_unref(to_sample);
gst_buffer_unmap(buf, &map_info);

gstreamer pipeline from cam to file C code ends up with empty output file

I have an USB camera. I have working terminal commands to record or display fullHD video and to save one 4k image. I would like to handle it all via C++ app. If we will concentrate on the video-saving:
gst-launch-1.0 v4l2src device=/dev/video0 num-buffers=900! image/jpeg, width=1920, height=1080, io-mode=4 ! imxvpudec ! imxvpuenc_mjpeg ! avimux ! filesink location=/mnt/ssd/test.avi
will save 900frames (aka 30s) of video. I would like to have C++ code to record indefinetly (in future maybe in hour-long segments) until I (the app) tell it to end.
I came up with
struct {
GstElement *pipeline_sink, *source, *appsink;
GstElement *pipeline_src, *appsrc, *decoder, *mux, *sink, *encoder;
} usbCam::mGstData;
int usbCam::gstInit(){
GstCaps *caps;
GstStateChangeReturn ret;
// Initialize GStreamer
if (!gst_is_initialized()) {
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
// Create the elements
mGstData.source = gst_element_factory_make ("v4l2src", "source");
g_object_set (mGstData.source, "device", "/dev/video0", NULL);
mGstData.pipeline_sink = gst_pipeline_new ("pipeline_sink");
caps = gst_caps_new_any();
gst_app_sink_set_caps(GST_APP_SINK(mGstData.appsink), caps);
gst_caps_unref (caps);
gst_app_sink_set_emit_signals(GST_APP_SINK(mGstData.appsink), true);
// Build the pipeline
gst_bin_add_many (GST_BIN (mGstData.pipeline_sink), mGstData.source, mGstData.appsink, NULL);
if (gst_element_link_many(mGstData.source, mGstData.appsink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (mGstData.pipeline_sink);
return -1;
}
return 0;
}
int usbCam::videoStart(){
GstCaps *caps;
GstStateChangeReturn ret;
if (!mGstData.pipeline_sink || !mGstData.source) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
mGstData.appsrc = gst_element_factory_make ("appsrc", "appsrc");
mGstData.decoder = gst_element_factory_make ("imxvpudec", "transform_enc");
mGstData.mux = gst_element_factory_make ("avimux", "avimux");
mGstData.sink = gst_element_factory_make ("filesink", "sink");
g_object_set (mGstData.sink, "location", "/mnt/ssd/videoTest.avi", NULL);
mGstData.pipeline_src = gst_pipeline_new ("pipeline_src");
if (!mGstData.pipeline_src || !mGstData.appsrc || !mGstData.decoder || !mGstData.mux || !mGstData.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
caps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"io-mode", G_TYPE_INT, 4,
NULL);
gst_app_src_set_caps(GST_APP_SRC(mGstData.appsrc), caps);
gst_caps_unref (caps);
gst_app_src_set_duration(GST_APP_SRC(mGstData.appsrc), GST_TIME_AS_MSECONDS(80));
gst_app_src_set_stream_type(GST_APP_SRC(mGstData.appsrc), GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency(GST_APP_SRC(mGstData.appsrc), -1, 0);
gst_bin_add_many (GST_BIN (mGstData.pipeline_src), mGstData.appsrc, mGstData.decoder, mGstData.sink, NULL);
if (gst_element_link_many(mGstData.appsrc, mGstData.decoder, mGstData.sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (mGstData.pipeline_src);
return -1;
}
ret = gst_element_set_state (mGstData.pipeline_src, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (mGstData.pipeline_src);
return -1;
}
return 0;
}
int usbCam::videoEnd(){
{
gst_app_src_end_of_stream(GST_APP_SRC(mGstData.appsrc));
usleep(500000);
gst_element_set_state (mGstData.pipeline_src, GST_STATE_NULL);
gst_object_unref (mGstData.pipeline_src);
return 0;
}
Now, this code runs. No error in the output, one warning though:
(GLib-GObject-WARNING **: 17:51:34.132: g_object_set_is_valid_property: object class 'GstSplitMuxSink' has no property named 'h}\x9fe h\xe6a_no_\xc1')
.
What actually bothers me is the output file. It is created, but it is an empty file with 0b size. Can anyone point me in the direction of the proper fix?
Edit: Today I came up with two other attempts. The firs one is not that different from the one already posted here. The second gives me pipeline with wrong parameters (different FPS) and I am unable to correctly stop it so that the file have correct EOF.
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
std::string command = "v4l2src device=/dev/video0 ! image/jpeg, width=1920, height=1080, io-mode=4 ! imxvpudec ! imxvpuenc_mjpeg ! avimux ! filesink location = /mnt/ssd/testPipeline.avi";
/* Build the pipeline */
pipeline =
gst_parse_launch
(command.c_str(),
NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GstMessageType(
GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
EDIT2:
OK now my code looks like this:
GstElement *pipeline;
GstElement *tee; //in the future I would like to save video and images AND stream or use thi pipeline data internally.
void gstFail(const gchar* message){
g_printerr(message);
gst_object_unref (pipeline);
return;
}
void videoStart(std::string path){
if (!gst_is_initialized()) {
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
GstCaps *caps;
GstStateChangeReturn ret;
GstElement *source, *muxer, *sink;
source = gst_element_factory_make ("v4l2src", "source");
g_object_set (source, "device", mVideoDevice.toStdString().c_str(), NULL);
muxer = gst_element_factory_make ("avimux", "avimux");
tee = gst_element_factory_make("tee", "tee");
sink = gst_element_factory_make ("filesink", "sink");
g_object_set (sink, "location", path.c_str(), NULL);
pipeline = gst_pipeline_new ("pipeline_src");
if (!pipeline || !source || !muxer || !sink) {
g_printerr ("Not all elements could be created.\n");
return;
}
caps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"io-mode", G_TYPE_INT, 4,
"framerate", GST_TYPE_FRACTION, 30, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1,1,
"interlace-mode", G_TYPE_STRING, "progresive",
NULL);
gst_bin_add_many (GST_BIN (pipeline), source, muxer,tee, sink, NULL);
if (gst_element_link_filtered(source, muxer, caps) != TRUE) {
gst_caps_unref (caps);
gstFail("Elements could not be linked or caps set.\n");
return;
}
gst_caps_unref (caps);
if (gst_element_link_many(muxer,tee, sink, NULL) != TRUE) {
gstFail("Elements could not be linked or caps set.\n");
return;
}
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
gstFail("Unable to set the pipeline to the playing state.\n");
return;
}
return;
}
void videoEnd(void)
{
GstMessage *message = gst_message_new_eos(&pipeline->object);
gst_bus_post(pipeline->bus, message);
/* Free resources */
if (message != NULL)
gst_message_unref (message);
gst_element_change_state(pipeline, GST_STATE_CHANGE_PLAYING_TO_PAUSED);
gst_element_change_state(pipeline, GST_STATE_CHANGE_PAUSED_TO_READY);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
}
void takeImage(std::string path){
GstElement *sink = gst_element_factory_make("multifilesink", "multifilesink");
g_object_set (sink, "location", path.c_str(), NULL);
gst_bin_add_many (GST_BIN (pipeline), sink, NULL);
if (gst_element_link_many(tee, sink, NULL) != TRUE) {
gstFail("Elements could not be linked or caps set.\n");
return;
}
return;
}
This saves the video ALMOST ok (VLC does not display correct lenght. But when I see the video file properties via Nautilus in Ubuntu the correct lenght is displayed and the video is playable). It does not save the pictures.
OK, so here's how I solved it: my initial pipeline is split with tee element into two sinks: the original sink that saves the video and appsink. In the callback functuion for the appsink I create new pipeline and push the frame any time I want to save the image. Basically:
...
int saveSampleFromAppsinkJpeg( GstSample *sample){
if (!shouldSaveImage) {
return -2;
}
if (capturing){
return -3;
}
std::thread([=]{
capturing = true;
GstStateChangeReturn ret;
GstElement *appsrc = gst_element_factory_make ("appsrc", "appsrc");
GstElement *sink = gst_element_factory_make ("multifilesink", "sink");
g_object_set (sink, "location", "some/path", NULL);
GstElement *pipeline_img = gst_pipeline_new ("pipeline_img");
if (!pipeline_img || !appsrc || !sink) {
g_printerr ("Not all elements could be created.\n");
capturing = false;
return -1;
}
gst_app_src_set_caps(GST_APP_SRC(appsrc), caps);
gst_app_src_set_duration(GST_APP_SRC(appsrc), GST_TIME_AS_MSECONDS(80)); // TODO 80
gst_app_src_set_stream_type(GST_APP_SRC(appsrc), GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency(GST_APP_SRC(appsrc), -1, 0);
gst_bin_add_many (GST_BIN (pipeline_img), appsrc, sink, NULL);
if (gst_element_link_many(appsrc, sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline_img);
capturing = false;
return -1;
}
ret = gst_element_set_state (pipeline_img, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline_img);
capturing = false;
return -1;
}
//push the image in the pipeline
GstFlowReturn status = GstFlowReturn::GST_FLOW_OK;
status = gst_app_src_push_sample(GST_APP_SRC(appsrc), sample);
if (status != GstFlowReturn::GST_FLOW_OK) g_printerr ("Sample for saving image not pushed.\n");
status = gst_app_src_end_of_stream(GST_APP_SRC(appsrc));
if (status != GstFlowReturn::GST_FLOW_OK) g_printerr ("EOS for saving image not pushed.\n");
//end the pipeline
usleep(500000); // Important
GstMessage *message = gst_message_new_eos(&pipeline_img->object);
gst_bus_post(pipeline_img->bus, message);
/* Free resources */
if (message != NULL)
gst_message_unref (message);
gst_element_set_state (pipeline_img, GST_STATE_PAUSED);
gst_element_set_state (pipeline_img, GST_STATE_NULL);
gst_object_unref (pipeline_img);
shouldSaveImage = false;
capturing = false;
return 1;
}).detach();
return 1;
}
static GstFlowReturn new_sample_jpeg(GstElement * elt)
{
GstSample *sample;
GstBuffer *buffer;
GstMemory *memory;
GstFlowReturn ret = GST_FLOW_OK;
// get the sample from appsink
sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
buffer = gst_sample_get_buffer (sample);
if (buffer != NULL) {
memory = gst_buffer_get_memory (buffer, 0);
if (memory != NULL) {
//now all data are image data. If image wanted->image save!
if (wantToSave) saveSampleFromAppsinkJpeg(sample);
}
...
}
}
void startVideo(){
if (!gst_is_initialized()) {
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
GstStateChangeReturn ret;
GstElement *source, *muxer, *sink, *queue_rcr, *queue_app, *appsink;
source = gst_element_factory_make ("v4l2src", "source");
g_object_set (source, "device", "/dev/video1", NULL);
muxer = gst_element_factory_make ("avimux", "avimux");
tee = gst_element_factory_make("tee", "tee");
sink = gst_element_factory_make ("filesink", "sink");
queue_rcr = gst_element_factory_make ("queue", "record_queue");
queue_app = gst_element_factory_make ("queue", "app_queue");
appsink = gst_element_factory_make("appsink", "appsink");
g_object_set (sink, "location", path.toStdString().c_str(), NULL);
pipeline = gst_pipeline_new ("pipeline_src");
if (!pipeline || !source || !muxer || !sink || !queue_rcr || !appsink) {
g_printerr ("Not all elements could be created.\n");
return;
}
caps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"io-mode", G_TYPE_INT, 4,
"framerate", GST_TYPE_FRACTION, 30, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1,1,
"interlace-mode", G_TYPE_STRING, "progresive",
NULL);
gst_bin_add_many (GST_BIN (pipeline), source, muxer,tee, sink,queue_rcr, appsink, queue_app, NULL);
if (gst_element_link_filtered(source, tee, caps) != TRUE) {
//failhandling
}
if (gst_element_link_many(tee, queue_rcr, muxer, sink, NULL) != TRUE) {
//failhandling
}
if (gst_element_link_many(tee, queue_app, appsink, NULL) != TRUE) {
//failhandling
}
gst_app_sink_set_emit_signals(GST_APP_SINK(appsink), true);
g_signal_connect (appsink, "new-sample", G_CALLBACK (new_sample_jpeg));
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
//failhandling
}
// Start playing
recording = true;
return;
}

Is there any way to reduce the CPU consumption of GStreamer streaming?

I use GStreamer to implement RTMP streaming, and the CPU usage is as high as 400%. Is there any way to reduce the CPU usage?
The main function of the code is to continuously receive the mat image of OpenCV, put it into appsrc, and finally push the RTMP stream
The code is as follows:
void cb_need_data(GstElement *appsrc, guint unused_size, gpointer user_data)
{
printf("need data!\n");
static GstClockTime timestamp = 0;
GstBuffer *buffer;
guint size;
GstFlowReturn ret;
GstMapInfo map;
size = 700 * 700 * 3;
buffer = gst_buffer_new_allocate(NULL, size, NULL);
gst_buffer_map(buffer, &map, GST_MAP_READ);
while (image.data == NULL)
{
std::this_thread::sleep_for(std::chrono::milliseconds(100));
};
memcpy(map.data, image.data, size);
GST_BUFFER_PTS(buffer) = timestamp;
GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 30);
timestamp += GST_BUFFER_DURATION(buffer);
g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
gst_buffer_unmap(buffer, &map);
gst_buffer_unref(buffer);
if (ret != GST_FLOW_OK)
{
/* something wrong, stop pushing */
g_main_loop_quit(loop);
}
}
void *rtmpPush(void *p)
{
/* init GStreamer */
gst_init(NULL, NULL);
loop = g_main_loop_new(NULL, FALSE);
GstElement *pipeline, *appsrc, *conv, *x264enc, *h264parse, *flvmux, *videosink;
/* setup pipeline */
pipeline = gst_pipeline_new("pipeline");
appsrc = gst_element_factory_make("appsrc", "source");
conv = gst_element_factory_make("videoconvert", "conv");
h264parse = gst_element_factory_make("h264parse", "h264parse");
flvmux = gst_element_factory_make("flvmux", "flvmux");
x264enc = gst_element_factory_make("x264enc", "x264enc");
videosink = gst_element_factory_make("rtmpsink", "videosink");
g_object_set(G_OBJECT(videosink), "location", "rtmp://218.77.60.210:1935/rtmplive/354", NULL);
/* setup */
// g_object_set(G_OBJECT(appsrc),
// "stream-type", 0,
// "format", GST_FORMAT_TIME, NULL);
g_object_set(G_OBJECT(appsrc),
"is-live", 1,
"stream-type", 0,
//"format", GST_FORMAT_TIME, NULL,
"caps",
gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "BGR",
"width", G_TYPE_INT, 700,
"height", G_TYPE_INT, 700,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL),
NULL);
gst_bin_add_many(GST_BIN(pipeline), appsrc, conv, x264enc, flvmux, videosink, NULL);
gst_element_link_many(appsrc, conv, x264enc, flvmux, videosink, NULL);
g_signal_connect(appsrc, "need-data", G_CALLBACK(cb_need_data), NULL);
//g_signal_connect(appsrc, "enough-data", G_CALLBACK(cb_enough_data), NULL);
/* play */
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);
/* clean up */
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
g_main_loop_unref(loop);
while (1)
{
sleep(INT_MAX);
}
}
Is there any other plugin that can replace the existing plugin and reduce CPU usage?
Can anyone help me? Thank you very much

Gstreamer debug pipeline c++

I am trying to transcode a gstreamer bash script to c++ code, y but I am not able to save de debuggin log into a file
This is my code
int main(int argc, char *argv[])
{
YoctoLinuxSystem* hola;
//hola->YoctoLinuxSystem();
CustomData DataTest, DataDvi;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
GMainLoop *loop;
//vector<string> lines = YoctoLinuxSystem::getCmdOutputAsLines("./scripts/get_system_temps.sh");
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/******************************/
/****AJUSTES GSTREAMER TEST****/
/******************************/
DataTest.source = gst_element_factory_make ("videotestsrc", "source");
DataTest.capsfilter = gst_element_factory_make ("capsfilter","caps");
DataTest.sink = gst_element_factory_make ("imxipuvideosink", "sink");
DataTest.pipeline = gst_pipeline_new ("test-pipeline");
if (!DataTest.pipeline || !DataTest.source || !DataTest.capsfilter || !DataTest.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Modify the source's properties */
g_object_set (DataTest.source, "pattern", 0, NULL);
g_object_set(DataTest.capsfilter, "caps", gst_caps_new_simple("video/x-raw", "framerate", GST_TYPE_FRACTION, 25, 1,"width", G_TYPE_INT, 1920, "height", G_TYPE_INT, 1080, "format", G_TYPE_STRING, "RGB", NULL), NULL);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (DataTest.pipeline), DataTest.source, DataTest.capsfilter, DataTest.sink, NULL);
if (gst_element_link (DataTest.source, DataTest.capsfilter) != TRUE) {
g_printerr ("Elements source-caps could not be linked.\n");
gst_object_unref (DataTest.pipeline);
return -1;
}
if (gst_element_link (DataTest.capsfilter, DataTest.sink) != TRUE) {
g_printerr ("Elements caps-sink could not be linked.\n");
gst_object_unref (DataTest.pipeline);
return -1;
}
gst_element_link_many (DataTest.source, DataTest.capsfilter, DataTest.sink, NULL);
/******************************/
/****AJUSTES GSTREAMER DVI****/
/******************************/
DataDvi.source = gst_element_factory_make ("v4l2src", "source");
DataDvi.sink = gst_element_factory_make ("imxipuvideosink", "sink");
DataDvi.pipeline = gst_pipeline_new ("test-pipeline");
if (!DataDvi.pipeline || !DataDvi.source || !DataDvi.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Modify the source's properties */
g_object_set (DataDvi.source, "device", "/dev/video0", NULL);
/* Build the pipeline */
gst_bin_add_many (GST_BIN (DataDvi.pipeline), DataDvi.source, DataDvi.sink, NULL);
if (gst_element_link (DataDvi.source, DataDvi.sink) != TRUE) {
g_printerr ("Elements caps-sink could not be linked.\n");
gst_object_unref (DataDvi.pipeline);
return -1;
}
gst_element_link_many (DataDvi.source, DataDvi.sink, NULL);
GST_DEBUG=2;
ifstream fileread;
// fileread.open("/var/log/data.log");
while(1)
{
ifstream fileread("/var/log/data.log");
if (!fileread.good())
{
/* Start playing */
//g_print ("Now playing: \n");
gst_element_set_state (DataDvi.pipeline, GST_STATE_PAUSED);
ret = gst_element_set_state (DataTest.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (DataTest.pipeline);
return -1;
}
}
else
{
gst_element_set_state (DataTest.pipeline, GST_STATE_PAUSED);
ret = gst_element_set_state (DataDvi.pipeline, GST_STATE_PLAYING);
/*HERE I NEED TO KNOW THE DEBUG OF THE PIPELINE*/
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (DataDvi.pipeline);
return -1;
}
}
}
g_print ("SALE\n");
return 0;
}
I am using gstreamer-1.0 library and I have seen that I need use GST_DEBUG_FILE but I do not know how call these functions from c++.
Thanks for the help!
There is a rich API behind the debugging and logging infrastructure. Take a look at this documentation.
For example, you might set some default values when the environment variables have not been setup.
if (!gst_debug_is_active()) {
gst_debug_set_active(TRUE);
GstDebugLevel dbglevel = gst_debug_get_default_threshold();
if (dbglevel < GST_LEVEL_ERROR) {
dbglevel = GST_LEVEL_ERROR;
gst_debug_set_default_threshold(dbglevel);
}
}
For that, the previous poster correctly advised you about the environment variable, GST_DEBUG_FILE. You could do putenv() before gst_init().
If you want to get fancy, you could can replace the default log function, gst_debug_log_default(). You do this by adding your own via, gst_debug_add_log_function(); then remove the default, gst_debug_remove_log_function(gst_debug_log_default). If you simply wish to change the file, then gst_debug_add_log_function (gst_debug_log_default, log_file, NULL), for some open and ready FILE* log_file.
GST_DEBUG_FILE is environment variable, so it has nothing to do with C++.
You could just use something like
export GST_DEBUG_FILE=~/gst.log
before run your application. Or add something like this to your bash startup script.

Videomixer fails on sources rather than Videotestsrc

I want to play two different local video files at the same time in a single
window. The code below without demux and decoder works fine.
static void play_video(){
GMainLoop *loop;
GstElement *pipeline,*videomixer;
GstElement *src,*sink,*filter,*csp,*videobox;
GstElement *src1,*filter1,*csp1,*videobox1;
GstElement *srcb,*filterb,*cspb,*videoboxb;
GstCaps *filtercaps,*filtercaps1,*filtercapsb;
GstPad *pad,*pad1;
const char pattern = "snow";
loop = g_main_loop_new(NULL, FALSE);
pipeline = gst_pipeline_new("my-pipeline");
src = gst_element_factory_make ("videotestsrc","src");
src1 = gst_element_factory_make ("videotestsrc","src1");
g_object_set (G_OBJECT (src), "pattern", 10, NULL);
filter = gst_element_factory_make("capsfilter","filter");
filter1 = gst_element_factory_make("capsfilter","filter1");
csp = gst_element_factory_make("ffmpegcolorspace","csp");
csp1 = gst_element_factory_make("ffmpegcolorspace","csp1");
/**/
videobox=gst_element_factory_make("videobox","videobox");
g_object_set(videobox,"top",0,"bottom",0,"left",0,"right",0,NULL);
videobox1=gst_element_factory_make("videobox","videobox1");
g_object_set(videobox1,"top",-20,"bottom",0,"left",0,"right",0,NULL);
videomixer=gst_element_factory_make("videomixer","videomixer");
/**/
sink = gst_element_factory_make("xvimagesink","sink");
if(sink == NULL)
sink = gst_element_factory_make("ximagesink","sink");
if(sink == NULL)
g_error("'ximagesink' yaratılamadı.");
gst_bin_add_many(GST_BIN(pipeline),src,filter,videobox,videomixer,csp,sink,
src1,filter1,videobox1,csp1,NULL);
gst_element_link_many(src,filter,csp,videobox,videomixer,NULL);
gst_element_link_many(src1,filter1,csp1,videobox1,videomixer,NULL);
/*
videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=1/1, width=350,
height=250 ! \
textoverlay font-desc="Sans 24" text="CAM2" valign=top halign=left
shaded-background=true ! \
videobox border-alpha=0 top=-200 left=-450 ! mix. \
*/
gst_element_link_many(videomixer,sink,NULL);
filtercaps = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 1024,
"height", G_TYPE_INT, 768,
"framerate", GST_TYPE_FRACTION, 25, 1,
"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
NULL);
filtercaps1 = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 200,
"height", G_TYPE_INT, 500,
"framerate", GST_TYPE_FRACTION, 25, 1,
"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
g_object_set (G_OBJECT (filter1), "caps", filtercaps1, NULL);
gst_caps_unref (filtercaps1);
/*pad = gst_element_get_pad (src, "src");
pad1 = gst_element_get_pad (src1, "src1");
//gst_pad_add_buffer_probe (pad, G_CALLBACK (cb_have_data), NULL);
//gst_pad_add_buffer_probe (pad1, G_CALLBACK (cb_have_data), NULL);
//gst_object_unref (pad);
//gst_object_unref (pad1);*/
/* run */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* wait until it's up and running or failed */
if (gst_element_get_state (pipeline, NULL, NULL, -1) ==
GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
}
g_print ("Running ...\n");
g_main_loop_run (loop);
/* exit */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
}
Problem is when I replace the videotestsrc with filesrc it fails and the
only error message I get is "Could not lookup object NULL on signal destroy
of object window". I'm not an gstreamer expert and my opinion is I am
failing at setting demux and decoder.
static void play_video5(){
GMainLoop *loop;
GstElement *pipeline,*videomixer;
GstElement *src,*sink,*filter,*csp,*videobox;
GstElement *src1,*filter1,*csp1,*videobox1;
GstElement *srcb,*filterb,*cspb,*videoboxb;
GstCaps *filtercaps,*filtercaps1,*filtercapsb;
GstPad *pad,*pad1;
GstElement *demux,*decoder;
const char pattern = "snow";
loop = g_main_loop_new(NULL, FALSE);
pipeline = gst_pipeline_new("my-pipeline");
//Source
src = gst_element_factory_make ("videotestsrc","src");
src1 = gst_element_factory_make ("filesrc","src1");
g_object_set (G_OBJECT (src1), "location", "file:///root/yu.mp4", NULL);
//Demux
demux = gst_element_factory_make ("mpegdemux", "demux");
//Decoder
decoder = gst_element_factory_make ("decodebin", "decoder");
// decoder = gst_element_factory_make ("ffdec_mpeg4","mpeg4-decoder");
//Filter
filter = gst_element_factory_make("capsfilter","filter");
filter1 = gst_element_factory_make("capsfilter","filter1");
//Colorspace
csp = gst_element_factory_make("ffmpegcolorspace","csp");
csp1 = gst_element_factory_make("ffmpegcolorspace","csp1");
//Videobox
videobox=gst_element_factory_make("videobox","videobox");
g_object_set(videobox,"top",0,"bottom",0,"left",0,"right",0,NULL);
videobox1=gst_element_factory_make("videobox","videobox1");
g_object_set(videobox1,"top",-20,"bottom",0,"left",0,"right",0,NULL);
//videomixer
videomixer=gst_element_factory_make("videomixer","videomixer");
//Sink
sink = gst_element_factory_make("xvimagesink","sink");
if(sink == NULL)
sink = gst_element_factory_make("ximagesink","sink");
if(sink == NULL)
g_error("'ximagesink' yaratılamadı.");
//Add to Bin
gst_bin_add_many(GST_BIN(pipeline),src,filter,videobox,videomixer,csp,
src1,decoder,filter1,videobox1,csp1,sink,NULL);
//Link Elements
gst_element_link(src,filter);
gst_element_link(filter,csp);
gst_element_link(csp,videobox);
gst_element_link(videobox, videomixer);
gst_element_link(src1,decoder);
gst_element_link(decoder,filter1);
// gst_element_link(decoder,csp1);
gst_element_link(filter1,csp1);
gst_element_link(csp1,videobox1);
gst_element_link(videobox1, videomixer);
gst_element_link(videomixer,sink);
//Cap definition
filtercaps = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 1024,
"height", G_TYPE_INT, 768,
"framerate", GST_TYPE_FRACTION, 25, 1,
"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
NULL);
filtercaps1 = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 25, 1,
/*"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,*/
NULL);
//Cap to Filter
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
g_object_set (G_OBJECT (filter1), "caps", filtercaps1, NULL);
gst_caps_unref (filtercaps1);
/* run */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* wait until it's up and running or failed */
if (gst_element_get_state (pipeline, NULL, NULL, -1) ==
GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
}
g_print ("Running ...\n");
g_main_loop_run (loop);
/* exit */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
}
Any ideas or corrections are welcome.
Several issues:
filesrc does not take uris, but file-paths
/* wait until it's up and running or failed */ + the code below is not needed, better listen on the bus for the error and warning messages
"Could not lookup object NULL on signal destroy of object window" has nothing to do with gstreamer
the whole videobox business is not needed as the pads of videomixer have xpos, ypos and zorder properties