How to properly release the memory used by a pipeline? - gstreamer

I am developing a program that needs to create and delete a pipeline several times, but memory grows every time I do this.
How to properly release the memory used by a pipeline?
I am using GStreamer 1.12.2 and Ubuntu 14.04.
For example this code:
/* Repeatedly build and tear down a parse-launch pipeline so memory usage
 * can be observed across iterations (leak investigation). The sleeps give
 * an external profiler time to take snapshots. */
int main(int argc, char* argv[]) {
    g_print("gst_init...\n");
    gst_init(NULL, NULL);
    sleep(10); /* baseline snapshot window */
    for (int pp = 0; pp < 10; pp++) {
        g_print("Iter #%d \n", pp);
        GstElement *pipeline;
        GError* err = NULL;
        pipeline = gst_parse_launch(
            " filesrc location=/media/datos/video12.mp4 "
            " ! qtdemux ! avdec_h264 ! videoconvert ! fakesink name=appsink",
            &err);
        if (err) {
            /* Never pass a non-literal as the format string (format-string bug). */
            g_printerr("%s\n", err->message);
            g_error_free(err);
            /* gst_parse_launch() can return a partially built pipeline even
             * when it reports an error; release it to avoid leaking. */
            if (pipeline)
                gst_object_unref(pipeline);
            return -1;
        }
        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        sleep(5);
        /* NULL state releases the elements' internal resources before unref. */
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(GST_OBJECT(pipeline));
        sleep(5);
    }
    gst_deinit();
    g_print("gst_deinit...\n");
    sleep(10);
    return 0;
}

Related

Same audio into gstreamer two results out

I have found that if I send the same OGG/Vorbis audio into a gstreamer 1.0 pipeline within a program twice I get different audio out (similar, but not identical). I have tried unreferencing the pipeline and rebuilding it between uses, but to no avail. Something seems to maintain some sort of state. The second version is the same as other second versions, with the same differences to the first.
I'm looking for reproducable results from a server processing audio, and this is getting in the way.
I'm sorry this is so long. Getting it to reproduce and rebuild the pipeline seemed like a relevant thing to leave in. I have reproduced this with several OGG/Vorbis files (not empty though). Call the file "a.ogg"; running the program will produce "job1.raw" and "job2.raw", which have been different each time.
Thanks for any help,
Richard.
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdlib.h>
#include <glib.h>
#include <gst/gst.h>
/* All state for one decode job: the appsrc-fed pipeline, its elements,
 * the bus, and bookkeeping for the request currently being processed. */
typedef struct _Decoder2Data {
GstElement * appsrc;        /* fed from file reads in main() */
GstElement * decodebin;     /* dynamic pads; linked in _connect_decoder() */
GstElement * audioconvert;
GstElement * audioresample;
GstElement * queue1;        /* actually a capsfilter (see create_and_link) */
GstElement * filesink;      /* writes decoded raw audio */
GstElement * pipeline;
GstBus *bus;                /* ref taken in create_and_link() */
const char* request_id;     /* used to build the output file name */
const char* outdir;         /* NULL disables per-request raw output */
GMainLoop * main_loop;      /* runs on the g_loop_thread() thread */
} Decoder2Data;
/* Forward declarations for the request lifecycle helpers below. */
void start_request(const char* caps_str, Decoder2Data * data);
void process_data(Decoder2Data * obj, char * audio, int audioSize);
void end_request(Decoder2Data* data);
void finish_request(Decoder2Data * data);
int create_pipeline(int argc, char *argv[], Decoder2Data * data);
void closeGstreamer(Decoder2Data * data);
void *g_loop_thread(void *ptr);
/* Begin a new request: reset the appsrc caps and, when an output directory
 * is configured, retarget the filesink at <outdir>/<request_id>.raw.
 * caps_str is currently unused (caps are cleared unconditionally). */
void start_request(const char* caps_str, Decoder2Data * data) {
    g_printerr("Test %s: Starting request\n", data->request_id);
    g_object_set(data->appsrc, "caps", NULL, NULL);
    if (data->outdir) {
        char path[128];
        /* snprintf instead of sprintf: outdir/request_id are caller-supplied
         * and could overflow a fixed 128-byte buffer. */
        snprintf(path, sizeof path, "%s/%s.raw", data->outdir, data->request_id);
        FILE *fp = fopen(path, "w+"); /* probe that the path is writable */
        if (fp != NULL) {
            fclose(fp);
            gst_element_set_state(data->pipeline, GST_STATE_PAUSED);
            /* filesink must be in NULL state before "location" may change */
            gst_element_set_state(data->filesink, GST_STATE_NULL);
            g_object_set(data->filesink, "location", path, NULL);
            gst_element_set_state(data->filesink, GST_STATE_PLAYING);
        } else {
            g_warning("Test %s: Unable to open raw audio file %s.\n", data->request_id, path);
        }
    }
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
    gst_element_set_state(data->filesink, GST_STATE_PLAYING);
    g_printerr("Test Started request\n");
}
/* Push one chunk of encoded audio into the appsrc. */
void process_data(Decoder2Data * obj, char * audio, int audioSize) {
    GstFlowReturn ret;
    GstBuffer * buf = gst_buffer_new_and_alloc(audioSize);
    gst_buffer_fill(buf, 0, audio, audioSize);
    g_signal_emit_by_name (obj->appsrc, "push-buffer", buf, &ret);
    /* The "push-buffer" action signal does NOT take ownership of the buffer
     * (unlike gst_app_src_push_buffer()), so we must unref it here or every
     * chunk pushed is leaked. */
    gst_buffer_unref(buf);
    if(ret != GST_FLOW_OK)
        g_warning("Test Pushing audio resulted in flow state %d\n", ret);
}
/* Tell the appsrc no more data is coming so the pipeline can drain. */
void end_request(Decoder2Data* data) {
    GstFlowReturn flow;
    g_signal_emit_by_name(data->appsrc, "end-of-stream", &flow);
}
/* Create an element of the given factory type, logging on failure.
 * The parameter names were swapped relative to gst_element_factory_make()'s
 * (factoryname, name) signature; all call sites pass identical strings for
 * both, so renaming them is behavior-preserving and fixes the misleading
 * error message. */
GstElement * createElement(const char* factoryName, const char* elementName) {
    GstElement * ret = gst_element_factory_make(factoryName, elementName);
    if (!ret)
        g_printerr ("Test failed to create element of type '%s'\n", factoryName);
    return ret;
}
/* Handler for decodebin's pad-added signal: link the new (audio) source
 * pad to audioconvert's sink pad. */
static void _connect_decoder(GstElement *src, GstPad *pad, Decoder2Data *data) {
    g_debug("Test _connect_decoder\n");
    GstPad * sink_pad = gst_element_get_static_pad (data->audioconvert, "sink");
    GstPadLinkReturn ret = gst_pad_link(pad, sink_pad);
    if (GST_PAD_LINK_FAILED (ret))
        g_printerr("Test Link failed with GstPadLinkError %d.\n", ret);
    else
        g_debug("Test Link succeeded.\n");
    /* gst_element_get_static_pad() returns a new reference; releasing it
     * here fixes a per-stream pad leak. */
    gst_object_unref(sink_pad);
    g_debug("Test Connected audio decoder\n");
}
/* Link src -> target, logging loudly when the pads cannot be connected. */
void gstLink(GstElement * src, GstElement * target) {
    gboolean linked = gst_element_link(src, target);
    if (linked == FALSE)
        g_printerr ("Test ----------------- elements could not be linked.\n");
}
/* Bus "message::eos" handler: the stream drained, so tear down and
 * rebuild the pipeline for the next request. */
static void _on_eos(GstElement *src, GstPad *pad, Decoder2Data *data) {
    g_debug("Test _on_eos\n");
    finish_request(data); /* rebuilds data->pipeline */
}
/* Bus "message::error" handler: report the error and recycle the pipeline. */
static void _on_error(GstElement *src, GstMessage *pad, Decoder2Data *data) {
    g_debug("Test _on_error\n");
    GError *err = NULL;
    gchar *dbg_info = NULL;
    gst_message_parse_error (pad, &err, &dbg_info);
    if (err) {
        /* Removed an unused strlen() result; added a newline for readable logs. */
        g_printerr("ERROR: %s\n", err->message);
        /* Guarded: g_error_free(NULL) emits a GLib warning. */
        g_error_free(err);
    }
    g_free(dbg_info); /* g_free(NULL) is a no-op */
    finish_request(data);
}
/* Build the decode pipeline: appsrc ! decodebin ! audioconvert !
 * audioresample ! capsfilter ! filesink, plus the bus watch. decodebin's
 * source pad appears dynamically and is linked in _connect_decoder(). */
void create_and_link(Decoder2Data * data) {
    data->appsrc = createElement("appsrc", "appsrc");
    data->decodebin = createElement("decodebin", "decodebin");
    data->audioconvert = createElement("audioconvert", "audioconvert");
    data->audioresample = createElement("audioresample", "audioresample");
    data->queue1 = createElement("capsfilter", "capsfilter"); /* misnamed field */
    data->filesink = createElement("filesink", "filesink");
    g_object_set (data->appsrc, "is-live", TRUE, NULL);
    const gchar *caps_str = "audio/x-raw, channels=1, rate=16000, format=S16LE";
    GstCaps * caps = gst_caps_from_string(caps_str);
    g_object_set (data->queue1, "caps", caps, NULL);
    /* g_object_set() takes its own ref on the caps; dropping ours fixes a
     * GstCaps leak on every pipeline rebuild. */
    gst_caps_unref(caps);
    g_object_set (data->filesink, "location", "/dev/null", NULL);
    g_debug("Test Created GStreamer elements");
    data->pipeline = gst_pipeline_new("pipeline");
    if (!data->pipeline) {
        g_printerr ("Test pipe line could not be created.\n");
    }
    /* Add all elements to the pipeline (the bin takes ownership). */
    gst_bin_add_many (GST_BIN (data->pipeline), data->appsrc, data->decodebin, data->audioconvert, data->audioresample, data->queue1, data->filesink, NULL);
    gstLink(data->appsrc, data->decodebin);
    g_signal_connect(data->decodebin, "pad-added", G_CALLBACK (_connect_decoder), data);
    gstLink(data->audioconvert, data->audioresample);
    gstLink(data->audioresample, data->queue1);
    gstLink(data->queue1, data->filesink);
    g_debug("Linked GStreamer elements\n");
    /* Bus: keep a ref so message handlers can be attached; the matching
     * remove_signal_watch/unref happens in finish_request()/closeGstreamer(). */
    data->bus = gst_element_get_bus(data->pipeline);
    gst_bus_add_signal_watch(data->bus);
    gst_bus_enable_sync_message_emission(data->bus);
    /* No clock: run as fast as data is pushed (reproducible offline decode). */
    gst_pipeline_use_clock((GstPipeline*)data->pipeline, (GstClock*)NULL);
    g_signal_connect(data->bus, "message::eos", G_CALLBACK(_on_eos), data);
    g_signal_connect(data->bus, "message::error", G_CALLBACK(_on_error), data);
}
/* End the current request: stop and destroy the pipeline, then build a
 * fresh one ready for the next request. */
void finish_request(Decoder2Data * data) {
    g_printerr("Test finish_request %s\n", data->request_id);
    if (data->outdir) {
        gst_element_set_state(data->filesink, GST_STATE_NULL);
        g_object_set(data->filesink, "location", "/dev/null", NULL);
        gst_element_set_state(data->filesink, GST_STATE_PLAYING);
    }
    /* Setting the pipeline (a GstBin) to NULL propagates NULL to every
     * child, so per-element set_state calls are unnecessary. */
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    /* Release the bus taken in create_and_link(). Without this, every
     * rebuild leaked a bus reference AND its signal-watch GSource — the
     * growing memory the question asks about. */
    gst_bus_remove_signal_watch(data->bus);
    gst_object_unref(data->bus);
    data->bus = NULL;
    gst_object_unref(data->pipeline);
    /* Build a new pipeline for the next request. */
    create_and_link(data);
    gst_element_set_state(data->pipeline, GST_STATE_READY);
    g_printerr("Rebuilt pipeline.");
    g_printerr("Finished request complete.\n");
}
/* One-time setup: initialize GStreamer, seed the request metadata, build
 * the first pipeline and bring it to READY. Always returns 0. */
int create_pipeline(int argc, char *argv[], Decoder2Data * data) {
    g_printerr("Test create_pipeline\n");
    gst_init(&argc, &argv);
    data->request_id = "<undefined>";
    data->outdir = "./"; /* write raw output into the working directory */
    create_and_link(data);
    g_debug("Setting pipeline to READY\n");
    gst_element_set_state(data->pipeline, GST_STATE_READY);
    g_debug("Set pipeline to READY\n");
    return 0;
}
/* Final teardown: stop the pipeline first, then release the bus (including
 * the signal watch added in create_and_link()) and the pipeline itself. */
void closeGstreamer(Decoder2Data * data) {
    gst_element_set_state (data->pipeline, GST_STATE_NULL); /* stop before freeing */
    gst_bus_remove_signal_watch (data->bus); /* undo gst_bus_add_signal_watch() */
    gst_object_unref (data->bus);
    gst_object_unref (data->pipeline);
}
#include <pthread.h>
/* Thread entry point: create and run the GLib main loop that dispatches
 * bus messages. Blocks until g_main_loop_quit() is called from main(). */
void *g_loop_thread(void *ptr) {
    g_debug("Test main loop thread started\n");
    Decoder2Data *ctx = (Decoder2Data*) ptr;
    ctx->main_loop = g_main_loop_new(NULL, FALSE);
    g_debug("Test main loop created, executing g_main_loop_run\n");
    g_main_loop_run(ctx->main_loop); /* blocks here */
    g_debug("Test main loop thread ENDED\n");
    return NULL;
}
/* Decode ./a.ogg twice ("job1", "job2") through the same rebuilt pipeline
 * and write the raw PCM of each run, to compare outputs for determinism. */
int main(int argc, char *argv[]) {
    Decoder2Data data;
    memset (&data, 0, sizeof (data));
    create_pipeline(argc, argv, &data);
    pthread_t thread;
    int ret = pthread_create(&thread, NULL, g_loop_thread, &data);
    if (ret != 0) {
        g_printerr("Test Thread not started");
        return -1;
    }
    usleep(250000); /* crude wait for the loop thread to start */
    g_printerr("Test starting test\n");
    data.request_id = "job1";
    start_request("", &data);
    FILE * file = fopen("./a.ogg", "rb");
    if (!file) { /* previously unchecked: NULL here crashed in fread() */
        g_printerr("Test could not open ./a.ogg\n");
        return -1;
    }
    int size = 86*1024/8/4; /* ~2.7 KiB chunks */
    char buffer[size];
    int n;
    while ((n = fread(buffer, 1, size, file)) > 0) {
        g_printerr("read %d of data\n", n);
        process_data(&data, buffer, n);
    }
    fclose(file);
    g_printerr("finished reading data\n");
    end_request(&data);
    usleep(250000); /* let the EOS propagate */
    finish_request(&data);
    /* Switch to second request and do it again. */
    data.request_id = "job2";
    start_request("", &data);
    file = fopen("./a.ogg", "rb");
    if (!file) {
        g_printerr("Test could not reopen ./a.ogg\n");
        return -1;
    }
    while ((n = fread(buffer, 1, size, file)) > 0) {
        g_printerr("read %d of data\n", n);
        process_data(&data, buffer, n);
    }
    fclose(file);
    g_printerr("finished reading data again\n");
    end_request(&data);
    usleep(250000);
    finish_request(&data);
    g_printerr("waiting for the gstreamer thread to end...\n");
    g_main_loop_quit (data.main_loop);
    pthread_join(thread, NULL);
    g_printerr("Closing\n");
    closeGstreamer(&data);
    g_printerr("Exit OK\n");
    return 0;
}

GStreamer memory leak issue

I have an object that generates videos with GStreamer. Each time I want to generate a new video I create one new object and add frames. After the video is finished, I delete the object, but it looks like GStreamer's internal memory is not released.
After several videos generated all RAM is allocated and linux kills the process.
Why is this happening? How can I solve this issue? Is there any other way to do it?
GVideo.h:
#ifndef GVIDEO_H
#define GVIDEO_H
#include <gst/gst.h>
#include <string>
// Records a fixed-duration video (appsrc -> x264enc -> mp4mux -> filesink).
// One instance drives one recording at a time via startVideo()/addFrame()/
// endVideo().
class GVideo
{
public:
GVideo();
~GVideo();
// Build the pipeline and start recording to `filename` (MP4).
void startVideo(std::string filename);
// Stop the pipeline and release it; safe to call once per recording.
void endVideo();
// Push one raw I420 frame; stops accepting data after _duration seconds.
void addFrame(GstSample* element);
bool isRecording(){return _isRecording;}
bool isDataNeed(){return _dataNeed;}
private:
void setDataNeed(bool dataNeed){_dataNeed = dataNeed;}
protected:
bool _isRecording;      // true between startVideo() and endVideo()
bool _dataNeed;         // false once enough frames were pushed
int _frameRate;         // input framerate fed to appsrc caps
int _duration;          // target length in seconds
GstClockTime _timestamp; // running PTS of the next frame
GstElement *_pipeline;  // NULL when idle
GstElement *_source;    // the appsrc element (owned by _pipeline)
};
#endif //GVIDEO_H
GVideo.cpp
#include "GVideo.h"
#include <gst/app/gstappsrc.h>
#include <iostream>
// Bus watch for the recording pipeline: finalize on EOS and on error.
static gboolean bus_video_call(GstBus* bus, GstMessage* msg, void* user_data)
{
GVideo* video = (GVideo*)user_data;
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
{
std::cout << "VIDEO GST_MESSAGE_EOS" << std::endl;
video->endVideo();
break;
}
case GST_MESSAGE_ERROR:
{
std::cout << "GST_MESSAGE_ERROR" << std::endl;
GError *err = NULL;
gst_message_parse_error(msg, &err, NULL);
// Bug fix: g_error() aborts the whole process, so the g_error_free()
// and endVideo() below were unreachable. Log without terminating.
if (err) {
g_printerr("%s\n", err->message);
g_error_free(err);
}
video->endVideo();
break;
}
default:
break;
}
return true;
}
// Construct an idle recorder; no pipeline exists until startVideo().
GVideo::GVideo()
: _isRecording(false), _dataNeed(false),
_pipeline(NULL), _source(NULL)
{
}
// Destructor: make sure an in-flight recording is finalized before the
// object goes away (endVideo() releases the pipeline).
GVideo::~GVideo()
{
std::cout << "Deleting GstVideo." << std::endl;
if (_pipeline)
endVideo();
}
void GVideo::startVideo(std::string filename)
{
_isRecording = true;
_frameRate = 2;
_duration = 5;
_timestamp = 0;
_dataNeed = true;
_pipeline = gst_pipeline_new ("video_pipeline");
_source = gst_element_factory_make ("appsrc" , "video_source");
GstElement* _videorate = gst_element_factory_make ("videorate" , "video_vidrate");
GstElement* _capsfilter = gst_element_factory_make ("capsfilter" , "video_capsfilter");
GstElement* _videoconvert = gst_element_factory_make ("videoconvert", "video_conv");
GstElement* _encoder = gst_element_factory_make ("x264enc" , "video_enc");
GstElement* _muxer = gst_element_factory_make ("mp4mux" , "video_mux");
GstElement* _filesink = gst_element_factory_make ("filesink" , "video_filesink");
// g_object_set (G_OBJECT (_source), "num-buffers", _duration*_frameRate, NULL);
g_object_set (G_OBJECT (_source), "caps",
gst_caps_new_simple ( "video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, 352,
"height", G_TYPE_INT, 288,
"framerate", GST_TYPE_FRACTION, _frameRate, 1,
NULL), NULL);
/* setup appsrc */
g_object_set (G_OBJECT (_source),
// "do-timestamp", TRUE,
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"format", GST_FORMAT_TIME, NULL);
g_object_set (G_OBJECT (_capsfilter), "caps",
gst_caps_new_simple ("video/x-raw",
// "format", G_TYPE_STRING, "I420",
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL), NULL);
gst_bin_add_many (GST_BIN (_pipeline), _source, _videorate, _capsfilter, _videoconvert, _encoder, _muxer, _filesink, NULL);
gst_element_link_many (_source, _videorate, _capsfilter, _videoconvert, _encoder, _muxer, _filesink, NULL);
g_object_set (G_OBJECT (_filesink), "location", filename.c_str(), NULL);
GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(_pipeline));
gst_bus_add_watch(bus, bus_video_call, this);
gst_object_unref(bus);
gst_element_set_state (_pipeline, GST_STATE_PLAYING);
}
// Push one raw frame into the appsrc, stamping PTS/duration from the
// configured frame rate. Once _duration seconds of frames have been pushed,
// signals end-of-stream and stops requesting data.
// NOTE(review): gst_app_src_push_sample() is transfer-none on the sample,
// so this function does NOT consume `element` — the caller presumably must
// unref the sample after this returns; verify against the caller.
void GVideo::addFrame(GstSample* element)
{
GstBuffer* buf = gst_sample_get_buffer((GstSample*)element);
GST_BUFFER_PTS (buf) = _timestamp;
GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (1, GST_SECOND, _frameRate);
_timestamp += GST_BUFFER_DURATION (buf);
gst_app_src_push_sample(GST_APP_SRC(_source), element);
if(_timestamp >= _duration*GST_SECOND)
{
// Enough footage collected: stop accepting frames and drain the encoder.
_dataNeed = false;
gst_app_src_end_of_stream(GST_APP_SRC(_source));
}
}
// Stop the recording pipeline, drop our reference to it and mark the
// recorder idle. The pipeline (and every element it owns) is released here.
void GVideo::endVideo()
{
std::cout << "gst_element_set_state." << std::endl;
gst_element_set_state (_pipeline, GST_STATE_NULL);
std::cout << "gst_object_unref." << std::endl;
gst_object_unref(_pipeline);
std::cout << "_pipeline= NULL." << std::endl;
_pipeline = NULL; // guards the destructor against a double endVideo()
std::cout << "setDataNeed." << std::endl;
_isRecording = false;
}
I would guess that whatever is calling addFrame() might need to unref the sample.
https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/gst-plugins-base-libs-appsrc.html#gst-app-src-push-sample
In the docs there it indicates "transfer: none" on the sample parameter, which I believe means the caller needs to unref. For some reason the older method gst_app_src_push_buffer has transfer "full". Dunno why.
https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/gst-plugins-base-libs-appsrc.html#gst-app-src-push-buffer

How to access data from GMemoryOutputStream

I want to decode an audio file and store the PCM/int values into an array. For that I use gstreamer and the giostreamsink, which gives me a GMemoryOutputStream. So far so good, but how can I now access or loop through the GMemoryOutputStream?
What I did to get the gpointer:
gpointer out_data = g_memory_output_stream_get_data(G_MEMORY_OUTPUT_STREAM(stream));
but what can I do now with that gpointer? How can I access the stream data?
The full code that I have so far:
#include <string>
#include <stdio.h>
#include <gst/gst.h>
#include <gio/gio.h>
#include <boost/thread.hpp>
// decodebin "pad-added" handler: link a newly exposed audio pad to the
// audioconvert sink pad. Non-audio pads are ignored.
// NOTE(review): gst_pad_get_caps() is GStreamer 0.10 API — presumably this
// program targets 0.10; confirm before porting to 1.x (gst_pad_query_caps).
static void on_pad_added(GstElement *decodebin,
GstPad *pad,
gpointer data) {
GstElement *convert = (GstElement *) data;
GstCaps *caps;
GstStructure *str;
GstPad *audiopad;
audiopad = gst_element_get_static_pad(convert, "sink");
// Already linked: nothing to do (drop the pad ref taken above).
if (GST_PAD_IS_LINKED(audiopad)) {
g_object_unref(audiopad);
return;
}
caps = gst_pad_get_caps(pad);
str = gst_caps_get_structure(caps, 0);
printf("here %s\n",gst_structure_get_name(str));
// Only link pads whose media type mentions "audio".
if (!g_strrstr(gst_structure_get_name(str), "audio")) {
gst_caps_unref(caps);
gst_object_unref(audiopad);
return;
}
gst_caps_unref(caps);
gst_pad_link(pad, audiopad);
g_object_unref(audiopad);
}
// Bus watch: quit the main loop when the stream ends or errors out.
static gboolean bus_call(GstBus *bus,
GstMessage *msg,
gpointer data) {
GMainLoop *loop = (GMainLoop*)data;
GstMessageType type = GST_MESSAGE_TYPE(msg);
if (type == GST_MESSAGE_EOS) {
g_print ("End of stream\n");
g_main_loop_quit(loop);
} else if (type == GST_MESSAGE_ERROR) {
GError *error;
gchar *debug;
gst_message_parse_error(msg, &error, &debug);
g_free (debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
}
return true;
}
// Decode a WAV file to raw PCM entirely in memory: filesrc ! decodebin !
// audioconvert ! giostreamsink writing into a GMemoryOutputStream.
int main (int argc, char **argv) {
gst_init(&argc, &argv);
GstElement *pipeline, *source, *decode, *sink, *convert;
// Desired output format (enforced via the caps filter below).
int rate = 44100;
int channels = 1;
int depth = 16;
bool output_signed = true;
GMainLoop *loop;
GstBus *bus;
guint bus_watch_id;
GMemoryOutputStream *stream;
gpointer out_data;
// loop
loop = g_main_loop_new(NULL, false);
// pipeline
pipeline = gst_pipeline_new("test_pipeline");
// sink: giostreamsink writes into a growable in-memory GIO stream
stream = G_MEMORY_OUTPUT_STREAM(g_memory_output_stream_new(NULL, 0, (GReallocFunc)g_realloc, (GDestroyNotify)g_free));
sink = gst_element_factory_make ("giostreamsink", "sink");
g_object_set(G_OBJECT(sink), "stream", stream, NULL);
// source
source = gst_element_factory_make("filesrc", "source");
g_object_set(G_OBJECT(source), "location", "/home/sam/Desktop/audio/audio.wav", NULL);
// convert
convert = gst_element_factory_make("audioconvert", "convert");
// decode
decode = gst_element_factory_make("decodebin", "decoder");
// link decode to convert (decodebin pads appear dynamically)
g_signal_connect(decode, "pad-added", G_CALLBACK(on_pad_added), convert);
// bus
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
// add elements into pipeline
gst_bin_add_many(GST_BIN(pipeline), source, decode, convert, sink, NULL);
// link source to decode
gst_element_link(source, decode);
// caps: fix the PCM layout produced at the sink (0.10-style x-raw-int)
GstCaps *caps;
caps = gst_caps_new_simple("audio/x-raw-int",
"rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, channels,
"width", G_TYPE_INT, depth,
"depth", G_TYPE_INT, depth,
"signed", G_TYPE_BOOLEAN, output_signed,
NULL);
// link convert to sink
gst_element_link_filtered(convert, sink, caps);
gst_caps_unref(caps);
// start playing
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
// iterate until bus_call() quits the loop on EOS or error
g_print("Running...\n");
g_main_loop_run(loop);
// out of the main loop, clean up nicely
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref(loop);
// get data: the memory stream survives the pipeline teardown
g_print("get data\n");
out_data = g_memory_output_stream_get_data(G_MEMORY_OUTPUT_STREAM(stream));
unsigned long size = g_memory_output_stream_get_size(G_MEMORY_OUTPUT_STREAM(stream));
unsigned long sizeData = g_memory_output_stream_get_data_size(G_MEMORY_OUTPUT_STREAM(stream));
std::cout << "stream size: " << size << std::endl;
std::cout << "stream data size: " << sizeData << std::endl;
// out_data is a raw gpointer; it must be cast (e.g. to gint16*) before
// elements can be read — see the working version later in this file.
for (int i = 0; i < 5; ++i) {
// std::cout << out_data[i] << std::endl; // not working
}
return 0;
}
I solved the problem, I had to cast the gpointer to gint16*:
std::vector<int16_t> data;
for (unsigned long i = 0; i < sizeData/2; ++i) {
data.push_back(((gint16*)out_data)[i]);
}
The casting has to be changed, depending on the depth you use e.g. depth 8 and unsigned guint8* and change also the for loop limit.
The full code for those interested:
#include <string>
#include <stdio.h>
#include <gst/gst.h>
#include <gio/gio.h>
#include <boost/thread.hpp>
// decodebin "pad-added" handler: link a newly exposed audio pad to the
// audioconvert sink pad; non-audio pads are ignored.
static void on_pad_added(GstElement *decodebin,
GstPad *pad,
gpointer data) {
GstElement *convert = (GstElement *) data;
GstPad *audiopad = gst_element_get_static_pad(convert, "sink");
if (GST_PAD_IS_LINKED(audiopad)) {
g_object_unref(audiopad);
return;
}
GstCaps *caps = gst_pad_get_caps(pad);
GstStructure *str = gst_caps_get_structure(caps, 0);
printf("here %s\n",gst_structure_get_name(str));
gboolean is_audio = g_strrstr(gst_structure_get_name(str), "audio") != NULL;
if (!is_audio) {
gst_caps_unref(caps);
gst_object_unref(audiopad);
return;
}
gst_caps_unref(caps);
gst_pad_link(pad, audiopad);
g_object_unref(audiopad);
}
// Bus watch: quit the main loop when the stream ends or errors out.
static gboolean bus_call(GstBus *bus,
GstMessage *msg,
gpointer data) {
GMainLoop *loop = (GMainLoop*)data;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free (debug); // debug detail unused
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return true; // keep the watch installed
}
// Working version: decode a WAV file to raw PCM in memory, then copy the
// samples out of the GMemoryOutputStream into a std::vector<int16_t>.
int main (int argc, char **argv) {
gst_init(&argc, &argv);
GstElement *pipeline, *source, *decode, *sink, *convert;
// Desired output format (enforced via the caps filter below).
int rate = 44100;
int channels = 1;
int depth = 16;
bool output_signed = true;
GMainLoop *loop;
GstBus *bus;
guint bus_watch_id;
GMemoryOutputStream *stream;
gpointer out_data;
// loop
loop = g_main_loop_new(NULL, false);
// pipeline
pipeline = gst_pipeline_new("test_pipeline");
// sink: giostreamsink writes into a growable in-memory GIO stream
stream = G_MEMORY_OUTPUT_STREAM(g_memory_output_stream_new(NULL, 0, (GReallocFunc)g_realloc, (GDestroyNotify)g_free));
sink = gst_element_factory_make ("giostreamsink", "sink");
g_object_set(G_OBJECT(sink), "stream", stream, NULL);
// source
source = gst_element_factory_make("filesrc", "source");
g_object_set(G_OBJECT(source), "location", "/home/sam/Desktop/audio/audio.wav", NULL);
// convert
convert = gst_element_factory_make("audioconvert", "convert");
// decode
decode = gst_element_factory_make("decodebin", "decoder");
// link decode to convert (decodebin pads appear dynamically)
g_signal_connect(decode, "pad-added", G_CALLBACK(on_pad_added), convert);
// bus
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
// add elements into pipeline
gst_bin_add_many(GST_BIN(pipeline), source, decode, convert, sink, NULL);
// link source to decode
gst_element_link(source, decode);
// caps: fix the PCM layout produced at the sink (0.10-style x-raw-int)
GstCaps *caps;
caps = gst_caps_new_simple("audio/x-raw-int",
"rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, channels,
"width", G_TYPE_INT, depth,
"depth", G_TYPE_INT, depth,
"signed", G_TYPE_BOOLEAN, output_signed,
NULL);
// link convert to sink
gst_element_link_filtered(convert, sink, caps);
gst_caps_unref(caps);
// start playing
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
// iterate until bus_call() quits the loop on EOS or error
g_print("Running...\n");
g_main_loop_run(loop);
// out of the main loop, clean up nicely
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref(loop);
// get data: the memory stream survives the pipeline teardown
g_print("get data\n");
out_data = g_memory_output_stream_get_data(G_MEMORY_OUTPUT_STREAM(stream));
unsigned long size = g_memory_output_stream_get_size(G_MEMORY_OUTPUT_STREAM(stream));
unsigned long sizeData = g_memory_output_stream_get_data_size(G_MEMORY_OUTPUT_STREAM(stream));
std::cout << "stream size: " << size << std::endl;
std::cout << "stream data size: " << sizeData << std::endl;
// access data and store in vector: reinterpret the raw bytes as 16-bit
// samples (divide byte count by 2); the cast must match `depth` above.
std::vector<int16_t> data;
for (unsigned long i = 0; i < sizeData/2; ++i) {
data.push_back(((gint16*)out_data)[i]);
}
return 0;
}

Streaming video from PtGrey Camera using GStreamer

I am having one heck of a time reaching my end goal with this one and really hope someone can help me out. I am very new to gstreamer and have been working on this issue for several days. The end goal of everything is to have a piece of software that will take video from my Point Grey Blackfly USB 3 camera and compress it then send it out over the UDPSink through gstreamer. As of now I am attempting to simply show that this is possible by getting the video from the camera to display via gstreamer. In other words, I am trying to do something like this
Appsrc(camera)->ffmpegcolorspace->xvimagesink
just to get some sort of result...
My setup is as follows:
Nvidia Jetson TK1,
Ubunutu 14.04,
Gstreamer0.10,
OpenCV(installed but not currently used, have been considering throwing that in too)
Below is the code I currently have, it does not work but it will compile and run.
#include <stdio.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/video/video.h>
#include "FlyCapture2.h"
#include <iostream>
#include <sstream>
using namespace FlyCapture2;
using namespace std;
// Shared application state for the camera-to-display pipeline.
typedef struct {
GstPipeline *pipeline;
GstAppSrc *src;        // fed with camera frames from read_data()
GstElement *sink;
GstElement *encoder;   // unused in the current (display-only) pipeline
GstElement *decoder;   // unused in the current (display-only) pipeline
GstElement *ffmpeg;    // ffmpegcolorspace converter
GstElement *xvimagesink;
GMainLoop *loop;
guint sourceid;        // idle-source id while appsrc wants data; 0 otherwise
FILE *file;            // file handle from argv[1] (legacy; frames come from the camera)
}gst_app_t;
static gst_app_t gst_app;
Camera camera;          // global PtGrey camera handle
#define BUFF_SIZE (1024)
// Fills ptr/size with a freshly g_malloc'd BGR frame from the camera.
void getImagePtr(guint8 * &ptr, gint &size);
// Idle-source callback while appsrc wants data: grab one camera frame,
// wrap it in a GstBuffer and push it. Returns FALSE to remove the source
// (on EOS or push failure), TRUE to be called again.
static gboolean read_data(gst_app_t *app)
{
GstBuffer *buffer;
guint8 *ptr;
gint size;
GstFlowReturn ret;
getImagePtr(ptr, size);
if(size == 0){
// No frame available: signal end-of-stream and stop feeding.
ret = gst_app_src_end_of_stream(app->src);
g_debug("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
// Describe the raw BGR frame (0.10-style x-raw-rgb caps).
GstCaps *caps = NULL;
std::stringstream video_caps_text;
video_caps_text << "video/x-raw-rgb,bpp=(int)24,depth=(int)24,endianness=(int)4321,red_mask=(int)16711680,green_mask=(int)65280,blue_mask=(int)255,width=(int)1288,height=(int)964,framerate=(fraction)0/1";
caps = gst_caps_from_string( video_caps_text.str().c_str() );
g_object_set( G_OBJECT(app->src), "caps", caps, NULL);
// Leak fix: this caps object was never unreffed, leaking one GstCaps per
// frame. (Setting caps once outside the feed loop would be even better.)
gst_caps_unref( caps );
// Hand the malloc'd frame to the buffer; GStreamer frees MALLOCDATA.
buffer = gst_buffer_new();
GST_BUFFER_MALLOCDATA(buffer) = ptr;
GST_BUFFER_SIZE(buffer) = size;
GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
{
GstCaps *caps_source = NULL;
std::stringstream video_caps_text;
video_caps_text << "video/x-raw-rgb,bpp=(int)24,depth=(int)24,endianness=(int)4321,red_mask=(int)16711680,green_mask=(int)65280,blue_mask=(int)255,width=(int)1288,height=(int)964,framerate=(fraction)0/1";
caps_source = gst_caps_from_string( video_caps_text.str().c_str() );
cout<<video_caps_text.str()<<endl;
if( !GST_IS_CAPS( caps_source) ){
cout<<"ERROR MAKING CAPS"<<endl;
exit(1);
}
gst_app_src_set_caps( GST_APP_SRC( app->src ), caps_source);
gst_buffer_set_caps( buffer, caps_source);
gst_caps_unref( caps_source );
}
ret = gst_app_src_push_buffer(app->src, buffer); // takes ownership of buffer
if(ret != GST_FLOW_OK){
g_debug("push buffer returned %d for %d bytes \n", ret, size);
return FALSE;
}
// NOTE(review): this ends the stream whenever a frame is <= BUFF_SIZE
// (1024) bytes; camera frames are far larger, so it only fires on
// degenerate frames — confirm this is the intended guard.
if(!(size > BUFF_SIZE)){
cout<<"ISSUE FOUND"<<endl;
ret = gst_app_src_end_of_stream(app->src);
g_debug("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
return TRUE;
}
// appsrc "need-data" handler: install read_data() as an idle source so
// frames are fed whenever the main loop is otherwise idle.
static void start_feed (GstElement * pipeline, guint size, gst_app_t *app)
{
if (app->sourceid != 0)
return; // already feeding
GST_DEBUG ("start feeding");
app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
}
// appsrc "enough-data" handler: remove the idle feeder until appsrc asks
// for more data again.
static void stop_feed (GstElement * pipeline, gst_app_t *app)
{
if (app->sourceid == 0)
return; // not feeding
GST_DEBUG ("stop feeding");
g_source_remove (app->sourceid);
app->sourceid = 0;
}
// pad-added handler (currently unused: the decodebin branch is commented
// out in main): would link a dynamic video pad to the ffmpeg converter.
// NOTE(review): gst_pad_get_caps()/gst_element_get_pad() are 0.10 API.
static void on_pad_added(GstElement *element, GstPad *pad)
{
cout<<"PAD ADDED"<<endl;
GstCaps *caps;
GstStructure *str;
gchar *name;
GstPad *ffmpegsink;
GstPadLinkReturn ret;
g_debug("pad added");
caps = gst_pad_get_caps(pad);
str = gst_caps_get_structure(caps, 0);
cout<<"CAPS: "<<str<<endl;
g_assert(str);
name = (gchar*)gst_structure_get_name(str);
cout<<"NAME IS: "<<name<<endl;
g_debug("pad name %s", name);
// Only video pads are linked into the display branch.
if(g_strrstr(name, "video")){
ffmpegsink = gst_element_get_pad(gst_app.ffmpeg, "sink");
g_assert(ffmpegsink);
ret = gst_pad_link(pad, ffmpegsink);
g_debug("pad_link returned %d\n", ret);
gst_object_unref(ffmpegsink);
}
gst_caps_unref(caps);
}
// Bus watch: log warnings and unknown messages, quit the loop on error
// or end-of-stream. Always returns TRUE to keep the watch installed.
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer *ptr)
{
gst_app_t *app = (gst_app_t*)ptr;
switch(GST_MESSAGE_TYPE(message)){
case GST_MESSAGE_ERROR:
{
GError *err;
gchar *debug;
gst_message_parse_error(message, &err, &debug);
g_print("Error %s\n", err->message);
g_error_free(err);
g_free(debug);
g_main_loop_quit(app->loop);
break;
}
case GST_MESSAGE_WARNING:
{
GError *err;
gchar *debug;
gst_message_parse_warning(message, &err, &debug);
g_print("Warning %s\nDebug %s\n", err->message, debug);
const gchar *name = GST_MESSAGE_SRC_NAME(message);
g_print("Name of src %s\n", name ? name : "nil");
g_error_free(err);
g_free(debug);
break;
}
case GST_MESSAGE_EOS:
{
g_print("End of stream\n");
g_main_loop_quit(app->loop);
break;
}
case GST_MESSAGE_STATE_CHANGED:
break; // too chatty to log
default:
g_print("got message %s\n", \
gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
break;
}
return TRUE;
}
// Dump a FlyCapture2 error trace to the console.
void PrintError( Error error )
{
error.PrintErrorTrace();
}
// Enumerate PtGrey cameras on the bus and connect to the last one found.
void connectCamera(){
cout<<"STARTING CONNECTION FUNCTION"<<endl;
Error error;
BusManager busMgr;
unsigned int numCameras = 0;
PGRGuid guid;
error = busMgr.GetNumOfCameras(&numCameras);
if (error != PGRERROR_OK)
{
PrintError (error);
// Bug fix: previously fell through with numCameras indeterminate.
return;
}
cout << "Number of cameras detected: " << numCameras << endl;
// Bug fix: with zero cameras the old code called Connect() with an
// uninitialized guid. Bail out instead.
if (numCameras == 0)
{
std::cout << "No cameras detected, cannot connect" << std::endl;
return;
}
for (unsigned int i=0; i < numCameras; i++)
{
error = busMgr.GetCameraFromIndex(i, &guid);
if (error != PGRERROR_OK)
{
PrintError( error );
}
}
// Connect the camera (guid holds the last enumerated camera).
error = camera.Connect( &guid );
if ( error != PGRERROR_OK )
{
std::cout << "Failed to connect to camera" << std::endl;
return;
}
else
std::cout << "CONNECTED!" << std::endl;
}
// Capture one frame, convert to BGR and return a g_malloc'd copy in ptr
// with its byte count in size. On capture failure, returns ptr=NULL and
// size=0 so read_data() sends EOS instead of pushing garbage (previously
// both outputs were left indeterminate on error).
void getImagePtr( guint8 * &ptr, gint &size){
ptr = NULL;
size = 0;
// Get the image
Image rawImage;
Error error = camera.RetrieveBuffer( &rawImage );
if ( error != PGRERROR_OK )
{
std::cout << "capture error" << std::endl;
return;
}
// convert to rgb
Image bgrImage;
rawImage.Convert( FlyCapture2::PIXEL_FORMAT_BGR, &bgrImage );
size = bgrImage.GetDataSize();
// Ownership of this allocation passes to the GstBuffer in read_data().
ptr = (guint8*)g_malloc(size);
g_assert(ptr);
memcpy( ptr, bgrImage.GetData(), size );
}
// Build appsrc -> ffmpegcolorspace -> xvimagesink fed by the PtGrey camera
// and run until the bus callback quits the loop.
int main(int argc, char *argv[])
{
gst_app_t *app = &gst_app;
GstBus *bus;
GstStateChangeReturn state_ret;
if(argc != 2){
printf("File name not specified\n");
return 1;
}
connectCamera();
camera.StartCapture();
app->file = fopen(argv[1], "r"); // legacy file input; frames come from the camera
g_assert(app->file);
gst_init(NULL, NULL);
app->pipeline = (GstPipeline*)gst_pipeline_new("mypipeline");
bus = gst_pipeline_get_bus(app->pipeline);
gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
gst_object_unref(bus);
app->src = (GstAppSrc*)gst_element_factory_make("appsrc", "mysrc");
//app->encoder = gst_element_factory_make("nv_omx_h264enc", "nvidEnc");
//app->decoder = gst_element_factory_make("decodebin", "mydecoder");
app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg");
app->xvimagesink = gst_element_factory_make("xvimagesink", "myvsink");
g_assert(app->src);
//g_assert(app->encoder);
//g_assert(app->decoder);
g_assert(app->ffmpeg);
g_assert(app->xvimagesink);
g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);
//g_signal_connect(app->decoder, "pad-added",
// G_CALLBACK(on_pad_added), app->decoder);
//gst_bin_add_many(GST_BIN(app->pipeline), (GstElement*)app->src, app->encoder,
//app->decoder, app->ffmpeg, app->xvimagesink, NULL);
gst_bin_add_many(GST_BIN(app->pipeline), (GstElement*)app->src, app->ffmpeg, app->xvimagesink, NULL);
//if(!gst_element_link((GstElement*)app->src, app->encoder)){
//g_warning("failed to link src anbd decoder");
//}
//if(!gst_element_link(app->encoder, app->decoder)){
// g_warning("failed to link encoder and decoder");
//}
// NOTE(review): only ffmpeg->xvimagesink is linked here; app->src is
// never linked to app->ffmpeg, which is the likely cause of the
// "Internal data flow error" (see the answer below: add
// gst_element_link(app->src, app->ffmpeg)).
if(!gst_element_link(app->ffmpeg, app->xvimagesink)){
g_warning("failed to link ffmpeg and xvsink");
}
state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_PLAYING);
g_warning("set state returned %d\n", state_ret);
app->loop = g_main_loop_new(NULL, FALSE);
//GstCaps *appsrcCaps = NULL;
//appsrcCaps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR, 1288, 964, 0, 1, 4, 3);
//gst_app_src_set_caps(GST_APP_SRC(app->src), appsrcCaps);
g_main_loop_run(app->loop); // blocks until bus_callback() quits
camera.StopCapture();
camera.Disconnect();
state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_NULL);
g_warning("set state null returned %d\n", state_ret);
return 0;
}
I keep getting an Internal data flow error on every run, and I am not sure what causes it. I think one of my issues may be the "caps", but like I said, I am very new to this.
Any help will be greatly appreciated. Let me know if there is anything else I can add to be more clear. Thank you so much!
At first glance (I haven't gone into details) it seems you link ffmpeg with xvimagesink only; try to link your src element as well:
gst_element_link(app->src, app->ffmpeg);
You can read about linking elements more here: http://gstreamer.freedesktop.org/data/doc/gstreamer/head/manual/html/section-elements-link.html

OpenCL Limit on for loop size?

UPDATE: clEnqueueReadBuffer(command_queue, c_mem_obj, CL_TRUE, 0, LIST_SIZE * sizeof(double), C, 0, NULL, NULL); is returning -5, CL_OUT_OF_RESOURCES. This funciton/call should never return this!
I've started using OpenCL and have come across a problem. If I allow a for loop (in the kernel) to run 10000 times I get all of C to be 0 if I allow the loop to run for 8000 the results are all correct.
I have added waits around the kernel to ensure it completes, thinking I was pulling the data out before completion, and have tried both clWaitForEvents and clFinish. No errors are signalled by any of the calls. When I used ints, the for loop would work at a size of 4000000. Floats and doubles have the same problem; floats work at 10000 but not at 20000. When I used floats I removed #pragma OPENCL EXTENSION cl_khr_fp64 : enable to check that wasn't the problem.
Is this some weird memory thing, or am I using OpenCL wrong? I realise that in most kernels I won't be implementing for loops like this, but this seems like an issue. I have also removed __private to see if that was the problem; no change. So is there a limit on the size of for loops in OpenCL kernels? Is it hardware specific? Or is this a bug?
The kernel is a simple kernel, which adds 2 arrays (A+B) together and outputs another (C). In order to get a feel for performance I put a for loop around each calculation to slow it up/increase the number of operations per run through.
The code for the kernel is as follows:
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
/*
 * Element-wise vector addition: C[i] = A[i] + B[i], one work-item per index.
 * The inner loop repeats the identical store 10000 times purely to inflate
 * the kernel's run time for benchmarking; the final result is unchanged.
 * NOTE(review): on a display-attached GPU, a kernel running this long can be
 * killed by the driver's watchdog timer (~5 s), surfacing later as
 * CL_OUT_OF_RESOURCES on the host -- the presumed cause of the failures
 * described in the question; confirm on a non-display device.
 */
__kernel void vector_add(__global double *A, __global double *B, __global double *C)
{
// Get the index of the current element
int i = get_global_id(0);
// Do the operation
for (__private unsigned int j = 0; j < 10000; j++)
{
C[i] = A[i] + B[i];
}
}
The code I'm running is as follows: (I ensure that the variables are consistent between both pieces of code when I switch between float and double)
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#ifdef __APPLE__
#include <OpenCL/opencl.h>
#else
#include <CL/cl.h>
#endif
#define MAX_SOURCE_SIZE (0x100000)
int main(void) {
// Create the two input vectors
int i;
const int LIST_SIZE = 4000000;
double *A = (double*)malloc(sizeof(double)*LIST_SIZE);
double *B = (double*)malloc(sizeof(double)*LIST_SIZE);
for(i = 0; i < LIST_SIZE; i++) {
A[i] = static_cast<double>(i);
B[i] = static_cast<double>(LIST_SIZE - i);
}
// Load the kernel source code into the array source_str
FILE *fp;
char *source_str;
size_t source_size;
fp = fopen("vector_add_kernel.cl", "r");
if (!fp) {
fprintf(stderr, "Failed to load kernel.\n");
exit(1);
}
source_str = (char*)malloc(MAX_SOURCE_SIZE);
source_size = fread( source_str, 1, MAX_SOURCE_SIZE, fp);
fclose( fp );
// Get platform and device information
cl_platform_id platform_id = NULL;
cl_device_id device_id = NULL;
cl_uint ret_num_devices;
cl_uint ret_num_platforms;
// clGetPlatformIDs(1, &platform_id, NULL);
//clGetDeviceIDs(platform_id, CL_DEVICE_TYPE_GPU, 1, &device_id, ret_num_devices);
cl_int ret = clGetPlatformIDs(1, &platform_id, NULL);
if (ret != CL_SUCCESS) {
printf("Error: Failed to get platforms! (%d) \n", ret);
return EXIT_FAILURE;
}
ret = clGetDeviceIDs(platform_id, CL_DEVICE_TYPE_GPU, 1, &device_id, &ret_num_devices);
if (ret != CL_SUCCESS) {
printf("Error: Failed to query platforms to get devices! (%d) \n", ret);
return EXIT_FAILURE;
}
/*
cl_int ret = clGetPlatformIDs(1, &platform_id, NULL);
if (ret != CL_SUCCESS) {
printf("Error: Failed to get platforms! (%d) \n", ret);
return EXIT_FAILURE;
}
ret = clGetDeviceIDs( platform_id, CL_DEVICE_TYPE_CPU, 1,
&device_id, &ret_num_devices);
if (ret != CL_SUCCESS) {
printf("Error: Failed to query platforms to get devices! (%d) \n", ret);
return EXIT_FAILURE;
}
*/
// Create an OpenCL context
cl_context context = clCreateContext( NULL, 1, &device_id, NULL, NULL, &ret);
// Create a command queue
cl_command_queue command_queue = clCreateCommandQueue(context, device_id, 0, &ret);
// Create memory buffers on the device for each vector
cl_mem a_mem_obj = clCreateBuffer(context, CL_MEM_READ_ONLY,
LIST_SIZE * sizeof(double), NULL, &ret);
cl_mem b_mem_obj = clCreateBuffer(context, CL_MEM_READ_ONLY,
LIST_SIZE * sizeof(double), NULL, &ret);
cl_mem c_mem_obj = clCreateBuffer(context, CL_MEM_WRITE_ONLY,
LIST_SIZE * sizeof(double), NULL, &ret);
if (ret != CL_SUCCESS) {
printf("Error: Buffer Fail! (%d) \n", ret);
return EXIT_FAILURE;
}
// Copy the lists A and B to their respective memory buffers
ret = clEnqueueWriteBuffer(command_queue, a_mem_obj, CL_TRUE, 0,
LIST_SIZE * sizeof(double), A, 0, NULL, NULL);
ret = clEnqueueWriteBuffer(command_queue, b_mem_obj, CL_TRUE, 0,
LIST_SIZE * sizeof(double), B, 0, NULL, NULL);
std::cout << "Begin Compile" << "\n";
// Create a program from the kernel source
cl_program program = clCreateProgramWithSource(context, 1,
(const char **)&source_str, (const size_t *)&source_size, &ret);
if (ret != CL_SUCCESS) {
printf("Error: Program Fail! (%d) \n", ret);
return EXIT_FAILURE;
}
// Build the program
ret = clBuildProgram(program, 1, &device_id, NULL, NULL, NULL);
if (ret != CL_SUCCESS) {
printf("Error: ProgramBuild Fail! (%d) \n", ret);
return EXIT_FAILURE;
}
// Create the OpenCL kernel
cl_kernel kernel = clCreateKernel(program, "vector_add", &ret);
if (ret != CL_SUCCESS) {
printf("Error: Kernel Build Fail! (%d) \n", ret);
return EXIT_FAILURE;
}
std::cout << "End Compile" << "\n";
std::cout << "Begin Data Move" << "\n";
// Set the arguments of the kernel
ret = clSetKernelArg(kernel, 0, sizeof(cl_mem), (void *)&a_mem_obj);
ret = clSetKernelArg(kernel, 1, sizeof(cl_mem), (void *)&b_mem_obj);
ret = clSetKernelArg(kernel, 2, sizeof(cl_mem), (void *)&c_mem_obj);
std::cout << "End Data Move" << "\n";
// Execute the OpenCL kernel on the list
size_t global_item_size = LIST_SIZE; // Process the entire lists
size_t local_item_size = 64; // Process in groups of 64
std::cout << "Begin Execute" << "\n";
cl_event event;
ret = clEnqueueNDRangeKernel(command_queue, kernel, 1, NULL,
&global_item_size, &local_item_size, 0, NULL, &event);
clFinish(command_queue);
//clWaitForEvents(1, &event);
std::cout << "End Execute" << "\n";
if (ret != CL_SUCCESS) {
printf("Error: Execute Fail! (%d) \n", ret);
return EXIT_FAILURE;
}
// Read the memory buffer C on the device to the local variable C
std::cout << "Begin Data Move" << "\n";
double *C = (double*)malloc(sizeof(double)*LIST_SIZE);
ret = clEnqueueReadBuffer(command_queue, c_mem_obj, CL_TRUE, 0,
LIST_SIZE * sizeof(double), C, 0, NULL, NULL);
if (ret != CL_SUCCESS) {
printf("Error: Read Fail! (%d) \n", ret);
return EXIT_FAILURE;
}
clFinish(command_queue);
std::cout << "End Data Move" << "\n";
std::cout << "Done" << "\n";
std::cin.get();
// Display the result to the screen
for(i = 0; i < LIST_SIZE; i++)
printf("%f + %f = %f \n", A[i], B[i], C[i]);
// Clean up
ret = clFlush(command_queue);
ret = clFinish(command_queue);
ret = clReleaseKernel(kernel);
ret = clReleaseProgram(program);
ret = clReleaseMemObject(a_mem_obj);
ret = clReleaseMemObject(b_mem_obj);
ret = clReleaseMemObject(c_mem_obj);
ret = clReleaseCommandQueue(command_queue);
ret = clReleaseContext(context);
free(A);
free(B);
free(C);
std::cout << "Number of Devices: " << ret_num_devices << "\n";
std::cin.get();
return 0;
}
I've had a look on the internet and can't find people with similar problems, this is a concern as it could lead to code that works well till scaled up...
I'm running Ubuntu 14.04, and have a laptop graphics card for a RC520 which I run with bumblebee/optirun. If this bug isn't reproducible on other machines up to a loop size of 4000000 then I will log a bug with bumblebee/optirun.
Cheers
I found the issue: GPUs attached to displays/active VGAs/etc. have a watchdog timer that times out after ~5 s. This is the case for cards that aren't Teslas, which allow this functionality to be turned off. Running on a secondary card is a workaround. This sucks and needs to be fixed ASAP. It's definitely an NVidia issue; I'm not sure about AMD. Either way, this is terrible.
Workarounds are registry changes in Windows and, in Linux/Ubuntu, altering the X conf and placing:
option "Interactive" "0"
in the section for the graphics card. However, the X conf is no longer generated in later versions and may have to be created manually. If anyone has a copy-and-paste console fix for this, that would be great and a better answer.