I am relatively new to asking questions on Stack Overflow, but I will do my best to explain the problem thoroughly.
I am currently using an Axis IP camera to stream live video to a CARMA board. GStreamer takes these frames with an RTSP client, performs an RTP depayload, and then decodes the H.264 images sent by the camera. When I run this process on my computer (equipped with an i7 processor) there is no lag and the stream is displayed in real time, updating at a rate of 30 Hz. The problem arises when I switch over to the CARMA board: instead of displaying in real time, the appsink receives buffers at a rate much slower than normal. More specifically, instead of receiving buffers at 30 Hz, it only receives them at about 10 Hz on average, even when no other processing is occurring on the board. It should also be noted that no frames are dropped; the appsink receives every buffer, just not in real time. I have also checked that timestamps are not the issue (i.e. the rate at which the appsink receives buffers does not change whether or not I use a GST timestamp). The CARMA board is running Ubuntu 11.04 and the code is compiled with GCC. Any insight as to why this is occurring is greatly appreciated. Below are some code snippets and their respective explanations.
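For reference, the pipeline I am describing corresponds roughly to the following gst-launch-0.10 command (a sketch for reproducing the measurement; the IP address is a placeholder and fakesink stands in for my appsink, so this isolates the receive/decode path from my own processing):
gst-launch-0.10 -v rtspsrc location="rtsp://(ipaddress)/axis-media/media.amp?videocodec=h264" latency=0 ! rtph264depay ! nv_omx_h264dec ! fakesink silent=false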
Some definitions
#define APPSINK_CAPS "video/x-raw-yuv,format=(fourcc)I420"
#define RTSP_URI "rtsp://(ipaddress)/axis-media/media.amp?videocodec=h264"
#define RTSP_LATENCY 0
#define RTSP_BUFFER_MODE 0
#define RTSP_RTP_BLOCKSIZE 65536
GStreamer pipeline set-up code:
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.rtspsrc = gst_element_factory_make("rtspsrc", NULL);
data.rtph264depay = gst_element_factory_make("rtph264depay", NULL);
data.nv_omx_h264dec = gst_element_factory_make("nv_omx_h264dec", NULL);
data.appsink = gst_element_factory_make("appsink", NULL);
if (!data.rtspsrc || !data.rtph264depay || !data.nv_omx_h264dec || !data.appsink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set element properties */
g_object_set( data.rtspsrc, "location", RTSP_URI,
"latency", RTSP_LATENCY,
"buffer-mode", RTSP_BUFFER_MODE,
"rtp-blocksize", RTSP_RTP_BLOCKSIZE,
NULL);
g_object_set( data.rtph264depay, "byte-stream", FALSE, NULL);
g_object_set( data.nv_omx_h264dec, "use-timestamps", TRUE, NULL);
/* Configure appsink. This plugin will allow us to access buffer data */
GstCaps *appsink_caps;
appsink_caps = gst_caps_from_string (APPSINK_CAPS);
g_object_set (data.appsink, "emit-signals", TRUE,
"caps", appsink_caps,
NULL);
g_signal_connect (data.appsink, "new-buffer", G_CALLBACK (appsink_new_buffer), &data);
gst_caps_unref (appsink_caps);
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline) {
g_printerr ("Pipeline could not be created.\n");
return -1;
}
/* Build the pipeline */
/* Note that we are NOT linking the source at this point. We will do it later. */
gst_bin_add_many (GST_BIN(data.pipeline),
data.rtspsrc,
data.rtph264depay,
data.nv_omx_h264dec,
data.appsink,
NULL);
if (gst_element_link (data.rtph264depay, data.nv_omx_h264dec) != TRUE) {
g_printerr ("rtph264depay and nv_omx_h264dec could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
if (gst_element_link (data.nv_omx_h264dec, data.appsink) != TRUE) {
g_printerr ("nv_omx_h264dec and appsink could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Connect to the pad-added signal (CALLBACK!) */
g_signal_connect (data.rtspsrc, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Add a probe to perform hashing on H.264 bytestream */
GstPad *rtph264depay_src_pad = gst_element_get_static_pad (data.rtph264depay, "src");
gst_pad_add_buffer_probe (rtph264depay_src_pad, G_CALLBACK (hash_and_report), (gpointer)(&data));
gst_object_unref (rtph264depay_src_pad); //unreference the source pad
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n", gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
//we should not reach here because we only asked for ERRORs and EOS and State Changes
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
Now the pad_added_handler:
/* This function will be called by the pad-added signal */
//Thread 1
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = gst_element_get_static_pad (data->rtph264depay, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* Check the new pad's type */
new_pad_caps = gst_pad_get_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "application/x-rtp")) {
g_print (" It has type '%s' which is not RTP. Ignoring.\n", new_pad_type);
goto exit;
}
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
And now the callback that is invoked every time the appsink receives a buffer. This is the function that I believe (though I am not certain) is not receiving buffers in real time, which leads me to suspect that some processing I am doing takes too long before another buffer can be handled:
// Called when appsink receives a buffer: Thread 1
void appsink_new_buffer (GstElement *sink, CustomData *data) {
GstBuffer *buffer;
/* Retrieve the buffer */
g_signal_emit_by_name (sink, "pull-buffer", &buffer);
if (buffer) {
data->appsink_buffer_count++;
//push buffer onto queue, to be processed in different thread
if (GstBufferQueue->size() > GSTBUFFERQUEUE_SIZE) {
//error message
printf ("GstBufferQueue is full!\n");
//release buffer
gst_buffer_unref (buffer);
} else {
//push onto queue
GstBufferQueue->push(buffer);
//activate thread
connectionDataAvailable_GstBufferQueue.notify_all();
}
}
}
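To measure the arrival rate directly, one option (a sketch, assuming GLib's g_get_monotonic_time() is available, as it is on GLib >= 2.28) is to log the gap between successive invocations at the top of appsink_new_buffer:
/* crude arrival-rate check: print the time between successive buffers */
static gint64 last_us = 0;
gint64 now_us = g_get_monotonic_time (); /* microseconds */
if (last_us != 0)
g_print ("inter-buffer gap: %.1f ms\n", (now_us - last_us) / 1000.0);
last_us = now_us;
At 30 Hz the gap should hover around 33 ms; gaps around 100 ms would confirm the ~10 Hz rate.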
A link to the camera I am using:
http://www.axis.com/products/cam_p1357/index.htm
Hope this helps. I will continue to investigate this problem myself and provide updates as they come. Let me know if you need any other information and I look forward to reading your responses!
Thanks
So apparently the problem was not the program (i.e. the software design) but rather that the hardware on the CARMA board could not keep up with the amount of processing I was doing. In other words, the Tegra 3 processor on the CARMA was insufficient for this task. Possible solutions are to cut down the processing I am doing on the CARMA board or to upgrade to a different board. I hope this helps people appreciate both the limited processing power available on smaller devices and the fact that processors in the Tegra 3 category (which follow the system-on-a-chip model) may not currently have the computational power required to keep up with projects or systems that require large, real-time calculations.
To put it short: be careful what you buy! Do your best to ensure that what you are purchasing is right for the project. That being said, don't be scared to try new devices. Despite not being able to do what I wanted, I learned more than I could have ever expected. After all, computer science is just continuous learning :p
Related
I have the following example code, which works correctly. It uses filesink as the sink:
/* GStreamer
*
* appsink-snoop.c: example for modify data in video pipeline
* using appsink and appsrc.
*
* Based on the appsink-src.c example
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <gst/gst.h>
#include <string.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
typedef struct
{
GMainLoop *loop;
GstElement *source;
GstElement *sink;
} ProgramData;
/* user modify the data here */
static void
modify_in_data (GstMapInfo * map)
{
int dataLength;
guint8 *rdata;
dataLength = map->size;
rdata = map->data;
g_print ("%s dataLen = %d\n", __func__, dataLength);
/* Paint the first half of the frame plain white */
for (int i = 0; i < dataLength / 2; i++) {
rdata[i] = 0xff;
}
}
/* called when the appsink notifies us that there is a new buffer ready for
* processing */
static GstFlowReturn
on_new_sample_from_sink (GstElement * elt, ProgramData * data)
{
GstSample *sample;
GstBuffer *app_buffer, *buffer;
GstElement *source;
GstFlowReturn ret;
GstMapInfo map;
g_print ("%s\n", __func__);
/* get the sample from appsink */
sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
buffer = gst_sample_get_buffer (sample);
/* make a writable copy of the buffer */
app_buffer = gst_buffer_copy_deep (buffer);
gst_buffer_map (app_buffer, &map, GST_MAP_WRITE);
/* here you modify your buffer data */
modify_in_data (&map);
/* unmap before handing the buffer off */
gst_buffer_unmap (app_buffer, &map);
/* we don't need the appsink sample anymore */
gst_sample_unref (sample);
/* get the appsrc and push the new buffer; push_buffer takes ownership */
source = gst_bin_get_by_name (GST_BIN (data->sink), "testsource");
ret = gst_app_src_push_buffer (GST_APP_SRC (source), app_buffer);
gst_object_unref (source);
return ret;
}
/* called when we get a GstMessage from the source pipeline when we get EOS, we
* notify the appsrc of it. */
static gboolean
on_source_message (GstBus * bus, GstMessage * message, ProgramData * data)
{
GstElement *source;
g_print ("%s\n", __func__);
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_EOS:
g_print ("The source got dry\n");
source = gst_bin_get_by_name (GST_BIN (data->sink), "testsource");
gst_app_src_end_of_stream (GST_APP_SRC (source));
gst_object_unref (source);
break;
case GST_MESSAGE_ERROR:
g_print ("Received error\n");
g_main_loop_quit (data->loop);
break;
default:
break;
}
return TRUE;
}
/* called when we get a GstMessage from the sink pipeline when we get EOS, we
* exit the mainloop and this testapp. */
static gboolean
on_sink_message (GstBus * bus, GstMessage * message, ProgramData * data)
{
/* nil */
g_print ("%s\n", __func__);
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_EOS:
g_print ("Finished playback\n");
g_main_loop_quit (data->loop);
break;
case GST_MESSAGE_ERROR:
g_print ("Received error\n");
g_main_loop_quit (data->loop);
break;
default:
break;
}
return TRUE;
}
int
main (int argc, char *argv[])
{
ProgramData *data = NULL;
gchar *string = NULL;
GstBus *bus = NULL;
GstElement *testsink = NULL;
GstElement *testsource = NULL;
gst_init (&argc, &argv);
data = g_new0 (ProgramData, 1);
data->loop = g_main_loop_new (NULL, FALSE);
/* setting up source pipeline: we generate test video and convert it to our
* desired caps. */
string =
g_strdup_printf
("videotestsrc num-buffers=5 ! video/x-raw, width=640, height=480, format=RGB ! appsink name=testsink");
data->source = gst_parse_launch (string, NULL);
g_free (string);
if (data->source == NULL) {
g_print ("Bad source\n");
g_main_loop_unref (data->loop);
g_free (data);
return -1;
}
g_print ("Capture bin launched\n");
/* to be notified of messages from this pipeline, mostly EOS */
bus = gst_element_get_bus (data->source);
gst_bus_add_watch (bus, (GstBusFunc) on_source_message, data);
gst_object_unref (bus);
/* we use appsink in push mode, it sends us a signal when data is available
* and we pull out the data in the signal callback. */
testsink = gst_bin_get_by_name (GST_BIN (data->source), "testsink");
g_object_set (G_OBJECT (testsink), "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect (testsink, "new-sample",
G_CALLBACK (on_new_sample_from_sink), data);
gst_object_unref (testsink);
/* setting up sink pipeline: we push the video data into this pipeline, which
* then writes it to a file using filesink. We have no blocking behaviour on
* the src, which means that we may end up buffering the entire stream in
* memory. */
string =
g_strdup_printf ("appsrc name=testsource ! filesink location=tmp.yuv");
data->sink = gst_parse_launch (string, NULL);
g_free (string);
if (data->sink == NULL) {
g_print ("Bad sink\n");
gst_object_unref (data->source);
g_main_loop_unref (data->loop);
g_free (data);
return -1;
}
g_print ("Play bin launched\n");
testsource = gst_bin_get_by_name (GST_BIN (data->sink), "testsource");
/* configure for time-based format */
g_object_set (testsource, "format", GST_FORMAT_TIME, NULL);
/* uncomment the next line to block when appsrc has buffered enough */
/* g_object_set (testsource, "block", TRUE, NULL); */
gst_object_unref (testsource);
bus = gst_element_get_bus (data->sink);
gst_bus_add_watch (bus, (GstBusFunc) on_sink_message, data);
gst_object_unref (bus);
g_print ("Going to set state to play\n");
/* launching things */
gst_element_set_state (data->sink, GST_STATE_PLAYING);
gst_element_set_state (data->source, GST_STATE_PLAYING);
/* let's run! This loop will quit when the sink pipeline goes EOS or when an
* error occurs in the source or sink pipelines. */
g_print ("Let's run!\n");
g_main_loop_run (data->loop);
g_print ("Going out\n");
gst_element_set_state (data->source, GST_STATE_NULL);
gst_element_set_state (data->sink, GST_STATE_NULL);
gst_object_unref (data->source);
gst_object_unref (data->sink);
g_main_loop_unref (data->loop);
g_free (data);
return 0;
}
But when I replace "filesink" with "ximagesink" or "autovideosink" I get this error
on_sink_message
Received error on_sink_message
Error: Internal error: can't allocate images
Going out
segfault
I'm trying to get some sample video (videotestsrc or some RTSP video from the internet), modify it, and then show it in a window.
Any ideas?
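One possible direction (untested; the caps are an assumption mirroring the source branch): filesink accepts any bytes without negotiation, but a video sink has to negotiate caps with the appsrc, so setting explicit caps on the appsrc and adding a videoconvert may help, e.g.:
string = g_strdup_printf ("appsrc name=testsource caps=\"video/x-raw,format=RGB,width=640,height=480,framerate=30/1\" ! videoconvert ! autovideosink");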
I am trying to use the appsrc element of GStreamer in a trivial example. I am creating a buffer, filling it with dummy data and trying to send it to a fakesink. The code is a watered-down version of the tutorial given in the link below. It only has two elements, appsrc and fakesink. My code is also given below.
When I run this code I get "Error: Internal data stream error." I have searched for a solution, and as far as I can tell the issue is usually attributed to a caps mismatch between elements. I don't think that is the issue here, since I only have two elements.
I have tried setting the "caps" property of appsrc to NULL here, but I have also tried passing the proper caps, which were "audio/G729". Both have failed. Also, it seems like the appsrc is fine for the first 4 chunks but then it generates an error. It is always after 4 chunks. Not sure if that is a clue.
Also, I am running the code on an embedded system (ARM Cortex-A15), but I don't think that is related. I can successfully stream a G729-encoded audio file on this system via the following command:
gst-launch-1.0 -v filesrc location=encodedData.g729 ! 'audio/G729' ! rtpg729pay ! udpsink host=192.168.XX.XX auto-multicast=true port=5004
What could be the reason behind this error? How can I fix this?
Thanks for all the responses.
Link: Link to Tutorial
Code:
#include <gst/gst.h>
#include <glib.h>
#include "glibconfig.h"
#include <stdio.h>
#define CHUNK_SIZE 10
typedef struct gstreamstruct {
GstElement *pipeline, *app_source, *fakesink;
guint sourceid; /* To control the GSource */
GMainLoop *main_loop; /* GLib's Main Loop */
guint sample;
} gstreamstruct;
static gboolean busCall (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static gboolean pushData (gstreamstruct *streamer)
{
printf("--->PushData!\n");
GstMapInfo map;
GstBuffer *buffer;
GstFlowReturn ret;
guint8 *raw;
int i;
/* Create a new empty buffer */
buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
/* Set its timestamp and duration */
GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (streamer->sample, GST_SECOND, 1000);
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, 1000);
//Fill the buffer with dummy (zero) data
gst_buffer_map (buffer, &map, GST_MAP_WRITE);
raw = (guint8 *)map.data;
for(i = 0; i < CHUNK_SIZE; i++)
{
raw[i] = 0;
}
//update sample value
streamer->sample += CHUNK_SIZE;
printf("currentDuration: %u ms\n", streamer->sample);
gst_buffer_unmap (buffer, &map);
/* Push the buffer into the appsrc */
g_signal_emit_by_name (streamer->app_source, "push-buffer", buffer, &ret);
/* Free the buffer now that we are done with it */
gst_buffer_unref (buffer);
if (ret != GST_FLOW_OK)
{
/* We got some error, stop sending data */
printf("Data sending Failed!\n");
return FALSE;
}
return TRUE;
}
/* This signal callback triggers when appsrc needs data.
* Here, we add an idle handler to the mainloop to start pushing data into the appsrc
*
* Whenever the GLib main loop goes idle, it will call this function. Maybe we can utilize this for
* G729 etc!
*
* */
static void startFeed (GstElement *source, guint size, gstreamstruct *streamer)
{
if (streamer->sourceid == 0)
{
g_print ("Start feeding\n");
streamer->sourceid = g_idle_add ((GSourceFunc) pushData, streamer);
}
}
/* This callback triggers when appsrc has enough data and we can stop sending.
* We remove the idle handler from the mainloop */
static void stopFeed (GstElement *source, gstreamstruct *streamer)
{
if (streamer->sourceid != 0)
{
g_print ("Stop feeding\n");
g_source_remove (streamer->sourceid);
streamer->sourceid = 0;
}
}
void appSrcTest (void* args)
{
printf("---> appSrcTest\n");
gstreamstruct my_streamer;
GstCaps *caps;
GstBus *bus;
//GstPad *pad;
guint bus_watch_id;
memset (&my_streamer, 0, sizeof (gstreamstruct));
gst_init (NULL, NULL);
my_streamer.main_loop = g_main_loop_new (NULL, FALSE);
printf("Gst Initialized!\n");
my_streamer.sample = 0;
my_streamer.app_source = gst_element_factory_make("appsrc", "appSrc");
my_streamer.fakesink = gst_element_factory_make("fakesink", "fakeSink");
my_streamer.pipeline = gst_pipeline_new ("g729-pipeline");
if(!my_streamer.app_source || !my_streamer.fakesink || !my_streamer.pipeline)
{
g_printerr ("Not all elements could be created.\n");
return;
}
printf("Elements Created!\n");
caps=NULL;
/*
caps = gst_caps_new_simple ("audio/G729",
"channels", G_TYPE_INT, 1,
"rate", G_TYPE_INT, 8000,
NULL);
*/
//g_object_set (G_OBJECT(my_streamer.app_source), "caps", caps, "format", GST_FORMAT_TIME, NULL);
g_signal_connect (my_streamer.app_source, "need-data", G_CALLBACK (startFeed), &my_streamer);
g_signal_connect (my_streamer.app_source, "enough-data", G_CALLBACK (stopFeed), &my_streamer);
printf("Properties Set!\n");
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (my_streamer.pipeline));
bus_watch_id = gst_bus_add_watch (bus, busCall, my_streamer.main_loop);
gst_object_unref (bus);
gst_bin_add_many (GST_BIN (my_streamer.pipeline), my_streamer.app_source, my_streamer.fakesink, NULL);
printf("Elements Added!\n");
printf("Pipeline Starting!\n");
gst_element_set_state (my_streamer.pipeline, GST_STATE_PLAYING);
g_main_loop_run (my_streamer.main_loop);
gst_element_set_state (my_streamer.pipeline, GST_STATE_NULL);
gst_object_unref (my_streamer.pipeline);
g_source_remove (bus_watch_id);
g_main_loop_unref (my_streamer.main_loop);
}
The output from this code is generated as:
Gst Initialized!
Elements Created!
Properties Set!
Elements Added!
Pipeline Starting!
Start feeding
--->PushData!
currentDuration: 10 ms
--->PushData!
currentDuration: 20 ms
--->PushData!
currentDuration: 30 ms
--->PushData!
currentDuration: 40 ms
Error: Internal data stream error.
Edit: After more trials, I have realized that the error is not generated after 4 chunks consistently. When I reboot the system and call the function, the error is generated after 156 chunks, for instance. After a couple more tries, the error starts to occur much sooner (like 4 chunks). I have also tried running the code with GST_DEBUG=2 but could not really find anything useful. Below you can find the DEBUG output.
DEBUG:
---> appSrcTest
Gst Initialized!
Elements Created!
Properties Set!
Elements Added!
Pipeline Starting!
Start feeding
--->PushData!
currentDuration: 10 ms
--->PushData!
currentDuration: 20 ms
--->PushData!
currentDuration: 30 ms
--->PushData!
currentDuration: 40 ms
--->PushData!
0:00:00.084280528 1344 0x18fa00 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop:<appSrc> error: Internal data stream error.
currentDuration: 50 ms
--->PushData!
0:00:00.084342504 1344 0x18fa00 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop:<appSrc> error: streaming stopped, reason not-linked (-1)
currentDuration: 60 ms
--->PushData!
currentDuration: 70 ms
Error: Internal data stream error.
Edit 2: After further debugging I realized that the fakesink element was never linked to the appsrc. So I manually linked them via the following line:
gst_element_link_pads (my_streamer.app_source, "src", my_streamer.fakesink, "sink");
I think it works fine now, I will come back again after I verify it completely.
Edit 3: Yeah, I can confirm that was the issue. I forgot to link the elements.
I forgot to link the elements. The following line solves the issue.
gst_element_link_pads (my_streamer.app_source, "src", my_streamer.fakesink, "sink");
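Equivalently, since each element has only one pad here, plain gst_element_link (which links the first compatible pads it finds) does the same thing:
gst_element_link (my_streamer.app_source, my_streamer.fakesink);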
Don't know how, but this thing worked for me:
sudo apt remove gstreamer1.0-vaapi
I tried to send an RTP stream with GStreamer, but I found that the pipeline won't play at all. When I simplified my code, I found that whenever the udpsink element is added to the pipeline, the pipeline blocks and its status stays READY.
My code:
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink, *udp, *convert;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("videotestsrc", "source");
convert = gst_element_factory_make ("videoconvert", "convert");
sink = gst_element_factory_make ("autovideosink", "sink");
udp = gst_element_factory_make ("udpsink", "udp");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, convert, /*udp,*/ NULL);
gst_element_link_many (source, convert, sink, NULL);
/* Modify the source's properties */
g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
bus = gst_element_get_bus (pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
case GST_MESSAGE_EOS:
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s\n", gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
// ...
}
As you can see, the pipeline works fine if the udpsink is not added. The same thing happens on the command line:
gst-launch-1.0 -v udpsink videotestsrc ! videoconvert ! autovideosink
The command above will popup a window and the video stops at the first frame.
I don't know what's wrong with my code. Can anyone help? Thanks!
A pipeline branch ends with a sink element (e.g. autovideosink); you cannot add anything after it. That is why autovideosink has no src pad and you cannot link it to udpsink. You need a second branch that feeds udpsink. To create branches you can use the tee element, with a queue at the start of each branch to give it its own streaming thread.
You can find more here (GStreamer Basic tutorial 7: Multithreading and Pad Availability)
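As a sketch, a two-branch pipeline of that shape would look something like this on the command line (the H.264 encoder and payloader in the second branch are just an assumption for illustration):
gst-launch-1.0 videotestsrc ! videoconvert ! tee name=t \
t. ! queue ! autovideosink \
t. ! queue ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000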
I'm a newbie to GStreamer and I want to get both the audio and video buffers from a 3gp file and do some processing on them in callbacks.
(I'm starting my pipeline in a separate thread; the pipeline delivers audio buffers in a callback AudioCallback and video buffers in VideoCallback.)
This is how my pipeline looks:
GstElement* audioQueue; //global variable, needed in on_pad_added (can't pass both queues when connecting the demuxer callback)
GstElement* videoQueue; //global variable, needed in on_pad_added (can't pass both queues when connecting the demuxer callback)
//static functions
static gboolean
bus_call (GstBus* bus, GstMessage* msg, gpointer data)
{
GMainLoop* loop = (GMainLoop*) data;
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_EOS:
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void link_two_elements(GstElement* src_element, GstElement* sink_element)
{
if(!gst_element_link(src_element, sink_element))
g_printerr ("Linking Error");
}
static void
on_pad_added (GstElement *element,
GstPad *pad,
gpointer data)
{
GstCaps *caps;
GstStructure *str;
gchar *tex;
GstPad* sinkpad;
/* check media type */
caps = gst_pad_get_caps (pad);
str = gst_caps_get_structure (caps, 0);
tex = (gchar*)gst_structure_get_name(str);
if(g_strrstr(tex,"audio"))
{
//GstElement *audioQueue = (GstElement *) data;
sinkpad = gst_element_get_static_pad (audioQueue, "sink");
if(sinkpad)
{
GstPadLinkReturn linkReturn = gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
}
if(g_strrstr(tex,"video"))
{
//GstElement *videoQueue = (GstElement *) data;
sinkpad = gst_element_get_static_pad (videoQueue, "sink");
GstPadLinkReturn linkReturn = gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
}
void runPipeline()
{
GMainLoop *loop;
GstElement *__pPipeline, *source, *demuxer, *audioDecoder, *audioConverter, *audioresample, /**audioQueue,*/ *audioSink, *videoDecoder, *videoSink, /**videoQueue,*/ *ffmpegcolorspace, *videoscale;
GstBus* bus;
//Initialisation
gst_init (NULL, NULL);
loop = g_main_loop_new (NULL, FALSE);
// Create gstreamer elements
__pPipeline = gst_pipeline_new("test_appsink");
source = gst_element_factory_make ("filesrc", "file-source");
demuxer = gst_element_factory_make("qtdemux", "demuxer");
//audioDecoder = gst_element_factory_make("ffdec_mp3", "audioDecoder");
audioDecoder = gst_element_factory_make("decodebin", "audioDecoder");
audioConverter = gst_element_factory_make("audioconvert", "audioConverter");
audioresample = gst_element_factory_make("audioresample", "audioresample");
audioSink = gst_element_factory_make("appsink", "audioSink");
audioQueue = gst_element_factory_make("queue2", "audioQueue");
//videoDecoder = gst_element_factory_make("ffdec_h264", "videoDecoder");
videoQueue = gst_element_factory_make("queue2", "videoQueue");
videoDecoder = gst_element_factory_make("decodebin ", "videoDecoder");
ffmpegcolorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace");
videoscale = gst_element_factory_make("videoscale", "videoscale");
videoSink = gst_element_factory_make("appsink", "videoSink");
//appsink = gst_element_factory_make("appsink", "sink-buffer");
if (!__pPipeline || !source || !demuxer || !audioDecoder || !audioConverter ||!audioresample || !audioSink || !videoSink || !audioQueue || !videoQueue || !videoDecoder || !ffmpegcolorspace || !videoscale )
{
//return -1;
}
//we set the input filename to the source element
g_object_set (G_OBJECT (source), "location", "/etc/20000101-161404.3gp", NULL);
//Make appsink emit the "new-preroll" and "new-buffer" signals.
gst_app_sink_set_emit_signals ((GstAppSink*) audioSink, TRUE);
gst_app_sink_set_emit_signals ((GstAppSink*) videoSink, TRUE);
//we add a message handler
bus = gst_pipeline_get_bus (GST_PIPELINE (__pPipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
//we add all elements into the pipeline
gst_bin_add_many (GST_BIN (__pPipeline),
source, demuxer, videoDecoder, audioDecoder, audioConverter, audioresample, audioSink, videoSink,
audioQueue, videoQueue, ffmpegcolorspace, videoscale, NULL);
//link source and demuxer seperately
link_two_elements(source, demuxer);
//link rest of the elements
int retValVideoLinking = (int)gst_element_link_many (videoQueue, videoDecoder, ffmpegcolorspace, videoscale, videoSink, NULL);
int retValAudioLinking = (int)gst_element_link_many (audioQueue, audioDecoder, audioConverter, audioresample, audioSink, NULL);
gulong sigConRet = g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), NULL);
_ArAudioIn audioInstance = _ArAudioIn::GetArAudioInstance();
g_signal_connect (videoSink, "new-buffer", G_CALLBACK (AudioCallback), NULL);//AudioCallback static API
g_signal_connect (audioSink, "new-buffer", G_CALLBACK (VideoCallback), NULL);//VideoCallback static API
//Set the pipeline to "playing" state
GstStateChangeReturn state = gst_element_set_state (__pPipeline, GST_STATE_PLAYING);
g_main_loop_run (loop);
}
I'm only getting a single video buffer in my VideoCallback, and in on_pad_added I'm getting a linking error for the audio pad:
GST_PAD_LINK_NOFORMAT = -4
I'm trying to link the queue's sink pad to the pad received in on_pad_added; the same approach works for video but not for audio.
If anybody has any idea about this, please give me some pointers to get rid of this error and make the pipeline work.
It would be nice if you cleaned up your code before asking us to debug it. As general advice, check the return values and either log a warning or simply exit(1) to ensure that your pipeline setup works (e.g. in the pad-added handler). I'd also start with a normal video sink and audio sink to check that the file plays at all.
Finally, it is usually a bad idea to pull data out of the pipeline. Perhaps you could tell us what you want to do with the data once you have it in your callback, so that we can give better advice.
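For example, the kind of checking I mean in the pad-added handler, as a minimal sketch (it reuses the global audioQueue from your code and needs <stdlib.h> for exit; the media-type check is elided, since this only illustrates the error handling):
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
GstPad *sinkpad = gst_element_get_static_pad (audioQueue, "sink");
GstPadLinkReturn ret = gst_pad_link (pad, sinkpad);
if (GST_PAD_LINK_FAILED (ret)) {
g_warning ("pad link failed: %d", ret); /* log instead of failing silently */
exit (1); /* fail fast while debugging the pipeline setup */
}
gst_object_unref (sinkpad);
}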
After playing around with some toy applications, exploring the documentation and googling around (including the mailing list archives), I am still puzzled by what I would think is a rather common use case.
I have existing code that generates images (in memory) and I would like to push these images into a GStreamer pipeline (to create an FLV video at the end).
I could not find an "obvious way to do it". My best guess would be to dig into the source code of GstMultiFileSrc and its parent GstPushSrc to figure it out.
Could any of you point me to the "obvious way" of doing this? Is there any related documentation/tutorial/example on this?
Once I have the input right, the rest is a piece of cake, thanks to GStreamer awesomeness!
(Something like: my magic input -> ffmpegcolorspace ! ffenc_flv ! flvmux ! filesink location=desktop.flv)
Thanks for your answers.
GStreamer uses plugins to do everything. Plugins that create data or take it from an external source are called "src" plugins.
The generic src plugin for injecting application-generated data into a pipeline is called appsrc. The API provided by appsrc is documented as part of the App Library.
Here's one example that demonstrates feeding appsrc with generated images: gdk-gstappsrc-stream.c. It seems to be derived from some test code in the GStreamer source tree: here.
Another approach would be to create your own src plugin. Look at the goom music visualization plugin for an example that seems to work in a way similar to what you have specified.
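In its simplest form, pushing one application-generated buffer into appsrc looks roughly like this (a 0.10-style sketch to match the ffmpegcolorspace pipeline above; appsrc is your appsrc element, and image_data and size are placeholders for your generated image):
GstFlowReturn ret;
GstBuffer *buf = gst_buffer_new_and_alloc (size);
memcpy (GST_BUFFER_DATA (buf), image_data, size); /* copy your image into the buffer */
g_signal_emit_by_name (appsrc, "push-buffer", buf, &ret); /* the signal takes its own ref */
gst_buffer_unref (buf);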
I found a (possible) solution to this (I get the images with OpenCV), but I have an error with the pipeline: ERROR from element mysource: Error en el flujo de datos interno. [internal data stream error]
Debugging info: gstbasesrc.c(2574): gst_base_src_loop (): /GstPipeline:pipeline0/GstAppSrc:mysource:
streaming task paused, reason not-negotiated (-4)
This is the code:
typedef struct _App App;
struct _App{
GstElement *pipeline;
GstElement *appsrc;
GMainLoop *loop;
guint sourceid;
GTimer *timer;
};
App s_app;
CvCapture *capture;
static gboolean read_data(App *app){
GstFlowReturn ret;
GstBuffer *buffer = gst_buffer_new();
IplImage* frame = cvQueryFrame(capture);
GST_BUFFER_DATA(buffer) = (uchar*)frame->imageData;
GST_BUFFER_SIZE(buffer) = frame->width*frame->height*3; /* 3 bytes per pixel at 24 bpp; sizeof(uchar*) was the pointer size, not the pixel size */
g_signal_emit_by_name(app->appsrc,"push-buffer",buffer,&ret);
gst_buffer_unref(buffer);
if(ret != GST_FLOW_OK){
GST_DEBUG("Error al alimentar buffer");
return FALSE;
}
return TRUE;
}
static void start_feed(GstElement* pipeline,guint size, App* app){
if(app->sourceid == 0){
GST_DEBUG("Alimentando");
app->sourceid = g_idle_add((GSourceFunc) read_data, app);
}
}
static void stop_feed(GstElement* pipeline, App* app){
if(app->sourceid !=0 ){
GST_DEBUG("Stop feeding");
g_source_remove(app->sourceid);
app->sourceid = 0;
}
}
static gboolean
bus_message (GstBus * bus, GstMessage * message, App * app)
{
GST_DEBUG ("got message %s",
gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
GError *err = NULL;
gchar *dbg_info = NULL;
gst_message_parse_error (message, &err, &dbg_info);
g_printerr ("ERROR from element %s: %s\n",
GST_OBJECT_NAME (message->src), err->message);
g_printerr ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
g_error_free (err);
g_free (dbg_info);
g_main_loop_quit (app->loop);
break;
}
case GST_MESSAGE_EOS:
g_main_loop_quit (app->loop);
break;
default:
break;
}
return TRUE;
}
int main(int argc, char* argv[]){
App *app = &s_app;
GError *error = NULL;
GstBus *bus;
GstCaps *caps;
capture = cvCaptureFromCAM(0);
gst_init(&argc,&argv);
/* create a mainloop to get messages and to handle the idle handler that will
* feed data to appsrc. */
app->loop = g_main_loop_new (NULL, TRUE);
app->timer = g_timer_new();
app->pipeline = gst_parse_launch("appsrc name=mysource ! video/x-raw-rgb,width=640,height=480,bpp=24,depth=24 ! ffmpegcolorspace ! videoscale method=1 ! theoraenc bitrate=150 ! tcpserversink host=127.0.0.1 port=5000", NULL);
g_assert (app->pipeline);
bus = gst_pipeline_get_bus (GST_PIPELINE (app->pipeline));
g_assert(bus);
/* add watch for messages */
gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);
/* get the appsrc */
app->appsrc = gst_bin_get_by_name (GST_BIN(app->pipeline), "mysource");
g_assert(app->appsrc);
g_assert(GST_IS_APP_SRC(app->appsrc));
g_signal_connect (app->appsrc, "need-data", G_CALLBACK (start_feed), app);
g_signal_connect (app->appsrc, "enough-data", G_CALLBACK (stop_feed), app);
/* set the caps on the source */
caps = gst_caps_new_simple ("video/x-raw-rgb",
"bpp",G_TYPE_INT,24,
"depth",G_TYPE_INT,24,
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
NULL);
gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps);
/* go to playing and wait in a mainloop. */
gst_element_set_state (app->pipeline, GST_STATE_PLAYING);
/* this mainloop is stopped when we receive an error or EOS */
g_main_loop_run (app->loop);
GST_DEBUG ("stopping");
gst_element_set_state (app->pipeline, GST_STATE_NULL);
gst_object_unref (bus);
g_main_loop_unref (app->loop);
cvReleaseCapture(&capture);
return 0;
}
Any idea???
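(Not a confirmed fix, but one thing to check: in 0.10, a not-negotiated error from appsrc is often caused by under-specified raw video caps. video/x-raw-rgb normally also carries endianness, the channel masks and a framerate, so fully specified caps would look something like the sketch below; the mask values assume RGB byte order and may need swapping for OpenCV's BGR frames.)
caps = gst_caps_new_simple ("video/x-raw-rgb",
"bpp", G_TYPE_INT, 24,
"depth", G_TYPE_INT, 24,
"endianness", G_TYPE_INT, G_BIG_ENDIAN,
"red_mask", G_TYPE_INT, 0xff0000,
"green_mask", G_TYPE_INT, 0x00ff00,
"blue_mask", G_TYPE_INT, 0x0000ff,
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);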
You might try hacking imagefreeze to do what you want. appsrc might also do it.