OpenCV Mat as appsrc to srtclientsink - C++

I am trying to use an OpenCV Mat as an appsrc in my pipeline and push it via SRT to a local server, but no window opens to play the video stream.
My system is macOS 10.14 with GStreamer 1.15.
The pipeline consists of the following elements:
appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtclientsink
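For reference, this chain can be approximated with gst-launch before involving any application code (a sketch reusing the elements and properties from the code below; untested):
gst-launch-1.0 videotestsrc ! video/x-raw,format=I420,width=640,height=480,framerate=25/1 ! videoconvert ! x264enc bitrate=256 tune=zerolatency ! mpegtsmux ! srtclientsink uri=srt://127.0.0.1:8888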
I want to receive the SRT stream and show it with the following command:
gst-launch-1.0 srtserversrc uri=srt://:8888 ! decodebin3 ! autovideosink
In the debug logs, it says:
GST_BUFFER gstbuffer.c:445:void _memory_add(GstBuffer *, gint, GstMemory *): buffer 0x7fd1aca38500, idx -1, mem 0x7fd1aca3a2b0
0:00:08.150919000 974 0x7fd1ac864b20 DEBUG tsdemux tsdemux.c:2980:gst_ts_demux_push_pending_data: Not enough information to push buffers yet, storing buffer
0:00:08.150931000 974 0x7fd1ac864b20 LOG tsdemux tsdemux.c:3098:gst_ts_demux_push_pending_data: Resetting to EMPTY, returning ok
0:00:08.150942000 974 0x7fd1ac864b20 LOG mpegtspacketizer mpegtspacketizer.c:689:mpegts_packetizer_flush_bytes: flushing 564 bytes from adapter
0:00:08.151214000 974 0x7fd1ac864b20 LOG adapter gstadapter.c:634:void gst_adapter_flush_unchecked(GstAdapter *, gsize): flushing 564 bytes
0:00:08.151234000 974 0x7fd1ac864b20 LOG adapter gstadapter.c:572:void gst_adapter_unmap(GstAdapter *): unmap memory buffer 0x7fd1aca383f0
0:00:08.151247000 974 0x7fd1ac864b20 LOG adapter gstadapter.c:655:void gst_adapter_flush_unchecked(GstAdapter *, gsize): flushing out head buffer
So I assume there is a problem with the demuxer, maybe because I only send video data and no audio data. Without the mpegtsmux in my code, however, I get the error that the payload size exceeds the maximum of 1316 bytes allowed by the SRT protocol (1316 bytes is seven 188-byte MPEG-TS packets, which is why the stream is muxed to MPEG-TS before the sink).
Here is the code:
main.cpp
#include <iostream>
#include <string>
#include <mutex>
#include <thread>
#include <time.h>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/types_c.h>
#include <opencv2/highgui/highgui.hpp>
#include <gstreamer-1.0/gst/gstelement.h>
#include <gstreamer-1.0/gst/gstpipeline.h>
#include <gstreamer-1.0/gst/gstutils.h>
#include <gstreamer-1.0/gst/app/gstappsrc.h>
#include <gstreamer-1.0/gst/base/gstbasesrc.h>
#include <gstreamer-1.0/gst/video/video.h>
#include <gstreamer-1.0/gst/gst.h>
#include <gstreamer-1.0/gst/check/gstbufferstraw.h>
#include <glib.h>
#define GST_CAT_DEFAULT appsrc_pipeline_debug
GST_DEBUG_CATEGORY(appsrc_pipeline_debug);
using namespace std;
/*
* bus: simple system for forwarding messages from streaming threads to app in own thread context
* pad: connection point through which data enters or leaves an element
* caps: description of the media type flowing between two linked pads
* signal: GObject notification emitted by an element (e.g. "need-data")
* callback: function invoked when a signal fires
*
*/
static std::mutex m;
GMainLoop *loop;
typedef struct _App App;
struct _App {
GstElement *videoenc;
GstElement *appsrc;
GstElement *videoconvert;
GstElement *sink;
guint sourceid;
GstElement *mpegts;
};
App s_app;
int counter = 0;
static gboolean cb_need_data(App *app) {
static GstClockTime timestamp = 0;
GstBuffer *buffer;
guint buffersize;
GstFlowReturn ret;
GstMapInfo info;
counter++;
m.lock();
cv::Mat image_mat = cv::imread("./../data/squat.jpg");
cv::Mat resized_mat;
cv::resize(image_mat, resized_mat, cv::Size(640, 480));
buffersize = guint(resized_mat.cols * resized_mat.rows * resized_mat.channels());
buffer = gst_buffer_new_and_alloc(buffersize);
// copy from the resized frame, which is what buffersize was computed from
uchar *img_data = resized_mat.data;
m.unlock();
if (gst_buffer_map(buffer, &info, (GstMapFlags) GST_MAP_WRITE)) {
memcpy(info.data, img_data, buffersize);
gst_buffer_unmap(buffer, &info);
} else {
g_print("error mapping buffer");
}
// appsrc runs in GST_FORMAT_TIME, so every buffer needs a timestamp and duration
GST_BUFFER_PTS(buffer) = timestamp;
GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 25); // matches the 25/1 caps
timestamp += GST_BUFFER_DURATION(buffer);
g_signal_emit_by_name(app->appsrc, "push-buffer", buffer, &ret);
gst_buffer_unref(buffer);
if (ret != GST_FLOW_OK) {
GST_DEBUG ("something wrong in cb_need_data");
g_main_loop_quit(loop);
return FALSE;
}
return TRUE;
}
static void start_feed(GstElement *pipeline, guint size, App *app) {
if (app->sourceid == 0) {
app->sourceid = g_timeout_add(67, (GSourceFunc) cb_need_data, app);
}
}
static void stop_feed(GstElement *pipeline, App *app) {
if (app->sourceid != 0) {
g_source_remove(app->sourceid);
app->sourceid = 0;
}
}
static gboolean bus_call(GstBus *bus, GstMessage *message, gpointer data) {
GError *err = nullptr;
gchar *dbg_info = nullptr;
GST_DEBUG ("got message %s", gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
gst_message_parse_error(message, &err, &dbg_info);
g_printerr("ERROR from element %s: %s\n",
GST_OBJECT_NAME (message->src), err->message);
g_printerr("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
g_error_free(err);
g_free(dbg_info);
g_main_loop_quit(loop);
break;
}
case GST_MESSAGE_EOS:
g_main_loop_quit(loop);
break;
default:
break;
}
return TRUE;
}
void startStream() {
App *app = &s_app;
GstCaps *caps2;
GstCaps *caps3;
GstBus *bus;
GstElement *pipeline;
gst_init(nullptr, nullptr);
loop = g_main_loop_new(nullptr, TRUE);
/*
* pipeline elements:
* appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtsink
*/
// create pipeline
pipeline = gst_pipeline_new("gstreamer-encoder");
if (!pipeline) {
g_print("Error creating pipeline");
}
// create appsrc element
app->appsrc = gst_element_factory_make("appsrc", "appsrc");
if (!app->appsrc) {
g_print("Error creating appsrc");
}
// create videoconvert element
app->videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
if (!app->videoconvert) {
g_print("Error creating videoconvert element");
}
// create videoencoder element
app->videoenc = gst_element_factory_make("x264enc", "encoder");
if (!app->videoenc) {
g_print("Error creating encoder");
}
app->mpegts = gst_element_factory_make("mpegtsmux", "mpegtsmux");
if (!app->mpegts) {
g_print("Error creating mpegtsmuxer");
}
app->sink = gst_element_factory_make("srtclientsink", "sink");
if (!app->sink) {
g_print("Error creating sink");
}
g_print("Elements are created\n");
g_object_set(G_OBJECT(app->sink), "uri", "srt://127.0.0.1:8888", nullptr);
g_object_set(G_OBJECT(app->sink), "msg-size", 1316, nullptr);
g_object_set(G_OBJECT(app->sink), "latency", 120, nullptr);
g_object_set(G_OBJECT(app->videoenc), "bitrate", 256, nullptr);
g_print("End of settings\n");
// cv::imread delivers BGR pixels, so advertise BGR (not RGB) to match the pushed data
caps2 = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "BGR",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 25, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
nullptr);
gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps2);
g_object_set(G_OBJECT (app->appsrc), "stream-type", 0, "format", GST_FORMAT_TIME, nullptr);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
g_assert(bus);
gst_bus_add_watch(bus, (GstBusFunc) bus_call, app);
gst_bin_add_many(GST_BIN(pipeline), app->appsrc, app->videoconvert, app->videoenc,
app->mpegts, app->sink, nullptr);
g_print("Added all the elements to the pipeline\n");
// mpegtsmux must be part of the chain, otherwise the encoder feeds the sink directly
gboolean ok = gst_element_link_many(app->appsrc, app->videoconvert, app->videoenc,
app->mpegts, app->sink, nullptr);
if (ok)
g_print("Linked all elements together\n");
else
g_print("Linking error\n");
g_assert(app->appsrc);
g_assert(GST_IS_APP_SRC(app->appsrc));
g_signal_connect(app->appsrc, "need-data", G_CALLBACK(start_feed), app);
g_signal_connect(app->appsrc, "enough-data", G_CALLBACK(stop_feed), app);
g_print("Playing the video\n");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Running...\n");
g_main_loop_run(loop);
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(bus);
g_main_loop_unref(loop);
g_print("Deleting pipeline\n");
}
int main(int argc, char **argv) {
startStream();
return 0;
}
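As an aside, instead of stamping buffers by hand in cb_need_data, appsrc can be asked to timestamp incoming buffers itself; a minimal sketch (is-live and do-timestamp are standard appsrc/basesrc properties):
g_object_set(G_OBJECT(app->appsrc),
"is-live", TRUE,       // behave like a live source
"do-timestamp", TRUE,  // stamp buffers with the pipeline running time
nullptr);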
CMakeLists.txt
cmake_minimum_required(VERSION 3.13)
project(opencv_gstreamer)
set(CMAKE_CXX_STANDARD 14)
find_package(PkgConfig REQUIRED)
pkg_search_module(OPENCV opencv4 REQUIRED)
pkg_search_module(GSTREAMER gstreamer-1.0 REQUIRED)
pkg_search_module(APP_GSTREAMER gstreamer-app-1.0 REQUIRED)
pkg_search_module(SRT srt REQUIRED)
pkg_search_module(GLIB glib-2.0 REQUIRED)
include_directories(
${OPENCV_INCLUDE_DIRS}
${GSTREAMER_INCLUDE_DIRS}
${APP_GSTREAMER_INCLUDE_DIRS}
${GLIB_INCLUDE_DIRS}
${SRT_INCLUDE_DIRS})
link_directories(
${OPENCV_LIBRARY_DIRS}
${GSTREAMER_LIBRARY_DIRS}
${APP_GSTREAMER_LIBRARY_DIRS}
${GLIB_LIBRARY_DIRS}
${SRT_LIBRARY_DIRS})
link_libraries(
${OPENCV_LDFLAGS}
pthread
${GSTREAMER_LDFLAGS}
${APP_GSTREAMER_LDFLAGS}
${GLIB_LDFLAGS}
${SRT_LDFLAGS})
add_compile_options(
${OPENCV_CFLAGS}
${GSTREAMER_CFLAGS}
${APP_GSTREAMER_CFLAGS}
${GLIB_CFLAGS}
${SRT_CFLAGS})
add_executable(opencv_gstreamer src/main.cpp)
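For completeness, a typical out-of-source build with this CMakeLists.txt (the standard CMake workflow; nothing project-specific is assumed):
mkdir build && cd build
cmake ..
make
./opencv_gstreamer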

Related

How to feed an OpenCV frame into an Nvidia DeepStream pipeline?

I am struggling to find a way to input a single cv::Mat frame into an Nvidia DeepStream pipeline using C++. I tried the code below but I received the following error message:
ERROR from element gstappsrc: Internal data stream error.
Error details: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:dst_opencv/GstAppSrc:source:
streaming stopped, reason not-negotiated (-4)
Returned, stopping playback
Deleting pipeline
If anyone has an idea how to do it, or can show me where I am going wrong, I will be very thankful.
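One likely source of the not-negotiated error: the appsrc caps below advertise I420 at 1200x600, while cv::imread yields packed BGR at the image's native size. A sketch of converting the frame to match the advertised caps before pushing (cv::COLOR_BGR2YUV_I420 is OpenCV's packed-BGR-to-planar-I420 conversion; an I420 frame occupies width*height*3/2 bytes):
cv::Mat bgr = cv::imread((const char *) user_data, CV_LOAD_IMAGE_COLOR);
cv::resize(bgr, bgr, cv::Size(1200, 600));        // match the caps' width/height
cv::Mat i420;
cv::cvtColor(bgr, i420, cv::COLOR_BGR2YUV_I420);  // packed BGR -> planar I420
gsize size = 1200 * 600 * 3 / 2;                  // I420 is 12 bits per pixel
GstBuffer *buf = gst_buffer_new_allocate(NULL, size, NULL);
gst_buffer_fill(buf, 0, i420.data, size);         // copies without a manual map/unmap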
#include <gst/gst.h>
#include <glib.h>
#include <math.h>
#include <string.h>
#include <sys/time.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include "gstnvdsmeta.h"
#include "nvdsmeta_schema.h"
#include <gst/app/gstappsrc.h>
/* The muxer output resolution must be set if the input streams will be of
* different resolution. The muxer will scale all the input frames to this
* resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
#define TILED_OUTPUT_WIDTH 1920
#define TILED_OUTPUT_HEIGHT 1080
/* Muxer batch formation timeout, for e.g. 40 millisec. Should ideally be set
* based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 4000000
/* NVIDIA Decoder source pad memory feature. This feature signifies that source
* pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
// detection models
#define MODEL_CONFIG "dstest3_pgie_config.txt"
//#define MODEL_CONFIG "yoloV2_pgie_config.txt"
//#define MODEL_CONFIG "fd_lpd_config.txt"
#define FPS_PRINT_INTERVAL 300
static gboolean bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_WARNING:
{
gchar *debug;
GError *error;
gst_message_parse_warning (msg, &error, &debug);
g_printerr ("WARNING from element %s: %s\n",
GST_OBJECT_NAME (msg->src), error->message);
g_free (debug);
g_printerr ("Warning: %s\n", error->message);
g_error_free (error);
break;
}
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_printerr ("ERROR from element %s: %s\n",
GST_OBJECT_NAME (msg->src), error->message);
if (debug)
g_printerr ("Error details: %s\n", debug);
g_free (debug);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
//-------------------------------------------------------
static void cb_newpad (GstElement * decodebin, GstPad * decoder_src_pad, gpointer data)
{
g_print ("In cb_newpad\n");
GstCaps *caps = gst_pad_get_current_caps (decoder_src_pad);
const GstStructure *str = gst_caps_get_structure (caps, 0);
const gchar *name = gst_structure_get_name (str);
GstElement *source_bin = (GstElement *) data;
GstCapsFeatures *features = gst_caps_get_features (caps, 0);
/* Need to check if the pad created by the decodebin is for video and not
* audio. */
if (!strncmp (name, "video", 5)) {
/* Link the decodebin pad only if decodebin has picked nvidia
* decoder plugin nvdec_*. We do this by checking if the pad caps contain
* NVMM memory features. */
if (gst_caps_features_contains (features, GST_CAPS_FEATURES_NVMM)) {
/* Get the source bin ghost pad */
GstPad *bin_ghost_pad = gst_element_get_static_pad (source_bin, "src");
if (!gst_ghost_pad_set_target (GST_GHOST_PAD (bin_ghost_pad),
decoder_src_pad)) {
g_printerr ("Failed to link decoder src pad to source bin ghost pad\n");
}
gst_object_unref (bin_ghost_pad);
} else {
g_printerr ("Error: Decodebin did not pick nvidia decoder plugin.\n");
}
}
}
//-------------------------------------------------------
static void decodebin_child_added (GstChildProxy * child_proxy, GObject * object,gchar * name, gpointer user_data)
{
g_print ("Decodebin child added: %s\n", name);
if (g_strrstr (name, "decodebin") == name) {
g_signal_connect (G_OBJECT (object), "child-added",
G_CALLBACK (decodebin_child_added), user_data);
}
if (g_strstr_len (name, -1, "nvv4l2decoder") == name) {
g_print ("Seting bufapi_version\n");
g_object_set (object, "bufapi-version", TRUE, NULL);
}
}
//-------------------------------------------------------
void buffer_destroy(gpointer data) {cv::Mat* done = (cv::Mat*)data; delete done;}
//-----------------------------------------------------
static gboolean cb_need_data(GstElement* appsrc,guint unused_size,gpointer user_data)
{
g_print("cb_need_data function \n");
GstBuffer* buffer;
GstMapInfo map;
guint size,depth,height,width,step,channels;
GstFlowReturn ret;
guchar *data1;
g_print("userdata: %s \n",user_data);
cv::Mat frame=cv::imread((const char*)user_data, CV_LOAD_IMAGE_COLOR);
height = frame.size().height;
width = frame.size().width;
channels = frame.channels();
data1 = (guchar *)frame.data;
gsize sizeInBytes = height*width*channels;
g_print("frame_height: %d \n",height);
g_print("frame_width: %d \n",width);
g_print("frame_channels: %d \n",channels);
g_print("frame_size: %d \n",sizeInBytes);
buffer=gst_buffer_new_allocate(NULL,sizeInBytes,NULL);
gst_buffer_map(buffer,&map,GST_MAP_WRITE);
memcpy( (guchar *)map.data, data1, gst_buffer_get_size( buffer ) );
g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
if (ret != GST_FLOW_OK) {g_print("cv 2 gst got an error"); return false;}
gst_buffer_unref(buffer);
//gst_buffer_unmap (buffer, &map);
g_print("cv converted to gst \n ");
return true;
}
//-------------------------------------------------------
static GstPadProbeReturn tiler_src_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info,gpointer u_data)
{
char *msg;
g_object_get(G_OBJECT(u_data),"last-message",&msg,NULL);
if (msg!=NULL) {g_print("FPS =%s \n",msg);}
return GST_PAD_PROBE_OK;
}
//-------------------------------------------------------
//------------------MAIN---------------------------------
//-------------------------------------------------------
int main(int argc,char** argv)
{
GMainLoop *loop;
GstElement *pipeline,*sink,*tiler,*nvvidconv,*nvosd,*nvsink,*pgie; //,*streammux
GstElement* appsrc,*conv;
GstBus *bus;
guint bus_watch_id;
GstPad *tiler_src_pad;
guint num_sources;
guint tiler_rows,tiler_columns;
guint pgie_batch_size;
GstCaps *caps;
//check input args
if(argc <2) {g_printerr("Usage: %s <uri1> [uri2] ... [uriN] \n", argv[0]); return -1;}
num_sources=argc-1;
//start gstreamer
gst_init(&argc,&argv);
loop=g_main_loop_new(NULL,FALSE);
//Creating pipeline
pipeline=gst_pipeline_new("dst_opencv");
//streammux=gst_element_factory_make("nvstreammux","nvstream-muxer");
if(!pipeline){g_printerr("pipeline could not be created");}
//if(!streammux){g_printerr("Streammux could not be created");}
//gst_bin_add(GST_BIN(pipeline),streammux);
// Creating bin with all sources
appsrc=gst_element_factory_make("appsrc","gstappsrc");
conv=gst_element_factory_make("videoconvert","conv");
g_object_set (G_OBJECT (appsrc), "caps",
gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, 1200,
"height", G_TYPE_INT, 600,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
NULL), NULL);
g_object_set(G_OBJECT(appsrc),"stream-type",0,"format",GST_FORMAT_TIME,NULL);
/* Use nvinfer to infer on batched frame. */
pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
/* Use nvtiler to composite the batched frames into a 2D tiled array based
* on the source of the frames. */
tiler = gst_element_factory_make ("nvmultistreamtiler", "nvtiler");
nvvidconv=gst_element_factory_make ("nvvideoconvert","nvvideo-converter");
/* Use convertor to convert from NV12 to RGBA as required by nvosd */
// nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
nvosd=gst_element_factory_make("nvdsosd","nv-onscreendisplay");
nvsink=gst_element_factory_make ("nveglglessink", "nvvideo-renderer"); //show on display
//nvsink=gst_element_factory_make("fakesink","nvvideo-render"); //Dont show frames on screen
sink=gst_element_factory_make("fpsdisplaysink","fps_display");
//sink=gst_element_factory_make("autovideosink","videosink");
//check if all plugin were created
if(!appsrc){g_printerr("appsrc could not be created"); return -1;}
if(!conv){g_printerr("conv could not be created"); return -1;}
if(!tiler){g_printerr("tiler could not be created"); return -1;}
if(!sink){g_printerr("sink could not be created"); return -1;}
if(!nvvidconv){g_printerr("nvvidconv could not be created"); return -1;}
if(!pgie){g_printerr("pgie could not be created"); return -1;}
if(!nvosd){g_printerr("nvosd could not be created"); return -1;}
//set streammux
/* Configure the nvinfer element using the nvinfer config file. */
g_object_set (G_OBJECT (pgie),"config-file-path", MODEL_CONFIG, NULL);
/* Override the batch-size set in the config file with the number of sources. */
g_object_get (G_OBJECT (pgie), "batch-size", &pgie_batch_size, NULL);
if (pgie_batch_size != num_sources) {
g_printerr("WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n",pgie_batch_size, num_sources);
g_object_set (G_OBJECT (pgie), "batch-size", num_sources, NULL);}
//g_print("Flag \n");
//set tiler
tiler_rows = (guint) sqrt (num_sources);
tiler_columns = (guint) ceil (1.0 * num_sources / tiler_rows);
/* we set the tiler properties here */
g_object_set (G_OBJECT (tiler), "rows", tiler_rows, "columns", tiler_columns,
"width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
//set fps sink
g_object_set (G_OBJECT (sink), "text-overlay", FALSE, "video-sink", nvsink, "sync", FALSE, NULL);
//linking all elements
gst_bin_add_many(GST_BIN(pipeline),appsrc,conv,pgie,tiler,nvvidconv,nvosd,sink,NULL);
if (!gst_element_link_many(appsrc,conv,pgie,tiler,nvvidconv,nvosd,sink,NULL)){g_printerr("Elements could not be linked"); return -1;}
tiler_src_pad = gst_element_get_static_pad (pgie, "src");
if (!tiler_src_pad) {g_print ("Unable to get src pad\n");}
else{gst_pad_add_probe (tiler_src_pad, GST_PAD_PROBE_TYPE_BUFFER,tiler_src_pad_buffer_probe, (gpointer)sink, NULL);}
g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data),(gpointer)argv[1]);
/* Set the pipeline to "playing" state */
g_print ("Now playing:");
for (int i = 0; i < num_sources; i++) {g_print (" %s,", argv[i + 1]);}
g_print ("\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
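For what it's worth, the commented-out nvstreammux is normally not optional in DeepStream: nvinfer expects batched buffers in NVMM memory coming from nvstreammux, so linking videoconvert straight into nvinfer is another plausible cause of not-negotiated. A rough sketch of inserting the muxer (pad and property names from the DeepStream elements; untested, element names made up for illustration):
GstElement *streammux = gst_element_factory_make("nvstreammux", "stream-muxer");
GstElement *nvconv = gst_element_factory_make("nvvideoconvert", "to-nvmm");
GstElement *capsf = gst_element_factory_make("capsfilter", "nvmm-caps");
GstCaps *nvmm_caps = gst_caps_from_string("video/x-raw(memory:NVMM), format=NV12");
g_object_set(G_OBJECT(capsf), "caps", nvmm_caps, NULL);
gst_caps_unref(nvmm_caps);
g_object_set(G_OBJECT(streammux), "batch-size", 1, "width", MUXER_OUTPUT_WIDTH,
"height", MUXER_OUTPUT_HEIGHT, NULL);
gst_bin_add_many(GST_BIN(pipeline), nvconv, capsf, streammux, NULL);
gst_element_link_many(conv, nvconv, capsf, NULL);
/* nvstreammux sink pads are request pads named sink_%u */
GstPad *sinkpad = gst_element_get_request_pad(streammux, "sink_0");
GstPad *srcpad = gst_element_get_static_pad(capsf, "src");
if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
g_printerr("Failed to link into nvstreammux\n");
gst_object_unref(srcpad);
gst_object_unref(sinkpad);
/* then: gst_element_link_many(streammux, pgie, tiler, nvvidconv, nvosd, sink, NULL); */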

How to listen to oggdemux GStreamer failures?

I wrote a GStreamer app to convert opus audio to raw audio. If I feed bad audio (just random bytes) into the pipeline, the pipeline gets stuck and I don't receive an error message on the message bus.
I'm listening to the error messages flowing through the pipeline, but not getting an error code to indicate the failure. The gstreamer debug logs indicate the demux failed though, I can see the following in the logs:
0:00:00.021614679 22541 0xe5b190 WARN oggdemux gstoggdemux.c:4609:gst_ogg_demux_send_event:<oggdemux0> No chain to forward event to
0:00:00.021656681 22541 0xe5b190 WARN oggdemux gstoggdemux.c:2433:gst_ogg_demux_sink_event:<oggdemux0> EOS while trying to retrieve chain, seeking disabled
The following is an app sample that I wrote:
#include <gst/gst.h>
#include <gst/gstbin.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>
#include <string.h>
static GMainLoop *loop;
FILE *file = NULL;
size_t bytesRead = 0;
typedef struct _CustomData
{
GstElement *pipeline;
GstAppSrc *app_source;
guint sourceid; /* To control the GSource */
} CustomData;
static gboolean push_data(CustomData *data)
{
GstBuffer *gbuffer;
GstFlowReturn ret;
char buffer[1024];
gbuffer = gst_buffer_new_and_alloc(sizeof(buffer));
GstMapInfo info;
bytesRead = fread(buffer, 1, sizeof(buffer), file);
gst_buffer_map(gbuffer, &info, GST_MAP_WRITE);
memcpy(info.data, buffer, bytesRead);
gst_buffer_unmap(gbuffer, &info);
if (bytesRead > 0)
{
gst_buffer_set_size(gbuffer, bytesRead); /* shrink the buffer to the bytes actually read */
//g_print("Pushing %d\n", (int)bytesRead);
/* Push the buffer into the appsrc */
g_signal_emit_by_name(data->app_source, "push-buffer", gbuffer, &ret);
gst_buffer_unref(gbuffer); /* the signal takes its own ref, so release ours */
return TRUE;
}
else
{
g_print("file complete\n");
gst_app_src_end_of_stream(data->app_source);
gst_buffer_unref(gbuffer);
return FALSE;
}
}
static void stop_feed(GstElement *source, CustomData *data)
{
if (data->sourceid != 0)
{
g_print("Stop feeding\n");
g_source_remove(data->sourceid);
data->sourceid = 0;
}
}
static void start_feed(GstElement *source, guint size, CustomData *data)
{
if (data->sourceid == 0)
{
g_print("Start feeding\n");
data->sourceid = g_idle_add((GSourceFunc)push_data, data);
}
}
static gboolean bus_call(GstBus * bus, GstMessage * msg, gpointer user_data)
{
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
g_print("End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: from %s %s\n", GST_OBJECT_NAME(msg->src), error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return TRUE;
}
int main(int argc,
char *argv[])
{
CustomData data;
memset(&data, 0, sizeof(data));
GstBus *bus;
guint bus_watch_id;
/* Initialisation */
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
GError *error = NULL;
data.pipeline = gst_parse_launch("concat name=c ! filesink location=program.wav appsrc name=src_00 ! oggdemux ! opusdec ! audioconvert ! audioresample ! audio/x-raw,format=S16LE,channels=1,rate=16000 ! queue ! c.", &error);
if (!data.pipeline)
{
g_printerr("Pipeline could not be created. Exiting.\n");
return -1;
}
data.app_source = (G_TYPE_CHECK_INSTANCE_CAST((gst_bin_get_by_name(GST_BIN(data.pipeline), "src_00")), GST_TYPE_APP_SRC, GstAppSrc));
g_signal_connect(data.app_source, "need-data", G_CALLBACK(start_feed), &data);
g_signal_connect(data.app_source, "enough-data", G_CALLBACK(stop_feed), &data);
/* we add a message handler */
bus = gst_pipeline_get_bus(GST_PIPELINE(data.pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, NULL);
gst_object_unref(bus);
file = fopen("junk.wav", "rb");
/* Set the pipeline to "playing" state*/
g_print("Now playing");
gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print("Running...\n");
g_main_loop_run(loop);
/* Out of the main loop, clean up nicely */
g_print("Returned, stopping playback\n");
gst_element_set_state(data.pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(data.pipeline));
g_source_remove(bus_watch_id);
g_main_loop_unref(loop);
return 0;
}
I would have expected the demux failure to be reported on the message bus, but it is not. How can I listen for such errors?
I've tried other pipelines that use decodebin, and there I do get the error messages on the message bus. The following pipeline works as expected:
gst_parse_launch("concat name=c ! filesink location=program.wav appsrc name=src_00 ! decodebin ! audioconvert ! audioresample ! audio/x-raw,format=S16LE,channels=1,rate=16000 ! queue ! c.", &error);
GStreamer version: 1.8.3
OS: Ubuntu 16.04
The issue seems to be resolved in GStreamer 1.14. After updating, I now get an error message on the message bus:
Message: Error: from oggdemux0 Could not demultiplex stream.
Error Code: GST_STREAM_ERROR_DEMUX
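For anyone stuck on the older version, one workaround is the watchdog element from gst-plugins-bad, which posts an error on the bus if no buffers flow for a given timeout, so a silently stuck pipeline still surfaces as a bus error. A sketch (assuming watchdog is available in your install):
data.pipeline = gst_parse_launch(
"concat name=c ! filesink location=program.wav "
"appsrc name=src_00 ! oggdemux ! opusdec ! watchdog timeout=3000 ! "
"audioconvert ! audioresample ! "
"audio/x-raw,format=S16LE,channels=1,rate=16000 ! queue ! c.", &error);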

GStreamer EOS message handling in filesink to change location on the fly

Trying to switch output files on the fly, but can't handle EOS.
http://gstreamer-devel.966125.n4.nabble.com/Dynamically-updating-filesink-location-at-run-time-on-the-fly-td4660569.html
Quote:
Assuming you have a pipeline that looks like this:
audiosrc --> encoder --> mux --> filesink
then you'll need to change it to:
audiosrc --> encoder --> queue --> muxsink_bin
where muxsink_bin is a bin
ghostpad --> mux --> filesink
then the procedure is:
1 - Block the queue srcpad using gst_pad_set_blocked_async()
2 - In the blocked callback:
2a - unlink muxsink_bin with gst_pad_unlink()
2b - send an EOS event to the muxsink_bin sink pad with gst_pad_send_event()
2c - create a new muxsink_bin
2d - set the filesink location
2e - add the new bin to the pipeline with gst_bin_add()
2f - sync with parent using gst_element_sync_state_with_parent()
2g - link it to the queue srcpad with gst_pad_link()
2h - unblock the queue srcpad with gst_pad_set_blocked_async(). When the unblocked callback occurs you're recording again & no data has been lost. No action is required in the unblocked callback.
3 - handle the EOS & delete the old muxsink_bin. I had a msg handler that I installed in my bin_init() function using "gstbin_class->handle_message = GST_DEBUG_FUNCPTR(msg_handler)" & in the handler:
3a - lock the bin state with gst_element_set_locked_state()
3b - set the state to NULL with gst_element_set_state()
3c - remove it from the pipeline with gst_bin_remove()
That's it. The only thing to be mindful of is that data must be flowing thru the pipeline for this to work.
Paddy
The main sequence works except for the finalization of the old pipeline.
The difficulty is with point 3: I can send EOS to the ghostpad, and the filesink gets it. But how do I catch that EOS?
What does it mean to "install a msg handler using gstbin_class->handle_message = GST_DEBUG_FUNCPTR(msg_handler)"?
There is message forwarding.
It must be enabled on the bin (message-forward is a GstBin property):
g_object_set(G_OBJECT(bin), "message-forward", TRUE, NULL);
Handling:
case GST_MESSAGE_ELEMENT:
{
const GstStructure *s = gst_message_get_structure (msg);
if (gst_structure_has_name (s, "GstBinForwarded"))
{
GstMessage *forward_msg = NULL;
gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
if (GST_MESSAGE_TYPE (forward_msg) == GST_MESSAGE_EOS)
{
g_print ("EOS from element %s\n",
GST_OBJECT_NAME (GST_MESSAGE_SRC (forward_msg)));
DestroyBin();
CreateNewBin();
RemovePad();
}
gst_message_unref (forward_msg);
}
}
Full code:
#include <gst/gst.h>
#include <iostream>
#include <cstring>
#include <cstdio>
static gchar *opt_effects = NULL;
#define DEFAULT_EFFECTS "identity,exclusion,navigationtest," \
"agingtv,videoflip,vertigotv,gaussianblur,shagadelictv,edgetv"
static GstElement *pipeline;
static GstElement * muxer;
static GstElement * sink;
static GstElement * q2;
static int i=0;
GstElement * bin;
GstPad * muxerSinkPad;
gulong probeId;
static GQueue effects = G_QUEUE_INIT;
void CreateNewBin();
void DestroyBin();
void ChangeLocation();
void RemovePad();
static GstPadProbeReturn
pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
GstPad *sinkPad = gst_element_get_static_pad(bin, "sink");
gst_pad_unlink(pad, sinkPad);
gst_pad_send_event(sinkPad, gst_event_new_eos());
gst_object_unref(sinkPad);
return GST_PAD_PROBE_OK;
}
static gboolean
timeout_cb (gpointer user_data)
{
static int i=0;
if(i==0)
{
GstPad * q2SrcPad;
q2SrcPad = gst_element_get_static_pad(q2, "src");
std::cout << "Timeout: " << q2SrcPad << std::endl;
probeId = gst_pad_add_probe (q2SrcPad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
pad_probe_cb, user_data, NULL);
gst_object_unref(q2SrcPad);
return TRUE;
}
return FALSE;
}
static gboolean
bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
{
GMainLoop *loop = (GMainLoop*)user_data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *dbg;
gst_message_parse_error (msg, &err, &dbg);
gst_object_default_error (msg->src, err, dbg);
g_error_free (err);
g_free (dbg);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_EOS: // the switch is over GST_MESSAGE_TYPE, so match the message enum
std::cout << "Got EOS message" << std::endl;
break;
case GST_MESSAGE_ELEMENT:
{
const GstStructure *s = gst_message_get_structure (msg);
if (gst_structure_has_name (s, "GstBinForwarded"))
{
GstMessage *forward_msg = NULL;
gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
if (GST_MESSAGE_TYPE (forward_msg) == GST_MESSAGE_EOS)
{
g_print ("EOS from element %s\n",
GST_OBJECT_NAME (GST_MESSAGE_SRC (forward_msg)));
DestroyBin();
CreateNewBin();
RemovePad();
}
gst_message_unref (forward_msg);
}
}
break;
default:
break;
}
return TRUE;
}
int
main (int argc, char **argv)
{
GError *err = NULL;
GMainLoop *loop;
GstElement *src, *q1, *encoder;
gst_init(&argc, &argv);
pipeline = gst_pipeline_new ("pipeline");
src = gst_element_factory_make ("videotestsrc", NULL);
//Create a caps filter between videosource videoconvert
std::string capsString = "video/x-raw,format=YV12,width=320,height=240,framerate=30/1";
GstCaps * dataFilter = gst_caps_from_string(capsString.c_str());
q1 = gst_element_factory_make ("queue", NULL);
encoder = gst_element_factory_make ("x264enc", NULL);
q2 = gst_element_factory_make("queue", NULL);
gst_bin_add_many(GST_BIN(pipeline), src, q1, encoder, q2, 0);
gboolean link = gst_element_link_filtered(src, q1, dataFilter);
link &= gst_element_link(q1, encoder);
link &= gst_element_link(encoder, q2);
CreateNewBin();
gst_element_set_state (pipeline, GST_STATE_PLAYING);
loop = g_main_loop_new (NULL, FALSE);
gst_bus_add_watch (GST_ELEMENT_BUS (pipeline), bus_cb, loop);
g_timeout_add_seconds (10, timeout_cb, loop);
g_main_loop_run (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
void RemovePad()
{
GstPad * q2SrcPad;
q2SrcPad = gst_element_get_static_pad(q2, "src");
gst_pad_remove_probe(q2SrcPad, probeId);
gst_object_unref(q2SrcPad);
}
void DestroyBin()
{
gst_element_set_state(bin, GST_STATE_NULL);
gst_bin_remove(GST_BIN(pipeline), bin);
}
void CreateNewBin()
{
static std::string fileLocPattern = "deneme%d.mkv";
char buffer[32]; // large enough for the pattern plus a multi-digit counter
memset(buffer, 0, sizeof(buffer));
snprintf(buffer, sizeof(buffer), fileLocPattern.c_str(), i++);
//Create Muxer Element
muxer = gst_element_factory_make("matroskamux", "MatroskaMuxer");
//Create File Sink Element
sink = gst_element_factory_make("filesink", buffer);
g_object_set(G_OBJECT(sink), "location", buffer, 0);
//Create muxsinkBin
bin = gst_bin_new(buffer);
g_object_set(G_OBJECT(bin), "message-forward", TRUE, 0);
//Add a src pad to the bin
gst_bin_add_many(GST_BIN(bin), muxer, sink, 0);
gboolean linkState = TRUE;
//Connect elements within muxsink_bin
//Link: matroskamuxer -> filesink
linkState &= gst_element_link_many(muxer, sink, 0);
//Add this bin to pipeline
gst_bin_add(GST_BIN(pipeline), bin);
//Create ghostpad and manually link muxsinkBin and remaining part of the pipeline
{
GstPadTemplate * muxerSinkPadTemplate;
if( !(muxerSinkPadTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(muxer), "video_%u")) )
{
std::cout << "Unable to get source pad template from muxing element" << std::endl;
}
//Obtain dynamic pad from element
muxerSinkPad = gst_element_request_pad(muxer, muxerSinkPadTemplate, 0, 0);
//Add ghostpad
GstPad * ghostPad = gst_ghost_pad_new("sink", muxerSinkPad);
gst_element_add_pad(bin, ghostPad);
gst_object_unref(GST_OBJECT(muxerSinkPad));
gst_element_sync_state_with_parent(bin);
//Get src pad from queue element
GstPad * queueBeforeBinSrcPad = gst_element_get_static_pad(q2, "src");
//Link queuebeforebin to ghostpad
if (gst_pad_link(queueBeforeBinSrcPad, ghostPad) != GST_PAD_LINK_OK )
{
std::cout << "QueueBeforeBin cannot be linked to MuxerSinkPad." << std::endl;
}
gst_object_unref(queueBeforeBinSrcPad);
}
}
http://gstreamer-devel.966125.n4.nabble.com/Listening-on-EOS-events-for-GstBin-td4669126.html
http://gstreamer-devel.966125.n4.nabble.com/file/n4669476/main.cpp
Depending on your use case you can use the multifilesink element. It can switch files on the fly on certain events: a file for each buffer, a file for each segment, and so on. Check its properties, for example next-file (see the sketch below), and see if there is anything that would work for you.
It also serves as a good code base in case you want to write something similar (or maybe extend it?)
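For example, a sketch that starts a new file at every key frame using multifilesink's next-file property (property names from the multifilesink docs; untested):
gst-launch-1.0 videotestsrc ! x264enc key-int-max=300 ! h264parse ! multifilesink location=out%03d.h264 next-file=key-frame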
I'll post the code of the actual custom GstBin, aka 'muxsink_bin', that I ended up implementing to do that forwarding and EOS handling for the 'detachable sink part' of the pipeline.
plisolatedbin.h:
#pragma once
#include <gst/gst.h>
#include <gst/gstbin.h>
G_BEGIN_DECLS
#define PL_TYPE_ISOLATED_BIN (pl_isolated_bin_get_type ())
#define PL_IS_ISOLATED_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), PL_TYPE_ISOLATED_BIN))
#define PL_IS_ISOLATED_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), PL_TYPE_ISOLATED_BIN))
#define PL_ISOLATED_BIN_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), PL_TYPE_ISOLATED_BIN, PlIsolatedBinClass))
#define PL_ISOLATED_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), PL_TYPE_ISOLATED_BIN, PlIsolatedBin))
#define PL_ISOLATED_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), PL_TYPE_ISOLATED_BIN, PlIsolatedBinClass))
#define PL_ISOLATED_BIN_CAST(obj) ((PlIsolatedBin*)(obj))
typedef struct _PlIsolatedBin PlIsolatedBin;
typedef struct _PlIsolatedBinClass PlIsolatedBinClass;
/**
* Does not forward EOS to parent by default.
*/
struct _PlIsolatedBin
{
GstBin bin;
};
struct _PlIsolatedBinClass
{
GstBinClass parent_class;
};
GType pl_isolated_bin_get_type();
GstElement* pl_isolated_bin_new();
G_END_DECLS
plisolatedbin.c:
#include "plisolatedbin.h"
#include <assert.h>
G_DEFINE_TYPE(PlIsolatedBin, pl_isolated_bin, GST_TYPE_BIN)
static void pl_isolated_bin_init(PlIsolatedBin *plisolatedbin)
{
}
static void pl_isolated_bin_handle_message_func(GstBin *bin, GstMessage *message)
{
if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_EOS)
{
GST_BIN_CLASS(pl_isolated_bin_parent_class)->handle_message(bin, message);
}
else
{
GstMessage *forwarded = gst_message_new_element(GST_OBJECT_CAST(bin), gst_structure_new("PlIsolatedBinForwarded", "message", GST_TYPE_MESSAGE, message, NULL));
gst_element_post_message(GST_ELEMENT_CAST(bin), forwarded);
}
}
static void pl_isolated_bin_class_init(PlIsolatedBinClass *class)
{
class->parent_class.handle_message = GST_DEBUG_FUNCPTR(pl_isolated_bin_handle_message_func);
}
GstElement* pl_isolated_bin_new()
{
return g_object_new(PL_TYPE_ISOLATED_BIN, NULL);
}
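Usage is the same as for a plain GstBin; a minimal sketch (the muxer/filesink element names are placeholders):
GstElement *bin = pl_isolated_bin_new();
gst_bin_add_many(GST_BIN(bin), muxer, filesink, NULL);
/* add a ghost pad and link it, as in CreateNewBin() above */
/* An EOS reaching the sink inside this bin is re-posted as a
* "PlIsolatedBinForwarded" element message instead of being handled as a
* normal EOS, so the application can catch it on the bus and tear the
* bin down without stopping the rest of the pipeline. */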

C++ (Ubuntu): load audio file (wav, mp3, aiff) to array/vector with GStreamer

How can I decode an audio file (wav, mp3, aiff) with C++ (Ubuntu) and store it as raw PCM (int) samples in a vector/array?
What I did so far: I used GStreamer (I'm a very beginner) to decode the file, and I can play it and get data with pull-buffer; however, I didn't find a method to get the whole audio data at once to store in an array.
Is there such a method in GStreamer? Or is there another C++ library to decode audio files and get the raw PCM (int) data?
edit: changed frequency to PCM
I solved the problem by myself with GStreamer. The trick is to use giostreamsink as the sink; it stores the data in a GMemoryOutputStream.
The full code sample:
#include <string>
#include <stdio.h>
#include <gst/gst.h>
#include <gio/gio.h>
#include <boost/thread.hpp>
static void on_pad_added(GstElement *decodebin,
GstPad *pad,
gpointer data) {
GstElement *convert = (GstElement *) data;
GstCaps *caps;
GstStructure *str;
GstPad *audiopad;
audiopad = gst_element_get_static_pad(convert, "sink");
if (GST_PAD_IS_LINKED(audiopad)) {
g_object_unref(audiopad);
return;
}
caps = gst_pad_get_caps(pad);
str = gst_caps_get_structure(caps, 0);
printf("here %s\n",gst_structure_get_name(str));
if (!g_strrstr(gst_structure_get_name(str), "audio")) {
gst_caps_unref(caps);
gst_object_unref(audiopad);
return;
}
gst_caps_unref(caps);
gst_pad_link(pad, audiopad);
g_object_unref(audiopad);
}
static gboolean bus_call(GstBus *bus,
GstMessage *msg,
gpointer data) {
GMainLoop *loop = (GMainLoop*)data;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free (debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return true;
}
int main (int argc, char **argv) {
gst_init(&argc, &argv);
GstElement *pipeline, *source, *decode, *sink, *convert;
int rate = 44100;
int channels = 1;
int depth = 16;
bool output_signed = true;
GMainLoop *loop;
GstBus *bus;
guint bus_watch_id;
GMemoryOutputStream *stream;
gpointer out_data;
// loop
loop = g_main_loop_new(NULL, false);
// pipeline
pipeline = gst_pipeline_new("test_pipeline");
// sink
stream = G_MEMORY_OUTPUT_STREAM(g_memory_output_stream_new(NULL, 0, (GReallocFunc)g_realloc, (GDestroyNotify)g_free));
sink = gst_element_factory_make ("giostreamsink", "sink");
g_object_set(G_OBJECT(sink), "stream", stream, NULL);
// source
source = gst_element_factory_make("filesrc", "source");
g_object_set(G_OBJECT(source), "location", "/home/sam/Desktop/audio/audio.wav", NULL);
// convert
convert = gst_element_factory_make("audioconvert", "convert");
// decode
decode = gst_element_factory_make("decodebin", "decoder");
// link decode to convert
g_signal_connect(decode, "pad-added", G_CALLBACK(on_pad_added), convert);
// bus
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
// add elements into pipeline
gst_bin_add_many(GST_BIN(pipeline), source, decode, convert, sink, NULL);
// link source to decode
gst_element_link(source, decode);
// caps
GstCaps *caps;
caps = gst_caps_new_simple("audio/x-raw-int",
"rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, channels,
"width", G_TYPE_INT, depth,
"depth", G_TYPE_INT, depth,
"signed", G_TYPE_BOOLEAN, output_signed,
NULL);
// link convert to sink
gst_element_link_filtered(convert, sink, caps);
gst_caps_unref(caps);
// start playing
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
// iterate
g_print("Running...\n");
g_main_loop_run(loop);
// out of the main loop, clean up nicely
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref(loop);
// get data
g_print("get data\n");
out_data = g_memory_output_stream_get_data(G_MEMORY_OUTPUT_STREAM(stream));
unsigned long size = g_memory_output_stream_get_size(G_MEMORY_OUTPUT_STREAM(stream));
unsigned long sizeData = g_memory_output_stream_get_data_size(G_MEMORY_OUTPUT_STREAM(stream));
std::cout << "stream size: " << size << std::endl;
std::cout << "stream data size: " << sizeData << std::endl;
// access data and store in vector
std::vector<int16_t> data;
for (unsigned long i = 0; i < sizeData/2; ++i) {
data.push_back(((gint16*)out_data)[i]);
}
return 0;
}
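Note that this sample targets the GStreamer 0.10 API (audio/x-raw-int caps, gst_pad_get_caps()). Under GStreamer 1.0 the equivalent caps would look like this (a sketch):
caps = gst_caps_new_simple("audio/x-raw",
"format", G_TYPE_STRING, "S16LE", // 16-bit signed little-endian PCM
"rate", G_TYPE_INT, rate,
"channels", G_TYPE_INT, channels,
NULL);
// gst_pad_get_caps() becomes gst_pad_get_current_caps() / gst_pad_query_caps()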

GStreamer appsrc video streaming over the network

I'm trying to use gstreamer appsrc to play video stream over the network.
I found good examples here.
gstreamer appsrc test application
http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
Using the examples above I can play a video in an X window using Xlib. When the pipeline is set to the PLAYING state, the "need-data" signal is emitted, and in the start_feed callback function data read from a video file is injected into an appsrc GstBuffer, which plays the sample video.
I'm trying to get the data from the network instead of a file, so I think a simple echo server could read a video file exactly the same way as above and send the data to the client when a connection occurs. The client would then receive this data and put it into the appsrc.
My question is: how do I put stream data into the appsrc pipeline? Can anybody give a suggestion or a good reference?
Here's the working sample code based on the examples in the links above.
// http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
// http://www.cs.odu.edu/~cs476/Xlib/xlines.c
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <stdio.h>
#include <unistd.h> // sleep()
#include <stdbool.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>
#include <gst/interfaces/xoverlay.h>
#define BUFF_SIZE (640*480*3)//(1024)
#define BORDER_WIDTH 2
#define DEBUG printf
typedef unsigned int uint32;
typedef unsigned char uint8;
typedef struct {
GstPipeline *pipeline;
GstAppSrc *src;
GstElement *sink;
GstElement *decoder;
GstElement *ffmpeg;
GstElement *videosink;
GMainLoop *loop;
guint sourceid;
FILE *file;
} gst_app_t;
static gst_app_t gst_app;
static Window child_window = 0;
static Window window = 0;
static gboolean read_data(gst_app_t *app)
{
GstBuffer *buffer;
guint8 *ptr;
gint size;
GstFlowReturn ret;
ptr = g_malloc(BUFF_SIZE);
g_assert(ptr);
size = fread(ptr, 1, BUFF_SIZE, app->file);
if(size == 0){
ret = gst_app_src_end_of_stream(app->src);
DEBUG("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
buffer = gst_buffer_new();
GST_BUFFER_MALLOCDATA(buffer) = ptr;
GST_BUFFER_SIZE(buffer) = size;
GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
ret = gst_app_src_push_buffer(app->src, buffer);
if(ret != GST_FLOW_OK){
DEBUG("push buffer returned %d for %d bytes \n", ret, size);
return FALSE;
}
if(size != BUFF_SIZE){
ret = gst_app_src_end_of_stream(app->src);
DEBUG("eos returned %d at %d\n", ret, __LINE__);
return FALSE;
}
return TRUE;
}
static void start_feed (GstElement * pipeline, guint size, gst_app_t *app)
{
if (app->sourceid == 0) {
DEBUG ("start feeding\n");
app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
}
}
static void stop_feed (GstElement * pipeline, gst_app_t *app)
{
if (app->sourceid != 0) {
DEBUG ("stop feeding\n");
g_source_remove (app->sourceid);
app->sourceid = 0;
}
}
static void on_pad_added(GstElement *element, GstPad *pad)
{
GstCaps *caps;
GstStructure *str;
gchar *name;
GstPad *ffmpegsink;
GstPadLinkReturn ret;
DEBUG("pad added\n");
caps = gst_pad_get_caps(pad);
str = gst_caps_get_structure(caps, 0);
g_assert(str);
name = (gchar*)gst_structure_get_name(str);
DEBUG("pad name %s\n", name);
if(g_strrstr(name, "video")){
ffmpegsink = gst_element_get_pad(gst_app.ffmpeg, "sink");
g_assert(ffmpegsink);
ret = gst_pad_link(pad, ffmpegsink);
DEBUG("pad_link returned %d\n", ret);
gst_object_unref(ffmpegsink);
}
gst_caps_unref(caps);
}
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer *ptr)
{
gst_app_t *app = (gst_app_t*)ptr;
switch(GST_MESSAGE_TYPE(message))
{
case GST_MESSAGE_ELEMENT: {
gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC(message)), child_window);
}
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *err;
gst_message_parse_error(message, &err, &debug);
DEBUG("Error %s\n", err->message);
g_error_free(err);
g_free(debug);
g_main_loop_quit(app->loop);
}
break;
case GST_MESSAGE_WARNING:
{
gchar *debug;
GError *err;
gchar *name;
gst_message_parse_warning(message, &err, &debug);
DEBUG("Warning %s\nDebug %s\n", err->message, debug);
name = GST_MESSAGE_SRC_NAME(message);
DEBUG("Name of src %s\n", name ? name : "nil");
g_error_free(err);
g_free(debug);
}
break;
case GST_MESSAGE_EOS:
DEBUG("End of stream\n");
g_main_loop_quit(app->loop);
break;
case GST_MESSAGE_STATE_CHANGED:
break;
default:
DEBUG("got message %s\n", \
gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
break;
}
return TRUE;
}
static gboolean terminate_playback (GstElement * loop)
{
DEBUG ("Terminating playback\n");
g_main_loop_quit ((GMainLoop *)loop);
return FALSE;
}
int gstreamer_init(int argc, char *argv[])
{
gst_app_t *app = &gst_app;
GstBus *bus;
GstStateChangeReturn state_ret;
app->file = fopen(argv[1], "r");
g_assert(app->file);
/* initialization */
gst_init(NULL, NULL);
app->loop = g_main_loop_new(NULL, FALSE);
/* create elements */
app->pipeline = (GstPipeline *)gst_pipeline_new("my_pipeline");
app->src = (GstAppSrc *)gst_element_factory_make("appsrc", "myappsrc");
app->decoder = gst_element_factory_make("decodebin2", "mydecoder");
app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg");
app->videosink = gst_element_factory_make("autovideosink", "myvideosink");
if (!app->videosink) {
DEBUG ("output could not be found - check your install\n");
}
g_assert(app->src);
g_assert(app->decoder);
g_assert(app->ffmpeg);
g_assert(app->videosink);
bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
gst_object_unref(bus);
g_signal_connect(app->decoder, "pad-added",
G_CALLBACK(on_pad_added), app->ffmpeg);
//gst_app_src_set_emit_signals(app->src, true);
g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);
gst_bin_add_many (GST_BIN (app->pipeline), (GstElement *)app->src,
app->decoder, app->ffmpeg, app->videosink, NULL);
/* link everything together */
if (!gst_element_link((GstElement *)app->src, app->decoder)) {
DEBUG ("Failed to link one or more elements!\n");
return -1;
}
if(!gst_element_link(app->ffmpeg, app->videosink)){
DEBUG("failed to link ffmpeg and videosink");
return -1;
}
state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_PLAYING);
if (state_ret == GST_STATE_CHANGE_FAILURE) {
DEBUG("Failed to start up pipeline!\n");
return 1;
}
DEBUG("set state returned %d\n", state_ret);
//g_timeout_add (15000, (GSourceFunc) terminate_playback, app->loop);
g_main_loop_run(app->loop);
state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_NULL);
DEBUG("set state null returned %d\n", state_ret);
gst_object_unref(app->pipeline);
return 1;
}
/*
* gst-launch filesrc location=test.avi ! decodebin2 ! ffmpegcolorspace ! autovideosink
*
* 1. dependency library install
* $ sudo apt-get install gstreamer0.10-plugins-bad
* $ sudo apt-get install gstreamer0.10-ffmpeg
*
* 2. compile
* $ gcc hello.c -o hello -lX11 `pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10` -lgstinterfaces-0.10
*
* 3. how to run program
* $ ./hello <video_file_name>
* $ GST_DEBUG=appsrc:5 ./hello ./hbo_dtc_sd.ts
*/
int main(int argc, char *argv[])
{
Display *disp;
Window root;
long fgcolor, bgcolor;
GC gc;
XGCValues gc_val;
XEvent event;
char *msg = "Hello, World!";
int screen;
disp = XOpenDisplay(NULL);
if (disp == NULL) {
fprintf(stderr, "Cannot open display\n");
exit(1);
}
screen = DefaultScreen(disp);
root = RootWindow(disp, screen);
fgcolor = BlackPixel(disp, screen);
bgcolor = WhitePixel(disp, screen);
window = XCreateSimpleWindow(disp, root, 100, 100, 1000, 840, 1,
fgcolor, bgcolor);
child_window = XCreateSimpleWindow(disp, window, 100, 100, 800, 600, 1,
fgcolor, bgcolor);
gc_val.foreground = fgcolor;
gc_val.background = bgcolor;
gc = XCreateGC(disp, child_window, GCForeground|GCBackground, &gc_val);
XSelectInput(disp, child_window, ExposureMask | KeyPressMask);
g_warning("map xwindow");
//XMapWindow(disp, window);
XMapWindow(disp, window);
XMapWindow(disp, child_window);
XSync(disp, FALSE);
//XDrawLine (disp, window, gc, 0, 0, 1000, 800);
//XDrawLine (disp, child_window, gc, 0, 0, 800, 600);
gstreamer_init(argc, argv);
XDestroyWindow( disp, window );
XDestroyWindow( disp, child_window );
XCloseDisplay( disp );
return 0;
}
You'll want at least one other thread (on each end) to handle communication over a socket (like TCP, or UDP if on a local network). This typically makes a blocking call to wait for packets. To send data out of a pipeline, you can form a GStreamer tee and queue, and then an appsink to pull the data and write it to the socket. To receive, you pull the data from the socket into a buffer. Keep in mind the OS's socket buffer is relatively small and will drop packets if you don't pull from it fast enough, or push to it too fast. Hence the buffers.
On a need-data signal, you pull from that buffer into the pipeline using gst_app_src_push_buffer(). And on an enough-data signal, you can keep buffering or dispose of the data, whatever your application needs to do.
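A minimal receiver-side sketch of that idea, using the GStreamer 1.0 buffer API and plain POSIX sockets (the socket setup, chunk size, and error handling are assumptions for illustration, not taken from the sample above):
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <sys/socket.h>
#include <pthread.h>
static GstAppSrc *appsrc; /* configured elsewhere, as in the sample above */
static int sockfd;        /* an already-connected TCP socket */
/* Reader thread: block on the socket and hand every chunk to appsrc.
* appsrc's internal queue does the buffering between network and pipeline. */
static void *socket_reader(void *unused)
{
char chunk[4096];
ssize_t n;
while ((n = recv(sockfd, chunk, sizeof(chunk), 0)) > 0) {
GstBuffer *buf = gst_buffer_new_allocate(NULL, n, NULL);
gst_buffer_fill(buf, 0, chunk, n);
if (gst_app_src_push_buffer(appsrc, buf) != GST_FLOW_OK) /* takes ownership of buf */
break;
}
gst_app_src_end_of_stream(appsrc); /* peer closed the connection or an error occurred */
return NULL;
}
/* spawn with pthread_create(&tid, NULL, socket_reader, NULL) once the pipeline is PLAYING */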