Problem about saving cv::Mat to mp4 file via appsrc - computer-vision

I want to save a cv::Mat as an MP4 file through appsrc, or forward it over RTMP. I implemented a simple program that runs successfully, but the resulting MP4 file cannot be played. Can someone tell me what the problem is? I think maybe the PTS is set wrong, but I don't know how to solve it. Thank you in advance.
#include <gst/gst.h>
#include <glib.h>
#include <gst/app/gstappsrc.h>
#include <opencv2/opencv.hpp>
#include <unistd.h>

typedef struct {
    GstPipeline *pipeline = nullptr;
    GstAppSrc *app_src = nullptr;
    GstElement *video_convert = nullptr;
    GstElement *encoder = nullptr;
    GstElement *h264_parser = nullptr;
    GstElement *qt_mux = nullptr;
    GstElement *file_sink = nullptr;
} CustomData;

int main(int argc, char *argv[])
{
    CustomData data;
    GstBus *bus = nullptr;
    GstMessage *msg = nullptr;
    GstStateChangeReturn ret;
    gboolean terminate = false;
    GstClockTime timestamp = 0;

    gst_init(&argc, &argv);
    data.pipeline = (GstPipeline*)gst_pipeline_new("m_pipeline");
    data.app_src = (GstAppSrc*)gst_element_factory_make("appsrc", "m_app_src");
    data.video_convert = gst_element_factory_make("videoconvert", "m_video_convert");
    data.encoder = gst_element_factory_make("x264enc", "m_x264enc");
    data.h264_parser = gst_element_factory_make("h264parse", "m_h264_parser");
    data.qt_mux = gst_element_factory_make("qtmux", "qt_mux");
    data.file_sink = gst_element_factory_make("filesink", "file_sink");
    if (!data.app_src || !data.video_convert || !data.encoder || !data.h264_parser || !data.qt_mux || !data.file_sink || !data.pipeline) {
        g_printerr("failed to create all elements\n");
        return -1;
    }
    gst_bin_add_many(GST_BIN(data.pipeline), (GstElement*)data.app_src, data.video_convert, data.encoder, data.h264_parser, data.qt_mux, data.file_sink, NULL);
    g_assert(gst_element_link_many((GstElement*)data.app_src, data.video_convert, data.encoder, data.h264_parser, data.qt_mux, data.file_sink, NULL));

    GstCaps *caps = gst_caps_new_simple("video/x-raw",
            "format", G_TYPE_STRING, "BGR",
            "width", G_TYPE_INT, 1280,
            "height", G_TYPE_INT, 720,
            "framerate", GST_TYPE_FRACTION, 25, 1,
            NULL);
    gst_app_src_set_caps(GST_APP_SRC(data.app_src), caps);
    g_object_set(data.app_src, "is-live", TRUE, NULL);
    g_object_set(data.app_src, "format", GST_FORMAT_TIME, NULL);

    std::string mp4_url = "des.mp4";
    g_object_set(data.file_sink, "location", mp4_url.c_str(), NULL);

    ret = gst_element_set_state((GstElement*)data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.pipeline);
        return -1;
    }

    cv::VideoCapture cap;
    cap.open("src.mp4");
    if (!cap.isOpened())
        return -2;
    cv::Mat frame;
    while (true) {
        cap.read(frame);
        if (frame.empty()) {
            break;
        }
        GstBuffer *buffer;
        buffer = gst_buffer_new_wrapped(frame.data, frame.size().width * frame.size().height * frame.channels());
        GST_BUFFER_PTS(buffer) = timestamp;
        GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 25);
        timestamp += GST_BUFFER_DURATION(buffer);
        GstFlowReturn flow_ret;
        g_signal_emit_by_name(data.app_src, "push-buffer", buffer, &flow_ret);
        usleep(1000000 / 25);
    }
    gst_element_set_state((GstElement*)data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}

You cannot simply set the pipeline state to NULL. Instead you need to send an EOS event into the pipeline and wait until the EOS message is reported back on the pipeline's bus. If you don't do that, the MP4 file will be missing required headers and cannot be played.
See https://gstreamer.freedesktop.org/documentation/application-development/basics/helloworld.html?gi-language=c for how to check for the EOS message on the bus.
Check https://gstreamer.freedesktop.org/documentation/applib/gstappsrc.html#gst_app_src_end_of_stream for how to tell your appsrc that the last data buffer has been pushed.
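For the code above that means: when the capture loop ends, signal end-of-stream on the appsrc and wait for the EOS message before tearing the pipeline down. A minimal sketch, using the same data struct as in the question:

// Signal that the last buffer has been pushed...
gst_app_src_end_of_stream(data.app_src);

// ...then wait until EOS (or an error) shows up on the bus, so qtmux can
// write the headers it only emits at end-of-stream.
GstBus *bus = gst_element_get_bus((GstElement*)data.pipeline);
GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType)(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (msg)
    gst_message_unref(msg);
gst_object_unref(bus);

// Only now stop and free the pipeline.
gst_element_set_state((GstElement*)data.pipeline, GST_STATE_NULL);
gst_object_unref(data.pipeline);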

Related

Gstreamer: trickplay mode in rtsp-server

I need to implement trickplay mode in rtsp-server by sending a seek event to a GstElement. Pipeline: (appsrc name=vsrc ! h264parse ! rtph264pay pt=96 name=pay0)
But if I send the seek event to any of the three GstElements, gst_element_send_event returns 0, so it doesn't work.
What am I doing wrong? Or is there another approach to implementing trickplay mode on rtsp-server?
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <string>
#include <fstream>

static GstElement *pMy = NULL;
static GstElement *pMy2 = NULL;

static gboolean timeout(GstRTSPServer * server)
{
    GstRTSPSessionPool *pool;
    pool = gst_rtsp_server_get_session_pool(server);
    gst_rtsp_session_pool_cleanup(pool);
    g_object_unref(pool);
    return TRUE;
}

/* missing from the original post; same helper as in the solution below */
static int sFileSize(const std::string &filename)
{
    std::ifstream in(filename, std::ifstream::ate | std::ifstream::binary);
    return in.tellg();
}

static void onNeedVideoData(GstElement * appsrc)
{
    static int NN = 0;
    ++NN;
    std::string filename = "C:\\rtsp_files\\body" + std::to_string(NN) + ".bin";
    int Size = sFileSize(filename);
    GstBuffer *buf = gst_buffer_new_and_alloc(Size);
    GstMapInfo map;
    gst_buffer_map(buf, &map, GST_MAP_WRITE);
    FILE *fp = fopen(filename.c_str(), "rb");
    fread(map.data, sizeof(unsigned char), Size, fp);
    fclose(fp);
    gst_buffer_unmap(buf, &map);
    // at some moment we send a seek event to some GstElement
    if (NN % 300 == 0) {
        double dspeed = 4.;
        gint64 position;
        if (!gst_element_query_position(pMy, GST_FORMAT_TIME, &position)) {
            g_printerr("Unable to retrieve current position.\n");
            return;
        }
        GstEvent *seek_event = gst_event_new_seek(dspeed, GST_FORMAT_TIME,
                (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, 0);
        auto res1 = gst_element_send_event(pMy2, seek_event);
        g_print("%d\n", res1);
    }
    GstFlowReturn ret;
    g_signal_emit_by_name(appsrc, "push-buffer", buf, &ret);
    gst_buffer_unref(buf);
}

static void need_video_data(GstElement * appsrc, guint unused)
{
    onNeedVideoData(appsrc);
}

static void media_constructed(GstRTSPMediaFactory * factory, GstRTSPMedia * media)
{
    GstElement *element = pMy = gst_rtsp_media_get_element(media);
    GstElement *vsrc = gst_bin_get_by_name_recurse_up(GST_BIN(element), "vsrc");
    g_signal_connect(vsrc, "need-data", (GCallback)need_video_data, NULL);
    pMy2 = gst_bin_get_by_name_recurse_up(GST_BIN(element), "h264parse0");
}

int main(int argc, char *argv[])
{
    GMainLoop *loop;
    GstRTSPServer *server;
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;

    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);
    /* create a server instance */
    server = gst_rtsp_server_new();
    /* get the mount points for this server, every server has a default object
     * that is used to map uri mount points to media factories */
    mounts = gst_rtsp_server_get_mount_points(server);
    /* make a media factory for a test stream. The default media factory can use
     * gst-launch syntax to create pipelines.
     * any launch line works as long as it contains elements named pay%d. Each
     * element with a pay%d name will be a stream */
    factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(factory, "( "
            "appsrc name=vsrc ! "
            "h264parse ! rtph264pay pt=96 name=pay0 )");
    gst_rtsp_media_factory_set_shared(factory, TRUE);
    g_signal_connect(factory, "media-constructed", (GCallback)media_constructed, NULL);
    /* attach the test factory to the /test url */
    gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
    /* don't need the ref to the mapper anymore */
    g_object_unref(mounts);
    /* attach the server to the default maincontext */
    if (gst_rtsp_server_attach(server, NULL) == 0)
        goto failed;
    /* add a timeout for the session cleanup */
    g_timeout_add_seconds(2, (GSourceFunc)timeout, server);
    g_print("stream ready at rtsp://127.0.0.1:8554/test\n");
    g_main_loop_run(loop);
    return 0;

    /* ERRORS */
failed:
    {
        g_print("failed to attach the server\n");
        return -1;
    }
}
I solved my own problem. The key changes compared to the code above: set the appsrc stream-type to "seekable", connect a handler to its seek-data signal, and send the seek event to the top-level element returned by gst_rtsp_media_get_element() instead of to an element inside the pipeline (plus config-interval=1 on h264parse):
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <string>
#include <fstream>

static GstElement *pMediaElement = NULL;

/* this timeout is periodically run to clean up the expired sessions from the
 * pool. This needs to be run explicitly currently but might be done
 * automatically as part of the mainloop. */
static gboolean
timeout(GstRTSPServer * server)
{
    GstRTSPSessionPool *pool;
    pool = gst_rtsp_server_get_session_pool(server);
    gst_rtsp_session_pool_cleanup(pool);
    g_object_unref(pool);
    return TRUE;
}

static int sFileSize(const std::string &filename)
{
    std::ifstream in(filename, std::ifstream::ate | std::ifstream::binary);
    return in.tellg();
}

static void onNeedVideoData(GstElement * appsrc)
{
    static int NN = 0;
    ++NN;
    std::string filename = "C:\\rtsp_files\\body" + std::to_string(NN) + ".bin";
    int Size = sFileSize(filename);
    GstBuffer *buf = gst_buffer_new_and_alloc(Size);
    GstMapInfo map;
    gst_buffer_map(buf, &map, GST_MAP_WRITE);
    FILE *fp = fopen(filename.c_str(), "rb");
    fread(map.data, sizeof(unsigned char), Size, fp);
    fclose(fp);
    gst_buffer_unmap(buf, &map);
    // at some moment we send a seek event to the media element
    if (NN == 300) {
        gint64 position;
        if (!gst_element_query_position(pMediaElement, GST_FORMAT_TIME, &position)) {
            g_printerr("Unable to retrieve current position.\n");
            return;
        }
        GstEvent *seek_event = gst_event_new_seek(4., GST_FORMAT_TIME,
                (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, 0);
        auto res = gst_element_send_event(pMediaElement, seek_event);
        g_print("%d\n", res);
    }
    GstFlowReturn ret;
    g_signal_emit_by_name(appsrc, "push-buffer", buf, &ret);
    gst_buffer_unref(buf);
}

static void need_video_data(GstElement * appsrc, guint unused)
{
    onNeedVideoData(appsrc);
}

/* called when appsrc wants us to return data from a new position with the next
 * call to push-buffer. */
static gboolean seek_data(GstElement * appsrc, guint64 position)
{
    g_print("seek_data call\n");
    //GST_DEBUG("seek to offset %" G_GUINT64_FORMAT, position);
    //app->offset = position;
    return TRUE;
}

static void media_constructed(GstRTSPMediaFactory * factory, GstRTSPMedia * media)
{
    GstElement *element = pMediaElement = gst_rtsp_media_get_element(media);
    GstElement *vsrc = gst_bin_get_by_name_recurse_up(GST_BIN(element), "vsrc");
    gst_util_set_object_arg(G_OBJECT(vsrc), "stream-type", "seekable");
    g_signal_connect(vsrc, "need-data", (GCallback)need_video_data, NULL);
    g_signal_connect(vsrc, "seek-data", G_CALLBACK(seek_data), NULL);
}

int main(int argc, char *argv[])
{
    GMainLoop *loop;
    GstRTSPServer *server;
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;

    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);
    /* create a server instance */
    server = gst_rtsp_server_new();
    /* get the mount points for this server, every server has a default object
     * that is used to map uri mount points to media factories */
    mounts = gst_rtsp_server_get_mount_points(server);
    /* make a media factory for a test stream. The default media factory can use
     * gst-launch syntax to create pipelines.
     * any launch line works as long as it contains elements named pay%d. Each
     * element with a pay%d name will be a stream */
    factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(factory, "( "
            "appsrc name=vsrc ! "
            "h264parse config-interval=1 ! rtph264pay pt=96 name=pay0 )");
    gst_rtsp_media_factory_set_shared(factory, TRUE);
    g_signal_connect(factory, "media-constructed", (GCallback)media_constructed, NULL);
    /* attach the test factory to the /test url */
    gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
    /* don't need the ref to the mapper anymore */
    g_object_unref(mounts);
    /* attach the server to the default maincontext */
    if (gst_rtsp_server_attach(server, NULL) == 0)
        goto failed;
    /* add a timeout for the session cleanup */
    g_timeout_add_seconds(2, (GSourceFunc)timeout, server);
    /* start serving, this never stops */
    g_print("stream ready at rtsp://127.0.0.1:8554/test\n");
    g_main_loop_run(loop);
    return 0;

    /* ERRORS */
failed:
    {
        g_print("failed to attach the server\n");
        return -1;
    }
}

Transfer RTSP webcam frames to OpenCV with gstreamer

I want to write a C++ program that gets RTSP stream frames with gstreamer and puts them into an OpenCV Mat. Below is my code. I get an error when trying to get a sample from the appsink; the failing call is marked in the code.
Do you have any idea how to get the sample from the appsink?
Thanks in advance.
#include <gst/gst.h>
#include <opencv2/opencv.hpp>
#include <gst/app/gstappsink.h>
#ifdef HAVE_GTK
#include <gtk/gtk.h>
#include <gdk-pixbuf/gdk-pixbuf.h>
#endif
#include <stdlib.h>

int main(int argc, char *argv[])
{
    GstElement *pipeline;
    gint width, height;
    gchar *descr;
    GError *error = NULL;
    gint64 duration, position;
    GstStateChangeReturn ret;
    gboolean res;
    GstBus *bus;
    GstMessage *msg;

    gst_init(NULL, NULL);
    pipeline = gst_parse_launch("rtspsrc location=rtsp://admin:admin@192.168.1.109:554/stream1 latency=0 ! decodebin ! appsink name=sink ", NULL);
    GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
    if (!sink) {
        printf("sink is NULL\n");
        exit(1);
    }
    GstAppSink *appsink = GST_APP_SINK(sink);
    if (!appsink) {
        printf("appsink is NULL\n");
        exit(1);
    }
    GstSample *sample = gst_app_sink_pull_sample(appsink);  /* <-- fails here */
    if (!sample) {
        printf("sample is NULL\n");
        exit(1);
    }
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    gst_buffer_map(buffer, &map, GST_MAP_READ);
    cv::Mat frame(cv::Size(320, 240), CV_8UC3, (char*)map.data, cv::Mat::AUTO_STEP);
    cv::imwrite("XYZ.jpg", frame);
}

Gstreamer EOS message handling in filesink to change location on the fly

Trying to switch output files on the fly, but can't handle EOS.
http://gstreamer-devel.966125.n4.nabble.com/Dynamically-updating-filesink-location-at-run-time-on-the-fly-td4660569.html
Quote:
Assuming you have a pipeline that looks like this:
audiosrc --> encoder --> mux --> filesink
then you'll need to change it to:
audiosrc --> encoder --> queue --> muxsink_bin
where muxsink_bin is a bin
ghostpad --> mux --> filesink
then the procedure is:
1 - Block the queue srcpad using gst_pad_set_blocked_async()
2 - In the blocked callback:
2a - unlink muxsink_bin with gst_pad_unlink()
2b - send an EOS event to the muxsink_bin sink pad with gst_pad_send_event()
2c - create a new muxsink_bin
2d - set the filesink location
2e - add the new bin to the pipeline with gst_bin_add()
2f - sync with the parent using gst_element_sync_state_with_parent()
2g - link it to the queue srcpad with gst_pad_link()
2h - unblock the queue srcpad with gst_pad_set_blocked_async(). When the unblocked callback occurs you're recording again and no data has been lost. No action is required in the unblocked callback.
3 - handle the EOS and delete the old muxsink_bin. I had a msg handler that I installed in my bin_init() function using "gstbin_class->handle_message = GST_DEBUG_FUNCPTR(msg_handler)" and in the handler:
3a - lock the bin state with gst_element_set_locked_state()
3b - set the state to NULL with gst_element_set_state()
3c - remove it from the pipeline with gst_bin_remove()
That's it. The only thing to be mindful of is that data must be flowing thru the pipeline for this to work.
Paddy
The main sequence works except for the finalization of the old bin.
The difficulty is with point 3: I can send EOS to the ghost pad, and the filesink gets it. But how do I catch that EOS?
And what does it mean to "install a msg handler using gstbin_class->handle_message = GST_DEBUG_FUNCPTR(msg_handler)"?
The answer is message forwarding.
It must be enabled on the bin (not on the bus):
g_object_set(G_OBJECT(bin), "message-forward", TRUE, NULL);
Handling:
case GST_MESSAGE_ELEMENT:
{
    const GstStructure *s = gst_message_get_structure(msg);
    if (gst_structure_has_name(s, "GstBinForwarded"))
    {
        GstMessage *forward_msg = NULL;
        gst_structure_get(s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
        if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS)
        {
            g_print("EOS from element %s\n",
                    GST_OBJECT_NAME(GST_MESSAGE_SRC(forward_msg)));
            DestroyBin();
            CreateNewBin();
            RemovePad();
        }
        gst_message_unref(forward_msg);
    }
}
Full code:
#include <gst/gst.h>
#include <iostream>
#include <cstring>
#include <cstdio>

static gchar *opt_effects = NULL;

#define DEFAULT_EFFECTS "identity,exclusion,navigationtest," \
    "agingtv,videoflip,vertigotv,gaussianblur,shagadelictv,edgetv"

static GstElement *pipeline;
static GstElement *muxer;
static GstElement *sink;
static GstElement *q2;
static int i = 0;
GstElement *bin;
GstPad *muxerSinkPad;
gulong probeId;
static GQueue effects = G_QUEUE_INIT;

void CreateNewBin();
void DestroyBin();
void ChangeLocation();
void RemovePad();

static GstPadProbeReturn
pad_probe_cb(GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
    GstPad *sinkPad = gst_element_get_static_pad(bin, "sink");
    gst_pad_unlink(pad, sinkPad);
    gst_pad_send_event(sinkPad, gst_event_new_eos());
    gst_object_unref(sinkPad);
    return GST_PAD_PROBE_OK;
}

static gboolean
timeout_cb(gpointer user_data)
{
    static int i = 0;
    if (i == 0)
    {
        GstPad *q2SrcPad;
        q2SrcPad = gst_element_get_static_pad(q2, "src");
        std::cout << "Timeout: " << q2SrcPad << std::endl;
        probeId = gst_pad_add_probe(q2SrcPad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
                pad_probe_cb, user_data, NULL);
        gst_object_unref(q2SrcPad);
        return TRUE;
    }
    return FALSE;
}

static gboolean
bus_cb(GstBus * bus, GstMessage * msg, gpointer user_data)
{
    GMainLoop *loop = (GMainLoop*)user_data;
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR: {
            GError *err = NULL;
            gchar *dbg;
            gst_message_parse_error(msg, &err, &dbg);
            gst_object_default_error(msg->src, err, dbg);
            g_error_free(err);
            g_free(dbg);
            g_main_loop_quit(loop);
            break;
        }
        case GST_MESSAGE_EOS: // was GST_EVENT_EOS, which never matches a message type
            std::cout << "EOS message is got" << std::endl;
            break;
        case GST_MESSAGE_ELEMENT:
        {
            const GstStructure *s = gst_message_get_structure(msg);
            if (gst_structure_has_name(s, "GstBinForwarded"))
            {
                GstMessage *forward_msg = NULL;
                gst_structure_get(s, "message", GST_TYPE_MESSAGE, &forward_msg, NULL);
                if (GST_MESSAGE_TYPE(forward_msg) == GST_MESSAGE_EOS)
                {
                    g_print("EOS from element %s\n",
                            GST_OBJECT_NAME(GST_MESSAGE_SRC(forward_msg)));
                    DestroyBin();
                    CreateNewBin();
                    RemovePad();
                }
                gst_message_unref(forward_msg);
            }
            break;
        }
        default:
            break;
    }
    return TRUE;
}

int
main(int argc, char **argv)
{
    GError *err = NULL;
    GMainLoop *loop;
    GstElement *src, *q1, *encoder;

    gst_init(&argc, &argv);
    pipeline = gst_pipeline_new("pipeline");
    src = gst_element_factory_make("videotestsrc", NULL);
    // Create a caps filter between the video source and the encoder chain
    std::string capsString = "video/x-raw,format=YV12,width=320,height=240,framerate=30/1";
    GstCaps *dataFilter = gst_caps_from_string(capsString.c_str());
    q1 = gst_element_factory_make("queue", NULL);
    encoder = gst_element_factory_make("x264enc", NULL);
    q2 = gst_element_factory_make("queue", NULL);
    gst_bin_add_many(GST_BIN(pipeline), src, q1, encoder, q2, NULL);
    gboolean link = gst_element_link_filtered(src, q1, dataFilter);
    link &= gst_element_link(q1, encoder);
    link &= gst_element_link(encoder, q2);
    CreateNewBin();
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    loop = g_main_loop_new(NULL, FALSE);
    gst_bus_add_watch(GST_ELEMENT_BUS(pipeline), bus_cb, loop);
    g_timeout_add_seconds(10, timeout_cb, loop);
    g_main_loop_run(loop);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}

void RemovePad()
{
    GstPad *q2SrcPad;
    q2SrcPad = gst_element_get_static_pad(q2, "src");
    gst_pad_remove_probe(q2SrcPad, probeId);
    gst_object_unref(q2SrcPad);
}

void DestroyBin()
{
    gst_element_set_state(bin, GST_STATE_NULL);
    gst_bin_remove(GST_BIN(pipeline), bin);
}

void CreateNewBin()
{
    static std::string fileLocPattern = "deneme%d.mkv";
    char buffer[32];  // large enough for multi-digit indices
    memset(buffer, 0, sizeof(buffer));
    sprintf(buffer, fileLocPattern.c_str(), i++);
    // Create muxer element
    muxer = gst_element_factory_make("matroskamux", "MatroskaMuxer");
    // Create filesink element
    sink = gst_element_factory_make("filesink", buffer);
    g_object_set(G_OBJECT(sink), "location", buffer, NULL);
    // Create muxsinkBin
    bin = gst_bin_new(buffer);
    g_object_set(G_OBJECT(bin), "message-forward", TRUE, NULL);
    // Add the elements to the bin
    gst_bin_add_many(GST_BIN(bin), muxer, sink, NULL);
    gboolean linkState = TRUE;
    // Connect elements within muxsink_bin
    // Link: matroskamux -> filesink
    linkState &= gst_element_link_many(muxer, sink, NULL);
    // Add this bin to the pipeline
    gst_bin_add(GST_BIN(pipeline), bin);
    // Create a ghost pad and manually link muxsinkBin to the rest of the pipeline
    {
        GstPadTemplate *muxerSinkPadTemplate;
        if (!(muxerSinkPadTemplate = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(muxer), "video_%u")))
        {
            std::cout << "Unable to get sink pad template from muxing element" << std::endl;
        }
        // Obtain a request pad from the muxer
        muxerSinkPad = gst_element_request_pad(muxer, muxerSinkPadTemplate, NULL, NULL);
        // Add the ghost pad
        GstPad *ghostPad = gst_ghost_pad_new("sink", muxerSinkPad);
        gst_element_add_pad(bin, ghostPad);
        gst_object_unref(GST_OBJECT(muxerSinkPad));
        gst_element_sync_state_with_parent(bin);
        // Get the src pad from the queue element
        GstPad *queueBeforeBinSrcPad = gst_element_get_static_pad(q2, "src");
        // Link queueBeforeBin to the ghost pad
        if (gst_pad_link(queueBeforeBinSrcPad, ghostPad) != GST_PAD_LINK_OK)
        {
            std::cout << "QueueBeforeBin cannot be linked to MuxerSinkPad." << std::endl;
        }
        gst_object_unref(queueBeforeBinSrcPad);
    }
}
http://gstreamer-devel.966125.n4.nabble.com/Listening-on-EOS-events-for-GstBin-td4669126.html
http://gstreamer-devel.966125.n4.nabble.com/file/n4669476/main.cpp
Depending on your use case, you can use the multifilesink element. It can switch files on the fly on certain events: a file for each buffer, a file for each segment, and so on. Check its properties and see if anything there works for you.
It also serves as a good code base in case you want to write something similar (or maybe extend it?).
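For instance, a rough sketch of a key-frame-based split (property names as in the multifilesink docs; verify the exact values against your GStreamer version):

/* Hypothetical sketch: let multifilesink start a new output file on each
 * key frame instead of swapping muxer bins by hand. The chunks here are
 * raw H.264, not muxed containers. */
GstElement *p = gst_parse_launch(
        "videotestsrc num-buffers=300 ! x264enc key-int-max=30 ! "
        "multifilesink location=chunk%05d.h264 next-file=key-frame",
        NULL);
gst_element_set_state(p, GST_STATE_PLAYING);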
I'll post the code of the actual custom GstBin, aka 'muxsink_bin', that I ended up implementing to do that forwarding and EOS handling for the detachable sink part of the pipeline.
plisolatedbin.h:
#pragma once
#include <gst/gst.h>
#include <gst/gstbin.h>
G_BEGIN_DECLS
#define PL_TYPE_ISOLATED_BIN (pl_isolated_bin_get_type ())
#define PL_IS_ISOLATED_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), PL_TYPE_ISOLATED_BIN))
#define PL_IS_ISOLATED_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), PL_TYPE_ISOLATED_BIN))
#define PL_ISOLATED_BIN_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), PL_TYPE_ISOLATED_BIN, PlIsolatedBinClass))
#define PL_ISOLATED_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), PL_TYPE_ISOLATED_BIN, PlIsolatedBin))
#define PL_ISOLATED_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), PL_TYPE_ISOLATED_BIN, PlIsolatedBinClass))
#define PL_ISOLATED_BIN_CAST(obj) ((PlIsolatedBin*)(obj))
typedef struct _PlIsolatedBin PlIsolatedBin;
typedef struct _PlIsolatedBinClass PlIsolatedBinClass;
/**
* Does not forward EOS to parent by default.
*/
struct _PlIsolatedBin
{
    GstBin bin;
};

struct _PlIsolatedBinClass
{
    GstBinClass parent_class;
};
GType pl_isolated_bin_get_type();
GstElement* pl_isolated_bin_new();
G_END_DECLS
plisolatedbin.c:
#include "plisolatedbin.h"
#include <assert.h>
G_DEFINE_TYPE(PlIsolatedBin, pl_isolated_bin, GST_TYPE_BIN)
static void pl_isolated_bin_init(PlIsolatedBin *plisolatedbin)
{
}
static void pl_isolated_bin_handle_message_func(GstBin *bin, GstMessage *message)
{
if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_EOS)
{
GST_BIN_CLASS(pl_isolated_bin_parent_class)->handle_message(bin, message);
}
else
{
GstMessage *forwarded = gst_message_new_element(GST_OBJECT_CAST(bin), gst_structure_new("PlIsolatedBinForwarded", "message", GST_TYPE_MESSAGE, message, NULL));
gst_element_post_message(GST_ELEMENT_CAST(bin), forwarded);
}
}
static void pl_isolated_bin_class_init(PlIsolatedBinClass *class)
{
class->parent_class.handle_message = GST_DEBUG_FUNCPTR(pl_isolated_bin_handle_message_func);
}
GstElement* pl_isolated_bin_new()
{
return g_object_new(PL_TYPE_ISOLATED_BIN, NULL);
}
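Usage is then a drop-in change in a bin-creating function like CreateNewBin() above; a sketch, reusing the variables from that function:

/* The EOS now surfaces on the pipeline bus as a "PlIsolatedBinForwarded"
 * element message instead of being aggregated by the default GstBin
 * handling, so the bus watch can catch it and dispose of the old bin. */
bin = pl_isolated_bin_new();
gst_object_set_name(GST_OBJECT(bin), buffer);
gst_bin_add_many(GST_BIN(bin), muxer, sink, NULL);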

Callback set with g_source_set_callback always being called?

I am learning about GSocket and wanted to write a simple program that handles incoming packets, but the callback keeps being called after just one packet has arrived. This is my simple code:
#include <glib.h>
#include <glib/gprintf.h>
#include <gio/gio.h>
#include <libsoup/soup.h>
#include <glib-object.h>

gboolean has_packet(GIOChannel *source, GIOCondition condition, gpointer data)
{
    g_printf("has packet\n");
    return TRUE;
}

int main(int argc, char **argv)
{
    g_type_init();
    GInetAddress *iface_address = g_inet_address_new_from_string("0.0.0.0");
    GSocketAddress *bind_address = g_inet_socket_address_new(iface_address, 12345);
    GSocket *sock;
    GError *err = NULL;
    sock = g_socket_new(G_SOCKET_FAMILY_IPV4,
                        G_SOCKET_TYPE_DATAGRAM,
                        G_SOCKET_PROTOCOL_UDP,
                        &err);
    g_assert(err == NULL);
    g_socket_bind(sock, bind_address, TRUE, &err);
    g_assert(err == NULL);
    //int fd = g_socket_get_fd(sock);
    //GIOChannel *channel = g_io_channel_unix_new(fd);
    //guint source = g_io_add_watch(channel, G_IO_IN, (GIOFunc)test, NULL);
    //g_io_channel_unref(channel);
    GSource *source = g_socket_create_source(sock, G_IO_IN, NULL);
    g_source_set_callback(source, (GSourceFunc)has_packet, NULL, NULL);
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);
    g_source_attach(source, g_main_loop_get_context(loop));
    g_main_loop_run(loop);
    g_main_loop_unref(loop);
}
I expected that when one packet arrives, a single "has packet" line would be printed, but it keeps printing forever. Could you help me work out where I am going wrong, please?
UPDATE: I need to read the data from the socket.
gboolean has_packet(GIOChannel *source, GIOCondition condition, gpointer data)
{
    GSocket *sock = (GSocket *)data;
    char buf[65536];
    GSocketAddress *address = NULL;
    gssize bytes;
    GInputVector vector;
    GSocketControlMessage **messages;
    gint num_messages;
    GError *error = NULL;

    vector.buffer = buf;
    vector.size = 65536;
    bytes = g_socket_receive_message(sock,
                                     &address,
                                     &vector,
                                     1,
                                     &messages,
                                     &num_messages,
                                     NULL,
                                     NULL,
                                     &error);
    g_printf("has packet\n");
    g_printf("bytes = %zd\n", bytes);
    g_printf("num_message = %d\n", num_messages);
    return TRUE;
}
You should return FALSE (or G_SOURCE_REMOVE) from the callback if you only want it to be called once.
In addition, you start the main loop with g_main_loop_run(), and that will run forever until g_main_loop_quit() is called. So you should quit the main loop at some point; presumably you want it to quit after a packet is received, so call g_main_loop_quit() in has_packet().
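Putting both points together, a minimal sketch of the callback (note that a source created with g_socket_create_source() passes the GSocket itself as the first argument; here the main loop is assumed to be passed as user_data):

gboolean has_packet(GSocket *sock, GIOCondition condition, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;
    char buf[65536];
    GError *error = NULL;

    /* drain the datagram so the socket stops polling as readable */
    gssize bytes = g_socket_receive(sock, buf, sizeof(buf), NULL, &error);
    g_print("has packet, bytes = %zd\n", bytes);

    g_main_loop_quit(loop);   /* stop the main loop after one packet */
    return G_SOURCE_REMOVE;   /* and detach this source */
}

/* attached with the loop as user_data:
 * g_source_set_callback(source, (GSourceFunc)has_packet, loop, NULL); */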

gstreamer appsrc video streaming over the network

I'm trying to use gstreamer appsrc to play a video stream over the network.
I found good examples here.
gstreamer appsrc test application
http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
Using the examples above I can play a video in an X window using Xlib. When the pipeline is set to the PLAYING state, the "need-data" signal is emitted, and in the start_feed callback the data read from a video file is injected into appsrc GstBuffers, which plays the sample video.
I'm trying to get the data from the network instead of a file, so I think a simple echo server could read a video file in exactly the same way and send the data to the client when a connection occurs. The client should receive that data and feed it to the appsrc.
My question is: how do I put the stream data into the appsrc pipeline? Does anybody have a suggestion or a good reference?
Here's the working sample code based on the examples linked above.
// http://amarghosh.blogspot.kr/2012/01/gstreamer-appsrc-in-action.html
// http://www.cs.odu.edu/~cs476/Xlib/xlines.c
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <stdio.h>
#include <unistd.h> // sleep()
#include <stdbool.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappbuffer.h>
#include <gst/interfaces/xoverlay.h>

#define BUFF_SIZE (640*480*3) //(1024)
#define BORDER_WIDTH 2
#define DEBUG printf

typedef unsigned int uint32;
typedef unsigned char uint8;

typedef struct {
    GstPipeline *pipeline;
    GstAppSrc *src;
    GstElement *sink;
    GstElement *decoder;
    GstElement *ffmpeg;
    GstElement *videosink;
    GMainLoop *loop;
    guint sourceid;
    FILE *file;
} gst_app_t;

static gst_app_t gst_app;
static Window child_window = 0;
static Window window = 0;

static gboolean read_data(gst_app_t *app)
{
    GstBuffer *buffer;
    guint8 *ptr;
    gint size;
    GstFlowReturn ret;

    ptr = g_malloc(BUFF_SIZE);
    g_assert(ptr);
    size = fread(ptr, 1, BUFF_SIZE, app->file);
    if (size == 0) {
        ret = gst_app_src_end_of_stream(app->src);
        DEBUG("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }
    buffer = gst_buffer_new();
    GST_BUFFER_MALLOCDATA(buffer) = ptr;
    GST_BUFFER_SIZE(buffer) = size;
    GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);
    ret = gst_app_src_push_buffer(app->src, buffer);
    if (ret != GST_FLOW_OK) {
        DEBUG("push buffer returned %d for %d bytes \n", ret, size);
        return FALSE;
    }
    if (size != BUFF_SIZE) {
        ret = gst_app_src_end_of_stream(app->src);
        DEBUG("eos returned %d at %d\n", ret, __LINE__);
        return FALSE;
    }
    return TRUE;
}

static void start_feed(GstElement *pipeline, guint size, gst_app_t *app)
{
    if (app->sourceid == 0) {
        DEBUG("start feeding\n");
        app->sourceid = g_idle_add((GSourceFunc)read_data, app);
    }
}

static void stop_feed(GstElement *pipeline, gst_app_t *app)
{
    if (app->sourceid != 0) {
        DEBUG("stop feeding\n");
        g_source_remove(app->sourceid);
        app->sourceid = 0;
    }
}

static void on_pad_added(GstElement *element, GstPad *pad)
{
    GstCaps *caps;
    GstStructure *str;
    gchar *name;
    GstPad *ffmpegsink;
    GstPadLinkReturn ret;

    DEBUG("pad added\n");
    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);
    g_assert(str);
    name = (gchar*)gst_structure_get_name(str);
    DEBUG("pad name %s\n", name);
    if (g_strrstr(name, "video")) {
        ffmpegsink = gst_element_get_pad(gst_app.ffmpeg, "sink");
        g_assert(ffmpegsink);
        ret = gst_pad_link(pad, ffmpegsink);
        DEBUG("pad_link returned %d\n", ret);
        gst_object_unref(ffmpegsink);
    }
    gst_caps_unref(caps);
}

static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer ptr)
{
    gst_app_t *app = (gst_app_t*)ptr;

    switch (GST_MESSAGE_TYPE(message)) {
        case GST_MESSAGE_ELEMENT: {
            gst_x_overlay_set_window_handle(GST_X_OVERLAY(GST_MESSAGE_SRC(message)), child_window);
            break;
        }
        case GST_MESSAGE_ERROR: {
            gchar *debug;
            GError *err;
            gst_message_parse_error(message, &err, &debug);
            DEBUG("Error %s\n", err->message);
            g_error_free(err);
            g_free(debug);
            g_main_loop_quit(app->loop);
            break;
        }
        case GST_MESSAGE_WARNING: {
            gchar *debug;
            GError *err;
            gchar *name;
            gst_message_parse_warning(message, &err, &debug);
            DEBUG("Warning %s\nDebug %s\n", err->message, debug);
            name = GST_MESSAGE_SRC_NAME(message);
            DEBUG("Name of src %s\n", name ? name : "nil");
            g_error_free(err);
            g_free(debug);
            break;
        }
        case GST_MESSAGE_EOS:
            DEBUG("End of stream\n");
            g_main_loop_quit(app->loop);
            break;
        case GST_MESSAGE_STATE_CHANGED:
            break;
        default:
            DEBUG("got message %s\n",
                  gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
            break;
    }
    return TRUE;
}

static gboolean terminate_playback(GstElement *loop)
{
    DEBUG("Terminating playback\n");
    g_main_loop_quit((GMainLoop *)loop);
    return FALSE;
}

int gstreamer_init(int argc, char *argv[])
{
    gst_app_t *app = &gst_app;
    GstBus *bus;
    GstStateChangeReturn state_ret;

    app->file = fopen(argv[1], "r");
    g_assert(app->file);

    /* initialization */
    gst_init(NULL, NULL);
    app->loop = g_main_loop_new(NULL, FALSE);

    /* create elements */
    app->pipeline = (GstPipeline *)gst_pipeline_new("my_pipeline");
    app->src = (GstAppSrc *)gst_element_factory_make("appsrc", "myappsrc");
    app->decoder = gst_element_factory_make("decodebin2", "mydecoder");
    app->ffmpeg = gst_element_factory_make("ffmpegcolorspace", "myffmpeg");
    app->videosink = gst_element_factory_make("autovideosink", "myvideosink");
    if (!app->videosink) {
        DEBUG("output could not be found - check your install\n");
    }
    g_assert(app->src);
    g_assert(app->decoder);
    g_assert(app->ffmpeg);
    g_assert(app->videosink);

    bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
    gst_object_unref(bus);

    g_signal_connect(app->decoder, "pad-added",
                     G_CALLBACK(on_pad_added), app->ffmpeg);
    //gst_app_src_set_emit_signals(app->src, true);
    g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
    g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);

    gst_bin_add_many(GST_BIN(app->pipeline), (GstElement *)app->src,
                     app->decoder, app->ffmpeg, app->videosink, NULL);

    /* link everything together */
    if (!gst_element_link((GstElement *)app->src, app->decoder)) {
        DEBUG("Failed to link one or more elements!\n");
        return -1;
    }
    if (!gst_element_link(app->ffmpeg, app->videosink)) {
        DEBUG("failed to link ffmpeg and videosink");
        return -1;
    }

    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_PLAYING);
    if (state_ret == GST_STATE_CHANGE_FAILURE) {
        DEBUG("Failed to start up pipeline!\n");
        return 1;
    }
    DEBUG("set state returned %d\n", state_ret);

    //g_timeout_add(15000, (GSourceFunc)terminate_playback, app->loop);
    g_main_loop_run(app->loop);

    state_ret = gst_element_set_state((GstElement *)app->pipeline, GST_STATE_NULL);
    DEBUG("set state null returned %d\n", state_ret);
    gst_object_unref(app->pipeline);
    return 1;
}

/*
 * gst-launch filesrc location=test.avi ! decodebin2 ! ffmpegcolorspace ! autovideosink
 *
 * 1. dependency library install
 * $ sudo apt-get install gstreamer0.10-plugins-bad
 * $ sudo apt-get install gstreamer0.10-ffmpeg
 *
 * 2. compile
 * $ gcc hello.c -o hello -lX11 `pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10` -lgstinterfaces-0.10
 *
 * 3. how to run program
 * $ ./hello <video_file_name>
 * $ GST_DEBUG=appsrc:5 ./hello ./hbo_dtc_sd.ts
 */
int main(int argc, char *argv[])
{
    Display *disp;
    Window root;
    long fgcolor, bgcolor;
    GC gc;
    XGCValues gc_val;
    XEvent event;
    char *msg = "Hello, World!";
    int screen;

    disp = XOpenDisplay(NULL);
    if (disp == NULL) {
        fprintf(stderr, "Cannot open display\n");
        exit(1);
    }
    screen = DefaultScreen(disp);
    root = RootWindow(disp, screen);
    fgcolor = BlackPixel(disp, screen);
    bgcolor = WhitePixel(disp, screen);
    window = XCreateSimpleWindow(disp, root, 100, 100, 1000, 840, 1,
                                 fgcolor, bgcolor);
    child_window = XCreateSimpleWindow(disp, window, 100, 100, 800, 600, 1,
                                       fgcolor, bgcolor);
    gc_val.foreground = fgcolor;
    gc_val.background = bgcolor;
    gc = XCreateGC(disp, child_window, GCForeground|GCBackground, &gc_val);
    XSelectInput(disp, child_window, ExposureMask | KeyPressMask);

    g_warning("map xwindow");
    XMapWindow(disp, window);
    XMapWindow(disp, child_window);
    XSync(disp, FALSE);
    //XDrawLine(disp, window, gc, 0, 0, 1000, 800);
    //XDrawLine(disp, child_window, gc, 0, 0, 800, 600);

    gstreamer_init(argc, argv);

    XDestroyWindow(disp, window);
    XDestroyWindow(disp, child_window);
    XCloseDisplay(disp);
    return 0;
}
You'll want to have at least one other thread (on each end) to handle communication over a socket (TCP, or UDP if on a local network). This typically makes a blocking call to wait for packets. To send data out of a pipeline, you can form a gstreamer tee and queue, and then an appsink that buffers the data and writes it to the socket. To receive, you pull the data from the socket into a buffer of your own. Keep in mind the OS's socket buffer is relatively small and will drop packets if you don't pull from it fast enough, or push to it too fast; hence the buffers.
On a need-data signal, you pull from that buffer and push into the pipeline using gst_app_src_push_buffer(). On an enough-data signal, you can just keep buffering or dispose of the data, whatever your application needs to do.
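For instance, a rough sketch of the receiving side, staying with the GStreamer 0.10 API used in the code above (sockfd is a hypothetical already-connected TCP socket; <sys/socket.h> assumed):

// Pull one chunk from the socket and hand it to appsrc; attach this with
// g_idle_add() from the need-data handler, in place of read_data() above.
static gboolean read_socket_data(gst_app_t *app)
{
    guint8 *ptr = g_malloc(BUFF_SIZE);
    gssize size = recv(sockfd, ptr, BUFF_SIZE, 0);  /* blocks until data arrives */

    if (size <= 0) {  /* peer closed the connection, or an error: signal EOS */
        g_free(ptr);
        gst_app_src_end_of_stream(app->src);
        return FALSE;
    }

    GstBuffer *buffer = gst_buffer_new();
    GST_BUFFER_MALLOCDATA(buffer) = ptr;  /* buffer takes ownership of ptr */
    GST_BUFFER_SIZE(buffer) = size;
    GST_BUFFER_DATA(buffer) = ptr;

    return gst_app_src_push_buffer(app->src, buffer) == GST_FLOW_OK;
}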