GstOverlay and gtk: display without padding (black bars) - C++

I render my stream into a wxWidgets ImagePanel (m_renderwindow in my code), as in the picture: Image. The problem is that I get padding at the top and bottom and I cannot remove it. I tried gst_video_overlay_set_render_rectangle, but the area stays black and does not display the stream. Does anyone have an idea?
GstState state;
GstState pending;
GstPad* pad = nullptr;
GError* error = NULL;
GstElement* source;
GstElement* clocktime;
GstElement* textoverlay;
GstCaps* caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"payload", G_TYPE_INT, 96,
"encoding-name", G_TYPE_STRING, "H264",
NULL);
(*ptrstats).pipeline = gst_parse_launch("udpsrc name=source ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! d3dvideosink name=mysink sync=false", &error);
if (!(*ptrstats).pipeline) {
outfile << "Load video : " << error->message << "\n";
exit(1);
}
source = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "source");
g_object_set(G_OBJECT(source), "caps", caps, NULL);
g_object_set(G_OBJECT(source), "port", m_port, NULL);
pad = gst_element_get_static_pad(source, "src");
gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)buffer_out_cb, ptrstats, NULL);
gst_object_unref(pad);
#ifdef __WXGTK__
outfile << "__WXGTK__\n";
GstElement* sink = gst_bin_get_by_name((GstBin*)(*ptrstats).pipeline, "mysink");
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), m_xid);
#elif defined __WXMSW__
GstElement* sink = gst_bin_get_by_name((GstBin*)(*ptrstats).pipeline, "mysink");
WXWidget hwnd = m_renderWindow->GetHandle();
//gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(sink), 0, 0, 1224, 1024);
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink),
reinterpret_cast<guintptr>(hwnd));
#endif
gst_element_set_state((*ptrstats).pipeline, GST_STATE_PLAYING);
SOLUTION: set force-aspect-ratio=false on d3dvideosink so the sink fills the window instead of adding black bars.
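The same fix in code, applied to the sink fetched from the pipeline (a minimal sketch; force-aspect-ratio defaults to true on d3dvideosink):
GstElement* sink = gst_bin_get_by_name(GST_BIN((*ptrstats).pipeline), "mysink");
/* fill the whole render window instead of letterboxing; the image is
stretched if the window and stream aspect ratios differ */
g_object_set(G_OBJECT(sink), "force-aspect-ratio", FALSE, NULL);
gst_object_unref(sink);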

Related

Properly close the pipeline to save an image from udpsrc (currently my image is empty)

I would like to save one image from my udpsrc. When the user clicks a button, the code below runs, but when I look at the image it is empty. I tried many ways to stop the pipeline, but I think I did not close it properly.
Does anyone have any idea?
GstElement* snappipe;
GError* error = NULL;
GstElement* source;
GstElement* filesink;
GstCaps* caps = gst_caps_new_simple("application/x-rtp",
"media", G_TYPE_STRING, "video",
"payload", G_TYPE_INT, 96,
"encoding-name", G_TYPE_STRING, "H264",
NULL);
m_strPathNameSave += CreateFileName("png");
snappipe = gst_parse_launch("udpsrc name=source num-buffers=1 ! rtph264depay ! h264parse ! avdec_h264 ! autovideoconvert ! pngenc ! filesink name=mysink", &error);
if (!snappipe) {
g_print("Parse error: %s\n", error->message);
exit(1);
}
filesink = gst_bin_get_by_name(GST_BIN(snappipe), "mysink");
g_object_set(filesink, "location", m_strPathNameSave.c_str(), NULL);
source = gst_bin_get_by_name(GST_BIN(snappipe), "source");
g_object_set(G_OBJECT(source), "caps", caps, NULL);
g_object_set(G_OBJECT(source), "port", m_port, NULL);
gst_element_set_state(snappipe, GST_STATE_PLAYING);
GstBus* bus = gst_element_get_bus(snappipe);
gst_object_unref(bus);
Sleep(10000);
gst_element_set_state(snappipe, GST_STATE_NULL);
gst_object_unref(snappipe);
I solved the problem like this:
std::string strPathImage = "\\image.png";
GstCaps* caps;
GstSample* from_sample, * to_sample;
GError* err = NULL;
GstBuffer* buf;
GstMapInfo map_info;
g_object_get((*ptrstats).sink, "last-sample", &from_sample, NULL);
if (from_sample == NULL) {
GST_ERROR("Error getting last sample form sink");
return;
}
caps = gst_caps_from_string("image/png");
to_sample = gst_video_convert_sample(from_sample, caps, GST_CLOCK_TIME_NONE, &err);
gst_caps_unref(caps);
gst_sample_unref(from_sample);
if (to_sample == NULL && err) {
GST_ERROR("Error converting frame: %s", err->message);
g_error_free(err);
return;
}
buf = gst_sample_get_buffer(to_sample);
if (gst_buffer_map(buf, &map_info, GST_MAP_READ)) {
if (!g_file_set_contents(strPathImage.c_str(), (const char*)map_info.data,
map_info.size, &err)) {
GST_WARNING("Could not save thumbnail: %s", err->message);
g_error_free(err);
}
}
/* unmap the buffer before unreffing the sample that owns it */
gst_buffer_unmap(buf, &map_info);
gst_sample_unref(to_sample);
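For the snapshot pipeline itself, the fixed Sleep(10000) can be replaced by waiting on the bus: num-buffers=1 makes udpsrc send EOS after its single buffer (note that one UDP packet is usually not a whole H.264 frame, which is likely why the saved file came out empty). A sketch:
GstBus* bus = gst_element_get_bus(snappipe);
/* block until EOS or an error is posted instead of sleeping a fixed time */
GstMessage* msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
(GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
if (msg != NULL)
gst_message_unref(msg);
gst_object_unref(bus);
gst_element_set_state(snappipe, GST_STATE_NULL);
gst_object_unref(snappipe);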

Is there any way to reduce the CPU consumption of GStreamer streaming?

I use GStreamer to implement RTMP streaming, and the CPU usage is as high as 400%. Is there any way to reduce it?
The code continuously receives OpenCV Mat images, feeds them into appsrc, and finally pushes the result out as an RTMP stream.
The code is as follows:
void cb_need_data(GstElement *appsrc, guint unused_size, gpointer user_data)
{
printf("need data!\n");
static GstClockTime timestamp = 0;
GstBuffer *buffer;
guint size;
GstFlowReturn ret;
GstMapInfo map;
size = 700 * 700 * 3;
buffer = gst_buffer_new_allocate(NULL, size, NULL);
/* map for writing, since image data is copied into the buffer below */
gst_buffer_map(buffer, &map, GST_MAP_WRITE);
while (image.data == NULL)
{
std::this_thread::sleep_for(std::chrono::milliseconds(100));
};
memcpy(map.data, image.data, size);
GST_BUFFER_PTS(buffer) = timestamp;
GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 30);
timestamp += GST_BUFFER_DURATION(buffer);
/* unmap before handing the buffer to appsrc, then drop our reference */
gst_buffer_unmap(buffer, &map);
g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
gst_buffer_unref(buffer);
if (ret != GST_FLOW_OK)
{
/* something wrong, stop pushing */
g_main_loop_quit(loop);
}
}
void *rtmpPush(void *p)
{
/* init GStreamer */
gst_init(NULL, NULL);
loop = g_main_loop_new(NULL, FALSE);
GstElement *pipeline, *appsrc, *conv, *x264enc, *h264parse, *flvmux, *videosink;
/* setup pipeline */
pipeline = gst_pipeline_new("pipeline");
appsrc = gst_element_factory_make("appsrc", "source");
conv = gst_element_factory_make("videoconvert", "conv");
h264parse = gst_element_factory_make("h264parse", "h264parse");
flvmux = gst_element_factory_make("flvmux", "flvmux");
x264enc = gst_element_factory_make("x264enc", "x264enc");
videosink = gst_element_factory_make("rtmpsink", "videosink");
g_object_set(G_OBJECT(videosink), "location", "rtmp://218.77.60.210:1935/rtmplive/354", NULL);
/* setup */
// g_object_set(G_OBJECT(appsrc),
// "stream-type", 0,
// "format", GST_FORMAT_TIME, NULL);
g_object_set(G_OBJECT(appsrc),
"is-live", 1,
"stream-type", 0,
//"format", GST_FORMAT_TIME, NULL,
"caps",
gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "BGR",
"width", G_TYPE_INT, 700,
"height", G_TYPE_INT, 700,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL),
NULL);
gst_bin_add_many(GST_BIN(pipeline), appsrc, conv, x264enc, flvmux, videosink, NULL);
gst_element_link_many(appsrc, conv, x264enc, flvmux, videosink, NULL);
g_signal_connect(appsrc, "need-data", G_CALLBACK(cb_need_data), NULL);
//g_signal_connect(appsrc, "enough-data", G_CALLBACK(cb_enough_data), NULL);
/* play */
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);
/* clean up */
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
g_main_loop_unref(loop);
while (1)
{
sleep(INT_MAX);
}
}
Is there any other plugin that can replace the existing ones and reduce CPU usage?
Can anyone help me? Thank you very much.
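Most of the CPU in a pipeline like this normally goes into x264enc, whose default preset is quite heavy. A sketch of cheaper settings (the values are suggestions, not from the original post):
/* trade some quality for a large CPU saving; both properties exist on x264enc */
gst_util_set_object_arg(G_OBJECT(x264enc), "speed-preset", "ultrafast");
gst_util_set_object_arg(G_OBJECT(x264enc), "tune", "zerolatency");
Where the platform provides one, a hardware encoder element (for example vaapih264enc, nvh264enc or omxh264enc) can replace x264enc entirely and cut CPU usage much further.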

Gstreamer rtsp application for audio and video

I was trying to develop an application for the pipeline:
gst-launch-1.0 rtspsrc location="rtsp://192.168.3.30:8554/rajvi" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink
Following is the code which I have implemented:
#include <gst/gst.h>
static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
gchar *name;
name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name);
GstCaps * p_caps = gst_pad_get_pad_template_caps (pad);
gchar * description = gst_caps_to_string(p_caps);
g_free(description);
GstElement *depay = GST_ELEMENT(data);
if(gst_element_link_pads(element, name, depay, "sink") == 0)
{
g_print("cb_new_rtspsrc_pad : failed to link elements \n");
}
g_free(name);
}
int main(int argc, char *argv[]) {
GstElement *source, *videosink, *audio, *video, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
*audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
GstPad *sinkpad,*ghost_sinkpad;
gboolean link_ok;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("rtsp-pipeline");
source = gst_element_factory_make ("rtspsrc", "source");
/*audio bin*/
audioQueue = gst_element_factory_make ("queue", "audio-queue");
audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make ("aacparse", "audio-parser");
audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make ("audioconvert", "aconv");
audioResample = gst_element_factory_make ("audioresample", "audio-resample");
audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
{
g_printerr("Cannot create audio elements \n");
return 0;
}
g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
g_object_set(source, "latency", 0, NULL);
g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), audioDepay);
gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
g_printerr("Error linking fields ...1 \n");
return 0;
}
video = gst_bin_new ("videobin");
videoQueue = gst_element_factory_make ("queue", "video-queue");
videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make ("h264parse", "video-parser");
videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
g_printerr("Cannot create video elements \n");
return 0;
}
gst_bin_add_many(GST_BIN(video), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
videoSink, NULL);
/* set property value */
link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
sinkpad = gst_element_get_static_pad (videoConvert, "sink");
ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
gst_pad_set_active (ghost_sinkpad, TRUE);
gst_element_add_pad (video, ghost_sinkpad);
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
{
g_printerr("Error linking fields... 2 \n");
return 0;
}
gst_bin_add_many (GST_BIN(pipeline), video,NULL);
/* Start playing */
gst_element_set_state ( pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
Getting an error linking the pipeline->audio->video bins.
If you put the video and audio in the pipeline bin all together, then you can do it. Figure out what your caps are for the video and audio, and you should be able to link them.
// ----------------------------------
// pad-added signal
// ----------------------------------
static void onPadAdded(GstElement* element, GstPad* pad, gpointer user_data)
{
gchar *name;
GstCaps * p_caps;
GstElement* nextElement = NULL; /* stays NULL if the pad matches neither branch */
GstElement* pipeline = (GstElement*)user_data;
name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name);
p_caps = gst_pad_get_pad_template_caps(pad);
if (strstr(name, "[CAPS FOR VIDEO CONTAIN]") != NULL)
{
std::cout << std::endl << "------------------------ Video -------------------------------" << std::endl;
nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-depayer");
}
else if (strstr(name, "[CAPS FOR AUDIO CONTAIN]") != NULL)
{
std::cout << std::endl << "------------------------ Audio -------------------------------" << std::endl;
nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-depayer");
}
if (nextElement != NULL)
{
if (!gst_element_link_filtered(element, nextElement, p_caps))
//if (!gst_element_link_pads_filtered(element, name, nextElement, "sink", p_caps))
{
std::cout << std::endl << "Failed to link video element to src to sink" << std::endl;
}
gst_object_unref(nextElement);
}
g_free(name);
gst_caps_unref(p_caps);
}
// ----------------------------------
// main
// ----------------------------------
int main(int argc, char *argv[])
{
GstElement *source, *videosink, *audio,*convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
*audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean link_ok;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init(&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("rtsp-pipeline");
source = gst_element_factory_make("rtspsrc", "source");
/*audio bin*/
audioQueue = gst_element_factory_make("queue", "audio-queue");
audioDepay = gst_element_factory_make("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make("aacparse", "audio-parser");
audioDecode = gst_element_factory_make("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make("audioconvert", "aconv");
audioResample = gst_element_factory_make("audioresample", "audio-resample");
audioSink = gst_element_factory_make("autoaudiosink", "audiosink");
if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
{
g_printerr("Cannot create audio elements \n");
return 0;
}
g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
g_object_set(source, "latency", 0, NULL);
g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), pipeline);
gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
g_printerr("Error linking fields ...1 \n");
return 0;
}
videoQueue = gst_element_factory_make("queue", "video-queue");
videoDepay = gst_element_factory_make("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make("h264parse", "video-parser");
videoDecode = gst_element_factory_make("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
g_printerr("Cannot create video elements \n");
return 0;
}
gst_bin_add_many(GST_BIN(pipeline), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
videoSink, NULL);
/* set property value */
link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
gst_caps_unref(capsFilter);
if (!link_ok) {
g_warning("Failed to link element1 and element2!");
}
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
{
g_printerr("Error linking fields... 2 \n");
return 0;
}
/* Start playing */
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus(pipeline);
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,(GstMessageType)( GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Free resources */
if (msg != NULL)
gst_message_unref(msg);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
return 0;
}
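The strstr checks above need the real caps strings filled in; an alternative sketch (not from the original answer, names hypothetical) routes on the media field of the pad's application/x-rtp caps instead of the pad name:
static void onPadAddedByCaps(GstElement* element, GstPad* pad, gpointer user_data)
{
GstElement* pipeline = (GstElement*)user_data;
GstElement* depay = NULL;
/* rtspsrc pads carry application/x-rtp caps whose media field is "audio" or "video" */
GstCaps* caps = gst_pad_get_current_caps(pad);
if (caps == NULL)
caps = gst_pad_query_caps(pad, NULL);
const gchar* media = gst_structure_get_string(gst_caps_get_structure(caps, 0), "media");
if (g_strcmp0(media, "video") == 0)
depay = gst_bin_get_by_name(GST_BIN(pipeline), "video-depayer");
else if (g_strcmp0(media, "audio") == 0)
depay = gst_bin_get_by_name(GST_BIN(pipeline), "audio-depayer");
if (depay != NULL)
{
GstPad* sinkpad = gst_element_get_static_pad(depay, "sink");
if (gst_pad_link(pad, sinkpad) != GST_PAD_LINK_OK)
g_printerr("Failed to link rtspsrc pad to depayloader\n");
gst_object_unref(sinkpad);
gst_object_unref(depay);
}
gst_caps_unref(caps);
}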

How to use the videomixer element in gstreamer?

I want to play two local video files using gstreamer, but I get an error: Segmentation fault. It comes from libgstvideomixer.so. What is wrong with my code? The videomixer element is needed to play the two videos. Should I use videobox for that?
gst-launch --no-fault filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! videomixer name=mix ! xvimagesink sync=false filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! mix.
static void p_gst_init(void)
{
App *app = &s_app;
GError *error = NULL;
GstBus *bus;
GstElement *parse, *decoder, *queue;
GstElement *parse2, *decoder2, *queue2;
gst_init (NULL, NULL);
/* create a mainloop to get messages */
app->loop = g_main_loop_new (NULL, TRUE);
app->playbin = gst_pipeline_new ("pipeline");
app->appsrc = gst_element_factory_make ("filesrc", "disk_source");
g_object_set (G_OBJECT (app->appsrc), "location", "/mnt/upan/test.264", NULL);
queue = gst_element_factory_make ("queue", "queue");
parse = gst_element_factory_make ("typefind", "parse");
decoder = gst_element_factory_make ("ffdec_h264", "decoder");
GstElement *filesrc2;
filesrc2 = gst_element_factory_make ("filesrc", "disk_source2");
g_object_set (G_OBJECT (filesrc2), "location", "/mnt/upan/source.264", NULL);
queue2 = gst_element_factory_make ("queue", "queue2");
parse2 = gst_element_factory_make ("typefind", "parse2");
decoder2 = gst_element_factory_make ("ffdec_h264", "decoder2");
/*
GstElement * videobox;
videobox = gst_element_factory_make("videobox", NULL);
g_object_set (videobox, "alpha", 0, "border-alpha", 0, "bottom", 100, "left", 100, "right", 100, "top", 100, NULL);
*/
GstElement * videomixer;
videomixer = gst_element_factory_make("videomixer","videomixer");
app->xvimagesink = gst_element_factory_make ("xvimagesink", "play_video");
g_object_set (G_OBJECT (app->xvimagesink), "synchronous", FALSE, NULL);
gst_bin_add_many (GST_BIN (app->playbin), app->appsrc, queue, parse, decoder, videomixer, app->xvimagesink, filesrc2, queue2, parse2, decoder2, NULL);
if(gst_element_link_many (app->appsrc, queue, parse, decoder, videomixer, NULL))
{
printf("---------link element success-----------------\n");
}
else
printf("---------link element failed-----------------\n");
gst_element_link_many (filesrc2, queue2, parse2, decoder2, videomixer, NULL);
gst_element_link_many(videomixer, app->xvimagesink, NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (app->playbin));
gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);
gst_bus_set_sync_handler(bus, (GstBusSyncHandler)create_window, app);
g_signal_connect (app->appsrc, "need-data", G_CALLBACK (feed_data), app);
return ;
}
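One thing worth verifying (a sketch against the same GStreamer 0.10 API the code uses, not a confirmed fix): videomixer takes request sink pads named sink_%d, and requesting and linking them explicitly reports a failed link instead of crashing later:
/* request a mixer input and link the first decoder to it by hand */
GstPad *mixpad = gst_element_get_request_pad (videomixer, "sink_%d");
GstPad *decpad = gst_element_get_static_pad (decoder, "src");
if (gst_pad_link (decpad, mixpad) != GST_PAD_LINK_OK)
g_printerr ("Failed to link decoder to videomixer\n");
gst_object_unref (decpad);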

Videomixer fails on sources other than videotestsrc

I want to play two different local video files at the same time in a single
window. The code below without demux and decoder works fine.
static void play_video(){
GMainLoop *loop;
GstElement *pipeline,*videomixer;
GstElement *src,*sink,*filter,*csp,*videobox;
GstElement *src1,*filter1,*csp1,*videobox1;
GstElement *srcb,*filterb,*cspb,*videoboxb;
GstCaps *filtercaps,*filtercaps1,*filtercapsb;
GstPad *pad,*pad1;
const gchar *pattern = "snow"; /* unused; the pattern is set numerically below */
loop = g_main_loop_new(NULL, FALSE);
pipeline = gst_pipeline_new("my-pipeline");
src = gst_element_factory_make ("videotestsrc","src");
src1 = gst_element_factory_make ("videotestsrc","src1");
g_object_set (G_OBJECT (src), "pattern", 10, NULL);
filter = gst_element_factory_make("capsfilter","filter");
filter1 = gst_element_factory_make("capsfilter","filter1");
csp = gst_element_factory_make("ffmpegcolorspace","csp");
csp1 = gst_element_factory_make("ffmpegcolorspace","csp1");
/**/
videobox=gst_element_factory_make("videobox","videobox");
g_object_set(videobox,"top",0,"bottom",0,"left",0,"right",0,NULL);
videobox1=gst_element_factory_make("videobox","videobox1");
g_object_set(videobox1,"top",-20,"bottom",0,"left",0,"right",0,NULL);
videomixer=gst_element_factory_make("videomixer","videomixer");
/**/
sink = gst_element_factory_make("xvimagesink","sink");
if(sink == NULL)
sink = gst_element_factory_make("ximagesink","sink");
if(sink == NULL)
g_error("'ximagesink' yaratılamadı.");
gst_bin_add_many(GST_BIN(pipeline),src,filter,videobox,videomixer,csp,sink,
src1,filter1,videobox1,csp1,NULL);
gst_element_link_many(src,filter,csp,videobox,videomixer,NULL);
gst_element_link_many(src1,filter1,csp1,videobox1,videomixer,NULL);
/*
videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=1/1, width=350,
height=250 ! \
textoverlay font-desc="Sans 24" text="CAM2" valign=top halign=left
shaded-background=true ! \
videobox border-alpha=0 top=-200 left=-450 ! mix. \
*/
gst_element_link_many(videomixer,sink,NULL);
filtercaps = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 1024,
"height", G_TYPE_INT, 768,
"framerate", GST_TYPE_FRACTION, 25, 1,
"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
NULL);
filtercaps1 = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 200,
"height", G_TYPE_INT, 500,
"framerate", GST_TYPE_FRACTION, 25, 1,
"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
NULL);
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
g_object_set (G_OBJECT (filter1), "caps", filtercaps1, NULL);
gst_caps_unref (filtercaps1);
/*pad = gst_element_get_pad (src, "src");
pad1 = gst_element_get_pad (src1, "src1");
//gst_pad_add_buffer_probe (pad, G_CALLBACK (cb_have_data), NULL);
//gst_pad_add_buffer_probe (pad1, G_CALLBACK (cb_have_data), NULL);
//gst_object_unref (pad);
//gst_object_unref (pad1);*/
/* run */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* wait until it's up and running or failed */
if (gst_element_get_state (pipeline, NULL, NULL, -1) ==
GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
}
g_print ("Running ...\n");
g_main_loop_run (loop);
/* exit */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
}
The problem is that when I replace the videotestsrc with filesrc it fails, and the only error message I get is "Could not lookup object NULL on signal destroy of object window". I am not a gstreamer expert; my guess is that I am setting up the demuxer and decoder incorrectly.
static void play_video5(){
GMainLoop *loop;
GstElement *pipeline,*videomixer;
GstElement *src,*sink,*filter,*csp,*videobox;
GstElement *src1,*filter1,*csp1,*videobox1;
GstElement *srcb,*filterb,*cspb,*videoboxb;
GstCaps *filtercaps,*filtercaps1,*filtercapsb;
GstPad *pad,*pad1;
GstElement *demux,*decoder;
const gchar *pattern = "snow"; /* unused */
loop = g_main_loop_new(NULL, FALSE);
pipeline = gst_pipeline_new("my-pipeline");
//Source
src = gst_element_factory_make ("videotestsrc","src");
src1 = gst_element_factory_make ("filesrc","src1");
g_object_set (G_OBJECT (src1), "location", "file:///root/yu.mp4", NULL);
//Demux
demux = gst_element_factory_make ("mpegdemux", "demux");
//Decoder
decoder = gst_element_factory_make ("decodebin", "decoder");
// decoder = gst_element_factory_make ("ffdec_mpeg4","mpeg4-decoder");
//Filter
filter = gst_element_factory_make("capsfilter","filter");
filter1 = gst_element_factory_make("capsfilter","filter1");
//Colorspace
csp = gst_element_factory_make("ffmpegcolorspace","csp");
csp1 = gst_element_factory_make("ffmpegcolorspace","csp1");
//Videobox
videobox=gst_element_factory_make("videobox","videobox");
g_object_set(videobox,"top",0,"bottom",0,"left",0,"right",0,NULL);
videobox1=gst_element_factory_make("videobox","videobox1");
g_object_set(videobox1,"top",-20,"bottom",0,"left",0,"right",0,NULL);
//videomixer
videomixer=gst_element_factory_make("videomixer","videomixer");
//Sink
sink = gst_element_factory_make("xvimagesink","sink");
if(sink == NULL)
sink = gst_element_factory_make("ximagesink","sink");
if(sink == NULL)
g_error("'ximagesink' yaratılamadı.");
//Add to Bin
gst_bin_add_many(GST_BIN(pipeline),src,filter,videobox,videomixer,csp,
src1,decoder,filter1,videobox1,csp1,sink,NULL);
//Link Elements
gst_element_link(src,filter);
gst_element_link(filter,csp);
gst_element_link(csp,videobox);
gst_element_link(videobox, videomixer);
gst_element_link(src1,decoder);
gst_element_link(decoder,filter1);
// gst_element_link(decoder,csp1);
gst_element_link(filter1,csp1);
gst_element_link(csp1,videobox1);
gst_element_link(videobox1, videomixer);
gst_element_link(videomixer,sink);
//Cap definition
filtercaps = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 1024,
"height", G_TYPE_INT, 768,
"framerate", GST_TYPE_FRACTION, 25, 1,
"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,
NULL);
filtercaps1 = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 25, 1,
/*"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, G_BYTE_ORDER,*/
NULL);
//Cap to Filter
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
g_object_set (G_OBJECT (filter1), "caps", filtercaps1, NULL);
gst_caps_unref (filtercaps1);
/* run */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* wait until it's up and running or failed */
if (gst_element_get_state (pipeline, NULL, NULL, -1) ==
GST_STATE_CHANGE_FAILURE) {
g_error ("Failed to go into PLAYING state");
}
g_print ("Running ...\n");
g_main_loop_run (loop);
/* exit */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
}
Any ideas or corrections are welcome.
Several issues:
- filesrc does not take URIs but file paths, so pass "/root/yu.mp4" rather than "file:///root/yu.mp4"
- the "wait until it's up and running or failed" block is not needed; better to listen on the bus for error and warning messages
- "Could not lookup object NULL on signal destroy of object window" has nothing to do with gstreamer
- the whole videobox business is not needed, as the pads of videomixer have xpos, ypos and zorder properties (see the sketch below)
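A minimal sketch of that last point against the same 0.10 API (the offsets are illustrative):
/* position the second stream inside the mixer output instead of shifting it with videobox */
GstPad *mixpad = gst_element_get_request_pad (videomixer, "sink_%d");
g_object_set (mixpad,
"xpos", 0, /* horizontal offset in the output frame */
"ypos", 20, /* vertical offset, replaces videobox top=-20 */
"zorder", 1, /* stacking order where streams overlap */
NULL);
GstPad *srcpad = gst_element_get_static_pad (csp1, "src");
gst_pad_link (srcpad, mixpad);
gst_object_unref (srcpad);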