I have a USB camera, and I have working terminal commands to record or display full HD video and to save a single 4K image. I would like to handle all of this from a C++ app. Concentrating on the video saving:
gst-launch-1.0 v4l2src device=/dev/video0 num-buffers=900 ! image/jpeg, width=1920, height=1080, io-mode=4 ! imxvpudec ! imxvpuenc_mjpeg ! avimux ! filesink location=/mnt/ssd/test.avi
will save 900 frames (i.e. 30 s) of video. I would like C++ code that records indefinitely (in the future perhaps in hour-long segments) until I (the app) tell it to stop.
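For the eventual hour-long segments, one option worth noting (a sketch only, untested with the i.MX elements above; splitmuxsink is available from GStreamer 1.6) is to let splitmuxsink rotate output files by duration:

/* Sketch: splitmuxsink rotates output files by duration.
 * max-size-time is in nanoseconds; 3600 * GST_SECOND is one hour. */
GstElement *splitsink = gst_element_factory_make ("splitmuxsink", "splitsink");
g_object_set (splitsink,
              "location", "/mnt/ssd/segment%05d.avi",
              "max-size-time", (guint64) 3600 * GST_SECOND,
              "muxer", gst_element_factory_make ("avimux", NULL),
              NULL);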
I came up with
struct {
    GstElement *pipeline_sink, *source, *appsink;
    GstElement *pipeline_src, *appsrc, *decoder, *mux, *sink, *encoder;
} usbCam::mGstData;

int usbCam::gstInit(){
    GstCaps *caps;
    GstStateChangeReturn ret;

    // Initialize GStreamer
    if (!gst_is_initialized()) {
        setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
        gst_init(nullptr, nullptr);
    }

    // Create the elements
    mGstData.source = gst_element_factory_make ("v4l2src", "source");
    g_object_set (mGstData.source, "device", "/dev/video0", NULL);
    mGstData.appsink = gst_element_factory_make ("appsink", "appsink");
    mGstData.pipeline_sink = gst_pipeline_new ("pipeline_sink");

    caps = gst_caps_new_any();
    gst_app_sink_set_caps(GST_APP_SINK(mGstData.appsink), caps);
    gst_caps_unref (caps);
    gst_app_sink_set_emit_signals(GST_APP_SINK(mGstData.appsink), true);

    // Build the pipeline
    gst_bin_add_many (GST_BIN (mGstData.pipeline_sink), mGstData.source, mGstData.appsink, NULL);
    if (gst_element_link_many(mGstData.source, mGstData.appsink, NULL) != TRUE) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (mGstData.pipeline_sink);
        return -1;
    }

    return 0;
}
int usbCam::videoStart(){
    GstCaps *caps;
    GstStateChangeReturn ret;

    if (!mGstData.pipeline_sink || !mGstData.source) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
    }

    mGstData.appsrc = gst_element_factory_make ("appsrc", "appsrc");
    mGstData.decoder = gst_element_factory_make ("imxvpudec", "transform_enc");
    mGstData.mux = gst_element_factory_make ("avimux", "avimux");
    mGstData.sink = gst_element_factory_make ("filesink", "sink");
    g_object_set (mGstData.sink, "location", "/mnt/ssd/videoTest.avi", NULL);
    mGstData.pipeline_src = gst_pipeline_new ("pipeline_src");

    if (!mGstData.pipeline_src || !mGstData.appsrc || !mGstData.decoder || !mGstData.mux || !mGstData.sink) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
    }

    caps = gst_caps_new_simple ("image/jpeg",
                                "width", G_TYPE_INT, 1920,
                                "height", G_TYPE_INT, 1080,
                                "io-mode", G_TYPE_INT, 4,
                                NULL);
    gst_app_src_set_caps(GST_APP_SRC(mGstData.appsrc), caps);
    gst_caps_unref (caps);
    gst_app_src_set_duration(GST_APP_SRC(mGstData.appsrc), GST_TIME_AS_MSECONDS(80));
    gst_app_src_set_stream_type(GST_APP_SRC(mGstData.appsrc), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_latency(GST_APP_SRC(mGstData.appsrc), -1, 0);

    gst_bin_add_many (GST_BIN (mGstData.pipeline_src), mGstData.appsrc, mGstData.decoder, mGstData.sink, NULL);
    if (gst_element_link_many(mGstData.appsrc, mGstData.decoder, mGstData.sink, NULL) != TRUE) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (mGstData.pipeline_src);
        return -1;
    }

    ret = gst_element_set_state (mGstData.pipeline_src, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (mGstData.pipeline_src);
        return -1;
    }

    return 0;
}
int usbCam::videoEnd(){
    gst_app_src_end_of_stream(GST_APP_SRC(mGstData.appsrc));
    usleep(500000);
    gst_element_set_state (mGstData.pipeline_src, GST_STATE_NULL);
    gst_object_unref (mGstData.pipeline_src);
    return 0;
}
Now, this code runs. There is no error in the output, though there is one warning:
(GLib-GObject-WARNING **: 17:51:34.132: g_object_set_is_valid_property: object class 'GstSplitMuxSink' has no property named 'h}\x9fe h\xe6a_no_\xc1')
What actually bothers me is the output file. It is created, but it is an empty file of 0 B. Can anyone point me in the direction of the proper fix?
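(For anyone comparing against the gst-launch line: in videoStart the mux and encoder are created but never linked, and nothing moves samples from pipeline_sink's appsink into pipeline_src's appsrc. A minimal hand-off, as a sketch only, where on_new_sample is a made-up name, would be a new-sample callback that pushes each pulled sample into the appsrc:)

#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>

static GstFlowReturn on_new_sample (GstAppSink *appsink, gpointer user_data)
{
    // Pull the sample the capture pipeline produced and feed the recorder.
    GstAppSrc *appsrc = GST_APP_SRC (user_data);
    GstSample *sample = gst_app_sink_pull_sample (appsink);
    if (!sample)
        return GST_FLOW_ERROR;
    GstFlowReturn ret = gst_app_src_push_sample (appsrc, sample);
    gst_sample_unref (sample);
    return ret;
}

// Wiring, after both pipelines are built:
g_signal_connect (mGstData.appsink, "new-sample", G_CALLBACK (on_new_sample), mGstData.appsrc);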
Edit: Today I came up with two other attempts. The first one is not that different from the one already posted here. The second gives me a pipeline with the wrong parameters (different FPS), and I am unable to stop it correctly so that the file gets a proper EOF.
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
std::string command = "v4l2src device=/dev/video0 ! image/jpeg, width=1920, height=1080, io-mode=4 ! imxvpudec ! imxvpuenc_mjpeg ! avimux ! filesink location=/mnt/ssd/testPipeline.avi";

/* Build the pipeline */
pipeline = gst_parse_launch (command.c_str(), NULL);

/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);

/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
                                  GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

/* Free resources */
if (msg != NULL)
    gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
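(To stop a pipeline like this on demand so the muxer can finalize the file, the usual approach is to inject an EOS event into the pipeline, e.g. from another thread or a signal handler; the gst_bus_timed_pop_filtered() call above then returns once the EOS has drained through avimux. A sketch:)

/* Ask the pipeline to finish: the EOS event travels through every
 * element, avimux writes its index, and EOS then shows up on the bus. */
gst_element_send_event (pipeline, gst_event_new_eos ());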
EDIT2:
OK now my code looks like this:
GstElement *pipeline;
GstElement *tee; // in the future I would like to save video and images AND stream, or use this pipeline's data internally

void gstFail(const gchar* message){
    g_printerr("%s", message);
    gst_object_unref (pipeline);
    return;
}
void videoStart(std::string path){
    if (!gst_is_initialized()) {
        setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
        gst_init(nullptr, nullptr);
    }
    GstCaps *caps;
    GstStateChangeReturn ret;
    GstElement *source, *muxer, *sink;

    source = gst_element_factory_make ("v4l2src", "source");
    g_object_set (source, "device", mVideoDevice.toStdString().c_str(), NULL);
    muxer = gst_element_factory_make ("avimux", "avimux");
    tee = gst_element_factory_make("tee", "tee");
    sink = gst_element_factory_make ("filesink", "sink");
    g_object_set (sink, "location", path.c_str(), NULL);
    pipeline = gst_pipeline_new ("pipeline_src");

    if (!pipeline || !source || !muxer || !sink) {
        g_printerr ("Not all elements could be created.\n");
        return;
    }

    caps = gst_caps_new_simple ("image/jpeg",
                                "width", G_TYPE_INT, 1920,
                                "height", G_TYPE_INT, 1080,
                                "io-mode", G_TYPE_INT, 4,
                                "framerate", GST_TYPE_FRACTION, 30, 1,
                                "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
                                "interlace-mode", G_TYPE_STRING, "progressive",
                                NULL);

    gst_bin_add_many (GST_BIN (pipeline), source, muxer, tee, sink, NULL);
    if (gst_element_link_filtered(source, muxer, caps) != TRUE) {
        gst_caps_unref (caps);
        gstFail("Elements could not be linked or caps set.\n");
        return;
    }
    gst_caps_unref (caps);
    if (gst_element_link_many(muxer, tee, sink, NULL) != TRUE) {
        gstFail("Elements could not be linked or caps set.\n");
        return;
    }

    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        gstFail("Unable to set the pipeline to the playing state.\n");
        return;
    }
    return;
}
void videoEnd(void)
{
    // Note: gst_bus_post() takes ownership of the message, so it must
    // not be unreffed here afterwards.
    GstMessage *message = gst_message_new_eos(&pipeline->object);
    gst_bus_post(pipeline->bus, message);

    gst_element_change_state(pipeline, GST_STATE_CHANGE_PLAYING_TO_PAUSED);
    gst_element_change_state(pipeline, GST_STATE_CHANGE_PAUSED_TO_READY);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
}
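(Posting an EOS message on the bus only notifies the application; it does not drain the pipeline, which is probably why players disagree about the file's length. Sending an EOS event through the pipeline instead lets avimux finish the file properly; a sketch:)

/* Send EOS through the elements rather than to the application, then
 * wait for the resulting EOS message on the bus before going to NULL. */
gst_element_send_event (pipeline, gst_event_new_eos ());
GstBus *bus = gst_element_get_bus (pipeline);
GstMessage *eos = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
    GstMessageType(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (eos != NULL)
    gst_message_unref (eos);
gst_object_unref (bus);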
void takeImage(std::string path){
    GstElement *sink = gst_element_factory_make("multifilesink", "multifilesink");
    g_object_set (sink, "location", path.c_str(), NULL);
    gst_bin_add_many (GST_BIN (pipeline), sink, NULL);
    if (gst_element_link_many(tee, sink, NULL) != TRUE) {
        gstFail("Elements could not be linked or caps set.\n");
        return;
    }
    return;
}
This saves the video ALMOST OK (VLC does not display the correct length, but when I look at the file properties via Nautilus in Ubuntu the correct length is shown, and the video is playable). It does not save the pictures.
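(A likely reason the picture branch produces nothing: an element added to an already-playing pipeline stays in the NULL state until it is told to match the pipeline's state, and a tee branch usually also wants its own queue. A sketch of the usual pattern, assuming the tee and pipeline above; imgsink is a made-up name:)

GstElement *imgsink = gst_element_factory_make ("multifilesink", "imgsink");
g_object_set (imgsink, "location", path.c_str(), NULL);
gst_bin_add (GST_BIN (pipeline), imgsink);
if (!gst_element_link (tee, imgsink))
    g_printerr ("tee -> imgsink link failed\n");
gst_element_sync_state_with_parent (imgsink); // bring it up to PLAYING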
OK, so here's how I solved it: my initial pipeline is split with a tee element into two sinks: the original sink that saves the video, and an appsink. In the callback function for the appsink I create a new pipeline and push the frame into it any time I want to save an image. Basically:
...
int saveSampleFromAppsinkJpeg( GstSample *sample){
    if (!shouldSaveImage) {
        return -2;
    }
    if (capturing){
        return -3;
    }
    std::thread([=]{
        capturing = true;
        GstStateChangeReturn ret;
        GstElement *appsrc = gst_element_factory_make ("appsrc", "appsrc");
        GstElement *sink = gst_element_factory_make ("multifilesink", "sink");
        g_object_set (sink, "location", "some/path", NULL);

        GstElement *pipeline_img = gst_pipeline_new ("pipeline_img");
        if (!pipeline_img || !appsrc || !sink) {
            g_printerr ("Not all elements could be created.\n");
            capturing = false;
            return -1;
        }

        gst_app_src_set_caps(GST_APP_SRC(appsrc), caps);
        gst_app_src_set_duration(GST_APP_SRC(appsrc), GST_TIME_AS_MSECONDS(80)); // TODO 80
        gst_app_src_set_stream_type(GST_APP_SRC(appsrc), GST_APP_STREAM_TYPE_STREAM);
        gst_app_src_set_latency(GST_APP_SRC(appsrc), -1, 0);

        gst_bin_add_many (GST_BIN (pipeline_img), appsrc, sink, NULL);
        if (gst_element_link_many(appsrc, sink, NULL) != TRUE) {
            g_printerr ("Elements could not be linked.\n");
            gst_object_unref (pipeline_img);
            capturing = false;
            return -1;
        }

        ret = gst_element_set_state (pipeline_img, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
            g_printerr ("Unable to set the pipeline to the playing state.\n");
            gst_object_unref (pipeline_img);
            capturing = false;
            return -1;
        }

        // push the image into the pipeline
        GstFlowReturn status = GstFlowReturn::GST_FLOW_OK;
        status = gst_app_src_push_sample(GST_APP_SRC(appsrc), sample);
        if (status != GstFlowReturn::GST_FLOW_OK) g_printerr ("Sample for saving image not pushed.\n");
        status = gst_app_src_end_of_stream(GST_APP_SRC(appsrc));
        if (status != GstFlowReturn::GST_FLOW_OK) g_printerr ("EOS for saving image not pushed.\n");

        // end the pipeline
        usleep(500000); // Important
        GstMessage *message = gst_message_new_eos(&pipeline_img->object);
        gst_bus_post(pipeline_img->bus, message); // gst_bus_post takes ownership of the message

        gst_element_set_state (pipeline_img, GST_STATE_PAUSED);
        gst_element_set_state (pipeline_img, GST_STATE_NULL);
        gst_object_unref (pipeline_img);
        shouldSaveImage = false;
        capturing = false;
        return 1;
    }).detach();
    return 1;
}
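(One caveat with the detached thread, stated as an assumption since the elided parts are not shown: the GstSample captured by the lambda must stay alive until the thread is done with it, so the callback should keep a reference for the thread. A sketch:)

// In the caller, before starting the thread:
gst_sample_ref (sample);      // the thread co-owns the sample now
saveSampleFromAppsinkJpeg (sample);
// ...and at the end of the thread body, once the push is done:
gst_sample_unref (sample);    // drop the thread's reference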
static GstFlowReturn new_sample_jpeg(GstElement * elt)
{
    GstSample *sample;
    GstBuffer *buffer;
    GstMemory *memory;
    GstFlowReturn ret = GST_FLOW_OK;

    // get the sample from appsink
    sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
    buffer = gst_sample_get_buffer (sample);
    if (buffer != NULL) {
        memory = gst_buffer_get_memory (buffer, 0);
        if (memory != NULL) {
            // now all the data is image data; if an image is wanted -> save it!
            if (wantToSave) saveSampleFromAppsinkJpeg(sample);
        }
        ...
    }
}
void startVideo(){
    if (!gst_is_initialized()) {
        setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
        gst_init(nullptr, nullptr);
    }
    GstStateChangeReturn ret;
    GstElement *source, *muxer, *sink, *queue_rcr, *queue_app, *appsink;

    source = gst_element_factory_make ("v4l2src", "source");
    g_object_set (source, "device", "/dev/video1", NULL);
    muxer = gst_element_factory_make ("avimux", "avimux");
    tee = gst_element_factory_make("tee", "tee");
    sink = gst_element_factory_make ("filesink", "sink");
    queue_rcr = gst_element_factory_make ("queue", "record_queue");
    queue_app = gst_element_factory_make ("queue", "app_queue");
    appsink = gst_element_factory_make("appsink", "appsink");
    g_object_set (sink, "location", path.toStdString().c_str(), NULL);
    pipeline = gst_pipeline_new ("pipeline_src");

    if (!pipeline || !source || !muxer || !sink || !queue_rcr || !queue_app || !appsink) {
        g_printerr ("Not all elements could be created.\n");
        return;
    }

    caps = gst_caps_new_simple ("image/jpeg",
                                "width", G_TYPE_INT, 1920,
                                "height", G_TYPE_INT, 1080,
                                "io-mode", G_TYPE_INT, 4,
                                "framerate", GST_TYPE_FRACTION, 30, 1,
                                "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
                                "interlace-mode", G_TYPE_STRING, "progressive",
                                NULL);

    gst_bin_add_many (GST_BIN (pipeline), source, muxer, tee, sink, queue_rcr, appsink, queue_app, NULL);
    if (gst_element_link_filtered(source, tee, caps) != TRUE) {
        // failhandling
    }
    if (gst_element_link_many(tee, queue_rcr, muxer, sink, NULL) != TRUE) {
        // failhandling
    }
    if (gst_element_link_many(tee, queue_app, appsink, NULL) != TRUE) {
        // failhandling
    }

    gst_app_sink_set_emit_signals(GST_APP_SINK(appsink), true);
    g_signal_connect (appsink, "new-sample", G_CALLBACK (new_sample_jpeg), NULL);
    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        // failhandling
    }

    // Start playing
    recording = true;
    return;
}
I was trying to develop an application for the pipeline:
gst-launch-1.0 rtspsrc location="rtsp://192.168.3.30:8554/rajvi" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176, height=144 ! ximagesink
Following is the code which I have implemented:
#include <gst/gst.h>

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    gchar *name;
    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    GstCaps *p_caps = gst_pad_get_pad_template_caps (pad);
    gchar *description = gst_caps_to_string(p_caps);
    g_free(description);

    GstElement *depay = GST_ELEMENT(data);
    if (gst_element_link_pads(element, name, depay, "sink") == 0)
    {
        g_print("cb_new_rtspsrc_pad : failed to link elements \n");
    }
    g_free(name);
}

int main(int argc, char *argv[]) {
    GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
        *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
        *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *sinkpad, *ghost_sinkpad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "source");

    /* audio bin */
    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
    if (!audioQueue || !audioDepay || !audioParse || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);
    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), audioDepay);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
                     audioConvert, audioResample, audioSink, NULL);
    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Error linking fields ...1 \n");
        return 0;
    }

    video = gst_bin_new ("videobin");
    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay = gst_element_factory_make ("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                                     "width", G_TYPE_INT, 176,
                                     "height", G_TYPE_INT, 144,
                                     NULL);
    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add_many(GST_BIN(video), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
                     videoSink, NULL);

    /* set property value */
    link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok) {
        g_warning ("Failed to link element1 and element2!");
    }

    sinkpad = gst_element_get_static_pad (videoConvert, "sink");
    ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
    gst_pad_set_active (ghost_sinkpad, TRUE);
    gst_element_add_pad (video, ghost_sinkpad);

    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
    {
        g_printerr("Error linking fields... 2 \n");
        return 0;
    }
    gst_bin_add_many (GST_BIN(pipeline), video, NULL);

    /* Start playing */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Free resources */
    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}
I am getting an error when linking the pipeline, audio, and video bins.
If you put the video and audio elements all together in the pipeline bin, then you can do it. Figure out what your caps are for the video and audio and you should be able to link them.
#include <gst/gst.h>
#include <cstring>
#include <iostream>

// ----------------------------------
// pad-added signal
// ----------------------------------
static void onPadAdded(GstElement* element, GstPad* pad, gpointer user_data)
{
    gchar *name;
    GstCaps *p_caps;
    GstElement *nextElement = NULL;
    GstElement *pipeline = (GstElement*)user_data;
    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    p_caps = gst_pad_get_pad_template_caps(pad);

    if (strstr(name, "[CAPS FOR VIDEO CONTAIN]") != NULL)
    {
        std::cout << std::endl << "------------------------ Video -------------------------------" << std::endl;
        nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-depayer");
    }
    else if (strstr(name, "[CAPS FOR AUDIO CONTAIN]") != NULL)
    {
        std::cout << std::endl << "------------------------ Audio -------------------------------" << std::endl;
        nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-depayer");
    }

    if (nextElement != NULL)
    {
        if (!gst_element_link_filtered(element, nextElement, p_caps))
        //if (!gst_element_link_pads_filtered(element, name, nextElement, "sink", p_caps))
        {
            std::cout << std::endl << "Failed to link video element to src to sink" << std::endl;
        }
        gst_object_unref(nextElement);
    }
    g_free(name);
    gst_caps_unref(p_caps);
}

// ----------------------------------
// main
// ----------------------------------
int main(int argc, char *argv[])
{
    GstElement *source, *pipeline, *audioDepay, *audioQueue, *videoQueue,
        *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
        *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);

    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make("rtspsrc", "source");

    /* audio bin */
    audioQueue = gst_element_factory_make("queue", "audio-queue");
    audioDepay = gst_element_factory_make("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make("audioconvert", "aconv");
    audioResample = gst_element_factory_make("audioresample", "audio-resample");
    audioSink = gst_element_factory_make("autoaudiosink", "audiosink");
    if (!audioQueue || !audioDepay || !audioParse || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);
    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), pipeline);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
                     audioConvert, audioResample, audioSink, NULL);
    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Error linking fields ...1 \n");
        return 0;
    }

    videoQueue = gst_element_factory_make("queue", "video-queue");
    videoDepay = gst_element_factory_make("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make("h264parse", "video-parser");
    videoDecode = gst_element_factory_make("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                                     "width", G_TYPE_INT, 176,
                                     "height", G_TYPE_INT, 144,
                                     NULL);
    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add_many(GST_BIN(pipeline), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
                     videoSink, NULL);

    /* set property value */
    link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
    gst_caps_unref(capsFilter);
    if (!link_ok) {
        g_warning("Failed to link element1 and element2!");
    }

    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
    {
        g_printerr("Error linking fields... 2 \n");
        return 0;
    }

    /* Start playing */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
I am working with GStreamer on iOS, but I am getting an error from the demuxer: "Error received from element matroskademux0: GStreamer encountered a general stream error."
Here is the source code.
GstBus *bus;
GSource *bus_source;
GstElement *source1, *clrspace1, *clrspace2, *clrspace, *videobox1, *sink, *source2, *mpegenc, *avimux;
GstElement *videomixer;
GstElement *decodebin1, *decodebin2;
GstElement *queue;

/* Build pipeline */
pipeline = gst_pipeline_new("pipelinecomposer");
source1 = gst_element_factory_make("filesrc", "source1");
source2 = gst_element_factory_make("filesrc", "source2");
videobox1 = gst_element_factory_make("videobox", "videobox");
videomixer = gst_element_factory_make("videomixer", "videomixer");
clrspace1 = gst_element_factory_make("videoconvert", "clrspace1");
clrspace2 = gst_element_factory_make("videoconvert", "clrspace2");
clrspace = gst_element_factory_make("videoconvert", "clrspace");
sink = gst_element_factory_make("filesink", "sink");
mpegenc = gst_element_factory_make("x264enc", "mpegenc");
avimux = gst_element_factory_make("mpegtsmux", "avimux");
decodebin1 = gst_element_factory_make("decodebin", "decodebin1");
decodebin2 = gst_element_factory_make("decodebin", "decodebin2");
queue = gst_element_factory_make("queue", "queue");

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *filePath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, @"1.avi"];
NSString *filePath_1 = [NSString stringWithFormat:@"%@", @"/Users/john/Movies/1.webm"];
NSString *filePath_2 = [NSString stringWithFormat:@"%@", @"/Users/john/Movies/2.webm"];

g_object_set (G_OBJECT (source1), "location", [filePath_1 UTF8String], NULL);
g_object_set (G_OBJECT (source2), "location", [filePath_2 UTF8String], NULL);
g_object_set (G_OBJECT (sink), "location", [filePath UTF8String], NULL);
g_object_set (videobox1, "border-alpha", 0, "top", 0, "left", 0, NULL);

gst_bin_add_many(GST_BIN (pipeline), source1, sink, videobox1, videomixer, clrspace1, clrspace2, clrspace, source2, mpegenc, avimux, decodebin1, decodebin2, nil);
gst_element_link_many(source1, decodebin1, clrspace1, videobox1, videomixer, clrspace, mpegenc, avimux, sink, nil);
gst_element_link_many(source2, decodebin2, clrspace2, videomixer, nil);

gst_element_set_state(pipeline, GST_STATE_PLAYING);
Please help me.
Hello guys, I solved the problem: I had to use a dynamic ("pad-added") pad link for the demuxer.
Here is the modified source code.
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
    GstPad *sinkpad;
    GstElement *decoder = (GstElement *) data;

    /* We can now link this pad with the decoder sink pad */
    g_print ("Dynamic pad created, linking demuxer/decoder\n");
    sinkpad = gst_element_get_static_pad (decoder, "sink");
    gst_pad_link (pad, sinkpad);
    gst_object_unref (sinkpad);
}

/* Main method for the bus monitoring code */
-(void) app_function
{
    GstBus *bus;
    GSource *bus_source;
    GstElement *source1, *clrspace1, *clrspace2, *clrspace, *videobox1, *sink, *source2, *mpegenc, *avimux;
    GstElement *videomixer;
    GstElement *decodebin1, *decodebin2;
    GstElement *matroskademux1, *vp8dec1;

    GST_DEBUG ("Creating pipeline");

    /* Create our own GLib Main Context and make it the default one */
    context = g_main_context_new ();
    g_main_context_push_thread_default(context);

    /* Build pipeline */
    pipeline = gst_pipeline_new("pipelinecomposer");
    source1 = gst_element_factory_make("filesrc", "source1");
    source2 = gst_element_factory_make("filesrc", "source2");
    videobox1 = gst_element_factory_make("videobox", "videobox");
    videomixer = gst_element_factory_make("videomixer", "videomixer");
    clrspace1 = gst_element_factory_make("videoconvert", "clrspace1");
    clrspace2 = gst_element_factory_make("videoconvert", "clrspace2");
    clrspace = gst_element_factory_make("videoconvert", "clrspace");
    sink = gst_element_factory_make("filesink", "sink");
    mpegenc = gst_element_factory_make("x264enc", "mpegenc");
    avimux = gst_element_factory_make("mpegtsmux", "avimux");
    decodebin1 = gst_element_factory_make("decodebin", "decodebin1");
    decodebin2 = gst_element_factory_make("decodebin", "decodebin2");
    matroskademux1 = gst_element_factory_make("matroskademux", "matroskademux1");
    vp8dec1 = gst_element_factory_make("vp8dec", "vp8dec1");

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *filePath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, @"1.avi"];
    NSString *filePath_1 = [NSString stringWithFormat:@"%@", @"/Users/johanbasore1/Movies/1.webm"];
    NSString *filePath_2 = [NSString stringWithFormat:@"%@", @"/Users/johanbasore1/Movies/2.webm"];

    g_object_set (G_OBJECT (source1), "location", [filePath_1 UTF8String], NULL);
    g_object_set (G_OBJECT (source2), "location", [filePath_2 UTF8String], NULL);
    g_object_set (G_OBJECT (sink), "location", [filePath UTF8String], NULL);
    g_object_set (videobox1, "border-alpha", 0, "top", 0, "left", 0, NULL);

    g_signal_connect(matroskademux1, "pad-added", G_CALLBACK(on_pad_added), vp8dec1);

    gst_bin_add_many(GST_BIN (pipeline), source1, matroskademux1, vp8dec1, sink, clrspace1, mpegenc, avimux, nil);
    gst_element_link(source1, matroskademux1);
    gst_element_link_many(vp8dec1, clrspace1, mpegenc, avimux, sink, nil);
}
I want to play two local video files using GStreamer, but I got an error: a segmentation fault from libgstvideomixer.so. What's wrong with my code? The videomixer element is needed to play the two videos. Should I use videobox for that?
gst-launch --no-fault filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! videomixer name=mix ! xvimagesink sync=false filesrc location=/mnt/upan/source.264 ! queue ! typefind ! ffdec_h264 ! mix.
static void p_gst_init(void)
{
    App *app = &s_app;
    GError *error = NULL;
    GstBus *bus;
    GstElement *parse, *decoder, *queue;
    GstElement *parse2, *decoder2, *queue2;

    gst_init (NULL, NULL);

    /* create a mainloop to get messages */
    app->loop = g_main_loop_new (NULL, TRUE);
    app->playbin = gst_pipeline_new ("pipeline");

    app->appsrc = gst_element_factory_make ("filesrc", "disk_source");
    g_object_set (G_OBJECT (app->appsrc), "location", "/mnt/upan/test.264", NULL);
    queue = gst_element_factory_make ("queue", "queue");
    parse = gst_element_factory_make ("typefind", "parse");
    decoder = gst_element_factory_make ("ffdec_h264", "decoder");

    GstElement *filesrc2;
    filesrc2 = gst_element_factory_make ("filesrc", "disk_source2");
    g_object_set (G_OBJECT (filesrc2), "location", "/mnt/upan/source.264", NULL);
    queue2 = gst_element_factory_make ("queue", "queue2");
    parse2 = gst_element_factory_make ("typefind", "parse2");
    decoder2 = gst_element_factory_make ("ffdec_h264", "decoder2");

    /*
    GstElement *videobox;
    videobox = gst_element_factory_make("videobox", NULL);
    g_object_set (videobox, "alpha", 0, "border-alpha", 0, "bottom", 100, "left", 100, "right", 100, "top", 100, NULL);
    */

    GstElement *videomixer;
    videomixer = gst_element_factory_make("videomixer", "videomixer");

    app->xvimagesink = gst_element_factory_make ("xvimagesink", "play_video");
    g_object_set (G_OBJECT (app->xvimagesink), "synchronous", FALSE, NULL);

    gst_bin_add_many (GST_BIN (app->playbin), app->appsrc, queue, parse, decoder, videomixer, app->xvimagesink, filesrc2, queue2, parse2, decoder2, NULL);
    if (gst_element_link_many (app->appsrc, queue, parse, decoder, videomixer, NULL))
    {
        printf("---------link element success-----------------\n");
    }
    else
        printf("---------link element failed-----------------\n");
    gst_element_link_many (filesrc2, queue2, parse2, decoder2, videomixer, NULL);
    gst_element_link_many (videomixer, app->xvimagesink, NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (app->playbin));
    gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);
    gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, app);
    g_signal_connect (app->appsrc, "need-data", G_CALLBACK (feed_data), app);
    return;
}
I am trying a basic pipeline, shown below. If I run the pipeline without the caps filter, it passes straight through (assuming the same I/O format). Once I add the caps, the pre-roll starts but the video does not continue through to the sink output.
Am I coding this wrong?
thx
Art
#include <gst/gst.h>
#include <glib.h>

static void
on_pad_added (GstElement *element,
              GstPad *pad,
              gpointer data)
{
    GstPad *sinkpad;
    GstElement *decoder = (GstElement *) data;

    g_print ("Dynamic pad created, linking out/in \n");
    sinkpad = gst_element_get_static_pad (decoder, "sink");
    gst_pad_link (pad, sinkpad);
    gst_object_unref (sinkpad);
}

int
main (int argc,
      char *argv[])
{
    GMainLoop *loop;
    gboolean link_ok;
    GstElement *pipeline, *source, *decoder, *ffcs, *vidsc, *capsfout, *sink;
    GstBus *bus;
    GstCaps *caps;

    /* Initialisation */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* Create gstreamer elements */
    pipeline = gst_pipeline_new ("video-player");
    source = gst_element_factory_make ("filesrc", "file-source");
    decoder = gst_element_factory_make ("decodebin2", "dec-bin2");
    ffcs = gst_element_factory_make ("ffmpegcolorspace", "ffcs");
    vidsc = gst_element_factory_make ("videoscale", "vidsc");
    capsfout = gst_element_factory_make ("capsfilter", "capsfout");
    sink = gst_element_factory_make ("filesink", "vidout");

    if (!pipeline || !source || !decoder || !ffcs || !vidsc || !capsfout || !sink) {
        g_printerr ("One element could not be created. Exiting.\n");
        return -1;
    }

    /* we set the input/output filename to the source element */
    g_object_set (G_OBJECT (source), "location", argv[1], NULL);
    g_object_set (G_OBJECT (sink), "location", argv[2], NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bin_add_many (GST_BIN (pipeline),
                      source, decoder, ffcs, vidsc, capsfout, sink, NULL);

    /* we link the elements together */
    gst_element_link (source, decoder);
    gst_element_link (decoder, ffcs);
    gst_element_link (ffcs, vidsc);

    caps = gst_caps_new_simple ("video/x-raw-yuv",
                                "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
                                "width", G_TYPE_INT, 384,
                                "height", G_TYPE_INT, 216,
                                "framerate", GST_TYPE_FRACTION, 25, 1,
                                NULL);
    link_ok = gst_element_link_filtered (vidsc, sink, caps);
    gst_caps_unref (caps);
    if (!link_ok) {
        g_warning ("Failed to link vidsc to sink!");
    } else {
        g_print ("seems ok, no error reported?\n");
    }

    /* Set the pipeline to "playing" state */
    g_print ("Now playing: %s\n", argv[1]);
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Iterate */
    g_print ("Running...\n");
    g_main_loop_run (loop);

    /* Out of the main loop, clean up nicely */
    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));
    return 0;
}
It may be late to answer, but you may have forgotten the dynamic pad link between some elements, so the on_pad_added function is never called. I had the same problem.
Have you tried adding it since then? For example, this line dynamically links the two elements instead of the static gst_element_link:
g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), ffcs);
For what reasons do you need the capsfilter? Only constrain the properties you need. E.g. you set a framerate, but don't have a videorate element before the capsfilter. If you don't care about the framerate (e.g. you just want to force a size), remove the framerate from the caps you set on the capsfilter.
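For instance, a caps filter constrained to size only might look like this (a sketch in the same 0.10-era API the question uses; add a videorate element before the capsfilter if you really do need to force 25/1):

/* Constrain only the size; format and framerate stay negotiable. */
caps = gst_caps_new_simple ("video/x-raw-yuv",
                            "width", G_TYPE_INT, 384,
                            "height", G_TYPE_INT, 216,
                            NULL);
link_ok = gst_element_link_filtered (vidsc, sink, caps);
gst_caps_unref (caps);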