I am developing an audio player for Sailfish OS and trying to play a file via GStreamer, but there is no sound.
I checked GStreamer from the console:
gst-launch-0.10 filesrc location=/path/to/file.ogg ! decodebin ! autoaudiosink
And it is working fine!
I also tested converting an audio file to another format:
int
main (int argc,
      char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("filesrc location=/home/nemo/Music/Ringtones/Myfile.mp3 ! decodebin ! audioconvert ! vorbisenc ! oggmux ! filesink location=test.ogg", NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
And it is working fine!
But when I try to play it, there is no sound:
pipeline = gst_parse_launch ("filesrc location=/home/nemo/Music/Ringtones/Myfile.mp3 ! decodebin ! audioconvert ! autoaudiosink", NULL);
GStreamer version: 0.10
The problem was that audio resources need to be acquired before use:
gst-launch is statically configured as a "player" in the resource policy configuration, so it can run without any extra work.
However, when you are creating your own application, you need to acquire the audio playback resources yourself.
Check https://github.com/nemomobile/libaudioresource if you are developing a C-only application, or https://github.com/nemomobile/libaudioresource-qt for a Qt application.
Example:
#include <gst/gst.h>
#include <audioresource.h>
#include <glib.h>
#include <unistd.h>
#include <stdio.h>
/*
* Dependencies glib2-devel, libaudioresource-devel, gstreamer-devel.
* Compile with:
* gcc `pkg-config --cflags --libs gstreamer-0.10` `pkg-config --cflags --libs audioresource` `pkg-config --cflags --libs glib-2.0` gst-example.c -o gst-example
*
* Check https://github.com/nemomobile/libaudioresource
*/
static GstElement *pipeline;
static int got_reply = 0;

static void on_acquired(audioresource_t *audio_resource, bool acquired, void *user_data)
{
    got_reply = 1;
    printf("on_acquired: %s\n", acquired ? "true" : "false");
    if (acquired) {
        // start playback here
        printf("start playback\n");
        gst_element_set_state (pipeline, GST_STATE_PLAYING);
    } else {
        // stop playback here
    }
}

static void naive_wait()
{
    // busy-wait until on_acquired() has been called; a real application
    // would integrate this with its GMainLoop instead
    got_reply = 0;
    while (!got_reply) {
        g_main_context_iteration(NULL, false);
        usleep(1000);
    }
}
int main(int argc, char *argv[])
{
    audioresource_t *resource;
    void *user_data = NULL;
    char tmp[1024];
    GstBus *bus;
    GstMessage *msg;

    if (argc < 2) {
        printf("audio file argument needed.\n");
        return 1;
    }

    gst_init (&argc, &argv);

    printf("initialize audioresource for media player\n");
    resource = audioresource_init(AUDIO_RESOURCE_MEDIA, on_acquired, user_data);

    snprintf(tmp, 1024, "filesrc location=%s ! decodebin ! audioconvert ! autoaudiosink", argv[1]);
    printf("create pipeline: %s\n", tmp);
    pipeline = gst_parse_launch (tmp, NULL);

    printf("acquire audioresource..\n");
    // When you want to start playback
    audioresource_acquire(resource);

    // Wait for the reply for acquire..
    naive_wait();

    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    printf("release audioresource..\n");
    audioresource_release(resource);

    // Wait for release..
    naive_wait();

    // When you close your application
    audioresource_free(resource);

    return 0;
}
Thanks to Juho Hämäläinen for the answer!
Related
I am using GStreamer to play and process an RTSP stream.
rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink
and I wrote C++ code like this:
#include <gst/gst.h>
void printIt(GList *p) {
    if (!p) {
        g_print("p null\n");
        return;
    }
    while (p) {
        GstPad *pad = (GstPad *)p->data;
        g_print("[%s]", GST_OBJECT_NAME(pad));
        p = p->next;
    }
    g_print("\n");
}
GstFlowReturn new_sample_cb (GstElement *appsink, gpointer udata) {
    g_print("new-sample cb\n");
    return GST_FLOW_OK;
}

GstFlowReturn new_preroll_cb (GstElement *appsink, gpointer udata) {
    g_print("new_preroll_cb cb\n");
    return GST_FLOW_OK;
}
int
main (int argc, char *argv[]) {
    GstElement *pipeline;
    GstBus *bus;
    GstMessage *msg;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Build the pipeline */
    pipeline = gst_parse_launch("rtspsrc location=rtspt://admin:scut123456#192.168.1.64:554/Streaming/Channels/1 ! tee name=t ! queue ! decodebin ! videoconvert ! autovideosink t. ! queue ! rtph264depay ! h264parse ! appsink name=mysink", NULL);

    GstElement *appsink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
    printIt(appsink->pads);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(new_sample_cb), pipeline);
    g_print("sig conn new-sample\n");
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(new_preroll_cb), pipeline);
    g_print("sig conn new-preroll\n");

    /* Start playing */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
            GstMessageType(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}
When I compile and run it, the video shows up in the autovideosink, but the appsink's new-sample signal is never called. What should I do if I want to process a frame in the appsink?
Thanks.
By default appsink prefers callbacks over signals for performance reasons (though I wouldn't consider your use case a performance problem). For appsink to emit signals you need to set its emit-signals property to true; it defaults to false.
P.S. Apart from the above, I think you will need a GMainLoop for event processing, as demonstrated in the GStreamer examples.
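A minimal sketch of both changes against the code in the question; the revised new_sample_cb pulls the sample via the "pull-sample" action signal so the appsink's internal queue does not fill up (the buffer-size print is just illustrative):

GstFlowReturn new_sample_cb (GstElement *appsink, gpointer udata) {
    GstSample *sample = NULL;
    /* pull the sample that triggered this signal */
    g_signal_emit_by_name(appsink, "pull-sample", &sample);
    if (sample) {
        GstBuffer *buffer = gst_sample_get_buffer(sample);
        g_print("new sample, %u bytes\n", (guint)gst_buffer_get_size(buffer));
        gst_sample_unref(sample);
    }
    return GST_FLOW_OK;
}

/* in main, after fetching the appsink by name: */
g_object_set(appsink, "emit-signals", TRUE, NULL);
g_signal_connect(appsink, "new-sample", G_CALLBACK(new_sample_cb), NULL);

/* and drive the pipeline from a GMainLoop instead of the blocking
 * gst_bus_timed_pop_filtered() call, so signals are dispatched: */
GMainLoop *loop = g_main_loop_new(NULL, FALSE);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_main_loop_run(loop);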
I wrote a GStreamer app to convert Opus audio to raw audio. If I feed bad audio (just random bytes) into the pipeline, the pipeline gets stuck and I don't receive an error message on the message bus.
I'm listening for error messages on the pipeline's bus, but I am not getting an error code to indicate the failure. The GStreamer debug logs do indicate that the demux failed, though; I can see the following in the logs:
0:00:00.021614679 22541 0xe5b190 WARN oggdemux gstoggdemux.c:4609:gst_ogg_demux_send_event:<oggdemux0> No chain to forward event to
0:00:00.021656681 22541 0xe5b190 WARN oggdemux gstoggdemux.c:2433:gst_ogg_demux_sink_event:<oggdemux0> EOS while trying to retrieve chain, seeking disabled
The following is a sample app that I wrote:
#include <gst/gst.h>
#include <gst/gstbin.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>
#include <string.h>
static GMainLoop *loop;
FILE *file = NULL;
size_t bytesRead = 0;
typedef struct _CustomData
{
    GstElement *pipeline;
    GstAppSrc *app_source;
    guint sourceid; /* To control the GSource */
} CustomData;
static gboolean push_data(CustomData *data)
{
    GstBuffer *gbuffer;
    GstFlowReturn ret;
    char buffer[1024];
    GstMapInfo info;

    gbuffer = gst_buffer_new_and_alloc(sizeof(buffer));
    bytesRead = fread(buffer, 1, sizeof(buffer), file);

    gst_buffer_map(gbuffer, &info, GST_MAP_WRITE);
    memcpy(info.data, buffer, bytesRead);
    gst_buffer_unmap(gbuffer, &info);

    if (bytesRead > 0)
    {
        //g_print("Pushing %d\n", (int)bytesRead);
        /* Push the buffer into the appsrc; the "push-buffer" action signal
           takes its own reference, so drop ours afterwards */
        g_signal_emit_by_name(data->app_source, "push-buffer", gbuffer, &ret);
        gst_buffer_unref(gbuffer);
        return TRUE;
    }
    else
    {
        g_print("file complete\n");
        gst_buffer_unref(gbuffer);
        gst_app_src_end_of_stream(data->app_source);
        return FALSE;
    }
}
static void stop_feed(GstElement *source, CustomData *data)
{
    if (data->sourceid != 0)
    {
        g_print("Stop feeding\n");
        g_source_remove(data->sourceid);
        data->sourceid = 0;
    }
}

static void start_feed(GstElement *source, guint size, CustomData *data)
{
    if (data->sourceid == 0)
    {
        g_print("Start feeding\n");
        data->sourceid = g_idle_add((GSourceFunc)push_data, data);
    }
}
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    switch (GST_MESSAGE_TYPE(msg))
    {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR:
    {
        gchar *debug;
        GError *error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        g_printerr("Error: from %s %s\n", GST_OBJECT_NAME(msg->src), error->message);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE;
}
int main(int argc,
         char *argv[])
{
    CustomData data;
    memset(&data, 0, sizeof(data));
    GstBus *bus;
    guint bus_watch_id;

    /* Initialisation */
    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);

    GError *error = NULL;
    data.pipeline = gst_parse_launch("concat name=c ! filesink location=program.wav appsrc name=src_00 ! oggdemux ! opusdec ! audioconvert ! audioresample ! audio/x-raw,format=S16LE,channels=1,rate=16000 ! queue ! c.", &error);
    if (!data.pipeline)
    {
        g_printerr("Pipeline could not be created. Exiting.\n");
        return -1;
    }

    data.app_source = GST_APP_SRC(gst_bin_get_by_name(GST_BIN(data.pipeline), "src_00"));
    g_signal_connect(data.app_source, "need-data", G_CALLBACK(start_feed), &data);
    g_signal_connect(data.app_source, "enough-data", G_CALLBACK(stop_feed), &data);

    /* we add a message handler */
    bus = gst_pipeline_get_bus(GST_PIPELINE(data.pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, NULL);
    gst_object_unref(bus);

    file = fopen("junk.wav", "rb");

    /* Set the pipeline to "playing" state */
    g_print("Now playing");
    gst_element_set_state(data.pipeline, GST_STATE_PLAYING);

    /* Iterate */
    g_print("Running...\n");
    g_main_loop_run(loop);

    /* Out of the main loop, clean up nicely */
    g_print("Returned, stopping playback\n");
    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(data.pipeline));
    g_source_remove(bus_watch_id);
    g_main_loop_unref(loop);
    return 0;
}
I would have expected the demux failure to propagate to the message bus, but it does not. How can I listen for such errors?
I've tried other pipelines that use decodebin, and with those I do get the error messages on the message bus. The following pipeline works as expected:
gst_parse_launch("concat name=c ! filesink location=program.wav appsrc name=src_00 ! decodebin ! audioconvert ! audioresample ! audio/x-raw,format=S16LE,channels=1,rate=16000 ! queue ! c.", &error);
GStreamer version: 1.8.3
OS: Ubuntu 16.04
The issue seems to be resolved in GStreamer 1.14. After updating, I now get an error message on the message bus:
Message: Error: from oggdemux0 Could not demultiplex stream.
Error Code: GST_STREAM_ERROR_DEMUX
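For reference, once the error does reach the bus, the GError filled in by gst_message_parse_error() carries the domain and code, so the demux case can be matched explicitly. A minimal sketch of a bus handler doing that (a drop-in for the bus_call above, reusing the global loop):

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
        GError *error;
        gchar *debug;
        gst_message_parse_error(msg, &error, &debug);
        /* the domain/code pair identifies the error category */
        if (error->domain == GST_STREAM_ERROR && error->code == GST_STREAM_ERROR_DEMUX)
            g_printerr("Demux failed in %s: %s\n", GST_OBJECT_NAME(msg->src), error->message);
        else
            g_printerr("Error from %s: %s\n", GST_OBJECT_NAME(msg->src), error->message);
        g_error_free(error);
        g_free(debug);
        g_main_loop_quit(loop);
    }
    return TRUE;
}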
I made a .pcm audio file via the following command:
gst-launch-1.0 filesrc location=/home/pi/rawaudio/can-you-keep-a-secret.wav ! wavparse ! audioresample ! audioconvert ! audio/x-raw,format=S16BE,channels=1,rate=44100,layout=interleaved ! filesink location=/home/pi/rawaudio/test.pcm
I can play it with:
gst-launch-1.0 filesrc location=/home/pi/rawaudio/test.pcm ! audio/x-raw,format=S16BE,channels=1,rate=44100,layout=interleaved ! audioconvert ! audioresample ! alsasink
This is working perfectly. But now I need to implement this in my C++ application. This is what I already have:
#include <string>
#include <stdio.h>
#include <gst/gst.h>
#include <gio/gio.h>
#include <boost/thread.hpp>
#define AUDIO_LOCATION "/home/pi/rawaudio/test.pcm"
static gboolean bus_call(GstBus *bus,
                         GstMessage *msg,
                         gpointer data) {
    GMainLoop *loop = (GMainLoop *)data;
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        g_printerr("Error: %s\n", error->message);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return true;
}
int main (int argc, char **argv) {
    gst_init(&argc, &argv);

    GstElement *pipeline, *source, *parser, *sink, *convert;
    GMainLoop *loop;
    GstBus *bus;
    guint bus_watch_id;

    // loop
    loop = g_main_loop_new(NULL, false);

    // pipeline
    pipeline = gst_pipeline_new("test_pipeline");
    sink = gst_element_factory_make("alsasink", "sink");
    source = gst_element_factory_make("filesrc", "source");
    g_object_set(G_OBJECT(source), "location", AUDIO_LOCATION, NULL);
    //convert = gst_element_factory_make("audioconvert", "convert");
    //parser = gst_element_factory_make("audioresample", "parse");

    GstPad *sourcepad, *sinkpad;
    sourcepad = gst_element_get_static_pad(source, "src");
    gst_pad_set_caps(sourcepad,
                     gst_caps_new_simple("audio/x-raw",
                                         "rate", G_TYPE_INT, 44100,
                                         "channels", G_TYPE_INT, 1,
                                         "format", G_TYPE_STRING, "S16BE",
                                         "layout", G_TYPE_STRING, "interleaved",
                                         NULL));
    gst_object_unref(sourcepad);

    // bus
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    // add elements into pipeline
    gst_bin_add_many(GST_BIN(pipeline), source, sink, NULL);

    // link source to sink
    gst_element_link_many(source, sink, NULL);

    // start playing
    gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);

    // iterate
    g_print("Running...\n");
    g_main_loop_run(loop);

    // out of the main loop, clean up nicely
    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));
    g_source_remove(bus_watch_id);
    g_main_loop_unref(loop);
    return 0;
}
But it is not working. I get
Error: The stream is in the wrong format
when running.
What am I missing? Some elements like audioparse, audioconvert, or audioresample? I also tried inserting audioconvert and audioresample after the source in the pipeline, but then I get an internal data stream error instead.
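For comparison, the working gst-launch line would translate to C roughly as follows. In gst-launch, a caps string between two elements becomes a capsfilter element, whereas the code above sets the caps directly on the filesrc pad, which is likely the difference. A sketch only, reusing pipeline, source and sink from the code above and replacing the gst_pad_set_caps() block:

// build the caps from the working gst-launch line and apply them via a capsfilter
GstElement *capsfilter = gst_element_factory_make("capsfilter", "filter");
GstCaps *caps = gst_caps_new_simple("audio/x-raw",
                                    "format", G_TYPE_STRING, "S16BE",
                                    "rate", G_TYPE_INT, 44100,
                                    "channels", G_TYPE_INT, 1,
                                    "layout", G_TYPE_STRING, "interleaved",
                                    NULL);
g_object_set(capsfilter, "caps", caps, NULL);
gst_caps_unref(caps);

GstElement *aconvert = gst_element_factory_make("audioconvert", "convert");
GstElement *resample = gst_element_factory_make("audioresample", "resample");

// filesrc ! capsfilter ! audioconvert ! audioresample ! alsasink
gst_bin_add_many(GST_BIN(pipeline), source, capsfilter, aconvert, resample, sink, NULL);
gst_element_link_many(source, capsfilter, aconvert, resample, sink, NULL);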
I am trying to embed an IP camera stream in a QWidget using udpsrc. The pipeline below works:
gst-launch-1.0 udpsrc port=20000 ! application/x-rtp,encoding-name=JPEG,payload=26,width=640,height=460 ! rtpjpegdepay ! jpegparse ! jpegdec ! videoconvert ! videoscale ! ximagesink sync=false
When I try to embed it in a QWidget, it shows only a plain window. My code is as follows:
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <QApplication>
#include <QDebug>
#include <QTimer>
#include <QWidget>
int main(int argc, char *argv[])
{
    if (!g_thread_supported ())
        g_thread_init (NULL);

    gst_init (&argc, &argv);
    QApplication app(argc, argv);
    app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));

    // prepare the pipeline
    GstElement *pipeline = gst_pipeline_new ("pipeline");
    GstElement *src = gst_element_factory_make ("udpsrc", NULL);
    GstCaps *caps = gst_caps_from_string ("application/x-rtp,encoding-name=JPEG,payload=26,width=640,height=460");
    g_object_set(G_OBJECT(src),
                 "port", 20000,
                 "caps", caps, NULL);
    GstElement *parser = gst_element_factory_make ("rtpjpegdepay", NULL);
    GstElement *mux = gst_element_factory_make ("jpegparse", NULL);
    GstElement *parse2 = gst_element_factory_make ("jpegdec", NULL);
    GstElement *dec = gst_element_factory_make ("videoconvert", NULL);
    GstElement *conv = gst_element_factory_make ("videoscale", NULL);
    GstElement *sink = gst_element_factory_make ("ximagesink", NULL);
    g_object_set(G_OBJECT(sink), "sync", FALSE, NULL);

    gst_bin_add_many (GST_BIN (pipeline), src, parser, mux, parse2, dec, conv, sink, NULL);
    gst_element_link (src, sink);

    GstState state, pending;
    // this is the call to overlay the gstreamer's output to the Qt Widgets...
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), xwinid);

    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_object_unref (bus);

    // Playback can be initiated by setting the element to PLAYING state using gst_element_set_state()
    GstStateChangeReturn sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    qDebug() << "####-1" << sret;
    if (sret == GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (pipeline);
        // Exit application
        QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
    }

    gst_element_get_state (pipeline, &state, &pending, 10);
    qDebug() << state << pending;

    window->show();
    app.exec();
    g_main_loop_run (loop);
    return 0;
}
I solved it by creating the pipeline using gst_parse_launch():
GstElement *pipeline_2 = gst_parse_launch("udpsrc port=20000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegparse ! jpegdec ! videoconvert ! videoscale ! ximagesink name=mySink", NULL);
GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline_2), "mySink");

QWidget *window = new QWidget();
window->setWindowTitle("udpsrc video stream");
window->resize(700, 700);
WId xwinid = window->winId();

gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), (guintptr)xwinid);
window->show();

GstStateChangeReturn sret = gst_element_set_state (pipeline_2, GST_STATE_PLAYING);
Hope this helps.
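If the ordering between winId() and the state change ever becomes a problem, another option (an untested sketch) is to hand the window handle over from a sync bus handler, at the moment the sink asks for it:

#include <gst/video/videooverlay.h>

static GstBusSyncReply bus_sync_handler(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    /* the video sink posts this message when it needs a window to render into */
    if (!gst_is_video_overlay_prepare_window_handle_message(msg))
        return GST_BUS_PASS;
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(msg)),
                                        (guintptr)user_data);
    gst_message_unref(msg);
    return GST_BUS_DROP;
}

/* installed on the pipeline's bus before going to PLAYING: */
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_2));
gst_bus_set_sync_handler(bus, bus_sync_handler, (gpointer)xwinid, NULL);
gst_object_unref(bus);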
I'm using GStreamer (gst-launch-1.0 version 1.8.3) to record FLAC files. The command line looks like this:
gst-launch-1.0 -v alsasrc ! flacenc ! filesink location="output.flac"
mediainfo output.flac
[screenshot: mediainfo result]
The screenshot above shows the output of mediainfo.
The file plays in a media player, but seeking and the play-time display do not work.
I think there is no duration information.
[screenshot: player screen]
How can I add duration information to FLAC files?
I think .flac is a very basic stream format. It does not support random access or carry a duration. You can't know the exact duration unless you parse the complete file. Some players may take a "best effort" approach here and seek roughly to the file position where you put the slider, but it's nothing the format itself provides.
I think for seeking you are supposed to put .flac into a container like .ogg. This is actually very similar to .aac files which should be put into .mp4.
So try gst-launch-1.0 -e -v alsasrc ! flacenc ! oggmux ! filesink location="output.ogg" (the -e makes gst-launch send EOS on shutdown, so the muxer can finalize the file).
$ mediainfo output.ogg
General
Complete name : output.ogg
Format : Ogg
Format/Info : Free Lossless Audio Codec
File size : 598 KiB
Duration : 7 s 941 ms
Overall bit rate mode : Variable
Overall bit rate : 617 kb/s
Audio
ID : 256729656 (0xF4D6238)
Format : FLAC
Format/Info : Free Lossless Audio Codec
Duration : 7 s 941 ms
Bit rate mode : Variable
Channel(s) : 2 channels
Channel positions : Front: L R
Sampling rate : 44.1 kHz
Bit depth : 16 bits
Writing library : libFLAC 1.3.2 (UTC 2017-01-01)
I made some modifications to user199309's answer
https://stackoverflow.com/a/47569428/5564626
Compile: g++ -o test test.c $(pkg-config --cflags --libs gstreamer-1.0)
#include <stdio.h>
#include <gst/gst.h>
#define GLIB_DISABLE_DEPRECATION_WARNINGS
static GstElement *pipeline;
static GstPad *queue_src_pad;
static GstElement *bins[2];
static GstPad *bin_pads[2];
static GstElement *filesink[2];
static GMainLoop *loop;
static GstElement *flacenc[2];
static size_t current_bin = 0;
static int current_file = 0;
static GstPadProbeReturn
pad_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data) {
    gst_pad_remove_probe(pad, GST_PAD_PROBE_INFO_ID (info));

    /* detach the current bin: unlink it, push EOS into it so the
       encoder/muxer can finalize the file, then take it out of the pipeline */
    gst_pad_unlink(queue_src_pad, bin_pads[current_bin]);
    gst_pad_send_event(bin_pads[current_bin], gst_event_new_eos());
    gst_element_set_state(bins[current_bin], GST_STATE_NULL);
    gst_object_ref(bins[current_bin]);
    gst_bin_remove(GST_BIN(pipeline), bins[current_bin]);

    current_file++;
    current_bin = (current_file % 2);

    {
        char file_location[32];
        sprintf(file_location, "recording_%d.flac", current_file);
        g_object_set(G_OBJECT(filesink[current_bin]), "location", file_location, NULL);
        printf("now writing to %s\n", file_location);
    }

    /* attach the other bin and resume */
    gst_bin_add(GST_BIN(pipeline), bins[current_bin]);
    gst_pad_link(queue_src_pad, bin_pads[current_bin]);
    gst_element_set_state(bins[current_bin], GST_STATE_PLAYING);
    gst_element_sync_state_with_parent(bins[current_bin]);

    return GST_PAD_PROBE_OK;
}
static gboolean timeout_cb(gpointer user_data) {
    gst_pad_add_probe(queue_src_pad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
                      pad_probe_cb, NULL, NULL);
    return TRUE;
}
static gboolean
bus_cb (GstBus *bus,
        GstMessage *msg,
        gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;
    g_print("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));
    switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
        g_print ("End of stream\n");
        //g_main_loop_quit (loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *error;
        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);
        g_printerr("Error: %s\n", error->message);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE;
}
gint main(gint argc, gchar *argv[])
{
    GstElement *audiosrc;
    GstElement *queue;
    GstBus *bus;
    guint bus_watch_id;

    gst_init (&argc, &argv);

    audiosrc = gst_element_factory_make("alsasrc", "audiosrc");
    queue = gst_element_factory_make("queue", "queue");
    bins[0] = gst_bin_new("bin0");
    bins[1] = gst_bin_new("bin1");
    flacenc[0] = gst_element_factory_make("flacenc", "flacenc0");
    flacenc[1] = gst_element_factory_make("flacenc", "flacenc1");
    filesink[0] = gst_element_factory_make("filesink", "filesink0");
    filesink[1] = gst_element_factory_make("filesink", "filesink1");
    pipeline = gst_pipeline_new("test-pipeline");

    if (!pipeline || !audiosrc || !queue
            || !flacenc[0] || !filesink[0]
            || !flacenc[1] || !filesink[1]) {
        g_printerr ("not all elements could be created\n");
        return -1;
    }

    gst_bin_add_many(GST_BIN(bins[0]), flacenc[0], filesink[0], NULL);
    gst_bin_add_many(GST_BIN(bins[1]), flacenc[1], filesink[1], NULL);
    gst_bin_add_many(GST_BIN(pipeline), audiosrc, queue, bins[0], NULL);
    g_assert(gst_element_link(audiosrc, queue));
    g_assert(gst_element_link_many(flacenc[0], filesink[0], NULL));
    g_assert(gst_element_link_many(flacenc[1], filesink[1], NULL));

    /* expose the flacenc sink pads as ghost pads on the bins */
    GstPad* pad = gst_element_get_static_pad(flacenc[0], "sink");
    gst_element_add_pad(bins[0], gst_ghost_pad_new("sink", pad));
    gst_object_unref(pad);

    GstPad* pad2 = gst_element_get_static_pad(flacenc[1], "sink");
    gst_element_add_pad(bins[1], gst_ghost_pad_new("sink", pad2));
    gst_object_unref(pad2);

    bin_pads[0] = gst_element_get_static_pad(bins[0], "sink");
    bin_pads[1] = gst_element_get_static_pad(bins[1], "sink");

    current_bin = 0;
    gst_element_link(queue, bins[current_bin]);
    g_object_set(filesink[current_bin], "location", "recording_0.flac", NULL);
    queue_src_pad = gst_element_get_static_pad(queue, "src");

    /* create the main loop before handing it to the bus watch */
    loop = g_main_loop_new(NULL, FALSE);

    bus = gst_element_get_bus(pipeline);
    bus_watch_id = gst_bus_add_watch(bus, bus_cb, loop);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_timeout_add_seconds(10, timeout_cb, NULL);
    g_main_loop_run (loop);

    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}