Use Named Pipe (C++) to send images to FFMPEG - c++

I have the following code in C++:
#include <iostream>
#include <windows.h>
#include <iostream> // std::cout
#include <fstream> // std::ifstream
#include <vector>
#include <stdlib.h>
using namespace std;
int main(int argc, const char **argv)
{
wcout << "Creating an instance of a named pipe..." << endl;
// Create a pipe to send data
HANDLE pipe = CreateNamedPipe(
L"\\\\.\\pipe\\my_pipe", // name of the pipe
PIPE_ACCESS_OUTBOUND, // 1-way pipe -- send only
PIPE_TYPE_BYTE, // send data as a byte stream
1, // only allow 1 instance of this pipe
0, // no outbound buffer
0, // no inbound buffer
0, // use default wait time
NULL // use default security attributes
);
if (pipe == NULL || pipe == INVALID_HANDLE_VALUE) {
wcout << "Failed to create outbound pipe instance.";
// look up error code here using GetLastError()
system("pause");
return 1;
}
wcout << "Waiting for a client to connect to the pipe..." << endl;
// This call blocks until a client process connects to the pipe
BOOL result = ConnectNamedPipe(pipe, NULL);
if (!result) {
wcout << "Failed to make connection on named pipe." << endl;
// look up error code here using GetLastError()
CloseHandle(pipe); // close the pipe
system("pause");
return 1;
}
wcout << "Sending data to pipe..." << endl;
//opening file
ifstream infile;
infile.open("E:/xmen.jpg",std::ios::binary);
ofstream out("E:/lelel.jpg",std::ios::binary);
infile.seekg(0,std::ios::end);
size_t file_size_in_byte = infile.tellg();
vector<char> file_vec;
file_vec.resize(file_size_in_byte);
infile.seekg(0,std::ios::beg);
infile.read(&file_vec[0],file_size_in_byte);
out.write(&file_vec[0],file_vec.size());
wcout<<file_size_in_byte<<endl;
// This call blocks until a client process reads all the data
DWORD numBytesWritten = 0;
result = WriteFile(
pipe, // handle to our outbound pipe
&file_vec[0], // data to send
61026, // length of data to send (bytes)
&numBytesWritten, // will store actual amount of data sent
NULL // not using overlapped IO
);
if (result) {
wcout << "Number of bytes sent: " << numBytesWritten << endl;
} else {
wcout << "Failed to send data." << endl;
// look up error code here using GetLastError()
}
// Close the pipe (automatically disconnects client too)
CloseHandle(pipe);
wcout << "Done." << endl;
system("pause");
return 0;
}
Which I use to create a named pipe \\.\pipe\my_pipe, to which FFMPEG connects to, using the following command:
64-static\bin\Video>ffmpeg.exe -loop 1 -s 4cif -f image2 -y -i \\.\pipe\\my_pipe
-r 25 -vframes 250 -vcodec rawvideo -an eaeew.mov
Output:
ffmpeg version N-54233-g86190af Copyright (c) 2000-2013 the FFmpeg developers
built on Jun 27 2013 16:49:12 with gcc 4.7.3 (GCC)
configuration: --enable-gpl --enable-version3 --disable-w32threads --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libcaca --enable-libfreetype --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvo-aacenc --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxavs --enable-libxvid --enable-zlib libavutil 52. 37.101 / 52. 37.101
libavcodec 55. 17.100 / 55. 17.100
libavformat 55. 10.100 / 55. 10.100
libavdevice 55. 2.100 / 55. 2.100
libavfilter 3. 77.101 / 3. 77.101
libswscale 2. 3.100 / 2. 3.100
libswresample 0. 17.102 / 0. 17.102
libpostproc 52. 3.100 / 52. 3.100
[image2 # 0000000003ee04a0] Could find no file with with path '\\.\pipe\\my_pipe
' and index in the range 0-4
\\.\pipe\\my_pipe: No such file or directory
I can see on my console that my C++ app received a connection, but I get the error above in FFMPEG. Can someone please advise?
EDIT 1
Using the command below
ffmpeg.exe -s 4cif -i \\.\pipe\my_pipe -r 25 -vframes 250 -vcodec rawvideo -an tess.mov
I get the following output
ffmpeg version N-54233-g86190af Copyright (c) 2000-2013 the FFmpeg developers
built on Jun 27 2013 16:49:12 with gcc 4.7.3 (GCC)
configuration: --enable-gpl --enable-version3 --disable-w32threads --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libcaca --enable-libfreetype --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvo-aacenc --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxavs --enable-libxvid --enable-zlib
libavutil 52. 37.101 / 52. 37.101
libavcodec 55. 17.100 / 55. 17.100
libavformat 55. 10.100 / 55. 10.100
libavdevice 55. 2.100 / 55. 2.100
libavfilter 3. 77.101 / 3. 77.101
libswscale 2. 3.100 / 2. 3.100
libswresample 0. 17.102 / 0. 17.102
libpostproc 52. 3.100 / 52. 3.100
\\.\pipe\my_pipe: Invalid data found when processing input
So, now it seems it was able to connect to the pipe but is not able to process the input.

I'm doing the same thing but with a normal pipe, not a named one. My code is working great. Hope it helps
#include <stdio.h>
#include <iostream>
#include <iomanip>
using namespace std;
// Streams one raw rgb24 frame to ffmpeg through an anonymous pipe (popen)
// instead of a named pipe; ffmpeg reads raw video from stdin ("-i -").
// NOTE(review): this snippet also needs #include <sstream> for stringstream.
int main(int argc, char **argv) {
    FILE *pPipe;
    long lSize;
    char *imgdata;
    int imgcols = 640, imgrows = 480, elemSize = 3; // 640x480, 3 bytes/pixel (rgb24)
    imgdata = ...; // placeholder: point this at your raw rgb24 pixel buffer
    stringstream sstm;
    sstm << "/usr/local/bin/ffmpeg -y -f rawvideo -vcodec rawvideo -s " << imgcols << "x" << imgrows <<" -pix_fmt rgb24 -i - -c:v libx264 -shortest my_output.mp4";
    if ( !(pPipe = popen(sstm.str().c_str(), "w")) ) {
        cout << "popen error" << endl;
        exit(1);
    }
    // open a pipe to FFmpeg
    lSize = imgrows * imgcols * elemSize;
    // write to pipe
    fwrite(imgdata, 1, lSize, pPipe);
    fflush(pPipe);
    // FIX: a stream opened with popen must be closed with pclose, not fclose;
    // fclose here is undefined behavior and never reaps the child process.
    pclose(pPipe);
    return 0;
}

Related

Slow FPS with OpenCV C++ RaspberryPi

I have written a simple C++ program using OpenCV to test fps on raspberry pi.
#include <iostream>
#include <chrono>
#include <thread>
#include <unistd.h>
#include <string>
#include <opencv2/opencv.hpp>
#include <opencv2/tracking.hpp>
#include <opencv2/core/ocl.hpp>
using namespace cv;
using namespace std;
using namespace std::chrono;
using namespace std::this_thread;
using namespace cv::ml;
// Grabs frames from camera 0 at 640x480 and dumps the raw pixel bytes to
// stdout (intended to be piped into ffmpeg/ffplay as rawvideo bgr24).
int main(void)
{
    VideoCapture camera(0);
    camera.set(CAP_PROP_FRAME_WIDTH, 640);
    camera.set(CAP_PROP_FRAME_HEIGHT, 480);
    camera.set(CAP_PROP_FPS, 25);
    Mat frame;
    while (camera.read(frame)) {
        // FIX: write the whole frame in one call. The original streamed one
        // byte per operator<< call (~921600 calls per frame), which is why
        // throughput capped at ~10fps. Output bytes are identical.
        std::cout.write(reinterpret_cast<const char *>(frame.data),
                        frame.dataend - frame.datastart);
    }
}
I then use the following scrips to test fps.
The first uses raspvid as the input, the second used my C++ program as input
The raspvid version gets to the 25fps.
My C++ program never goes over 10fps.
Any idea why this is?
raspivid -w 640 -h 480 -fps 25 -t 120000 -o -| ffmpeg -re -i pipe:0 -y -an -c:v copy -f null /dev/null
and
./PiImageAnalyzer.out | ffmpeg -re -f rawvideo -pixel_format bgr24 -video_size 640x480 -framerate 25 -i pipe:0 -y -an -c:v copy -f null /dev/null
Update
these scripts can also be used
raspivid -w 640 -h 480 -fps 25 -t 120000 -o -| ffplay -i -
and
./PiImageAnalyzer.out | ffplay -f rawvideo -pixel_format bgr24 -video_size 640x480 -framerate 25 -i -

Redirect ffmpeg console output to a string or a file in C++

I'm trying to use ffmpeg to do some operations for me. It's really simple for now. I want to omit the ffmpeg output in my console, either redirecting them to strings or a .txt file that I can control. I'm on Windows 10.
I have tried _popen (with both "r" and "w") and system("ffmpeg command > output.txt"), with no success.
#include <iostream>
#include <stdio.h>
using namespace std;
#define BUFSIZE 256
// Experiments in capturing/suppressing ffmpeg's console output on Windows.
// None of the three attempts below works as written because ffmpeg writes
// its log to stderr, not stdout; appending " 2>&1" to the command string
// (as the author later discovered for attempt 1) redirects stderr into the
// captured/redirected stream.
int main()
{
/* 1.
x = system("ffmpeg -i video.mp4 -i audio.mp4 -c copy output.mp4 > output.txt");
*/
/* 2.
FILE* p;
p = _popen("ffmpeg -i video.mp4 -i audio.mp4 -c copy output.mp4", "w");
_pclose(p);
*/
/* 3.
char cmd[200] = { "ffmpeg -i video.mp4 -i audio.mp4 -c copy output.mp4" };
char buf[BUFSIZE];
FILE* fp;
if ((fp = _popen(cmd, "r")) == NULL) {
printf("Error opening pipe!\n");
return -1;
}
while (fgets(buf, BUFSIZE, fp) != NULL) {
// Do whatever you want here...
// printf("OUTPUT: %s", buf);
}
if (_pclose(fp)) {
printf("Command not found or exited with error status\n");
return -1;
}
*/
return 0;
}
Further in the development, I would like to know when the ffmpeg process finished (maybe I can monitor the ffmpeg return value?) or to display only the last line if the some error occurred.
I have made it work.
In the solution 1, I added " 2>&1" to the end of the string.
Found it here: ffmpeg command line write output to a text file
Thanks!

zlib compression seems to decompress via gunzip but not curl

I've written a http server that only sends back compressed http responses:
https://github.com/ericcurtin/asio/commit/1d37a1d225d1e812a747b595c02f9770ebd75dd0
So if you use curl to request the data and decompress the response by piping through gunzip it works fine:
curl -x "" 127.0.0.1:5000/main.cpp --output - | gunzip
% Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
100 758 100 758 0 0 740k 0 --:--:-- --:--:-- --:--:-- 740k
// g++ -O0 main.cpp server.cpp connection_manager.cpp request_handler.cpp
// connection.cpp reply.cpp mime_types.cpp request_parser.cpp -lboost_system
// -lpthread -lz
//
// run like: ./a.out 0.0.0.0 5000 .
//
// main.cpp
// ~~~~~~~~
//
// Copyright (c) 2003-2017 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#include <iostream>
#include <string>
#include <boost/asio.hpp>
#include "server.hpp"
int main(int argc, char* argv[])
{
try
{
// Check command line arguments.
if (argc != 4)
{
std::cerr << "Usage: http_server <address> <port> <doc_root>\n";
std::cerr << " For IPv4, try:\n";
std::cerr << " receiver 0.0.0.0 80 .\n";
std::cerr << " For IPv6, try:\n";
std::cerr << " receiver 0::0 80 .\n";
return 1;
}
// Initialise the server.
http::server::server s(argv[1], argv[2], argv[3]);
// Run the server until stopped.
s.run();
}
catch (std::exception& e)
{
std::cerr << "exception: " << e.what() << "\n";
}
return 0;
}
But if you use curl using --compressed which works with other http servers like the one at example.com it fails after the first 512 bytes:
curl -x "" 127.0.0.1:5000/main.cpp --compressed
// g++ -O0 main.cpp server.cpp connection_manager.cpp request_handler.cpp
// connection.cpp reply.cpp mime_types.cpp request_parser.cpp -lboost_system
// -lpthread -lz
//
// run like: ./a.out 0.0.0.0 5000 .
//
// main.cpp
// ~~~~~~~~
//
// Copyright (c) 2003-2017 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#include <iostream>
curl: (23) Failed writing received data to disk/application
#include <string
Any idea on how my compression could be fixed?

cv::cudacodec::VideoReader unable to Play rtsp stream

System information
OpenCV => 3.3.0
Operating System / Platform => Ubuntu 16.04, x86_64
Compiler => gcc version 5.4.1 20160904
Cuda => 8.0
Nvidia card => GTX 1080 Ti
ffmpeg details
libavutil 55. 74.100 / 55. 74.100
libavcodec 57.103.100 / 57.103.100
libavformat 57. 77.100 / 57. 77.100
libavdevice 57. 7.101 / 57. 7.101
libavfilter 6.100.100 / 6.100.100
libswscale 4. 7.103 / 4. 7.103
libswresample 2. 8.100 / 2. 8.100
Detailed description
I am trying to play an RTSP stream using cudacodec::VideoReader
Rtsp Stream Details ( from vlc )
this stream plays fine in vlc and cv::VideoCapture but when i try to play it in cudacodec::VideoReader i get a error saying:
OpenCV Error: Gpu API call (CUDA_ERROR_FILE_NOT_FOUND [Code = 301]) in CuvidVideoSource, file /home/deep/Development/libraries/opencv/opencv/modules/cudacodec/src/cuvid_video_source.cpp, line 66
OpenCV Error: Assertion failed (init_MediaStream_FFMPEG()) in FFmpegVideoSource, file /home/deep/Development/libraries/opencv/opencv/modules/cudacodec/src/ffmpeg_video_source.cpp, line 101
Steps to reproduce
#include <iostream>
#include "opencv2/opencv_modules.hpp"
#if defined(HAVE_OPENCV_CUDACODEC)
#include <opencv2/core.hpp>
#include <opencv2/cudacodec.hpp>
#include <opencv2/highgui.hpp>
// Decodes an RTSP stream on the GPU via cudacodec::VideoReader and displays
// each frame; loops until the reader returns no frame or a key is pressed.
int main(int argc, const char* argv[])
{
// RTSP source; the '#' is likely a transcription of '@' in user:pass@host.
const std::string fname = "rtsp://admin:admin#192.168.1.13/media/video2";
cv::namedWindow("GPU", cv::WINDOW_NORMAL);
cv::cuda::GpuMat d_frame;
// NOTE(review): createVideoReader throws here (CV_Assert on
// init_MediaStream_FFMPEG) when the FFMPEG backend fails to initialize —
// see the GDB trace below.
cv::Ptr<cv::cudacodec::VideoReader> d_reader = cv::cudacodec::createVideoReader(fname);
for (;;)
{
// nextFrame returns false when the stream ends or decoding fails.
if (!d_reader->nextFrame(d_frame))
break;
// Frames are decoded into GPU memory; download to host for imshow.
cv::Mat frame;
d_frame.download(frame);
cv::imshow("GPU", frame);
if (cv::waitKey(3) > 0)
break;
}
return 0;
}
#else
// Fallback entry point when OpenCV lacks CUDA video decoding support.
int main()
{
std::cout << "OpenCV was built without CUDA Video decoding support\n" << std::endl;
return 0;
}
#endif
I tried debugging it using GDB and saw that in ffmpeg_video_source.cpp bool init_MediaStream_FFMPEG() directly returns without checking the if condition.
GDB output
cv::cudacodec::detail::FFmpegVideoSource::FFmpegVideoSource
(this=0x402a20 <_start>, fname=...) at /home/deep/Development/libraries/opencv/opencv/modules/cudacodec/src/ffmpeg_video_source.cpp:98
98 cv::cudacodec::detail::FFmpegVideoSource::FFmpegVideoSource(const String& fname) :
(gdb) n
99 stream_(0)
(gdb) n
101 CV_Assert( init_MediaStream_FFMPEG() );
(gdb) s
(anonymous namespace)::init_MediaStream_FFMPEG () at /home/deep/Development/libraries/opencv/opencv/modules/cudacodec/src/ffmpeg_video_source.cpp:94
94 return initialized;
(gdb) display initialized
4: initialized = false
(gdb) s
95 }
UPDATE:
I have solved the problem. solution link
In the solution provided here the problem was related to the pixel format detected by ffmpeg.
In order to check your rtsp pixel format you can use ffprobe.
Then inside your cap_ffmpeg_impl.hpp you should add the case related to your pixel format like
case AV_PIX_FMT_YUV420P:
case AV_PIX_FMT_YUVJ420P:
*chroma_format = ::VideoChromaFormat_YUV420;
break;
And then rebuild opencv.

Calling FFMPEG with Boost.Process

I'm trying to call FFMPEG from my C++ process to stream video from an IP camera. The FFMPEG command I use is ffmpeg.exe -rtsp_transport tcp -i rtsp://10.0.1.21/ONVIF/MediaInput?profile=1_def_profile4 -f image2pipe -pix_fmt rgb24 -vcodec rawvideo -r 15 -. I've verified this command in the command prompt and it does start a video stream and dumps the frames to stdout. I've also written similar code in Python and it works.
This is the code I'm using to call FFMPEG with the arguments from the previous paragraph in C++ and read the individual frames from stdout.
// Appends the fixed ffmpeg argument list for pulling an RTSP stream over TCP
// and emitting raw rgb24 frames to stdout at 15 fps. Only the input URI
// varies; arguments are appended to whatever is already in `args`.
// Always returns true (kept for interface compatibility).
bool build_ffmpeg_arguments(const std::string &uri, std::vector<std::string> &args)
{
    args.insert(args.end(), {"-rtsp_transport", "tcp", "-i"});
    args.push_back(uri);

    static const char *output_opts[] = {
        "-f", "image2pipe",
        "-pix_fmt", "rgb24",
        "-vcodec", "rawvideo",
        "-r", "15",
        "-" // write to stdout
    };
    for (const char *opt : output_opts)
        args.emplace_back(opt);

    return true;
}
// Launches ffmpeg as a child process streaming `uri`, with stdout and stderr
// captured so the caller can read raw frames and diagnostics.
// Returns the boost::process child handle.
boost::process::child start_ffmpeg(const std::string &uri,
const std::string &ffmpeg_path = "c:\\Tools\\ffmpeg.exe")
{
    std::vector<std::string> args;
    build_ffmpeg_arguments(uri, args);
    boost::process::context ctx;
    // FIX: inherit this process's environment. Without it the spawned ffmpeg
    // fails to open the RTSP input ("Unknown error") even though the exact
    // same command line works from a shell — the solution documented at the
    // end of this post.
    ctx.environment = boost::process::self::get_environment();
    ctx.stdout_behavior = boost::process::capture_stream();
    ctx.stderr_behavior = boost::process::capture_stream();
    return boost::process::launch(ffmpeg_path, args, ctx);
}
// Reads up to frame_size bytes from ffmpeg's stdout stream into frame_bytes.
// Returns true only when a complete frame (exactly frame_size bytes) was
// read; on a short/failed read the stream state is cleared so the caller
// can retry, and frame_bytes holds whatever partial data arrived.
bool read_frame(boost::process::pistream &is, int frame_size, std::vector<char> &frame_bytes)
{
    frame_bytes.clear();
    if (frame_size <= 0)
        return false; // guard: original new char[frame_size] was UB for <= 0

    // FIX: read directly into the output vector instead of a raw new[]
    // buffer — no manual delete[], no leak on early exit, no extra copy.
    frame_bytes.resize(static_cast<size_t>(frame_size));
    is.read(frame_bytes.data(), frame_size);
    const std::streamsize bytes_read = is.gcount();
    frame_bytes.resize(static_cast<size_t>(bytes_read));

    if (is.bad() || is.eof() || bytes_read < frame_size)
    {
        // We read in gunk (partial frame); reset stream flags and skip.
        is.clear();
        return false;
    }
    return true;
}
//This is where the code is invoked.
BOOST_AUTO_TEST_CASE(test_ffmpeg_stream)
{
std::string uri = "rtsp://10.0.1.21/ONVIF/MediaInput?profile=1_def_profile4";
int width = 320;
int height = 240;
int bpp = 3;
int bytes_expected = width * height * 3;
boost::process::child c = start_ffmpeg(uri);
boost::process::pistream &is = c.get_stdout();
boost::process::pistream &err = c.get_stderr();
std::vector<char> buffer;
bool result = read_frame(is, bytes_expected, buffer);
//BOOST_CHECK_EQUAL(true, result);
std::cout << "Buffer size: " << buffer.size() << std::endl;
std::string line;
while (std::getline(err, line))
std::cout << line << std::endl;
}
The output from stderr suggests that the parameters could be passed in wrong.
ffmpeg version 2.8.3 Copyright (c) 2000-2015 the FFmpeg developers
built with gcc 5.2.0 (GCC)
configuration: --enable-gpl --enable-version3 --disable-w32threads --enable-av
isynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enab
le-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --
enable-libdcadec --enable-libfreetype --enable-libgme --enable-libgsm --enable-l
ibilbc --enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb --enab
le-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-librtmp --en
able-libschroedinger --enable-libsoxr --enable-libspeex --enable-libtheora --ena
ble-libtwolame --enable-libvidstab --enable-libvo-aacenc --enable-libvo-amrwbenc
--enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enabl
e-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-lzma --ena
ble-decklink --enable-zlib
libavutil 54. 31.100 / 54. 31.100
libavcodec 56. 60.100 / 56. 60.100
libavformat 56. 40.101 / 56. 40.101
libavdevice 56. 4.100 / 56. 4.100
libavfilter 5. 40.101 / 5. 40.101
libswscale 3. 1.101 / 3. 1.101
libswresample 1. 2.101 / 1. 2.101
libpostproc 53. 3.100 / 53. 3.100
rtsp://10.0.1.21/ONVIF/MediaInput?profile=1_def_profile4: Unknown error
Is there a way of showing the full command line with arguments that boost::process::launch is calling? Is there anything obvious that I'm doing wrong with boost::process?
Update:
Suspecting that it could be the command line arguments being passed in wrong, I've created a dummy executable that prints out the command line arguments it receives. It's a drop-in replacement for ffmpeg.exe purely so that I can see what command lines are being passed. The command line I'm getting is -rtsp_transport tcp -i rtsp://10.0.1.21/ONVIF/MediaInput?profile=1_def_profile4 -f image2pipe -pix_fmt rgb24 -vcodec rawvideo -r 15 -. Manually calling ffmpeg with that command line works as expected. Yet somehow it doesn't work when launched via boost::process.
** Solution **
It looks like I need to set the the environment field of the context. No idea why that fixes the problem, but it does.
boost::process::context ctx;
ctx.environment = boost::process::self::get_environment();
ctx.stdout_behavior = boost::process::capture_stream();
ctx.stderr_behavior = boost::process::capture_stream();