By accident I found out that I can use read and write on socket descriptors. Can I somehow (ab)use the fstream mechanism to output data into the socket descriptor?
The standard file stream doesn't support use of a file descriptor. However, the I/O stream classes make it reasonably easy to create your own sources of, or destinations for, characters. The magic class is std::streambuf, whose responsibility is to buffer characters and to read or write characters at the appropriate times. Nicolai Josuttis's "The C++ Standard Library" has a detailed description of how to do so (the basis of which I contributed to Nico many years ago). A simple implementation of a stream buffer using a socket for reading and writing would look something like this:
#include <algorithm>
#include <iostream>
#include <iterator>
#include <streambuf>
#include <cstddef>
#include <unistd.h>
class fdbuf
: public std::streambuf
{
private:
enum { bufsize = 1024 };
char outbuf_[bufsize];
char inbuf_[bufsize + 16 - sizeof(int)];
int fd_;
public:
typedef std::streambuf::traits_type traits_type;
fdbuf(int fd);
~fdbuf();
void open(int fd);
void close();
protected:
int overflow(int c);
int underflow();
int sync();
};
fdbuf::fdbuf(int fd)
: fd_(-1) {
this->open(fd);
}
fdbuf::~fdbuf() {
this->close();
}
void fdbuf::open(int fd) {
this->close();
this->fd_ = fd;
this->setg(this->inbuf_, this->inbuf_, this->inbuf_);
this->setp(this->outbuf_, this->outbuf_ + bufsize - 1);
}
void fdbuf::close() {
if (!(this->fd_ < 0)) {
this->sync();
::close(this->fd_);
}
}
// Called when the put area is full: stash c in the reserved last slot, then flush.
int fdbuf::overflow(int c) {
if (!traits_type::eq_int_type(c, traits_type::eof())) {
*this->pptr() = traits_type::to_char_type(c);
this->pbump(1);
}
return this->sync() == -1
? traits_type::eof()
: traits_type::not_eof(c);
}
// Write out the pending characters in the put area; anything not written is kept.
int fdbuf::sync() {
if (this->pbase() != this->pptr()) {
std::streamsize size(this->pptr() - this->pbase());
std::streamsize done(::write(this->fd_, this->outbuf_, size));
// The code below assumes that it is success if the stream made
// some progress. Depending on the needs it may be more
// reasonable to consider it a success only if it managed to
// write the entire buffer and, e.g., loop a couple of times
// to try achieving this success.
if (0 < done) {
std::copy(this->pbase() + done, this->pptr(), this->pbase());
this->setp(this->pbase(), this->epptr());
this->pbump(size - done);
}
}
return this->pptr() != this->epptr()? 0: -1;
}
// Refill the get area, preserving a small putback region at the front.
int fdbuf::underflow()
{
if (this->gptr() == this->egptr()) {
std::streamsize pback(std::min(this->gptr() - this->eback(),
std::ptrdiff_t(16 - sizeof(int))));
std::copy(this->egptr() - pback, this->egptr(), this->eback());
int done(::read(this->fd_, this->eback() + pback, bufsize));
this->setg(this->eback(),
this->eback() + pback,
this->eback() + pback + std::max(0, done));
}
return this->gptr() == this->egptr()
? traits_type::eof()
: traits_type::to_int_type(*this->gptr());
}
int main()
{
fdbuf inbuf(0);
std::istream in(&inbuf);
fdbuf outbuf(1);
std::ostream out(&outbuf);
std::copy(std::istreambuf_iterator<char>(in),
std::istreambuf_iterator<char>(),
std::ostreambuf_iterator<char>(out));
}
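The main() above simply copies standard input to standard output through the stream buffer. For an actual socket, the same fdbuf can wrap a descriptor obtained from the usual socket()/connect() calls; you could replace the main() above with something like the following rough sketch (the connect_to helper, address, and port are made up for illustration, and note that fdbuf closes the descriptor in its destructor):
#include <string>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
// Hypothetical helper: connect a TCP socket to ip:port and return the descriptor, or -1 on failure.
int connect_to(const char* ip, unsigned short port) {
    int fd = ::socket(AF_INET, SOCK_STREAM, 0);
    if (fd < 0) return -1;
    sockaddr_in addr = sockaddr_in();
    addr.sin_family = AF_INET;
    addr.sin_port = htons(port);
    ::inet_pton(AF_INET, ip, &addr.sin_addr);
    if (::connect(fd, reinterpret_cast<sockaddr*>(&addr), sizeof(addr)) < 0) {
        ::close(fd);
        return -1;
    }
    return fd;
}
int main() {
    int fd = connect_to("127.0.0.1", 8080);
    if (fd < 0) return 1;
    fdbuf buf(fd);                       // one stream buffer serves both directions
    std::ostream out(&buf);
    std::istream in(&buf);
    out << "hello over the socket\n" << std::flush;  // flush triggers sync() -> ::write()
    std::string reply;
    std::getline(in, reply);             // reading triggers underflow() -> ::read()
}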
Similarly to how we construct bsoncxx::document::view objects from a buffer with a single binary document, is there a way to extract single documents from a collection in a .bson dump in this framework without having to load them into a DB?
i.e., this is what works for single document objects:
uint8_t *buffer; // single BSON document
size_t length; // size of buffer
bsoncxx::document::view view(buffer, length);
for (auto elem : view) {
doSomethingWithElem();
}
I'd like to be able to construct a cursor for the whole dump, but without loading it into a collection. Is something like this possible?
I found the solution and it was pretty simple in the end: I used the libbson library.
An example of what I used is below:
#include <bson.h>
// and other includes
void read_bson(const char *path) {
bson_reader_t *reader;
const bson_t *doc;
bson_error_t error;
bool eof = false;
reader = bson_reader_new_from_file(path, &error);
if (reader)
{
// Iterate over every document in the dump file.
while ((doc = bson_reader_read(reader, &eof)))
{
const uint8_t *buffer = bson_get_data(doc);
auto view = bsoncxx::document::view(buffer, doc->len);
}
bson_reader_destroy(reader);
}
}
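If the dump already sits in a memory buffer rather than in a file, libbson can iterate it the same way. Here is a small sketch using bson_reader_new_from_data; the buffer and length are assumed to come from elsewhere:
void read_bson_from_memory(const uint8_t *buffer, size_t length) {
    bson_reader_t *reader = bson_reader_new_from_data(buffer, length);
    const bson_t *doc;
    bool eof = false;
    while ((doc = bson_reader_read(reader, &eof)))
    {
        auto view = bsoncxx::document::view(bson_get_data(doc), doc->len);
        // work with view here
    }
    bson_reader_destroy(reader);
}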
If you are using mongo-cxx-driver, here is an example of reading a BSON file dumped by the mongodump tool.
#include <iostream>
#include <vector>
#include <fstream>
#include <sstream>
#include <cstring>
#include <cstdint>
#include <mongocxx/client.hpp>
#include <mongocxx/uri.hpp>
#include <bsoncxx/json.hpp>
#include <bsoncxx/builder/stream/document.hpp>
void parse_one_doc(const char *data, const size_t &length) {
// TODO fill your code
bsoncxx::document::view view(reinterpret_cast<const std::uint8_t *>(data), length);
std::cout << bsoncxx::to_json(view) << std::endl;
}
int unpack_size(const char *data, size_t position) {
// BSON stores each document's total length as a little-endian int32 at its start;
// memcpy avoids an unaligned, type-punned read.
std::int32_t size;
std::memcpy(&size, data + position, sizeof(size));
return size;
}
bool parse_mongo_dumper(const std::string &data) {
size_t position = 0u, end = data.length() - 1u, data_len = data.size();
size_t obj_size, obj_end;
const char *dc = data.c_str();
while (position < end) {
obj_size = unpack_size(dc, position);
if (position + obj_size > data_len) {
return false;
}
obj_end = position + obj_size;
if (*(dc + obj_end - 1) != '\0') {
return false;
}
parse_one_doc(dc + position, obj_size);
position = obj_end;
}
return true;
}
int main() {
std::string f = "/path/to/data.bson";
// read all data into string
std::ifstream t(f, std::ios::binary);
std::stringstream buffer;
buffer << t.rdbuf();
std::string s = buffer.str();
// parse bson
parse_mongo_dumper(s);
return 0;
}
I am trying to write a buffered std::streambuf for a socket. I have already written an unbuffered std::streambuf, but I don't understand why the buffered streambuf doesn't work.
Socket::StreamBuf::StreamBuf(const IO &io, tcp::Socket *socket) :
socket(socket),
// wb is std::vector<char>
wb(io.writing) // set size
{
// write
char_type *buf = wb.data();
setp(buf, buf + (wb.size() - 1));
};
int Socket::StreamBuf::overflow(int c) {
if(c != traits_type::eof()) {
*pptr() = c;
pbump(1);
if(sync() == -1) return traits_type::eof();
}
return c;
}
int Socket::StreamBuf::sync() {
if(pptr() && pptr() > pbase()) {
Int32 sz = Int32(pptr() - pbase());
// int Socket::send(char *data, int size)
// return sent bytes count
if (socket->send(pbase(), sz) == sz) {
pbump(-sz);
return 0;
}
}
return -1;
}
I'm trying to convert a program (it's a bridge between vscode and a debugger).
This program is written in C#.
It was based on vscode-mono-debug
(https://github.com/Microsoft/vscode-mono-debug/blob/master/src/Protocol.cs).
Well, in C# I can read the standard input as a stream:
byte[] buffer = new byte[BUFFER_SIZE];
Stream inputStream = Console.OpenStandardInput();
_rawData = new ByteBuffer();
while (!_stopRequested) {
var read = await inputStream.ReadAsync(buffer, 0, buffer.Length);
if (read == 0) {
// end of stream
break;
}
if (read > 0) {
_rawData.Append(buffer, read);
ProcessData();
}
}
I tried this:
#define _WIN32_WINNT 0x05017
#define BUFFER_SIZE 4096
#include<iostream>
#include<thread>
#include <sstream>
using namespace std;
class ProtocolServer
{
private:
bool _stopRequested;
ostringstream _rawData;
public:
void Start()
{
char buffer[BUFFER_SIZE];
while (!cin.eof())
{
cin.getline(buffer,BUFFER_SIZE);
if (cin.fail())
{
//error
break;
}
else
{
_rawData << buffer;
}
}
}
};
int main()
{
ProtocolServer *server = new ProtocolServer();
server->Start();
return 0;
}
Input:
Content-Length: 261\r\n\r\n{\"command\":\"initialize\",\"arguments\":{\"clientID\":\"vscode\",\"adapterID\":\"advpl\",\"pathFormat\":\"path\",\"linesStartAt1\":true,\"columnsStartAt1\":true,\"supportsVariableType\":true,\"supportsVariablePaging\":true,\"supportsRunInTerminalRequest\":true},\"type\":\"request\",\"seq\":1}
This reads the first 2 lines correctly. Since the protocol does not put a \n at the end, it gets stuck in cin.getline on the third iteration.
Switching to read() makes it block in cin.read() and it does not read anything at all.
I found some similar questions:
StackOverFlow Question
And examples:
Posix_chat_client
It does not necessarily need to be asynchronous, but it has to work on Windows and Linux.
I'm sorry for my English
Thanks!
What you want is known as unformatted input operations.
Here's a 1:1 translation using just std::iostream. The only "trick" is using and honouring gcount():
std::vector<char> buffer(BUFFER_SIZE);
auto& inputStream = std::cin;
_rawData = std::string {}; // or _rawData.clear(), e.g.
while (!_stopRequested) {
inputStream.read(buffer.data(), buffer.size());
auto read = inputStream.gcount();
if (read == 0) {
// end of stream
break;
}
if (read > 0) {
_rawData.append(buffer.begin(), buffer.begin() + read);
ProcessData();
}
}
I'd personally suggest dropping that read == 0 check in favour of the more accurate:
if (inputStream.eof()) { break; } // end of stream
if (!inputStream.good()) { break; } // failure
Note that !good() also catches eof(), so you can simply write:
if (!inputStream.good()) { break; } // failure or end of stream
Here is a full demo (live on Coliru):
#include <iostream>
#include <vector>
#include <atomic>
struct Foo {
void bar() {
std::vector<char> buffer(BUFFER_SIZE);
auto& inputStream = std::cin;
_rawData = std::string {};
while (!_stopRequested) {
inputStream.read(buffer.data(), buffer.size());
auto read = inputStream.gcount();
if (read > 0) {
_rawData.append(buffer.begin(), buffer.begin() + read);
ProcessData();
}
if (!inputStream.good()) { break; } // failure or end of stream
}
}
protected:
void ProcessData() {
//std::cout << "got " << _rawData.size() << " bytes: \n-----\n" << _rawData << "\n-----\n";
std::cout << "got " << _rawData.size() << " bytes\n";
_rawData.clear();
}
static constexpr size_t BUFFER_SIZE = 128;
std::atomic_bool _stopRequested { false };
std::string _rawData;
};
int main() {
Foo foo;
foo.bar();
}
Prints (e.g. when reading its own source file):
got 128 bytes
got 128 bytes
got 128 bytes
got 128 bytes
got 128 bytes
got 128 bytes
got 128 bytes
got 92 bytes
I have to write a C++ program that, like gzip, can:
* Take input from a file or from a character stream, as with compression:
gzip file
type file | gzip
* Produce output to a file or to a character stream, as with decompression:
gzip -d file.gz
gzip -dc file.gz
I don't know how to approach the task, which techniques to use, or how to design classes that buffer the input and output. I already have classes that buffer input and output and that read/write data from/to a file.
DataBuffer.h (reads uncompressed data from a file):
#ifndef DataBuffer_h
#define DataBuffer_h
#include <fstream>
#include <string>
enum DataBufferState
{
DATABUFFER_OK = 0,
DATABUFFER_EOF = 1
};
class DataBuffer
{
std::fstream file;
std::string buffer;
unsigned int maxBufferSize;
public:
DataBuffer(const std::string& filename, unsigned int maxBuffSize);
~DataBuffer();
bool OpenFile(const std::string& filename);
void SetMaxBufferSize(unsigned int maxBuffSize);
DataBufferState FullBufferWithDataOld();
DataBufferState FullBufferWithData();
std::string GetDataBuffer();
};
#endif
DataBuffer.cpp:
#include "DataBuffer.h"
using namespace std;
DataBuffer::DataBuffer(const string& filename, unsigned int maxBuffSize)
{
OpenFile(filename);
SetMaxBufferSize(maxBuffSize);
}
DataBuffer::~DataBuffer()
{
file.close();
}
bool DataBuffer::OpenFile(const string& filename)
{
file.open(filename.c_str(),ios::in);
if(!file.is_open())
return false;
return true;
}
void DataBuffer::SetMaxBufferSize(unsigned int maxBuffSize)
{
maxBufferSize = maxBuffSize;
}
DataBufferState DataBuffer::FullBufferWithDataOld()
{
while(true)
{
string line;
streampos pos = file.tellg(); // Remember the position before reading the line
getline(file,line);
if( buffer.size()+line.size()>maxBufferSize )
{
// Rewind the file pointer
file.seekg(pos,ios::beg); // Restore the position from before the line was read
break;
}
buffer += line + "\n";
if(file.eof())
return DATABUFFER_EOF;
}
return DATABUFFER_OK;
}
DataBufferState DataBuffer::FullBufferWithData()
{
char c;
for(unsigned int i=0;i<maxBufferSize;++i)
{
c = file.get();
if(file.eof()) break;
buffer += c;
}
if(file.eof())
return DATABUFFER_EOF;
return DATABUFFER_OK;
}
string DataBuffer::GetDataBuffer()
{
string buf = buffer;
buffer.clear();
return buf;
}
BufferWriter.h (saves uncompressed data into a file):
#ifndef BufferWriter_h
#define BufferWriter_h
#include <string>
#include <fstream>
class BufferWriter
{
std::string filename;
std::fstream file;
public:
BufferWriter(const std::string& filename_);
~BufferWriter();
bool OpenFile(const std::string& filename, bool appending);
void SendBufferToFile(std::string& buffer);
};
#endif
BufferWriter.cpp
#include "BufferWriter.h"
using namespace std;
BufferWriter::BufferWriter(const string& filename_)
{
filename = filename_;
OpenFile(filename.c_str(),false);
file.close();
}
BufferWriter::~BufferWriter()
{
file.close();
}
bool BufferWriter::OpenFile(const string& filename, bool appending)
{
if(appending)
file.open(filename.c_str(),ios::out | ios::app);
else
file.open(filename.c_str(),ios::out);
if(!file.is_open())
return false;
return true;
}
void BufferWriter::SendBufferToFile(string& buffer)
{
OpenFile(filename,true);
file.write(buffer.c_str(),buffer.size());
file.close();
}
Can you give me some hints on how to improve the code for the input and output mechanisms?
Assume that I have the class presented below. How can I use istream or iterators to fill the buffer with data from a file or from standard input? Which classes from std or boost should I use, and with what parameters? Something along those lines, to support a class definition with this functionality.
[EDIT]:
#ifndef StreamBuffer_h
#define StreamBuffer_h
#include <string>
using namespace std;
enum DataBufferState
{
DATABUFFER_OK = 0,
DATABUFFER_EOF = 1
};
// gzip file
// type file | gzip -d
// gzip -d file.gz
// gzip -dc file.gz
// The constructor parameters are the stream we want to read from and the buffer length
class StreamBuffer
{
int maxBufferSize;
std::string buffer;
public:
StreamBuffer(int maxBuffSize)
{
SetMaxBufferSize(maxBuffSize);
}
~StreamBuffer()
{
}
void SetMaxBufferSize(unsigned int maxBuffSize)
{
maxBufferSize = maxBuffSize;
}
DataBufferState FullBufferWithData()
{
// What should I use, and what should this method do, to read part of a file or of standard input into the buffer?
}
std::string GetDataBuffer()
{
return buffer;
}
};
#endif
[EDIT2]:
I want to do the same thing as in this thread: Read from file or stdin, but in C++.
In general you read input from a source and write it to a sink. The simplest case is when you simply write what you read. You, however, want to apply a transformation (or filter) to the data that you read. Seeing as you're after "the c++ way," I'd suggest taking a look at boost::iostreams which abstracts the task in terms of sources/sinks.
Boost defines an abstract source by:
struct Source {
typedef char char_type;
typedef source_tag category;
std::streamsize read(char* s, std::streamsize n)
{
// Read up to n characters from the input
// sequence into the buffer s, returning
// the number of characters read, or -1
// to indicate end-of-sequence.
}
};
And sinks are defined in a similar way (with a write instead of a read, of course). The benefit of this is that the details of the source/sink are irrelevant - you can read/write to a file, to a network adapter, or whatever, without any structural changes.
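For symmetry, a sink concept looks roughly like this (a sketch following the Boost.Iostreams documentation rather than code from the question):
struct Sink {
    typedef char char_type;
    typedef sink_tag category;
    std::streamsize write(const char* s, std::streamsize n)
    {
        // Write up to n characters from the buffer s to the
        // output sequence, returning the number of characters written.
    }
};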
To apply filters I'd again suggest looking at boost::iostreams, although they do abstract a lot, which somewhat complicates the implementation.
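As a concrete illustration of chaining a source, a filter and a sink, here is a hedged sketch of gzip-compressing a file with Boost.Iostreams (it assumes Boost.Iostreams was built with zlib support, and the file names are placeholders):
#include <fstream>
#include <boost/iostreams/filtering_stream.hpp>
#include <boost/iostreams/filter/gzip.hpp>
#include <boost/iostreams/copy.hpp>
int main() {
    std::ifstream in("file", std::ios::binary);       // the source
    std::ofstream out("file.gz", std::ios::binary);   // the ultimate sink
    boost::iostreams::filtering_ostream fout;
    fout.push(boost::iostreams::gzip_compressor());   // the filter
    fout.push(out);                                   // the sink it forwards to
    boost::iostreams::copy(in, fout);                 // pump everything through
}
Decompression is symmetric: push a gzip_decompressor() onto a filtering_istream placed in front of the compressed file.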
Suppose I "popen" an executable, I get a FILE* in return. Furthermore, suppose I'd like to "connect" this file to an istream object for easier processing, is there a way to do this?
You can get away with deriving from the std::basic_streambuf or std::streambuf classes.
Something along these lines:
#include <stdio.h>
#include <string.h>
#include <iostream>
#define BUFFER_SIZE 1024
class popen_streambuf : public std::streambuf {
public:
popen_streambuf() : fp(NULL), buffer(NULL) {
}
~popen_streambuf() {
close();
delete[] buffer;
}
popen_streambuf *open(const char *command, const char *mode) {
fp = popen(command, mode);
if (fp == NULL)
return NULL;
buffer = new char_type[BUFFER_SIZE];
// It's good to check because exceptions can be disabled
if (buffer == NULL) {
close();
return NULL;
}
setg(buffer, buffer, buffer);
return this;
}
void close() {
if (fp != NULL) {
pclose(fp);
fp = NULL;
}
}
std::streamsize xsgetn(char_type *ptr, std::streamsize n) {
std::streamsize got = showmanyc();
if (n <= got) {
memcpy(ptr, gptr(), n * sizeof(char_type));
gbump(n);
return n;
}
memcpy(ptr, gptr(), got * sizeof(char_type));
gbump(got);
if (traits_type::eof() == underflow()) {
return got;
}
return (got + xsgetn(ptr + got, n - got));
}
int_type underflow() {
if (gptr() == 0) {
return traits_type::eof();
}
if (gptr() < egptr()) {
return traits_type::to_int_type(*gptr());
}
size_t len = fread(eback(), sizeof(char_type), BUFFER_SIZE, fp);
setg(eback(), eback(), eback() + (sizeof(char_type) * len));
if (0 == len) {
return traits_type::eof();
}
return traits_type::to_int_type(*gptr());
}
std::streamsize showmanyc() {
if (gptr() == 0) {
return 0;
}
if (gptr() < egptr()) {
return egptr() - gptr();
}
return 0;
}
private:
FILE *fp;
char_type *buffer;
};
int main(int argc, char *argv[])
{
char c;
popen_streambuf sb;
std::istream is(&sb);
if (NULL == sb.open("ls -la", "r")) {
return 1;
}
while (is.read(&c, 1)) {
std::cout << c;
}
return 0;
}
There is no standard way, but if you want a quick solution you can get the file descriptor with fileno() and then use Josuttis's fdstream. There may be similar efforts around, but I used this in the distant past and it worked fine. If nothing else it should be a very good map for implementing your own.
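A rough sketch of that approach, assuming Josuttis's fdstream.hpp header is available locally (it is not part of the standard library; the commonly circulated version defines fdistream in namespace boost):
#include <cstdio>
#include <iostream>
#include <string>
#include "fdstream.hpp"   // Josuttis's fdstream, not a standard header
int main() {
    FILE* fp = popen("ls -la", "r");
    if (!fp) return 1;
    boost::fdistream in(fileno(fp));   // wrap the FILE*'s underlying descriptor
    std::string line;
    while (std::getline(in, line))
        std::cout << line << '\n';
    pclose(fp);
    return 0;
}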
Sure there's a way, implement your own istream that can be constructed from a FILE*.
If you're asking whether there is a standard way to do this, then no.