Coloring a specified char in C++

I have a minesweeper console game and I want to make it a bit more beautiful. I found a coloring library on the internet and used it.
The library is:
// ConsoleColor.h
#pragma once
#include <iostream>
#include <windows.h>
inline std::ostream& blue(std::ostream &s)
{
HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
SetConsoleTextAttribute(hStdout, FOREGROUND_BLUE
|FOREGROUND_GREEN|FOREGROUND_INTENSITY);
return s;
}
inline std::ostream& red(std::ostream &s)
{
HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
SetConsoleTextAttribute(hStdout,
FOREGROUND_RED|FOREGROUND_INTENSITY);
return s;
}
inline std::ostream& green(std::ostream &s)
{
HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
SetConsoleTextAttribute(hStdout,
FOREGROUND_GREEN|FOREGROUND_INTENSITY);
return s;
}
inline std::ostream& yellow(std::ostream &s)
{
HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
SetConsoleTextAttribute(hStdout,
FOREGROUND_GREEN|FOREGROUND_RED|FOREGROUND_INTENSITY);
return s;
}
inline std::ostream& white(std::ostream &s)
{
HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
SetConsoleTextAttribute(hStdout,
FOREGROUND_RED|FOREGROUND_GREEN|FOREGROUND_BLUE);
return s;
}
struct color {
color(WORD attribute):m_color(attribute){};
WORD m_color;
};
template <class _Elem, class _Traits>
std::basic_ostream<_Elem,_Traits>&
operator<<(std::basic_ostream<_Elem,_Traits>& i, color& c)
{
HANDLE hStdout=GetStdHandle(STD_OUTPUT_HANDLE);
SetConsoleTextAttribute(hStdout,c.m_color);
return i;
}
It works fine, but I have a question: is there any way to colorize a single character in C++? For example, I want to print every 'X' in my program in red. Is that possible?
Thanks for helping.

Yes, what you're asking for is entirely possible, though it appears that only a few people know much about the applicable parts of the library. Setting the highlight color with a manipulator adds a little bit more work, but not a whole lot. Code could look something like this:
#include <iostream>
#include <windows.h>

class attribute {
    DWORD attrib;
public:
    attribute(DWORD attrib) : attrib(attrib) {}
    DWORD operator()() const { return attrib; }
};

class outbuf : public std::streambuf {
    HANDLE h;
    DWORD default_color = FOREGROUND_RED|FOREGROUND_GREEN|FOREGROUND_BLUE;
    DWORD highlight_color = FOREGROUND_GREEN;
public:
    outbuf(HANDLE h) : h(h) {
        SetConsoleTextAttribute(h, default_color);
    }
    void set_highlight(DWORD color) { highlight_color = color; }
protected:
    virtual int_type overflow(int_type c) override {
        if (c != EOF) {
            if (c == 'x') {
                SetConsoleTextAttribute(h, highlight_color);
                DWORD written;
                WriteConsole(h, &c, 1, &written, nullptr);
                SetConsoleTextAttribute(h, default_color);
            }
            else {
                DWORD written;
                WriteConsole(h, &c, 1, &written, nullptr);
            }
        }
        return c;
    }
};

std::ostream &operator<<(std::ostream &os, attribute a) {
    outbuf *out = dynamic_cast<outbuf *>(os.rdbuf());
    if (out) {
        out->set_highlight(a());
    }
    return os;
}

int main() {
    outbuf buf(GetStdHandle(STD_OUTPUT_HANDLE));
    attribute red{FOREGROUND_RED};
    attribute blue{FOREGROUND_BLUE | FOREGROUND_INTENSITY};
    std::cout.rdbuf(&buf);
    std::cout << "oxen\n" << red << "axis\n" << blue << "waxy";
}
Result (screenshot omitted): the 'x' in each word is printed in the current highlight color (green in "oxen", red in "axis", bright blue in "waxy"), while the other characters keep the default color.

There are essentially two ways you can go about it:
Using the Console API (as illustrated in Jerry Coffin's answer).
Taking advantage of the console's ability to process Virtual Terminal Sequences [1].
The latter is easier to implement and a lot more versatile. For example, it allows you to use the full range of 24-bit colors, something that isn't available through the Console API. The following illustrates how to enable processing of virtual terminal sequences, and how to use them:
#include <Windows.h>
#include <iostream>

// Define commonly used formatting control sequences
auto const& reset      { L"\x1b[0m" };
auto const& red        { L"\x1b[31m" };
auto const& bright_red { L"\x1b[91m" };

int wmain()
{
    // Enable processing of virtual terminal sequences
    auto output_handle { ::GetStdHandle(STD_OUTPUT_HANDLE) };
    DWORD mode {};
    auto success { ::GetConsoleMode(output_handle, &mode) };
    mode |= ENABLE_VIRTUAL_TERMINAL_PROCESSING;
    success = ::SetConsoleMode(output_handle, mode);

    std::wcout << red << L"Red Text\n"
               << bright_red << L"Bright Red Text\n"
               << reset << L"Normal Text\n";
}
This produces "Red Text" in red, "Bright Red Text" in bright red, and "Normal Text" in the console's default color (screenshot omitted).
[1] I wasn't able to find information on when virtual terminal sequence processing was introduced into Windows' console.
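As a side note (not part of the answer above, just for completeness): the 24-bit colors mentioned earlier are selected with the extended sequence ESC[38;2;<r>;<g>;<b>m for the foreground. A minimal sketch, assuming the same ENABLE_VIRTUAL_TERMINAL_PROCESSING setup as above and an arbitrary example RGB value:
#include <Windows.h>
#include <iostream>

int wmain()
{
    // Same setup as above: enable virtual terminal sequence processing.
    auto output_handle { ::GetStdHandle(STD_OUTPUT_HANDLE) };
    DWORD mode {};
    ::GetConsoleMode(output_handle, &mode);
    ::SetConsoleMode(output_handle, mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING);

    // ESC[38;2;<r>;<g>;<b>m picks a 24-bit foreground color (here an orange tone).
    std::wcout << L"\x1b[38;2;255;128;0m" << L"24-bit colored text\n"
               << L"\x1b[0m" << L"back to normal\n";
}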

Related

Save exr/pfm as little endian

I load a BMP file into a CImg object and save it as a PFM file. That part works. I then use this .pfm file with another library, but that library doesn't accept big-endian data, just little-endian.
CImg<float> image;
image.load_bmp(_T("D:\\Temp\\memorial.bmp"));
image.normalize(0.0, 1.0);
image.save_pfm(_T("D:\\Temp\\memorial.pfm"));
So, how can I save the BMP file as a little-endian PFM file instead of big-endian? Is it possible?
Later edit:
I have checked the first 5 elements of the .pfm header file. This is the result without invert_endianness:
CImg<float> image;
image.load_bmp(_T("D:\\Temp\\memorial.bmp"));
image.normalize(0.0, 1.0);
image.save_pfm(_T("D:\\Temp\\memorial.pfm"));
PF
512
768
1.0
=øøù=€€=‘>
and this is the result with invert_endianness:
CImg<float> image;
image.load_bmp(_T("D:\\Temp\\memorial.bmp"));
image.invert_endianness();
image.normalize(0.0, 1.0);
image.save_pfm(_T("D:\\Temp\\memorial.pfm"));
PF
512
768
1.0
?yôx?!ù=‚ì:„ç‹?
The result is the same.
This is definitely not a proper answer, but it might work as a workaround for the time being.
I didn't find out how to properly invert the endianness using CImg's functions, so I modified the resulting file instead. It's a hack, but it relies on the fact that in the PFM format the sign of the scale factor on the third header line doubles as the endianness flag: negative means little-endian, positive means big-endian. The result opens fine in GIMP and looks very close to the original image, but I can't say if it works with the library you are using. It may be worth a try.
Comments in the code:
#include "CImg/CImg.h"
#include <algorithm>
#include <filesystem> // >= C++17 must be selected as Language Standard
#include <ios>
#include <iostream>
#include <iterator>
#include <fstream>
#include <string>
using namespace cimg_library;
namespace fs = std::filesystem;
// a class to remove temporary files
class remove_after_use {
public:
remove_after_use(const std::string& filename) : name(filename) {}
remove_after_use(const remove_after_use&) = delete;
remove_after_use& operator=(const remove_after_use&) = delete;
const char* c_str() const { return name.c_str(); }
operator std::string const& () const { return name; }
~remove_after_use() {
try {
fs::remove(name);
}
catch (const std::exception & ex) {
std::cerr << "remove_after_use: " << ex.what() << "\n";
}
}
private:
std::string name;
};
// The function to hack the file saved by CImg
template<typename T>
bool save_pfm_endianness_inverted(const T& img, const std::string& filename) {
remove_after_use tempfile("tmp.pfm");
// get CImg's endianness inverted image and save it to a temporary file
img.get_invert_endianness().save_pfm(tempfile.c_str());
// open the final file
std::ofstream os(filename, std::ios::binary);
// read "tmp.pfm" and modify
// The Scale Factor / Endianness line
if (std::ifstream is; os && (is = std::ifstream(tempfile, std::ios::binary))) {
std::string lines[3];
// Read the 3 PFM header lines as they happen to be formatted by
// CImg. Will maybe not work with another library.
size_t co = 0;
for (; co < std::size(lines) && std::getline(is, lines[co]); ++co);
if (co == std::size(lines)) { // success
// write the first two lines back unharmed:
os << lines[0] << '\n' << lines[1] << '\n';
if (lines[2].empty()) {
std::cerr << "something is wrong with the pfm header\n";
return false;
}
// add a '-' if it's missing, remove it if it's there:
if (lines[2][0] == '-') { // remove the - to invert
os << lines[2].substr(1);
}
else { // add a - to invert
os << '-' << lines[2] << '\n';
}
// copy all the rest as-is:
std::copy(std::istreambuf_iterator<char>(is),
std::istreambuf_iterator<char>{},
std::ostreambuf_iterator<char>(os));
}
else {
std::cerr << "failed reading pfm header\n";
return false;
}
}
else {
std::cerr << "opening files failed\n";
return false;
}
return true;
}
int main()
{
CImg<float> img("memorial.bmp");
img.normalize(0.f, 1.f);
std::cout << "saved ok: " << std::boolalpha
<< save_pfm_endianness_inverted(img, "memorial.pfm") << "\n";
}
Wanting to solve the same issue in classic C++ style (just for the language's sake), I wrote:
BOOL CMyDoc::SavePfmEndiannessInverted(CImg<float>& img, const CString sFileName)
{
CString sDrive, sDir;
_splitpath(sFileName, sDrive.GetBuffer(), sDir.GetBuffer(), NULL, NULL);
CString sTemp;
sTemp.Format(_T("%s%sTemp.tmp"), sDrive, sDir);
sDrive.ReleaseBuffer();
sDir.ReleaseBuffer();
CRemoveAfterUse TempFile(sTemp);
img.get_invert_endianness().save_pfm(TempFile.c_str());
CFile fileTemp;
if (! fileTemp.Open(sTemp, CFile::typeBinary))
return FALSE;
char c;
UINT nRead = 0;
int nCount = 0;
ULONGLONG nPosition = 0;
CString sScale;
CByteArray arrHeader, arrData;
do
{
nRead = fileTemp.Read((char*)&c, sizeof(char));
switch (nCount)
{
case 0:
case 1:
arrHeader.Add(static_cast<BYTE>(c));
break;
case 2: // retrieve the '1.0' string
sScale += c;
break;
}
if ('\n' == c) // is new line
{
nCount++;
}
if (nCount >= 3) // read the header, so go out
{
nPosition = fileTemp.GetPosition();
break;
}
}while (nRead > 0);
if (nPosition > 1)
{
arrData.SetSize(fileTemp.GetLength() - nPosition);
fileTemp.Read(arrData.GetData(), (UINT)arrData.GetSize());
}
fileTemp.Close();
CFile file;
if (! file.Open(sFileName, CFile::typeBinary | CFile::modeCreate | CFile::modeReadWrite))
return FALSE;
CByteArray arrTemp;
ConvertCStringToByteArray(sScale, arrTemp);
arrHeader.Append(arrTemp);
arrHeader.Append(arrData);
file.Write(arrHeader.GetData(), (UINT)arrHeader.GetSize());
file.Close();
return TRUE;
}
But it seems not to do the job, because the resulting image is darker.
What have I done wrong here? The code seems very clear to me; still, it does not work as expected ...
Of course, this approach is less efficient, I know, but as I said before, it is just for the language's sake.
I think there is nothing wrong with my code :)
Here is the trial:
CImg<float> image;
image.load_bmp(_T("D:\\Temp\\memorial.bmp"));
image.normalize(0.0f, 1.0f);
image.save_pfm(_T("D:\\Temp\\memorial.pfm"));
image.get_invert_endianness().save(_T("D:\\Temp\\memorial_inverted.pfm"));
and this is how memorial.pfm and memorial_inverted.pfm look (screenshots not reproduced here).

How to find current input language

I'm trying to make a program that gets a process name, finds its ID,
and then finds the input language with the GetKeyboardLayout function.
However, I'm having difficulties and it doesn't seem to work.
It finds the process ID, but the language that is returned is always 00000000.
This is my code:
#include <iostream>
#include <windows.h>
#include <string>
#include <tlhelp32.h>
DWORD FindProcessId(LPCTSTR ProcessName);
int main() {
HKL currentKBLayout;
DWORD processID;
LPCTSTR processName = "chrome.exe";
while (true) {
processID = FindProcessId(processName);
if (processID == 0); // TODO: pause system for 5 seconds
else {
currentKBLayout = GetKeyboardLayout(processID);
std::cout << processID << " | "<< currentKBLayout << std::endl;
}
}
system("pause");
return 0;
}
DWORD FindProcessId(LPCTSTR ProcessName)
{
PROCESSENTRY32 pt;
HANDLE hsnap = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
pt.dwSize = sizeof(PROCESSENTRY32);
if (Process32First(hsnap, &pt)) { // must call this first
do {
if (!lstrcmpi(pt.szExeFile, ProcessName)) {
CloseHandle(hsnap);
return pt.th32ProcessID;
}
} while (Process32Next(hsnap, &pt));
}
CloseHandle(hsnap); // close handle on failure
return 0;
}
I agree with Remy's comment about using a simpler way to get the keyboard layout for the processes, if that's all you need (a sketch of that follows after the listing below). If, however, you are interested in adding more information to your current approach using snapshots, this could be a way to start. It takes a snapshot of all processes and threads. Each Process has a vector of Thread objects. Adding Thread objects to each Process is done via an unordered_map<processId, Process>. To get a unique set of keyboard layouts for each process (since each thread may in theory have its own), an unordered_set<HKL> is used.
#include "pch.h"
#include <iostream>
#include <string>
#include <vector>
#include <unordered_set>
#include <unordered_map>
#include <windows.h>
#include <tlhelp32.h>
struct Thread {
DWORD m_id;
HKL m_keyboard_layout;
Thread(DWORD Id) :
m_id(Id), m_keyboard_layout(GetKeyboardLayout(m_id))
{}
};
struct Process {
std::vector<Thread> m_threads;
DWORD m_id;
std::wstring m_exefile;
Process() = default;
Process(DWORD Id, std::wstring Name) :
m_id(Id), m_exefile(Name)
{}
// get a unique set of HKL:s from all the threads in the process
std::unordered_set<HKL> GetKeyboardLayouts() const {
std::unordered_set<HKL> rv;
for (auto& t : m_threads) {
if(t.m_keyboard_layout) // does it have a keyboard layout?
rv.emplace(t.m_keyboard_layout);
}
return rv;
}
// if you'd like to iterate over the individual threads
std::vector<Thread>::iterator begin() { return m_threads.begin(); }
std::vector<Thread>::iterator end() { return m_threads.end(); }
};
class Snapshot {
HANDLE hSnap;
std::unordered_map<DWORD, Process> m_processes;
void GetProcesses() {
PROCESSENTRY32 pt;
pt.dwSize = sizeof(pt);
if (Process32First(hSnap, &pt)) { // must call this first
do {
m_processes[pt.th32ProcessID] = Process(pt.th32ProcessID, pt.szExeFile);
} while (Process32Next(hSnap, &pt));
}
}
void GetThreads() {
THREADENTRY32 pt;
pt.dwSize = sizeof(pt);
if (Thread32First(hSnap, &pt)) { // must call this first
do {
m_processes[pt.th32OwnerProcessID].m_threads.emplace_back(pt.th32ThreadID);
} while (Thread32Next(hSnap, &pt));
}
}
void Populate() {
GetProcesses();
GetThreads();
}
public:
Snapshot() :
hSnap(CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS | TH32CS_SNAPTHREAD, 0)),
m_processes()
{
// TODO: make this exception better
if (hSnap == INVALID_HANDLE_VALUE) throw GetLastError();
Populate();
CloseHandle(hSnap);
}
std::unordered_map<DWORD, Process>::iterator begin() { return m_processes.begin(); }
std::unordered_map<DWORD, Process>::iterator end() { return m_processes.end(); }
};
int main() {
Snapshot snap;
// show processes with keyboard layouts
for (const auto& m : snap) { // std::pair m.first = processId, m.second = Process
const Process& p = m.second;
auto layouts = p.GetKeyboardLayouts();
if (layouts.size()) { // only show processes with keyboard layouts
std::wcout << p.m_id << L" " << p.m_exefile << L"\n";
for (const auto& l : layouts) {
std::wcout << L" layout " << l << L"\n";
}
}
}
return 0;
}
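For reference, the "simpler way" alluded to at the start of this answer is presumably something along these lines (my sketch, not tested against the asker's setup): GetKeyboardLayout() expects a thread identifier rather than a process identifier, which is why passing a process ID yields 00000000.
#include <windows.h>
#include <iostream>

int main() {
    // Sketch: query the layout of the thread owning the foreground window.
    HWND foreground = GetForegroundWindow();
    DWORD threadId = GetWindowThreadProcessId(foreground, nullptr);
    HKL layout = GetKeyboardLayout(threadId);   // passing 0 would mean "calling thread"
    std::cout << "HKL: " << layout << "\n";     // the low word is the language identifier
    return 0;
}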

std::cin: empty the input buffer without blocking

A followup to this question.
How can I clear the input buffer?
sleep(2);
// clear cin
getchar();
I only want the character that was typed last; I want to discard any input that was entered while the program was asleep. Is there a way to do that?
Also, I don't want to actively wait to clear cin, so I can't use ignore().
My approach would have been to get the current size of the buffer and if it's not equal to 0, silently empty the buffer. However, I have not found a way to get the size of the buffer. std::cin.rdbuf()->in_avail() always returns 0 for me. peek() also waits actively.
I don't want to use ncurses.
On a system supporting tcgetattr/tcsetattr:
#include <iostream>
#include <stdexcept>
#include <termios.h>
#include <unistd.h>
class StdInput
{
// Constants
// =========
public:
enum {
Blocking = 0x01,
Echo = 0x02
};
// Static
// ======
public:
static void clear() {
termios attributes = disable_attributes(Blocking);
while(std::cin)
std::cin.get();
std::cin.clear();
set(attributes);
}
// StdInput
// ========
public:
StdInput()
: m_restore(get())
{}
~StdInput()
{
set(m_restore, false);
}
void disable(unsigned flags) { disable_attributes(flags); }
void disable_blocking() { disable_attributes(Blocking); }
void restore() { set(m_restore); }
private:
static termios get() {
const int fd = fileno(stdin);
termios attributes;
if(tcgetattr(fd, &attributes) < 0) {
throw std::runtime_error("StdInput");
}
return attributes;
}
static void set(const termios& attributes, bool exception = true) {
const int fd = fileno(stdin);
if(tcsetattr(fd, TCSANOW, &attributes) < 0 && exception) {
throw std::runtime_error("StdInput");
}
}
static termios disable_attributes(unsigned flags) {
termios attributes = get();
termios a = attributes;
if(flags & Blocking) {
a.c_lflag &= ~ICANON;
a.c_cc[VMIN] = 0;
a.c_cc[VTIME] = 0;
}
if(flags & Echo) {
a.c_lflag &= ~ECHO;
}
set(a);
return attributes;
}
termios m_restore;
};
int main()
{
// Get something to ignore
std::cout << "Ignore: ";
std::cin.get();
// Do something
StdInput::clear();
std::cout << " Input: ";
std::string line;
std::getline(std::cin, line);
std::cout << "Output: " << line << std::endl;
return 0;
}

Small logger class [closed]

I am looking for a small, lightweight logging system in C++. I have found some existing frameworks, but I don't need all of their features at this point in time. I am primarily looking for a small system that lets me configure, for example, the log level and the output file. I am looking for an existing solution as I don't want to reinvent the wheel.
I strongly recommend this simple logging system: http://www.drdobbs.com/cpp/201804215. It is composed of a single header file. I have successfully used it on Linux, Windows and Mac OS X.
You write to the log like this:
FILE_LOG(logWARNING) << "Ops, variable x should be " << expectedX << "; is " << realX;
I really like the stream syntax. It is unobtrusive, typesafe and expressive. The logging framework automatically adds a \n at the end of the line, plus date, time and indentation.
Configuring the logs is pretty easy:
FILELog::ReportingLevel() = logDEBUG3;
FILE* log_fd = fopen( "mylogfile.txt", "w" );
Output2FILE::Stream() = log_fd;
This framework is also easy to extend. At work, we recently made some adaptations so that it now uses a std::ofstream instead of a FILE*. As a result, we are now able to add nice features such as encrypting the logs by chaining streams, as sketched below.
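The encryption itself is beyond the scope of this answer, but the stream-chaining idea can be sketched roughly like this (purely illustrative, not our production code): a filtering streambuf transforms every character before handing it to the buffer of an underlying std::ofstream.
#include <fstream>
#include <streambuf>

// Toy "encrypting" filter: XOR with a fixed key, chained in front of another
// streambuf. A real implementation would use a proper cipher and buffering.
class xor_filter_buf : public std::streambuf {
    std::streambuf* sink_;
    char key_;
public:
    xor_filter_buf(std::streambuf* sink, char key) : sink_(sink), key_(key) {}
protected:
    int_type overflow(int_type c) override {
        if (c == traits_type::eof())
            return traits_type::not_eof(c);
        char ch = traits_type::to_char_type(c) ^ key_;
        return sink_->sputc(ch) == traits_type::eof() ? traits_type::eof() : c;
    }
    int sync() override { return sink_->pubsync(); }
};

// Usage sketch:
//   std::ofstream file("log.enc", std::ios::binary);
//   xor_filter_buf filter(file.rdbuf(), 0x5A);
//   std::ostream encrypted(&filter);
//   encrypted << "secret log line\n";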
For anyone wanting a simple solution, I recommend: easylogging++
Single header only C++ logging library. It is extremely light-weight,
robust, fast performing, thread and type safe and consists of many
built-in features. It provides ability to write logs in your own
customized format. It also provide support for logging your classes,
third-party libraries, STL and third-party containers etc.
This library has everything built-in to prevent usage of external
libraries.
Simple example: (more advanced examples available on the link above).
#include "easylogging++.h"
INITIALIZE_EASYLOGGINGPP
int main(int argc, char* argv[]) {
LOG(INFO) << "My first info log using default logger";
return 0;
}
Example output inside a class:
2015-08-28 10:38:45,900 DEBUG [default] [user@localhost]
[Config::Config(const string)] [src/Config.cpp:7] Reading config file:
'config.json'
I tried log4cpp and boost::log but they are not as easy as this one.
EXTRA CONTENT: Minimal version - LOG header
I created a small piece of code for even simpler applications, based on easylogging, but it requires no initialization (be aware that it is probably not thread safe). Here is the code:
/*
* File: Log.h
* Author: Alberto Lepe <dev@alepe.com>
*
* Created on December 1, 2015, 6:00 PM
*/
#ifndef LOG_H
#define LOG_H
#include <iostream>
using namespace std;
enum typelog {
DEBUG,
INFO,
WARN,
ERROR
};
struct structlog {
bool headers = false;
typelog level = WARN;
};
extern structlog LOGCFG;
class LOG {
public:
LOG() {}
LOG(typelog type) {
msglevel = type;
if(LOGCFG.headers) {
operator << ("["+getLabel(type)+"]");
}
}
~LOG() {
if(opened) {
cout << endl;
}
opened = false;
}
template<class T>
LOG &operator<<(const T &msg) {
if(msglevel >= LOGCFG.level) {
cout << msg;
opened = true;
}
return *this;
}
private:
bool opened = false;
typelog msglevel = DEBUG;
inline string getLabel(typelog type) {
string label;
switch(type) {
case DEBUG: label = "DEBUG"; break;
case INFO: label = "INFO "; break;
case WARN: label = "WARN "; break;
case ERROR: label = "ERROR"; break;
}
return label;
}
};
#endif /* LOG_H */
Usage:
#include "Log.h"
int main(int argc, char** argv) {
//Config: -----(optional)----
structlog LOGCFG = {};
LOGCFG.headers = false;
LOGCFG.level = DEBUG;
//---------------------------
LOG(INFO) << "Main executed with " << (argc - 1) << " arguments";
}
This code prints the message using "cout", but you can change it to use "cerr", append to a file, etc. (one way is sketched below). I hope it's useful to someone. (Note: I'm not a C++ expert in any way, so this code may explode in extreme cases.)
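For example (my own addition, not part of the original header): without touching the LOG class you can point all output at a file by swapping cout's buffer; "app.log" is just a placeholder name.
#include "Log.h"
#include <fstream>
#include <iostream>

structlog LOGCFG;   // definition for the extern declared in Log.h

int main() {
    std::ofstream logfile("app.log", std::ios::app);
    auto* old_buf = std::cout.rdbuf(logfile.rdbuf());   // redirect cout into the file

    LOGCFG.headers = true;
    LOGCFG.level = INFO;
    LOG(INFO) << "This line ends up in app.log";

    std::cout.rdbuf(old_buf);   // restore before logfile goes out of scope
}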
I recommend trying the plog library (I'm the author). It's about 1000 lines of code, header only and easy to use:
#include <plog/Log.h>

int main()
{
    plog::init(plog::debug, "Sample.log");

    LOGD << "Hello log!";
    LOGD_IF(true) << "conditional logging";

    return 0;
}
All of the loggers mentioned so far make use of macros for logging calls. To me, that is so ugly that I don't care what performance boost it gives; I won't go near it.
https://github.com/gabime/spdlog is what I like. Clean syntax, handles all typical usages, fast and small. E.g., for a file logger it is:
auto my_logger = spd::basic_logger_mt("basic_logger", "logs/basic.txt");
my_logger->info("Some log message");
This question has my attempt, with some fanciness. It is completely Standard C++ and makes no platform assumptions whatsoever. It basically consists of a temporary object used like this:
Debug(5) << "This is level 5 debug info.\n";
I'm sure you can figure out how to specify different files and other things once you have the basic layout (a bare-bones sketch follows below). I tried to keep the class structured so that in a release build, every form of Debug output is removed as completely as possible.
Mind you: if you specify a filename each time you construct it, and open the file and close it again, performance will suffer. In the case of multiple output files, it would certainly be best to have several static data members that open the different files when the program is run or when they are first used.
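Since the linked question isn't quoted here, a bare-bones sketch of the "temporary object" idea (my paraphrase, not the actual linked answer; DEBUG_LEVEL is an assumed configuration macro) could look like this:
#include <iostream>

#ifndef DEBUG_LEVEL
#define DEBUG_LEVEL 5
#endif

// Debug(level) builds a temporary; operator<< forwards to std::cerr only if
// the level passes the threshold, and the destructor flushes at the end of
// the full expression.
class Debug {
    bool enabled_;
public:
    explicit Debug(int level) : enabled_(level <= DEBUG_LEVEL) {}
    ~Debug() { if (enabled_) std::cerr << std::flush; }

    template <typename T>
    Debug& operator<<(const T& value) {
        if (enabled_) std::cerr << value;
        return *this;
    }
};

// Usage: Debug(5) << "This is level 5 debug info.\n";
// For a release build, Debug could be swapped for a no-op class so the
// statements compile away.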
If you don't have size limitations on the project and you expect it to live a long time, I would suggest looking at Apache Log4cxx. It's not a small library, but it supports just about everything you ever wanted (including some things you didn't even know you wanted) in logging, and it's portable.
In any larger project sooner or later you'll want your logging solution to do more than a "small logger class", so indeed why reinvent the wheel.
An update to Dr. Dobb's "A Lightweight Logger for C++":
There are actually a couple of loggers referred to in Dr. Dobb's. The first one is Logging In C++, which is listed in one of the answers. I tried to use this one, but the source is no longer available on the Dr. Dobb's site.
The second one, which worked for me and which I recommend, is A Lightweight Logger for C++ by Filip Janiszewski, working at Nokia Siemens Networks. At first I had some problems getting this code to run, so as I was searching for solutions I ran across an update by the original author at GitHub: fjanisze/logger. I found this code easy to understand, modify, and use. It is thread safe and works with Visual Studio on Windows.
Another logger mentioned above is easylogging++. When I first tried it, it looked promising. But when I added threading and Winsock2 sockets under Windows, it crashed. I did have the defines set for threading and Winsock2, but I still couldn't get it to work, so I can't recommend this one. The source code is also very complex, so I had no chance to modify and fix it within a reasonable amount of time.
The above answers are all great.
I doubt anyone will ever see this answer, but this is what I use:
https://github.com/asn10038/Cpp_Logger
It is easy to set up after configuring 4-5 variable names in the .h file, and it is implemented without non-standard dependencies.
It is not header-only, but it could be made so pretty easily.
Maybe this helps someone.
I, as well as many others, also answered this question with some code.
This isn't really "ready" in all ways, but it could be easily modified:
#pragma once
#include <codecvt>
#include <condition_variable>
#include <fstream>
#include <iostream>
#include <locale>
#include <memory>
#include <mutex>
#include <ostream>
#include <queue>
#include <sstream>
#include <string>
#include <thread>
#include <unordered_map>
#include <vector>
#ifdef _WIN32
#include <windows.h>
#endif
#include <string.h>
#define LOGL(level, msg) \
if (Loggy::isLevel(level)) { \
Loggy::writer(level, __FILE__, __LINE__) << msg; \
Loggy::queue(); \
}
#define LOG_FLUSH() \
{ \
Loggy::wait_queues(); \
}
#define LOGT(msg) LOGL(Loggy::LTRACE, msg)
#define LOGD(msg) LOGL(Loggy::LDEBUG, msg)
#define LOGI(msg) LOGL(Loggy::LINFO, msg)
#define LOGE(msg) LOGL(Loggy::LERROR, msg)
namespace Loggy {
using namespace std;
constexpr int DEFAULT_BUF_CNT = 1000;
constexpr const char *DEFAULT_TIME_FMT = "%Y%m%d.%H%M%S";
constexpr double DROP_NOTIFY_SECONDS = 5.0;
constexpr double FLUSH_SECONDS = 1.0;
enum {
LINVALID = 0,
LTRACE = 9,
LDEBUG = 10,
LINFO = 20,
LERROR = 40,
LWARN = 30,
LCRITICAL = 50,
LMAX = 50,
};
unordered_map<int, string> levelNames_ = {
{ LINVALID, "INVALID" },
{ LTRACE, "TRACE" },
{ LDEBUG, "DEBUG" },
{ LINFO, "INFO" },
{ LERROR, "ERROR" },
{ LWARN, "WARN" },
{ LCRITICAL, "CRITICAL" },
};
wstring str2w(const string &in)
{
#ifdef _WIN32
if (in.empty())
return std::wstring();
int size_needed = MultiByteToWideChar(CP_UTF8, 0, &in[0], (int)in.size(), NULL, 0);
std::wstring wstrTo(size_needed, 0);
MultiByteToWideChar(CP_UTF8, 0, &in[0], (int)in.size(), &wstrTo[0], size_needed);
return wstrTo;
#else
thread_local std::wstring_convert<std::codecvt_utf8<wchar_t>> wcu16;
return wcu16.from_bytes(in);
#endif
}
string w2str(const wstring &in)
{
#ifdef _WIN32
if (in.empty())
return std::string();
int size_needed
= WideCharToMultiByte(CP_UTF8, 0, &in[0], (int)in.size(), NULL, 0, NULL, NULL);
std::string strTo(size_needed, 0);
WideCharToMultiByte(CP_UTF8, 0, &in[0], (int)in.size(), &strTo[0], size_needed, NULL, NULL);
return strTo;
#else
thread_local std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> wcu8;
return wcu8.to_bytes( in );
#endif
}
template <class T> class SafeQueue {
public:
SafeQueue(void)
: q()
, m()
, c()
, x()
{
}
~SafeQueue(void) { lock_guard<mutex> lock(m); }
// Add an element to the queue.
void push(T t)
{
lock_guard<mutex> lock(m);
q.push(t);
c.notify_one();
}
// Get the "front"-element.
// If the queue is empty, wait till a element is avaiable.
T pop(void)
{
unique_lock<mutex> lock(m);
while (!x && q.empty()) {
// release lock as long as the wait and reaquire it afterwards.
c.wait(lock);
}
if (x) {
return T();
};
T val = q.front();
q.pop();
if (q.empty()) {
c.notify_all();
}
return val;
}
size_t size() { return q.size(); }
void join(void)
{
unique_lock<mutex> lock(m);
while (!q.empty()) {
c.wait(lock);
}
}
size_t drain(void)
{
unique_lock<mutex> lock(m);
std::queue<T> empty;
swap(q, empty);
c.notify_all();
return empty.size();
}
size_t quit()
{
x = true;
return drain();
}
private:
queue<T> q;
mutable mutex m;
condition_variable c;
bool x;
};
static string timestamp(const char format[], const time_t &rawtime)
{
struct tm timeinfo;
char buffer[120];
#ifdef _WIN32
localtime_s(&timeinfo, &rawtime);
#else
localtime_r(&rawtime, &timeinfo);
#endif
strftime(buffer, sizeof(buffer), format, &timeinfo);
return string(buffer);
}
#ifdef _WIN32
#define _LOGGY_CVT_FILENAME(s) s
#else
#define _LOGGY_CVT_FILENAME(s) Loggy::w2str(s)
#endif
class Output {
SafeQueue<wstring> queue_; // this should be first
wofstream fstream_;
wostream &wstream_;
size_t max_;
int level_;
size_t dropped_ = 0;
bool alive_ = true;
time_t firstDrop_ = 0;
std::thread thread_; // this must be last
public:
Output(wostream &s, int level, int max)
: wstream_(s)
, level_(level)
, max_(max)
, thread_(&Output::worker, this)
{
}
Output(const wstring &s, int level, size_t max)
: fstream_(_LOGGY_CVT_FILENAME(s), std::wofstream::out | std::wofstream::app)
, wstream_(fstream_)
, level_(level)
, max_(max)
, thread_(&Output::worker, this)
{
}
~Output()
{
alive_ = false;
dropped_ += queue_.quit();
if (dropped_) {
logDropped();
}
thread_.join();
}
void wait() { queue_.join(); wstream_.flush(); }
void logDropped()
{
wstringstream ws;
time_t t;
time(&t);
ws << Loggy::timestamp(DEFAULT_TIME_FMT, t).c_str();
ws << " dropped " << dropped_ << " entries";
queue_.push(ws.str());
dropped_ = 0;
}
void add(wstring &str, time_t &t)
{
if (alive_) {
if (max_ == 0 || queue_.size() < max_) {
queue_.push(str);
} else {
++dropped_;
if (dropped_ == 1) {
firstDrop_ = t;
} else if (difftime(t, firstDrop_) > DROP_NOTIFY_SECONDS) {
logDropped();
}
}
}
}
void worker()
{
int written = 0;
time_t lastFlush = 0;
while (alive_) {
if (!queue_.size() && written > 0) {
time_t t;
time(&t);
if (difftime(t, lastFlush) > FLUSH_SECONDS) {
wstream_.flush();
lastFlush = t;
written = 0;
}
}
auto t = queue_.pop();
if (alive_) {
wstream_ << t << std::endl;
written += 1;
}
}
}
};
class Log {
public:
~Log() { resetOutput(); };
int level_ = LINFO;
int trigFrom_ = LINVALID;
int trigTo_ = LINVALID;
int trigCnt_ = LINVALID;
string timeFormat_ = DEFAULT_TIME_FMT;
mutex mutex_;
deque<Output> outputs_;
Output default_output_;
vector<wstring> buffer_;
Log()
: default_output_(wcout, LINFO, 1) {};
bool isLevel(int level) { return level >= level_; }
void resetOutput()
{
lock_guard<mutex> lock(mutex_);
outputs_.clear();
}
void addOutput(const wstring &path, int level, int bufferSize)
{
lock_guard<mutex> lock(mutex_);
outputs_.emplace_back(path, level, bufferSize);
}
void addOutput(wostream &stream, int level, int bufferSize)
{
lock_guard<mutex> lock(mutex_);
outputs_.emplace_back(stream, level, bufferSize);
}
std::vector<const char *> getFiles()
{
std::vector<const char *> ret;
return ret;
}
void setTrigger(int levelFrom, int levelTo, int lookbackCount)
{
trigFrom_ = levelFrom;
trigTo_ = levelTo;
trigCnt_ = lookbackCount;
}
void setLevel(int level) { level_ = level; }
struct LastLog {
wstringstream ws;
time_t tm = 0;
};
static LastLog &lastLog()
{
thread_local LastLog ll_;
return ll_;
}
static const char *basename(const char *file)
{
const char *b = strrchr(file, '\\');
if (!b)
b = strrchr(file, '/');
return b ? b + 1 : file;
}
static const char *levelname(int level) { return levelNames_[level].c_str(); }
wostream &writer(int level, const char *file, int line)
{
auto &ll = lastLog();
time(&ll.tm);
ll.ws.clear();
ll.ws.str(L"");
return ll.ws << timestamp(timeFormat_.c_str(), ll.tm).c_str() << " " << basename(file)
<< ":" << line << " " << levelname(level) << " ";
}
void queue()
{
lock_guard<mutex> lock(mutex_);
auto &ll = lastLog();
auto s = ll.ws.str();
if (outputs_.empty()) {
default_output_.add(s, ll.tm);
} else {
for (auto &out : outputs_) {
out.add(s, ll.tm);
}
}
}
void wait_queues()
{
if (outputs_.empty()) {
default_output_.wait();
} else {
for (auto &out : outputs_) {
out.wait();
}
}
}
};
static Log &getInstance()
{
static Log l;
return l;
}
void resetOutput() { getInstance().resetOutput(); }
void addOutput(const wstring &path, int level = LDEBUG, int bufferSize = DEFAULT_BUF_CNT)
{
getInstance().addOutput(path, level, bufferSize);
}
void addOutput(wostream &stream, int level = LDEBUG, int bufferSize = DEFAULT_BUF_CNT)
{
getInstance().addOutput(stream, level, bufferSize);
}
void setTrigger(int levelFrom, int levelTo, int lookbackCount)
{
getInstance().setTrigger(levelFrom, levelTo, lookbackCount);
}
std::vector<const char *> getFiles() { return getInstance().getFiles(); }
void setLevel(int level) { getInstance().setLevel(level); }
bool isLevel(int level) { return getInstance().isLevel(level); }
wostream &writer(int level, const char *file, int line)
{
return getInstance().writer(level, file, line);
}
void queue() { getInstance().queue(); }
void wait_queues() { getInstance().wait_queues(); }
} // end namespace Loggy
Features:
writing to the log doesn't block on i/o
similar macros to other solutions (LOGE(blah << stream))
prefers discarding log entries to slowing down
lazy flushing
header only, very small, stl classes only
tested on osx/win/nix
time format is configurable
Missing stuff:
easy, flexible log formatting (predefining a macro would be fine)
triggers have an interface but don't work yet
microseconds aren't working yet
If anyone actually likes this solution in any way, lmk and I'll make a real repo out of it with tests, etc. It's pretty fast. Probably not as fast as speedlogger (a heavier feature complete library), but not sure.
Original gist:
https://gist.github.com/earonesty/977b14c93358fe9b9ee674baac5d42d7
I created a small logging class because I had issues including the other examples in VSCode while compiling. Here it is as a one-file header:
#include <iostream>
#include <fstream>
#include <string>
#include <iomanip>
#include <ctime>
#include <sstream>
using namespace std;
class logging
{
private:
ofstream myfile;
std::string get_time()
{
auto t = std::time(nullptr);
auto tm = *std::localtime(&t);
std::ostringstream oss;
//2047-03-11 20:18:26
oss << std::put_time(&tm, "%Y-%m-%d-%H:%M:%S");
auto str = oss.str();
return str;
}
public:
logging(string filepath)
{
myfile.open (filepath);
}
~logging()
{
myfile.close();
}
void write(string line)
{
myfile << get_time() << " " << line <<std::endl;
}
};

UTF-8 output on Windows console

The following code shows unexpected behaviour on my machine (tested with Visual C++ 2008 SP1 on Windows XP and VS 2012 on Windows 7):
#include <iostream>
#include "Windows.h"

int main() {
    SetConsoleOutputCP( CP_UTF8 );
    std::cout << "\xc3\xbc";
    int fail = std::cout.fail() ? '1': '0';
    fputc( fail, stdout );
    fputs( "\xc3\xbc", stdout );
}
I simply compiled with cl /EHsc test.cpp.
Windows XP: Output in a console window is
ü0ü (translated to Codepage 1252; originally it shows some line-drawing
characters in the default codepage, perhaps 437). When I change the settings
of the console window to use the "Lucida Console" character set and run my
test.exe again, the output changes to 1ü, which means:
the character ü can be written using fputs and its UTF-8 encoding C3 BC;
std::cout does not work for whatever reason;
the stream's failbit is set after trying to write the character.
Windows 7: Output using Consolas is ��0ü. Even more interesting: the correct bytes are probably written (at least when redirecting the output to a file) and the stream state is ok, but the two bytes are written as separate characters.
I tried to raise this issue on "Microsoft Connect" (see here),
but MS has not been very helpful. You might as well look here
as something similar has been asked before.
Can you reproduce this problem?
What am I doing wrong? Shouldn't the std::cout and the fputs have the same
effect?
SOLVED (sort of): Following mike.dld's idea, I implemented a std::stringbuf that does the conversion from UTF-8 to Windows-1252 in sync() and replaced the streambuf of std::cout with this converter (see my comment on mike.dld's answer). A sketch of the idea is shown below.
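The converter itself isn't shown above; a minimal sketch of the idea (my reconstruction using MultiByteToWideChar/WideCharToMultiByte, not the code actually used) might look like this:
#include <windows.h>
#include <cstdio>
#include <sstream>
#include <string>
#include <vector>

// Collect UTF-8 text in a std::stringbuf and re-encode it as Windows-1252 in
// sync(), which runs whenever the stream is flushed.
class Utf8ToCp1252Buf : public std::stringbuf {
protected:
    int sync() override {
        std::string utf8 = str();
        if (!utf8.empty()) {
            // UTF-8 -> UTF-16
            int wlen = MultiByteToWideChar(CP_UTF8, 0, utf8.data(), (int)utf8.size(), NULL, 0);
            std::vector<wchar_t> wide(wlen);
            MultiByteToWideChar(CP_UTF8, 0, utf8.data(), (int)utf8.size(), wide.data(), wlen);
            // UTF-16 -> Windows-1252 (unmappable characters become the default char)
            int alen = WideCharToMultiByte(1252, 0, wide.data(), wlen, NULL, 0, NULL, NULL);
            std::vector<char> ansi(alen);
            WideCharToMultiByte(1252, 0, wide.data(), wlen, ansi.data(), alen, NULL, NULL);
            std::fwrite(ansi.data(), 1, ansi.size(), stdout);
            std::fflush(stdout);
        }
        str("");  // reset the accumulated buffer
        return 0;
    }
};

// Usage: static Utf8ToCp1252Buf buf; std::cout.rdbuf(&buf);
// After that, std::cout << "\xc3\xbc" << std::flush; shows 'ü' on a CP1252 console.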
I understand the question is quite old, but if someone is still interested, below is my solution. I've implemented a quite simple std::streambuf descendant and then passed it to each of the standard streams at the very beginning of program execution.
This allows you to use UTF-8 everywhere in your program. On input, data is taken from the console in Unicode and then converted and returned to you in UTF-8. On output the opposite is done: data is taken from you in UTF-8, converted to Unicode and sent to the console. No issues found so far.
Also note that this solution doesn't require any codepage modification, whether with SetConsoleCP, SetConsoleOutputCP, chcp, or anything else.
That's the stream buffer:
class ConsoleStreamBufWin32 : public std::streambuf
{
public:
ConsoleStreamBufWin32(DWORD handleId, bool isInput);
protected:
// std::basic_streambuf
virtual std::streambuf* setbuf(char_type* s, std::streamsize n);
virtual int sync();
virtual int_type underflow();
virtual int_type overflow(int_type c = traits_type::eof());
private:
HANDLE const m_handle;
bool const m_isInput;
std::string m_buffer;
};
ConsoleStreamBufWin32::ConsoleStreamBufWin32(DWORD handleId, bool isInput) :
m_handle(::GetStdHandle(handleId)),
m_isInput(isInput),
m_buffer()
{
if (m_isInput)
{
setg(0, 0, 0);
}
}
std::streambuf* ConsoleStreamBufWin32::setbuf(char_type* /*s*/, std::streamsize /*n*/)
{
return 0;
}
int ConsoleStreamBufWin32::sync()
{
if (m_isInput)
{
::FlushConsoleInputBuffer(m_handle);
setg(0, 0, 0);
}
else
{
if (m_buffer.empty())
{
return 0;
}
std::wstring const wideBuffer = utf8_to_wstring(m_buffer);
DWORD writtenSize;
::WriteConsoleW(m_handle, wideBuffer.c_str(), wideBuffer.size(), &writtenSize, NULL);
}
m_buffer.clear();
return 0;
}
ConsoleStreamBufWin32::int_type ConsoleStreamBufWin32::underflow()
{
if (!m_isInput)
{
return traits_type::eof();
}
if (gptr() >= egptr())
{
wchar_t wideBuffer[128];
DWORD readSize;
if (!::ReadConsoleW(m_handle, wideBuffer, ARRAYSIZE(wideBuffer) - 1, &readSize, NULL))
{
return traits_type::eof();
}
wideBuffer[readSize] = L'\0';
m_buffer = wstring_to_utf8(wideBuffer);
setg(&m_buffer[0], &m_buffer[0], &m_buffer[0] + m_buffer.size());
if (gptr() >= egptr())
{
return traits_type::eof();
}
}
return sgetc();
}
ConsoleStreamBufWin32::int_type ConsoleStreamBufWin32::overflow(int_type c)
{
if (m_isInput)
{
return traits_type::eof();
}
m_buffer += traits_type::to_char_type(c);
return traits_type::not_eof(c);
}
The usage then is as follows:
template<typename StreamT>
inline void FixStdStream(DWORD handleId, bool isInput, StreamT& stream)
{
if (::GetFileType(::GetStdHandle(handleId)) == FILE_TYPE_CHAR)
{
stream.rdbuf(new ConsoleStreamBufWin32(handleId, isInput));
}
}
// ...
int main()
{
FixStdStream(STD_INPUT_HANDLE, true, std::cin);
FixStdStream(STD_OUTPUT_HANDLE, false, std::cout);
FixStdStream(STD_ERROR_HANDLE, false, std::cerr);
// ...
std::cout << "\xc3\xbc" << std::endl;
// ...
}
The wstring_to_utf8 and utf8_to_wstring functions left out above can easily be implemented with the WideCharToMultiByte and MultiByteToWideChar WinAPI functions, for example along these lines:
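A sketch (my own, not mike.dld's original code), assuming plain UTF-16 wchar_t strings on Windows:
#include <windows.h>
#include <string>

std::wstring utf8_to_wstring(const std::string& s)
{
    if (s.empty()) return std::wstring();
    int len = ::MultiByteToWideChar(CP_UTF8, 0, s.data(), (int)s.size(), NULL, 0);
    std::wstring result(len, L'\0');
    ::MultiByteToWideChar(CP_UTF8, 0, s.data(), (int)s.size(), &result[0], len);
    return result;
}

std::string wstring_to_utf8(const std::wstring& s)
{
    if (s.empty()) return std::string();
    int len = ::WideCharToMultiByte(CP_UTF8, 0, s.data(), (int)s.size(), NULL, 0, NULL, NULL);
    std::string result(len, '\0');
    ::WideCharToMultiByte(CP_UTF8, 0, s.data(), (int)s.size(), &result[0], len, NULL, NULL);
    return result;
}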
Oi. Congratulations on finding a way to change the code page of the console from inside your program. I didn't know about that call, I always had to use chcp.
I'm guessing the C++ default locale is getting involved. By default it will use the code page provided by GetThreadLocale() to determine the text encoding of non-wstring stuff. This generally defaults to CP1252. You could try using SetThreadLocale() to get to UTF-8 (if it even does that, I can't recall), in the hope that std::locale defaults to something that can handle your UTF-8 encoding.
It's time to close this now. Stephan T. Lavavej says the behaviour is "by design", although I cannot follow this explanation.
My current knowledge is: Windows XP console in UTF-8 codepage does not work with C++ iostreams.
Windows XP is getting out of fashion now and so does VS 2008. I'd be interested to hear if the problem still exists on newer Windows systems.
On Windows 7 the effect is probably due to the way the C++ streams output characters. As seen in an answer to Properly print utf8 characters in windows console, UTF-8 output fails with C stdio as well when printing one byte after another, like putc('\xc3'); putc('\xbc');. Perhaps this is what the C++ streams do here.
I just followed mike.dld's answer in this question and added printf support for UTF-8 strings.
As mkluwe mentioned in his answer, by default the printf function outputs to the console byte by byte, and the console can't handle single bytes correctly. My method is quite simple: I use vsnprintf to format the whole content into an internal string buffer and then dump the buffer to std::cout.
Here is the full testing code:
#include <iostream>
#include <locale>
#include <windows.h>
#include <cstdarg>
#include <cstdio>
#include <cstdlib>
using namespace std;
// https://stackoverflow.com/questions/4358870/convert-wstring-to-string-encoded-in-utf-8
#include <codecvt>
#include <string>
// convert UTF-8 string to wstring
std::wstring utf8_to_wstring (const std::string& str)
{
std::wstring_convert<std::codecvt_utf8<wchar_t>> myconv;
return myconv.from_bytes(str);
}
// convert wstring to UTF-8 string
std::string wstring_to_utf8 (const std::wstring& str)
{
std::wstring_convert<std::codecvt_utf8<wchar_t>> myconv;
return myconv.to_bytes(str);
}
// https://stackoverflow.com/questions/1660492/utf-8-output-on-windows-console
// mike.dld's answer
class ConsoleStreamBufWin32 : public std::streambuf
{
public:
ConsoleStreamBufWin32(DWORD handleId, bool isInput);
protected:
// std::basic_streambuf
virtual std::streambuf* setbuf(char_type* s, std::streamsize n);
virtual int sync();
virtual int_type underflow();
virtual int_type overflow(int_type c = traits_type::eof());
private:
HANDLE const m_handle;
bool const m_isInput;
std::string m_buffer;
};
ConsoleStreamBufWin32::ConsoleStreamBufWin32(DWORD handleId, bool isInput) :
m_handle(::GetStdHandle(handleId)),
m_isInput(isInput),
m_buffer()
{
if (m_isInput)
{
setg(0, 0, 0);
}
}
std::streambuf* ConsoleStreamBufWin32::setbuf(char_type* /*s*/, std::streamsize /*n*/)
{
return 0;
}
int ConsoleStreamBufWin32::sync()
{
if (m_isInput)
{
::FlushConsoleInputBuffer(m_handle);
setg(0, 0, 0);
}
else
{
if (m_buffer.empty())
{
return 0;
}
std::wstring const wideBuffer = utf8_to_wstring(m_buffer);
DWORD writtenSize;
::WriteConsoleW(m_handle, wideBuffer.c_str(), wideBuffer.size(), &writtenSize, NULL);
}
m_buffer.clear();
return 0;
}
ConsoleStreamBufWin32::int_type ConsoleStreamBufWin32::underflow()
{
if (!m_isInput)
{
return traits_type::eof();
}
if (gptr() >= egptr())
{
wchar_t wideBuffer[128];
DWORD readSize;
if (!::ReadConsoleW(m_handle, wideBuffer, ARRAYSIZE(wideBuffer) - 1, &readSize, NULL))
{
return traits_type::eof();
}
wideBuffer[readSize] = L'\0';
m_buffer = wstring_to_utf8(wideBuffer);
setg(&m_buffer[0], &m_buffer[0], &m_buffer[0] + m_buffer.size());
if (gptr() >= egptr())
{
return traits_type::eof();
}
}
return sgetc();
}
ConsoleStreamBufWin32::int_type ConsoleStreamBufWin32::overflow(int_type c)
{
if (m_isInput)
{
return traits_type::eof();
}
m_buffer += traits_type::to_char_type(c);
return traits_type::not_eof(c);
}
template<typename StreamT>
inline void FixStdStream(DWORD handleId, bool isInput, StreamT& stream)
{
if (::GetFileType(::GetStdHandle(handleId)) == FILE_TYPE_CHAR)
{
stream.rdbuf(new ConsoleStreamBufWin32(handleId, isInput));
}
}
// some of the code is from this blog
// https://blog.csdn.net/witton/article/details/108087135
#define printf(fmt, ...) __fprint(stdout, fmt, ##__VA_ARGS__ )

int __vfprint(FILE *fp, const char *fmt, va_list va)
{
    // https://stackoverflow.com/questions/7315936/which-of-sprintf-snprintf-is-more-secure
    // Note: a va_list must be formatted with vsnprintf, not snprintf, and it has
    // to be copied because it is consumed by the sizing call.
    va_list va_size;
    va_copy(va_size, va);
    size_t nbytes = vsnprintf(NULL, 0, fmt, va_size) + 1; /* +1 for the '\0' */
    va_end(va_size);
    char *str = (char*)malloc(nbytes);
    vsnprintf(str, nbytes, fmt, va);
    std::cout << str;
    free(str);
    return nbytes;
}

int __fprint(FILE *fp, const char *fmt, ...)
{
    va_list va;
    va_start(va, fmt);
    int n = __vfprint(fp, fmt, va);
    va_end(va);
    return n;
}
int main()
{
FixStdStream(STD_INPUT_HANDLE, true, std::cin);
FixStdStream(STD_OUTPUT_HANDLE, false, std::cout);
FixStdStream(STD_ERROR_HANDLE, false, std::cerr);
// ...
std::cout << "\xc3\xbc" << std::endl;
printf("\xc3\xbc");
// ...
return 0;
}
The source code is saved in UTF-8 format, built under MSYS2's GCC, and run under Windows 7 64-bit. Here is the result:
ü
ü