AES128 in libgcrypt not encrypting - c++

I've been trying to use libgcrypt for a small cryptography project of mine, but I can't seem to implement the encryption/decryption correctly. The following is the class and its usage.
#include <iostream>
#include <string>
#include <cstdlib>
#include <gcrypt.h>
#include "aes.h"
#define GCRY_CIPHER GCRY_CIPHER_AES128
#define GCRY_MODE GCRY_CIPHER_MODE_ECB
using namespace std;
aes::aes(string a) {
    key = a;
    keyLength = gcry_cipher_get_algo_keylen(GCRY_CIPHER);
    gcry_control (GCRYCTL_DISABLE_SECMEM, 0);
    gcry_control (GCRYCTL_INITIALIZATION_FINISHED, 0);
    gcry_cipher_open(&handle, GCRY_CIPHER, GCRY_MODE, 0);
    gcry_cipher_setkey(handle, key.c_str(), keyLength);
}

string aes::encrypt(string text) {
    size_t textLength = text.size() + 1;
    char * encBuffer = (char *)malloc(textLength);
    gcry_cipher_encrypt(handle, encBuffer, textLength, text.c_str(), textLength);
    string ret (encBuffer);
    return ret;
}

string aes::decrypt(string text) {
    size_t textLength = text.size() + 1;
    char * decBuffer = (char *)malloc(textLength);
    gcry_cipher_decrypt(handle, decBuffer, textLength, text.c_str(), textLength);
    string ret (decBuffer);
    return ret;
}
And I use it in a main function like so:
...
aes bb = aes("one test AES key");
string test = "Some Message";
string enc = bb.encrypt(test);
string dec = bb.decrypt(enc);
for (size_t index = 0; index < enc.size(); index++)
    printf("%c", enc[index]);
printf("\n");
cout << dec << endl;
...
And the output is:
BBBBBBBBBBBBB
�n�S[
The odd thing is, I made a test program with almost exactly the same statements, and it works perfectly. It started falling apart when I tried to package it into a class. The following is the code for that program, if anyone wants to see it.
#include <cstdlib>
#include <iostream>
#include <string>
#include <gcrypt.h>
using namespace std;
#define GCRY_CIPHER GCRY_CIPHER_AES128 // Pick the cipher here
#define GCRY_MODE GCRY_CIPHER_MODE_ECB // Pick the cipher mode here
void aesTest(void)
{
    gcry_cipher_hd_t handle;
    size_t keyLength = gcry_cipher_get_algo_keylen(GCRY_CIPHER);
    string txtBuffer ("123456789 abcdefghijklmnopqrstuvwzyz ABCDEFGHIJKLMNOPQRSTUVWZYZ");
    size_t txtLength = txtBuffer.size() + 1; // string plus termination
    char * encBuffer = (char *)malloc(txtLength);
    char * outBuffer = (char *)malloc(txtLength);
    const char * key = "one test AES key"; // 16 bytes
    gcry_control (GCRYCTL_DISABLE_SECMEM, 0);
    gcry_control (GCRYCTL_INITIALIZATION_FINISHED, 0);
    gcry_cipher_open(&handle, GCRY_CIPHER, GCRY_MODE, 0);
    gcry_cipher_setkey(handle, key, keyLength);
    gcry_cipher_encrypt(handle, encBuffer, txtLength, txtBuffer.c_str(), txtLength);
    gcry_cipher_decrypt(handle, outBuffer, txtLength, encBuffer, txtLength);
    size_t index;
    printf("encBuffer = ");
    for (index = 0; index < txtLength; index++)
        printf("%c", encBuffer[index]);
    printf("\n");
    printf("outBuffer = %s\n", outBuffer);
    gcry_cipher_close(handle);
    free(encBuffer);
    free(outBuffer);
}
int main() {
    aesTest();
    return 0;
}

When you use a std::string to capture the encrypted data, you are possibly losing data because the encrypted bytes can contain '\0', and the std::string constructor that takes a plain char pointer stops at the first one.
Try using std::vector<char> instead.
void aes::encrypt(string text, std::vector<char>& ret) {
    size_t textLength = text.size() + 1;
    ret.resize(textLength);
    gcry_cipher_encrypt(handle, ret.data(), textLength, text.c_str(), textLength);
}

string aes::decrypt(std::vector<char> const& text) {
    size_t textLength = text.size(); // the vector already holds the complete ciphertext
    // Since you are in C++ land, use new and delete
    // instead of malloc and free.
    char * decBuffer = new char[textLength];
    gcry_cipher_decrypt(handle, decBuffer, textLength, text.data(), textLength);
    string ret (decBuffer);
    delete [] decBuffer;
    return ret;
}
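For completeness, a minimal usage sketch of the reworked interface (assuming the class declaration is updated to match these signatures). Note that ECB operates on whole 16-byte blocks, so you may also need to pad the input and check the return codes of the gcry_* calls, which are ignored here:

aes bb("one test AES key");
std::vector<char> enc;
bb.encrypt("Some Message", enc);   // enc holds the raw ciphertext, '\0' bytes and all
std::string dec = bb.decrypt(enc); // round-trips without truncation
std::cout << dec << std::endl;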

Related

C++ OPENSSL - How to convert OPENSSL output to a readable text and store it to a variable (if possible)

I have the working code below using OpenSSL AES-256-CBC to encrypt/decrypt.
It is working, but I am missing something really important: how to CONVERT the encryption result to readable text and STORE it in a STRING variable (for later use).
For example, I need to see something like this: UkV8ecEWh+b1Dz0ZdwMzFVFieCI5Ps3fxYrfqAoPmOY=
I am trying hard to find out how to do that, and what format OpenSSL produces as the encryption output (binary format?).
P.S. Don't worry about the hashes below; they are not used in production.
Thanks in advance!!
Here is my code so far:
#include <iostream>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <openssl/evp.h>
#include <openssl/aes.h>
#include <openssl/rand.h>
using namespace std;
// HEX PRINT
static void hex_print(const void* pv, size_t len)
{
    const unsigned char* p = (const unsigned char*)pv;
    if (NULL == pv)
        printf("NULL");
    else
    {
        size_t i = 0;
        for (; i < len; ++i)
            printf("%02X ", *p++);
    }
    printf("\n");
}
// Starting MAIN function
int main()
{
    int keylength = 256;
    unsigned char aes_key[] = "1Tb2lYkqstqbh9lPAbeWpQOs3seHk6cX";
    // Message we want to encrypt
    unsigned char aes_input[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 abcdefghijklmnopqrstuvwxyz";
    size_t inputslength = sizeof(aes_input) - 1; // -1 because we don't want to encrypt the \0 character
    // initialization vector IV - same for Encryption and Decryption
    unsigned char iv_enc[] = "JxebB512Gl3brfx4";
    unsigned char iv_dec[] = "JxebB512Gl3brfx4";
    // buffers for encryption and decryption
    const size_t encslength = inputslength;
    unsigned char enc_out[257];
    unsigned char dec_out[257];
    memset(enc_out, 0, sizeof(enc_out));
    memset(dec_out, 0, sizeof(dec_out));
    // Encryption START
    AES_KEY enc_key, dec_key;
    AES_set_encrypt_key(aes_key, keylength, &enc_key);
    AES_cbc_encrypt(aes_input, enc_out, inputslength, &enc_key, iv_enc, AES_ENCRYPT);
    // Decryption START
    AES_set_decrypt_key(aes_key, keylength, &dec_key);
    AES_cbc_encrypt(enc_out, dec_out, encslength, &dec_key, iv_dec, AES_DECRYPT);
    // Printing Results
    printf("original: \t");
    hex_print(aes_input, sizeof(aes_input));
    cout << aes_input << endl;
    printf("encrypted: \t");
    hex_print(enc_out, sizeof(enc_out));
    cout << enc_out << endl;
    printf("decrypt: \t");
    hex_print(dec_out, sizeof(dec_out));
    cout << dec_out << endl;
    return 0;
}
All right, thanks for the tips @RemyLebeau and @PaulSanders!!
I could resolve the issue using another tip from here -->
Base64 C++
Working really fine now!!
Thanks much!!
Here is the code for Base64 "encode" and "decode", just in case someone wants to do the same. Very useful!!
#include <string>
#include <vector>
using std::string;
using std::vector;

typedef unsigned char uchar;
static const string b = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

static string base64_encode(const string &in) {
    string out;
    int val = 0, valb = -6;
    for (uchar c : in) {
        val = (val << 8) + c;
        valb += 8;
        while (valb >= 0) {
            out.push_back(b[(val >> valb) & 0x3F]);
            valb -= 6;
        }
    }
    if (valb > -6) out.push_back(b[((val << 8) >> (valb + 8)) & 0x3F]);
    while (out.size() % 4) out.push_back('=');
    return out;
}

static string base64_decode(const string &in) {
    string out;
    vector<int> T(256, -1);
    for (int i = 0; i < 64; i++) T[b[i]] = i;
    int val = 0, valb = -8;
    for (uchar c : in) {
        if (T[c] == -1) break;
        val = (val << 6) + T[c];
        valb += 6;
        if (valb >= 0) {
            out.push_back(char((val >> valb) & 0xFF));
            valb -= 8;
        }
    }
    return out;
}
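A quick usage sketch tying this back to the question, using enc_out and encslength from the question's code above (the Base64 text is what you would store in your string variable):

// Store the raw CBC output as printable text ...
string printable = base64_encode(string((char *)enc_out, encslength));
cout << "encrypted (Base64): " << printable << endl; // e.g. "UkV8ecEWh+b1..."
// ... and turn it back into raw bytes before decrypting later.
string raw = base64_decode(printable);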

Decrypt AES in C++ Example

I need some help decrypting a char array in C++ using AES decryption with the OpenSSL library. I have already implemented encryption and it works fine, but decryption is not working.
This is the Encrypt Function:
string Encrypt(char *Key, char *Msg, int size)
{
    static char* Res;
    static const char* const lut = "0123456789ABCDEF";
    string output;
    AES_KEY enc_key;
    Res = (char *)malloc(size);
    AES_set_encrypt_key((unsigned char *)Key, 128, &enc_key);
    for (int vuelta = 0; vuelta <= size; vuelta += 16)
    {
        AES_ecb_encrypt((unsigned char *)Msg + vuelta, (unsigned char *)Res + vuelta, &enc_key, AES_ENCRYPT);
    }
    output.reserve(2 * size);
    for (size_t i = 0; i < size; ++i)
    {
        const unsigned char c = Res[i];
        output.push_back(lut[c >> 4]);
        output.push_back(lut[c & 15]);
    }
    free(Res);
    return output;
}
This is the Decrypt Function (not working):
char * Decrypt(char *Key, char *Msg, int size)
{
    static char* Res;
    AES_KEY dec_key;
    Res = (char *)malloc(size);
    AES_set_decrypt_key((unsigned char *)Key, 128, &dec_key);
    for (int vuelta = 0; vuelta <= size; vuelta += 16)
    {
        AES_ecb_encrypt((unsigned char *)Msg + vuelta, (unsigned char *)Res + vuelta, &dec_key, AES_DECRYPT);
    }
    return (Res);
}
This is an example of the main function that calls the methods. The problem is that no matter how I print the "Res" variable in the Decrypt function, it always shows random ASCII values, and I would like to show the result in a string like the Encrypt function does:
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <string.h>
#include "openSSL/aes.h"
using namespace std;

int main(int argc, char const *argv[])
{
    char key[16];
    char message[128];
    char enc_message[128];
    string s_key = "THIS_IS_THE_KEY_";
    string s_message = "Hello World !!!";
    memset(key, 0, sizeof(key));
    strcpy(key, s_key.c_str());
    memset(message, 0, sizeof(message));
    strcpy(message, s_message.c_str());
    string response = Encrypt(key, message, sizeof(message));
    cout << "This is the Encrypted Message: " << response << endl;
    memset(enc_message, 0, sizeof(enc_message));
    strcpy(enc_message, response.c_str());
    Decrypt(key, enc_message, sizeof(enc_message));
    return 0;
}
Any improvements to these methods?
I wanted to post how I solved it: the problem with my example was that I was trying to use the decrypt function with a HEXADECIMAL STRING, when it should be done with an ASCII STRING holding the values exactly as delivered by the encryption function.
That is, instead of trying to decrypt a string like this: 461D019896EFA3
It must be decrypted with a string like this: #(%_!#$
After that, the decrypted result is delivered in ASCII values. They must be converted to hexadecimal and finally to a string.
Here is the example that worked for me:
string Decrypt_string(char *Key, string HEX_Message, int size)
{
    static const char* const lut = "0123456789ABCDEF";
    int i = 0;
    char* Res;
    AES_KEY dec_key;
    string auxString, output, newString;
    // Turn the hex text back into the raw bytes that Encrypt produced.
    for (i = 0; i < size; i += 2)
    {
        string byte = HEX_Message.substr(i, 2);
        char chr = (char)(int)strtol(byte.c_str(), NULL, 16);
        auxString.push_back(chr);
    }
    const char *Msg = auxString.c_str();
    const int binSize = (int)auxString.size(); // raw length, half the hex length
    Res = (char *)malloc(binSize);
    AES_set_decrypt_key((unsigned char *)Key, 128, &dec_key);
    for (i = 0; i < binSize; i += 16) // '<' rather than '<=' keeps the last block in bounds
    {
        AES_ecb_encrypt((unsigned char *)Msg + i, (unsigned char *)Res + i, &dec_key, AES_DECRYPT);
    }
    // Mirror the Encrypt function: hex-encode the decrypted bytes ...
    output.reserve(2 * binSize);
    for (size_t i = 0; i < (size_t)binSize; ++i)
    {
        const unsigned char c = Res[i];
        output.push_back(lut[c >> 4]);
        output.push_back(lut[c & 15]);
    }
    // ... then turn the hex back into characters for the returned string.
    int len = output.length();
    for (int i = 0; i < len; i += 2)
    {
        string byte = output.substr(i, 2);
        char chr = (char)(int)strtol(byte.c_str(), NULL, 16);
        newString.push_back(chr);
    }
    free(Res);
    return newString;
}
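The hex/raw conversions embedded in that function can also be pulled out into two small helpers; this is just a sketch of the same logic, not part of the original answer:

#include <cstdlib>
#include <string>

// Raw bytes -> hex text, e.g. "\x46\x1D" -> "461D"
std::string bytes_to_hex(const std::string &bytes)
{
    static const char *lut = "0123456789ABCDEF";
    std::string hex;
    hex.reserve(2 * bytes.size());
    for (size_t i = 0; i < bytes.size(); ++i) {
        unsigned char c = (unsigned char)bytes[i];
        hex.push_back(lut[c >> 4]);
        hex.push_back(lut[c & 15]);
    }
    return hex;
}

// Hex text -> raw bytes, e.g. "461D" -> "\x46\x1D"
std::string hex_to_bytes(const std::string &hex)
{
    std::string bytes;
    bytes.reserve(hex.size() / 2);
    for (size_t i = 0; i + 1 < hex.size(); i += 2)
        bytes.push_back((char)strtol(hex.substr(i, 2).c_str(), NULL, 16));
    return bytes;
}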

C++ Output UTF-8 strings as UTF-16 to std::cout

I have a lot of code written based on UTF-8 using C++03, STL and Boost 1.54.
All the code outputs data to the console via std::cout or std::cerr.
I do not want to introduce a new library to my code base or switch to C++11,
but I want to port the code to Windows.
Rewriting all the code to use std::wcout or std::wcerr instead of
std::cout or std::cerr is not what I intend, but I still want to display
everything on the console as UTF-16.
Is there a way to change std::cout and std::cerr to transform all char based data (which is UTF-8 encoded) to wchar_t based data (which would be UTF-16 encoded) before writing it to the console?
It would be great to see a solution for this using just C++03, STL and Boost 1.54.
I found that Boost Locale has conversion functions for single strings and there is a UTF-8 to UTF-32 iterator available in Boost Spirit, but I could not find any codecvt facet to transform UTF-8 to UTF-16 without using an additional library or switching to C++11.
Thanks in advance.
PS: I know it is doable with something like this, but I hope to find a better solution here.
I did not come up with a better solution than the one already hinted at.
So I will just share the solution based on streambuf here for anyone who is interested.
Hopefully, someone will come up with a better one and share it here.
#include <cstdlib>
#include <cstdio>
#include <cstring>
#include <iostream>
#include <string>

#if defined(_WIN32) && defined(_UNICODE) && (defined(__MSVCRT__) || defined(_MSC_VER))
#define TEST_ARG_TYPE wchar_t
#else /* not windows, unicode */
#define TEST_ARG_TYPE char
#endif /* windows, unicode */

#ifndef _O_U16TEXT
#define _O_U16TEXT 0x20000
#endif

static size_t countValidUtf8Bytes(const unsigned char * buf, const size_t size) {
    size_t i, charSize;
    const unsigned char * src = buf;
    for (i = 0; i < size && (*src) != 0; i += charSize, src += charSize) {
        charSize = 0;
        if ((*src) >= 0xFC) {
            charSize = 6;
        } else if ((*src) >= 0xF8) {
            charSize = 5;
        } else if ((*src) >= 0xF0) {
            charSize = 4;
        } else if ((*src) >= 0xE0) {
            charSize = 3;
        } else if ((*src) >= 0xC0) {
            charSize = 2;
        } else if ((*src) >= 0x80) {
            /* Skip continuation bytes of a UTF-8 character (should never happen).
             * The loop increment alone advances charSize, once per byte. */
            for (; (i + charSize) < size && src[charSize] != 0 && src[charSize] >= 0x80; charSize++)
                ;
        } else {
            /* ASCII character. */
            charSize = 1;
        }
        if ((i + charSize) > size) break;
    }
    return i;
}

#if defined(_WIN32) && defined(_UNICODE) && (defined(__MSVCRT__) || defined(_MSC_VER))
#include <locale>
#include <streambuf>
#include <boost/locale.hpp>
extern "C" {
#include <fcntl.h>
#include <io.h>
#include <windows.h>
int _CRT_glob;
extern void __wgetmainargs(int *, wchar_t ***, wchar_t ***, int, int *);
}

class Utf8ToUtf16Buffer : public std::basic_streambuf< char, std::char_traits<char> > {
private:
    char * outBuf;
    FILE * outFd;
public:
    static const size_t BUFFER_SIZE = 1024;
    typedef std::char_traits<char> traits_type;
    typedef traits_type::int_type int_type;
    typedef traits_type::pos_type pos_type;
    typedef traits_type::off_type off_type;
    explicit Utf8ToUtf16Buffer(FILE * o) : outBuf(new char[BUFFER_SIZE]), outFd(o) {
        /* Initialize the put pointer. Overflow won't get called until this
         * buffer is filled up, so we need to use valid pointers.
         */
        this->setp(outBuf, outBuf + BUFFER_SIZE - 1);
    }
    ~Utf8ToUtf16Buffer() {
        delete[] outBuf;
    }
protected:
    virtual int_type overflow(int_type c);
    virtual int_type sync();
};

Utf8ToUtf16Buffer::int_type Utf8ToUtf16Buffer::overflow(Utf8ToUtf16Buffer::int_type c) {
    char * iBegin = this->outBuf;
    char * iEnd = this->pptr();
    int_type result = traits_type::not_eof(c);
    /* If this is the end, add an eof character to the buffer.
     * This is why the pointers passed to setp are off by 1
     * (to reserve room for this).
     */
    if ( ! traits_type::eq_int_type(c, traits_type::eof()) ) {
        *iEnd = traits_type::to_char_type(c);
        iEnd++;
    }
    /* Calculate output data length. */
    int_type iLen = static_cast<int_type>(iEnd - iBegin);
    int_type iLenU8 = static_cast<int_type>(
        countValidUtf8Bytes(reinterpret_cast<const unsigned char *>(iBegin), static_cast<size_t>(iLen))
    );
    /* Convert string to UTF-16 and write to defined file descriptor.
     * fputws is used rather than fwprintf so that '%' in the data is harmless. */
    if (fputws(boost::locale::conv::utf_to_utf<wchar_t>(std::string(outBuf, outBuf + iLenU8)).c_str(), this->outFd) < 0) {
        /* Failed to write data to output file descriptor. */
        result = traits_type::eof();
    }
    /* Reset the put pointers to indicate that the buffer is free
     * (again reserving one slot for the overflow character). */
    if (iLenU8 == iLen) {
        this->setp(outBuf, outBuf + BUFFER_SIZE - 1);
    } else {
        /* Move incomplete UTF-8 characters remaining in buffer. */
        const size_t overhead = static_cast<size_t>(iLen - iLenU8);
        memmove(outBuf, outBuf + iLenU8, overhead);
        this->setp(outBuf + overhead, outBuf + BUFFER_SIZE - 1);
    }
    return result;
}

Utf8ToUtf16Buffer::int_type Utf8ToUtf16Buffer::sync() {
    return traits_type::eq_int_type(this->overflow(traits_type::eof()), traits_type::eof()) ? -1 : 0;
}
#endif /* windows, unicode */

int test_main(int argc, TEST_ARG_TYPE ** argv);

#if defined(_WIN32) && defined(_UNICODE) && (defined(__MSVCRT__) || defined(_MSC_VER))
int main(/*int argc, char ** argv*/) {
    wchar_t ** wenpv, ** wargv;
    int wargc, si = 0;
    /* this also creates the global variable __wargv */
    __wgetmainargs(&wargc, &wargv, &wenpv, _CRT_glob, &si);
    /* enable UTF-16 output to standard output console */
    _setmode(_fileno(stdout), _O_U16TEXT);
    std::locale::global(boost::locale::generator().generate("UTF-8"));
    Utf8ToUtf16Buffer u8cout(stdout);
    std::streambuf * out = std::cout.rdbuf();
    std::cout.rdbuf(&u8cout);
    /* process user defined main function */
    const int result = test_main(wargc, wargv);
    /* revert stream buffers to let cout clean up remaining memory correctly */
    std::cout.rdbuf(out);
    return result;
#else /* not windows or unicode */
int main(int argc, char ** argv) {
    return test_main(argc, argv);
#endif /* windows, unicode */
}

int test_main(int /*argc*/, TEST_ARG_TYPE ** /*argv*/) {
    const std::string str("\x61\x62\x63\xC3\xA4\xC3\xB6\xC3\xBC\xE3\x81\x82\xE3\x81\x88\xE3\x81\x84\xE3\x82\xA2\xE3\x82\xA8\xE3\x82\xA4\xE4\xBA\x9C\xE6\xB1\x9F\xE6\x84\x8F");
    for (size_t i = 1; i <= str.size(); i++) {
        const std::string part(str.begin(), str.begin() + i);
        const size_t validByteCount = countValidUtf8Bytes(reinterpret_cast<const unsigned char *>(part.c_str()), part.size());
        wprintf(L"i = %u, v = %u\n", static_cast<unsigned int>(i), static_cast<unsigned int>(validByteCount));
        const std::string valid(str.begin(), str.begin() + validByteCount);
        std::cout << valid << std::endl;
        std::cout.flush();
        for (size_t j = 0; j < part.size(); j++) {
            wprintf(L"%02X", static_cast<int>(part[j]) & 0xFF);
        }
        wprintf(L"\n");
    }
    return EXIT_SUCCESS;
}
I feel like this is probably a bad idea, but I think it should still be seen, as it does work provided that the console has the right font.
#include <iostream>
#include <string>
#include <windows.h>
//#include <io.h>
//#include <fcntl.h>

std::wstring UTF8ToUTF16(const char* utf8)
{
    std::wstring utf16;
    int len = MultiByteToWideChar(CP_UTF8, 0, utf8, -1, NULL, 0);
    if (len > 1)
    {
        utf16.resize(len);
        MultiByteToWideChar(CP_UTF8, 0, utf8, -1, &utf16[0], len);
        utf16.resize(len - 1); // drop the terminating L'\0' written by the conversion
    }
    return utf16;
}

std::ostream& operator << (std::ostream& os, const char* data)
{
    //_setmode(_fileno(stdout), _O_U16TEXT);
    SetConsoleCP(1200);
    std::wstring str = UTF8ToUTF16(data);
    DWORD slen = str.size();
    WriteConsoleW(GetStdHandle(STD_OUTPUT_HANDLE), str.c_str(), slen, &slen, nullptr);
    MessageBoxW(NULL, str.c_str(), L"", 0); // debugging aid; remove for normal output
    return os;
}

std::ostream& operator << (std::ostream& os, const std::string& data)
{
    //_setmode(_fileno(stdout), _O_U16TEXT);
    SetConsoleCP(1200);
    std::wstring str = UTF8ToUTF16(&data[0]);
    DWORD slen = str.size();
    WriteConsoleW(GetStdHandle(STD_OUTPUT_HANDLE), str.c_str(), slen, &slen, nullptr);
    return os;
}

std::wostream& operator << (std::wostream& os, const wchar_t* data)
{
    DWORD slen = wcslen(data);
    WriteConsoleW(GetStdHandle(STD_OUTPUT_HANDLE), data, slen, &slen, nullptr);
    return os;
}

std::wostream& operator << (std::wostream& os, const std::wstring& data)
{
    WriteConsoleW(GetStdHandle(STD_OUTPUT_HANDLE), data.c_str(), data.size(), nullptr, nullptr);
    return os;
}

int main()
{
    std::cout << "Россия";
}
And now std::cout and std::wcout both use the WriteConsoleW function. You'd have to overload it for const char*, char*, std::string, char, etc., whatever you need; maybe template it, as sketched below.
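A rough sketch of the "template it" idea, funnelling every string-like overload through one writer (it reuses the UTF8ToUTF16 helper defined above; writeAsUtf16 is an illustrative name):

// One writer for anything convertible to a UTF-8 std::string; the
// individual operator<< overloads then shrink to one-liners.
template <typename StringLike>
std::ostream& writeAsUtf16(std::ostream& os, const StringLike& data)
{
    std::wstring str = UTF8ToUTF16(std::string(data).c_str());
    DWORD slen = static_cast<DWORD>(str.size());
    WriteConsoleW(GetStdHandle(STD_OUTPUT_HANDLE), str.c_str(), slen, &slen, nullptr);
    return os;
}

std::ostream& operator << (std::ostream& os, const char* data)        { return writeAsUtf16(os, data); }
std::ostream& operator << (std::ostream& os, const std::string& data) { return writeAsUtf16(os, data); }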

Espeak SAPI/dll usage on Windows?

Question: I am trying to use the espeak text-to-speech engine.
So far I have got it working wonderfully on Linux (code below).
Now I want to port this basic program to Windows, too, but it's nearly impossible...
Part of the problem is that the Windows DLL only allows AUDIO_OUTPUT_SYNCHRONOUS, which means it requires a callback, but I can't figure out how to play the audio from the callback. First it crashed; then I realized I needed a callback function. Now I get the data in the callback function, but I don't know how to play it, as it is neither a WAV file nor plays automatically as on Linux.
The SourceForge site is rather useless, because it basically says to use the SAPI version, but then there is no example of how to use the SAPI espeak DLL...
Anyway, here's my code, can anybody help?
#ifdef __cplusplus
#include <cstdio>
#include <cstdlib>
#include <cstring>
#else
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#endif
#include <assert.h>
#include <ctype.h>
//#include "speak_lib.h"
#include "espeak/speak_lib.h"
// libespeak-dev: /usr/include/espeak/speak_lib.h
// apt-get install libespeak-dev
// apt-get install libportaudio-dev
// g++ -o mine mine.cpp -lespeak
// g++ -o mine mine.cpp -I/usr/include/espeak/ -lespeak
// gcc -o mine mine.cpp -I/usr/include/espeak/ -lespeak

char voicename[40];
int samplerate;
int quiet = 0;
static char genders[4] = {' ', 'M', 'F', ' '};
//const char *data_path = "/usr/share/"; // /usr/share/espeak-data/
const char *data_path = NULL; // use default path for espeak-data

int strrcmp(const char *s, const char *sub)
{
    int slen = strlen(s);
    int sublen = strlen(sub);
    return memcmp(s + slen - sublen, sub, sublen);
}

char * strrcpy(char *dest, const char *source)
{
    // Pre assertions
    assert(dest != NULL);
    assert(source != NULL);
    assert(dest != source);
    // tk: parentheses
    while ((*dest++ = *source++))
        ;
    return (--dest);
}

const char* GetLanguageVoiceName(const char* pszShortSign)
{
#define LANGUAGE_LENGTH 30
    static char szReturnValue[LANGUAGE_LENGTH];
    memset(szReturnValue, 0, LANGUAGE_LENGTH);
    for (int i = 0; pszShortSign[i] != '\0'; ++i)
        szReturnValue[i] = (char) tolower(pszShortSign[i]);
    const espeak_VOICE **voices;
    espeak_VOICE voice_select;
    voices = espeak_ListVoices(NULL);
    const espeak_VOICE *v;
    for (int ix = 0; (v = voices[ix]) != NULL; ix++)
    {
        if (!strrcmp(v->languages, szReturnValue))
        {
            strcpy(szReturnValue, v->name);
            return szReturnValue;
        }
    } // End for
    strcpy(szReturnValue, "default");
    return szReturnValue;
} // End function GetLanguageVoiceName

void ListVoices()
{
    const espeak_VOICE **voices;
    espeak_VOICE voice_select;
    voices = espeak_ListVoices(NULL);
    const espeak_VOICE *v;
    for (int ix = 0; (v = voices[ix]) != NULL; ix++)
    {
        printf("Shortsign: %s\n", v->languages);
        printf("age: %d\n", v->age);
        printf("gender: %c\n", genders[v->gender]);
        printf("name: %s\n", v->name);
        printf("\n\n");
    } // End for
} // End function ListVoices

int main()
{
    printf("Hello World!\n");
    const char* szVersionInfo = espeak_Info(NULL);
    printf("Espeak version: %s\n", szVersionInfo);
    samplerate = espeak_Initialize(AUDIO_OUTPUT_PLAYBACK, 0, data_path, 0);
    strcpy(voicename, "default");
    // espeak --voices
    strcpy(voicename, "german");
    strcpy(voicename, GetLanguageVoiceName("DE"));
    if (espeak_SetVoiceByName(voicename) != EE_OK)
    {
        printf("Espeak setvoice error...\n");
    }
    static char word[200] = "Hello World";
    strcpy(word, "TV-fäns aufgepasst, es ist 20 Uhr 15. Zeit für Rambo 3");
    strcpy(word, "Unnamed Player wurde zum Opfer von GSG9");
    int speed = 220;
    int volume = 500; // volume in range 0-100, 0=silence
    int pitch = 50;   // base pitch, range 0-100, 50=normal
    // espeak.cpp 625
    espeak_SetParameter(espeakRATE, speed, 0);
    espeak_SetParameter(espeakVOLUME, volume, 0);
    espeak_SetParameter(espeakPITCH, pitch, 0);
    // espeakRANGE: pitch range, range 0-100. 0-monotone, 50=normal
    // espeakPUNCTUATION: which punctuation characters to announce:
    // value in espeak_PUNCT_TYPE (none, all, some),
    espeak_VOICE *voice_spec = espeak_GetCurrentVoice();
    voice_spec->gender = 2; // 0=none 1=male, 2=female,
    //voice_spec->age = age;
    espeak_SetVoiceByProperties(voice_spec);
    espeak_Synth((char*) word, strlen(word) + 1, 0, POS_CHARACTER, 0, espeakCHARS_AUTO, NULL, NULL);
    espeak_Synchronize();
    strcpy(voicename, GetLanguageVoiceName("EN"));
    espeak_SetVoiceByName(voicename);
    strcpy(word, "Geany was fragged by GSG9 Googlebot");
    strcpy(word, "Googlebot");
    espeak_Synth((char*) word, strlen(word) + 1, 0, POS_CHARACTER, 0, espeakCHARS_AUTO, NULL, NULL);
    espeak_Synchronize();
    espeak_Terminate();
    printf("Espeak terminated\n");
    return EXIT_SUCCESS;
}

/*
if (espeak_SetVoiceByName(voicename) != EE_OK)
{
    memset(&voice_select, 0, sizeof(voice_select));
    voice_select.languages = voicename;
    if (espeak_SetVoiceByProperties(&voice_select) != EE_OK)
    {
        fprintf(stderr, "%svoice '%s'\n", err_load, voicename);
        exit(2);
    }
}
*/
The above code is for Linux.
The below code is about as far as I got on Vista x64 (32-bit emulation):
#ifdef __cplusplus
#include <cstdio>
#include <cstdlib>
#include <cstring>
#else
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#endif
#include <assert.h>
#include <ctype.h>
#include "speak_lib.h"
//#include "espeak/speak_lib.h"
// libespeak-dev: /usr/include/espeak/speak_lib.h
// apt-get install libespeak-dev
// apt-get install libportaudio-dev
// g++ -o mine mine.cpp -lespeak
// g++ -o mine mine.cpp -I/usr/include/espeak/ -lespeak
// gcc -o mine mine.cpp -I/usr/include/espeak/ -lespeak

char voicename[40];
int iSampleRate;
int quiet = 0;
static char genders[4] = {' ', 'M', 'F', ' '};
//const char *data_path = "/usr/share/"; // /usr/share/espeak-data/
//const char *data_path = NULL; // use default path for espeak-data
const char *data_path = "C:\\Users\\Username\\Desktop\\espeak-1.43-source\\espeak-1.43-source\\";

int strrcmp(const char *s, const char *sub)
{
    int slen = strlen(s);
    int sublen = strlen(sub);
    return memcmp(s + slen - sublen, sub, sublen);
}

char * strrcpy(char *dest, const char *source)
{
    // Pre assertions
    assert(dest != NULL);
    assert(source != NULL);
    assert(dest != source);
    // tk: parentheses
    while ((*dest++ = *source++))
        ;
    return (--dest);
}

const char* GetLanguageVoiceName(const char* pszShortSign)
{
#define LANGUAGE_LENGTH 30
    static char szReturnValue[LANGUAGE_LENGTH];
    memset(szReturnValue, 0, LANGUAGE_LENGTH);
    for (int i = 0; pszShortSign[i] != '\0'; ++i)
        szReturnValue[i] = (char) tolower(pszShortSign[i]);
    const espeak_VOICE **voices;
    espeak_VOICE voice_select;
    voices = espeak_ListVoices(NULL);
    const espeak_VOICE *v;
    for (int ix = 0; (v = voices[ix]) != NULL; ix++)
    {
        if (!strrcmp(v->languages, szReturnValue))
        {
            strcpy(szReturnValue, v->name);
            return szReturnValue;
        }
    } // End for
    strcpy(szReturnValue, "default");
    return szReturnValue;
} // End function GetLanguageVoiceName

void ListVoices()
{
    const espeak_VOICE **voices;
    espeak_VOICE voice_select;
    voices = espeak_ListVoices(NULL);
    const espeak_VOICE *v;
    for (int ix = 0; (v = voices[ix]) != NULL; ix++)
    {
        printf("Shortsign: %s\n", v->languages);
        printf("age: %d\n", v->age);
        printf("gender: %c\n", genders[v->gender]);
        printf("name: %s\n", v->name);
        printf("\n\n");
    } // End for
} // End function ListVoices

/* Callback from espeak. Directly speaks using AudioTrack. */
#define LOGI(x) printf("%s\n", x)

static int AndroidEspeakDirectSpeechCallback(short *wav, int numsamples, espeak_EVENT *events)
{
    char buf[100];
    sprintf(buf, "AndroidEspeakDirectSpeechCallback: %d samples", numsamples);
    LOGI(buf);
    if (wav == NULL)
    {
        LOGI("Null: speech has completed");
    }
    if (numsamples > 0)
    {
        //audout->write(wav, sizeof(short) * numsamples);
        sprintf(buf, "AudioTrack wrote: %d bytes", (int)(sizeof(short) * numsamples));
        LOGI(buf);
    }
    return 0; // continue synthesis (1 is to abort)
}

static int AndroidEspeakSynthToFileCallback(short *wav, int numsamples, espeak_EVENT *events)
{
    char buf[100];
    sprintf(buf, "AndroidEspeakSynthToFileCallback: %d samples", numsamples);
    LOGI(buf);
    if (wav == NULL)
    {
        LOGI("Null: speech has completed");
        return 0;
    }
    // The user data should contain the file pointer of the file to write to
    //void* user_data = events->user_data;
    FILE* user_data = fopen("myfile1.wav", "ab");
    FILE* fp = static_cast<FILE *>(user_data);
    // Write all of the samples
    fwrite(wav, sizeof(short), numsamples, fp);
    fclose(fp); // close again; this callback reopens the file on every call
    return 0; // continue synthesis (1 is to abort)
}

int main()
{
    printf("Hello World!\n");
    const char* szVersionInfo = espeak_Info(NULL);
    printf("Espeak version: %s\n", szVersionInfo);
    iSampleRate = espeak_Initialize(AUDIO_OUTPUT_SYNCHRONOUS, 4096, data_path, 0);
    if (iSampleRate <= 0)
    {
        printf("Unable to initialize espeak");
        return EXIT_FAILURE;
    }
    //samplerate = espeak_Initialize(AUDIO_OUTPUT_PLAYBACK, 0, data_path, 0);
    //ListVoices();
    strcpy(voicename, "default");
    // espeak --voices
    //strcpy(voicename, "german");
    //strcpy(voicename, GetLanguageVoiceName("DE"));
    if (espeak_SetVoiceByName(voicename) != EE_OK)
    {
        printf("Espeak setvoice error...\n");
    }
    static char word[200] = "Hello World";
    strcpy(word, "TV-fäns aufgepasst, es ist 20 Uhr 15. Zeit für Rambo 3");
    strcpy(word, "Unnamed Player wurde zum Opfer von GSG9");
    int speed = 220;
    int volume = 500; // volume in range 0-100, 0=silence
    int pitch = 50;   // base pitch, range 0-100, 50=normal
    // espeak.cpp 625
    espeak_SetParameter(espeakRATE, speed, 0);
    espeak_SetParameter(espeakVOLUME, volume, 0);
    espeak_SetParameter(espeakPITCH, pitch, 0);
    // espeakRANGE: pitch range, range 0-100. 0-monotone, 50=normal
    // espeakPUNCTUATION: which punctuation characters to announce:
    // value in espeak_PUNCT_TYPE (none, all, some),
    //espeak_VOICE *voice_spec = espeak_GetCurrentVoice();
    //voice_spec->gender = 2; // 0=none 1=male, 2=female,
    //voice_spec->age = age;
    //espeak_SetVoiceByProperties(voice_spec);
    //espeak_SetSynthCallback(AndroidEspeakDirectSpeechCallback);
    espeak_SetSynthCallback(AndroidEspeakSynthToFileCallback);
    unsigned int unique_identifier;
    espeak_ERROR err = espeak_Synth((char*) word, strlen(word) + 1, 0, POS_CHARACTER, 0, espeakCHARS_AUTO, &unique_identifier, NULL);
    err = espeak_Synchronize();
    /*
    strcpy(voicename, GetLanguageVoiceName("EN"));
    espeak_SetVoiceByName(voicename);
    strcpy(word, "Geany was fragged by GSG9 Googlebot");
    strcpy(word, "Googlebot");
    espeak_Synth((char*) word, strlen(word) + 1, 0, POS_CHARACTER, 0, espeakCHARS_AUTO, NULL, NULL);
    espeak_Synchronize();
    */
    // espeak_Cancel();
    espeak_Terminate();
    printf("Espeak terminated\n");
    system("pause");
    return EXIT_SUCCESS;
}
Have you tried passing the buffer you obtain in your callback to sndPlaySound()?
Declare Function sndPlaySound Lib "winmm.dll" Alias "sndPlaySoundA" (ByVal lpszSoundName As String, ByVal uFlags As Long) As Long
Its standard WinAPI usage is as follows:
sndPlaySound(buffer[0], SND_ASYNC | SND_MEMORY)
Alternatively, if you have a WAV file that has the audio to play:
sndPlaySound(filename, SND_ASYNC)
sndPlaySound has an ASYNC mode that won't block your program's execution while the audio is being played.
NOTE: I have used it in VB and the above snippets are for use in VB. If you are coding in VC++, you might have to modify them accordingly. But the basic intention remains the same: to pass the buffer to sndPlaySound with the ASYNC flag set (a C++ sketch follows below).
Good luck!!
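For C++, one way to act on this advice: sndPlaySound with SND_MEMORY expects a complete WAV image in memory, so collect the 16-bit samples espeak hands to the synth callback, prepend a minimal WAV header, and pass the whole image along. This is only a sketch; g_pcm, buildWavImage and playCollectedAudio are illustrative names, not part of the espeak or WinMM APIs, and the sample rate is the value returned by espeak_Initialize:

#include <vector>
#include <windows.h>
#include <mmsystem.h> // link against winmm.lib

static std::vector<char> g_pcm; // fill inside the synth callback, e.g.:
// g_pcm.insert(g_pcm.end(), (char *)wav, (char *)wav + numsamples * 2);

static void appendU32(std::vector<char> &v, unsigned int x) {
    v.insert(v.end(), (char *)&x, (char *)&x + 4);
}
static void appendU16(std::vector<char> &v, unsigned short x) {
    v.insert(v.end(), (char *)&x, (char *)&x + 2);
}

// Build a canonical 44-byte header for mono 16-bit PCM in front of the samples.
static std::vector<char> buildWavImage(const std::vector<char> &pcm, int sampleRate) {
    std::vector<char> wav;
    wav.insert(wav.end(), "RIFF", "RIFF" + 4);
    appendU32(wav, 36 + (unsigned int)pcm.size());
    wav.insert(wav.end(), "WAVEfmt ", "WAVEfmt " + 8);
    appendU32(wav, 16);                           // fmt chunk size
    appendU16(wav, 1);                            // PCM format
    appendU16(wav, 1);                            // mono
    appendU32(wav, (unsigned int)sampleRate);     // samples per second
    appendU32(wav, (unsigned int)sampleRate * 2); // bytes per second
    appendU16(wav, 2);                            // block align
    appendU16(wav, 16);                           // bits per sample
    wav.insert(wav.end(), "data", "data" + 4);
    appendU32(wav, (unsigned int)pcm.size());
    wav.insert(wav.end(), pcm.begin(), pcm.end());
    return wav;
}

// Call after espeak_Synchronize(); iSampleRate is the espeak_Initialize result.
static void playCollectedAudio(int iSampleRate) {
    static std::vector<char> wav; // must outlive the asynchronous playback
    wav = buildWavImage(g_pcm, iSampleRate);
    sndPlaySoundA(&wav[0], SND_MEMORY | SND_ASYNC);
}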
Several changes in the source code are needed to make the Windows library have the same functionality as the one on Linux. I listed the changes here. The ready-to-use binary is also available.
All the patches and the description were also sent to the espeak maintainer (publicly, through the mailing list and patches tracker), so maybe in the future it will be available directly.

Generate SHA hash in C++ using OpenSSL library

How can I generate SHA1 or SHA2 hashes using the OpenSSL library?
I searched Google and could not find any function or example code.
From the command line, it's simply:
printf "compute sha1" | openssl sha1
You can invoke the library like this:
#include <stdio.h>
#include <string.h>
#include <openssl/sha.h>

int main()
{
    unsigned char ibuf[] = "compute sha1";
    unsigned char obuf[20];
    SHA1(ibuf, strlen((const char *)ibuf), obuf);
    int i;
    for (i = 0; i < 20; i++) {
        printf("%02x ", obuf[i]);
    }
    printf("\n");
    return 0;
}
OpenSSL has horrible documentation with no code examples, but here you are:
#include <openssl/sha.h>

bool simpleSHA256(void* input, unsigned long length, unsigned char* md)
{
    SHA256_CTX context;
    if (!SHA256_Init(&context))
        return false;
    if (!SHA256_Update(&context, (unsigned char*)input, length))
        return false;
    if (!SHA256_Final(md, &context))
        return false;
    return true;
}
Usage:
unsigned char md[SHA256_DIGEST_LENGTH]; // 32 bytes
if (!simpleSHA256(<data buffer>, <data length>, md))
{
    // handle error
}
Afterwards, md will contain the binary SHA-256 message digest. Similar code can be used for the other SHA family members, just replace "256" in the code.
If you have larger data, you of course should feed data chunks as they arrive (multiple SHA256_Update calls).
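As a sketch of that "replace 256" remark: the EVP interface covers the whole SHA family with one function, so the digest type becomes a parameter (EVP_Digest is the one-shot convenience call; for chunked input use EVP_DigestInit_ex/EVP_DigestUpdate/EVP_DigestFinal_ex instead):

#include <openssl/evp.h>

// One-shot digest over a buffer; md must be able to hold EVP_MAX_MD_SIZE bytes.
bool simpleDigest(const void *input, size_t length, const EVP_MD *type,
                  unsigned char *md, unsigned int *md_len)
{
    return EVP_Digest(input, length, md, md_len, type, NULL) == 1;
}

// Usage, e.g. SHA-512 instead of SHA-256:
//   unsigned char md[EVP_MAX_MD_SIZE];
//   unsigned int md_len = 0;
//   if (!simpleDigest(buf, len, EVP_sha512(), md, &md_len)) { /* handle error */ }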
Adaptation of @AndiDog's version for big files:
#include <fstream>
#include <iomanip>
#include <optional>
#include <sstream>
#include <string>
#include <openssl/sha.h>

static const int K_READ_BUF_SIZE{ 1024 * 16 };

std::optional<std::string> CalcSha256(std::string filename)
{
    // Initialize openssl
    SHA256_CTX context;
    if (!SHA256_Init(&context))
    {
        return std::nullopt;
    }
    // Read file and update calculated SHA
    char buf[K_READ_BUF_SIZE];
    std::ifstream file(filename, std::ifstream::binary);
    while (file.good())
    {
        file.read(buf, sizeof(buf));
        if (!SHA256_Update(&context, buf, file.gcount()))
        {
            return std::nullopt;
        }
    }
    // Get Final SHA
    unsigned char result[SHA256_DIGEST_LENGTH];
    if (!SHA256_Final(result, &context))
    {
        return std::nullopt;
    }
    // Transform byte-array to string
    std::stringstream shastr;
    shastr << std::hex << std::setfill('0');
    for (const auto &byte : result)
    {
        shastr << std::setw(2) << (int)byte;
    }
    return shastr.str();
}
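A possible call site for this helper (std::optional requires C++17):

#include <iostream>

int main()
{
    if (std::optional<std::string> sha = CalcSha256("/tmp/file1"))
        std::cout << "SHA-256: " << *sha << std::endl;
    else
        std::cerr << "hashing failed" << std::endl;
}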
The correct syntax at the command line should be
echo -n "compute sha1" | openssl sha1
otherwise you'll hash the trailing newline character as well.
Here is an OpenSSL example of calculating a SHA-1 digest using BIO:
#include <new>
#include <stdexcept>
#include <string>
#include <vector>
#include <openssl/bio.h>
#include <openssl/evp.h>

std::string sha1(const std::string &input)
{
    BIO * p_bio_md = nullptr;
    BIO * p_bio_mem = nullptr;
    try
    {
        // make chain: p_bio_md <-> p_bio_mem
        p_bio_md = BIO_new(BIO_f_md());
        if (!p_bio_md) throw std::bad_alloc();
        BIO_set_md(p_bio_md, EVP_sha1());
        p_bio_mem = BIO_new_mem_buf((void*)input.c_str(), input.length());
        if (!p_bio_mem) throw std::bad_alloc();
        BIO_push(p_bio_md, p_bio_mem);
        // read through p_bio_md
        // read sequence: buf <<-- p_bio_md <<-- p_bio_mem
        std::vector<char> buf(input.size());
        for (;;)
        {
            auto nread = BIO_read(p_bio_md, buf.data(), buf.size());
            if (nread < 0) { throw std::runtime_error("BIO_read failed"); }
            if (nread == 0) { break; } // eof
        }
        // get result
        char md_buf[EVP_MAX_MD_SIZE];
        auto md_len = BIO_gets(p_bio_md, md_buf, sizeof(md_buf));
        if (md_len <= 0) { throw std::runtime_error("BIO_gets failed"); }
        std::string result(md_buf, md_len);
        // clean
        BIO_free_all(p_bio_md);
        return result;
    }
    catch (...)
    {
        if (p_bio_md) { BIO_free_all(p_bio_md); }
        throw;
    }
}
Though it's longer than just calling the SHA1 function from OpenSSL, it's more universal and can be reworked for use with file streams (thus processing data of any length).
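For instance, the file-stream rework only needs the memory BIO swapped for a file BIO; a sketch under that assumption (same chain, same cleanup idea, sha1_file is an illustrative name):

#include <new>
#include <stdexcept>
#include <string>
#include <vector>
#include <openssl/bio.h>
#include <openssl/evp.h>

std::string sha1_file(const std::string &path)
{
    BIO * p_bio_md = BIO_new(BIO_f_md());
    BIO * p_bio_file = BIO_new_file(path.c_str(), "rb");
    if (!p_bio_md || !p_bio_file)
    {
        if (p_bio_md) BIO_free(p_bio_md);
        if (p_bio_file) BIO_free(p_bio_file);
        throw std::bad_alloc();
    }
    BIO_set_md(p_bio_md, EVP_sha1());
    BIO_push(p_bio_md, p_bio_file); // chain: p_bio_md <-> p_bio_file
    std::vector<char> buf(1024 * 16);
    for (;;) // drain the file through the digest filter
    {
        int nread = BIO_read(p_bio_md, &buf[0], (int)buf.size());
        if (nread < 0) { BIO_free_all(p_bio_md); throw std::runtime_error("BIO_read failed"); }
        if (nread == 0) { break; } // eof
    }
    char md_buf[EVP_MAX_MD_SIZE];
    int md_len = BIO_gets(p_bio_md, md_buf, sizeof(md_buf));
    if (md_len <= 0) { BIO_free_all(p_bio_md); throw std::runtime_error("BIO_gets failed"); }
    std::string result(md_buf, md_len);
    BIO_free_all(p_bio_md);
    return result;
}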
C version of @Nayfe's code, generating a SHA1 hash from a file:
#include <stdio.h>
#include <stdlib.h>
#include <openssl/sha.h>

static const int K_READ_BUF_SIZE = { 1024 * 16 };

unsigned char* calculateSHA1(char *filename)
{
    if (!filename) {
        return NULL;
    }
    FILE *fp = fopen(filename, "rb");
    if (fp == NULL) {
        return NULL;
    }
    unsigned char* sha1_digest = malloc(sizeof(char) * SHA_DIGEST_LENGTH);
    SHA_CTX context;
    if (!SHA1_Init(&context)) {
        fclose(fp);
        free(sha1_digest);
        return NULL;
    }
    unsigned char buf[K_READ_BUF_SIZE];
    while (!feof(fp))
    {
        size_t total_read = fread(buf, 1, sizeof(buf), fp);
        if (!SHA1_Update(&context, buf, total_read))
        {
            fclose(fp);
            free(sha1_digest);
            return NULL;
        }
    }
    fclose(fp);
    if (!SHA1_Final(sha1_digest, &context)) {
        free(sha1_digest);
        return NULL;
    }
    return sha1_digest;
}
It can be used as follows:
unsigned char *sha1digest = calculateSHA1("/tmp/file1");
The sha1digest variable contains the raw SHA1 hash.
You can print it on the screen using the following for-loop:
char *sha1hash = (char *)malloc(sizeof(char) * 41);
sha1hash[40] = '\0';
int i;
for (i = 0; i < SHA_DIGEST_LENGTH; i++)
{
    sprintf(&sha1hash[i*2], "%02x", sha1digest[i]);
}
printf("SHA1 HASH: %s\n", sha1hash);