Here is my code:
int function(const char * buffer,size_t len,unsigned char * value)
{
char* user = "username";
char* password = "password";
size_t text_len = strlen(user) + strlen(password) + 2;
unsigned char* key = (unsigned char*)calloc(1, 16);
unsigned char* text = (unsigned char*)calloc(1, text_len);
snprintf((char*)text, text_len, "%s:%s", user, password);
MD5(text, text_len - 1, key);
HMAC_CTX *ctx = NULL;
unsigned int md_len = 20;
ctx = (HMAC_CTX*) calloc(1,sizeof(HMAC_CTX));
if(ctx == NULL){return -1;}
HMAC_CTX_init(ctx);
HMAC_Init(ctx, key, 16, EVP_sha1()); // crashes every time with heap corruption
HMAC_Update(ctx, buffer, len);
HMAC_Final(ctx, value, &md_len);
HMAC_CTX_cleanup(ctx);
return 0;
}
I am using OpenSSL 0.9.8c. If anyone has faced this problem, please let me know.
According to the man page, HMAC_Init is deprecated. It might be worth trying HMAC_Init_ex.
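For reference, here is a minimal sketch of the same routine using HMAC_Init_ex and a stack-allocated context, as was typical for the pre-1.1.0 API (the function name and the MD5-derived key are only illustrative; buffer sizes are assumptions):

#include <string.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>
#include <openssl/md5.h>

/* Sketch only: assumes `value` can hold at least 20 bytes (SHA-1 output). */
int hmac_sha1_sketch(const unsigned char *buffer, size_t len, unsigned char *value)
{
    unsigned char key[MD5_DIGEST_LENGTH];   /* 16-byte key derived from "user:password" */
    const char *text = "username:password";
    unsigned int md_len = 0;

    MD5((const unsigned char *)text, strlen(text), key);

    HMAC_CTX ctx;                           /* stack allocation, no guessing sizeof(HMAC_CTX) on the heap */
    HMAC_CTX_init(&ctx);
    HMAC_Init_ex(&ctx, key, (int)sizeof(key), EVP_sha1(), NULL);
    HMAC_Update(&ctx, buffer, len);
    HMAC_Final(&ctx, value, &md_len);       /* md_len becomes 20 for SHA-1 */
    HMAC_CTX_cleanup(&ctx);
    return 0;
}

Note that from OpenSSL 1.1.0 onward HMAC_CTX is opaque, so the stack allocation and the HMAC_CTX_init/HMAC_CTX_cleanup calls would have to be replaced with HMAC_CTX_new/HMAC_CTX_free.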
I recently started a small side project to recreate the Minecraft server in C++, but I stumbled on a problem.
I need to use the AES/CFB8 cipher according to this link, with continuous updating (the cipher is not finalized and restarted for every packet).
I searched the internet for a proper solution, but everything I found either did not actually work or relied on the command-line interface.
Any help is welcome!
Cipher.h
class Cipher {
public:
enum CipherType {
ENCRYPT,
DECRYPT
};
private:
EVP_CIPHER_CTX* ctx;
CipherType type;
bool hasFinished;
public:
Cipher(const char* key, const char* iv, CipherType type);
~Cipher();
int update(char* in, int inl, char* out);
int final(char* out);
int getMinimumBufLength(int size);
};
Cipher.cpp
Cipher::Cipher(const char* key, const char* iv, CipherType type) : type(type), hasFinished(false)
{
ctx = EVP_CIPHER_CTX_new();
EVP_CIPHER_CTX_init(ctx);
if(type == CipherType::ENCRYPT)
EVP_EncryptInit_ex(ctx, EVP_aes_256_cfb8(), nullptr, (unsigned char*)key, (unsigned char*)iv);
else
EVP_DecryptInit_ex(ctx, EVP_aes_256_cfb8(), nullptr, (unsigned char*)key, (unsigned char*)iv);
}
Cipher::~Cipher()
{
EVP_CIPHER_CTX_free(ctx);
}
int Cipher::getMinimumBufLength(int size)
{
return type == CipherType::DECRYPT ? size : size + AES_BLOCK_SIZE * 2;
}
int Cipher::update(char* in, int inl, char* out)
{
int len;
EVP_CipherUpdate(ctx, (unsigned char*)out, &len, (unsigned char*)in, inl);
return len;
}
int Cipher::final(char* out)
{
int len;
EVP_EncryptFinal_ex(ctx, (unsigned char*)out, &len);
return len;
}
Cipher-test.cpp
TEST_CASE("Cipher AES/CFB8")
{
char* key = "AAAAAAAAAZEFRGUINJFKDFIVJ";
char* iv = "AUJVFEUB";
Cipher encryptionCipher(key, iv, Cipher::CipherType::ENCRYPT);
Cipher decryptionCipher(key, iv, Cipher::CipherType::DECRYPT);
std::string data = "Hello World ! This is some soon to be encrypted string !";
char encData[encryptionCipher.getMinimumBufLength(data.size() + 1)];
int outLen = encryptionCipher.update(data.data(), data.size() + 1, encData);
int fLen = encryptionCipher.final(encData + outLen);
CHECK(data != std::string(encData));
char decData[decryptionCipher.getMinimumBufLength(outLen + fLen)];
outLen = decryptionCipher.update(encData, outLen + fLen, decData);
decryptionCipher.final(decData + outLen);
CHECK(data == std::string(decData));
}
Thanks in advance
EDIT: I am just wondering whether any supplementary code is needed because of the "continuous" aspect of the cipher.
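For what it's worth, CFB8 is a stream mode, so a cipher stays "continuous" simply by reusing one EVP_CIPHER_CTX and calling EVP_CipherUpdate once per packet, never calling a finalization routine mid-stream. A minimal sketch under that assumption (note that EVP_aes_256_cfb8 expects a 32-byte key and a 16-byte IV, which the test strings above do not provide):

#include <openssl/evp.h>

// Sketch only: one context encrypts any number of packets in sequence.
// key32/iv16 are placeholders for the connection's shared secret.
void stream_packets(const unsigned char key32[32], const unsigned char iv16[16],
                    const unsigned char* packets[], const int lens[], int count,
                    unsigned char* out /* large enough for the sum of all packet lengths */)
{
    EVP_CIPHER_CTX* ctx = EVP_CIPHER_CTX_new();
    EVP_EncryptInit_ex(ctx, EVP_aes_256_cfb8(), nullptr, key32, iv16);

    int written = 0;
    for (int i = 0; i < count; ++i) {
        int outl = 0;
        // CFB8 emits exactly as many bytes as it consumes, and the keystream
        // state carries over to the next call.
        EVP_EncryptUpdate(ctx, out + written, &outl, packets[i], lens[i]);
        written += outl;
    }
    // No EVP_EncryptFinal_ex is needed for a stream mode; free the context
    // when the connection closes.
    EVP_CIPHER_CTX_free(ctx);
}

As long as one Cipher instance per direction lives for the whole connection and final() is only called when the stream ends, the class above should already behave this way.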
In my code, I want to generate an HMAC-MD5, so:
void Gen()
{
CString newKey = L"320E6FADB2738DA273A41E14F85027E1";
unsigned char bNewKey[16];
memset(bNewKey, 0, 16);
string k = ws2s(newKey.GetString());
hex_to_bytes(k.c_str(), bNewKey, 16);
CString data = L"35413B1DD9AB9FA0F1395759BD72451C";
string d = ws2s(data.GetString());
unsigned char bData[16];
memset(bData, 0, 16);
hex_to_bytes(d.c_str(), bData, 16);
//unsigned char bNewKey[16] = { 0x32,0x0E,0x6F,0xAD,0xB2,0x73,0x8D,0xA2,0x73,0xA4,0x1E,0x14,0xF8,0x50,0x27,0xE1 };
//unsigned char bData[16] = { 0x35,0x41,0x3B,0x1D,0xD9,0xAB,0x9F,0xA0,0xF1,0x39,0x57,0x59,0xBD,0x72,0x45,0x1C };
unsigned char hash[16];
unsigned int len = 16;
HMAC(EVP_md5(), bNewKey, 16, bData, 16, hash, &len);
char* cData = new char[33];
bytes_to_hex(hash, cData, 16);
// in my code cData = "94feb52831aea0c2e85934c7850778c9"
}
But my result does not match the HMAC-MD5 generated by this online website:
https://wtools.io/generate-hmac-hash
Data: "35413B1DD9AB9FA0F1395759BD72451C"
skey: "320E6FADB2738DA273A41E14F85027E1"
Their result is: "bb4c6dff8a4f706b0a5206922d38a191"
Why?
You are incorrectly assuming that the web site is converting the data from a hex string to bytes when it's not.
This simple example results in the same output as the web site:
bool test_gen_md5_hmac()
{
std::string k = "320E6FADB2738DA273A41E14F85027E1";
std::string d = "35413B1DD9AB9FA0F1395759BD72451C";
unsigned char hash[16];
unsigned int len = 16;
HMAC(EVP_md5(), k.c_str(), k.size(), (unsigned char*)d.c_str(), d.size(), hash, &len);
char* rv = OPENSSL_buf2hexstr(hash, 16);
std::string rv_str(rv);
OPENSSL_free(rv);
return rv_str == "BB:4C:6D:FF:8A:4F:70:6B:0A:52:06:92:2D:38:A1:91";
}
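Conversely, if the goal is the HMAC over the decoded bytes (which is what the question's Gen() computes), a sketch that hex-decodes the inputs first with OPENSSL_hexstr2buf (available since OpenSSL 1.1.0) should reproduce the question's 94feb5... value rather than the web site's:

#include <string>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>

bool test_gen_md5_hmac_raw_bytes()
{
    long klen = 0, dlen = 0;
    // Decode the hex strings into raw 16-byte buffers before computing the HMAC.
    unsigned char* kbuf = OPENSSL_hexstr2buf("320E6FADB2738DA273A41E14F85027E1", &klen);
    unsigned char* dbuf = OPENSSL_hexstr2buf("35413B1DD9AB9FA0F1395759BD72451C", &dlen);

    unsigned char hash[16];
    unsigned int len = 16;
    HMAC(EVP_md5(), kbuf, (int)klen, dbuf, (size_t)dlen, hash, &len);

    char* rv = OPENSSL_buf2hexstr(hash, 16);
    std::string rv_str(rv);
    OPENSSL_free(rv);
    OPENSSL_free(kbuf);
    OPENSSL_free(dbuf);

    // Expected to match the value the question's Gen() produced.
    return rv_str == "94:FE:B5:28:31:AE:A0:C2:E8:59:34:C7:85:07:78:C9";
}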
Please take a look at the following method:
int BCVirtualCard::decrypt(std::string from, std::string *to, int keyId, bool padding)
{
if (to == nullptr)
{
NSCAssert(NO, @"Invalid params");
return 0;
}
NSString* privateKey = [m_storage privateKeyForSlot:keyId];
NSArray<NSString*>* components = [privateKey componentsSeparatedByString:@"_"];
const NSInteger componentsCount = 4;
if (components.count != componentsCount)
{
*to = "";
return 0;
}
const char* d = [components[0] UTF8String];
const char* n = [components[1] UTF8String];
const char* p = [components[2] UTF8String];
const char* q = [components[3] UTF8String];
RSA* rsa = RSA_new();
BN_hex2bn(&rsa->d, d);
BN_hex2bn(&rsa->n, n);
BN_hex2bn(&rsa->p, p);
BN_hex2bn(&rsa->q, q);
unsigned char* _to = (unsigned char *)calloc(1, sizeof(unsigned char));
int decryptedSize = RSA_private_decrypt((int)from.length(), (unsigned char *)from.c_str(), _to, rsa, RSA_NO_PADDING);
free(_to);
if (decryptedSize <= 0)
{
ERR_print_errors_cb(test, NULL);
*to = "";
return 0;
}
_to = (unsigned char *)calloc(decryptedSize, sizeof(unsigned char));
RSA_private_decrypt((int)from.length(), (unsigned char *)from.c_str(), _to, rsa, RSA_NO_PADDING);
*to = std::string((char *)_to, strlen((char *)_to));
free(_to);
RSA_free(rsa);
return 1;
}
Here the string from is supposed to be decrypted and written to the string to. For decryption I use the RSA_private_decrypt function. I call it twice: the first time to determine the size of the decrypted text, and the second time to write the decrypted text into the _to buffer. When I call it the second time, it usually crashes like this:
malloc: Heap corruption detected, free list is damaged at 0x280ff3d70
*** Incorrect guard value: 0
No1BCmail(2171,0x170efb000) malloc: *** set a breakpoint in malloc_error_break to debug
The breakpoint is set, and it let me find the place of the crash. However, I can't understand why it crashes. I tried recreating the RSA structure and playing with the size allocated for _to on the second call, but nothing helps. Can you see what is wrong here? Thanks
RSA_private_decrypt requires that the to parameter points to a buffer of an adequate size. Your first call only uses a buffer of size 1, which is too small and is undefined behavior. What you need to do is get the size from rsa using RSA_size(rsa), and then use that to allocate space for _to. That means you don't need to call the function twice, since you'll already have the size the first time around.
You should also be using decryptedSize for the length of the string you build instead of strlen, as _to may not be null-terminated.
Putting all that together, your code should look something like:
int BCVirtualCard::decrypt(std::string from, std::string *to, int keyId, bool padding)
{
if (to == nullptr)
{
NSCAssert(NO, @"Invalid params");
return 0;
}
NSString* privateKey = [m_storage privateKeyForSlot:keyId];
NSArray<NSString*>* components = [privateKey componentsSeparatedByString:@"_"];
const NSInteger componentsCount = 4;
if (components.count != componentsCount)
{
*to = "";
return 0;
}
const char* d = [components[0] UTF8String];
const char* n = [components[1] UTF8String];
const char* p = [components[2] UTF8String];
const char* q = [components[3] UTF8String];
RSA* rsa = RSA_new();
BN_hex2bn(&rsa->d, d);
BN_hex2bn(&rsa->n, n);
BN_hex2bn(&rsa->p, p);
BN_hex2bn(&rsa->q, q);
auto _to = std::make_unique<unsigned char[]>(RSA_size(rsa)); // use smart pointers so you don't have to worry about releasing the memory
int decryptedSize = RSA_private_decrypt((int)from.length(), (unsigned char *)from.c_str(), _to.get(), rsa, RSA_NO_PADDING);
if (decryptedSize <= 0)
{
ERR_print_errors_cb(test, NULL);
*to = "";
return 0;
}
*to = std::string((char *)_to.get(), decryptedSize);
return 1;
}
I need some help decrypting a char array in C++ using AES decryption with the OpenSSL library. I already have encryption working fine, but decryption is not working.
This is the Encrypt Function:
string Encrypt(char *Key, char *Msg, int size)
{
static char* Res;
static const char* const lut = "0123456789ABCDEF";
string output;
AES_KEY enc_key;
Res = (char *)malloc(size);
AES_set_encrypt_key((unsigned char *)Key, 128, &enc_key);
for(int vuelta = 0; vuelta <= size; vuelta += 16)
{
AES_ecb_encrypt((unsigned char *)Msg + vuelta, (unsigned char *)Res + vuelta, &enc_key, AES_ENCRYPT);
}
output.reserve(2 * size);
for (size_t i = 0; i < size; ++i)
{
const unsigned char c = Res[i];
output.push_back(lut[c >> 4]);
output.push_back(lut[c & 15]);
}
free(Res);
return output;
}
This is the Decrypt Function (not working):
char * Decrypt( char *Key, char *Msg, int size)
{
static char* Res;
AES_KEY dec_key;
Res = ( char * ) malloc( size );
AES_set_decrypt_key(( unsigned char * ) Key, 128, &dec_key);
for(int vuelta= 0; vuelta<=size; vuelta+=16)
{
AES_ecb_encrypt(( unsigned char * ) Msg+vuelta, ( unsigned char * ) Res+vuelta, &dec_key, AES_DECRYPT);
}
return (Res);
}
This is an example of the main function that calls the methods. The problem is that no matter how I print the "Res" variable in the Decrypt function, it always shows random ASCII values, and I would like to get the result in a string like the Encrypt function does:
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <string.h>
#include <string>
#include <iostream>
#include <openssl/aes.h>
using namespace std;
int main(int argc, char const *argv[])
{
char key[16];
char message[128];
char enc_message[128];
string s_key = "THIS_IS_THE_KEY_";
string s_message = "Hello World !!!";
memset(key, 0, sizeof(key));
strcpy(key, s_key.c_str());
memset(message, 0, sizeof(message));
strcpy(message, s_message.c_str());
string response = Encrypt(key, message, sizeof(message));
cout<<"This is the Encrypted Message: "<<response<<endl;
memset(enc_message, 0, sizeof(enc_message));
strcpy(enc_message, response.c_str());
Decrypt(key, enc_message, sizeof(enc_message));
return 0;
}
Any improvements to these methods?
I wanted to post the answer showing how I solved it: the problem with my example was that I was trying to use the decrypt function on a HEXADECIMAL STRING, when it should be called with the ASCII STRING containing the values exactly as delivered by the encryption function.
That is, instead of trying to decrypt a string like this: 461D019896EFA3
It must be decrypted with a string like this: #(%_!#$
After that, the decrypted output is delivered as ASCII values. It must be converted to hexadecimal and finally back to a string.
Here is the example that worked for me:
string Decrypt_string(char *Key, string HEX_Message, int size)
{
static const char* const lut = "0123456789ABCDEF";
int i = 0;
char* Res;
AES_KEY dec_key;
string auxString, output, newString;
for(i = 0; i < size; i += 2)
{
string byte = HEX_Message.substr(i, 2);
char chr = (char) (int)strtol(byte.c_str(), NULL, 16);
auxString.push_back(chr);
}
const char *Msg = auxString.c_str();
Res = (char *)malloc(size);
AES_set_decrypt_key((unsigned char *)Key, 128, &dec_key);
for(i = 0; i <= size; i += 16)
{
AES_ecb_encrypt((unsigned char *)Msg + i, (unsigned char *)Res + i, &dec_key, AES_DECRYPT);
}
output.reserve(2 * size);
for (size_t i = 0; i < size; ++i)
{
const unsigned char c = Res[i];
output.push_back(lut[c >> 4]);
output.push_back(lut[c & 15]);
}
int len = output.length();
for(int i = 0; i < len; i += 2)
{
string byte = output.substr(i, 2);
char chr = (char) (int)strtol(byte.c_str(), NULL, 16);
newString.push_back(chr);
}
free(Res);
return newString;
}
int conv_utf8_to_ucs2(const char* src, size_t len)
{
iconv_t cb = iconv_open("UTF-16", "UTF-8");
if (cb == (iconv_t)(-1))
return 0;
uint16_t* outBuff = new uint16_t[len + 1];
char* pout = (char*)outBuff;
size_t inRemains = len;
size_t outRemains = len * sizeof(uint16_t);
printf("inRemains:%d outRemains:%d\n", (int)inRemains, (int)outRemains);
size_t cvtlen = iconv(cb, &src, (size_t*)&inRemains, (char**)&pout, (size_t*)&outRemains);
if (cvtlen == (size_t)-1) {
//CONVERSION ALWAYS FAILS
goto out;
}
*pout = 0;
printf("inRemains:%d outRemains:%d cvtlen:%d\n", (int)inRemains, (int)outRemains, (int)cvtlen);
for (int i = 0; (i < len) && outBuff[i]; i++)
printf("0x%04x\n", outBuff[i]);
out:
if (outBuff)
delete[] outBuff;
iconv_close(cb);
return 0;
}
int _tmain(int argc, _TCHAR* argv[])
{
char utf8_str[] = "안녕 세상아?";
int len = strlen(utf8_str);
conv_utf8_to_ucs2(utf8_str, len);
return 0;
}
I have this tiny program to convert a UTF-8 string to UTF-16, but the iconv function always returns -1. I stepped into its implementation while debugging but could not find any hint. Could someone offer insight into this libiconv usage?
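Not a root-cause answer, but when iconv returns (size_t)-1 it sets errno, and checking it usually narrows the problem down: EILSEQ means the input bytes are not actually valid UTF-8 (for example, the source file was saved in a locale encoding rather than UTF-8, which is easy to hit in a _tmain-based Windows build with a Korean literal), EINVAL means a truncated multibyte sequence at the end of the input, and E2BIG means the output buffer is too small. A minimal sketch of that check, assuming a POSIX-style iconv that sets errno:

#include <errno.h>
#include <stdio.h>
#include <string.h>

/* Sketch: report why a conversion failed instead of silently jumping to cleanup. */
static void report_iconv_error(void)
{
    switch (errno) {
    case EILSEQ:
        printf("invalid multibyte sequence: the input is not really UTF-8 "
               "(check the source file's encoding)\n");
        break;
    case EINVAL:
        printf("incomplete multibyte sequence at the end of the input\n");
        break;
    case E2BIG:
        printf("output buffer is too small\n");
        break;
    default:
        printf("iconv failed: %s\n", strerror(errno));
        break;
    }
}

Calling something like report_iconv_error() inside the if (cvtlen == (size_t)-1) branch above would show which case applies.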