I am working on an encryption algorithm in C++ using the WinAPI. My encryption works flawlessly, and so does my decryption, until the last chunk is decrypted with Final = TRUE: there I get the NTE_BAD_DATA error.
PS: I manually checked the buffer, and the decryption works fine until the last CryptDecrypt.
If someone has an idea, kindly help me :)
Here is my code:
PVOID test(PVOID buffer, DWORD* length, PCHAR key_str2, bool isdecrypt) {
    CHAR default_key[] = "3igcZhRdWq96m3GUmTAiv9";
    CHAR* key_str = default_key;
    size_t len = lstrlenA(key_str);
    DWORD dwStatus = 0;
    BOOL bResult = FALSE;
    wchar_t info[] = L"Microsoft Enhanced RSA and AES Cryptographic Provider";
    HCRYPTPROV hProv;
    if (!CryptAcquireContextW(&hProv, NULL, info, PROV_RSA_AES, CRYPT_VERIFYCONTEXT)) {
        dwStatus = GetLastError();
        CryptReleaseContext(hProv, 0);
        return 0;
    }
    HCRYPTHASH hHash;
    if (!CryptCreateHash(hProv, CALG_SHA_256, 0, 0, &hHash)) {
        dwStatus = GetLastError();
        CryptReleaseContext(hProv, 0);
        return 0;
    }
    if (!CryptHashData(hHash, (BYTE*)key_str, len, 0)) {
        DWORD err = GetLastError();
        return 0;
    }
    HCRYPTKEY hKey;
    if (!CryptDeriveKey(hProv, CALG_AES_128, hHash, 0, &hKey)) {
        dwStatus = GetLastError();
        CryptReleaseContext(hProv, 0);
        return 0;
    }
    const size_t chunk_size = CHUNK_SIZE;
    BYTE chunk[chunk_size] = { 0 };
    DWORD out_len = 0;
    BOOL isFinal = FALSE;
    DWORD readTotalSize = 0;
    DWORD inputSize = *length;
    PVOID newBuff = VirtualAlloc(0, inputSize, MEM_COMMIT, PAGE_READWRITE);
    while (true)
    {
        if (readTotalSize + chunk_size >= inputSize)
        {
            isFinal = TRUE;
            memcpy(chunk, PVOID((DWORD)buffer + readTotalSize), inputSize - readTotalSize);
            out_len = inputSize - readTotalSize;
        }
        else {
            memcpy(chunk, PVOID((DWORD)buffer + readTotalSize), chunk_size);
            out_len = chunk_size;
        }
        if (isdecrypt) {
            if (!CryptDecrypt(hKey, NULL, isFinal, 0, chunk, &out_len)) {
                int a = GetLastError();
                break;
            }
        }
        else {
            if (!CryptEncrypt(hKey, NULL, isFinal, 0, chunk, &out_len, chunk_size)) {
                break;
            }
        }
        if (readTotalSize + chunk_size >= inputSize) {
            memcpy(PVOID((DWORD)newBuff + readTotalSize), chunk, inputSize - readTotalSize);
            readTotalSize += inputSize - readTotalSize;
        }
        else {
            memcpy(PVOID((DWORD)newBuff + readTotalSize), chunk, chunk_size);
            readTotalSize += chunk_size;
        }
        if (isFinal)
            break;
        memset(chunk, 0, chunk_size);
    }
    CryptReleaseContext(hProv, 0);
    CryptDestroyKey(hKey);
    CryptDestroyHash(hHash);
    return newBuff;
}
You are not storing the final block of the encryption correctly.
The amount of data you pass into the encryption is in out_len, and the amount of data that was encrypted is put back into out_len. For a block cipher that doesn't matter for whole blocks, but your last block, which is less than a whole block going into the encryption, probably comes out as a whole (padded) block of encrypted data.
This means your ciphertext is a little larger than your plaintext in the final block, but you make no effort to handle this, so when you decrypt you have a problem: you've dropped some of the encrypted data on the floor.
Use the value that the encryption function puts into out_len to store off your data (since you overwrite your data stream in place, you'll also need to make sure there's space for the extra block).
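For illustration, here is a minimal sketch of the encryption side with that fix, using the same CryptoAPI setup as the code above; names and sizes are illustrative, and newBuff must be allocated with room for at least one extra block:

// Encrypt loop that honors the length CryptEncrypt() reports back.
// chunk is one block larger than CHUNK_SIZE so the padded final chunk fits.
BYTE chunk[CHUNK_SIZE + 16];
DWORD readTotal = 0, writeTotal = 0;
while (readTotal < inputSize) {
    DWORD in_len = min(CHUNK_SIZE, inputSize - readTotal);
    BOOL final = (readTotal + in_len >= inputSize);
    memcpy(chunk, (BYTE*)buffer + readTotal, in_len);
    DWORD out_len = in_len; // in: plaintext size, out: ciphertext size
    if (!CryptEncrypt(hKey, 0, final, 0, chunk, &out_len, sizeof(chunk)))
        break;
    // Store out_len bytes, not in_len: the padded final block is bigger
    // than the plaintext that went into it.
    memcpy((BYTE*)newBuff + writeTotal, chunk, out_len);
    writeTotal += out_len;
    readTotal += in_len;
}
// writeTotal is the real ciphertext size; the decrypt side must be fed
// all of it, and its final CryptDecrypt() will strip the padding again.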
I wrote this code to encrypt a file in Python with AES, and now I send the file over HTTP to a C++ client running on Windows. I want to decrypt the file on the C++ client, but I haven't found any libraries or useful code to help me with this.
Here is the Python code:
def derive_key_and_iv(password, salt, key_length, iv_length):
    d = d_i = b''
    while len(d) < key_length + iv_length:
        d_i = md5(d_i + str.encode(password) + salt).digest()
        d += d_i
    return d[:key_length], d[key_length:key_length + iv_length]

def encrypt(in_file, out_file, password, key_length=32):
    bs = AES.block_size # 16 bytes
    salt = urandom(bs)
    key, iv = derive_key_and_iv(password, salt, key_length, bs)
    cipher = AES.new(key, AES.MODE_CBC, iv)
    out_file.write(salt)
    finished = False
    while not finished:
        chunk = in_file.read(1024 * bs)
        if len(chunk) == 0 or len(chunk) % bs != 0: # final block
            padding_length = (bs - len(chunk) % bs) or bs
            chunk += str.encode(padding_length * chr(padding_length))
            finished = True
        out_file.write(cipher.encrypt(chunk))

password = 'ABCDE' # some password
with open("some_file.bin", "rb") as in_file, open("enc_some_file.enc", "wb") as out_file:
    encrypt(in_file, out_file, password)
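(Side note: derive_key_and_iv() above is OpenSSL's legacy EVP_BytesToKey scheme: d1 = MD5(password + salt), dn = MD5(dn-1 + password + salt), concatenated until key_length + iv_length bytes are available. A rough CryptoAPI equivalent is sketched below; it is an illustration under that assumption, not tested code. The raw key it produces would then have to be imported with CryptImportKey() as a PLAINTEXTKEYBLOB and the IV set via CryptSetKeyParam() with KP_IV, because CryptDeriveKey() over a SHA-256 hash, as in the update below, derives a different key than the Python side.)

#include <windows.h>
#include <wincrypt.h>
#include <vector>

// Sketch of the Python derive_key_and_iv() using CryptoAPI MD5.
static bool DeriveKeyAndIv(HCRYPTPROV hProv,
                           const BYTE *password, DWORD passLen,
                           const BYTE *salt, DWORD saltLen,
                           std::vector<BYTE> &key, std::vector<BYTE> &iv,
                           DWORD keyLen = 32, DWORD ivLen = 16)
{
    std::vector<BYTE> d;    // concatenation of all digests so far
    std::vector<BYTE> d_i;  // previous digest (empty on the first round)
    while (d.size() < keyLen + ivLen) {
        HCRYPTHASH hHash;
        if (!CryptCreateHash(hProv, CALG_MD5, 0, 0, &hHash))
            return false;
        if (!d_i.empty())
            CryptHashData(hHash, d_i.data(), (DWORD)d_i.size(), 0);
        CryptHashData(hHash, password, passLen, 0);
        CryptHashData(hHash, salt, saltLen, 0);
        DWORD mdLen = 16;
        d_i.resize(mdLen);
        CryptGetHashParam(hHash, HP_HASHVAL, d_i.data(), &mdLen, 0);
        CryptDestroyHash(hHash);
        d.insert(d.end(), d_i.begin(), d_i.end());
    }
    key.assign(d.begin(), d.begin() + keyLen);
    iv.assign(d.begin() + keyLen, d.begin() + keyLen + ivLen);
    return true;
}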
Update:
I am updating this question with my attempt using WinCrypt. I am trying to compile from VS Code using the MSYS64 g++ compiler on Windows. Here is the code I am trying to run. I also use some additional libraries for the client-server part, namely libcurlpp and libcurl.
#include <string>
#include <sstream>
#include <conio.h>
#include <iostream>
#include <fstream>
#include <curlpp/cURLpp.hpp>
#include <curlpp/Easy.hpp>
#include <curlpp/Options.hpp>
#include <wincrypt.h>
#include <windows.h>
#define AES_KEY_SIZE 32
#define IN_CHUNK_SIZE (AES_KEY_SIZE * 10) // a buffer must be a multiple of the key size
#define OUT_CHUNK_SIZE (IN_CHUNK_SIZE * 2) // an output buffer (for encryption) must be twice as big
using namespace std;
string host = ""; // some host
int main(void)
{
    curlpp::Easy req;
    req.setOpt(new curlpp::options::Url(host));
    ofstream myfile;
    myfile.open("enc_calc.enc", ios::out | ios::binary);
    myfile << req;
    myfile.close();
    getch();
    wchar_t *in_file = (wchar_t *)"enc_calc.enc";
    wchar_t *out_file = (wchar_t *)"calc.dll";
    wchar_t default_key[] = L"ABCDE";
    wchar_t *key_str = default_key;
    const size_t len = lstrlenW(key_str);
    const size_t key_size = len * sizeof(key_str[0]);
    const wchar_t *filename1 = (const wchar_t *)"enc_calc.enc";
    const wchar_t *filename2 = (const wchar_t *)"calc.dll";
    HANDLE hInpFile = CreateFileW(filename1, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_SEQUENTIAL_SCAN, NULL);
    if (hInpFile == INVALID_HANDLE_VALUE) {
        printf("Cannot open input file!\n");
        system("pause");
        return (-1);
    }
    HANDLE hOutFile = CreateFileW(filename2, GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
    if (hOutFile == INVALID_HANDLE_VALUE) {
        printf("Cannot open output file!\n");
        system("pause");
        return (-1);
    }
    DWORD dwStatus = 0;
    BOOL bResult = FALSE;
    wchar_t info[] = L"Microsoft Enhanced RSA and AES Cryptographic Provider";
    HCRYPTPROV hProv;
    if (!CryptAcquireContextW(&hProv, NULL, info, PROV_RSA_AES, CRYPT_VERIFYCONTEXT)) {
        dwStatus = GetLastError();
        printf("CryptAcquireContext failed: %x\n", dwStatus);
        CryptReleaseContext(hProv, 0);
        system("pause");
        return dwStatus;
    }
    HCRYPTHASH hHash;
    if (!CryptCreateHash(hProv, CALG_SHA_256, 0, 0, &hHash)) {
        dwStatus = GetLastError();
        printf("CryptCreateHash failed: %x\n", dwStatus);
        CryptReleaseContext(hProv, 0);
        system("pause");
        return dwStatus;
    }
    if (!CryptHashData(hHash, (BYTE*)key_str, key_size, 0)) {
        DWORD err = GetLastError();
        printf("CryptHashData Failed : %#x\n", err);
        system("pause");
        return (-1);
    }
    printf("[+] CryptHashData Success\n");
    HCRYPTKEY hKey;
    if (!CryptDeriveKey(hProv, CALG_AES_128, hHash, 0, &hKey)) {
        dwStatus = GetLastError();
        printf("CryptDeriveKey failed: %x\n", dwStatus);
        CryptReleaseContext(hProv, 0);
        system("pause");
        return dwStatus;
    }
    printf("[+] CryptDeriveKey Success\n");
    const size_t chunk_size = IN_CHUNK_SIZE;
    BYTE *chunk = new BYTE[chunk_size];
    DWORD out_len = 0;
    BOOL isFinal = FALSE;
    DWORD readTotalSize = 0;
    DWORD inputSize = GetFileSize(hInpFile, NULL);
    while (bResult = ReadFile(hInpFile, chunk, IN_CHUNK_SIZE, &out_len, NULL)) {
        if (0 == out_len) {
            break;
        }
        readTotalSize += out_len;
        if (readTotalSize >= inputSize) {
            isFinal = TRUE;
            printf("Final chunk set, len: %d = %x\n", out_len, out_len);
        }
        if (!CryptDecrypt(hKey, NULL, isFinal, 0, chunk, &out_len)) {
            printf("[-] CryptDecrypt failed: %x\n", GetLastError());
            break;
        }
        DWORD written = 0;
        if (!WriteFile(hOutFile, chunk, out_len, &written, NULL)) {
            printf("writing failed!\n");
            break;
        }
        memset(chunk, 0, chunk_size);
    }
    delete[] chunk; chunk = NULL;
    CryptDestroyHash(hHash);
    CryptDestroyKey(hKey);
    CryptReleaseContext(hProv, 0);
    CloseHandle(hInpFile);
    CloseHandle(hOutFile);
    printf("Finished. Processed %#x bytes.\n", readTotalSize);
    return 0;
}
The errors I'm getting involve types from windows.h, such as "ULONG does not name a type". And at runtime I get "Cannot open input file!", though I'm not sure why, since the file is downloaded to the current folder and that is exactly how I try to open it in the code.
From the MS docs I read that you should call the function twice: the first call gets the token information length, and the second retrieves the information itself. My problem is that the first call fails (error 122) but still writes a length of 32.
const char* CSystemHelper::ReturnUserByProcessHandle(const PROCESSENTRY32 &PENTRY) {
    HANDLE hToken, tHandle;
    DWORD ErrorCode;
    if ((tHandle = OpenProcess(PROCESS_QUERY_INFORMATION, FALSE, PENTRY.th32ProcessID)) == 0) {
        ErrorCode = GetLastError();
        CMessage::DEFAULT_MESSAGE(ErrorCode);
        return "UNIDENTIFIED";
    }
    if (!OpenProcessToken(tHandle, TOKEN_QUERY, &hToken)) {
        ErrorCode = GetLastError();
        CMessage::DEFAULT_MESSAGE(ErrorCode);
        return "UNIDENTIFIED";
    }
    DWORD len = 0;
    // this call fails, but len is set to 32
    if (!GetTokenInformation(hToken, TokenOwner, NULL, 0, &len)) {
        ErrorCode = GetLastError();
        CMessage::DEFAULT_MESSAGE(ErrorCode);
        CloseHandle(hToken);
        return "UNIDENTIFIED";
    }
    PTOKEN_OWNER TOKENOWNER = (PTOKEN_OWNER)LocalAlloc(LPTR, len);
    if (!TOKENOWNER) {
        LocalFree(TOKENOWNER);
        CloseHandle(hToken);
        return "UNIDENTIFIED";
    }
    if (!GetTokenInformation(hToken, TokenOwner, TOKENOWNER, len, &len)) {
        LocalFree(TOKENOWNER);
        CloseHandle(hToken);
        return "UNIDENTIFIED";
    }
    char Username[256] = { 0 }, LocalDomain[256] = { 0 };
    DWORD UsernameLength = 256, LocalDomainLength = 256;
    SID_NAME_USE SIDNAMEUSE;
    if (!LookupAccountSidA(NULL, TOKENOWNER->Owner, Username, &UsernameLength, LocalDomain, &LocalDomainLength, &SIDNAMEUSE)) {
        LocalFree(TOKENOWNER);
        CloseHandle(hToken);
        return "UNIDENTIFIED";
    }
    return Username;
}
If that matters, the handle is opened through a PROCESSENTRY32, which does not raise any errors.
Yes, that is normal operation.
When the TokenInformation parameter is NULL, GetTokenInformation fails with error 122 (ERROR_INSUFFICIENT_BUFFER) and reports the required buffer size in the length parameter.
So your code must be changed like this:
DWORD len = 0;
// this call is expected to fail, but len is set to the required size (32)
if (!GetTokenInformation(hToken, TokenOwner, NULL, 0, &len)) {
    ErrorCode = GetLastError();
    if (ErrorCode != ERROR_INSUFFICIENT_BUFFER || len == 0)
    {
        CMessage::DEFAULT_MESSAGE(ErrorCode);
        CloseHandle(hToken);
        return "UNIDENTIFIED";
    }
}
PTOKEN_OWNER TOKENOWNER = (PTOKEN_OWNER)LocalAlloc(LPTR, len);
if (!TOKENOWNER) {
    CloseHandle(hToken);
    return "UNIDENTIFIED";
}
Thanks for replying. Combining the answers of Jack Lee and Karsten Loop, I came to a solution. I tried omitting the error checking and returning the username, but it was pure garbage, so I decided to use std::string instead of C-style char arrays, and now everything works.
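(For reference, a minimal sketch of that change, showing only the tail of the function and requiring <string>. Returning std::string by value copies the name out before the local buffers are destroyed, whereas the original returned Username, a pointer to a stack array that dangles after the function returns:)

std::string CSystemHelper::ReturnUserByProcessHandle(const PROCESSENTRY32 &PENTRY) {
    // ... OpenProcess / OpenProcessToken / GetTokenInformation as before ...
    char Username[256] = { 0 }, LocalDomain[256] = { 0 };
    DWORD UsernameLength = 256, LocalDomainLength = 256;
    SID_NAME_USE SIDNAMEUSE;
    if (!LookupAccountSidA(NULL, TOKENOWNER->Owner, Username, &UsernameLength,
                           LocalDomain, &LocalDomainLength, &SIDNAMEUSE)) {
        LocalFree(TOKENOWNER);
        CloseHandle(hToken);
        return "UNIDENTIFIED";
    }
    std::string result(Username);  // copy while the buffer is still alive
    LocalFree(TOKENOWNER);
    CloseHandle(hToken);
    return result;
}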
Hi guys, I am trying to get a DLL's MD5 hash, but it returns the same value every time. What did I do wrong?
The DLL is already loaded when I am trying to get the hash.
I am getting the hash with the getHash() method and calculating it with CalcHash().
Thanks in advance.
#define BUFSIZE 1024
#define MD5LEN  16

int CalcHash(HANDLE hFile, char *md5sum)
{
    BOOL bResult = FALSE;
    HCRYPTPROV hProv = 0;
    HCRYPTHASH hHash = 0;
    BYTE rgbFile[BUFSIZE];
    DWORD cbRead = 0;
    BYTE rgbHash[MD5LEN];
    DWORD cbHash = 0;
    CHAR rgbDigits[] = "0123456789abcdef";
    char byt[3];
    int rc, err;
    rc = CryptAcquireContext(&hProv, NULL, MS_STRONG_PROV, PROV_RSA_FULL, CRYPT_VERIFYCONTEXT);
    if (!rc)
    {
        err = GetLastError();
        if (err == 0x80090016)
        {
            // first time using crypto API, need to create a new keyset
            rc = CryptAcquireContext(&hProv, NULL, MS_STRONG_PROV, PROV_RSA_FULL, CRYPT_NEWKEYSET);
            if (!rc)
            {
                err = GetLastError();
                return 0;
            }
        }
    }
    CryptCreateHash(hProv, CALG_MD5, 0, 0, &hHash);
    while (bResult = ReadFile(hFile, rgbFile, BUFSIZE, &cbRead, NULL))
    {
        if (0 == cbRead)
        {
            break;
        }
        CryptHashData(hHash, rgbFile, cbRead, 0);
    }
    cbHash = MD5LEN;
    CryptGetHashParam(hHash, HP_HASHVAL, rgbHash, &cbHash, 0);
    md5sum[0] = 0;
    for (DWORD i = 0; i < cbHash; i++)
    {
        sprintf(byt, "%c%c", rgbDigits[rgbHash[i] >> 4], rgbDigits[rgbHash[i] & 0xf]);
        strcat(md5sum, byt);
    }
    CryptDestroyHash(hHash);
    CryptReleaseContext(hProv, 0);
    return 1;
}

char *getHash()
{
    CalcHash(L"dsetup.dll", md5sum);
    Logger(md5sum);
    return md5sum;
}
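(As posted, getHash() passes a filename where CalcHash() expects an open HANDLE, and md5sum is never declared. With an invalid handle, ReadFile() reads nothing, so you always get the MD5 of zero bytes, d41d8cd98f00b204e9800998ecf8427e, which is one plausible cause of the identical hashes. A call that matches the signature would look something like this sketch; names are illustrative:)

// Sketch: CalcHash() hashes from an open file HANDLE, so the caller must
// open the DLL first and close the handle afterwards. MD5LEN is 16, so
// the hex string needs 2*16 characters plus a terminator.
static char md5sum[MD5LEN * 2 + 1];

char *getHash()
{
    HANDLE hFile = CreateFileW(L"dsetup.dll", GENERIC_READ, FILE_SHARE_READ,
                               NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
    if (hFile == INVALID_HANDLE_VALUE)
        return md5sum;           // empty string on failure
    CalcHash(hFile, md5sum);     // reads the file from the start
    CloseHandle(hFile);
    Logger(md5sum);
    return md5sum;
}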
I'm currently working on a simple encryption/decryption system in C++ using the Windows API.
I believe I've been successful at getting CryptEncrypt() to work (AES_128) for encrypting a file.
But when I use CryptDecrypt() to decrypt the file, the first 16 bytes are corrupted, and then after every 4000 bytes (the size of the chunks I'm pulling from ReadFile() and encrypting) there is another run of corrupted bytes, and so on. If I decrypt a file with a total length of less than 4000 bytes, the decryption works perfectly.
I'm very confused about why this is happening. There are no errors at all.
Here is a snippet of my code (I call CryptEncrypt() and CryptDecrypt() right after each other to save me exporting the key and to make testing faster):
DWORD bytesRead;
DWORD bytesWritten;
DWORD pointer = 0;
unsigned int blockSize = 4000;
void *fileBuffer = new unsigned char[4106];
bool EOF = false;
do
{
    SetFilePointer(hFileOrginal, pointer, 0, 0);
    ReadFile(hFileOrginal, fileBuffer, blockSize, &bytesRead, NULL);
    if (bytesRead < blockSize)
    {
        EOF = true;
    }
    CryptEncrypt(aesKey, NULL, EOF, 0, (BYTE *)fileBuffer, &bytesRead, (blockSize + 16));
    CryptDecrypt(aesKey, NULL, EOF, 0, (BYTE *)fileBuffer, &bytesRead);
    WriteFile(hTempFile, fileBuffer, bytesRead, &bytesWritten, NULL);
    pointer += bytesRead;
}
while (!EOF);
delete[] fileBuffer;
I would really appreciate any suggestions about whats going wrong.
EDIT: On a 4704-byte file I got the following using breakpoints:
First ReadFile: bytesRead 4000
First CryptEncrypt: bytesRead 4000
First CryptDecrypt: bytesRead 4000
Second ReadFile: bytesRead 704
Second CryptEncrypt: bytesRead 720
Second CryptDecrypt: bytesRead 704
Everything seems good with that yet I still get a problem.
I'm using the enhanced crypto API (with CRYPT_VERIFYCONTEXT) and a single generated AES key with the CRYPT_EXPORTABLE property.
You are not doing any error handling at all. All of the API functions you are calling have return values and error codes, none of which you are checking.
You are also not managing bytesRead correctly. CryptEncrypt() modifies the variable you pass to it, which then affects your call to CryptDecrypt(), which also modifies it, and that in turn affects subsequent calls to SetFilePointer(), which you should not be calling inside your loop to begin with. You are not validating that you have as many bytes as you expect, or that bytesRead ends up back at the original value that ReadFile() returned, so you may end up skipping bytes in the source file.
Try something more like this instead:
bool ReadFromFile(HANDLE hFile, void *Buffer, DWORD BufSize, DWORD *BytesRead)
{
    if (BytesRead)
        *BytesRead = 0;
    LPBYTE pBuffer = (LPBYTE) Buffer;
    DWORD dwRead;
    while (BufSize > 0)
    {
        if (!ReadFile(hFile, pBuffer, BufSize, &dwRead, NULL))
            return false;
        if (dwRead == 0)
            break;
        pBuffer += dwRead;
        BufSize -= dwRead;
        if (BytesRead)
            *BytesRead += dwRead;
    }
    return true;
}

bool WriteToFile(HANDLE hFile, void *Buffer, DWORD BufSize)
{
    LPBYTE pBuffer = (LPBYTE) Buffer;
    DWORD dwWritten;
    while (BufSize > 0)
    {
        if (!WriteFile(hFile, pBuffer, BufSize, &dwWritten, NULL))
            return false;
        pBuffer += dwWritten;
        BufSize -= dwWritten;
    }
    return true;
}

DWORD bytesRead, bytesEncrypted, bytesDecrypted;
const UINT blockSize = 4000;
LPBYTE fileBuffer = new BYTE[blockSize+16];
bool EOF;

if (SetFilePointer(hFileOrginal, 0, NULL, FILE_BEGIN) != 0)
{
    errorCode = GetLastError();
    ...
}
else
{
    do
    {
        if (!ReadFromFile(hFileOrginal, fileBuffer, blockSize, &bytesRead))
        {
            errorCode = GetLastError();
            ...
            break;
        }
        EOF = (bytesRead < blockSize);
        bytesEncrypted = bytesRead;
        if (!CryptEncrypt(aesKey, NULL, EOF, 0, fileBuffer, &bytesEncrypted, blockSize+16))
        {
            errorCode = GetLastError();
            ...
            break;
        }
        bytesDecrypted = bytesEncrypted;
        if (!CryptDecrypt(aesKey, NULL, EOF, 0, fileBuffer, &bytesDecrypted))
        {
            errorCode = GetLastError();
            ...
            break;
        }
        if (!WriteToFile(hTempFile, fileBuffer, bytesDecrypted))
        {
            errorCode = GetLastError();
            ...
            break;
        }
        if (bytesDecrypted != bytesRead)
        {
            ...
            break;
        }
    }
    while (!EOF);
}
delete[] fileBuffer;
I am running into memory errors when I try to run my C++ program in Visual Studio 2012. I am thinking that this code is the cause (since when I remove it, it runs fine):
void GetMachineHash(CString &strHashHex) {
    CMD5 cMD5;
    BYTE *szHash = (BYTE*)malloc(48);
    LPBYTE szMachineNameHash, szNetworkAddressHash, szVolumeIdHash;
    TCHAR szMachineId[100];
    DWORD nMachineIdLen = 100;
    TCHAR szNetworkAddress[13];
    IP_ADAPTER_INFO *pAdapterInfo, *pAdapter = NULL;
    DWORD dwRetVal = 0;
    ULONG ulOutBufLen = sizeof(IP_ADAPTER_INFO);
    TCHAR szVolumeId[20];
    TCHAR szVolumeName[MAX_PATH];
    TCHAR szFileSystemName[MAX_PATH];
    DWORD dwSerialNumber = 0;
    DWORD dwMaxComponentLen = 0;
    DWORD dwFileSystemFlags = 0;
    ZeroMemory(szHash, 48);
    ZeroMemory(szMachineId, 100);
    ZeroMemory(szVolumeId, 20);
    ZeroMemory(szVolumeName, MAX_PATH);
    ZeroMemory(szFileSystemName, MAX_PATH);
    ZeroMemory(szNetworkAddress, 13);
    GetComputerName(szMachineId, &nMachineIdLen);
    cMD5.Calculate(szMachineId);
    szMachineNameHash = cMD5.Hash();
    pAdapterInfo = (IP_ADAPTER_INFO *) malloc(sizeof(IP_ADAPTER_INFO));
    if (pAdapterInfo == NULL) {
        TRACE(_T("Error allocating memory needed to call GetAdaptersinfo()"));
        szNetworkAddressHash = NULL;
    }
    // Make an initial call to GetAdaptersInfo to get the necessary size into the ulOutBufLen variable
    if (GetAdaptersInfo(pAdapterInfo, &ulOutBufLen) == ERROR_BUFFER_OVERFLOW) {
        free(pAdapterInfo);
        pAdapterInfo = (IP_ADAPTER_INFO *)malloc(ulOutBufLen);
        if (pAdapterInfo == NULL) {
            TRACE(_T("Error allocating memory needed to call GetAdaptersinfo()"));
            szNetworkAddressHash = NULL;
        }
    }
    if ((dwRetVal = GetAdaptersInfo(pAdapterInfo, &ulOutBufLen)) == NO_ERROR) {
        pAdapter = pAdapterInfo;
        while (pAdapter) {
            if (pAdapter->Type != MIB_IF_TYPE_LOOPBACK) {
                _stprintf_s(szNetworkAddress, 13, _T("%.2X%.2X%.2X%.2X%.2X%.2X"),
                    pAdapter->Address[0],
                    pAdapter->Address[1],
                    pAdapter->Address[2],
                    pAdapter->Address[3],
                    pAdapter->Address[4],
                    pAdapter->Address[5]
                );
                break;
            }
            pAdapter = pAdapter->Next;
        }
    } else {
        TRACE(_T("GetAdaptersInfo() call failed"));
        szNetworkAddressHash = NULL;
    }
    cMD5.Calculate(szNetworkAddress);
    szNetworkAddressHash = cMD5.Hash();
    if (GetVolumeInformation(
            NULL,
            szVolumeName,
            sizeof(szVolumeName),
            &dwSerialNumber,
            &dwMaxComponentLen,
            &dwFileSystemFlags,
            szFileSystemName,
            sizeof(szFileSystemName))) {
        _stprintf_s(szVolumeId, 20, _T("%lu"), dwSerialNumber);
    }
    cMD5.Calculate(szVolumeId);
    szVolumeIdHash = cMD5.Hash();
    // Calculate hash from hashes
    memcpy(szHash, szMachineNameHash, 16);
    memcpy(szHash+16, szNetworkAddressHash, 16);
    memcpy(szHash+32, szVolumeIdHash, 16);
    cMD5.Calculate(szHash, 48);
    strHashHex.Preallocate(33);
    strHashHex = cMD5.HexHash();
    free(szHash);
    free(pAdapterInfo);
    return;
}
And then if I keep the function but just remove this code:
strHashHex.Preallocate(33);
strHashHex = cMD5.HexHash();
then it will work fine as well. So I am wondering whether that code is causing the memory problems, and if it is, how I can fix it.
Here's the CMD5 class (which utilizes the Windows API to generate an MD5 sum):
class CMD5
{
public:
    CMD5() {
        if (CryptAcquireContext(&m_hCryptProv, NULL, MS_ENHANCED_PROV, PROV_RSA_FULL, CRYPT_NEWKEYSET) == 0) {
            if (GetLastError() == NTE_EXISTS) {
                CryptAcquireContext(&m_hCryptProv, NULL, MS_ENHANCED_PROV, PROV_RSA_FULL, 0);
            }
        }
    }
    ~CMD5() {
        if (m_hCryptProv)
            CryptReleaseContext(m_hCryptProv, 0);
        m_hCryptProv = NULL;
        free(m_szHash);
    }
    bool Calculate(LPCTSTR szText) {
        DWORD dwLen = sizeof(TCHAR) * _tcslen(szText);
        DWORD dwHashLen;
        DWORD dwHashLenSize = sizeof(DWORD);
        if (CryptCreateHash(m_hCryptProv, CALG_MD5, 0, 0, &m_hHash)) {
            if (CryptHashData(m_hHash, (const BYTE*)szText, dwLen, 0)) {
                if (CryptGetHashParam(m_hHash, HP_HASHSIZE, (BYTE *)&dwHashLen, &dwHashLenSize, 0)) {
                    if (m_szHash = (BYTE*)malloc(dwHashLen)) {
                        if (CryptGetHashParam(m_hHash, HP_HASHVAL, (BYTE*)m_szHash, &dwHashLen, 0)) {
                            CryptDestroyHash(m_hHash);
                        }
                    }
                }
            }
        }
        return false;
    }
    bool Calculate(const LPBYTE szText, DWORD dwLen) {
        DWORD dwHashLen;
        DWORD dwHashLenSize = sizeof(DWORD);
        if (CryptCreateHash(m_hCryptProv, CALG_MD5, 0, 0, &m_hHash)) {
            if (CryptHashData(m_hHash, (const BYTE*)szText, dwLen, 0)) {
                if (CryptGetHashParam(m_hHash, HP_HASHSIZE, (BYTE *)&dwHashLen, &dwHashLenSize, 0)) {
                    if (m_szHash = (BYTE*)malloc(dwHashLen)) {
                        if (CryptGetHashParam(m_hHash, HP_HASHVAL, (BYTE*)m_szHash, &dwHashLen, 0)) {
                            CryptDestroyHash(m_hHash);
                        }
                    }
                }
            }
        }
        return false;
    }
    LPBYTE Hash() const {
        LPBYTE szHash = new BYTE[16];
        ZeroMemory(szHash, 16);
        memcpy(szHash, m_szHash, 16);
        return szHash;
    }
    LPTSTR HexHash() const {
        LPTSTR szBuf = new TCHAR[33];
        ZeroMemory(szBuf, 33);
        for (int i = 0; i < 16; i++)
            _stprintf_s(szBuf + i * 2, 33, _T("%02X"), m_szHash[i]);
        szBuf[32] = 0;
        return szBuf;
    }
private:
    BYTE *m_szHash;
    DWORD m_hHash;
    HCRYPTPROV m_hCryptProv;
};
Also, the error I get from VS2012 is "Critical error detected c0000374", and the call stack ends with a call to HeapAlloc() from _heap_alloc. Not sure if it matters, but this code is being called from a DLL.
It looks like I was able to solve the memory allocation problems by changing the CMD5::HexHash() function to:
void HexHash(CString &strHash) {
    for (int i = 0; i < 16; i++)
        strHash += StringFormat(_T("%02X"), m_szHash[i]);
    return;
}
and calling it via cMD5.HexHash(strHashHex).
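(StringFormat() above is not shown in the question; a minimal CString-based stand-in, assumed here for completeness, could look like this:)

// Hypothetical helper assumed by the fixed HexHash(): printf-style
// formatting into a CString, using CString::FormatV.
static CString StringFormat(LPCTSTR fmt, ...)
{
    CString s;
    va_list args;
    va_start(args, fmt);
    s.FormatV(fmt, args);
    va_end(args);
    return s;
}

Note that Hash() still returns a new BYTE[16] that GetMachineHash() never deletes, so the three intermediate hashes leak even after this fix.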