CNG. AES. Incorrectly decrypted (possibly incorrectly encrypted) file - C++

I'm studying the CNG API and trying to encrypt a file and then decrypt it. To derive the key for AES encryption I hash a password with SHA-512, using the following code:
#include <QCoreApplication>
#include <windows.h>
#include <wincrypt.h>
#include <bcrypt.h>
#include <fstream>
#include <iostream>
#include <QDebug>
#include <tchar.h>
using namespace std;
int main(int argc, char *argv[])
{
srand(time(NULL));
const int blockSize = 64;
const int buffSize = 512;
DWORD cbHashObject = 0, cbData = 0, cbKeyObject = 0, cbKeyData = 0, cbBlockLen = 0, cbCipherData = 0, cbPlainData = 0, cbBlob = 0;
PBYTE pbHashObject = NULL;
PBYTE pbKeyObject = NULL;
BCRYPT_ALG_HANDLE hAlg;
BCRYPT_HASH_HANDLE hHashB;
BCRYPT_KEY_HANDLE hKey;
NTSTATUS status;
PBYTE pbHash, pbIV, pbBlob = NULL;
PBYTE pbPlainData = NULL, pbCipherData = NULL;
BYTE* password = (BYTE*)"pass";
QCoreApplication a(argc, argv);
char *buff = new char[buffSize + 1];
if (!BCRYPT_SUCCESS(status = BCryptOpenAlgorithmProvider(&hAlg, BCRYPT_SHA512_ALGORITHM, NULL, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR1 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_OBJECT_LENGTH, (PBYTE)&cbHashObject, sizeof(DWORD), &cbData, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR2 " << hex << status << " E: " << hex << dwErr;
}
pbHashObject = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbHashObject);
if (!BCRYPT_SUCCESS(status = BCryptCreateHash(hAlg, &hHashB, pbHashObject, cbHashObject, NULL, 0, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR3 " << hex << status << " E: " << hex << dwErr;
}
ZeroMemory(buff, buffSize + 1);
if (!BCRYPT_SUCCESS(status = BCryptHashData(hHashB, (PBYTE)password, strlen((char*)password)*sizeof(BYTE), 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR5 " << hex << status << " E: " << hex << dwErr;
}
cbHashObject = 0, cbData = 0;
if (!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_HASH_LENGTH, (PBYTE)&cbHashObject, sizeof(DWORD), &cbData, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR6 " << hex << status << " E: " << hex << dwErr;
}
pbHash = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbHashObject);
if (!BCRYPT_SUCCESS(status = BCryptFinishHash(hHashB, pbHash, cbHashObject, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR7 " << hex << status << " E: " << hex << dwErr;
}
char *str_hash = new char[cbHashObject*2+1];
for(int i = 0; i < cbHashObject;i++)
sprintf(str_hash+2*i,"%02X",pbHash[i]);
cout << "PASSWORD SHA512 = " <<str_hash<< endl << endl;
ZeroMemory(str_hash, cbHashObject);
if (!BCRYPT_SUCCESS(status = BCryptDestroyHash(hHashB)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR8 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptCloseAlgorithmProvider(hAlg, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR9 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptOpenAlgorithmProvider(&hAlg, BCRYPT_AES_ALGORITHM, NULL, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR10 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_OBJECT_LENGTH, (PBYTE)&cbKeyObject, sizeof(DWORD), &cbKeyData, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR11 " << hex << status << " E: " << hex << dwErr;
}
if(!BCRYPT_SUCCESS(status = BCryptSetProperty(hAlg, BCRYPT_CHAINING_MODE, (PBYTE)BCRYPT_CHAIN_MODE_CBC, sizeof(BCRYPT_CHAIN_MODE_CBC), 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR12 " << hex << status << " E: " << hex << dwErr;
}
pbKeyObject = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbKeyObject);
if(!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_BLOCK_LENGTH, (PBYTE)&cbBlockLen, sizeof(DWORD), &cbData, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR13 " << hex << status << " E: " << hex << dwErr;
}
pbIV= (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbBlockLen+1);
for(int i(0); i < cbBlockLen; i++)
{
//pbIV[i]=rand();
pbIV[i]=0xAA;
}
for(int i = 0; i < cbBlockLen ;i++)
sprintf(str_hash+2*i,"%02X",pbIV[i]);
cout << "\n\nIV = " << str_hash;
if(!BCRYPT_SUCCESS(status = BCryptGenerateSymmetricKey(hAlg, &hKey, pbKeyObject, cbKeyObject, pbHash, cbHashObject, 0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR14 " << hex << status << " E: " << hex << dwErr;
}
if(!BCRYPT_SUCCESS(status = BCryptExportKey(hKey,NULL,BCRYPT_OPAQUE_KEY_BLOB,NULL,0,&cbBlob,0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR15 " << hex << status << " E: " << hex << dwErr;
}
pbBlob = (PBYTE)HeapAlloc(GetProcessHeap (), 0, cbBlob);
if(!BCRYPT_SUCCESS(status = BCryptExportKey(hKey,NULL,BCRYPT_OPAQUE_KEY_BLOB,pbBlob,cbBlob,&cbBlob,0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR16 " << hex << status << " E: " << hex << dwErr;
}
ifstream f1("..\\1.txt");
ofstream f2("..\\2.txt", ios_base::trunc);
pbPlainData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, blockSize + 1);
ZeroMemory(pbPlainData, blockSize + 1);
f1.read((char*)pbPlainData, blockSize);
if(!BCRYPT_SUCCESS(status = BCryptEncrypt(hKey, (BYTE*)pbPlainData, blockSize, NULL, 0, NULL, NULL, 0, &cbCipherData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR15 " << hex << status << " E: " << hex << dwErr;
}
pbCipherData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbCipherData + 1);
ZeroMemory(pbCipherData, cbCipherData + 1);
do {
if (!BCRYPT_SUCCESS(status = BCryptEncrypt(hKey, (BYTE*)pbPlainData, blockSize, NULL, 0, NULL, (BYTE*)pbCipherData, cbCipherData, &cbData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR17 " << hex << status << " E: " << dwErr;
}
f2.write((char*)pbCipherData, cbData);
f2.flush();
f1.read((char*)pbPlainData, blockSize);
} while(!f1.eof());
f1.close();
f2.close();
BCryptDestroyKey(hKey);
HeapFree(GetProcessHeap(), 0, pbPlainData);
HeapFree(GetProcessHeap(), 0, pbCipherData);
pbPlainData = NULL;
pbCipherData = NULL;
memset(pbIV, 0xAA, cbBlockLen);
memset(pbKeyObject, 0 , cbKeyObject);
if(!BCRYPT_SUCCESS(status = BCryptImportKey(hAlg,NULL,BCRYPT_OPAQUE_KEY_BLOB,&hKey,pbKeyObject,cbKeyObject,pbBlob,cbBlob,0)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR18 " << hex << status << " E: " << hex << dwErr;
}
ifstream f3("..\\2.txt");
ofstream f4("..\\3.txt", ios_base::trunc);
pbCipherData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, blockSize + 1);
ZeroMemory(pbCipherData, blockSize + 1);
f3.read((char*)pbCipherData, blockSize);
if(!BCRYPT_SUCCESS(status = BCryptEncrypt(hKey, (BYTE*)pbCipherData, blockSize, NULL, NULL, 0, NULL, 0, &cbPlainData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR19 " << hex << status << " E: " << hex << dwErr;
}
pbPlainData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbPlainData);
ZeroMemory(pbPlainData, cbPlainData);
do{
if (!BCRYPT_SUCCESS(status = BCryptEncrypt(hKey, (BYTE*)pbCipherData, blockSize, NULL, NULL, 0, (BYTE*)pbPlainData, cbPlainData, &cbData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
qDebug()<<"ERROR20 " << hex << status << " E: " << dwErr;
}
f4.write((char*)pbPlainData, cbData);
f4.flush();
f3.read((char*)pbCipherData, blockSize);
}while(!f3.eof());
f3.close();
f4.close();
return a.exec();
}
At first I thought the problem was in the IV, but I removed it and nothing changed. Here are examples:
Before encryption (1.txt):
1234567890
After encryption (2.txt):
O“сh4W;H;7Т†Gж
qї
–JmdВу•оё:s±°|kД'y|‰зXgK4ПбЖl©bЪmЗШоЏMfU¶ysr‰Ї[Іґћ·У:RљС
After decryption (3.txt):
ї
–JmdВу•оё:s_T–їя5rJ™U,ЁМЈ‘µМ­ґВ0уБЉO ‰CifБZDЧЈ)Тў«э"ќкгИу}ШЩєьъ©фb\a[

I have solved your problem: in the last two decryption calls, change BCryptEncrypt to BCryptDecrypt.
#define _CRT_SECURE_NO_WARNINGS
#include <windows.h>
#include <wincrypt.h>
#include <bcrypt.h>
#include <fstream>
#include <iostream>
#include <tchar.h>
#pragma comment(lib, "bcrypt.lib")
using namespace std;
int main(int argc, char* argv[])
{
const int blockSize = 64;
const int buffSize = 512;
DWORD cbHashObject = 0, cbData = 0, cbKeyObject = 0, cbKeyData = 0, cbBlockLen = 0, cbCipherData = 0, cbPlainData = 0, cbBlob = 0;
PBYTE pbHashObject = NULL;
PBYTE pbKeyObject = NULL;
BCRYPT_ALG_HANDLE hAlg;
BCRYPT_HASH_HANDLE hHashB;
BCRYPT_KEY_HANDLE hKey;
NTSTATUS status;
PBYTE pbHash, pbIV, pbBlob = NULL;
PBYTE pbPlainData = NULL, pbCipherData = NULL;
BYTE* password = (BYTE*)"pass";
char* buff = new char[buffSize + 1];
if (!BCRYPT_SUCCESS(status = BCryptOpenAlgorithmProvider(&hAlg, BCRYPT_SHA512_ALGORITHM, NULL, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR1 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_OBJECT_LENGTH, (PBYTE)&cbHashObject, sizeof(DWORD), &cbData, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR2 " << hex << status << " E: " << hex << dwErr;
}
pbHashObject = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbHashObject);
if (!BCRYPT_SUCCESS(status = BCryptCreateHash(hAlg, &hHashB, pbHashObject, cbHashObject, NULL, 0, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR3 " << hex << status << " E: " << hex << dwErr;
}
ZeroMemory(buff, buffSize + 1);
if (!BCRYPT_SUCCESS(status = BCryptHashData(hHashB, (PBYTE)password, strlen((char*)password) * sizeof(BYTE), 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR5 " << hex << status << " E: " << hex << dwErr;
}
cbHashObject = 0, cbData = 0;
if (!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_HASH_LENGTH, (PBYTE)&cbHashObject, sizeof(DWORD), &cbData, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR6 " << hex << status << " E: " << hex << dwErr;
}
pbHash = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbHashObject);
if (!BCRYPT_SUCCESS(status = BCryptFinishHash(hHashB, pbHash, cbHashObject, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR7 " << hex << status << " E: " << hex << dwErr;
}
char* str_hash = new char[cbHashObject * 2 + 1];
for (int i = 0; i < cbHashObject; i++)
sprintf(str_hash + 2 * i, "%02X", pbHash[i]);
cout << "PASSWORD SHA512 = " << str_hash << endl << endl;
ZeroMemory(str_hash, cbHashObject);
if (!BCRYPT_SUCCESS(status = BCryptDestroyHash(hHashB)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR8 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptCloseAlgorithmProvider(hAlg, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR9 " << hex << status << " E: " << hex << dwErr;
}
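// --- AES setup: open the AES provider, select CBC chaining, generate the key from the SHA-512 hash, and export a copy of the key blob. ---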
if (!BCRYPT_SUCCESS(status = BCryptOpenAlgorithmProvider(&hAlg, BCRYPT_AES_ALGORITHM, NULL, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR10 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_OBJECT_LENGTH, (PBYTE)&cbKeyObject, sizeof(DWORD), &cbKeyData, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR11 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptSetProperty(hAlg, BCRYPT_CHAINING_MODE, (PBYTE)BCRYPT_CHAIN_MODE_CBC, sizeof(BCRYPT_CHAIN_MODE_CBC), 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR12 " << hex << status << " E: " << hex << dwErr;
}
pbKeyObject = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbKeyObject);
if (!BCRYPT_SUCCESS(status = BCryptGetProperty(hAlg, BCRYPT_BLOCK_LENGTH, (PBYTE)&cbBlockLen, sizeof(DWORD), &cbData, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR13 " << hex << status << " E: " << hex << dwErr;
}
pbIV = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbBlockLen + 1);
for (int i(0); i < cbBlockLen; i++)
{
//pbIV[i]=rand();
pbIV[i] = 0xAA;
}
for (int i = 0; i < cbBlockLen; i++)
sprintf(str_hash + 2 * i, "%02X", pbIV[i]);
std::cout << "\n\nIV = " << str_hash;
if (!BCRYPT_SUCCESS(status = BCryptGenerateSymmetricKey(hAlg, &hKey, pbKeyObject, cbKeyObject, pbHash, cbHashObject, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR14 " << hex << status << " E: " << hex << dwErr;
}
if (!BCRYPT_SUCCESS(status = BCryptExportKey(hKey, NULL, BCRYPT_OPAQUE_KEY_BLOB, NULL, 0, &cbBlob, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR15 " << hex << status << " E: " << hex << dwErr;
}
pbBlob = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbBlob);
if (!BCRYPT_SUCCESS(status = BCryptExportKey(hKey, NULL, BCRYPT_OPAQUE_KEY_BLOB, pbBlob, cbBlob, &cbBlob, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR16 " << hex << status << " E: " << hex << dwErr;
}
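// --- Encryption: read 1.txt in 64-byte chunks and write each encrypted chunk to 2.txt. ---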
ifstream f1("C:\\LOL\\1.txt");
ofstream f2("C:\\LOL\\2.txt", ios_base::trunc);
pbPlainData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, blockSize + 1);
ZeroMemory(pbPlainData, blockSize + 1);
f1.read((char*)pbPlainData, blockSize);
if (!BCRYPT_SUCCESS(status = BCryptEncrypt(hKey, (BYTE*)pbPlainData, blockSize, NULL, 0, NULL, NULL, 0, &cbCipherData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR15 " << hex << status << " E: " << hex << dwErr;
}
pbCipherData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbCipherData + 1);
ZeroMemory(pbCipherData, cbCipherData + 1);
do {
if (!BCRYPT_SUCCESS(status = BCryptEncrypt(hKey, (BYTE*)pbPlainData, blockSize, NULL, 0, NULL, (BYTE*)pbCipherData, cbCipherData, &cbData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR17 " << hex << status << " E: " << dwErr;
}
f2.write((char*)pbCipherData, cbData);
f2.flush();
f1.read((char*)pbPlainData, blockSize);
} while (!f1.eof());
f1.close();
f2.close();
BCryptDestroyKey(hKey);
HeapFree(GetProcessHeap(), 0, pbPlainData);
HeapFree(GetProcessHeap(), 0, pbCipherData);
pbPlainData = NULL;
pbCipherData = NULL;
memset(pbIV, 0xAA, cbBlockLen);
memset(pbKeyObject, 0, cbKeyObject);
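// --- Decryption: re-import the exported key blob and decrypt 2.txt back into 3.txt, now using BCryptDecrypt. ---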
if (!BCRYPT_SUCCESS(status = BCryptImportKey(hAlg, NULL, BCRYPT_OPAQUE_KEY_BLOB, &hKey, pbKeyObject, cbKeyObject, pbBlob, cbBlob, 0)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR18 " << hex << status << " E: " << hex << dwErr;
}
ifstream f3("C:\\LOL\\2.txt");
ofstream f4("C:\\LOL\\3.txt", ios_base::trunc);
pbCipherData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, blockSize + 1);
ZeroMemory(pbCipherData, blockSize + 1);
f3.read((char*)pbCipherData, blockSize);
if (!BCRYPT_SUCCESS(status = BCryptDecrypt(hKey, (BYTE*)pbCipherData, blockSize, NULL, NULL, 0, NULL, 0, &cbPlainData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR19 " << hex << status << " E: " << hex << dwErr;
}
pbPlainData = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbPlainData);
ZeroMemory(pbPlainData, cbPlainData);
do {
if (!BCRYPT_SUCCESS(status = BCryptDecrypt(hKey, (BYTE*)pbCipherData, blockSize, NULL, NULL, 0, (BYTE*)pbPlainData, cbPlainData, &cbData, BCRYPT_BLOCK_PADDING)))
{
DWORD dwErr = GetLastError();
std::cout << "ERROR20 " << hex << status << " E: " << dwErr;
}
f4.write((char*)pbPlainData, cbData);
f4.flush();
f3.read((char*)pbCipherData, blockSize);
} while (!f3.eof());
f3.close();
f4.close();
return 0;
}
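A note for readers comparing these calls against the documentation: both BCryptEncrypt and BCryptDecrypt take the padding-info pointer first and then the IV pointer and IV length. In the code above those IV parameters are left as NULL/0, so the pbIV buffer filled with 0xAA is never actually handed to CNG. For reference, the documented prototype (BCryptDecrypt has the same parameter order) is:
NTSTATUS BCryptEncrypt(
    BCRYPT_KEY_HANDLE hKey,          // key from BCryptGenerateSymmetricKey / BCryptImportKey
    PUCHAR            pbInput,       // data to encrypt
    ULONG             cbInput,
    VOID             *pPaddingInfo,  // NULL for symmetric ciphers
    PUCHAR            pbIV,          // IV buffer; CNG updates it in place, so pass a fresh copy per message
    ULONG             cbIV,
    PUCHAR            pbOutput,      // NULL to query the required output size via *pcbResult
    ULONG             cbOutput,
    ULONG            *pcbResult,
    ULONG             dwFlags);      // e.g. BCRYPT_BLOCK_PADDING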

I'm studying this too right now. I'm able to encrypt a file, but then I can't decrypt it!
Here is the code:
#include <windows.h>
#include <iostream>
#include <stdio.h>
#include <bcrypt.h>
#pragma comment(lib, "bcrypt.lib")
#define NT_SUCCESS(Status) (((NTSTATUS)(Status)) >= 0)
#define STATUS_UNSUCCESSFUL ((NTSTATUS)0xC0000001L)
#define DATA_TO_ENCRYPT "Test Data"
static const BYTE rgbAES128Key[] =
{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F
};
int main() {
BCRYPT_ALG_HANDLE hAesAlg = NULL;
BCRYPT_KEY_HANDLE hKey = NULL;
NTSTATUS status = STATUS_UNSUCCESSFUL;
DWORD cbCipherText = 0,
cbPlainText = 0,
cbData = 0,
cbKeyObject = 0,
cbBlockLen = 0,
cbBlob = 0;
PBYTE pbCipherText = NULL,
pbPlainText = NULL,
pbKeyObject = NULL,
pbIV = NULL,
pbBlob = NULL;
BOOL bResult = FALSE;
wchar_t default_key[] = L"3igcZhRdWq96m3GUmTAiv9";
wchar_t* key_str = default_key;
// Open an algorithm handle.
if (!NT_SUCCESS(status = BCryptOpenAlgorithmProvider(&hAesAlg, BCRYPT_AES_ALGORITHM, NULL, 0)))
{
wprintf(L"**** Error 0x%x returned by BCryptOpenAlgorithmProvider\n", status);
return 0;
}
// Calculate the size of the buffer to hold the KeyObject.
if (!NT_SUCCESS(status = BCryptGetProperty(hAesAlg, BCRYPT_OBJECT_LENGTH, (PBYTE)&cbKeyObject, sizeof(DWORD), &cbData, 0)))
{
wprintf(L"**** Error 0x%x returned by BCryptGetProperty\n", status);
return 0;
}
// Allocate the key object on the heap.
pbKeyObject = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbKeyObject);
if (NULL == pbKeyObject)
{
wprintf(L"**** memory allocation failed\n");
return 0;
}
// Generate the key from supplied input key bytes.
if (!NT_SUCCESS(status = BCryptGenerateSymmetricKey(hAesAlg, &hKey, pbKeyObject, cbKeyObject, (PBYTE)rgbAES128Key, sizeof(rgbAES128Key), 0)))
{
wprintf(L"**** Error 0x%x returned by BCryptGenerateSymmetricKey\n", status);
return 0;
}
// Save another copy of the key for later.
if (!NT_SUCCESS(status = BCryptExportKey(hKey, NULL, BCRYPT_OPAQUE_KEY_BLOB, NULL, 0, &cbBlob, 0)))
{
wprintf(L"**** Error 0x%x returned by BCryptExportKey\n", status);
return 0;
}
// Allocate the buffer to hold the BLOB.
pbBlob = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbBlob);
if (NULL == pbBlob)
{
wprintf(L"**** memory allocation failed\n");
return 0;
}
if (!NT_SUCCESS(status = BCryptExportKey(
hKey,
NULL,
BCRYPT_OPAQUE_KEY_BLOB,
pbBlob,
cbBlob,
&cbBlob,
0)))
{
wprintf(L"**** Error 0x%x returned by BCryptExportKey\n", status);
return 0;
}
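// Open the encrypted input file and create the output file for the decrypted data.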
HANDLE hInpFile = CreateFileA("C:\\LOL\\file_1.encrypted", GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_SEQUENTIAL_SCAN, NULL);
if (hInpFile == INVALID_HANDLE_VALUE) {
printf("Cannot open input file!\n");
system("pause");
return 0;
}
HANDLE hOutFile = CreateFileA("C:\\LOL\\file_1_decrypted.txt", GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
if (hOutFile == INVALID_HANDLE_VALUE) {
printf("Cannot open output file!\n");
system("pause");
return 0;
}
const size_t chunk_size = 48;
BYTE chunk[chunk_size] = { 0 };
DWORD out_len = 0;
BOOL isFinal = FALSE;
DWORD readTotalSize = 0;
DWORD inputSize = GetFileSize(hInpFile, NULL);
while (bResult = ReadFile(hInpFile, chunk, chunk_size, &out_len, NULL)) {
if (0 == out_len) {
break;
}
readTotalSize += out_len;
if (readTotalSize == inputSize) {
isFinal = TRUE;
printf("Final chunk set.\n");
}
//
// Get the output buffer size.
//
if (!NT_SUCCESS(status = BCryptDecrypt(hKey, chunk, chunk_size, NULL, NULL, NULL, NULL, 0, &cbCipherText, BCRYPT_BLOCK_PADDING)))
{
wprintf(L"**** 1 Error 0x%x returned by BCryptEncrypt\n", status);
return 0;
}
pbCipherText = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbCipherText);
if (NULL == pbCipherText)
{
wprintf(L"**** memory allocation failed\n");
return 0;
}
// Use the key to encrypt the plaintext buffer.
// For block sized messages, block padding will add an extra block.
if (!NT_SUCCESS(status = BCryptDecrypt(hKey, chunk, chunk_size, NULL, NULL, NULL, pbCipherText, cbCipherText, &cbCipherText, BCRYPT_BLOCK_PADDING)))
{
wprintf(L"**** 2 Error 0x%x returned by BCryptEncrypt\n", status);
return 0;
}
if (!WriteFile(hOutFile, pbCipherText, cbCipherText, &cbData, NULL)) {
printf("writing failed!\n");
break;
}
}
}
The main change is the function name, from BCryptEncrypt to BCryptDecrypt.
Also, the file names.
When I change to Decrypt, I receive the "**** 2 Error" message...
That's strange, because I use the same method, the same key...
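For reference, here is a minimal, self-contained round trip (an illustrative sketch, not your exact code: it assumes AES-CBC with BCRYPT_BLOCK_PADDING, uses a hard-coded 128-bit key and IV, and omits most error handling for brevity). Decryption only succeeds when the same key bytes, the same IV, and the same padding flag are used as for encryption, and the full ciphertext produced by the padded encrypt is passed to the padded decrypt:
#include <windows.h>
#include <bcrypt.h>
#include <cstdio>
#include <cstring>
#pragma comment(lib, "bcrypt.lib")
int main()
{
    BCRYPT_ALG_HANDLE hAlg = NULL;
    BCRYPT_KEY_HANDLE hKey = NULL;
    DWORD cbKeyObject = 0, cbData = 0;
    // Hard-coded key and IV purely for illustration; derive/generate them properly in real code.
    BYTE key[16] = { 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0A,0x0B,0x0C,0x0D,0x0E,0x0F };
    BYTE iv[16]  = { 0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA,0xAA };
    BYTE ivCopy[16]; // BCryptEncrypt/BCryptDecrypt update the IV buffer in place, so always pass a copy
    BCryptOpenAlgorithmProvider(&hAlg, BCRYPT_AES_ALGORITHM, NULL, 0);
    BCryptSetProperty(hAlg, BCRYPT_CHAINING_MODE, (PBYTE)BCRYPT_CHAIN_MODE_CBC, sizeof(BCRYPT_CHAIN_MODE_CBC), 0);
    BCryptGetProperty(hAlg, BCRYPT_OBJECT_LENGTH, (PBYTE)&cbKeyObject, sizeof(DWORD), &cbData, 0);
    PBYTE keyObj = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbKeyObject);
    BCryptGenerateSymmetricKey(hAlg, &hKey, keyObj, cbKeyObject, key, sizeof(key), 0);
    BYTE plain[] = "Test Data";
    // Encrypt: first call with a NULL output buffer to get the required size, then encrypt for real.
    DWORD cbCipher = 0;
    memcpy(ivCopy, iv, sizeof(iv));
    BCryptEncrypt(hKey, plain, sizeof(plain), NULL, ivCopy, sizeof(ivCopy), NULL, 0, &cbCipher, BCRYPT_BLOCK_PADDING);
    PBYTE cipher = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbCipher);
    memcpy(ivCopy, iv, sizeof(iv));
    BCryptEncrypt(hKey, plain, sizeof(plain), NULL, ivCopy, sizeof(ivCopy), cipher, cbCipher, &cbCipher, BCRYPT_BLOCK_PADDING);
    // Decrypt: same key, the ORIGINAL IV, the same padding flag, and the full ciphertext length.
    DWORD cbPlainOut = 0;
    memcpy(ivCopy, iv, sizeof(iv));
    BCryptDecrypt(hKey, cipher, cbCipher, NULL, ivCopy, sizeof(ivCopy), NULL, 0, &cbPlainOut, BCRYPT_BLOCK_PADDING);
    PBYTE plainOut = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbPlainOut);
    memcpy(ivCopy, iv, sizeof(iv));
    BCryptDecrypt(hKey, cipher, cbCipher, NULL, ivCopy, sizeof(ivCopy), plainOut, cbPlainOut, &cbPlainOut, BCRYPT_BLOCK_PADDING);
    printf("decrypted: %s\n", (char*)plainOut); // prints "Test Data" because key, IV, and padding match
    HeapFree(GetProcessHeap(), 0, plainOut);
    HeapFree(GetProcessHeap(), 0, cipher);
    BCryptDestroyKey(hKey);
    HeapFree(GetProcessHeap(), 0, keyObj);
    BCryptCloseAlgorithmProvider(hAlg, 0);
    return 0;
}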

Related

MapViewOfFile() Returns only the Last Word in the Buffer (See the Example)

I wrote a simple inter-process communication program using a memory-mapped file. The code works relatively well, but I have a problem with the buffer that I'll explain shortly. Here is the code (C++, Windows):
#define UNICODE
#define _UNICODE
#include <iostream>
#include <tchar.h>
#include <Windows.h>
int wmain(int argc, wchar_t** argv)
{
if (argc != 2)
{
std::cout << "Usage: `win32mmap w` for writing, or `win32mmap r` for reading.\n";
return -1;
}
HANDLE hMapFile;
HANDLE hEvent;
HANDLE isOpened = CreateEvent(NULL, true, false, L"IsOpened"); // To check if a `win32mmap w` runs
if (wcscmp(argv[1], L"w") == 0)
{
SetEvent(isOpened);
hMapFile = CreateFileMapping(INVALID_HANDLE_VALUE, NULL, PAGE_READWRITE, 0, 1024, L"mmapFile");
if (hMapFile == NULL)
{
std::cout << "CreateFileMapping() Error: " << GetLastError() << "\n";
return GetLastError();
}
hEvent = CreateEvent(NULL, true, false, L"mmapEvent");
if (hEvent == INVALID_HANDLE_VALUE || hEvent == NULL)
{
std::cout << "CreateEvent() Error: " << GetLastError() << "\n";
return GetLastError();
}
char* buff = (char*)MapViewOfFile(hMapFile, FILE_MAP_WRITE, 0, 0, 0);
if (!buff)
{
std::cout << "MapViewOfFile() Error: " << GetLastError() << "\n";
return GetLastError();
}
while (buff[0] != L'.')
{
std::cin >> buff;
SetEvent(hEvent);
}
UnmapViewOfFile(buff);
}
else if (wcscmp(argv[1], L"r") == 0)
{
if (WaitForSingleObject(isOpened, 0) == WAIT_TIMEOUT)
{
std::cout << "Waiting for `win32mmap w`...";
WaitForSingleObject(isOpened, INFINITE);
std::cout << "\n";
}
hMapFile = OpenFileMapping(FILE_MAP_ALL_ACCESS, false, L"mmapFile");
if (hMapFile == NULL)
{
std::cout << "CreateFileMapping() Error: " << GetLastError() << "\n";
return GetLastError();
}
hEvent = OpenEvent(EVENT_ALL_ACCESS, false, L"mmapEvent");
if (hEvent == INVALID_HANDLE_VALUE || hEvent == NULL)
{
std::cout << "CreateFile() Error: " << GetLastError() << "\n";
return GetLastError();
}
char* buff = (char*)MapViewOfFile(hMapFile, FILE_MAP_READ, 0, 0, 0);
if (!buff)
{
std::cout << "MapViewOfFile() Error: " << GetLastError() << "\n";
return GetLastError();
}
while (true)
{
WaitForSingleObject(hEvent, INFINITE);
ResetEvent(hEvent);
if (buff[0] == '.')
{
break;
}
std::cout << buff << "\n";
}
UnmapViewOfFile(buff);
}
else
{
std::cout << "Usage: `win32mmap w` for writing, or `win32mmap r` for reading.\n";
return -1;
}
CloseHandle(hMapFile);
return 0;
}
The program is a simple inter-process communication "chat" that relies on memory-mapped files. To use the program, you need to run two instances of it: win32mmap w and win32mmap r. The first instance is used to type text that is displayed in the second instance. When you type . in the first instance, both of them terminate.
My problem is that when I run the two instances of the program and type the word Hello in the first instance (win32mmap w), the second instance shows Hello as expected. But when I type Hello World in the first instance, the second instance shows only the word World instead of Hello World. How can I fix the code so that the buffer receives the whole text?
Your writer is not waiting for the reader to consume the data before overwriting it with new data.
You need two events: one that the reader waits on, signaled when the buffer has data to read, and one that the writer waits on, signaled when the buffer needs more data.
Try this instead:
#define UNICODE
#define _UNICODE
#include <iostream>
#include <tchar.h>
#include <Windows.h>
const DWORD BufSize = 1024;
int wmain(int argc, wchar_t** argv)
{
if (argc != 2)
{
std::cout << "Usage: `win32mmap w` for writing, or `win32mmap r` for reading.\n";
return -1;
}
HANDLE hMapFile;
char* buff;
HANDLE hNeedDataEvent;
HANDLE hHasDataEvent;
DWORD dwError;
HANDLE isOpened = CreateEvent(NULL, TRUE, FALSE, L"IsOpened"); // To check if a `win32mmap w` runs
if (isOpened == NULL)
{
dwError = GetLastError();
std::cout << "CreateEvent() Error: " << dwError << "\n";
return dwError;
}
if (wcscmp(argv[1], L"w") == 0)
{
hMapFile = CreateFileMapping(INVALID_HANDLE_VALUE, NULL, PAGE_READWRITE, 0, BufSize, L"mmapFile");
if (hMapFile == NULL)
{
dwError = GetLastError();
std::cout << "CreateFileMapping() Error: " << dwError << "\n";
SetEvent(isOpened);
return dwError;
}
buff = (char*) MapViewOfFile(hMapFile, FILE_MAP_WRITE, 0, 0, BufSize);
if (!buff)
{
dwError = GetLastError();
std::cout << "MapViewOfFile() Error: " << dwError << "\n";
SetEvent(isOpened);
return dwError;
}
hNeedDataEvent = CreateEvent(NULL, TRUE, TRUE, L"mmapNeedDataEvent");
if (hNeedDataEvent == NULL)
{
dwError = GetLastError();
std::cout << "CreateEvent() Error: " << dwError << "\n";
SetEvent(isOpened);
return dwError;
}
hHasDataEvent = CreateEvent(NULL, TRUE, FALSE, L"mmapHasDataEvent");
if (hHasDataEvent == NULL)
{
dwError = GetLastError();
std::cout << "CreateEvent() Error: " << dwError << "\n";
SetEvent(isOpened);
return dwError;
}
SetEvent(isOpened);
while (WaitForSingleObject(hNeedDataEvent, INFINITE) == WAIT_OBJECT_0)
{
std::cin.getline(buff, BufSize); // getline also consumes the trailing newline, so the next iteration's read does not fail
ResetEvent(hNeedDataEvent);
SetEvent(hHasDataEvent);
if (buff[0] == L'.') break;
}
}
else if (wcscmp(argv[1], L"r") == 0)
{
if (WaitForSingleObject(isOpened, 0) == WAIT_TIMEOUT)
{
std::cout << "Waiting for `win32mmap w`...";
WaitForSingleObject(isOpened, INFINITE);
std::cout << "\n";
}
hMapFile = OpenFileMapping(FILE_MAP_READ, FALSE, L"mmapFile");
if (hMapFile == NULL)
{
dwError = GetLastError();
std::cout << "CreateFileMapping() Error: " << dwError << "\n";
return dwError;
}
buff = (char*) MapViewOfFile(hMapFile, FILE_MAP_READ, 0, 0, BufSize); // assign the outer buff (no re-declaration) so UnmapViewOfFile(buff) at the end unmaps the right view
if (!buff)
{
dwError = GetLastError();
std::cout << "MapViewOfFile() Error: " << dwError << "\n";
return dwError;
}
hNeedDataEvent = OpenEvent(SYNCHRONIZE, FALSE, L"mmapNeedDataEvent");
if (hNeedDataEvent == NULL)
{
dwError = GetLastError();
std::cout << "OpenEvent() Error: " << dwError << "\n";
return dwError;
}
hHasDataEvent = OpenEvent(SYNCHRONIZE, FALSE, L"mmapHasDataEvent");
if (hHasDataEvent == NULL)
{
dwError = GetLastError();
std::cout << "OpenEvent() Error: " << dwError << "\n";
return dwError;
}
do
{
SetEvent(hNeedDataEvent);
if (WaitForSingleObject(hHasDataEvent, INFINITE) != WAIT_OBJECT_0)
break;
std::cout << buff << "\n";
ResetEvent(hHasDataEvent);
}
while (buff[0] != '.');
}
else
{
std::cout << "Usage: `win32mmap w` for writing, or `win32mmap r` for reading.\n";
return -1;
}
UnmapViewOfFile(buff);
CloseHandle(hMapFile);
CloseHandle(hNeedDataEvent);
CloseHandle(hHasDataEvent);
CloseHandle(isOpened);
return 0;
}

ReadProcessMemory() Returning 0 ERROR_PARTIAL_COPY

#include <Windows.h>
#include <iostream>
#include <vector>
#include <TlHelp32.h>
#include <tchar.h>
using namespace std;
DWORD GetModuleBase(LPSTR lpModuleName, DWORD dwProcessId)
{
MODULEENTRY32 lpModuleEntry = {0};
HANDLE hSnapShot = CreateToolhelp32Snapshot( TH32CS_SNAPMODULE, dwProcessId );
if(!hSnapShot)
return NULL;
lpModuleEntry.dwSize = sizeof(lpModuleEntry);
BOOL bModule = Module32First( hSnapShot, &lpModuleEntry );
while(bModule)
{
if(!strcmp( lpModuleEntry.szModule, lpModuleName ) )
{
CloseHandle( hSnapShot );
return (DWORDLONG)lpModuleEntry.modBaseAddr;
}
bModule = Module32Next( hSnapShot, &lpModuleEntry );
}
CloseHandle( hSnapShot );
return NULL;
}
int main() {
DWORD pID;
DWORDLONG off1, off2;
DWORDLONG baseAddress;
char moduleName[] = _T("AoE2DE_s.exe");
HWND hGameWindow;
HANDLE pHandle;
//Get Handles
hGameWindow = FindWindow(NULL, "Age of Empires II: Definitive Edition");
cout << "Game Window: " << hGameWindow << std::endl;
GetWindowThreadProcessId(hGameWindow, &pID);
cout << "Process ID: " << pID << std::endl;
pHandle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, pID);
cout << "Process Handle: " << std::hex << pHandle << std::endl;
//Get Client Base Addy
DWORDLONG clientBase = GetModuleBase(moduleName, pID);
cout << "Client Base: " << clientBase << std::endl;
ReadProcessMemory(pHandle, (LPCVOID)(clientBase + 0x2BFCA18), &baseAddress, sizeof(DWORDLONG), NULL);
DWORD lastError = GetLastError();
cout << "Error: " << lastError << std::endl;
cout << "Base Address: " << std::hex << baseAddress << std::endl;
ReadProcessMemory(pHandle, (LPCVOID)(baseAddress + 0x18), &off1, sizeof(DWORDLONG), NULL);
cout << "After Offset 1: " << std::hex << off1 << std::endl;
ReadProcessMemory(pHandle, (LPCVOID)(off1 + 0x9230), &off2, sizeof(DWORDLONG), NULL);
cout << "After Final Offset: " << off2 << std::endl;
cin.get();
}
The error occurs on this line:
ReadProcessMemory(pHandle, (LPCVOID)(clientBase + 0x2BFCA18), &baseAddress, sizeof(DWORDLONG), NULL);
Application output for debugging purposes:
Game Window: 0x1307c2
Process ID: 11988
Process Handle: 0xec
Client Base: cc640000
Error: 12b
Base Address: 0
After Offset 1: 401519
After Final Offset: 8
Not sure how it is failing here; I'm somewhat new to this. Everything else goes through perfectly without errors, but it fails on the first ReadProcessMemory(). If someone can help bring the solution to light, I would be grateful.
ReadProcessMemory is defined like this:
BOOL ReadProcessMemory(
HANDLE hProcess,
LPCVOID lpBaseAddress,
LPVOID lpBuffer,
SIZE_T nSize,
SIZE_T *lpNumberOfBytesRead
);
If it returns zero then there has been an error, and only in that case will GetLastError return a meaningful error number.
So correct usage would be:
SIZE_T readBytes;
if (!ReadProcessMemory(pHandle, (LPCVOID)(clientBase + 0x2BFCA18), &baseAddress, sizeof(DWORDLONG), &readBytes)) {
DWORD lastError = GetLastError();
cout << "ReadProcessMemory failed: Error: " << lastError << " read " << readBytes << std::endl;
}
else
{
cout << "ReadProcessMemory succeeded"; << std::endl;
cout << "Base Address: " << std::hex << baseAddress << std::endl;
}

Problem with windows credential prompt, CredUnPackAuthenticationBuffer error 50

I'm trying to display Windows default credential prompt and retrieve username, password and domain as strings.
I'm using this documentation:
CredUIPromptForWindowsCredentialsW
CredUnPackAuthenticationBufferW
When the prompt displays, I enter a random username and password (like username: test, password: test123), hit Enter, and the function CredUnPackAuthenticationBuffer() fails with ERROR_NOT_SUPPORTED.
Code:
#include <Windows.h>
#include <wincred.h> //Link library Credui.lib for CredUIPromptForWindowsCredentials() to work
#include <iostream>
#include <string>
//Display error in console and close application
void DisplayConsoleError(const WCHAR* errorMessage, const WCHAR* fName);
int wmain(int argc, WCHAR* argv[])
{
CREDUI_INFO cuiInfo;
cuiInfo.cbSize = sizeof(CREDUI_INFO);
cuiInfo.hbmBanner = nullptr;
cuiInfo.hwndParent = nullptr;
cuiInfo.pszCaptionText = L"CaptionText";
cuiInfo.pszMessageText = L"MessageText";
DWORD dwAuthError = 0;
ULONG dwAuthPackage = 0;
LPVOID outCredBuffer = nullptr;
ULONG outCredBufferSize = 0;
BOOL credSaveCheckbox = false;
DWORD dwError = 0;
DWORD lastError = 0;
dwError = CredUIPromptForWindowsCredentials(
&cuiInfo,
dwAuthError,
&dwAuthPackage,
nullptr,
NULL,
&outCredBuffer,
&outCredBufferSize,
&credSaveCheckbox,
CREDUIWIN_CHECKBOX | CREDUIWIN_GENERIC);
if (dwError == ERROR_SUCCESS)
{
DWORD maxUserNameSize = CREDUI_MAX_USERNAME_LENGTH;
DWORD maxDomainNameSize = CREDUI_MAX_DOMAIN_TARGET_LENGTH;
DWORD maxPasswordLength = CREDUI_MAX_PASSWORD_LENGTH;
LPWSTR szUserName = new WCHAR[maxUserNameSize];
LPWSTR szDomain = new WCHAR[maxDomainNameSize];
LPWSTR szPassword = new WCHAR[maxPasswordLength];
DWORD dwCredBufferSize = outCredBufferSize; //ULONG to DWORD
DWORD lastError = 0;
dwError = CredUnPackAuthenticationBuffer(
CRED_PACK_GENERIC_CREDENTIALS,
&outCredBuffer,
dwCredBufferSize,
szUserName,
&maxUserNameSize,
szDomain,
&maxDomainNameSize,
szPassword,
&maxPasswordLength
);
lastError = GetLastError();
//Check for error
if (dwError == FALSE)
{
DisplayConsoleError(L"Blah", L"CredUnPackAuthenticationBuffer", lastError);
}
else
{
std::wcout << L"username " << szUserName << std::endl;
std::wcout << L"domain " << szDomain << std::endl;
std::wcout << L"password " << szPassword << std::endl;
}
}
else
{
lastError = dwError;
}
SecureZeroMemory(outCredBuffer, outCredBufferSize);
CoTaskMemFree(outCredBuffer);
return lastError;
}
Additionally, debugging CredUIPromptForWindowsCredentials() in VS2019 fails (a problem with loading symbols?), but the compiled .exe works fine. As a workaround I'm attaching the debugger to the process.
I'm a beginner at WinAPI, so I would be grateful if someone would explain why this error appears, what I'm doing wrong, and how to fix this code.
EDIT
Moved the error check above the SecureZeroMemory() and CoTaskMemFree() calls to avoid calling other API functions before checking the error message, but the error remained the same.
DisplayConsoleError:
void DisplayConsoleError(const WCHAR* errorMessage, const WCHAR* fName, DWORD lastError)
{
std::cout << std::endl;
std::cout << "Error\t" << std::endl;
std::wcout << L"In function:\t" << fName << std::endl;
std::cout << "Code:\t" << lastError << std::endl;
std::cout << std::endl;
}
EDIT 2: Changes to the code based on @RemyLebeau's feedback.
Look at this function in your code:
CredUnPackAuthenticationBuffer(
CRED_PACK_GENERIC_CREDENTIALS,
&outCredBuffer,
dwCredBufferSize,
szUserName,
&maxUserNameSize,
szDomain,
&maxDomainNameSize,
szPassword,
&maxPasswordLength
);
You need to change &outCredBuffer to outCredBuffer. outCredBuffer is already a pointer to the packed credential buffer returned by CredUIPromptForWindowsCredentials(), so taking its address passes a pointer to your local variable instead of the buffer itself.
#include <Windows.h>
#include <wincred.h> //Link library Credui.lib for CredUIPromptForWindowsCredentials() to work
#include <iostream>
#include <string>
//Display error in console and close application
void DisplayConsoleError(const WCHAR* errorMessage, const WCHAR* fName, DWORD lastError)
{
std::cout << std::endl;
std::cout << "Error\t" << std::endl;
std::wcout << L"In function:\t" << fName << std::endl;
std::cout << "Code:\t" << lastError << std::endl;
std::cout << std::endl;
}
int wmain(int argc, WCHAR* argv[])
{
CREDUI_INFO cuiInfo;
cuiInfo.cbSize = sizeof(CREDUI_INFO);
cuiInfo.hbmBanner = nullptr;
cuiInfo.hwndParent = nullptr;
cuiInfo.pszCaptionText = L"CaptionText";
cuiInfo.pszMessageText = L"MessageText";
DWORD dwAuthError = 0;
ULONG dwAuthPackage = 0;
LPVOID outCredBuffer = nullptr;
ULONG outCredBufferSize = 0;
BOOL credSaveCheckbox = false;
DWORD dwError = 0;
DWORD lastError = 0;
dwError = CredUIPromptForWindowsCredentials(
&cuiInfo,
dwAuthError,
&dwAuthPackage,
nullptr,
NULL,
&outCredBuffer,
&outCredBufferSize,
&credSaveCheckbox,
CREDUIWIN_CHECKBOX | CREDUIWIN_GENERIC);
if (dwError == ERROR_SUCCESS)
{
DWORD maxUserNameSize = CREDUI_MAX_USERNAME_LENGTH;
DWORD maxDomainNameSize = CREDUI_MAX_DOMAIN_TARGET_LENGTH;
DWORD maxPasswordLength = CREDUI_MAX_PASSWORD_LENGTH;
LPWSTR szUserName = new WCHAR[maxUserNameSize];
LPWSTR szDomain = new WCHAR[maxDomainNameSize];
LPWSTR szPassword = new WCHAR[maxPasswordLength];
DWORD dwCredBufferSize = outCredBufferSize; //ULONG to DWORD
DWORD lastError = 0;
dwError = CredUnPackAuthenticationBuffer(
CRED_PACK_GENERIC_CREDENTIALS,
outCredBuffer,
dwCredBufferSize,
szUserName,
&maxUserNameSize,
szDomain,
&maxDomainNameSize,
szPassword,
&maxPasswordLength
);
lastError = GetLastError();
//Check for error
if (dwError == FALSE)
{
DisplayConsoleError(L"Blah", L"CredUnPackAuthenticationBuffer", lastError);
}
else
{
std::wcout << L"username " << szUserName << std::endl;
std::wcout << L"domain " << szDomain << std::endl;
std::wcout << L"password " << szPassword << std::endl;
}
}
else
{
lastError = dwError;
}
SecureZeroMemory(outCredBuffer, outCredBufferSize);
CoTaskMemFree(outCredBuffer);
return lastError;
}

Windows WriteFile blocks even with FILE_FLAG_OVERLAPPED

I have the following code that creates a file using CreateFile with the FILE_FLAG_OVERLAPPED flag, and then calls WriteFile 100 times in a loop, passing in an OVERLAPPED structure
uint64_t GetPreciseTickCount()
{
FILETIME fileTime;
GetSystemTimePreciseAsFileTime(&fileTime);
ULARGE_INTEGER large;
large.LowPart = fileTime.dwLowDateTime;
large.HighPart = fileTime.dwHighDateTime;
return large.QuadPart;
}
uint64_t g_blockedTime = 0, g_waitTime = 0;
int main()
{
auto hFile = CreateFile(
L"test.dat",
GENERIC_WRITE,
0,
NULL,
CREATE_ALWAYS,
FILE_FLAG_OVERLAPPED | FILE_FLAG_NO_BUFFERING | FILE_FLAG_WRITE_THROUGH,
NULL);
if (hFile == INVALID_HANDLE_VALUE)
{
std::cout << "CreateFile failed with err " << GetLastError() << std::endl;
return 1;
}
uint32_t bufferSize = 4*1024*1024;
char* buffer = (char*)_aligned_malloc(bufferSize, 4096);
const int loop = 100;
LARGE_INTEGER endPosition;
endPosition.QuadPart = bufferSize * loop;
auto sfpRet = SetFilePointerEx(hFile, endPosition, nullptr, FILE_BEGIN);
if (sfpRet == INVALID_SET_FILE_POINTER)
{
std::cout << "SetFilePointer failed with err " << GetLastError() << std::endl;
return 1;
}
if (0 == SetEndOfFile(hFile))
{
std::cout << "SetEndOfFile failed with err " << GetLastError() << std::endl;
return 1;
}
auto start = GetPreciseTickCount();
OVERLAPPED overlapped;
auto completionEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr);
for (int i = 0; i < loop; ++i)
{
overlapped.hEvent = completionEvent;
overlapped.Offset = i * bufferSize;
overlapped.OffsetHigh = 0;
overlapped.Internal = 0;
overlapped.InternalHigh = 0;
auto writeFileStart = GetPreciseTickCount();
auto err = WriteFile(
hFile,
buffer,
bufferSize,
nullptr,
&overlapped);
auto writeFileEnd = GetPreciseTickCount();
g_blockedTime += (writeFileEnd - writeFileStart) / 10;
if (err == FALSE)
{
auto lastErr = GetLastError();
if (lastErr != ERROR_IO_PENDING)
{
std::cout << "WriteFile failed with err " << lastErr << std::endl;
return 1;
}
auto waitErr = WaitForSingleObject(overlapped.hEvent, INFINITE);
g_waitTime += (GetPreciseTickCount() - writeFileEnd) / 10;
if (waitErr != 0)
{
std::cout << "WaitForSingleObject failed with err " << waitErr << std::endl;
return 1;
}
}
}
auto end = GetPreciseTickCount();
CloseHandle(hFile);
std::cout << "Took " << (end - start) / 10 << " micros" << std::endl;
std::cout << "Blocked time " << g_blockedTime << " micros" << std::endl;
std::cout << "Wait time " << g_waitTime << " micros" << std::endl;
}
This prints the following output:
Took 1749086 micros
Blocked time 1700085 micros
Wait time 48896 micros
Why does WriteFile block, as evidenced by g_blockedTime being significantly higher than g_waitTime? Is there any way I can force it to be non-blocking?
Update: I updated the code to use SetFilePointerEx and SetEndOfFile before the loop. Still seeing the same blocking problem.
The solution is to call SetFilePointerEx, SetEndOfFile, and SetFileValidData before the loop; the subsequent WriteFile calls in the loop then become non-blocking. Without SetFileValidData, every write past the file's current valid data length forces the file system to zero-fill the gap synchronously, which is what makes the overlapped writes block. Note that SetFileValidData requires the SE_MANAGE_VOLUME_NAME privilege, which is why the code below enables that privilege on the process token first.
uint64_t GetPreciseTickCount()
{
FILETIME fileTime;
GetSystemTimePreciseAsFileTime(&fileTime);
ULARGE_INTEGER large;
large.LowPart = fileTime.dwLowDateTime;
large.HighPart = fileTime.dwHighDateTime;
return large.QuadPart;
}
uint64_t g_blockedTime = 0, g_waitTime = 0;
int main()
{
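// Enable the SE_MANAGE_VOLUME_NAME privilege on this process token; SetFileValidData (called below) requires it.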
HANDLE hToken;
auto openResult = OpenProcessToken(GetCurrentProcess(), TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY, &hToken);
if (!openResult)
{
std::cout << "OpenProcessToken failed with err " << GetLastError() << std::endl;
return 1;
}
TOKEN_PRIVILEGES tp;
tp.PrivilegeCount = 1;
tp.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED;
auto lookupResult = LookupPrivilegeValue(NULL, SE_MANAGE_VOLUME_NAME, &tp.Privileges[0].Luid);
if (!lookupResult)
{
std::cout << "LookupPrivilegeValue failed with err " << GetLastError() << std::endl;
return 1;
}
auto adjustResult = AdjustTokenPrivileges(hToken, FALSE, &tp, 0, NULL, NULL);
if (!adjustResult || GetLastError() != ERROR_SUCCESS)
{
std::cout << "AdjustTokenPrivileges failed with err " << GetLastError() << std::endl;
return 1;
}
auto hFile = CreateFile(
L"test.dat",
GENERIC_WRITE,
0,
NULL,
CREATE_ALWAYS,
FILE_FLAG_OVERLAPPED | FILE_FLAG_NO_BUFFERING | FILE_FLAG_WRITE_THROUGH,
NULL);
if (hFile == INVALID_HANDLE_VALUE)
{
std::cout << "CreateFile failed with err " << GetLastError() << std::endl;
return 1;
}
uint32_t bufferSize = 4*1024*1024;
char* buffer = (char*)_aligned_malloc(bufferSize, 4096);
const int loop = 100;
auto start = GetPreciseTickCount();
LARGE_INTEGER endPosition;
endPosition.QuadPart = bufferSize * loop;
auto setFileErr = SetFilePointerEx(hFile, endPosition, nullptr, FILE_BEGIN);
if (setFileErr == INVALID_SET_FILE_POINTER)
{
std::cout << "SetFilePointer failed with err " << GetLastError() << std::endl;
return 1;
}
if (!SetEndOfFile(hFile))
{
std::cout << "SetEndOfFile failed with err " << GetLastError() << std::endl;
return 1;
}
if (!SetFileValidData(hFile, bufferSize * loop))
{
std::cout << "SetFileValidData failed with err " << GetLastError() << std::endl;
return 1;
}
OVERLAPPED overlapped;
auto completionEvent = CreateEvent(nullptr, TRUE, FALSE, nullptr);
for (int i = 0; i < loop; ++i)
{
overlapped.hEvent = completionEvent;
overlapped.Offset = i * bufferSize;
overlapped.OffsetHigh = 0;
overlapped.Internal = 0;
overlapped.InternalHigh = 0;
auto writeFileStart = GetPreciseTickCount();
auto err = WriteFile(
hFile,
buffer,
bufferSize,
nullptr,
&overlapped);
auto writeFileEnd = GetPreciseTickCount();
g_blockedTime += (writeFileEnd - writeFileStart) / 10;
if (err == FALSE)
{
auto lastErr = GetLastError();
if (lastErr != ERROR_IO_PENDING)
{
std::cout << "WriteFile failed with err " << lastErr << std::endl;
return 1;
}
auto waitErr = WaitForSingleObject(overlapped.hEvent, INFINITE);
g_waitTime += (GetPreciseTickCount() - writeFileEnd) / 10;
if (waitErr != 0)
{
std::cout << "WaitForSingleObject failed with err " << waitErr << std::endl;
return 1;
}
}
}
auto end = GetPreciseTickCount();
CloseHandle(hFile);
std::cout << "Took " << (end - start) / 10 << " micros" << std::endl;
std::cout << "Blocked time " << g_blockedTime << " micros" << std::endl;
std::cout << "Wait time " << g_waitTime << " micros" << std::endl;
}
This produces the following output
Took 1508131 micros
Blocked time 19719 micros
Wait time 1481362 micros
Also, check out this article.

Getting info about partitions

I got an example of C++ code from MSDN where I try to get info about my partitions, but in this code DeviceIoControl returns 0 and the error code is 3 (ERROR_PATH_NOT_FOUND). How can I fix this error?
Code here:
#define WINVER 0x0500
#include <windows.h>
#include <winioctl.h>
#include <iostream>
#include <stdio.h>
void DisplayVolumeInfo(CHAR *volume)
{
HANDLE hDevice = CreateFileA(
volume,
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_WRITE | FILE_SHARE_READ,
NULL,
OPEN_EXISTING,
FILE_ATTRIBUTE_SYSTEM,
NULL);
if (hDevice != INVALID_HANDLE_VALUE)
{
PARTITION_INFORMATION partInfo;
DWORD retcount = 0;
BOOL res = DeviceIoControl(
hDevice,
IOCTL_DISK_GET_PARTITION_INFO,
(LPVOID)NULL,
(DWORD)0,
(LPVOID)&partInfo,
sizeof(partInfo),
&retcount,
(LPOVERLAPPED)NULL);
if (res)
std::cout << "Volume size = " << partInfo.PartitionLength.QuadPart << " bytes (" << (unsigned long long)(partInfo.PartitionLength.QuadPart / 1024 / 1024) << "Mb)" << std::endl;
else
std::cout << "Can't do IOCTL_DISK_GET_PARTITION_INFO (error code=" << GetLastError() << ")" << std::endl;
DISK_GEOMETRY diskGeometry;
retcount = 0;
res = DeviceIoControl(
hDevice,
IOCTL_DISK_GET_DRIVE_GEOMETRY,
NULL, 0,
&diskGeometry, sizeof(diskGeometry),
&retcount,
(LPOVERLAPPED)NULL);
if (res)
{
std::cout << "Cylinders = " << diskGeometry.Cylinders.QuadPart << std::endl;
std::cout << "Tracks/cylinder = " << diskGeometry.TracksPerCylinder << std::endl;
std::cout << "Sectors/track = " << diskGeometry.SectorsPerTrack << std::endl;
std::cout << "Bytes/sector = " << diskGeometry.BytesPerSector << std::endl;
}
else
std::cout << "Can't do IOCTL_DISK_GET_DRIVE_GEOMETRY (error code=" << GetLastError() << ")" << std::endl;
CloseHandle(hDevice);
}
else
std::cout << "Error opening volume " << volume << " (error code=" << GetLastError() << ")" << std::endl;
}
int main()
{
DWORD CharCount = 0;
char DeviceName[MAX_PATH] = "";
DWORD Error = ERROR_SUCCESS;
HANDLE FindHandle = INVALID_HANDLE_VALUE;
BOOL Found = FALSE;
size_t Index = 0;
BOOL Success = FALSE;
char VolumeName[MAX_PATH] = "";
//
// Enumerate all volumes in the system.
FindHandle = FindFirstVolumeA(VolumeName, MAX_PATH);
if (FindHandle == INVALID_HANDLE_VALUE)
{
Error = GetLastError();
printf("FindFirstVolume failed with error code %d\n", Error);
return 0;
}
for (;;)
{
//
// Skip the \\?\ prefix and remove the trailing backslash.
Index = strlen(VolumeName) - 1;
if (VolumeName[0] != '\\' ||
VolumeName[1] != '\\' ||
VolumeName[2] != '?' ||
VolumeName[3] != '\\' ||
VolumeName[Index] != '\\')
{
Error = ERROR_BAD_PATHNAME;
printf("FindFirstVolume/FindNextVolume returned a bad path: %s\n", VolumeName);
break;
}
//
// QueryDosDeviceW does not allow a trailing backslash,
// so temporarily remove it.
VolumeName[Index] = '\0';
CharCount = QueryDosDeviceA(&VolumeName[4], DeviceName, MAX_PATH);
VolumeName[Index] = '\\';
if ( CharCount == 0 )
{
Error = GetLastError();
printf("QueryDosDevice failed with error code %d\n", Error);
break;
}
printf("\nFound a device:\n %s", DeviceName);
printf("\nVolume name: %s", VolumeName);
printf("\n");
DisplayVolumeInfo(VolumeName);
//
// Move on to the next volume.
Success = FindNextVolumeA(FindHandle, VolumeName, MAX_PATH);
if ( !Success )
{
Error = GetLastError();
if (Error != ERROR_NO_MORE_FILES)
{
printf("FindNextVolumeW failed with error code %d\n", Error);
break;
}
//
// Finished iterating
// through all the volumes.
Error = ERROR_SUCCESS;
break;
}
}
FindVolumeClose(FindHandle);
FindHandle = INVALID_HANDLE_VALUE;
return 0;
}
Has anybody encountered this problem?
P.S. I'm working on 32-bit Windows 7.