I'm trying to look for BYTE patterns in programs, but for some reason, when I assign the value from MINFO.SizeOfImage to ModuleSize, it causes the program I injected the DLL into to crash.
DWORD FindPattern(const BYTE* Pattern, SIZE_T PatternSize)
{
    DWORD ModuleBase = (DWORD)GetModuleHandle(NULL);
    DWORD ModuleSize = 0;
    MODULEINFO MINFO;
    HANDLE hProcess = OpenProcess(PROCESS_ALL_ACCESS, 0, GetCurrentProcessId());
    if(hProcess)
    {
        GetModuleInformation(hProcess, GetModuleHandle(NULL), &MINFO, sizeof(MODULEINFO));
        CloseHandle(hProcess);
        ModuleSize = MINFO.SizeOfImage;
    }
    else
        return 0;
    for(int i = 0; i < ModuleSize; i++)
    {
        if(memcmp((void*)(ModuleBase + i), Pattern, PatternSize) == 0)
            return ModuleBase + i;
    }
    return 0;
}
Your code worked just fine when I compiled and injected it. I even tested it against the current FindPattern I am using, and I didn't get any errors. Here's my code and yours:
bool Compare(const BYTE* pData, const BYTE* bMask, const char* szMask)
{
    for(; *szMask; ++szMask, ++pData, ++bMask)
        if(*szMask == 'x' && *pData != *bMask) return 0;
    return (*szMask) == NULL;
}

DWORD FindPattern(DWORD dwAddress, DWORD dwLen, BYTE* bMask, char* szMask)
{
    for(DWORD i = 0; i < dwLen; i++)
        if(Compare((BYTE*)(dwAddress + i), bMask, szMask)) return (DWORD)(dwAddress + i);
    return 0;
}
And then when I run this through it:
uint8 DecryptNeedle[] = {0x56, 0x8B, 0x74, 0x24, 0x08, 0x89, 0x71, 0x10,
0x0F, 0xB6, 0x16, 0x0F, 0xB6, 0x46, 0x01, 0x03,
0xC2, 0x8B, 0x51, 0x28, 0x25, 0xFF, 0x00, 0x00,
0x00, 0x89, 0x41, 0x04, 0x0F, 0xB6, 0x04, 0x10};
char DecryptMask[] = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
DWORD addrDecrypt = FindPattern(dwModuleStartAddr, 0xA000, DecryptNeedle, DecryptMask);
DWORD decrypt2 = YourFindPattern(DecryptNeedle, 32);
The output is identical in both.
I would double-check your injection code and see what else could be causing the error. Also, do a quick error check:
if(hProcess)
{
    if(!GetModuleInformation(hProcess, GetModuleHandle(NULL), &MINFO, sizeof(MODULEINFO)))
    {
        //error
    }
    CloseHandle(hProcess);
    ModuleSize = MINFO.SizeOfImage;
}
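One more thing to check, in case the error check passes: the scan loop itself reads out of bounds. With the condition i < ModuleSize, the last iterations memcmp up to PatternSize bytes past the end of the image, which can touch an unmapped page and crash. A minimal bounded sketch (hypothetical name; note that GetModuleInformation accepts the GetCurrentProcess() pseudo-handle, so no OpenProcess is needed for your own process):

DWORD FindPatternBounded(const BYTE* Pattern, SIZE_T PatternSize)
{
    DWORD ModuleBase = (DWORD)GetModuleHandle(NULL);
    MODULEINFO MINFO = {0};
    if(!GetModuleInformation(GetCurrentProcess(), GetModuleHandle(NULL), &MINFO, sizeof(MINFO)))
        return 0;
    // Stop PatternSize bytes early so memcmp never reads past the image.
    for(DWORD i = 0; i + PatternSize <= MINFO.SizeOfImage; i++)
    {
        if(memcmp((const void*)(ModuleBase + i), Pattern, PatternSize) == 0)
            return ModuleBase + i;
    }
    return 0;
}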
#include <iostream>
#include <Windows.h>
#include <bcrypt.h>
#include <ntstatus.h>
#include <string>
#include <vector>
#pragma comment(lib, "bcrypt.lib")
void test_status(NTSTATUS return_val)
{
    switch (return_val)
    {
    case STATUS_SUCCESS:
        std::cout << "STATUS_SUCCESS\n";
        break;
    case STATUS_BUFFER_TOO_SMALL:
        std::cout << "STATUS_BUFFER_TOO_SMALL\n";
        break;
    case STATUS_INVALID_HANDLE:
        std::cout << "STATUS_INVALID_HANDLE\n";
        break;
    case STATUS_INVALID_PARAMETER:
        std::cout << "STATUS_INVALID_PARAMETER\n";
        break;
    case STATUS_NOT_SUPPORTED:
        std::cout << "STATUS_NOT_SUPPORTED\n";
        break;
    }
}
int main()
{
    BCRYPT_ALG_HANDLE phAlgorithm = nullptr;
    BCRYPT_HASH_HANDLE phHash = nullptr;
    LPCWSTR pszAlgId = TEXT("XTS-AES");
    LPCWSTR pszImplementation = TEXT("Advanced Encryption Standard");
    PUCHAR pbHashObject = nullptr;
    std::vector<BYTE> pbSalt = { 0x77, 0x1f, 0x5b, 0x30, 0x2c, 0xf7, 0xc5, 0x31,
                                 0xa9, 0x86, 0x46, 0x52, 0xe2, 0xff, 0x4a, 0x17,
                                 0xab, 0xd0, 0x02, 0xdd, 0x4f, 0xb0, 0x2f, 0x71,
                                 0x0f, 0xe5, 0xa8, 0x1a, 0xfe, 0xe7, 0x9c, 0x6b }; // 771f5b302cf7c531a9864652e2ff4a17abd002dd4fb02f710fe5a81afee79c6b
    NTSTATUS status = BCryptOpenAlgorithmProvider(
        &phAlgorithm,
        BCRYPT_PBKDF2_ALGORITHM,
        NULL,
        NULL
    );
    test_status(status);

    PUCHAR pbOutput = nullptr;
    ULONG pcbResult = NULL;
    status = BCryptGetProperty(
        phAlgorithm,
        BCRYPT_OBJECT_LENGTH,
        pbOutput,
        sizeof(DWORD),
        &pcbResult,
        NULL
    );
    test_status(status);

    PUCHAR DerivedKey = nullptr;
    DWORD cbDerivedKey = NULL;
    std::string pbPassword = "MySecretPass";
    std::string DerivedKeyString;
    status = BCryptDeriveKeyPBKDF2(
        phAlgorithm,
        (BYTE*)pbPassword.data(),
        pbPassword.length(),
        (BYTE*)pbSalt.data(),
        sizeof(BYTE),
        10000,
        (PUCHAR)DerivedKeyString.c_str(),
        64,
        0);
    test_status(status);

    status = BCryptCloseAlgorithmProvider(
        phAlgorithm,
        NULL
    );
    test_status(status);
}
This code should derive the key from the password, but at the key-derivation stage I get the error STATUS_INVALID_PARAMETER. Why is this happening, and how do I fix it?
I tried changing the encryption algorithm and passing the parameters in different ways, but none of that worked; everything either failed or returned STATUS_INVALID_PARAMETER.
There are two errors:
First, you are computing a hash-based PRF here, so BCryptOpenAlgorithmProvider must be given the hash function's identifier and flags.
In BCryptOpenAlgorithmProvider, replace BCRYPT_PBKDF2_ALGORITHM with BCRYPT_SHA256_ALGORITHM, and pass BCRYPT_ALG_HANDLE_HMAC_FLAG in the fourth parameter, dwFlags.
The documentation for these two parameters is as follows:
https://learn.microsoft.com/en-us/windows/win32/seccng/cng-algorithm-identifiers
https://learn.microsoft.com/en-us/windows/win32/api/bcrypt/nf-bcrypt-bcryptopenalgorithmprovider
Second, do not use a std::string for the pbDerivedKey parameter, or it will corrupt memory: the 64 bytes are written through c_str() into an empty string's buffer.
Replace std::string DerivedKeyString with BYTE DerivedKeyString[64].
In BCryptDeriveKeyPBKDF2, replace (PUCHAR)DerivedKeyString.c_str() with DerivedKeyString.
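Putting both fixes together, a minimal sketch of the derivation (it also passes the full salt length, pbSalt.size(), where the original passed sizeof(BYTE), i.e. 1):

BCRYPT_ALG_HANDLE hPrf = nullptr;
// Open an HMAC-SHA256 provider; BCryptDeriveKeyPBKDF2 uses it as the PRF.
NTSTATUS status = BCryptOpenAlgorithmProvider(
    &hPrf,
    BCRYPT_SHA256_ALGORITHM,
    NULL,
    BCRYPT_ALG_HANDLE_HMAC_FLAG);
test_status(status);

BYTE DerivedKey[64] = {};                  // plain byte buffer instead of std::string
std::string pbPassword = "MySecretPass";

status = BCryptDeriveKeyPBKDF2(
    hPrf,
    (PUCHAR)pbPassword.data(),
    (ULONG)pbPassword.length(),
    pbSalt.data(),
    (ULONG)pbSalt.size(),                  // full 32-byte salt, not sizeof(BYTE)
    10000,                                 // iteration count
    DerivedKey,
    sizeof(DerivedKey),
    0);
test_status(status);

BCryptCloseAlgorithmProvider(hPrf, 0);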
I am working on porting an application from an Arduino Mega to an LPC824. The following piece of code behaves differently on the two platforms.
/**
 * Calculation of CMAC
 */
void cmac(const uint8_t* data, uint8_t dataLength) {
    uint8_t trailer[1] = {0x80};
    uint8_t bytes[_lenRnd];
    uint8_t temp[_lenRnd];
    memcpy(temp, data, dataLength);
    concatArray(temp, dataLength, trailer, 1);
    dataLength++;
    addPadding(temp, dataLength);
    memcpy(bytes, _sk2, _lenRnd);
    xorBytes(bytes, temp, _lenRnd);
    aes128_ctx_t ctx;
    aes128_init(_sessionkey, &ctx);
    uint8_t* chain = aes128_enc_sendMode(bytes, _lenRnd, &ctx, _ivect);
    Board_UARTPutSTR("chain\n\r");
    printBytes(chain, 16, true);
    memcpy(_ivect, chain, _lenRnd);
    //memcpy(_ivect, aes128_enc_sendMode(bytes,_lenRnd,&ctx,_ivect), _lenRnd);
    memcpy(_cmac, _ivect, _lenRnd);
    Board_UARTPutSTR("Initialization vector\n\r");
    printBytes(_ivect, 16, true);
}
I am expecting a value like {0x5d, 0xa8, 0x0f, 0x1f, 0x1c, 0x03, 0x7f, 0x16, 0x7e, 0xe5, 0xfd, 0xf3, 0x45, 0xb7, 0x73, 0xa2} for the chain variable, but the function below behaves differently. The print inside the function shows the correct value, the one I want ({0x5d, 0xa8, 0x0f, 0x1f, 0x1c, 0x03, 0x7f, 0x16, 0x7e, 0xe5, 0xfd, 0xf3, 0x45, 0xb7, 0x73, 0xa2}).
But when the function returns, chain has a different value from what I expect; I get {0x00, 0x20, 0x00, 0x10, 0x03, 0x01, 0x00, 0x00, 0xd5, 0x00, 0x00, 0x00, 0xd7, 0x00, 0x00, 0x00}.
Inside the function the result is correct, but a wrong value comes back to the caller. Why is this happening?
uint8_t* aes128_enc_sendMode(unsigned char* data, unsigned short len, aes128_ctx_t* key,
                             const unsigned char* iv) {
    unsigned char tmp[16];
    uint8_t chain[16];
    unsigned char c;
    unsigned char i;
    memcpy(chain, iv, 16);
    while (len >= 16) {
        memcpy(tmp, data, 16);
        //xorBytes(tmp,chain,16);
        for (i = 0; i < 16; i++) {
            tmp[i] = tmp[i] ^ chain[i];
        }
        aes128_enc(tmp, key);
        for (i = 0; i < 16; i++) {
            //c = data[i];
            data[i] = tmp[i];
            chain[i] = tmp[i];
        }
        len -= 16;
        data += 16;
    }
    Board_UARTPutSTR("Chain!!!:");
    printBytes(chain, 16, true);
    return chain;
}
A good start with an issue like this is to delete as much as you can while still reproducing the error; with a minimal code example, the answer is typically clear. I have done that for you here.
uint8_t* aes128_enc_sendMode(void) {
    uint8_t chain[16];
    return chain;
}
The chain variable is local to the function; it ceases to exist once the function exits. Accessing a pointer to that variable causes undefined behaviour, so don't do it.
In practice the pointer to the array still exists and points to an arbitrary block of memory. This block of memory is no longer reserved and can be overwritten at any time.
I suspect it works for the AVR because it is a simple 8 bit chip and that piece of memory was sitting unmolested by the time you used it. The ARM would have used greater optimisations, possibly running the full array on registers, so the data doesn't survive the transition.
tldr; You need to malloc() any arrays that you want to live past the function's exit. Be careful, malloc and embedded systems go together like diesel and styrofoam, it gets messy real quick.
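If you would rather avoid the heap on a small target, the usual alternative is a caller-supplied output buffer. A minimal sketch of that shape (hypothetical signature; adapt to your code):

// The caller owns the storage, so nothing dangles after the function returns.
void aes128_enc_sendMode(unsigned char* data, unsigned short len, aes128_ctx_t* key,
                         const unsigned char* iv, uint8_t chain_out[16]) {
    unsigned char tmp[16];
    memcpy(chain_out, iv, 16);
    while (len >= 16) {
        memcpy(tmp, data, 16);
        for (unsigned char i = 0; i < 16; i++)
            tmp[i] ^= chain_out[i];   // CBC: XOR plaintext with the previous block
        aes128_enc(tmp, key);
        memcpy(data, tmp, 16);        // ciphertext back into the caller's buffer
        memcpy(chain_out, tmp, 16);   // last ciphertext block becomes the chain
        len -= 16;
        data += 16;
    }
}

Called as, e.g.:

uint8_t chain[16];
aes128_enc_sendMode(bytes, _lenRnd, &ctx, _ivect, chain);
memcpy(_ivect, chain, _lenRnd);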
So, I have a little problem with the CM_Get_Device_Interface_List function. The function returns error code 3, which is CR_INVALID_POINTER. But when I call the CM_Get_Device_Interface_List_Size function, it returns success.
ULONG length = 0;
PWSTR DevicePath = NULL;
CONFIGRET cr = CR_SUCCESS;
cr = CM_Get_Device_Interface_List_Size(&length, (LPGUID)&HWN_DEVINTERFACE_NLED, NULL, CM_GET_DEVICE_INTERFACE_LIST_PRESENT); // success
if (cr != CR_SUCCESS)
{
    // error handling
}
cr = CM_Get_Device_Interface_List((LPGUID)&HWN_DEVINTERFACE_NLED, NULL, DevicePath, length, CM_GET_DEVICE_INTERFACE_LIST_PRESENT); // error
if (cr != CR_SUCCESS)
{
    // error handling
}

DEFINE_GUID(HWN_DEVINTERFACE_NLED,
    0x6b2a25e2, 0xaaf5, 0x482c, 0x99, 0xa5, 0x62, 0x05, 0xcd, 0xcc, 0x17, 0x6a); // GUID declaration
So why is the pointer invalid?
A bit late, and you've probably figured this out, but in case someone else comes across this: I believe the reason you are getting the invalid pointer is that you are passing a null pointer as the buffer (DevicePath). It must be allocated with the size returned by your first call.
Example (cleaned up a bit):
ULONG bufferSize = 0;
if (CM_Get_Device_Interface_List_Size(&bufferSize, (LPGUID)&HWN_DEVINTERFACE_NLED, NULL, CM_GET_DEVICE_INTERFACE_LIST_PRESENT) == CR_SUCCESS)
{
    // The size is returned in characters, so allocate that many WCHARs.
    PWSTR buffer = (PWSTR)malloc(bufferSize * sizeof(WCHAR));
    if (CM_Get_Device_Interface_List((LPGUID)&HWN_DEVINTERFACE_NLED, NULL, buffer, bufferSize, CM_GET_DEVICE_INTERFACE_LIST_PRESENT) == CR_SUCCESS)
    {
        // buffer should now contain a list of NULL-terminated Unicode strings
    }
    if (buffer)
    {
        free(buffer);
    }
}
Alternate Example (no malloc):
#define BUFFER_SIZE 4096 // a 4096-character buffer should be plenty

WCHAR buffer[BUFFER_SIZE];
if (CM_Get_Device_Interface_List((LPGUID)&HWN_DEVINTERFACE_NLED, NULL, buffer, BUFFER_SIZE, CM_GET_DEVICE_INTERFACE_LIST_PRESENT) == CR_SUCCESS)
{
    // buffer should now contain a list of NULL-terminated Unicode strings
}
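One caveat with the two-call pattern: the interface list can change between the size query and the list query, in which case the second call returns CR_BUFFER_SMALL. A defensive sketch that loops until the two calls agree (same GUID and flags as above):

PWSTR buffer = NULL;
CONFIGRET cr;
do
{
    ULONG bufferSize = 0;
    cr = CM_Get_Device_Interface_List_Size(&bufferSize, (LPGUID)&HWN_DEVINTERFACE_NLED,
                                           NULL, CM_GET_DEVICE_INTERFACE_LIST_PRESENT);
    if (cr != CR_SUCCESS)
        break;
    free(buffer);
    buffer = (PWSTR)malloc(bufferSize * sizeof(WCHAR));
    if (buffer == NULL)
        break;
    cr = CM_Get_Device_Interface_List((LPGUID)&HWN_DEVINTERFACE_NLED, NULL, buffer,
                                      bufferSize, CM_GET_DEVICE_INTERFACE_LIST_PRESENT);
} while (cr == CR_BUFFER_SMALL); // list grew between the calls; query the size again

if (cr == CR_SUCCESS)
{
    // buffer holds the list of NULL-terminated interface path strings
}
free(buffer);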
I'm trying to add an additional packet in my MyRecv function, but it isn't working and I don't know why. I tried parsing incoming packets, and that works fine.
So my way of sending a custom packet to the application probably isn't right.
In short, I just want to send a prepared packet to the application.
I took this packet from WPE PRO.
Code with MyRecv function:
INT WINAPI MyRecv(SOCKET sock, CHAR* buf, INT len, INT flags) {
    CHAR buffer[256];
    char msg2[] = { 0x1B, 0, 0x04, 0x06, 0, 0x5A, 0x65, 0x6E, 0x74, 0x61,
                    0x78, 0x06, 0, 0x5A, 0x65, 0x6E, 0x74, 0x61, 0x78, 0x05, 0x07, 0,
                    0x66, 0x61, 0x6A, 0x6E, 0x69, 0x65, 0x65 };
    int ret = precv(sock, buf, len, flags);
    if (ret <= 0) {
        return ret;
    }
    if (fake_recv) {
        char tmp[256];
        fake_recv = false;
        printf("Fake1-> Length:%d Size:%d", len, strlen(buf));
        strcat(buf, msg2);
        printf("Fake2-> Length:%d Size:%d", len, strlen(buf));
        return ret;
    }
    return ret;
}
msg2 isn't a null-terminated string; in fact, it has an interior null. So using strlen() and strcat() with it is never going to work.
Similarly, you neither know nor care what's already in buf, so calling strcat() and strlen() on it is both pointless and dangerous: if it contains no nulls at all, you will over-run it, at best over-report the length, and at worst crash.
And you're not adjusting ret for the extra data added into the buffer.
And no useful purpose is accomplished by declaring the unused tmp[] variable.
Try this:
if (fake_recv) {
    fake_recv = false;
    printf("Fake1-> Length:%d Received:%d", len, ret);
    int len2 = min(len - ret, sizeof msg2);
    memcpy(&buf[ret], msg2, len2);
    ret += len2;
    printf("Fake2-> Length:%d Received:%d", len, ret);
    return ret;
}
I am trying to decrypt something using 128-bit AES decryption. When I attempt to call CryptDecrypt, I get an error stating "Invalid Algorithm Specified". I get the same problem when using the library posted here: http://www.codeproject.com/KB/security/WinAES.aspx
What can cause this error?
I am using CryptoAPI on Vista 64-bit with Visual Studio 2008. I checked in the registry, and the AES library is there...
EDIT
BYTE*& encryptedData /* get data length */

HCRYPTPROV cryptoHandle = NULL;
HCRYPTKEY aesKeyHandle = NULL;
hr = InitWinCrypt(cryptoHandle);
if(FAILED(hr))
{
    return hr;
}

AesKeyOffering aesKey = { {PLAINTEXTKEYBLOB, CUR_BLOB_VERSION, 0, CALG_AES_128}, 16,
    { 0xFF, 0x00, 0xFF, 0x1C, 0x1D, 0x1E, 0x03, 0x04, 0x05, 0x0F, 0x20, 0x21, 0xAD, 0xAF, 0xA4, 0x04 } };

if(CryptImportKey(cryptoHandle, (CONST BYTE*)&aesKey, sizeof(AesKeyOffering), NULL, 0, &aesKeyHandle) == FALSE)
{
    // DO error
    return HRESULT_FROM_WIN32(GetLastError());
}

if(CryptSetKeyParam(aesKeyHandle, KP_IV, { 0xFF, 0x00, 0xFF, 0x1C, 0x1D, 0x1E, 0x03, 0x04, 0x05, 0x0F, 0x20, 0x21, 0xAD, 0xAF, 0xA4, 0x04 }, 0) == FALSE)
{
    return HRESULT_FROM_WIN32(GetLastError());
}

BYTE blah2 = CRYPT_MODE_CBC;
// set block mode
if(CryptSetKeyParam(aesKeyHandle, KP_MODE, &blah2, 0) == FALSE)
{
    //
    return HRESULT_FROM_WIN32(GetLastError());
}

DWORD lol = dataLength / 16 + 1;
DWORD lol2 = lol * 16;
if(CryptDecrypt(aesKeyHandle, 0, TRUE, 0, encryptedData, &lol2) == FALSE)
{
    return HRESULT_FROM_WIN32(GetLastError());
}
The InitWinCrypt function:
if(!CryptAcquireContextW(&cryptoHandle, NULL, L"Microsoft Enhanced RSA and AES Cryptographic Provider", PROV_RSA_AES, CRYPT_VERIFYCONTEXT))
{
    if(!CryptAcquireContextW(&cryptoHandle, NULL, L"Microsoft Enhanced RSA and AES Cryptographic Provider", PROV_RSA_AES, 0))
    {
        return HRESULT_FROM_WIN32(GetLastError());
    }
    else
    {
        return S_OK;
    }
}
return S_OK;
The AesKeyOffering struct:
struct AesKeyOffering
{
    BLOBHEADER m_Header;
    DWORD m_KeyLength;
    BYTE Key[16];
};
EDIT2
After rebooting my computer and removing the CBC chunk, I am now getting Bad Data errors. The data decrypts fine in C#, but I need to do this using WinCrypt.
Are you passing cryptoHandle by reference to InitWinCrypt? If not, your code
if(!CryptAcquireContextW(&cryptoHandle, ...
would only modify InitWinCrypt's local copy of cryptoHandle.
EDIT: Given that it does, try getting rid of the CryptSetKeyParam call which sets CRYPT_MODE_CBC.
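Unrelated to the provider problem, the KP_IV call as posted also wouldn't compile: CryptSetKeyParam takes a pointer to the data, not a brace-enclosed initializer list. A minimal sketch of that one call, using the same IV bytes as in the question:

// The IV has to live in a named buffer so its address can be passed.
BYTE iv[16] = { 0xFF, 0x00, 0xFF, 0x1C, 0x1D, 0x1E, 0x03, 0x04,
                0x05, 0x0F, 0x20, 0x21, 0xAD, 0xAF, 0xA4, 0x04 };
if(!CryptSetKeyParam(aesKeyHandle, KP_IV, iv, 0))
{
    return HRESULT_FROM_WIN32(GetLastError());
}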