openssl command encrypt data does not match EVP_aes_128_cbc - c++

I tried the following C implementation of Openssl EVP function for AES-128-CBC encryption but the results I am getting are incorrect compared to the command line OpenSSL result.
I referenced the code on the site below.
https://wiki.openssl.org/index.php/EVP_Symmetric_Encryption_and_Decryption
C code implementation of AES-128-CBC:
#include <openssl/conf.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
void handleErrors(void)
{
ERR_print_errors_fp(stderr);
abort();
}
int encrypt(unsigned char *plaintext, int plaintext_len, unsigned char *key,
unsigned char *iv, unsigned char *ciphertext)
{
EVP_CIPHER_CTX *ctx;
int len;
int ciphertext_len;
if(!(ctx = EVP_CIPHER_CTX_new()))
handleErrors();
if(1 != EVP_EncryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv))
handleErrors();
if(1 != EVP_EncryptUpdate(ctx, ciphertext, &len, plaintext, plaintext_len))
handleErrors();
ciphertext_len = len;
if(1 != EVP_EncryptFinal_ex(ctx, ciphertext + len, &len))
handleErrors();
ciphertext_len += len;
/* Clean up */
EVP_CIPHER_CTX_free(ctx);
return ciphertext_len;
}
int decrypt(unsigned char *ciphertext, int ciphertext_len, unsigned char *key,
unsigned char *iv, unsigned char *plaintext)
{
EVP_CIPHER_CTX *ctx;
int len;
int plaintext_len;
/* Create and initialise the context */
if(!(ctx = EVP_CIPHER_CTX_new()))
handleErrors();
if(1 != EVP_DecryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv))
handleErrors();
if(1 != EVP_DecryptUpdate(ctx, plaintext, &len, ciphertext, ciphertext_len))
handleErrors();
plaintext_len = len;
if(1 != EVP_DecryptFinal_ex(ctx, plaintext + len, &len))
handleErrors();
plaintext_len += len;
/* Clean up */
EVP_CIPHER_CTX_free(ctx);
return plaintext_len;
}
int main (void)
{
/* A 128 bit key */
unsigned char *key = (unsigned char *)"A[u#AD22ZAWm/3=E";
/* A 128 bit IV */
unsigned char *iv = (unsigned char *)"0";
/* Message to be encrypted */
unsigned char *plaintext =
(unsigned char *)"The quick brown fox jumps over the lazy dog";
/*
* Buffer for ciphertext. Ensure the buffer is long enough for the
* ciphertext which may be longer than the plaintext, depending on the
* algorithm and mode.
*/
unsigned char ciphertext[128];
/* Buffer for the decrypted text */
unsigned char decryptedtext[128];
int decryptedtext_len, ciphertext_len;
/* Encrypt the plaintext */
ciphertext_len = encrypt (plaintext, strlen ((char *)plaintext), key, iv,
ciphertext);
/* Do something useful with the ciphertext here */
printf("Ciphertext is:\n");
BIO_dump_fp (stdout, (const char *)ciphertext, ciphertext_len);
/* Decrypt the ciphertext */
decryptedtext_len = decrypt(ciphertext, ciphertext_len, key, iv,
decryptedtext);
/* Add a NULL terminator. We are expecting printable text */
decryptedtext[decryptedtext_len] = '\0';
/* Show the decrypted text */
printf("Decrypted text is:\n");
printf("%s\n", decryptedtext);
return 0;
}
Output:
Ciphertext is:
0000 - 4f 21 68 b5 e9 90 b8 82-71 f2 d2 cd 60 2e fd f1 O!h.....q...`...
0010 - 48 10 d0 e4 f2 58 86 96-22 fa a5 d2 b6 9a 66 87 H....X..".....f.
0020 - e8 e6 bd 9e 73 ad 1e 0e-ec 0a f0 8b 11 83 9d 04 ....s...........
Decrypted text is:
The quick brown fox jumps over the lazy dog
OpenSSL command line:
echo "The quick brown fox jumps over the lazy dog" | openssl enc -e -aes-128-cbc -K 415b7540414432325a41576d2f333d45 -iv 30000000000000000000000000000000 | xxd
Updated WRT suggestion :
echo -n "The quick brown fox jumps over the lazy dog" | openssl enc -e -aes-128-cbc -K 415b7540414432325a41576d2f333d45 -iv 30 | xxd
OpenSSL Output:
00000000: 29d1 cf44 9246 fd7c 48a9 2261 afc1 6b34 )..D.F.|H."a..k4
00000010: ca5c c44d eddd 27a0 d64d 9679 81e4 fcb1 .\.M..'..M.y....
00000020: 9f10 0e47 ef73 37fa 5aa8 1a85 ec70 05c6 ...G.s7.Z....p..
Why are the output results different ( key and iv are both in hex) ?
I understand that the usage of iv "0" isnt recommended.
But if the iv is "0" what should be the hex value considered to match the encryption in command line.
PS : changing IV in code isnt an option for me :(

Related

Getting different values when encrypting in NodeJS versus OpenSSL C++

I am working on client-server communications and am stuck on making sure that both sides come up with the same encrypted token value. Cannot figure out why they are different. The keys and initialization vectors along with the message itself are all the same.
Here is the function that does encryption in client code:
int main()
{
try
{
std::string message = "HelloWorld";
while ((message.size() & 0xf) != 0xf)
message += " ";
size_t inputslength = message.length();
unsigned char aes_input[inputslength];
memset(aes_input, 0, inputslength/8);
strcpy((char*) aes_input, message.c_str());
unsigned char iv[] = {'0','f','9','3','8','7','b','3','f','9','4','b','f','0','6','f'};
unsigned char aes_key[] = {'Z','T','k','0','Y','T','U','5','Y','j','N','h','M','j','k','4','N','G','I','3','N','m','I','x','N','W','E','x','N','z','d','i'};
// buffers for encryption and decryption
const size_t encslength = ((inputslength + AES_BLOCK_SIZE) / AES_BLOCK_SIZE) * AES_BLOCK_SIZE;
unsigned char enc_out[encslength];
unsigned char dec_out[inputslength];
memset(enc_out, 0, sizeof(enc_out));
memset(dec_out, 0, sizeof(dec_out));
AES_KEY enc_key, dec_key;
AES_set_encrypt_key(aes_key, AES_KEYLENGTH, &enc_key);
AES_cbc_encrypt(aes_input, enc_out, inputslength, &enc_key, iv, AES_ENCRYPT);
AES_set_decrypt_key(aes_key, AES_KEYLENGTH, &dec_key);
AES_cbc_encrypt(enc_out, dec_out, encslength, &dec_key, iv, AES_DECRYPT);
printf("original:\t");
hex_print(aes_input, sizeof(aes_input));
printf("encrypt:\t");
hex_print(enc_out, sizeof(enc_out));
printf("decrypt:\t");
hex_print(dec_out, sizeof(dec_out));
std::stringstream ss;
for(int i = 0; i < encslength; i++)
{
ss << enc_out[i];
}
return 0;
}
}
Output
original: 48 65 6C 6C 6F 57 6F 72 6C 64 20 20 20 20 20
encrypt: 72 70 A2 0D FB A1 65 15 17 97 6E 5D 36 23 E2 FA
decrypt: 0A 73 F7 52 AC C1 68 54 1D CA 7A 1F 70 33 F4
Meanwhile.. on the server:
function encryptToken(token)
{
const iv = '0f9387b3f94bf06f';
const key = 'ZTk0YTU5YjNhMjk4NGI3NmIxNWExNzdi';
console.log("key len: " + key.length);
const encrypt = (value) => {
const cipher = crypto.createCipheriv('AES-256-CBC', key, iv);
let encrypted = cipher.update(value, 'utf8', 'hex');
encrypted += cipher.final('hex');
return encrypted;
};
console.log('Encrypteddd value: ', encrypt('HelloWorld'));
}
Output
Encrypteddd value: 0c491f8c5256b9744550688fc54926e8
Before trying CBC-256 for encryption, I tried the simpler encryption mode, ECB-128 and it all comes down to the same problem. Different encryption tokens produced on client and server side which results in not being able to decrypt what comes from the server side. Any brainstorming tips will help please. I am running out of ideas, thanks.
Update 12.26 -
After taking advice on the init vector and array length on client side.. here is my updated code with output:
int main()
{
try
{
std::string message = "HelloWorld";
while ((message.size() & 0xf) != 0xf)
message += " ";
size_t inputslength = message.length();
unsigned char aes_input[inputslength+1];
memset(aes_input, 0, inputslength/8);
strcpy((char*) aes_input, message.c_str());
unsigned char iv[] = {0x0f, 0x93, 0x87, 0xb3, 0xf9, 0x4b, 0xf0, 0x6f};
unsigned char aes_key[] = {'Z','T','k','0','Y','T','U','5','Y','j','N','h','M','j','k','4','N','G','I','3','N','m','I','x','N','W','E','x','N','z','d','i'};
// buffers for encryption and decryption
const size_t encslength = ((inputslength + AES_BLOCK_SIZE) / AES_BLOCK_SIZE) * AES_BLOCK_SIZE;
unsigned char enc_out[encslength];
unsigned char dec_out[inputslength];
memset(enc_out, 0, sizeof(enc_out));
memset(dec_out, 0, sizeof(dec_out));
AES_KEY enc_key, dec_key;
AES_set_encrypt_key(aes_key, AES_KEYLENGTH, &enc_key);
AES_cbc_encrypt(aes_input, enc_out, inputslength, &enc_key, iv, AES_ENCRYPT);
AES_set_decrypt_key(aes_key, AES_KEYLENGTH, &dec_key);
AES_cbc_encrypt(enc_out, dec_out, encslength, &dec_key, iv, AES_DECRYPT);
printf("original:\t");
hex_print(aes_input, sizeof(aes_input));
printf("encrypt:\t");
hex_print(enc_out, sizeof(enc_out));
printf("decrypt:\t");
hex_print(dec_out, sizeof(dec_out));
std::stringstream ss;
for(int i = 0; i < encslength; i++)
{
ss << enc_out[i];
}
return 0;
}
}
//Output:
original: 48 65 6C 6C 6F 57 6F 72 6C 64 00
encrypt: 54 CD 98 20 59 D9 7B 2D D4 23 ED EC D0 13 97 59
Nodejs code has not changed and this remains the output:
Encrypteddd value: 0c491f8c5256b9744550688fc54926e8
So, here's the deal. Every call to AES_cbc_encrypt will change the value of the initialization vector. They do that so that you can chain calls to AES_*_encrypt and handle messages larger than one block. But because the encryption call changes the value of iv, the decryption call is getting a different initialization vector.
One (terrible) solution would be to make two vectors:
unsigned char iv_encrypt[] = { /* stuff */ };
unsigned char iv_decrypt[] = { /* same stuff */ };
That way you'd be passing the same data to each AES_cbc_encrypt call. That would at least show that you can decrypt to the original data. A better, flexible way to achieve your end would be to use a clone of your initialization vector for each call. Something like:
unsigned char iv[] = { /* stuff */ };
unsigned char *tmp_iv = static_cast<unsigned char*>( malloc( sizeof( iv ) ) );
...
memcpy( tmp_iv, iv, sizeof(iv) );
AES_cbc_encrypt(aes_input, enc_out, inputslength, &enc_key, tmp_iv, AES_ENCRYPT);
...
memcpy( tmp_iv, iv, sizeof(iv) );
AES_cbc_encrypt(enc_out, dec_out, inputslength, &dec_key, tmp_iv, AES_DECRYPT);

Using C++ implementation of OpenSSL to decrypt data

I am trying to decrypt AES-256-CBC encrypted data.
I can decrypt the entire file, but I get additional characters at the end. I also get an error from the OpenSSL libraries that says this...
EVP_DecryptFinal_ex:wrong final block length:evp_enc.c:532
Here is my decryption function which is right out of the wiki:
int decrypt(unsigned char *ciphertext, int ciphertext_len, unsigned char *key, unsigned char *iv, unsigned char *plaintext)
{
int plaintext_len = 0;
EVP_CIPHER_CTX *ctx;
int len = 0;
plaintext_len = 0;
int error = 0;
ctx = EVP_CIPHER_CTX_new();
if(ctx == NULL)
{
handleErrors(0,1);
}
error = EVP_DecryptInit_ex(ctx, EVP_aes_256_cbc(), NULL, key, iv);
handleErrors(error,2);
error = EVP_DecryptUpdate(ctx,plaintext, &len, ciphertext, ciphertext_len);
handleErrors(error, 3);
plaintext_len = len;
error = EVP_DecryptFinal_ex(ctx,plaintext + len, &len);
handleErrors(error, 4);
plaintext_len += len;
EVP_CIPHER_CTX_free(ctx);
return plaintext_len;
}
Note that I am passing in raw bytes using the uint8_t type, but converted it for this function. The plain text that is working contains the file, plus some additional characters at the end.
Here is the encrypted data in hex:
aa371af640b8481203c6bdc21bfccf109d047f0ad38daf0f03d1d7650a25616c019cdf9b80cda8980aa99809c2d9346fd1b501ef3b2f3548479840f897c85592ae90f6d5e9c87e428b00d36631963ca311b4486285495d50296a3d370aa0b82322ad0891adf43b262611cc2238769180f2ec8f7fba7dcb48f6efe9fc0f8725d18a7d29c52fc7d40193b7bed18f549f4e23c5def8f49fbf775e41e9c26d6a6ff3685d2f1a5050e660a9903305500dd946b81f49381b8db215b290b8e526eb463bb4cb9af1fdc3df14e6dfcc14eb6824400b6de24ee9e87170554caa825a2306b9d2f75432532e89871e0a4e64b43b22b69ede8127887408877f50e00368aad3c29a370ecbe3533b2e3b6a3de05ad836fc84eb2fd76c7563f24f215697bfc354c4a6e809c7e25f5d2de6df4ddae6dbb05bcd07217125268aab811183359e714df3e696eca1df2783c8855a048e0fa252c80930cf37be2c1e5eac6ac83e819a639dae9972363d7d89dcf2a2b1761888edd6c263b84cf5c6118e7544656f724156d396f8d8b3b6a49123ef633ced82bc9b65fff6f1d39d5e9e2fa32a3c58fc79459a94d237a5361d2a2db1f7b862cb0f081e6328076339392e59734a006718136d6784a12dfe93384e3e48941870bae48c563cb2474fd926963051c273bf26ce4a9d29e00d628719564cbf8769efb9726157c79b29f10644ea5656df99e20f84e0867b559b682453567c970df8ade768b6cd3498001a30160c83dd3efa3fc58c13ecac7e53041dd617905f1a9f02cc249b8c3523c92eace60a0ad9eba29ec973678859643d0d3765d7b9e2e5b58d639c7d2f6109da979981003c2e41e55d270f276b9edcfe0294d0aa34ade2cfb7140b4fbfb5d202b3079af5f7f2ec5c84a34a8d94cf3698a17fa1ee25767e00a8337f0ed6c2af9fc2dfd6dedfec45a23d376e8d1d
Here is the AES encryption key in hex
174126874a7c7f9a44da4f559cfd628586894e86f7e2eb0561a0809b7a294fd4
Here is the IV
603da7b9f9c365219a8121e528e7dddc
The result is
echo Y | format H: /q /x /fs:NTFS
#echo off
echo.
echo.Stopping the Backup Exec services...
echo.
net stop "BackupExecAgentBrowser"
net stop "BackupExecJobEngine"
net stop "BackupExecManagementService"
net stop "BackupExecRPCService"
net stop "BackupExecDeviceMediaService"
net stop "BackupExecAgentAccelerator"
net stop "bedbg"
echo.
echo.Starting the Backup Exec services...
echo.
net start "bedbg"
net start "BackupExecAgentAccelerator"
net start "BackupExecDeviceMediaService"
net start "BackupExecRPCService"
net start "BackupExecManagementService"
net start "BackupExecJobEngine"
net start "BackupExecAgentBrowser"
pause
exitC��Q�H
Notice the odd few characters at the end.
Also note, I've tried the same thing in Python and am getting the same results. I believe it has to do with the padding. But I don't know if I am supposed to pad the encrypted data, and if so, with what data, or what...

Abort trap 6 when returning from main in OS X but NOT on linux

I have a program that currently seems to run fine on Linux (Ubuntu 14.04), but when running on OS X (10.11.6) I get an abort trap 6. I've attached my code but I suspect the problem is not actually tied to the specific code. This code is for a class project, I'm not actually trying to crack passwords or anything.
Here's the code, I believe the all the important stuff happens in main.
#include <openssl/aes.h>
#include <openssl/evp.h>
#include <openssl/conf.h>
#include <openssl/err.h>
#define KEY_BYTES KEY_LENGTH/8
#define KEY_LENGTH 128
unsigned char* h(unsigned char* p, unsigned char* hp);
void handleErrors(void);
int encrypt(unsigned char *plaintext, int plaintext_len, unsigned char *key,
unsigned char *iv, unsigned char *ciphertext);
//assumes we've already padded with zeros
unsigned char* h(unsigned char* p, unsigned char *hp){
encrypt((unsigned char*)"0000000000000000", KEY_BYTES, p , (unsigned char*)"0000000000000000", hp);
return hp;
}
void handleErrors(void)
{
printf("panic!!\n");
//ERR_print_errors_fp(stderr); //sg: throw a real error you fool!
abort();
}
//sg: stolen from the evp man page
int encrypt(unsigned char *plaintext, int plaintext_len, unsigned char *key,
unsigned char *iv, unsigned char *ciphertext)
{
EVP_CIPHER_CTX *ctx;
int len;
int ciphertext_len;
/* Create and initialise the context */
if(!(ctx = EVP_CIPHER_CTX_new())) handleErrors();
/* Initialise the encryption operation. IMPORTANT - ensure you use a key
* and IV size appropriate for your cipher
* In this example we are using 256 bit AES (i.e. a 256 bit key). The
* IV size for *most* modes is the same as the block size. For AES this
* is 128 bits */
if(1 != EVP_EncryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv))
handleErrors();
/* Provide the message to be encrypted, and obtain the encrypted output.
* EVP_EncryptUpdate can be called multiple times if necessary
*/
if(1 != EVP_EncryptUpdate(ctx, ciphertext, &len, plaintext, plaintext_len))
handleErrors();
ciphertext_len = len;
/* Finalise the encryption. Further ciphertext bytes may be written at
* this stage.
*/
if(1 != EVP_EncryptFinal_ex(ctx, ciphertext + len, &len)) handleErrors();
ciphertext_len += len;
/* Clean up */
EVP_CIPHER_CTX_free(ctx);
return ciphertext_len;
}
int main(){
/* Initialise the library */
ERR_load_crypto_strings();
OpenSSL_add_all_algorithms();
OPENSSL_config(NULL);
EVP_CIPHER_CTX *ctx;
unsigned char hp[KEY_BYTES];
/* Create and initialise the context */
if(!(ctx = EVP_CIPHER_CTX_new())) handleErrors();
h((unsigned char*) "1111111111111111", hp);
for(int i = 0; i < KEY_BYTES; i++){
printf("h(%i) = %x\n", i, hp[i]);
}
return 0;
}
When run on linux I get the following (which is what I expect)
h(0) = 10
h(1) = df
h(2) = c1
h(3) = b5
h(4) = f6
h(5) = 6c
h(6) = fd
h(7) = 6a
h(8) = 1d
h(9) = c4
h(10) = 6d
h(11) = 66
h(12) = 90
h(13) = 7b
h(14) = ee
h(15) = b1
However when I run on OS X I get the following:
h(0) = 10
h(1) = df
h(2) = c1
h(3) = b5
h(4) = f6
h(5) = 6c
h(6) = fd
h(7) = 6a
h(8) = 1d
h(9) = c4
h(10) = 6d
h(11) = 66
h(12) = 90
h(13) = 7b
h(14) = ee
h(15) = b1
Abort trap: 6
When I pop this into gdb I get the following
(gdb) r
Starting program: /Users/sgillen/Code/457/proj3/a.out
h(0) = 10
h(1) = df
h(2) = c1
h(3) = b5
h(4) = f6
h(5) = 6c
h(6) = fd
h(7) = 6a
h(8) = 1d
h(9) = c4
h(10) = 6d
h(11) = 66
h(12) = 90
h(13) = 7b
h(14) = ee
h(15) = b1
Program received signal SIGABRT, Aborted.
0x00007fff93150f06 in __pthread_kill () from /usr/lib/system/libsystem_kernel.dylib
(gdb) where
#0 0x00007fff93150f06 in __pthread_kill () from /usr/lib/system/libsystem_kernel.dylib
#1 0x00007fff97b374ec in pthread_kill () from /usr/lib/system/libsystem_pthread.dylib
#2 0x00007fff9ba8077f in __abort () from /usr/lib/system/libsystem_c.dylib
#3 0x00007fff9ba8105e in __stack_chk_fail () from /usr/lib/system/libsystem_c.dylib
#4 0x0000000100000ea9 in main () at gen_table.cpp:90
Not sure how to do line numbers on stack overflow, but line 90 of gen_table.cpp is that last return 0 in main.
I compile my code with the following if that's relevant.
clang -Wall -std=c++11 gen_table.cpp -I/usr/local/opt/openssl/include/ -lcrypto -lssl -g
Any help would be greatly appreciated thank you!
I found the answer to my question I figured I'd post the answer for anyone else who somehow runs into the same problem. The issue was that I was overwriting my own stack. The encryption function I was using was actually writing 32 bytes to hp (which was a 16 byte unsigned char living on the stack). So I'd destroy my own stack but not write to any memory not owned by my process. This resulted in no seg faults but when the program tried to return there were eventually problems. The exact thing that killed me changed depending on how I compiled my code.
I'm actually very surprised valgrind didn't catch this. And I still don't know why it seemed to work fine on linux when compiled with clang (compiled with g++ I got a stack-smashing detected error).
edit:
To be clear the solution was to fix my implementation of encrypt so that it only writes 16 bytes. which I simply did by commenting out the EVP_EncryptFinal_ex call.

C++ Des encryption with 16 bytes key

I'm trying to encrypt in DES a text with dynamic length with a 16 bytes key, but there is a problem with the block size of the key and text, i'm using openssl library for DES encryption. How can I use keys with 16 bytes of length.
Here my example:
char * Encrypt( char Key, char *Msg, int size) {
static char* Res;
DES_cblock Key2;
DES_key_schedule schedule;
Res = ( char * ) malloc( size );
memcpy(Key2, Key, 8);
DES_set_odd_parity( &Key2 );
DES_set_key_checked( &Key2, &schedule );
unsigned char buf[9];
buf[8] = 0;
DES_ecb_encrypt(( DES_cblock ) &Msg, ( DES_cblock ) &buf, &schedule, DES_ENCRYPT );
memcpy(Res, buf, sizeof(buf));
return (Res);
}
int main(int argc, char const *argv[]) {
char key[] = "password";
char clear[] = "This is a secret message";
char *encrypted;
encrypted = (char *) malloc(sizeof(clear));
printf("Clear text\t : %s \n",clear);
memcpy(encrypted, Encrypt(key, clear, sizeof(clear)), sizeof(clear));
printf("Encrypted text\t : %s \n",encrypted);
return 0;
}
DES has a 8-byte 56-bit key (the LSB is not used as part of the key, it is for parity) so you can't use a 16-byte key (parity is generally ignored).
Don't use DES, it is not secure and has been replaced with AES.
Don't use ECB mode, it is insecure, see ECB mode, scroll down to the Penguin.
AES allows 128, 192 and 256 bit keys.

BIO_dump_fp causing hundreds of valgrind errors

I'm trying to use the OpenSSL library for AES encryption. Everything compiles and seems to work fine. However, when I use BIO_dump_fp(stdout, (char*)ciphertext, ciphertext_len) valgrind ends up reporting hundreds of errors, mostly "conditional jump of move depends on uninitialized value(s)" errors, like this one:
Conditional jump or move depends on uninitialised value(s)
at 0x579A9C3: fwrite (iofwrite.c:49)
by 0x4F187B0: ??? (in /lib/x86_64-linux-gnu/libcrypto.so.1.0.0)
by 0x4F18AC4: BIO_dump_indent_cb (in /lib/x86_64-linux-gnu/libcrypto.so.1.0.0)
by 0x401748: main (in /home/van/Desktop/aes-test/temp/test)
Can these errors be safely ignored (i.e. are these false positives)? If it matters, I'm using Ubuntu 14.04, g++ version 4.8.2, valgrind 3.10.
UPDATE: my full source code is as follows:
#include <stdio.h>
#include "QAesHelper.h"
int main(int argc, char *argv[])
{
unsigned char iv[] = "1234567812345678";
unsigned char key[] = "Testing Testing...";
printf("Size of key: %d\n", (int)sizeof(key));
unsigned char plaintext[] = "The quick brown fox jumps over the lazy dog";
int plaintext_len = sizeof(plaintext);
printf("Size of plaintext: %d\n", plaintext_len);
unsigned char *ciphertext = (unsigned char*)malloc(plaintext_len + 32);
unsigned char *decryptedtext = (unsigned char*)malloc(plaintext_len + 2);
QAesHelper *aesHelper = new QAesHelper(key, sizeof(key));
int ciphertext_len = aesHelper->encrypt(plaintext, plaintext_len, iv, sizeof(iv), &ciphertext);
int decryptedtext_len = aesHelper->decrypt(ciphertext, ciphertext_len + 1, iv, sizeof(iv), &decryptedtext);
// If I remove the following line (BIO_dump_fp...), then
// valgrind reports no errors. With this line left in, there
// are over 900 errors reported.
BIO_dump_fp(stdout, (char*)ciphertext, ciphertext_len);
delete aesHelper;
free(ciphertext);
free(decryptedtext);
return 0;
}
And QAesHelper::encrypt() is:
int QAesHelper::encrypt(unsigned char *plaintext, int plaintext_len, unsigned char *iv, int iv_len, unsigned char **ciphertext)
{
EVP_CIPHER_CTX *ctx;
int len;
int ciphertext_len;
if(!(ctx = EVP_CIPHER_CTX_new())) handleErrors();
if(1 != EVP_EncryptInit_ex(ctx, EVP_aes_256_cbc(), NULL, key, iv))
handleErrors();
if(1 != EVP_EncryptUpdate(ctx, *ciphertext, &len, plaintext, plaintext_len))
handleErrors();
ciphertext_len = len;
if(1 != EVP_EncryptFinal_ex(ctx, *ciphertext + len, &len)) handleErrors();
ciphertext_len += len;
EVP_CIPHER_CTX_free(ctx);
return ciphertext_len;
}
Its not an error. OpenSSL heavily uses uninitialized memory segments.
Valgrind treats such usage as error and warns about it. Its normal behaviour which can be beautified in some extent:
write and use valgrind suppression file valgrind --gen-suppressions=no|yes|all
compile openssl with PURIFY macro enabled in cflags
push to valgrind --error-limit=no and ignore warnings from libssl/libcrypto