Encryption (Rijndael Cipher) with C/C++ in Android NDK

I want to convert my C# (managed) decryption method to C/C++ in the Android NDK (no Java).
I see there is crypto on the Java side, but I want to stay away from any JNI. I also see there are mcrypt and crypt++, but I can't find a compiled lib for Android.
Here is the C# example I want to translate to C/C++:
public byte[] DecryptBytes(byte[] encryptedBytes)
{
    RijndaelManaged RijndaelCipher = new RijndaelManaged();
    RijndaelCipher.Mode = CipherMode.CBC;
    ICryptoTransform Decryptor = RijndaelCipher.CreateDecryptor(Bytes32_KEY, Bytes16_IV);
    MemoryStream memoryStream = new MemoryStream(encryptedBytes);
    CryptoStream cryptoStream = new CryptoStream(memoryStream, Decryptor, CryptoStreamMode.Read);
    byte[] plainBytes = new byte[encryptedBytes.Length];
    int DecryptedCount = cryptoStream.Read(plainBytes, 0, plainBytes.Length);
    memoryStream.Close();
    cryptoStream.Close();
    return plainBytes;
}
UPDATE
The best I've found so far is to use OpenSSL's AES. I have downloaded a pre-compiled lib for Android, but I'm struggling to get it to work with the example others have posted as working. Here is the C code example:
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <openssl/aes.h>   // AES_set_*_key, AES_cbc_encrypt

void test_enc() {
    int keylength = 256;

    // 256-bit key
    uint8_t key[32] = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                        0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
                        0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
                        0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F };

    // 128-bit IV
    uint8_t iv[16] = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                       0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F };

    // input data
    uint8_t input[64] = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                          0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 };
    size_t inputslength = 10;
    int x;

    uint8_t *aes_key = key;
    uint8_t *aes_input = input;
    uint8_t *iv_enc = iv;
    uint8_t *iv_dec = iv;

    // buffers for encryption and decryption
    const size_t encslength = ((inputslength + AES_BLOCK_SIZE) / AES_BLOCK_SIZE) * AES_BLOCK_SIZE;
    uint8_t *enc_out = (uint8_t*)malloc(sizeof(uint8_t) * encslength);
    uint8_t *dec_out = (uint8_t*)malloc(sizeof(uint8_t) * inputslength);
    memset(enc_out, 0, encslength);
    memset(dec_out, 0, inputslength);

    // so I can do aes-cbc-128, aes-cbc-192 and aes-cbc-256 with this
    AES_KEY enc_key, dec_key;
    AES_set_encrypt_key(aes_key, keylength, &enc_key);
    AES_cbc_encrypt(input, enc_out, inputslength, &enc_key, iv_enc, AES_ENCRYPT);

    AES_set_decrypt_key(aes_key, keylength, &dec_key);
    AES_cbc_encrypt(enc_out, dec_out, encslength, &dec_key, iv_dec, AES_DECRYPT);

    LOGI("Before:");
    for (x = 0; x < inputslength; x++)
        LOGI("%02x, ", input[x]);

    LOGI("Encrypted:");
    for (x = 0; x < encslength; x++)
        LOGI("%02x, ", enc_out[x]);

    LOGI("Decrypted:");
    for (x = 0; x < encslength; x++)
        LOGI("%02x, ", dec_out[x]);
}
The encrypted bytes aren't the same as in C#, and the decryption doesn't get back to the input. Where have I gone wrong?

SOLVED:
The issue is that the array holding the IV gets altered by the encryption call, so you need to reset it before you decrypt.
I used the pre-built OpenSSL for Android found here: OpenSSL-for-Android-Prebuilt
With the code above, just remember to set the IV before each call to AES_cbc_encrypt.
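A minimal sketch of that fix, assuming the same variable names as in the example above but giving the encrypt and decrypt calls their own working copies of the IV (AES_cbc_encrypt overwrites the IV buffer it is passed):

// Keep the original iv untouched; hand each call its own copy.
uint8_t iv_enc[AES_BLOCK_SIZE], iv_dec[AES_BLOCK_SIZE];
memcpy(iv_enc, iv, AES_BLOCK_SIZE);   // fresh IV for encryption
memcpy(iv_dec, iv, AES_BLOCK_SIZE);   // fresh IV for decryption

AES_set_encrypt_key(key, keylength, &enc_key);
AES_cbc_encrypt(input, enc_out, inputslength, &enc_key, iv_enc, AES_ENCRYPT);

AES_set_decrypt_key(key, keylength, &dec_key);
AES_cbc_encrypt(enc_out, dec_out, encslength, &dec_key, iv_dec, AES_DECRYPT);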

OpenSSL is quite a big library (if you care about APK size).
You can use Tiny AES in C instead (it can be used from C++).
I have added it to an Android project (CMake):
https://github.com/anonym24/Android-Tiny-AES-NDK
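For reference, a minimal sketch of a CBC-256 decrypt with it, assuming the kokke/tiny-AES-c API that the linked project wraps (AES_init_ctx_iv / AES_CBC_decrypt_buffer, built with AES256 and CBC enabled):

#include <stdint.h>
#include "aes.h"   // tiny-AES-c header (assumed); compile with -DAES256=1 -DCBC=1

// Decrypts buf in place with AES-256-CBC. The library does no padding,
// so length must be a multiple of 16 and any PKCS#7 padding has to be
// stripped by the caller afterwards.
void decrypt_cbc_256(uint8_t *buf, uint32_t length,
                     const uint8_t key[32], const uint8_t iv[16])
{
    struct AES_ctx ctx;
    AES_init_ctx_iv(&ctx, key, iv);
    AES_CBC_decrypt_buffer(&ctx, buf, length);
}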

Related

create helper functions with parameter of type char

Below is a sketch inspired by an example, stripped down to the bare minimum to fit my needs:
#include <Crypto.h>
#include <AES.h>
#include <string.h>

struct TestVector
{
    const char *name;
    byte key[32];
    byte plaintext[16];
    byte ciphertext[16];
};

// Define the ECB test vectors from the FIPS specification.
static TestVector const testVectorAES128 = {
    .name = "AES-128-ECB",
    .key = {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
            0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F},
    .plaintext = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
                  0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF},
    .ciphertext = {0x69, 0xC4, 0xE0, 0xD8, 0x6A, 0x7B, 0x04, 0x30,
                   0xD8, 0xCD, 0xB7, 0x80, 0x70, 0xB4, 0xC5, 0x5A}
};

AES128 aes128;
byte buffer[16];

void testCipher(BlockCipher *cipher, const struct TestVector *test)
{
    crypto_feed_watchdog();
    Serial.print(test->name);
    Serial.print(" Encryption ... ");
    cipher->setKey(test->key, cipher->keySize());
    cipher->encryptBlock(buffer, test->plaintext);
    if (memcmp(buffer, test->ciphertext, 16) == 0)
        Serial.println("Passed");
    else
        Serial.println("Failed");

    Serial.print(test->name);
    Serial.print(" Decryption ... ");
    cipher->decryptBlock(buffer, test->ciphertext);
    if (memcmp(buffer, test->plaintext, 16) == 0)
        Serial.println("Passed");
    else
        Serial.println("Failed");
}

void setup()
{
    Serial.begin(9600);
    delay(3000);

    Serial.println();
    Serial.println(sizeof(AES128));
    Serial.println();

    testCipher(&aes128, &testVectorAES128);
    Serial.println();
}

void loop()
{
}
It works.
I would like to modify this so that I end up with two functions, encrypt() and decrypt(), which both take a char array as a parameter.
The sketch above has .plaintext and .ciphertext hardcoded as constant arrays of hex bytes. How do I make these dynamic/variable, in order to feed encrypt() and decrypt()? I am not comfortable with C++.
My project is to have this sketch fill a var like char hushhush[123] = "s0m3 3ncrypt3d stuff", then send the content of hushhush to a webserver/API running PHP and decrypt it there with openssl_decrypt() (https://www.php.net/manual/en/function.openssl-decrypt.php).
My second problem will be: openssl_decrypt() needs the key to be of type string $passphrase. What is the "string" of 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F?
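A minimal sketch of what such encrypt()/decrypt() helpers could look like, reusing the aes128 object from the sketch above; it assumes the buffer length is a multiple of the 16-byte block size and sticks to ECB with no padding, and the function names are just placeholders:

// Hypothetical helpers built on the Arduino Crypto AES128 block cipher.
// They walk the buffer one 16-byte block at a time.
void encryptBuffer(const byte key[16], const byte *in, byte *out, size_t len)
{
    aes128.setKey(key, aes128.keySize());
    for (size_t off = 0; off + 16 <= len; off += 16)
        aes128.encryptBlock(out + off, in + off);
}

void decryptBuffer(const byte key[16], const byte *in, byte *out, size_t len)
{
    aes128.setKey(key, aes128.keySize());
    for (size_t off = 0; off + 16 <= len; off += 16)
        aes128.decryptBlock(out + off, in + off);
}

As for the second question, the "string" form of those key bytes on the PHP side is just the same raw bytes, which can be produced for example with hex2bin('000102030405060708090a0b0c0d0e0f').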

Separate uint8_t array containing UTF-16 BE text by delimiter

I am receiving a buffer that contains UTF-16 BE encoded text like this:
uint8_t rx_data[length] = {0x04, 0x24, 0x04, 0x30, 0x04, 0x3C, 0x04, 0x38, 0x04, 0x3B,
0x04, 0x38, 0x04, 0x4F, 0x00, 0x0A, 0x04, 0x18, 0x04, 0x3C, 0x04, 0x4F,
0x00, 0x0A, 0x04, 0x1E, 0x04, 0x42, 0x04, 0x47, 0x04, 0x35, 0x04, 0x41,
0x04, 0x42, 0x04, 0x32, 0x04, 0x3E};
The buffer contains three text strings separated by "\n" ({0x00, 0x0A} in the buffer).
How can I split this text into strings on the newline, so that I get something like this:
uint8_t str1[] = {0x04, 0x24, 0x04, 0x30, 0x04, 0x3C, 0x04, 0x38, 0x04, 0x3B,
0x04, 0x38, 0x04, 0x4F};
uint8_t str2[] = {0x04, 0x18, 0x04, 0x3C, 0x04, 0x4F};
uint8_t str3[] = {0x04, 0x1E, 0x04, 0x42, 0x04, 0x47, 0x04, 0x35, 0x04, 0x41,
0x04, 0x42, 0x04, 0x32, 0x04, 0x3E};
I am considering somehow transforming my array into a u16string or wstring from the standard library, so that I can do something like this with the transformed string:
std::wstring s_rx_data = L"string1\nstring2\nstring3";
std::wstring delimiter = L"\n";
size_t pos = 0;
std::wstring token;
while ((pos = s_rx_data.find(delimiter)) != std::wstring::npos) {
    token = s_rx_data.substr(0, pos);
    std::wcout << token << std::endl;
    s_rx_data.erase(0, pos + delimiter.length());
}
std::wcout << s_rx_data << std::endl;
And then convert it back to three byte arrays.
The question is: how can I transform my buffer into a C++ string?
Or maybe it would be better to divide the buffer in a stricter way, like searching for the delimiter in a loop and copying all the symbols before it into a new buffer.
P.S. All of this happens on an STM32 MCU, so I don't have big computing resources. I receive this buffer via Ethernet and have to separate it and print it via UART on an LCD screen that supports only UTF-16BE. I have a combined C/C++ project, so I can use either C or C++ approaches.
std::wstring_convert<std::codecvt<char16_t, char, std::mbstate_t>, char16_t> convert;
// use the range overload, since the data contains embedded 0x00 bytes
std::u16string u16 = convert.from_bytes(reinterpret_cast<const char*>(rx_data),
                                        reinterpret_cast<const char*>(rx_data) + sizeof(rx_data));
And there are many examples of splitting.
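Alternatively, a minimal sketch of the byte-level approach hinted at in the question (scanning for the big-endian 0x00 0x0A pair at even offsets), which avoids the locale machinery entirely; it assumes dynamic allocation via std::vector is acceptable on the MCU, and the helper name is just a placeholder:

#include <cstddef>
#include <cstdint>
#include <vector>

// Split a UTF-16BE byte buffer on the 0x00 0x0A pair, keeping each piece
// as its own byte vector ready to be pushed to the display.
static std::vector<std::vector<uint8_t>> splitUtf16BE(const uint8_t *data, size_t len)
{
    std::vector<std::vector<uint8_t>> parts;
    size_t start = 0;
    for (size_t i = 0; i + 1 < len; i += 2) {          // step by UTF-16 code units
        if (data[i] == 0x00 && data[i + 1] == 0x0A) {  // big-endian '\n'
            parts.emplace_back(data + start, data + i);
            start = i + 2;                             // skip the delimiter
        }
    }
    parts.emplace_back(data + start, data + len);      // trailing piece
    return parts;
}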

CRC4 INTERLAKEN and ITU lookup table generation

I am trying to generate CRC-4 lookup tables in C++ for the known CRC algorithms CRC-4/INTERLAKEN and CRC-4/ITU (G-704). The CRC definitions are as follows:
width=4 poly=0x3 init=0xf refin=false refout=false xorout=0xf check=0xb residue=0x2 name="CRC-4/INTERLAKEN"
width=4 poly=0x3 init=0x0 refin=true refout=true xorout=0x0 check=0x7 residue=0x0 name="CRC-4/G-704"
I've started adapting the code from the answer here to generate a lookup table and compare it with the tables found in the answer here. If the bit order does not matter for this code, then besides the polynomial (which is the same), what else do I need to consider? How different would the code for the INTERLAKEN and ITU algorithms be?
The code:
#include <iomanip>
#include <iostream>

void make_crc_table(unsigned long crcTable[])
{
    unsigned long POLYNOMIAL = 0x3;
    unsigned long remainder;
    unsigned char b = 0;
    do
    {
        remainder = b;
        for (unsigned long bit = 8; bit > 0; --bit)
        {
            if (remainder & 1)
                remainder = (remainder >> 1) ^ POLYNOMIAL;
            else
                remainder = (remainder >> 1);
        }
        crcTable[(size_t)b] = remainder;
    } while (0 != ++b);
}

int main()
{
    unsigned long crcTable[256];
    make_crc_table(crcTable);

    // Print the CRC table
    for (size_t i = 0; i < 256; i++)
    {
        std::cout << "0x";
        std::cout << std::setfill('0') << std::setw(2) << std::hex << crcTable[i];
        if (i % 16 == 15)
            std::cout << "," << std::endl;
        else
            std::cout << ", ";
    }
    return 0;
}
The output:
0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00, 0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02,
0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03, 0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01,
0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01, 0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03,
0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02, 0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00,
0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02, 0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00,
0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01, 0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03,
0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03, 0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01,
0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00, 0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02,
0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03, 0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01,
0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00, 0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02,
0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02, 0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00,
0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01, 0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03,
0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01, 0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03,
0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02, 0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00,
0x00, 0x02, 0x03, 0x01, 0x01, 0x03, 0x02, 0x00, 0x02, 0x00, 0x01, 0x03, 0x03, 0x01, 0x00, 0x02,
0x03, 0x01, 0x00, 0x02, 0x02, 0x00, 0x01, 0x03, 0x01, 0x03, 0x02, 0x00, 0x00, 0x02, 0x03, 0x01,
Original question ends here.
Update after rcgldr's answer:
#include <iostream>
#include <bitset>
#include <iomanip>

void make_crc_table(unsigned int crcTable[])
{
    unsigned char POLYNOMIAL = 0xc;
    unsigned char remainder;
    unsigned char b = 0;
    do
    {
        remainder = b;
        for (int bit = 8; bit > 0; --bit)
        {
            if (remainder & 0x80)
                remainder = (remainder << 1) ^ POLYNOMIAL;
            else
                remainder = (remainder << 1);
        }
        crcTable[(size_t)b] = remainder;
    } while (0 != ++b);
}

int main()
{
    unsigned int crcTable[256];
    make_crc_table(crcTable);

    for (size_t i = 0; i < 256; i++)
    {
        std::cout << "0x";
        std::cout << std::setfill('0') << std::setw(2) << std::hex << (crcTable[i]);
        if (i % 16 == 15)
            std::cout << "," << std::endl;
        else
            std::cout << ", ";
    }
    return 0;
}
Output of the code:
0x00, 0x0c, 0x18, 0x14, 0x30, 0x3c, 0x28, 0x24, 0x60, 0x6c, 0x78, 0x74, 0x50, 0x5c, 0x48, 0x44,
0xc0, 0xcc, 0xd8, 0xd4, 0xf0, 0xfc, 0xe8, 0xe4, 0xa0, 0xac, 0xb8, 0xb4, 0x90, 0x9c, 0x88, 0x84,
0x8c, 0x80, 0x94, 0x98, 0xbc, 0xb0, 0xa4, 0xa8, 0xec, 0xe0, 0xf4, 0xf8, 0xdc, 0xd0, 0xc4, 0xc8,
0x4c, 0x40, 0x54, 0x58, 0x7c, 0x70, 0x64, 0x68, 0x2c, 0x20, 0x34, 0x38, 0x1c, 0x10, 0x04, 0x08,
0x14, 0x18, 0x0c, 0x00, 0x24, 0x28, 0x3c, 0x30, 0x74, 0x78, 0x6c, 0x60, 0x44, 0x48, 0x5c, 0x50,
0xd4, 0xd8, 0xcc, 0xc0, 0xe4, 0xe8, 0xfc, 0xf0, 0xb4, 0xb8, 0xac, 0xa0, 0x84, 0x88, 0x9c, 0x90,
0x98, 0x94, 0x80, 0x8c, 0xa8, 0xa4, 0xb0, 0xbc, 0xf8, 0xf4, 0xe0, 0xec, 0xc8, 0xc4, 0xd0, 0xdc,
0x58, 0x54, 0x40, 0x4c, 0x68, 0x64, 0x70, 0x7c, 0x38, 0x34, 0x20, 0x2c, 0x08, 0x04, 0x10, 0x1c,
0x28, 0x24, 0x30, 0x3c, 0x18, 0x14, 0x00, 0x0c, 0x48, 0x44, 0x50, 0x5c, 0x78, 0x74, 0x60, 0x6c,
0xe8, 0xe4, 0xf0, 0xfc, 0xd8, 0xd4, 0xc0, 0xcc, 0x88, 0x84, 0x90, 0x9c, 0xb8, 0xb4, 0xa0, 0xac,
0xa4, 0xa8, 0xbc, 0xb0, 0x94, 0x98, 0x8c, 0x80, 0xc4, 0xc8, 0xdc, 0xd0, 0xf4, 0xf8, 0xec, 0xe0,
0x64, 0x68, 0x7c, 0x70, 0x54, 0x58, 0x4c, 0x40, 0x04, 0x08, 0x1c, 0x10, 0x34, 0x38, 0x2c, 0x20,
0x3c, 0x30, 0x24, 0x28, 0x0c, 0x00, 0x14, 0x18, 0x5c, 0x50, 0x44, 0x48, 0x6c, 0x60, 0x74, 0x78,
0xfc, 0xf0, 0xe4, 0xe8, 0xcc, 0xc0, 0xd4, 0xd8, 0x9c, 0x90, 0x84, 0x88, 0xac, 0xa0, 0xb4, 0xb8,
0xb0, 0xbc, 0xa8, 0xa4, 0x80, 0x8c, 0x98, 0x94, 0xd0, 0xdc, 0xc8, 0xc4, 0xe0, 0xec, 0xf8, 0xf4,
0x70, 0x7c, 0x68, 0x64, 0x40, 0x4c, 0x58, 0x54, 0x10, 0x1c, 0x08, 0x04, 0x20, 0x2c, 0x38, 0x34,
The bit order matters. The question's code is using reflected input and output. That means the polynomial should be bit reversed from 0x03 to 0x0c. To confirm this, table entry [0x80] should be 0x0c.
For the Interlaken table, the code should look like:
void make_crc_table(unsigned char crcTable[])
{
    unsigned char POLYNOMIAL = 0x30;
    unsigned char remainder;
    unsigned char b = 0;
    do
    {
        remainder = b;
        for (int bit = 8; bit > 0; --bit)
        {
            if (remainder & 0x80)
                remainder = (remainder << 1) ^ POLYNOMIAL;
            else
                remainder = (remainder << 1);
        }
        crcTable[(size_t)b] = remainder;
    } while (0 != ++b);
}
Note that the CRC will be in the upper 4 bits of a byte. When done, the code will need to return (crc>>4)^0x0f.
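A minimal sketch of how that table could then be used over a whole message, following the init=0xf / xorout=0xf parameters from the definition above (the function name is just a placeholder; the published check value 0xb for the ASCII string "123456789" can be used to verify the result):

#include <stddef.h>
#include <stdint.h>

// Byte-wise CRC-4/INTERLAKEN using the left-aligned table generated above.
// The 4-bit CRC lives in the upper nibble of the 8-bit register.
uint8_t crc4_interlaken(const uint8_t *data, size_t len,
                        const unsigned char crcTable[256])
{
    uint8_t crc = 0xF0;                    // init = 0xf, left-aligned
    for (size_t i = 0; i < len; i++)
        crc = crcTable[crc ^ data[i]];     // one table lookup per message byte
    return (crc >> 4) ^ 0x0F;              // right-align and apply xorout = 0xf
}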

Problem at reading and storing binary GPS data on SD Card

As hardware, I'm using an Arduino Due and a GPS receiver from u-Blox (https://www.sparkfun.com/products/15005) for my project. I basically want to retrieve a certain message (UBX-RXM-RAWX) via UART. As I will parse this message in post-processing, it would be fine to just read all the binary data and store it directly onto an SD card. Unfortunately, not being very experienced in C++, I'm having trouble storing binary data into a file.
I see I'm missing some general knowledge here, so I wanted to ask if you could help me out. My code is attached below and can also be found on GitHub: https://github.com/dariopa/GPS-Logging-Station/blob/master/GPS%20Station/_UBX_GPS_StoreBinaryMessage_RAWX_DUE/_UBX_GPS_StoreBinaryMessage_RAWX_DUE.ino
Thanks for any help!
// RETRIEVE RAWX MESSAGE FOR RINEX GENERATION.
// Microcontroller: Arduino DUE
// GPS Receiver: NEO-M8P-2 (https://www.sparkfun.com/products/15005)
#include <SD.h>
File binaryFile;
const int CS = 10; // ChipSelect
const char UBLOX_INIT[] PROGMEM = {
// Disable NMEA
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x24, // GxGGA off
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0xF0, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x2B, // GxGLL off
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0xF0, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x32, // GxGSA off
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0xF0, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0x39, // GxGSV off
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0xF0, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x40, // GxRMC off
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0xF0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x05, 0x47, // GxVTG off
// Disable UBX
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0x02, 0x15, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x46, // RXM-RAWX off
// Enable UBX
0xB5, 0x62, 0x06, 0x01, 0x08, 0x00, 0x02, 0x15, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x27, 0x4B, // RXM-RAWX on
// Rate
0xB5, 0x62, 0x06, 0x08, 0x06, 0x00, 0xE8, 0x03, 0x01, 0x00, 0x01, 0x00, 0x01, 0x39, //(1Hz)
// 0xB5, 0x62, 0x06, 0x08, 0x06, 0x00, 0xD0, 0x07, 0x01, 0x00, 0x01, 0x00, 0xED, 0xBD, // (0.5Hz)
// 0xB5, 0x62, 0x06, 0x08, 0x06, 0x00, 0xB8, 0x0B, 0x01, 0x00, 0x01, 0x00, 0xD9, 0x41, // (0.33Hz)
};
void setup() {
    Serial.begin(9600);
    Serial1.begin(9600);
    delay(3000);

    // send configuration data in UBX protocol
    for (int i = 0; i < sizeof(UBLOX_INIT); i++) {
        Serial1.write( pgm_read_byte(UBLOX_INIT + i) );
        Serial.write( pgm_read_byte(UBLOX_INIT + i) );
        delay(10); // simulating a 38400baud pace (or less), otherwise commands are not accepted by the device.
    }

    // SD CARD
    // Initialize SD Card
    pinMode(CS, OUTPUT);
    if (!SD.begin(CS)) {
        Serial.println("Initialization of SD card failed - Freeze!");
        while (1) {}
    }
    else {
        Serial.println("Initialization done.");
    }
}

void loop() {
    if (Serial1.available()) {
        // read from port serial, send to port Serial:
        char Coord[300] = {Serial1.read()};
        Serial.write(Coord);
        binaryFile = SD.open("Data.bin", FILE_WRITE);
        if (binaryFile) {
            binaryFile.println(Coord);
        }
    }
}
char Coord[300] = {Serial1.read()};

This initializes Coord[0] to the return value of Serial1.read(); the remaining 299 elements of the Coord array are initialized to zero.
Serial1.read() returns an int: if it is -1, the read was not successful; otherwise it is one valid character.
You want to read one character at a time and store it:
void setup() {
    ...
    // is there a point in opening binaryFile each loop!?
    binaryFile = SD.open("Data.bin", FILE_WRITE);
    if (!binaryFile) {
        // handle error
        assert(0);
    }
}

void loop() {
    if (Serial1.available()) {
        int ci = Serial1.read();
        if (ci == -1) {
            // handle error
            return;
        }
        char c = ci;
        Serial.write(c);
        binaryFile.write(c);
    }
}
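Writing a single byte per call works, but SD writes are much faster in chunks. A possible refinement (just a sketch, assuming the same Arduino SD File API as above; buffer size and names are placeholders) is to buffer the incoming bytes and write them out in blocks:

// Hypothetical buffered variant: collect incoming bytes and push them to the
// card in larger chunks, flushing so the data actually reaches the file.
const size_t BUF_SIZE = 256;
uint8_t writeBuf[BUF_SIZE];
size_t bufLen = 0;

void loop() {
    while (Serial1.available()) {
        int ci = Serial1.read();
        if (ci == -1)
            break;                          // nothing usable, try again next loop
        writeBuf[bufLen++] = (uint8_t)ci;
        if (bufLen == BUF_SIZE) {           // buffer full: one multi-byte write
            binaryFile.write(writeBuf, bufLen);
            binaryFile.flush();
            bufLen = 0;
        }
    }
}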

How to decrypt AES cipherText with NCryptDecrypt on Windows

I'm trying to use Ncrypt.lib to encrypt plain text with AES and then decrypt it. I use Ncrypt.lib because I want to use a persistent symmetric key.
My problem is that the decryption only works partially: the first 16 bytes are not decrypted correctly.
#include <stdio.h>
#include <tchar.h>
#include <Windows.h>
#include <ncrypt.h>
#include <bcrypt.h>

void PrintBytes(
    IN BYTE *pbPrintData,
    IN DWORD cbDataLen) {
    DWORD dwCount = 0;
    for (dwCount = 0; dwCount < cbDataLen; dwCount++) {
        printf("0x%02x, ", pbPrintData[dwCount]);
        if (0 == (dwCount + 1) % 10) putchar('\n');
    }
}

int main() {
    BYTE plaintext[] =
    {
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
        0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
        0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F,
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
        0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F
    };
    static const int plainTextLen = 48;

    printf("PlainText:\n");
    PrintBytes(plaintext, plainTextLen);
    printf("\n");

    LPCWSTR keyName = L"NCryptTest";
    SECURITY_STATUS status;
    NCRYPT_PROV_HANDLE hProvider;
    NCRYPT_KEY_HANDLE hKey;

    // Open storage provider
    status = NCryptOpenStorageProvider(&hProvider, NULL, 0);

    // Get stored key
    status = NCryptOpenKey(hProvider, &hKey, keyName, 0, 0);
    if (status == NTE_BAD_KEYSET) {
        // Create key if it doesn't exist
        status = NCryptCreatePersistedKey(hProvider, &hKey, BCRYPT_AES_ALGORITHM, keyName, 0, 0);
        status = NCryptFinalizeKey(hKey, 0);
    }

    // Set the chaining mode to cipher feedback
    LPCWSTR chainMode = BCRYPT_CHAIN_MODE_CFB;
    status = NCryptSetProperty(hKey, NCRYPT_CHAINING_MODE_PROPERTY,
                               (PBYTE)chainMode, wcslen(chainMode) * 2 + 2, 0);

    // Random iv but here, it's fixed
    //char* iv = "0123456789abcdef";
    //status = NCryptSetProperty(hKey, BCRYPT_INITIALIZATION_VECTOR,
    //                           (PBYTE)iv, 16, 0);

    // Get size of the cipher text
    DWORD cbCipherText = 0;
    status = NCryptEncrypt(hKey, plaintext, plainTextLen, NULL, NULL, 0,
                           &cbCipherText, 0);

    PBYTE pbCipherText = NULL;
    pbCipherText = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbCipherText);
    if (pbCipherText == NULL) {
        printf("Error! memory allocation failed\n");
    }

    // Encrypt
    DWORD outlen = -1;
    status = NCryptEncrypt(hKey, plaintext, plainTextLen, NULL, pbCipherText,
                           cbCipherText, &outlen, 0);

    printf("CipherText:\n");
    PrintBytes(pbCipherText, cbCipherText);
    printf("\n");

    // Get size of the plain text
    DWORD cbPlainText = 0;
    status = NCryptDecrypt(hKey, pbCipherText, cbCipherText, NULL, NULL, 0,
                           &cbPlainText, 0);

    PBYTE pbPlainText = NULL;
    pbPlainText = (PBYTE)HeapAlloc(GetProcessHeap(), 0, cbPlainText);
    if (pbPlainText == NULL) {
        printf("Error! memory allocation failed\n");
    }

    // Decrypt
    outlen = -1;
    status = NCryptDecrypt(hKey, pbCipherText, cbCipherText, NULL,
                           pbPlainText, cbPlainText, &outlen, 0);

    printf("PlainText:\n");
    PrintBytes(pbPlainText, cbPlainText);
    printf("\n");

    // Cleanup
    NCryptFreeObject(hKey);
    NCryptFreeObject(hProvider);

    getchar();
    return 0;
}
And the result is :
PlainText:
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x00, 0x01, 0x02, 0x03,
0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d,
0x0e, 0x0f, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
CipherText:
0xc5, 0xdc, 0x7e, 0xde, 0x83, 0x35, 0xbc, 0x34, 0x27, 0x4b,
0xf9, 0xde, 0x40, 0x36, 0xeb, 0x6d, 0xaf, 0x51, 0x8c, 0x48,
0x69, 0xa0, 0x16, 0xfb, 0x6d, 0x80, 0x44, 0xea, 0x5c, 0x74,
0x27, 0x38, 0xf1, 0x20, 0xa3, 0x87, 0x65, 0xc3, 0xcf, 0x62,
0x94, 0x84, 0xc9, 0xcd, 0x55, 0x4c, 0x7b, 0x48,
PlainText:
0x1d, 0x52, 0x88, 0x1b, 0x0c, 0x01, 0x13, 0xed, 0xe0, 0x39,
0x1e, 0x96, 0x67, 0x39, 0x72, 0x38, 0x00, 0x01, 0x02, 0x03,
0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d,
0x0e, 0x0f, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
I suspect an initialization vector problem, but I don't know how to use it. Is it simply a matter of setting BCRYPT_INITIALIZATION_VECTOR, or must I place the random IV in front of the plain text?
Thanks for your help.
Answer in comment:
This is because every successful call to NCryptEncrypt or NCryptDecrypt changes the state of hKey, so you cannot keep using the same key handle. After you encrypt, you need to obtain the key again before you decrypt. – RbMm
Thank you @RbMm!
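A minimal sketch of that fix, placed between the encryption and decryption steps of the program above (whether the chaining-mode property has to be re-applied after reopening is an assumption, so it is set again here to be safe):

// Release the handle whose state was consumed by NCryptEncrypt,
// then reopen the persisted key so decryption starts from a fresh state.
NCryptFreeObject(hKey);
status = NCryptOpenKey(hProvider, &hKey, keyName, 0, 0);
status = NCryptSetProperty(hKey, NCRYPT_CHAINING_MODE_PROPERTY,
                           (PBYTE)chainMode, wcslen(chainMode) * 2 + 2, 0);
// ... now call NCryptDecrypt with the fresh hKey as before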