I am trying to encrypt and decrypt a buffer using OpenSSL.
I took some example code from here (see the 2nd do_crypt function towards the end of the page) and modified it to take the message to encrypt/decrypt from a vector instead of an input file.
Unfortunately, during the decryption stage, the EVP_CipherFinal_ex function fails, and I don't know why.
I have tried to make a compilable example from my code (though I haven't been able to test it) that shows what I've done so far:
#include <iostream>
#include <vector>
#include <openssl/evp.h>
using namespace std;
typedef vector<uint8_t> Vect;
Vect _cipherMessage(const Vect& ai_in, const Vect& ai_key, const Vect& ai_iv, int ai_encode_decode)
{
cout << ((ai_encode_decode==1)?"ENCODE":"DECODE") << endl;
Vect w_ret;
uint8_t outbuf[1024 + EVP_MAX_BLOCK_LENGTH];
int outlen;
EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
EVP_CipherInit_ex(ctx, EVP_aes_128_cbc(), NULL, ai_key.data(), ai_iv.data(), ai_encode_decode);
if (!EVP_CipherUpdate(ctx, outbuf, &outlen, ai_in.data(), ai_in.size()))
{
// Error
cout << "EVP_CipherUpdate" << endl;
EVP_CIPHER_CTX_free(ctx);
return w_ret;
}
if (!EVP_CipherFinal_ex(ctx, outbuf + outlen, &outlen))
{
// Error
cout << "EVP_CipherFinal_ex" << endl;
EVP_CIPHER_CTX_free(ctx);
return w_ret;
}
EVP_CIPHER_CTX_free(ctx);
for (uint32_t i=0 ; i<outlen ; i++)
{
w_ret.push_back(outbuf[i]);
}
return w_ret;
}
int main()
{
Vect w_inputFrame { 0x53, 0x69, 0x78, 0x74, 0x65, 0x65, 0x6e, 0x42, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x6f, 0x6e, 0x67 }; // "SixteenBytesLong" in hex
Vect w_key { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f };
Vect w_iv { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f };
Vect w_encryptedFrame = _cipherMessage(w_inputFrame, w_key, w_iv, 1);
Vect w_decryptedFrame = _cipherMessage(w_encryptedFrame, w_key, w_iv, 0);
cout << "Encryption / Decryption : " << ((w_inputFrame == w_decryptedFrame)?"SUCCESS":"FAILURE") << endl;
return 0;
}
Could someone please tell me what I am doing wrong?
NOTE:
I have tested the openssl command on my device and was able to successfully encrypt / decrypt the message:
echo -n SixteenBytesLong | openssl enc -e -aes-128-cbc -K 0102030405060708090a0b0c0d0e0f -iv 0102030405060708090a0b0c0d0e0f -nosalt -nopad -base64 -p
key=0102030405060708090A0B0C0D0E0F00
iv =0102030405060708090A0B0C0D0E0F00
Ow74k7Pm+g8KQNiKZQptgw==
echo Ow74k7Pm+g8KQNiKZQptgw== | openssl enc -d -aes-128-cbc -K 0102030405060708090a0b0c0d0e0f -iv 0102030405060708090a0b0c0d0e0f -nosalt -nopad -base64 -p
key=0102030405060708090A0B0C0D0E0F00
iv =0102030405060708090A0B0C0D0E0F00
SixteenBytesLong
The main problem is that outlen is being overwritten by the call to EVP_CipherFinal_ex, so your total outlen is incorrect by the time you get to the end of the function.
You could do something like this (note that ERR_error_string and ERR_get_error need <openssl/err.h>):
Vect _cipherMessage(const Vect& ai_in, const Vect& ai_key, const Vect& ai_iv, int ai_encode_decode)
{
cout << ((ai_encode_decode==1)?"ENCODE":"DECODE") << endl;
Vect w_ret;
uint8_t outbuf[1024 + EVP_MAX_BLOCK_LENGTH];
int outlen;
int totalOutlen;
EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
EVP_CipherInit_ex(ctx, EVP_aes_128_cbc(), NULL, ai_key.data(), ai_iv.data(), ai_encode_decode);
if (!EVP_CipherUpdate(ctx, outbuf, &totalOutlen, ai_in.data(), (int)ai_in.size()))
{
// Error
cout << "EVP_CipherUpdate: " << ERR_error_string(ERR_get_error(), nullptr) << endl;
EVP_CIPHER_CTX_free(ctx);
return w_ret;
}
if (!EVP_CipherFinal_ex(ctx, outbuf + totalOutlen, &outlen))
{
// Error
cout << "EVP_CipherFinal_ex : " << ERR_error_string(ERR_get_error(), nullptr) << endl;
EVP_CIPHER_CTX_free(ctx);
return w_ret;
}
totalOutlen += outlen;
EVP_CIPHER_CTX_free(ctx);
for (int i=0 ; i<totalOutlen ; i++)
{
w_ret.push_back(outbuf[i]);
}
return w_ret;
}
Another problem is that if ai_in is larger than (1024 + EVP_MAX_BLOCK_LENGTH), you are going to overwrite and corrupt your stack.
You could do something like this (the make_handle helper needs <memory>):
namespace
{
template<typename T, typename D>
std::unique_ptr<T, D> make_handle(T* handle, D deleter)
{
return std::unique_ptr<T, D>{handle, deleter};
}
}
Vect _cipherMessage(const Vect& ai_in, const Vect& ai_key, const Vect& ai_iv, int ai_encode_decode)
{
cout << ((ai_encode_decode==1)?"ENCODE":"DECODE") << endl;
Vect w_ret;
int outlen;
int totalOutlen;
w_ret.resize(ai_in.size() + EVP_MAX_BLOCK_LENGTH);
auto ctx = make_handle(EVP_CIPHER_CTX_new(), EVP_CIPHER_CTX_free);
if(!ctx)
{
// Error
cout << "EVP_CIPHER_CTX_new: " << ERR_error_string(ERR_get_error(), nullptr) << endl;
return Vect(); // return an empty buffer on failure (w_ret was pre-sized above)
}
if(!EVP_CipherInit_ex(ctx.get(), EVP_aes_128_cbc(), nullptr, ai_key.data(), ai_iv.data(), ai_encode_decode))
{
// Error
cout << "EVP_CipherInit_ex: " << ERR_error_string(ERR_get_error(), nullptr) << endl;
return Vect();
}
if (!EVP_CipherUpdate(ctx.get(), w_ret.data(), &totalOutlen, ai_in.data(), (int)ai_in.size()))
{
// Error
cout << "EVP_CipherUpdate: " << ERR_error_string(ERR_get_error(), nullptr) << endl;
return Vect();
}
if (!EVP_CipherFinal_ex(ctx.get(), w_ret.data() + totalOutlen, &outlen))
{
// Error
cout << "EVP_CipherFinal_ex : " << ERR_error_string(ERR_get_error(), nullptr) << endl;
return Vect();
}
totalOutlen += outlen;
w_ret.resize(totalOutlen);
return w_ret;
}
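A side note on the second version: because make_handle wraps the context in a std::unique_ptr whose deleter is EVP_CIPHER_CTX_free, the context is released automatically on every return path, including all the error returns, so none of the branches need the explicit free that the first version does.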
I wrote code to work with the MPU9250 (gyroscope and accelerometer), but I can't read data from the AK8963 (the magnetometer inside it); I only get static data. In i2cdetect I only see 0x68, i.e. the MPU9250. The AK8963 should be at address 0x0C, but it is not there. Should it even be visible in i2cdetect?
I read the manual and the register map; they said that you need to enable Fuse ROM access mode, since the magnetometer will not work without it. I wrote the necessary data to the register, but the data stays static. All the examples on the Internet use SPI or an Arduino. How do I initialize the AK8963 and read data from it? Which file descriptor should be opened: 0x68 or 0x0C?
#include <cstdio>
#include <cstdint>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <linux/i2c-dev.h>
#include "HxMagnetometer.hpp"
#define MPU_ADDRESS 0x68 //MPU9250 address
#define I2C_FILE "/dev/i2c-1" //Local i2c device file
#define MPU_POWER1 0x6b
#define MPU_POWER2 0x6c
int main(){
int16_t gx, gy, gz;
int16_t ax, ay, az;
int16_t mx, my, mz;
//MPU9250
int fd = open(I2C_FILE, O_RDWR); // O_RDONLY | O_WRONLY is not a valid mode; O_RDWR is needed to read and write
ioctl(fd, I2C_SLAVE, MPU_ADDRESS);
//AK8963
int fd_ak8963 = open(I2C_FILE, O_RDWR);
ioctl(fd_ak8963, I2C_SLAVE, 0x0C);
HxMagnetometer *magn = new HxMagnetometer(fd);
while(1){
magn->magn_get_raw(&mx, &my, &mz);
printf("mx = %d my = %d mz = %d\n", mx, my, mz);
//printf("%s\n", _buffer); // _buffer is not declared in this snippet
//printf("GYRO: x = %d y = %d z = %d\n", gyro->gyro_get_x_raw(), gyro->gyro_get_y_raw(), gyro->gyro_get_z_raw());
delay(100);
}
return 0;
}
The magnetometer class:
#include "HxMagnetometer.hpp"
HxMagnetometer::HxMagnetometer(int32_t id){
device_id = id;
i2c_smbus_write_byte_data(device_id, AK8963_CNTL, 0x00);
std::cout << "Power down magnetometer\n";
i2c_smbus_write_byte_data(device_id, AK8963_CNTL, 0x0F);
std::cout << "Enter Fuse ROM access mode\n";
delay(10);
std::cout << "X: " << i2c_smbus_read_byte_data(device_id, AK8963_ASAX) << std::endl;
std::cout << "Y: " << i2c_smbus_read_byte_data(device_id, AK8963_ASAY) << std::endl;
std::cout << "Z: " << i2c_smbus_read_byte_data(device_id, AK8963_ASAZ) << std::endl;
i2c_smbus_write_byte_data(device_id, AK8963_CNTL, MFS_16BITS << 4 | 0x02);
std::cout << "Set magnetometer data resolution and sample ODR:" << std::endl;
i2c_smbus_write_byte_data(device_id, AK8963_CNTL, 0x00);
std::cout << "Power down magnetometer\n";
}
void HxMagnetometer::magn_get_raw(int16_t* _x_magn_raw, int16_t* _y_magn_raw, int16_t* _z_magn_raw){
i2c_smbus_write_byte_data(device_id, AK8963_CNTL, 0x0F);
if (i2c_smbus_read_byte_data(device_id, AK8963_ST1) & 0x01)
{
std::cout << "Read status: ok\n";
*_x_magn_raw = magn_get_x_raw();
//Get y raw: 0, 1, 0
*_y_magn_raw = magn_get_y_raw();
//Get x raw: 0, 0, 1
*_z_magn_raw = magn_get_z_raw();
}
i2c_smbus_write_byte_data(device_id, AK8963_CNTL, 0x00);
//Get x raw: 1, 0, 0
// i2c_smbus_read_byte_data(device_id, AK8963_ST2);
// i2c_smbus_write_byte_data(device_id, AK8963_CNTL1,AK8963_FUSE_ROM);
// i2c_smbus_write_byte_data(device_id, AK8963_CNTL1,AK8963_PWR_DOWN);
}
//Get x raw
int16_t HxMagnetometer::magn_get_x_raw(void){
return BITWISE_SHIFT(i2c_smbus_read_byte_data(device_id, 0x03), i2c_smbus_read_byte_data(device_id, 0x04));
}
//Get y raw
int16_t HxMagnetometer::magn_get_y_raw(void){
return BITWISE_SHIFT(i2c_smbus_read_byte_data(device_id, 0x05), i2c_smbus_read_byte_data(device_id, 0x06));
}
//Get z raw
int16_t HxMagnetometer::magn_get_z_raw(void){
return BITWISE_SHIFT(i2c_smbus_read_byte_data(device_id, 0x07), i2c_smbus_read_byte_data(device_id, 0x08));
}
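For reference, the usual reason the AK8963 does not show up at 0x0C is that it sits on the MPU9250's auxiliary I2C bus: it only becomes visible to the host (and to i2cdetect) once the MPU9250's bypass multiplexer is enabled. A minimal sketch, assuming the register addresses from the MPU-9250 register map (PWR_MGMT_1 at 0x6B, USER_CTRL at 0x6A, INT_PIN_CFG at 0x37) and the smbus helpers already used above:
// Route the AK8963 onto the host bus via bypass mode.
// fd must be an open /dev/i2c-* descriptor addressed to the MPU9250 (0x68).
static void mpu9250_enable_bypass(int fd)
{
    i2c_smbus_write_byte_data(fd, 0x6B, 0x00); // PWR_MGMT_1: wake the chip out of sleep
    i2c_smbus_write_byte_data(fd, 0x6A, 0x00); // USER_CTRL: disable the internal I2C master
    i2c_smbus_write_byte_data(fd, 0x37, 0x02); // INT_PIN_CFG: set the BYPASS_EN bit
}
With bypass enabled, the magnetometer registers are then read through the 0x0C descriptor, i.e. the HxMagnetometer would be constructed with fd_ak8963 rather than fd.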
I have successfully written a 32-bit Excel RTD server. This is loaded in Excel fine, and typing =RTD(...) in a cell works. However, my 64-bit build of the same code doesn't work, and I'd like to know of any way to debug this. I can't find any relevant documentation.
I have a 64-bit RTD server DLL. Dependency Walker doesn't show any problems with missing DLL dependencies, and in fact if I link a toy executable against it I can call into this DLL fine.
The DLL is successfully registered with regsvr32.
Excel even sees it but lists it as "inactive". In the past, while developing the 32-bit version, this usually happened due to missing DLL dependencies (with no error message). As stated earlier, I can link to the DLL and Dependency Walker doesn't show any problems.
What else can I do to debug this problem? Were it open source I'd look at the Excel source code and try to do what it's doing but obviously that's not an option here.
The same code produces a 32-bit DLL that 32-bit Excel runs fine. But 64-bit Excel can't seem to use the 64-bit DLL.
The problem was the registry entries for the DLL. On 64-bit Windows, 32-bit COM servers register under HKCR\Wow6432Node\CLSID while 64-bit servers register under HKCR\CLSID, so a DLL can be correctly registered for one architecture and invisible to the other. To answer my own question (and after learning more about COM than I ever wanted to), the best way to debug this is to write a COM client application and try instantiating the COM server in several ways. For RTD, below is a sample client app I used. If anybody has similar problems, I suggest first trying CoCreateInstance, then seeing if you can get the CLSID from the ProgID, then creating the instance using the ProgID, since that's what Excel does. Replace the UUIDs accordingly and "VCRTDServer.RTDFunctions" with whatever your ProgID is. The code:
/**
Small test program to check that the COM DLL was properly
registered
*/
#include <objbase.h>
#ifndef RTD_ARCH
# define RTD_ARCH 32
#endif
//
//Here we do a #import on the DLL; you can also do a #import on the .TLB
//The #import directive generates two files in the output folders.
//
#import "bin\\VCRTDServer.dll"
#include <iostream>
#include <stdexcept>
#include <string>
#include <tchar.h>
#include "IRTDServer_h.h"
using namespace std;
#define PROGID _T("VCRTDServer.RTDFunctions")
#define CLSID_STRING _T("{8D2EEA35-CBEB-49b1-8F3E-68C8F50F38D8}")
const CLSID CLSID_RTD = {0x8D2EEA35, 0xCBEB, 0x49B1,
{0x8F, 0x3E, 0x68, 0xC8, 0xF5, 0x0F, 0x38, 0xD8}};
const CLSID IID_RTD_UpdateEvent = {0xa43788c1, 0xd91b, 0x11d3,
0x8f, 0x39, 0x00, 0xc0, 0x4f, 0x36, 0x51, 0xb8};
const CLSID IID_RTD = {0xec0e6191, 0xdb41, 0x11d3,
0x8f, 0xe3, 0x00, 0xc0, 0x4f, 0x36, 0x51, 0xb8};
static string GetLastErrorAsString();
static void run();
int main() {
try {
run();
CoUninitialize();
return 0;
} catch(const exception& ex) {
cerr << "Error: " << ex.what() << endl;
CoUninitialize();
return 1;
}
}
void run() {
cout << "CoInitializing" << endl;
CoInitialize(nullptr);
// if CoCreateInstance doesn't work, nothing else will
// cout << "Checking CoCreateInstance" << endl;
// IRtdServer *obj;
// const auto hr = CoCreateInstance(CLSID_RTD,
// nullptr,
// CLSCTX_INPROC_SERVER,
// IID_RTD_UpdateEvent,
// (void**)&obj);
// if(hr != S_OK)
// throw runtime_error("CoCreateInstance failed: " + GetLastErrorAsString());
cout << "Converting prog id to clsid" << endl;
CLSID clsid;
const auto ptoc_res = CLSIDFromProgID(L"VCRTDServer.RTDFunctions", &clsid);
if(ptoc_res != S_OK)
throw runtime_error("CLSID error: " + GetLastErrorAsString());
cout << "Printing out class ID" << endl;
cout << "Class ID: " << *((int*)&clsid) << endl;
LPOLESTR progId;
const auto progIdResult = ProgIDFromCLSID(
CLSID_RTD, &progId);
if(progIdResult != S_OK)
throw runtime_error("Prog ID error: " + GetLastErrorAsString());
char buf[40];
WideCharToMultiByte(CP_ACP, NULL, progId, -1, buf, 40, NULL, NULL);
cout << "prog id is '" << buf << "'" << endl;
cout << "Creating instance" << endl;
RTDServerLib::IRtdServerPtr rtdServer;
if(rtdServer.CreateInstance(CLSID_RTD) != S_OK)
throw runtime_error("Could not create instance: " +
GetLastErrorAsString());
cout << "Starting RTD server" << endl;
const auto startResult = rtdServer->ServerStart(nullptr);
cout << "Start result was: " << startResult << endl;
}
//Returns the last Win32 error, in string format. Returns an empty string if there is no error.
std::string GetLastErrorAsString() {
//Get the error message, if any.
const auto errorMessageID = ::GetLastError();
if(errorMessageID == 0)
return {}; //No error message has been recorded
LPSTR messageBuffer = nullptr;
size_t size = FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
NULL, errorMessageID, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPSTR)&messageBuffer, 0, nullptr);
std::string message(messageBuffer, size);
//Free the buffer.
LocalFree(messageBuffer);
return message;
}
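For what it's worth, this client is an ordinary console program; with MSVC it can be built along the lines of cl /EHsc rtdclient.cpp ole32.lib oleaut32.lib (the exact command is just an illustration). The important part is to build the client with the same bitness as the server DLL under test, since a 64-bit client exercises the same registry view that 64-bit Excel does.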
Using the following code, I read an executable file and create a file that will be used to store a variable: a simple C char array.
ifstream fIn( argv[1], ios::binary );
if( !fIn.is_open() )
throw (exception("Could not open file."));
ofstream fOut( argv[2] );
if( !fOut.is_open() )
throw (exception("Could not create file."));
// def of the C array
// name of the variable which will be written in the new file
fOut << "unsigned char " << varName << "[] =\n{"; // varName = "foobar";
stringstream ss;
string s;
static char c = 0;
// copy each byte in 's'
while(fIn.get(c)) s += c;
// generate the file
fOut << for_each(s.begin(), s.end(), hexify<char>(ss));
// end of def
fOut << "\n};\n";
The hexify<T> struct body and the operator<< used to satisfy my formatting needs:
template<typename T> struct hexify
{
hexify(stringstream& out) : _ss(out), _n(0) {}
void operator() (T& x)
{
if( _n > 0 ) _ss << ", ";
if( (_n % 16) == 0 ) _ss << "\n\t";
_ss << "0x" << setbase(16) << setw(2) << setfill('0') << (unsigned int)x;
_n++;
}
stringstream& _ss;
int _n;
};
template<typename T>
ostream& operator<<( ostream& os, const hexify<T>& h ) { os << h._ss.str(); return os; }
For this situation, let's say I read only the first 4 bytes of the file and save those bytes in a char array foobar[]:
4d5a 9000 0300 0000 0400 0000 ffff 0000
The result I expect for the foobar array definition (in the previously created file) should be:
unsigned char foobar[] = { 0x4d, 0x5a, 0x90, 0x00 };
The result I actually obtain:
unsigned char foobar[] = { 0x4d, 0x5a, 0xffffff90, 0x00 };
Why is the third byte padded with 'f's?
Another example, with 112 bytes:
unsigned char foobar[] =
{
0x4d, 0x5a, 0xffffff90, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xffffffff, 0xffffffff, 0x00, 0x00,
0xffffffb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xfffffff0, 0x00, 0x00, 0x00,
0x0e, 0x1f, 0xffffffba, 0x0e, 0x00, 0xffffffb4, 0x09, 0xffffffcd, 0x21, 0xffffffb8, 0x01, 0x4c, 0xffffffcd, 0x21, 0x54, 0x68,
0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f,
0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20
};
A char is not the same data type as an unsigned int. I think your cast is sign-extending the upper bit of your char (which is 1 for 0x90) and filling the remaining 24 bits of the unsigned int. This is kludgey, but try masking off the upper bits. I tried this, with and without the mask:
char y = 0x90;
cout << "0x" << hex << setw(2) << setfill('0') << (unsigned int)(y&0xFF) << endl;
cout << "0x" << hex << setw(2) << setfill('0') << (unsigned int)y << endl;
and got this output:
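0x90
0xffffff90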
The other option is to properly change your data type to unsigned char as well. Your template call currently instantiates it as char, not unsigned char. That will also work without the masking.
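If changing the element type everywhere is inconvenient, an equivalent one-line variation inside the question's hexify functor is to convert through unsigned char before widening, which stops the sign extension the same way:
_ss << "0x" << setbase(16) << setw(2) << setfill('0') << (unsigned int)(unsigned char)x;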
I need a very straightforward piece of code that connects to a serial port (where a home-made device is attached), writes some data to it, and then reads the reply. The written data turns ON an LED on the device (which makes it easy to see whether data sent by the PC is actually received by the device).
I have it working with boost/asio and need to port it to QSerialPort. With QSerialPort, I'm able to connect and send data (I know it's sent because the LED turns ON), but I cannot receive any...
Here is the boost asio code working:
#include <iostream>
#include <boost/asio.hpp>
#include <boost/asio/serial_port_base.hpp>
#include <boost/thread.hpp>
static std::vector<char> sReceived;
void readThread( boost::asio::serial_port* port )
{
try
{
char read;
while ( boost::asio::read( *port, boost::asio::buffer(&read,1) ) == 1 )
sReceived.push_back( read );
}
catch (...)
{
// connection most likely closed
}
}
int main( int argc, char* argv[] )
{
int res = 1;
if ( argc != 2 )
{
std::cout << "Specify COM port name as first and unic parameter" << std::endl;
}
else
{
try
{
std::string portName = argv[1];
boost::asio::io_service ioservice;
boost::asio::serial_port port( ioservice );
port.open( portName );
port.set_option( boost::asio::serial_port_base::baud_rate( 921600 ) );
port.set_option( boost::asio::serial_port_base::parity( boost::asio::serial_port_base::parity::none ) );
port.set_option( boost::asio::serial_port_base::stop_bits( boost::asio::serial_port_base::stop_bits::one ) );
port.set_option( boost::asio::serial_port_base::character_size( 8 ) );
port.set_option(
boost::asio::serial_port_base::flow_control(
boost::asio::serial_port_base::flow_control::hardware ) );
if ( port.is_open() )
{
boost::thread thrd( readThread, &port );
static const size_t requestMessageSize = 10;
unsigned char request[requestMessageSize] = { 0x01, 0x00, 0x07, 0x01, 0x01, 0x00, 0x00, 0xBE, 0x0B, 0x00 };
if ( boost::asio::write( port,
boost::asio::buffer(request,requestMessageSize) ) == requestMessageSize )
{
boost::this_thread::sleep( boost::posix_time::milliseconds( 1000 ) );
if ( !sReceived.empty() )
{
std::cout << "Received:" << std::hex;
for ( size_t i = 0; i != sReceived.size(); ++i )
{
std::cout << " 0x" << static_cast<int>( sReceived[i] );
}
std::cout << std::endl;
std::cout << "Could open port, send and receive data" << std::endl;
res = 0;
}
else
{
std::cout << "Could open port, send data, but did not received any reply" << std::endl;
}
}
else
{
std::cout << "Could not send data" << std::endl;
}
port.close();
}
else
{
std::cout << "Could not open connection with " << portName << std::endl;
}
}
catch (...)
{
std::cout << "Exception raised by boost asio" << std::endl;
}
}
return res;
}
And here is the Qt code (not working):
#include <QApplication>
#include <QThread>
#include <QMessageBox>
#include <QtSerialPort/QSerialPort>
#include <sstream>
int main( int argc, char* argv[] )
{
QApplication app(argc, argv);
int res = 1;
if ( argc != 2 )
{
QMessageBox::critical( NULL, "Invalid argument", "Specify COM port name as first and unic parameter" );
}
else
{
QString portName = argv[1];
QSerialPort port( portName );
if ( port.open( QSerialPort::ReadWrite ) )
{
if ( port.setBaudRate( 921600 ) &&
port.setFlowControl( QSerialPort::HardwareControl ) &&
port.setStopBits( QSerialPort::OneStop ) &&
port.setParity( QSerialPort::NoParity ) &&
port.setDataBits( QSerialPort::Data8 ) )
{
static const size_t requestMessageSize = 10;
char request[requestMessageSize] = { 0x01, 0x00, 0x07, 0x01, 0x01, 0x00, 0x00, 0xBE, 0x0B, 0x00 };
if ( port.write( request, requestMessageSize ) == requestMessageSize )
{
QThread::sleep( 1 );
QByteArray reply = port.readAll();
if ( !reply.isEmpty() )
{
std::stringstream str;
str << "Received:" << std::hex;
for ( size_t i = 0; i != reply.size(); ++i )
{
str << " 0x" << static_cast<int>( reply.at(i) );
}
str << std::endl;
str << "Could open port, send and receive data" << std::endl;
QMessageBox::information( NULL, "OK", str.str().c_str() );
res = 0;
}
else
{
QMessageBox::critical( NULL, "Error", "Could open port, send data, but did not received any reply" );
}
}
else
{
QMessageBox::critical( NULL, "Error", "Unable to send request to port" );
}
}
else
{
QMessageBox::critical( NULL, "Error", "Unable to configure port" );
}
port.close();
}
else
{
QMessageBox::critical( NULL, "Unable to connect", QString("Could not open connection with %1").arg( portName ) );
}
}
return res;
}
When I run both programs with the correct port name passed as parameter:
the boost one lights the LED and reports Received: 0x.....Could open port, send and receive data
the Qt one lights the LED, but reports Could open port, send data, but did not receive any reply
What's wrong with my QSerialPort code? Why is it able to send data but fails to receive any?
port.readAll() doesn't block until something is read. In fact, no data will ever be sent or become available to read until you return to the event loop, or you call waitForReadyRead() to receive data and waitForBytesWritten() to flush the write buffer.
So replace the QThread::sleep( 1 ) with port.waitForBytesWritten( 1000 ); port.waitForReadyRead( 1000 );
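For example, a minimal sketch of the write/read portion (the 1000 ms timeouts are arbitrary; the loop keeps draining until the device stays quiet for a full timeout):
if ( port.write( request, requestMessageSize ) == requestMessageSize )
{
    port.waitForBytesWritten( 1000 );        // actually push the bytes out of the port
    QByteArray reply;
    while ( port.waitForReadyRead( 1000 ) )  // block until data arrives, or time out
        reply.append( port.readAll() );
    // reply now holds everything the device sent back within the timeout
}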
I'm writing a program on Linux that has to interface with an existing Windows program. I cannot modify the way the Windows program works, but I must integrate with the existing data. My program receives raw data structures over a TCP network socket. Unfortunately, the Windows program embeds raw multibyte character strings in the data structures and does not indicate which code page is in use. This works OK for English, but fails miserably with non-Latin-based languages (e.g. Japanese). At best, I can guess at the code page Windows is using: if my locale is set to "ja" or "ja_JP", I'll have to assume the Windows machine is using the SHIFT-JIS code page... Ugly, but that's life.
QUESTION:
Assuming I've guessed the code page correctly, how can I convert these raw MBCS character strings to UTF-8 strings?
Here is a sample of the raw data:
The string being sent is: 私のクラスへようこそ
The MBCS data received from Windows (JP) is (in bytes, with extra 0x00 added to ensure null termination):
char kanji_win_raw_bytes[] = { 0x8E, 0x84, 0x82, 0xCC, 0x83, 0x4E, 0x83, 0x89, 0x83, 0x58, 0x82, 0xD6, 0x82, 0xE6, 0x82, 0xA4, 0x82, 0xB1, 0x82, 0xBB, 0x00, 0x00, 0x00 };
As nearly as I can tell, the string is coming from a Windows machine using the SHIFT-JIS code page. I've tried mbsrtowcs():
const char *ptr = (char*)m_data;
// m_data contains the byte array of MBCS data
if ( m_data != NULL )
{
std::mbstate_t state = std::mbstate_t();
size_t bufflen = std::mbsrtowcs(NULL, &ptr, 0, &state);
if ( bufflen == (size_t)-1 )
{
std::cout << "ERROR! mbsrtowcs() " << strerror(errno) << std::endl;
std::cout << "Error at: " << (int32_t)( (char*)ptr - (char*)m_data ) << std::endl;
return;
}
std::vector<wchar_t> wstr(bufflen);
std::cout << "converting " << bufflen << " characters" << std::endl;
std::mbsrtowcs(&wstr[0], &ptr, wstr.size(), &state);
std::wcout << "Wide string: " << &wstr[0] << std::endl
<< "The length, including '\\0': " << wstr.size() << std::endl;
}
The call to mbsrtowcs() fails at position "0" with no characters converted.
I then tried the iconv library using the SHIFT-JIS code page:
bytes_converted = 0;
char input[4096] = {0};
char dst[4096] = {0};
char* src = input;
size_t dstlen = sizeof(dst);
size_t srclen = 0;
iconv_t conv = iconv_open("UTF-8", "SHIFT-JIS" );
// make a copy
memcpy( (void*)input, (void*)kanji_win_raw_bytes, sizeof(kanji_win_raw_bytes) );
srclen = sizeof(kanji_win_raw_bytes);
if ( conv != (iconv_t)-1 )
{
bytes_converted = iconv( conv, NULL, NULL, (char**)&dst, &dstlen );
if ( bytes_converted == (size_t) -1 )
{
std::cerr << "ERROR: initializing output buffer: (" << errno << ") " << strerror(errno) << std::endl;
}
bytes_converted = iconv(conv, (char**)&src, &srclen, (char**)&dst, &dstlen);
if ( bytes_converted == (size_t) - 1)
{
std::cerr << "ERROR in conversion: (" << errno << ") " << strerror(errno) << std::endl;
if ( errno == EINVAL )
{
std::cerr << "RESULT: iconv() converted " << bytes_converted << " bytes: [" << dst << "]" << std::endl;
}
}
else
{
std::cerr << "SUCCESS: iconv() converted " << bytes_converted << " bytes: [" << dst << "]" << std::endl;
}
iconv_close(conv);
}
else
{
std::cerr << "ERROR: iconv_open() failed: " << strerror(errno) << std::endl;
}
iconv segfaults (core dumps) on the given (Japanese) string. Having only used iconv a few times, I believe the code snippets (copied from online samples) are correct, and they seem to work OK with Latin-based languages using a similar setup but different (e.g. German / French) MBCS strings coming from the Windows server.
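For what it's worth, the crash is consistent with the pointer handling above: dst is an array, so (char**)&dst makes iconv() read the array's leading bytes as if they were a char* and then write through that bogus pointer. A minimal sketch with separate cursor variables, assuming glibc's iconv and its SHIFT-JIS alias (the 20-byte input is the payload without the zero padding):
#include <iconv.h>
#include <cstdio>
int main()
{
    // Same Shift-JIS bytes as kanji_win_raw_bytes, without the zero padding
    unsigned char sjis[] = { 0x8E, 0x84, 0x82, 0xCC, 0x83, 0x4E, 0x83, 0x89, 0x83, 0x58,
                             0x82, 0xD6, 0x82, 0xE6, 0x82, 0xA4, 0x82, 0xB1, 0x82, 0xBB };
    char out[4096] = {0};
    char* inptr = reinterpret_cast<char*>(sjis); // iconv() advances these cursors,
    char* outptr = out;                          // so they must be real char* lvalues
    size_t inleft = sizeof sjis;
    size_t outleft = sizeof out - 1;             // leave room for the trailing '\0'
    iconv_t conv = iconv_open("UTF-8", "SHIFT-JIS");
    if (conv == (iconv_t)-1) { perror("iconv_open"); return 1; }
    if (iconv(conv, &inptr, &inleft, &outptr, &outleft) == (size_t)-1)
        perror("iconv");
    else
        std::printf("UTF-8: %s\n", out);
    iconv_close(conv);
    return 0;
}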
The codecvt machinery (std::wstring_convert) does not yet seem to be implemented on Linux, even when compiling with -std=c++11, so that doesn't appear to be an option.
Thanks in advance for any help you can provide.
-- Edit --
With the help of "myk", I created a sample application that better shows my problem. With his suggestions, I was able to get around the segmentation fault; however, the Windows MBCS string fails to convert regardless of the locale I choose.
/**
* MBCS test
*/
#include <stdlib.h>
#include <unistd.h>
#include <stdint.h>
#include <stdio.h>
#include <sys/types.h>
#include <string.h>
#include <errno.h>
#include <clocale>
#include <string>
#include <iostream>
// 私のクラスへようこそ (welcome to my class)
const char* kanji_string = "私のクラスへようこそ";
// This is what raw UTF-8 should look like
uint8_t kanji_utf8_raw_bytes[] = { 0xE7, 0xA7, 0x81, 0xE3, 0x81, 0xAE, 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xA9, 0xE3, 0x82, 0xB9, 0xE3, 0x81, 0xB8, 0xE3, 0x82, 0x88, 0xE3, 0x81, 0x86, 0xE3, 0x81, 0x93, 0xE3, 0x81, 0x9D, 0x00 }; // trailing 0x00 so it can be printed as a C string
// This is Windows MBCS using the SHIFT-JIS code page
uint8_t kanji_win_raw_bytes[] = { 0x8E, 0x84, 0x82, 0xCC, 0x83, 0x4E, 0x83, 0x89, 0x83, 0x58, 0x82, 0xD6, 0x82, 0xE6, 0x82, 0xA4, 0x82, 0xB1, 0x82, 0xBB, 0x00, 0x00, 0x00 };
int main( int argc, char **argv )
{
std::setlocale(LC_ALL, "en_US.utf8");
std::cout << "KANJI String [" << kanji_string << "]" << std::endl;
std::cout << "KANJI UTF-8 Raw [" << kanji_utf8_raw_bytes << "]" << std::endl;
const char *data = (char*)kanji_win_raw_bytes;
std::mbstate_t state = std::mbstate_t();
size_t result = 0;
wchar_t* buffer = (wchar_t*)malloc( sizeof(wchar_t) * (strlen((char*)data) + 1) );
if ( buffer )
{
result = std::mbsrtowcs(buffer, &data, strlen(data), &state);
if ( result == (size_t)-1 )
{
std::cout << "ERROR! mbsrtowcs() " << strerror(errno) << std::endl;
std::cout << "Error at: " << (int32_t)( (char*)data - (char*)kanji_win_raw_bytes ) << std::endl;
}
else
{
std::wcout << "Wide string: [" << buffer << "] " << std::endl;
}
free( buffer );
}
return 0;
}
Note: this can be compiled and run on Linux/Mac with the following command:
g++ mbcs_test.cpp -o mbcs_test && ./mbcs_test
For mbsrtowcs(), a couple of things:
1) The call:
size_t bufflen = std::mbsrtowcs(NULL, &ptr, 0, &state);
is the length-query form: with a NULL destination the length argument is ignored and the required number of wide characters is returned, so a (size_t)-1 result here means the input bytes themselves were rejected, not that the buffer was too small. To actually convert, pass a destination buffer and its length:
size_t bufflen = std::mbsrtowcs(buffer, &ptr, strlen(m_data), &state);
assuming that you have declared 'buffer' with something like:
wchar_t* buffer = (wchar_t*) malloc(sizeof(wchar_t) * (strlen(m_data) + 1));
2) My experience is that you need to have called setlocale() for mbsrtowcs() to work. I can't see from the code snippet whether you do, but I suggest including something like:
#include <clocale>
:
std::setlocale(LC_ALL, "en_US.utf8");
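One caveat worth adding: mbsrtowcs() decodes according to the encoding of the current locale, so with en_US.utf8 it will still reject the Shift-JIS bytes with EILSEQ no matter how the destination buffer is set up. Converting this particular input needs either a Shift-JIS locale (e.g. ja_JP.sjis, which is rarely installed on Linux) or an explicit converter such as iconv with SHIFT-JIS as the source encoding, as sketched further up.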