I have a C++ array or structure initialization issue that I have not been able to resolve.
I have a 4-level nested structure. Each level is really the same 48 bytes wrapped in the structure one level up. When the structure is declared and initialized as a scalar, it is correctly initialized with the provided values. However, when it is declared as a single-element array, all 48 bytes become zeros, as shown below. Unfortunately the structures are too complicated to paste here.
If I define 4 simple structures, one containing another, with the innermost one containing the same 12 unsigned integers, then initialization works correctly, even when it is declared as an array.
Has anyone experienced similar issues? What am I missing? What compiler flags, options, etc. could lead to such a problem? I appreciate any comments and help.
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "bls12_381/fq.hpp"
static constexpr embedded_pairing::bls12_381::Fq scalar = {
    {{{.std_words = {0x1c7238e5, 0xcf1c38e3, 0x786f0c70, 0x1616ec6e, 0x3a6691ae, 0x21537e29,
                     0x4d9e82ef, 0xa628f1cb, 0x2e5a7ddf, 0xa68a205b, 0x47085aba, 0xcd91de45}}}}
};
static constexpr embedded_pairing::bls12_381::Fq array[1] = {
    {{{{.std_words = {0x1c7238e5, 0xcf1c38e3, 0x786f0c70, 0x1616ec6e, 0x3a6691ae, 0x21537e29,
                      0x4d9e82ef, 0xa628f1cb, 0x2e5a7ddf, 0xa68a205b, 0x47085aba, 0xcd91de45}}}}}
};
void print_struct(const char *title, const uint8_t *cbuf, int len)
{
    printf("\n");
    printf("[%s] %d\n", title, len);
    for (int i = 0; i < len; i++) {
        if (i % 30 == 0 && i != 0)
            printf("\n");
        else if ((i % 10 == 0 || i % 20 == 0) && i != 0)
            printf(" ");
        printf("%02X ", cbuf[i]);
    }
    printf("\n");
}
void run_tests()
{
    print_struct("scalar", (const uint8_t *) &scalar, sizeof(scalar));
    print_struct("array", (const uint8_t *) &array[0], sizeof(array[0]));
}
[scalar] 48
E5 38 72 1C E3 38 1C CF 70 0C 6F 78 6E EC 16 16 AE 91 66 3A 29 7E 53 21 EF 82 9E 4D CB F1
28 A6 DF 7D 5A 2E 5B 20 8A A6 BA 5A 08 47 45 DE 91 CD
[array] 48
00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
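For reference, the simple case I mention above looks roughly like this (a sketch with placeholder names; the real structures are considerably more complex):

#include <stdint.h>

// Hypothetical names for the 4 simple nested structures described above.
struct Level1 { uint32_t std_words[12]; };
struct Level2 { Level1 l1; };
struct Level3 { Level2 l2; };
struct Level4 { Level3 l3; };

// Both of these initialize correctly, even in the array form:
static constexpr Level4 simple_scalar = {{{{{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}}}}};
static constexpr Level4 simple_array[1] = {{{{{{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}}}}}};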
I've now narrowed down the example.
The following is a complete, standalone example. I also forgot to mention that on Linux, using g++ 9.3.0 with -std=c++17, the initialization gives the expected result of all FF's. However, on an embedded device, the inherited structure ends up all 0's.
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
struct Data {
    uint32_t words;
};

struct Overlay {
    Data val;
};

struct Inherit : Data {
};

static Overlay overlay[1] = {
    {{.words = 0xffffffff}}
};

static Inherit inherit[1] = {
    {{.words = 0xffffffff}}
};

void print_struct(const char *title, const uint8_t *cbuf, int len)
{
    printf("[%s] %d\n", title, len);
    for (int i = 0; i < len; i++) {
        printf("%02X ", cbuf[i]);
    }
    printf("\n");
}

int main()
{
    print_struct("overlay", (const uint8_t *) &overlay[0], sizeof(overlay[0])); // FF FF FF FF
    print_struct("inherit", (const uint8_t *) &inherit[0], sizeof(inherit[0])); // 00 00 00 00 <-- incorrect?
    return 0;
}
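For comparison, the same arrays written with plain brace initialization instead of designated initializers (just a sketch, not tested on the embedded toolchain; designated initializers are a compiler extension under -std=c++17, which may be relevant here):

// Plain aggregate initialization, no designated initializers.
// In C++17 an aggregate may have a base class; the innermost braces
// initialize the Data base subobject of Inherit.
static Overlay overlay2[1] = { { { 0xffffffff } } };
static Inherit inherit2[1] = { { { 0xffffffff } } };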
This is very strange! I have looked around and found nothing. My test code is below:
#include "pch.h"
#include "Windows.h"
#include "openssl/ssl.h"
#pragma comment(lib,"../Include/lib/libssl.lib")
#pragma comment(lib,"../Include/lib/libcrypto.lib")
#pragma comment(lib,"Crypt32.lib")
#pragma comment(lib,"ws2_32.lib")
#include <stdlib.h>
#include <crtdbg.h>
#define _CRTDBG_MAP_ALLOC
const char* priKey = "607BC8BA457EC0A1B5ABAD88061502BEA5844E17C7DD247345CD57E501B3300B4B8889D3DFCF5017847606508DF8B283C701F35007D5F4DBA96023DD3B0CCE062D8F63BCC16021B944A1E88861B70D456BAA1E0E69C90DFCA13B4D4EA5403DA25FEBF94B0515644A7D2DF88299189688DA5D8951512ADC3B1A35BAEEC147F69AB101048B9029E65A77A438A05AE30337E82B0571B80A955C72EA0DB3B457ECC8B81F346624E3D22755FEB3D84F810431974803E13E26BF95AF7AB590E17C0113BFE9B36BE12BE16D89273348E0CC094526CAF54ABF8044565EC9500EBF657265474BC362EBDFD78F513282AAF0EEF9BA0BB00F7FF9C7E61F00465BBD379D6201";
const char* pubKey = "AE7DF3EB95DF1F864F86DA9952BB44E760152986731253C96C135E5391AEFF68F5C1640552F1CCC2BA2C12C0C68C051E343B786F13215CEFC8221D9FA97D50E895EAF50D1AF32DC5EB40C9F1F8CA5145B43CEF83F2A89C9661AFA73A70D32951271C6BEFE1B5F24B512520DA7FD4EEC982F2D8057FE1938FA2FB54D8D282A25D8397298B75E154739EF16B8E2F18368F5BEEAD3D18528B9B1A63C731A71735CDB6102E187EF3377B72B58A124FA280891A79A2BD789D5DBA3618BBD74367F5C50A220204D90A59828C3C81FDBD9D2A91CBF6C8563C6FE987BE21B19BBC340DE4D42290D63909AD5D856D13B8CDC91D5701570045CE3609D4F8884F69120AD9A3";
void rsa_test(const char* n, const char* d)
{
    RSA* rsa = RSA_new();
    BIGNUM* bn = BN_new();
    BIGNUM* bd = BN_new();
    BIGNUM* be = BN_new();
    BN_set_word(be, 0x10001);
    if (n) BN_hex2bn(&bn, n);
    if (d) BN_hex2bn(&bd, d);
    RSA_set0_key(rsa, bn, be, bd);
    // calc hash
    const char* msg = "hello,rsa!!";
    BYTE shaResult[SHA256_DIGEST_LENGTH] = { 0 };
    SHA256((unsigned char*)msg, strlen(msg), shaResult);
    // sign
    unsigned int olen;
    unsigned char sign[256] = { 0 };
    RSA_sign(NID_sha256, shaResult, SHA256_DIGEST_LENGTH, sign, &olen, rsa);
    RSA_free(rsa);
}
DWORD thread_test(LPVOID lpParam)
{
    rsa_test(pubKey, priKey);
    return 0;
}
int main()
{
    //_CrtSetBreakAlloc(159);
    _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF);
    //CreateThread(nullptr, 0, thread_test, nullptr, 0, nullptr);
    //rsa_test(pubKey, priKey);
    system("pause");
}
Calling rsa_test(pubKey, priKey) directly in the main thread does NOT cause memory leaks!
Calling CreateThread(nullptr, 0, thread_test, nullptr, 0, nullptr) does produce memory leaks!!!
Console output as follows:
Detected memory leaks!
Dumping objects ->
{173} normal block at 0x000002BDE6D44000, 264 bytes long.
Data: < _W- :_ s ! 6> D8 89 FD 5F 57 2D C5 3A 5F 82 73 F1 00 21 FA 36
{162} normal block at 0x000002BDE6D43AC0, 264 bytes long.
Data: < > 00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F
{161} normal block at 0x000002BDE6D2E2A0, 160 bytes long.
Data: <` > 60 F1 0F 91 F6 7F 00 00 00 00 00 00 00 00 00 00
{160} normal block at 0x000002BDE6D2DBA0, 160 bytes long.
Data: <` > 60 F1 0F 91 F6 7F 00 00 00 00 00 00 00 00 00 00
{159} normal block at 0x000002BDE6D48230, 352 bytes long.
Data: < P > 00 00 00 00 00 00 00 00 50 AB D3 E6 BD 02 00 00
{158} normal block at 0x000002BDE6D286B0, 12 bytes long.
Data: < > 00 00 00 00 00 00 00 00 01 00 00 00
Object dump complete.
Then I use _CrtSetBreakAlloc(159) (or another memory ID) to locate the allocation, and here is my call stack screenshot:
my VS2017 showing the breakpoint at CRYPTO_zalloc (crypto/mem.c)
So my question is: how do I free that leaked memory in the thread?
Thanks a lot!!
Download my test code (built with Visual Studio 2017 x64)
Per https://www.openssl.org/docs/man1.1.0/man3/OPENSSL_thread_stop.html,
OPENSSL_thread_stop();
will do it for you. You can call it as shown below:
DWORD thread_test(LPVOID lpParam)
{
    rsa_test(pubKey, priKey);
    OPENSSL_thread_stop();
    return 0;
}
I am trying to send a user-defined struct, which contains sub-structs, over AMQP from one node to another. I am using the Apache Qpid library at the moment.
(I'm currently still testing my code on the PC before I rebuild it for my other devices.)
My current method consists of converting the struct to a byte string, sending that over AMQP, and converting it back on the other side.
I do the following:
// user defined struct
enum Quality
{
    /// <summary>Value is reliable.</summary>
    QUALITY_GOOD,
    /// <summary>Value not reliable.</summary>
    /// <remarks>
    /// A variable may be declared unreliable if a sensor is not calibrated or
    /// if the last query failed but older samples are still usable.
    /// </remarks>
    QUALITY_UNCERTAIN,
    /// <summary>Value is invalid.</summary>
    /// <remarks>
    /// A variable may be declared bad if the measured value is out of range or
    /// if a timeout occurred.
    /// </remarks>
    QUALITY_BAD
};

struct Payload
{
    /// <summary>Identifier that uniquely points to a single instance.</summary>
    DdsInterface::Id id = DdsInterface::Id();
    /// <summary>Human readable short name.</summary>
    std::string name = "default";
    /// <summary>Actual value.</summary>
    long long value;
    /// <summary>Quality of the Value.</summary>
    Quality quality = QUALITY_GOOD;
    /// <summary>Detailed quality of the variable.</summary>
    QualityDetail qualityDetail = 0;
    /// <summary>Unit of measure.</summary>
    PhysicalQuantity quantity = 0;

    Payload();
    Payload(const DdsInterface::Id id, const std::string topic, const uint64_t counter);
};
// sender function
void QpidAMQP::AMQPPublish(const Payload& payload, bool durability, bool sync)
{
    // Publish to MQTT broker
    qpid::messaging::Message message;
    message.setDurable(durability);
    char b[sizeof (payload)];
    memcpy(b, &payload, sizeof(payload));
    // create stream of bytes to send over the line
    message.setContent(b);
    //message.setContent("testIfSend");
    std::string temp = message.getContent();
    print_bytes(temp.c_str(), sizeof (temp)); // used to check the byte data
    this->sender.send(message);
    this->session.sync(sync);
}

// receiver functions
void *check_for_incoming_messages(QpidAMQP* amqp_instance) // called via pthread
{
    qpid::messaging::Message message;
    std::cout << "check for incoming messages" << std::endl;
    while (amqp_instance->getReceiver()->fetch(message, qpid::messaging::Duration::FOREVER))
    {
        amqp_instance->on_message(&message);
    }
    return nullptr;
}

void QpidAMQP::on_message(qpid::messaging::Message *message)
{
    /// make sure message topic and payload are copied!!!
    if (this->handler)
    {
        std::string temp = message->getContent();
        print_bytes(temp.c_str(), sizeof (temp)); // used to check the byte data
        Payload payload; // re-make the struct
        memcpy(&payload, message->getContent().c_str(), message->getContentSize());
        handler->ReceivedIntegerValue(payload.id.variableId, payload.value);
    }
}
I did check the byte data and they were vastly different.
sender:
[ 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 63 00 00 00 01 00 00 00 00 00 00 00 00 00 00 00 60 32 bf 74 ff 7f 00 00 05 00 00 00 00 00 00 00 74 6f 70 69 63 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ]
receiver:
>[ 74 65 73 74 49 66 53 65 6e 64 00 00 00 7f 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 a0 ed ff 43 57 7f 00 00 07 00 00 00 00 00 00 00 64 65 66 61 75 6c 74 00 6d 05 77 4b 57 7f 00 00 ff ff ff ff ff ff ff ff 00 00 00 00 00 00 00 00 ]
I used the following code to print this out
void print_bytes(const void *object, size_t size)
{
    // This is for C++; in C just drop the static_cast<>() and assign.
    const unsigned char * const bytes = static_cast<const unsigned char *>(object);
    size_t i;
    printf("[ ");
    for (i = 0; i < size; i++)
    {
        printf("%02x ", bytes[i]);
    }
    printf("]\n");
}
When I send only a string instead of the payload, it arrives on the other end. But for some reason a user-defined struct doesn't work.
I want to avoid remapping everything onto the Qpid map type because I would lose the depth of my Payload.id.
If someone has any suggestions to overcome this I would appreciate it.
Thanks in advance,
Nick
I solved the issue.
The problem was that instead of the string contents, a pointer was being copied. By changing std::string name = "default"; into char name[20] = "default"; the actual character string is copied.
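For clarity, the adjusted struct now looks roughly like this (a sketch; only the name field changed from the definition above):

struct Payload
{
    /// <summary>Identifier that uniquely points to a single instance.</summary>
    DdsInterface::Id id = DdsInterface::Id();
    /// <summary>Human readable short name. A fixed-size buffer, so memcpy copies
    /// the characters themselves rather than a pointer to heap storage.</summary>
    char name[20] = "default";
    /// <summary>Actual value.</summary>
    long long value;
    /// <summary>Quality of the Value.</summary>
    Quality quality = QUALITY_GOOD;
    /// <summary>Detailed quality of the variable.</summary>
    QualityDetail qualityDetail = 0;
    /// <summary>Unit of measure.</summary>
    PhysicalQuantity quantity = 0;

    Payload();
    Payload(const DdsInterface::Id id, const std::string topic, const uint64_t counter);
};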
This is how the publisher and subscriber encode and decode the message now:
void QpidAMQP::AMQPPublish(const Payload& payload, bool durability, bool sync)
{
    // Publish to MQTT broker
    // create stream of bytes to send over the line
    qpid::messaging::Message message;
    message.setDurable(durability);
    std::string b;
    b.resize(sizeof(Payload));
    std::memcpy(const_cast<char*>(b.c_str()), &payload, b.size());
    message.setContent(b);
    std::string temp = message.getContent();
    print_bytes(temp.c_str(), temp.size());
    this->sender.send(message);
    this->session.sync(sync);
}

void QpidAMQP::on_message(qpid::messaging::Message *message)
{
    /// make sure message topic and payload are copied!!!
    if (this->handler != nullptr)
    {
        const std::string temp = message->getContent();
        print_bytes(temp.c_str(), temp.size());
        Payload payload;
        std::memcpy(&payload, temp.c_str(), temp.size()); //sizeof(message->getContentBytes().c_str()));
        handler->ReceivedIntegerValue(payload.id.variableId, payload.value);
    }
}
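One extra guard that may be worth adding on the receiving side (a sketch, not part of the original code): check that the received content is exactly sizeof(Payload) before the memcpy, so a short or unrelated message cannot overflow or partially fill the struct.

void QpidAMQP::on_message(qpid::messaging::Message *message)
{
    if (this->handler != nullptr)
    {
        const std::string temp = message->getContent();
        // Ignore anything that is not exactly one serialized Payload.
        if (temp.size() != sizeof(Payload))
            return;
        Payload payload;
        std::memcpy(&payload, temp.data(), sizeof(Payload));
        handler->ReceivedIntegerValue(payload.id.variableId, payload.value);
    }
}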
I'm trying to store the hex codes read from a file into a buffer and then display them on the console, but so far it doesn't seem to work. This is my code:
using namespace std;
int main()
{
    ifstream file("Fishie.ch8", ios::binary);
    if (!file.is_open())
    {
        cout << "Error";
    }
    else
    {
        file.seekg(0, ios::end);
        streamoff size = file.tellg();
        file.seekg(0, ios::beg);
        char *buffer = new char[size];
        file.read(buffer, size);
        file.close();
        for (int i = 0; i < size; i++)
        {
            cout << hex << buffer[i] << " ";
        }
    }
    delete[] buffer;
    cin.get();
}
The expected output should be this:
00 e0 a2 20 62 08 60 f8 70 08 61 10 40 20 12 0e
d1 08 f2 1e 71 08 41 30 12 08 12 10 00 00 00 00
00 00 00 00 00 18 3c 3c 00 00 00 00 00 00 00 00
00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
3e 3f 3f 3b 39 38 38 38 00 00 80 c1 e7 ff 7e 3c
00 1f ff f9 c0 80 03 03 00 80 e0 f0 78 38 1c 1c
38 38 39 3b 3f 3f 3e 3c 78 fc fe cf 87 03 01 00
00 00 00 00 80 e3 ff 7f 1c 38 38 70 f0 e0 c0 00
3c 18 00 00 00 00 00 00 00 00 00 00 00 00 00 00
00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
Instead of the above output I get some strange-looking symbols with lots of empty spaces.
What could be the problem?
As your buffer is char, all elements are printed as characters. What you want is the number converted to hex.
BTW: since you want hexadecimal output, it is a question whether you really want to read char from the file or unsigned char.
As you found out, the signature of istream::read uses char, so you have to convert first to unsigned char and then to unsigned int, like:
cout << hex << (unsigned int)(unsigned char) buffer[i] << " ";
For real C++ users you should write a fine static_cast ;)
This will print out the hex values. But if you have a CR you will see 'a' instead of '0a', so you also have to set the fill character and the field width (note that width() applies only to the next output, so it must be set inside the loop):
cout.fill('0');
for (int i = 0; i < size; i++)
{
    cout.width(2); // width() resets after every insertion, so set it each time
    cout << hex << (unsigned int)(unsigned char)buffer[i] << " ";
}
BTW: delete[] buffer; is in the wrong scope and must be moved into the scope where buffer was defined.
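A sketch of the same loop written with static_cast and the <iomanip> manipulators instead (setw, like width(), has to be reapplied for every value):

#include <iomanip>
#include <iostream>

// ...
for (int i = 0; i < size; i++)
{
    std::cout << std::hex << std::setw(2) << std::setfill('0')
              << static_cast<unsigned int>(static_cast<unsigned char>(buffer[i])) << " ";
}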
I have some sample code that reads binary data from a file and then writes the content into a stringstream.
#include <sstream>
#include <cstdio>
#include <fstream>
#include <cstdlib>
std::stringstream * raw_data_buffer;

int main()
{
    std::ifstream is;
    is.open("1.raw", std::ios::binary);
    char * buf = (char *)malloc(40);
    is.read(buf, 40);
    for (int i = 0; i < 40; i++)
        printf("%02X ", buf[i]);
    printf("\n");

    raw_data_buffer = new std::stringstream("", std::ios_base::app | std::ios_base::out | std::ios_base::in | std::ios_base::binary);
    raw_data_buffer->write(buf, 40);

    const char * tmp = raw_data_buffer->str().c_str();
    for (int i = 0; i < 40; i++)
        printf("%02X ", tmp[i]);
    printf("\n");

    delete raw_data_buffer;
    return 0;
}
With a specific input file I have, the program doesn't function correctly. You can download the test file here.
So the problem is: I write the file content into raw_data_buffer and immediately read it back, and the content differs. The program's output is:
FFFFFFC0 65 59 01 00 00 00 00 00 00 00 00 00 00 00 00 FFFFFFE0 0A 40 00 00 00 00 00 FFFFFF80 08 40 00 00 00 00 00 70 FFFFFFA6 57 6E FFFFFFFF 7F 00 00
00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 FFFFFFE0 0A 40 00 00 00 00 00 FFFFFF80 08 40 00 00 00 00 00 70 FFFFFFA6 57 6E FFFFFFFF 7F 00 00
The content FFFFFFC0 65 59 01 is overwritten with 0. Why so?
I suspect this is a symptom of undefined behavior from using deallocated memory. You're getting a copy of the string from the stringstream, but you're only grabbing a raw pointer to its internals, and the copy is then immediately destroyed. (The link actually warns against this exact case.)
const char* tmp = raw_data_buffer->str().c_str();
// ^^^^^ returns a temporary that is destroyed
// at the end of this statement
// ^^^ now a dangling pointer
Any use of tmp would exhibit undefined behavior and could easily cause the problem you're seeing. Keep the result of str() in scope.
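A minimal sketch of that fix, keeping the string returned by str() alive while the pointer is used (the unsigned char cast is only there to avoid the sign-extended FFFFFFC0-style output visible above):

const std::string s = raw_data_buffer->str(); // keep the copy alive
const char * tmp = s.c_str();                 // pointer stays valid while s is in scope
for (int i = 0; i < 40; i++)
    printf("%02X ", (unsigned char)tmp[i]);
printf("\n");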
How do I detect whether IMAGE_COR_ILMETHOD_SECT_EH uses the Small or the Fat layout?
I am also interested in other internal CLR structure/opcode details. The answer below answers this and many other questions.
/* RVA:0 */
typedef union IMAGE_COR_ILMETHOD {
    IMAGE_COR_ILMETHOD_TINY Tiny;
    IMAGE_COR_ILMETHOD_FAT  Fat;
} IMAGE_COR_ILMETHOD;

/* PC = RVA + sizeof(IMAGE_COR_ILMETHOD) = 12 or 4 bytes */ ... Code

/* EH = PC + CodeSize */
typedef union IMAGE_COR_ILMETHOD_SECT_EH {
    IMAGE_COR_ILMETHOD_SECT_EH_SMALL Small;
    IMAGE_COR_ILMETHOD_SECT_EH_FAT   Fat;
} IMAGE_COR_ILMETHOD_SECT_EH;
https://github.com/dotnet/coreclr/blob/master/src/inc/corhdr.h
For example:
public static void Main(string[] args) {
    int i = 0;
    try {
        Console.Write("OK");
    } catch (Exception) {
        i++;
    }
}
0000 4D 5A 90 00 MZ-header
0250 2A 02 17 8C 06 00 00 01 51 2a 00
RVA: 1B 30 02 00 // IMAGE_COR_ILMETHOD_FAT
1D 00 00 00 CodeSize= 29
01 00 00 11 Locals = 11000001
PC0: 00 16 0A i=0
PC3 00 72 01 00 00 70 try{
28 04 00 00 0A call Console.Write
00 00 DE 09
PC12:26 00 06 17 58 0A 00 DE 00 00 2A (2A is ret command)
00 00 00 01 10 00 // IMAGE_COR_ILMETHOD_SECT_EH ??? 1=count
00 00 CorExceptionFlag Flags
03 00 TryOffset
0F TryLength
12 00 HandlerOffset
09 HandlerLength
08 00 00 01 ClassToken
In this case we have a small EH frame. How do I detect whether the frame is small or fat?
typedef struct IMAGE_COR_ILMETHOD_SECT_EH_CLAUSE_SMALL {
    CorExceptionFlag Flags : 16;
    unsigned TryOffset : 16;
    unsigned TryLength : 8;      // relative to start of try block
    unsigned HandlerOffset : 16;
    unsigned HandlerLength : 8;  // relative to start of handler
    union {
        DWORD ClassToken;
        DWORD FilterOffset;
    };
} IMAGE_COR_ILMETHOD_SECT_EH_CLAUSE_SMALL;
typedef struct IMAGE_COR_ILMETHOD_SECT_EH_CLAUSE_FAT
{
    CorExceptionFlag Flags;
    DWORD TryOffset;
    DWORD TryLength;      // relative to start of try block
    DWORD HandlerOffset;
    DWORD HandlerLength;  // relative to start of handler
    union {
        DWORD ClassToken;    // use for type-based exception handlers
        DWORD FilterOffset;  // use for filter-based exception handlers (COR_ILEXCEPTION_FILTER is set)
    };
} IMAGE_COR_ILMETHOD_SECT_EH_CLAUSE_FAT;
This is covered in partition II section 25.4.5 of ECMA-335.
If the CorILMethod_Sect_FatFormat bit (0x40) is set in the Kind field (the first byte of the structure), then you should use the fat layout, otherwise the small one. The Kind field can be accessed via Small.SectSmall.Kind or Fat.SectFat.Kind; either will work.
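A minimal sketch of that check, assuming the definitions from the corhdr.h linked above are available:

// Returns true if the EH section uses the fat layout.
// Kind is the first byte of the section in both layouts, so reading it
// through the Small view works either way.
bool IsFatEHSection(const IMAGE_COR_ILMETHOD_SECT_EH* eh)
{
    return (eh->Small.SectSmall.Kind & CorILMethod_Sect_FatFormat) != 0;
}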