Fill struct in6_addr with unsigned char array - c++

I'm writing a DNS lookup tool in C++ and I am trying to take an IPv6 address from an
unsigned char * (where it is stored in raw binary form), copy it into a struct in6_addr, and then convert it to text and print it.
struct in6_addr tmp2;
char buf[41];
memcpy(tmp2.s6_addr, answ[i].rdata, 128);
cout << answ[i].name << " IN AAAA " << inet_ntop(AF_INET6, tmp2.s6_addr, buf, 128) << endl;
My output should look like this,
www.domain.name.cz. IN AAAA 2001:67c:1220:809::93e5:917
but somehow it looks like this.
www.domain.name.cz IN AAAA 106:7c12:2008:900::
Generating RDATA
u_char *ReadName(unsigned char *readResponse, unsigned char *buffer, int *count) {
    unsigned char *name;
    unsigned int p = 0, jumped = 0, offset;
    int i, j;
    *count = 1;
    name = (unsigned char *) malloc(256);
    name[0] = '\0';
    // read the names in 3www6google3com format
    while (*readResponse != 0) {
        if (*readResponse >= 192) {
            offset = (*readResponse) * 256 + *(readResponse + 1) - 49152; // 49152 = 11000000 00000000
            readResponse = buffer + offset - 1;
            jumped = 1; // we have jumped to another location so counting won't go up
        } else {
            name[p++] = *readResponse;
        }
        readResponse = readResponse + 1;
        if (jumped == 0) {
            *count = *count + 1; // if we haven't jumped to another location then we can count up
        }
    }
    name[p] = '\0'; // string complete
    if (jumped == 1) {
        *count = *count + 1; // number of steps we actually moved forward in the packet
    }
    // now convert 3www6google3com0 to www.google.com
    for (i = 0; i < (int) strlen((const char *) name); i++) {
        p = name[i];
        for (j = 0; j < (int) p; j++) {
            name[i] = name[i + 1];
            i = i + 1;
        }
        name[i] = '.';
    }
    name[i - 1] = '\0'; // remove the last dot
    return name;
}
Thanks for your help!
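(For the record, two things stand out in the snippet above: memcpy copies 128 bytes into struct in6_addr, which is only 16 bytes long — an IPv6 address is 128 bits, not 128 bytes — and the last argument of inet_ntop is the size of the destination text buffer, not 128. A minimal sketch of the corrected conversion, assuming answ[i].rdata points at the 16 address bytes of the AAAA record:)
#include <arpa/inet.h>
#include <cstring>
#include <iostream>

// Sketch: convert the 16 raw bytes of an AAAA record to text.
// `name` and `rdata` stand in for answ[i].name and answ[i].rdata.
void printAAAA(const char *name, const unsigned char *rdata) {
    struct in6_addr addr;
    char buf[INET6_ADDRSTRLEN];              // 46 bytes, enough for any IPv6 text form
    std::memcpy(&addr, rdata, sizeof(addr)); // exactly 16 bytes, not 128
    if (inet_ntop(AF_INET6, &addr, buf, sizeof(buf)) != nullptr)
        std::cout << name << " IN AAAA " << buf << std::endl;
}
(Note also that the sample output in the question is shifted left by one byte — the leading 0x20 of 2001: is missing — which suggests the rdata pointer itself is off by one. That would point back at the *count bookkeeping in ReadName and the offset arithmetic that skips the name, type, class, TTL and RDLENGTH fields, rather than at this conversion.)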

Related

Copy 80 bit hex number from char array to uint16_t vector or array

Say I have a text file containing the 80-bit hex number
0xabcdef0123456789abcd
My C++ program reads that using fstream into a char array called buffer.
But then I want to store it in a uint16_t array such that:
uint16_t * key = {0xabcd, 0xef01, 0x2345, 0x6789, 0xabcd}
I have tried several approaches, but I continue to get decimal integers, for instance:
const std::size_t strLength = strlen(buffer);
std::vector<uint16_t> arr16bit((strLength / 2) + 1);
for (std::size_t i = 0; i < strLength; ++i)
{
    arr16bit[i / 2] <<= 8;
    arr16bit[i / 2] |= buffer[i];
}
Yields:
arr16bit = {24930, 25444, 25958, 12337, 12851}
There must be an easy way to do this that I'm just not seeing.
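(The snag in the loop above is that it packs the raw ASCII codes of the characters — 'a' is 0x61 and 'b' is 0x62, so the first word comes out as 0x6162 = 24930 — instead of converting each character from hex. A minimal sketch of a direct conversion, assuming buffer holds the text including the leading 0x:)
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Sketch: parse four hex characters at a time into one uint16_t word.
// Assumes `buffer` looks like "0xabcdef0123456789abcd".
std::vector<uint16_t> parseKey(const char *buffer) {
    const char *p = buffer + 2;                 // skip the "0x" prefix
    const std::size_t n = std::strlen(p);
    std::vector<uint16_t> words;
    for (std::size_t i = 0; i + 4 <= n; i += 4) {
        unsigned v;
        if (std::sscanf(p + i, "%4x", &v) == 1) // four hex digits -> one 16-bit word
            words.push_back(static_cast<uint16_t>(v));
    }
    return words;
}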
Here is the full solution I came up with based on the comments:
int hex_char_to_int(char c) {
    if (c >= '0' && c <= '9')      // numbers
        return c - '0';
    else if (c >= 'A' && c <= 'F') // capital letters
        return c - 'A' + 10;
    else if (c >= 'a' && c <= 'f') // lower case letters
        return c - 'a' + 10;
    return -1; // not a hex digit (the original fell off the end here, which is undefined behaviour)
}
uint16_t ints_to_int16(int i0, int i1, int i2, int i3) {
    return (i3 * 16 * 16 * 16) + (i2 * 16 * 16) + (i1 * 16) + i0;
}
void readKey() {
    const int bufferSize = 25;
    char buffer[bufferSize] = { 0 };
    ifstream stream("key.txt"); // stack object instead of a leaked new ifstream
    if (stream.is_open())
        stream.read(buffer, bufferSize - 1); // leave room for the terminator
    cout << buffer << endl;
    const size_t strLength = strlen(buffer);
    int* hex_to_int = new int[strLength - 2];
    for (size_t i = 2; i < strLength; i++) {
        hex_to_int[i - 2] = hex_char_to_int(buffer[i]);
    }
    cout << endl;
    uint16_t key16[5];
    for (int i = 0, j = 0; i < 5; i++, j += 4) {
        // Note: the original passed hex_to_int[j++] four times in one call,
        // which has unspecified evaluation order; index explicitly instead.
        key16[i] = ints_to_int16(hex_to_int[j + 3], hex_to_int[j + 2],
                                 hex_to_int[j + 1], hex_to_int[j]);
        cout << "0x" << hex << key16[i] << " ";
    }
    cout << endl;
    delete[] hex_to_int;
}
This outputs:
0xabcdef0123456789abcd
0xabcd 0xef01 0x2345 0x6789 0xabcd

Facing issues trying to decode base64 image

I have a JPEG image represented as a base64-encoded string. I want to save it as a decoded byte array using the Win32 API WriteFile() function.
Because I will use WriteFile(), I need a C string and I need to know its exact length. strlen() is no good here because it counts up to the first \0, which need not be the actual end of the file data. So I need a function that decodes base64, returns a char*, and outputs the exact byte count.
I have read this answer and took the code from here (I changed some things and marked them):
static const unsigned char base64_table[65] =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
unsigned char * base64_decode(const unsigned char *src, size_t len,
                              size_t *out_len)
{
    unsigned char dtable[256], *out, *pos, block[4], tmp;
    size_t i, count, olen;
    int pad = 0;
    memset(dtable, 0x80, 256); // CHANGED
    for (i = 0; i < sizeof(base64_table) - 1; i++)
        dtable[base64_table[i]] = (unsigned char) i;
    dtable['='] = 0;
    count = 0;
    for (i = 0; i < len; i++) {
        if (dtable[src[i]] != 0x80)
            count++;
    }
    if (count == 0 || count % 4)
        return NULL;
    olen = count / 4 * 3;
    pos = out = new unsigned char[olen]; // CHANGED (note: new throws on failure rather than returning NULL)
    if (out == NULL)
        return NULL;
    count = 0;
    for (i = 0; i < len; i++) {
        tmp = dtable[src[i]];
        if (tmp == 0x80)
            continue;
        if (src[i] == '=')
            pad++;
        block[count] = tmp;
        count++;
        if (count == 4) {
            *pos++ = (block[0] << 2) | (block[1] >> 4);
            *pos++ = (block[1] << 4) | (block[2] >> 2);
            *pos++ = (block[2] << 6) | block[3];
            count = 0;
            if (pad) {
                if (pad == 1)
                    pos--;
                else if (pad == 2)
                    pos -= 2;
                else {
                    /* Invalid padding */
                    delete[] out; // CHANGED (was free(); memory from new[] must be released with delete[])
                    return NULL;
                }
                break;
            }
        }
    }
    *out_len = pos - out;
    return out;
}
Usage
unsigned char base[]="data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBwgHBgkIBwgKCgkLDRYPDQwMDRsUFRAWIB0iIiAdHx8kKDQsJCYxJx8fLT0tMTU3Ojo6Iys/RD84QzQ5OjcBCgoKDQwNGg8PGjclHyU3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3N//AABEIAGgAaAMBIgACEQEDEQH/xAAbAAADAQEBAQEAAAAAAAAAAAAABAUGAwIBB//EAD0QAAEDAgQDBQUECAcAAAAAAAEAAgMEEQUSITEGUXMTNIGxwSJBYXGRMnKh8RQjJDNjwtHwFUJSU2Kisv/EABgBAAMBAQAAAAAAAAAAAAAAAAIDBAEA/8QAHxEAAgICAwEBAQAAAAAAAAAAAAECEQMyEiExImET/9oADAMBAAIRAxEAPwD9xU7Gu7R9QeRVFT8Z7szqDyKGfgUfSOAvewuV8CXqMxksdvcp2POwniBt2gXUPbz05qZK3RIGufHT1UWb2mxksv7vcs5Ubxsh4xxVVSYg5lNUugiBOQNNtB/mctdwvWS1mGj9Im7aaM5XPtq4czb3r82w7OcQc9oLpHRsDbi+tidR4rccEvzurezLHQjs2te03Dy1uUu+ungEuEvofkxpQNLM0OYQpV+zeD72aeCsEAhSZ2WnLTs7T5JkiaIw92anB+Ca4P7vVj+P6BToHE01ju3QqlwiLQ1nW/lCPHsZPVl9CEKgQCQxju7PvjyKfSGMd3Z1B5FDPw2PpJXyZmdmm41svq9ZsjS7kpx4lIw5dRZRpqF888nZEaNym/MqzPITdx3UHDcWa/GX0WRzZM5sdw/2QfDZBSbpjI3VoZwnh6CkkM8gMk7iC5ztibW22Vijw+mw8CanpoYAXbRxhoPPQL3me54ttde8cfMMInfT2a+Jl72vbUE+Nrp/FRV0KcpSdNjlxfTT4ckhiDBv4grlhMufD4ZG7Ea/PmmKhwdH+CVytWFxcXQlE62ccwCq/CotHWdb+UKNcD3a7K1wv+6qjzlHkEWPYHJqXEIQqRAJDGO7s++PIp9T8Z7uzqDyKGepsfSWuEz7uy8l2GqWkPtu+amZQheqIbG4/BZnBW07MSdiNTOGXeWxC+5Itc+BHiVbxqXssPqH8oz5LK00Z7KzzZoO43uNB4aXWRVyGXxgzfUlRBq7t2HLuQbhq6YliFP/AIeWU0sU5ma5vsOzDkb/AFX55hVQG403tjEIQ4tc4kgjQWt42+C3OEUMEZe4DOb5sxNy48zzTJSdcULUa+mdcPYaaibDktfYckOuN0zLpvoFwN36ZbDml1XRt27FJPeQrXCutPUH+IP/ACFFnblJCtcKd2qOr6BFi2Byal1CEKonBT8Z7szqDyKoKfjXdmdQeRQz1YUfSRewNkrqd910mfYBvNcwCN91KyhEbiV9qER3t2jw38VHojDURvETs8bnFwd8/aHmm+NH2pYWt+0ZWgW+an4OI45yIgOymBc0DYEHKR/1CZiXfI6bXFIIaSWMySSMa6ITFjJCP+IOv1/BbbBH5qZvMCyn0tIKjBqqID2u0L2/MAfku3Dkl4i3f3o+V2KaLD7bnVLvu466AJh+qXkSWMQrUG4VnhTus/V9Aok+xVrhPutR1fQLcexmTUuoQhVE4KbjndWdQeRVJTcd7pH1B5FDPVhR9RIbYpd7rucV1aUsTvdSMpMdxRO+XG4KYEBjWnUnZxG/0SOKwTQ0j5KKR8b6R7ZBlNs8bi1rx9crvzXrH2ifiOSN1j7B08GL469PUwxaEVOeFwDbWBY4+YCoS+ALqZsuDw9mEMdISXSPc/X+/gpbsRnwniCWgLGsjdZ0UltXNP8AZHgq3D0magBb9nN7PysElxzS9pQQVzG/rKWQXI3DXaH8bKW3x6KEo/0aZpg8OaHN2IuFzekMDqjU4bG8nVOPNhcok7QpqpUJ1BKt8Im9LUdX0Cz9XMLezqrnBhcaSpLv930CPHuDk0NEhCFUTApmPn9kj6o8iqalcQ90j6o8ihnqwobIigpZx3XYFLSmxcFIypGGxQOPFE8hBDWs3tzA/ovdS/tcUoB/oka5w+84DyB+qpY7BFZ0shs47G6z+H1OeqfK++Vj22dblsn45p9AThXZseFayJ0D6XN+tike0j7psdfp9Vdq2Q1FNLTz6xytLHD5rH4HUNGKYk1pykSMeG32uwAnx9VoGTgblTvroa1bsS4WfJDSz08n24XljtPeDZVXHNqdVJpGhuN1pbK4h2V2TSwu0fD4KndCjZ9uznMNFoODxakqOr6BZ+YrQ8Id0qOr6BNxbisuhfQhCrJQUniPucfVHkV9QhnqwobIzbpWNOpueQS8rjI4lpy38UIUZYkJVGHRVH74lx5lJs4ep2ZsksoDjctBsEIWr8OZ9pOHqakqv0mG3aC+UuaCW33sd9U3LSTEXY8k/ByELGjbJeFVFWcXmY6GEtLhmPbHO0DS+W3w95WkuhCw2XpymOi0fBx/ZKjq+gQhMxbicuhoEIQqyU//2Q==";
size_t re;
unsigned char *g = base64_decode(base, 2568, &re); // 2568 is the length Visual Studio shows when hovering over the char[]
// after the call, re equals 1921
HANDLE f2 = CreateFile(L"img.jpeg", GENERIC_WRITE, 0, 0, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
DWORD wr2;
WriteFile(f2, g, re, &wr2, 0);
CloseHandle(f2);
The file I am getting is not viewable; the Photos app says it is corrupted. The main problem: it weighs 1.87 KB, but it should be 2.31 KB (I downloaded this image from a browser).
What am I doing wrong?
As #IngoLeonhardt pointed out, I should not pass the data:image/jpeg;base64, prefix to the function. Now it works.
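(A minimal sketch of that fix, assuming the input always carries a data: URL header that ends at the first comma:)
#include <cstring>

// Sketch: strip a "data:image/jpeg;base64," style prefix before decoding.
// Uses the base64_decode() above; `base` must be NUL-terminated.
unsigned char *decode_data_url(const unsigned char *base, size_t *out_len) {
    const char *text = (const char *) base;
    const char *comma = strchr(text, ',');  // end of the "data:...;base64" header
    const char *payload = comma ? comma + 1 : text;
    return base64_decode((const unsigned char *) payload, strlen(payload), out_len);
}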

Convert LPCTSTR HEXADECIMAL to BYTE in VS C++

I have the function below, which converts an LPCTSTR to BYTEs, but the input str only supports decimal digits as of now.
void StrToByte2(LPCTSTR str, BYTE *dest)
{
    UINT count = _ttoi(str);
    BYTE buf[4] = { 0 };
    char string[10] = { 0 };
    sprintf_s(string, 10, "%04d", count);
    for (int i = 0; i < 4; ++i)
    {
        if ((string[i] >= '0') && (string[i] <= '9'))
            buf[i] = string[i] - '0';
    }
    dest[0] = (BYTE)(buf[0] << 4) | buf[1];
    dest[1] = (BYTE)(buf[2] << 4) | buf[3];
}
If I call this function on "1234" (any digits), dest outputs some value like 12814,
struct st
{
    byte btID[2];
    int nID;
};
PTR ptr(new st);
StrToByte2(strCode, ptr->btID);
but when I call this function on anything hexadecimal, e.g. A123, it always outputs 0000.
The function below is used to convert dest back to a string:
CString Byte2ToStr(const byte* pbuf)
{
    CString str;
    str.Format(_T("%02X%02X"), pbuf[0], pbuf[1]);
    return str;
}
How can I get A123 converted to bytes and then back to a string that displays A123?
Please help!
PTR ptr(new st);
This is a memory leak in C++, because new st allocates memory and there is no way to release it.
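(Assuming PTR is a typedef for a raw pointer, as this comment implies, the idiomatic fix is a smart pointer that releases the memory automatically:)
#include <memory>

std::unique_ptr<st> ptr(new st); // freed automatically when ptr goes out of scope
StrToByte2(strCode, ptr->btID);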
UINT count = _ttoi(str);
...
sprintf_s(string, 10, "%04d", count);
This converts the string to an integer, then converts the integer back to a string. It doesn't seem to serve a real purpose.
For example, "1234" is converted to 1234 and back to "1234". But "A123" is not a valid decimal number, so it is converted to 0 and then to "0000". That is why the method fails; you can just work with the original string.
It seems this function tries to fit two small integers into one byte. This can be done as long as each value is at most 15 (0xF); I don't know what purpose this might have. It can be fixed as follows:
void StrToByte2(const wchar_t* str, BYTE *dest)
{
    int len = wcslen(str);
    if (len != 4)
        return; // handle error
    char buf[4] = { 0 };
    for (int i = 0; i < 4; ++i)
        if (str[i] >= L'0' && str[i] <= L'9')
            buf[i] = (BYTE)(str[i] - L'0');
    dest[0] = (buf[0] << 4) + buf[1];
    dest[1] = (buf[2] << 4) + buf[3];
}
CStringW Byte2_To_Str(BYTE *dest)
{
    CStringW str;
    str.AppendFormat(L"%X", 0xF & (dest[0] >> 4));
    str.AppendFormat(L"%X", 0xF & (dest[0]));
    str.AppendFormat(L"%X", 0xF & (dest[1] >> 4));
    str.AppendFormat(L"%X", 0xF & (dest[1]));
    return str;
}

int main()
{
    BYTE dest[2] = { 0 };
    StrToByte2(L"1234", dest);
    OutputDebugStringW(Byte2_To_Str(dest));
    OutputDebugStringW(L"\n");
    return 0;
}
If the string is hexadecimal, you can use sscanf to convert each pair of characters to a byte.
Basically, "1234" becomes 12 34,
and "A123" becomes A1 23:
bool hexstring_to_bytes(const wchar_t* str, BYTE *dest, int dest_size = 2)
{
    int len = wcslen(str);
    if ((len / 2) > dest_size)
    {
        // error
        return false;
    }
    for (int i = 0; i < len / 2; i++)
    {
        int v;
        if (swscanf_s(str + i * 2, L"%2x", &v) != 1)
            break;
        dest[i] = (unsigned char)v;
    }
    return true;
}

CStringW bytes_to_hexstring(const BYTE* bytes, int byte_size = 2)
{
    CStringW str; // was CString, which only matches the return type in Unicode builds
    for (int i = 0; i < byte_size; i++)
        str.AppendFormat(L"%02X ", bytes[i] & 0xFF);
    return str;
}

int main()
{
    CStringW str;
    CStringW new_string;
    BYTE dest[2] = { 0 };

    str = L"1234";
    hexstring_to_bytes(str, dest);
    new_string = bytes_to_hexstring(dest);
    OutputDebugString(new_string);
    OutputDebugString(L"\n");

    str = L"A123";
    hexstring_to_bytes(str, dest);
    new_string = bytes_to_hexstring(dest);
    OutputDebugStringW(new_string);
    OutputDebugStringW(L"\n");

    return 0;
}
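(As a plain standard-library alternative to the MFC types above — just a sketch — std::stoul with base 16 handles the string-to-bytes direction, and iostream manipulators handle the way back:)
#include <cstdint>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>

int main()
{
    // "A123" -> the two bytes 0xA1 0x23.
    std::string s = "A123";
    unsigned char dest[2];
    for (std::size_t i = 0; i < 2; i++)
        dest[i] = (unsigned char) std::stoul(s.substr(i * 2, 2), nullptr, 16);

    // ...and back to "A123".
    std::ostringstream out;
    for (unsigned char b : dest)
        out << std::uppercase << std::hex << std::setw(2) << std::setfill('0') << (int) b;
    std::cout << out.str() << std::endl; // prints A123
    return 0;
}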

C++: Convert Win32 textbox char -> int -> char and put back in another textbox

I want to accept text input using a text box, then change the characters to integers and do fun math with my integers, and then put them back into a char array to be printed in another text box.
Here is my code:
int len = GetWindowTextLength(textbox) + 1;
char* text = new char[len];
GetWindowText(textbox, &text[0], len);
int x = 0;
int INTmessage[len];
int ENClen = (len * 2);
char ENCmessage[ENClen];
while (x < len) {
    INTmessage[x] = int(text[x]) - 32;
    x++;
}
int z = 0;
int y = 0;
while (z < ENClen) {
    ENCmessage[z] = (INTmessage[y] % 9);
    ENCmessage[z + 1] = (INTmessage[y] % 10);
    z += 2;
    y++;
}
SetWindowText(textreturn, "");
SetWindowText(textreturn, ENCmessage[0]);
The last line displays a compiler error:
invalid conversion from 'char' to LPCSTR.
Please specify what you mean by 'I don't know why this does not work'. One error in your code is this:
// This line is incorrect: it converts an address to an integer, which has
// no relation to the value of the textbox, making decryption impossible.
INTmessage[x] = int(&text[x]) - 32;
// You probably want this instead:
INTmessage[x] = int(text[x]) - 32;
The last line fails because you are passing a single char to SetWindowText() (accessing ENCmessage[0] returns the first char in the ENCmessage array). SetWindowText() expects a char* pointer to a null-terminated string instead. You can drop the [0]:
SetWindowText(textreturn, ENCmessage);
Just make sure that ENCmessage contains a null character after your digit characters.
That being said, your code can be re-written to something more like this:
int len = GetWindowTextLength(textbox) + 1;
char* text = new char[len];
len = GetWindowText(textbox, text, len);

int *INTmessage = new int[len];
for (int x = 0; x < len; ++x) {
    INTmessage[x] = int(text[x]) - 32;
}

int ENClen = (len * 2) + 1;
char *ENCmessage = new char[ENClen];
for (int x = 0, y = 0; x < len; ++x, y += 2) {
    // store printable digit characters, not raw 0-8/0-9 values
    ENCmessage[y] = char((INTmessage[x] % 9) + '0');
    ENCmessage[y + 1] = char((INTmessage[x] % 10) + '0');
}
ENCmessage[ENClen - 1] = '\0';

SetWindowText(textreturn, ENCmessage);

delete[] INTmessage;
delete[] ENCmessage;
delete[] text;
Or, since you tagged the question as C++, like this instead:
#include <string>
#include <vector>

int len = GetWindowTextLength(textbox) + 1;
std::string text;
text.resize(len);
len = GetWindowText(textbox, &text[0], len);

std::vector<int> INTmessage(len);
for (int x = 0; x < len; ++x) {
    INTmessage[x] = int(text[x]) - 32;
}

int ENClen = (len * 2);
std::string ENCmessage;
ENCmessage.resize(ENClen);
for (int x = 0, y = 0; x < len; ++x, y += 2) { // was "int x = 0; y = 0;", which doesn't compile
    ENCmessage[y] = char((INTmessage[x] % 9) + '0');
    ENCmessage[y + 1] = char((INTmessage[x] % 10) + '0');
}
SetWindowText(textreturn, ENCmessage.c_str());

Arduino: casting char to integer

I'm getting strange issues when casting char to int in the Arduino IDE. The method should scan an array of char for "IP". If there are one or more digits after those characters, they should be converted to an int.
int getPin(char dataFromAndroid[]) {
    for (int i = 0; i < 10; i++) {
        if (dataFromAndroid[i] == 'I') {
            if (dataFromAndroid[i + 1] == 'P') { // digital pin
                char c1 = dataFromAndroid[i + 2];
                char c2 = dataFromAndroid[i + 3];
                int digPinNr;
                char str;
                str += c1;
                if (c2 != '0') {
                    str += c2;
                }
                digPinNr = str - '0';
                Serial.print("c1:");
                Serial.print(c1);
                Serial.print("c2:");
                Serial.print(c2);
                Serial.print("str");
                Serial.print(str);
                Serial.print("Pin:");
                Serial.println(digPinNr);
                return digPinNr;
            }
        }
    }
}
Sometimes the program gets 5 instead of 7 (see the output below). Can someone help me please?
Empfangen:W204IP5 c1:5c2: str5Pin:5
Empfangen:W106IP5 c1:5c2: str7Pin:7
I have found a solution for the problem now: the culprit appears to be char str;, which declares a single uninitialized character, so str += c1 does arithmetic on whatever garbage was already in it instead of building a string. I changed it to an Arduino String, and the conversion is done like: digPinNr = str.toInt();
Here's the full solution:
int getPin(char dataFromAndroid[]) {
    for (int i = 0; i < 10; i++) {
        if (dataFromAndroid[i] == 'I') {
            if (dataFromAndroid[i + 1] == 'P') { // digital pin
                char c1 = dataFromAndroid[i + 2];
                char c2 = dataFromAndroid[i + 3];
                int digPinNr;
                String str = "";
                str += c1;
                if (c2 != '0') {
                    str += c2;
                }
                digPinNr = str.toInt();
                Serial.print("c1:");
                Serial.print(c1);
                Serial.print("c2:");
                Serial.print(c2);
                Serial.print("str");
                Serial.print(str);
                Serial.print("Pin:");
                Serial.println(digPinNr);
                return digPinNr;
            }
        }
    }
    return -1; // added: falling off the end previously returned garbage when no "IP" was found
}
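(If you would rather avoid the Arduino String class — it allocates on the heap, which can fragment memory on small boards — here is a sketch of the same parse with plain char arithmetic. The explicit digit test is an assumption on my part; the original only compared c2 against '0'.)
// Sketch: read up to two digits after "IP" without building a String.
// Assumes the digits directly follow "IP" inside the first 10 characters.
int getPinNoString(const char dataFromAndroid[]) {
    for (int i = 0; i < 10; i++) {
        if (dataFromAndroid[i] == 'I' && dataFromAndroid[i + 1] == 'P') {
            int pin = dataFromAndroid[i + 2] - '0'; // first digit
            char c2 = dataFromAndroid[i + 3];
            if (c2 >= '0' && c2 <= '9')             // optional second digit
                pin = pin * 10 + (c2 - '0');
            return pin;
        }
    }
    return -1; // no "IP" marker found
}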