I have a chunk of data which is supposed to be zlib compressed data (I was not 100% sure).
I first tried to uncompress it with gzip by prepending "1F 8B 08 00 00 00 00 00". Just like in the accepted answer of this thread (https://unix.stackexchange.com/questions/22834/how-to-uncompress-zlib-data-in-unix). It worked out and it was probably the right approach, because the output contained a lot of human readable strings.
I then tried to implement this in a C++ program using zlib. But it seems that zlib generates a different output. Am I missing something? zlib and gzip should be basically the same (apart from the headers and trailers), shouldn't they? Or do I have a simple error in my code below? (the chunk of data is shortened for the sake of simplicity)
// Usage sketch from the question (the real dataChunk was elided by the asker).
unsigned char* decompressed;
unsigned char* dataChunk = /*...*/;
printHex(dataChunk, 160);
// NOTE(review): `decompressed` is still uninitialized here, and inflateZlib
// (defined below) receives the pointer BY VALUE, so the `new` inside it never
// reaches this variable...
int error = inflateZlib(dataChunk, 160, decompressed, 1000);
// ...which means this call reads through an indeterminate pointer.
printHex(decompressed, 160);
//zerr(error);
// Dumps `n` bytes as space-separated lowercase hex (no zero padding, so a
// single-digit byte prints as one character), followed by a "-" separator
// line. Restores std::cout to decimal mode before returning.
//
// Fixed: the original declaration had no return type, which is not valid C++.
void printHex(unsigned char* data, size_t n)
{
    for(size_t i = 0; i < n; i++)
    {
        // Widen to uint16_t so operator<< prints a number instead of a char.
        std::cout << std::hex << (uint16_t)data[i] << " ";
    }
    std::cout << std::dec << "\n-\n";
}
// Inflates `length` bytes from `data` into a freshly allocated buffer and
// returns the last zlib status code.
//
// NOTE(review): `decompressed` is received BY VALUE, so the `new` below only
// updates the local copy -- the caller's pointer is never set and the buffer
// leaks on return. Pass `unsigned char*&` (or return the pointer) to fix; this
// is why the caller's printHex(decompressed, ...) prints garbage.
// NOTE(review): inflateInit() accepts only a raw zlib header (78 9c ...). If
// the input may carry a gzip header (1f 8b ...), use inflateInit2() with
// windowBits 15+32 for automatic zlib/gzip detection, as the accepted answer
// below does.
int inflateZlib(unsigned char* data, size_t length, unsigned char* decompressed, size_t maxDecompressed)
{
decompressed = new unsigned char[maxDecompressed]; // only the local copy changes (leak)
z_stream infstream;
infstream.zalloc = Z_NULL; // use zlib's default allocator
infstream.zfree = Z_NULL;
infstream.opaque = Z_NULL;
infstream.avail_in = (uInt)(length); // size of input
infstream.next_in = (Bytef *)data; // input char array
infstream.avail_out = (uInt)maxDecompressed; // size of output
infstream.next_out = (Bytef *)decompressed; // output char array
// the actual DE-compression work.
int ret = inflateInit(&infstream);
zerr(ret);
ret = inflate(&infstream, Z_NO_FLUSH);
zerr(ret);
inflateEnd(&infstream);
return ret;
}
This produces the following output:
78 9c bd 58 4b 88 23 45 18 ee 3c 67 e3 24 93 cc ae 8a f8 42 10 c4 cb 1a 33 a3 7b f0 60 e6 e0 e6 e0 49 90 bd 29 4d 4d 77 25 dd 99 ee ea de aa ee 4c 32 82 2c e8 c1 93 ac 47 c5 45 f 82 8 5e 16 f ba 78 18 45 d0 83 7 95 15 5c d0 c3 aa b0 b2 ee 65 5c f0 e4 c5 bf aa 1f a9 ea 74 cf 64 7 31 c3 24 9d fa fe bf ea ab ff 59 15 ab 62 6a b5 5d 9b 8c 18 2a 5b 15 47 d3 b4 92 55 35 b5 ba b7 3d c6 46 b0 a3 35 3 1c 50 64 61 93 7a a4 67 d5 0 e1 c2 d8 e4 92 75 fe 56 b3 ca a6 76 c2 f0 1c 8f
-
0 0 6 c0 83 50 0 0 16 b0 78 9c bd 58 4b 88 23 45 18 ee 3c 67 e3 24 93 cc ae 8a f8 42 10 c4 cb 1a 33 a3 7b f0 60 e6 e0 e6 e0 49 90 bd 29 4d 4d 77 25 dd 99 ee ea de aa ee 4c 32 82 2c e8 c1 93 ac 47 c5 45 f 82 8 5e 16 f ba 78 18 45 d0 83 7 95 15 5c d0 c3 aa b0 b2 ee 65 5c f0 e4 c5 bf aa 1f a9 ea 74 cf 64 7 31 c3 24 9d fa fe bf ea ab ff 59 15 ab 62 6a b5 5d 9b 8c 18 2a 5b 15 47 d3 b4 92 55 35 b5 ba b7 3d c6 46 b0 a3 35 3 1c 50 64 61 93 7a a4 67 d5 0 e1 c2 d8 e4 92 75
-
which is not what I want.
Whereas gzip:
printf "\x1f\x8b\x08\x00\x00\x00\x00\x00\x78\x9c\xbd\x58\x4b\x88\x23\x45\x18\xee\x3c\x67\xe3\x24\x93\xcc\xae\x8a\xf8\x42\x10\xc4\xcb\x1a\x33\xa3\x7b\xf0\x60\xe6\xe0\xe6\xe0\x49\x90\xbd\x29\x4d\x4d\x77\x25\xdd\x99\xee\xea\xde\xaa\xee\x4c\x32\x82\x2c\xe8\xc1\x93\xac\x47\xc5\x45\xf\x82\x8\x5e\x16\xf\xba\x78\x18\x45\xd0\x83\x7\x95\x15\x5c\xd0\xc3\xaa\xb0\xb2\xee\x65\x5c\xf0\xe4\xc5\xbf\xaa\x1f\xa9\xea\x74\xcf\x64\x7\x31\xc3\x24\x9d\xfa\xfe\xbf\xea\xab\xff\x59\x15\xab\x62\x6a\xb5\x5d\x9b\x8c\x18\x2a\x5b\x15\x47\xd3\xb4\x92\x55\x35\xb5\xba\xb7\x3d\xc6\x46\xb0\xa3\x35\x3\x1c\x50\x64\x61\x93\x7a\xa4\x67\xd5\x0\xe1\xc2\xd8\xe4\x92\x75\xfe\x56\xb3\xca\xa6\x76\xc2\xf0\x1c\x8f" | gzip -dc | hexdump -C
produces:
gzip: stdin: unexpected end of file
00000000 68 03 64 00 05 77 69 6e 67 73 61 02 68 03 6c 00 |h.d..wingsa.h.l.|
00000010 00 00 01 68 04 64 00 06 6f 62 6a 65 63 74 6b 00 |...h.d..objectk.|
00000020 0c 74 65 74 72 61 68 65 64 72 6f 6e 31 68 05 64 |.tetrahedron1h.d|
00000030 00 06 77 69 6e 67 65 64 6c 00 00 00 06 6c 00 00 |..wingedl....l..|
00000040 00 05 68 02 64 00 08 63 6f 6c 6f |..h.d..colo|
0000004b
which is what I want.
I was able to decode the data you provided by using zlib 1.2.8 and the inflateInit2 function with 32 for windowBits. I used 32 based on this information from the zlib documentation:
windowBits can also be zero to request that inflate use the window size in the zlib header of the compressed stream.
and
Add 32 to windowBits to enable zlib and gzip decoding with automatic header detection
Here's the full code. I stripped out error checking since I don't have a zerr function. It doesn't appear you're using Visual C++, so you will want to remove the #pragma to avoid a warning as well.
#include <iostream>
#include <iomanip>
#include <cstdint>
#include <cctype>
#include "zlib.h"
#pragma comment(lib, "zdll.lib")
// Width of one hexdump row, shared by printLine/printHex below.
const size_t block_size = 16;

// Renders a single hexdump row: a zero-padded offset, block_size hex byte
// cells (bytes past `n` are padded as 00), and an ASCII column where only
// alphanumeric bytes are shown literally. A row with n == 0 prints nothing.
// Note: like the original, this leaves the sticky stream flags (std::hex,
// fill character, alignment) set on std::cout after the first row.
void printLine(unsigned char* data, size_t offset, size_t n)
{
    if(!n)
    {
        return;
    }
    std::cout << std::setw(8) << std::setfill('0') << std::right << offset << " ";
    for(size_t col = 0; col < block_size; ++col)
    {
        // Extra gap in front of each half of the row.
        if(col % (block_size/2) == 0)
        {
            std::cout << " ";
        }
        uint16_t cell = (col < n) ? data[col] : 0;
        std::cout << std::hex << std::setw(2) << cell << " ";
    }
    std::cout << "|";
    for(size_t col = 0; col < block_size; ++col)
    {
        char shown = (col < n && isalnum(data[col])) ? static_cast<char>(data[col]) : '.';
        std::cout << shown;
    }
    std::cout << "|\n";
}
void printHex(unsigned char* data, size_t n)
{
const size_t blocks = n / block_size;
const size_t remainder = n % block_size;
for(size_t i = 0; i < blocks; i++)
{
size_t offset = i * block_size;
printLine(&data[offset], offset, block_size);
}
size_t offset = blocks * block_size;
printLine(&data[offset], offset, remainder);
std::cout << "\n";
}
int inflateZlib(unsigned char* data, uint32_t length, unsigned char* decompressed, uint32_t maxDecompressed)
{
z_stream infstream;
infstream.zalloc = Z_NULL;
infstream.zfree = Z_NULL;
infstream.opaque = Z_NULL;
infstream.avail_in = length;
infstream.next_in = data;
infstream.avail_out = maxDecompressed;
infstream.next_out = decompressed;
inflateInit2(&infstream, 32);
inflate(&infstream, Z_FINISH);
inflateEnd(&infstream);
return infstream.total_out;
}
// Demo driver: dumps the compressed chunk, inflates it, dumps the result.
int main()
{
// gzip magic (1f 8b ...) prepended to the zlib stream (78 9c ...), matching
// the bytes the asker fed to gzip on the command line.
unsigned char dataChunk[] =
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x78\x9c\xbd\x58\x4b\x88\x23\x45"
"\x18\xee\x3c\x67\xe3\x24\x93\xcc\xae\x8a\xf8\x42\x10\xc4\xcb\x1a"
"\x33\xa3\x7b\xf0\x60\xe6\xe0\xe6\xe0\x49\x90\xbd\x29\x4d\x4d\x77"
"\x25\xdd\x99\xee\xea\xde\xaa\xee\x4c\x32\x82\x2c\xe8\xc1\x93\xac"
"\x47\xc5\x45\xf\x82\x8\x5e\x16\xf\xba\x78\x18\x45\xd0\x83\x7\x95"
"\x15\x5c\xd0\xc3\xaa\xb0\xb2\xee\x65\x5c\xf0\xe4\xc5\xbf\xaa\x1f"
"\xa9\xea\x74\xcf\x64\x07\x31\xc3\x24\x9d\xfa\xfe\xbf\xea\xab\xff"
"\x59\x15\xab\x62\x6a\xb5\x5d\x9b\x8c\x18\x2a\x5b\x15\x47\xd3\xb4"
"\x92\x55\x35\xb5\xba\xb7\x3d\xc6\x46\xb0\xa3\x35\x03\x1c\x50\x64"
"\x61\x93\x7a\xa4\x67\xd5\x00\xe1\xc2\xd8\xe4\x92\x75\xfe\x56\xb3"
"\xca\xa6\x76\xc2\xf0\x1c\x8f";
// Caller-owned, zero-initialized output buffer (see the note at the end of
// this answer about defining `decompressed` as an array).
unsigned char decompressed[1000] = {};
// NOTE(review): sizeof(dataChunk) also counts the string literal's trailing
// '\0', so one extra byte is handed to the inflater -- presumably harmless
// because the deflate stream ends before it, but worth confirming.
printHex(dataChunk, sizeof(dataChunk));
uint32_t len = inflateZlib(dataChunk, sizeof(dataChunk), decompressed, sizeof(decompressed));
printHex(decompressed, len);
return 0;
}
I think you might want to define decompressed differently:
unsigned char decompressed[1000];
Related
I have trouble creating signatures with OpenSSL on a Raspberry Pi. RSA, ECDSA and EdDSA fail. Here is a small example:
#pragma once
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>
#include <openssl/err.h>
#include <openssl/evp.h>
#include <openssl/rsa.h>
// Signs a short message with a freshly generated RSA-2048 key and prints the
// signature as hex.
//
// Fixes (see the "solved!" follow-up below): EVP_DigestSign is first called
// with a null signature buffer to query the required size -- passing a length
// smaller than the final signature makes some providers (observed on ARM with
// OpenSSL 3.1) fail with "invalid signature size". Also, main() must return
// int, not void.
int main()
{
    // message to be signed
    std::string msg_str = "my messsage";
    std::vector<uint8_t> msg(msg_str.begin(), msg_str.end());
    // generate a key pair
    EVP_PKEY* key = EVP_PKEY_Q_keygen(nullptr, nullptr, "RSA", 2048);
    // signing context
    EVP_MD_CTX* md_ctx(EVP_MD_CTX_new());
    size_t out_len = 0; // final signature length (calculated by openssl)
    // NOTE(review): SHA-1 is kept to preserve the original behavior, but it
    // is deprecated for signatures -- prefer EVP_sha256() in new code.
    if (EVP_DigestSignInit(md_ctx, nullptr, EVP_sha1(), nullptr, key) == 0)
    {
        std::cout << "EVP_DigestSignInit error: " << ERR_error_string(ERR_get_error(), NULL) << std::endl;
    }
    // First pass: query the required signature length (no signing performed;
    // therefore a fast call).
    EVP_DigestSign(md_ctx, nullptr, &out_len, msg.data(), msg.size());
    std::vector<uint8_t> sig_out(out_len);
    // Second pass: produce the signature into a correctly sized buffer.
    if (EVP_DigestSign(md_ctx, sig_out.data(), &out_len, msg.data(), msg.size()) == 0)
    {
        std::cout << "EVP_DigestSign error: " << ERR_error_string(ERR_get_error(), NULL) << std::endl;
    }
    else
    {
        sig_out.resize(out_len);
        std::cout << "signature length: " << sig_out.size() << " data: " << std::endl;
        for (size_t i = 0; i < out_len; i++)
        {
            printf("%.2X ", sig_out[i]);
        }
    }
    EVP_PKEY_free(key);
    EVP_MD_CTX_free(md_ctx);
    return 0;
}
On my desktop PC all signatures work but not on the Raspberry Pi. Here is the screen output:
Windows 10, AMD CPU, x64 system, OpenSSL 3.0.0:
signature length: 256 data:
59 A9 45 F5 2B 97 51 F5 53 A8 AE 17 16 7A 26 28
F5 68 3F 1F 3D B2 05 4F 0E 28 AF F2 F5 0E DA FF
37 71 50 DD DA E1 DE F0 91 05 0A 07 79 30 00 03
A4 1E F5 60 F5 7E 47 97 EF 88 9C 27 70 CE 64 63
0B 6C 2E 50 7B D7 89 48 B6 73 44 AD 7A 02 EA 49
BC D3 95 67 B8 E6 D9 E4 A1 4F 2B E8 F4 5C F8 73
B5 53 B0 A5 FB BB 7A 81 1C 25 23 6F 30 D8 8F D8
EC 9E 02 00 C2 0D 7C 9C 23 66 D7 44 62 FF 51 1A
94 3F 6F FB D7 B2 C5 2B A4 03 09 E5 10 44 D4 AE
A2 69 F3 EB 31 1B CB 2A 14 1D 76 CD 11 09 B9 76
99 59 42 5A 74 3D 14 98 B7 87 FD 98 16 17 AC 9E
DA 55 82 0B 93 3D 24 28 4F 09 EB EA AE 82 77 47
B2 E2 C8 1E 62 FF E4 90 E6 18 E8 88 94 B4 F0 AF
DF A2 2B D7 79 32 BD C5 0F B1 03 36 B6 D8 44 9A
FA DB 02 EB 7D FE D5 D7 15 34 77 72 4D 4E 44 A8
E7 DA D9 2B 49 80 43 58 1F AA F4 1D 27 80 1C EE
Raspberry Pi, ARM CPU, Debian (bullseye), x64 system, OpenSSL 3.1.0:
EVP_DigestSign error: error:1C8000B3:Provider routines::invalid signature size
does anyone have any idea what the problem may be? I am now a bit desperate.
best regards,
SBond
solved!
this error occurs (only on ARM) when out_len is smaller than the final signature length. I need to change the code as follows:
from
if (EVP_DigestSign(md_ctx, sig_out.data(), &out_len, msg.data(), msg.size()) == 0)
{
std::cout << "EVP_DigestSign error: " << ERR_error_string(ERR_get_error(), NULL) << std::endl;
}
to
EVP_DigestSign(md_ctx, nullptr, &out_len, msg.data(), msg.size()); // calculates the final length (does not perform signature; therfore a fast process)
sig_out.resize(out_len);
if (EVP_DigestSign(md_ctx, sig_out.data(), &out_len, msg.data(), msg.size()) == 0)
{
std::cout << "EVP_DigestSign error: " << ERR_error_string(ERR_get_error(), NULL) << std::endl;
}
now I'm happy :)
I'm trying to sign a file with my private key using the following function:
// Signs `file` with the member RSASS<PSSR, Whirlpool> private key.
//
// NOTE(review): using the same path for both FileSource and FileSink means the
// sink opens the file for writing -- truncating it (FileSink uses
// std::ios_base::trunc, per the answer below) -- while the source reads it, so
// the original contents are lost and only the signature remains. Capture the
// signature in a ByteQueue (or write it to a second file) instead.
void Signer::SignFile(const std::string& file)
{
RSASS<PSSR, Whirlpool>::Signer signer(rsaPrivate);
// Encrypt and write
FileSource ss1(file.c_str(), true,
new SignerFilter(rng, signer,
new FileSink(file.c_str(), true)
, true));
}
As an outcome my target file gets overwritten with the signature. Why does that happen? Obviously I'd like to append it to the file, that's why I also added an extra "true" parameter for the "putMessage".
FileSource ss1(file.c_str(), true,
new SignerFilter(rng, signer,
new FileSink(file.c_str(), true), true));
I [personally] have never seen someone use the same file as a source and sink. Usually the file data and signature are separate like with a detached signature.
It looks like one file/two streams is implementation defined: C++ read and write to same file using different streams. I guess you should expect seemingly random results on different platforms.
As an outcome my target file gets overwritten with the signature. Why does that happen?
The FileSink opens with std::ios_base::trunc. Also see FileSink on the Crypto++ wiki.
You should probably do something like this. It saves the signature to an intermediate ByteQueue, and then writes the queue to the file once the file is closed.
#include "cryptlib.h"
#include "filters.h"
#include "osrng.h"
#include "files.h"
#include "pssr.h"
#include "rsa.h"
#include "whrlpool.h"
#include <iostream>
#include <fstream>
#include <string>
// Demonstrates signing a file with RSASS<PSSR, Whirlpool> and appending the
// signature to the same file: the signature is captured in a ByteQueue while
// the file is only read, then written back in append mode. The scoping braces
// deliberately control when each stream/source is destroyed (and flushed).
int main(int argc, char* argv[])
{
using namespace CryptoPP;
AutoSeededRandomPool prng;
std::string fname("test.bin");
///// Create test message /////
{
std::string msg("Yoda said, Do or do not. There is no try.");
std::ofstream out(fname.c_str());
out.write(msg.data(), msg.size());
} // stream closes here so the message is flushed before it is read back
///// Generate a key /////
RSASS<PSSR, Whirlpool>::Signer signer;
signer.AccessKey().GenerateRandomWithKeySize(prng, 2048);
///// Sign the message /////
// The Redirector sends the signature into `queue` instead of a FileSink, so
// the source file is never opened for writing (no truncation).
ByteQueue queue;
{
FileSource source(fname.c_str(), true,
new SignerFilter(prng, signer,
new Redirector(queue)));
} // reading FileSource destroyed before the file is reopened below
///// Append the signature /////
{
std::ofstream out(fname.c_str(), std::ios_base::out | std::ios_base::binary | std::ios_base::app);
queue.TransferTo(FileSink(out).Ref());
}
///// Create a verifier /////
// Constructed from the signer, i.e. from the same key material.
RSASS<PSSR, Whirlpool>::Verifier verifier(signer);
///// Verify the message /////
{
FileSource source(fname.c_str(), true,
new SignatureVerificationFilter(verifier, NULLPTR,
SignatureVerificationFilter::THROW_EXCEPTION));
}
std::cout << "Verified signature on message" << std::endl;
return 0;
}
It does not crash and it appends the message as expected:
$ ./test.exe
Verified signature on message
$ hexdump -C test.bin
00000000 59 6f 64 61 20 73 61 69 64 2c 20 44 6f 20 6f 72 |Yoda said, Do or|
00000010 20 64 6f 20 6e 6f 74 2e 20 54 68 65 72 65 20 69 | do not. There i|
00000020 73 20 6e 6f 20 74 72 79 2e 19 f2 1c 8f f9 cb 2f |s no try......./|
00000030 f2 38 9f a8 3b a9 0b 8b 62 25 56 a8 ea 81 7e 60 |.8..;...b%V...~`|
00000040 22 55 38 ce 79 7f 32 95 a5 1a 75 c1 80 ad b2 c2 |"U8.y.2...u.....|
00000050 6f ce a5 f7 bd 4b d3 3f e4 b3 69 00 21 60 d7 09 |o....K.?..i.!`..|
00000060 a8 71 9b 5f 41 d6 66 b1 80 f1 de 00 26 19 34 01 |.q._A.f.....&.4.|
00000070 b3 65 1b 78 e2 32 71 be bc 07 25 78 36 6b 56 4e |.e.x.2q...%x6kVN|
00000080 26 4e 12 9e a8 bb 72 86 ee 0d 70 b2 f1 bd a3 2c |&N....r...p....,|
00000090 14 fd 12 61 35 98 4a 80 9f ee 3c 31 d3 70 26 0f |...a5.J...<1.p&.|
000000a0 73 a0 5d 36 ef 96 56 65 f8 ac 3a fb 44 c3 04 76 |s.]6..Ve..:.D..v|
000000b0 e5 2f ae 92 84 be 40 34 f6 4b b8 84 aa bd 67 74 |./....@4.K....gt|
000000c0 05 43 91 d2 e6 b1 50 dd 6d 64 47 cc 3e 3c 3a 9d |.C....P.mdG.><:.|
000000d0 67 ff 4f 38 c1 a5 a6 d5 92 45 bc 2d ff 96 30 3a |g.O8.....E.-..0:|
000000e0 1d 3a 42 4f 8c 13 2d 4c 3f e9 ad 08 a6 b3 5e fa |.:BO..-L?.....^.|
000000f0 46 08 24 17 43 ce ed ec f7 1a 38 62 e7 bf 42 93 |F.$.C.....8b..B.|
00000100 84 44 b6 05 22 9e e3 bd 80 a6 08 b0 34 d0 a4 89 |.D..".......4...|
00000110 78 48 20 7f 7b 33 1c 51 9d 48 b5 b7 f7 de 2f dd |xH .{3.Q.H..../.|
00000120 d7 74 7b af 04 cd 92 fc 1c |.t{......|
I was not able to get this to work. I'm pretty sure it is a dead end.
std::fstream inout(fname.c_str(), std::ios_base::in | std::ios_base::out | std::ios_base::binary | std::ios_base::app);
FileSource fs(inout, true,
new SignerFilter(prng, signer,
new FileSink(inout), true));
Obviously I'd like to append it to the file, that's why ...
Also, because you are using PSSR, you don't need the original message. PSSR is "probabilistic signature scheme with recovery". The message is include in the signature with a mask function.
You would need the original message with a SSA scheme. SSA is "signature scheme with appendix". In a SSA scheme you need to provide both the original message and the signature.
(From the comments) Here is an example that uses a Signature Scheme with Appendix (SSA). It also uses std::iftream and std::ofstream directly so FileSource and FileSink work as you expect. std::ofstream includes std::ios_base::app so the signature is appended.
#include "cryptlib.h"
#include "filters.h"
#include "osrng.h"
#include "files.h"
#include "oaep.h"
#include "rsa.h"
#include "sha.h"
#include <iostream>
#include <fstream>
#include <string>
// Same flow as the PSSR example above, but using a Signature Scheme with
// Appendix (PKCS#1 v1.5 + SHA-256). The input is read through a std::ifstream
// and the signature written through a separate std::ofstream opened with
// std::ios_base::app, so nothing truncates the source file.
int main(int argc, char* argv[])
{
using namespace CryptoPP;
AutoSeededRandomPool prng;
std::string fname("test.bin");
///// Create test message /////
{
std::string msg("Yoda said, Do or do not. There is no try.");
std::ofstream out(fname.c_str());
out.write(msg.data(), msg.size());
} // stream closes here so the message is flushed before signing
///// Generate a key /////
RSASS<PKCS1v15, SHA256>::Signer signer;
signer.AccessKey().GenerateRandomWithKeySize(prng, 2048);
{
///// Create fstreams for input and output /////
// Two distinct streams on the same path: read-only in, append-only out.
std::ifstream fin(fname.c_str(), std::ios_base::in | std::ios_base::binary);
std::ofstream fout(fname.c_str(), std::ios_base::out | std::ios_base::binary | std::ios_base::app);
///// Sign the message /////
FileSource source(fin, true,
new SignerFilter(prng, signer,
new FileSink(fout)));
} // both streams close here, before verification re-reads the file
///// Create a verifier /////
// Constructed from the signer, i.e. from the same key material.
RSASS<PKCS1v15, SHA256>::Verifier verifier(signer);
///// Verify the message /////
{
FileSource source(fname.c_str(), true,
new SignatureVerificationFilter(verifier, NULLPTR,
SignatureVerificationFilter::THROW_EXCEPTION));
}
std::cout << "Verified signature on message" << std::endl;
return 0;
}
And then:
$ ./test.exe
Verified signature on message
$ hexdump -C test.bin
00000000 59 6f 64 61 20 73 61 69 64 2c 20 44 6f 20 6f 72 |Yoda said, Do or|
00000010 20 64 6f 20 6e 6f 74 2e 20 54 68 65 72 65 20 69 | do not. There i|
00000020 73 20 6e 6f 20 74 72 79 2e c7 b3 6f 84 1d fd bf |s no try...o....|
00000030 c7 c8 38 7c 89 b1 f3 42 ee 5e f8 10 de a8 01 7f |..8|...B.^......|
00000040 7f a5 24 3d 27 7e 55 16 bc 80 8b 21 21 75 3d ed |..$='~U....!!u=.|
00000050 41 05 84 b1 3d bf d3 ae 3a 2f a8 81 7a e7 e4 ae |A...=...:/..z...|
00000060 50 d7 9b 25 04 17 a6 a3 1d 12 e2 8e cd 7a 02 42 |P..%.........z.B|
00000070 91 c0 d7 fc 43 09 94 a2 66 d9 67 95 55 5e dc 8c |....C...f.g.U^..|
00000080 eb bc 20 af e8 5c d4 63 05 d4 2c 48 57 6d f1 fe |.. ..\.c..,HWm..|
00000090 26 16 80 c3 41 11 58 8e 8d b0 cb 48 95 b9 ed 94 |&...A.X....H....|
000000a0 84 cc 86 0f a4 7e a3 6a ff 0d 0d 24 17 82 13 94 |.....~.j...$....|
000000b0 54 cb 8a ca 04 1e 65 18 c3 ab a2 3f 4d 44 de 42 |T.....e....?MD.B|
000000c0 32 07 29 e4 95 83 cc ff 39 85 08 bf d5 61 46 db |2.).....9....aF.|
000000d0 e0 96 d6 69 25 b9 ce 1e 3e bc 63 81 e5 16 bd 12 |...i%...>.c.....|
000000e0 a0 78 02 19 60 96 80 36 7d a5 79 be 0f 45 54 f4 |.x..`..6}.y..ET.|
000000f0 92 af f0 d8 74 65 7d 45 98 c7 bb 7f 6e 9b e3 cd |....te}E....n...|
00000100 c0 60 91 0f 78 aa 7c 77 a7 f5 4e 7d 6e ed e1 4c |.`..x.|w..N}n..L|
00000110 8e 5e 96 ac cd 30 16 e0 2d be 9e 2d 68 d4 25 46 |.^...0..-..-h.%F|
00000120 86 77 87 be 68 ef 06 26 55 |.w..h..&U|
I'm working with zlib and have some problem with decompression. I try to decompress packets that come to my program, but only the first packet is decompessed correctly. For example :
//first compressed packet
78 5e 72 65 60 08 65 bf cd c0 60 28 98 3f 95 03
08 18 19 19 25 18 4c af b9 32 38 0a a4 d6 6c 6d
6c 60 60 04 42 20 60 31 2b c9 37 61 c9 2c 28 33
e3 cc cd 4c 2e ca 2f ce 4f 2b 61 4e ce cf 65 00
29 38 c0 03 51 c6 7c 9b 81 e5 40 44 32 23 00
//first decompressed packet
//inflate return 0
45 00 00 55 07 db 00 00 31 11 6f 95 08 08 08 08
01 01 01 18 00 35 d6 45 00 41 10 65 7c b5 81 80
00 01 00 01 00 00 00 00 04 36 74 6f 34 04 69 70
76 36 09 6d 69 63 72 6f 73 6f 66 74 03 63 6f 6d
00 00 01 00 01 c0 0c 00 01 00 01 00 00 03 db 00
04 c0 58 63 01
But when i try to decompress second packet "inflate" function return me -3 and decompress nothing. Example for second compressed packet :
//second compressed packet
//inflate return -3
72 65 60 f0 62 bf 03 36 74 3e c2 d0 77 cb 19 cc
de cc d8 18 8c 30 94 b9 20 b1 92 35 33 bf 38 b1
84 a9 a8 14 c5 24 17 2f 06 96 88 63 e7 ad 01 00
I tried to initialize the decompressor with the windowBits values MAX_WBITS, -MAX_WBITS, and 30, but it did not help. How can I solve this problem?
Code example :
//functions
// Resolve the zlib entry points from a dynamically loaded zlib DLL.
// "inflateInit2_" is the raw exported symbol behind the inflateInit2 macro;
// it takes the version string and sizeof(z_stream) as extra sanity arguments.
InitZDecompressor = ( int (WINAPI *)( z_stream_s*, int,const char*,int)) GetProcAddress(zlibdll,"inflateInit2_");
ZDecompressor = (int (WINAPI *)(z_stream_s*,int)) GetProcAddress(zlibdll,"inflate");
ResetZDecompressor = (int (WINAPI *)(z_stream_s*)) GetProcAddress(zlibdll,"inflateEnd");
//initialize
// Inflates one captured packet into out_b, using a z_stream that is
// initialized once and then reused for subsequent packets (a single
// continuing deflate stream).
//
// NOTE(review): per the answer below, the sender flushes each packet with an
// empty stored block and strips its last four bytes; the receiver must insert
// 00 00 ff ff between packets before inflating, otherwise the second packet
// fails with -3 (Z_DATA_ERROR).
// NOTE(review): IsInit is set to true even when InitZDecompressor fails (its
// result `res` is never checked), and the loop only tests ret == 0 -- worth
// tightening.
__int32 Decoder(unsigned __int8* PDU, unsigned __int32 size, unsigned __int8 * out_b, z_stream_s & stream, bool & IsInit)
{
if (IsDllLoaded == false || PDU == nullptr) { return 0; }//if Zlib DLL was not loaded, or incoming packet is not cTCP
if ( !IsInit )
{
SecureZeroMemory(&stream, sizeof(stream));
auto res = InitZDecompressor( &stream, MAX_WBITS , "1.2.11", sizeof(z_stream_s));//initialize only one time
IsInit = true;
}
stream.next_in = PDU;
stream.avail_in = size;
stream.next_out = out_b;
stream.avail_out = 1048576; // caller (testZlib) allocates a 1 MiB buffer
stream.total_out = 0;
__int32 ret = 0;
//inflate
while ( stream.avail_in && ret == 0 )
{
ret = ZDecompressor(&stream, 2); // 2 == Z_SYNC_FLUSH in zlib.h
}
return ret;
}
//inflateEnd
// Tears down the shared inflate state: calls inflateEnd (via the resolved
// function pointer), clears the init flag, and zeroes the stream struct so a
// later call can re-initialize it from scratch. No-op if never initialized.
void ResetDecompessor(bool & isInit, z_stream_s & stream)
{
    if (!isInit)
    {
        return;
    }
    ResetZDecompressor(&stream);
    isInit = false;
    memset(&stream, 0, sizeof(stream));
}
//test func
// Feeds one compressed packet to Decoder using a temporary 1 MiB output
// buffer (matching the avail_out hard-coded in Decoder).
// NOTE(review): raw new[]/delete[] -- the buffer leaks if Decoder throws;
// a std::vector<unsigned __int8> would be exception-safe. The decompressed
// bytes are also discarded here, so only the return code is exercised.
void testZlib(unsigned __int8 *StPt, __int64 size,z_stream_s & stream,bool & isInit)
{
// StPt - start of compressed data
//size - size of compressed data
//isInit - is zStream already initialize
unsigned __int8 * OutBuf = new unsigned __int8[ 1048576 ];
auto res = zlib->Decoder( StPt,size, OutBuf, stream, isInit );
delete [] OutBuf;
}
What's happening here is that the sender is flushing the deflate compressor with an empty stored block in order to produce a decompressible packet, and then deleting the last four bytes of the empty stored block, expecting you, the receiver, to insert that.
So what you need to do is insert the bytes 00 00 ff ff between the compressed packets, and then decompress the whole thing as one zlib stream. Do not initialize inflate for the second packet -- just keep feeding compressed data to the inflator (including the inserted bytes).
I'm trying to load an RSA private key from a std::string that contains the private key in PEM format, like this:
-----BEGIN RSA PRIVATE KEY-----
MIIBOgIBAAJBAK8Q+ToR4tWGshaKYRHKJ3ZmMUF6jjwCS/u1A8v1tFbQiVpBlxYB
paNcT2ENEXBGdmWqr8VwSl0NBIKyq4p0rhsCAQMCQHS1+3wL7I5ZzA8G62Exb6RE
INZRtCgBh/0jV91OeDnfQUc07SE6vs31J8m7qw/rxeB3E9h6oGi9IVRebVO+9zsC
IQDWb//KAzrSOo0P0yktnY57UF9Q3Y26rulWI6LqpsxZDwIhAND/cmlg7rUz34Pf
SmM61lJEmMEjKp8RB/xgghzmCeI1AiEAjvVVMVd8jCcItTdwyRO0UjWU4JOz0cnw
5BfB8cSIO18CIQCLVPbw60nOIpUClNxCJzmMLbsrbMcUtgVS6wFomVvsIwIhAK+A
YqT6WwsMW2On5l9di+RPzhDT1QdGyTI5eFNS+GxY
-----END RSA PRIVATE KEY-----
And I wonder if anyone can help me to use this key instead of generating a random with the following statement.
CryptoPP::RSA::PrivateKey rsaPrivate;
rsaPrivate.GenerateRandomWithKeySize (rnd, 512);
The key is PEM encoded. You need to strip the PEM header and footer, then convert from Base64 back to DER/BER, and finally use Crypto++'s BERDecodePrivateKey.
There's some reading on the subject at the Crypto++ wiki under Keys and Formats. Below is the code to perform the conversion (I don't believe Stack Overflow has a working example of it in Crypto++).
// PEM-encoded RSA private key (the asker's sample key).
string RSA_PRIV_KEY =
"-----BEGIN RSA PRIVATE KEY-----\n"
"MIIBOgIBAAJBAK8Q+ToR4tWGshaKYRHKJ3ZmMUF6jjwCS/u1A8v1tFbQiVpBlxYB\n"
"paNcT2ENEXBGdmWqr8VwSl0NBIKyq4p0rhsCAQMCQHS1+3wL7I5ZzA8G62Exb6RE\n"
"INZRtCgBh/0jV91OeDnfQUc07SE6vs31J8m7qw/rxeB3E9h6oGi9IVRebVO+9zsC\n"
"IQDWb//KAzrSOo0P0yktnY57UF9Q3Y26rulWI6LqpsxZDwIhAND/cmlg7rUz34Pf\n"
"SmM61lJEmMEjKp8RB/xgghzmCeI1AiEAjvVVMVd8jCcItTdwyRO0UjWU4JOz0cnw\n"
"5BfB8cSIO18CIQCLVPbw60nOIpUClNxCJzmMLbsrbMcUtgVS6wFomVvsIwIhAK+A\n"
"YqT6WwsMW2On5l9di+RPzhDT1QdGyTI5eFNS+GxY\n"
"-----END RSA PRIVATE KEY-----";
// PEM armor lines to locate and strip before Base64-decoding the body.
static string HEADER = "-----BEGIN RSA PRIVATE KEY-----";
static string FOOTER = "-----END RSA PRIVATE KEY-----";
size_t pos1, pos2;
pos1 = RSA_PRIV_KEY.find(HEADER);
if(pos1 == string::npos)
throw runtime_error("PEM header not found");
pos2 = RSA_PRIV_KEY.find(FOOTER, pos1+1);
if(pos2 == string::npos)
throw runtime_error("PEM footer not found");
// Start position and length
pos1 = pos1 + HEADER.length();
pos2 = pos2 - pos1; // pos2 now holds the LENGTH of the Base64 body, not an index
string keystr = RSA_PRIV_KEY.substr(pos1, pos2);
// Base64 decode, place in a ByteQueue
// (the newlines inside the body are not Base64 alphabet characters and are
// presumably skipped by the decoder -- verify against the Crypto++ docs)
ByteQueue queue;
Base64Decoder decoder;
decoder.Attach(new Redirector(queue));
decoder.Put((const byte*)keystr.data(), keystr.length());
decoder.MessageEnd();
// Write to file for inspection
FileSink fs("decoded-key.der");
queue.CopyTo(fs); // CopyTo leaves the bytes in the queue for the decode below
fs.MessageEnd();
try
{
CryptoPP::RSA::PrivateKey rsaPrivate;
rsaPrivate.BERDecodePrivateKey(queue, false /*paramsPresent*/, queue.MaxRetrievable());
// BERDecodePrivateKey is a void function. Here's the only check
// we have regarding the DER bytes consumed.
ASSERT(queue.IsEmpty());
}
catch (const Exception& ex)
{
cerr << ex.what() << endl;
exit (1);
}
After loading the key, you can validate it with:
AutoSeededRandomPool prng;
bool valid = rsaPrivate.Validate(prng, 3);
if(!valid)
cerr << "RSA private key is not valid" << endl;
And print it with:
cout << "N: " << rsaPrivate.GetModulus() << endl << endl;
cout << "E: " << rsaPrivate.GetPublicExponent() << endl << endl;
cout << "D: " << rsaPrivate.GetPrivateExponent() << endl << endl;
If the key is password protected, then Crypto++ cannot decode it. The library lacks the support to perform the decryption. In this case, you can convert it to BER/DER using the following OpenSSL command. Then you can use the key material with Crypto++.
openssl pkcs8 -nocrypt -in rsa-key.pem -inform PEM -topk8 -outform DER -out rsa-key.der
The sample program wrote the key to file with this:
FileSink fs("decoded-key.der");
queue.CopyTo(fs);
fs.MessageEnd();
The CopyTo leaves the bytes in the queue for use later. You can dump the file with an ASN.1 tool, like Gutmann's dumpasn1:
$ dumpasn1 decoded-key.der
0 314: SEQUENCE {
4 1: INTEGER 0
7 65: INTEGER
: 00 AF 10 F9 3A 11 E2 D5 86 B2 16 8A 61 11 CA 27
: 76 66 31 41 7A 8E 3C 02 4B FB B5 03 CB F5 B4 56
: D0 89 5A 41 97 16 01 A5 A3 5C 4F 61 0D 11 70 46
: 76 65 AA AF C5 70 4A 5D 0D 04 82 B2 AB 8A 74 AE
: 1B
74 1: INTEGER 3
77 64: INTEGER
: 74 B5 FB 7C 0B EC 8E 59 CC 0F 06 EB 61 31 6F A4
: 44 20 D6 51 B4 28 01 87 FD 23 57 DD 4E 78 39 DF
: 41 47 34 ED 21 3A BE CD F5 27 C9 BB AB 0F EB C5
: E0 77 13 D8 7A A0 68 BD 21 54 5E 6D 53 BE F7 3B
143 33: INTEGER
: 00 D6 6F FF CA 03 3A D2 3A 8D 0F D3 29 2D 9D 8E
: 7B 50 5F 50 DD 8D BA AE E9 56 23 A2 EA A6 CC 59
: 0F
178 33: INTEGER
: 00 D0 FF 72 69 60 EE B5 33 DF 83 DF 4A 63 3A D6
: 52 44 98 C1 23 2A 9F 11 07 FC 60 82 1C E6 09 E2
: 35
213 33: INTEGER
: 00 8E F5 55 31 57 7C 8C 27 08 B5 37 70 C9 13 B4
: 52 35 94 E0 93 B3 D1 C9 F0 E4 17 C1 F1 C4 88 3B
: 5F
248 33: INTEGER
: 00 8B 54 F6 F0 EB 49 CE 22 95 02 94 DC 42 27 39
: 8C 2D BB 2B 6C C7 14 B6 05 52 EB 01 68 99 5B EC
: 23
283 33: INTEGER
: 00 AF 80 62 A4 FA 5B 0B 0C 5B 63 A7 E6 5F 5D 8B
: E4 4F CE 10 D3 D5 07 46 C9 32 39 78 53 52 F8 6C
: 58
: }
0 warnings, 0 errors.
I am trying to dump the memory (made with malloc) to a file. I want to dump the raw data because I don't know what's inside the memory (int float double) at the point that I want to dump the memory.
What's the best way to do this?
I have tried a few things already, but none of them worked as I wanted.
In C, it's quite trivial, really:
/*
 * Dump `size` bytes of heap memory to "memory.bin".
 *
 * Fixed short-write loop: fwrite(cursor, 1, to_go, out) returns the number of
 * BYTES written (element size 1), and the cursor advances past what was
 * written. The original fwrite(data, to_go, 1, out) returned an ITEM count
 * (0 or 1), so `to_go -= wrote` subtracted items instead of bytes and the
 * pointer never moved -- a successful write left to_go at size-1 and the loop
 * re-wrote the buffer from the start; a partial write silently lost data.
 */
const size_t size = 4711;
void *data = malloc(size);
if(data != NULL)
{
    FILE *out = fopen("memory.bin", "wb");
    if(out != NULL)
    {
        const unsigned char *cursor = (const unsigned char *)data;
        size_t to_go = size;
        while(to_go > 0)
        {
            const size_t wrote = fwrite(cursor, 1, to_go, out);
            if(wrote == 0)
                break; /* write error: stop rather than spin */
            cursor += wrote;
            to_go -= wrote;
        }
        fclose(out);
    }
    free(data);
}
The above attempts to properly loop fwrite() to handle short writes, that's where most of the complexity comes from.
It's not clear what you mean by "not working".
You could reinterpret_cast the memory to a char * and write it to file easily.
Reading it back again is a different matter.
The "C++ way" of doing it would probably involve using std::ostream::write with a stream in binary mode.
#include <fstream>
#include <string>
// Writes `bytes` raw bytes from `data` to `filename` in binary mode.
// Returns true only if the stream opened and was still good after the write.
bool write_file_binary (std::string const & filename,
char const * data, size_t const bytes)
{
    std::ofstream b_stream(filename.c_str(),
        std::fstream::out | std::fstream::binary);
    if (!b_stream)
    {
        return false;
    }
    b_stream.write(data, bytes);
    return b_stream.good();
}
// Demo: dump a 100-double buffer to "test.bin" as raw bytes.
// Fixed: the buffer is now value-initialized (note the trailing "()"), so we
// no longer read -- and write to disk -- indeterminate memory.
int main (void)
{
double * buffer = new double[100]();
write_file_binary("test.bin",
reinterpret_cast<char const *>(buffer),
sizeof(double)*100);
delete[] buffer;
return 0;
}
If this is C++, this might help you, as part of serializing and deserializing,
I write the raw memory array to a file (using new[] is essentially the same
as malloc in the C world):
https://github.com/goblinhack/simple-c-plus-plus-serializer
#include "hexdump.h"
// Number of bytes in the demo buffer below.
auto elems = 128;
// Writes a demo byte buffer through the serializer and hexdumps it.
// NOTE(review): this snippet is garbled -- "for (auto i = 0; i > bits(a);" is
// not valid C++ and looks like an HTML-mangled paste (presumably a fill loop
// such as "for (auto i = 0; i < elems; i++) a[i] = i;" followed by a stream
// write like "out << bits(a);", matching the 00..7f hexdump output below --
// confirm against the linked repository). Also note: std::ofstream is passed
// BY VALUE, which does not compile (streams are non-copyable), and the new[]
// buffer is never delete[]d.
static void serialize (std::ofstream out)
{
auto a = new char[elems];
for (auto i = 0; i > bits(a);
hexdump(a, elems);
}
Output:
128 bytes:
0000 00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f |................|
0010 10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f |................|
0020 20 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f | !"#$%&'()*+,-./|
0030 30 31 32 33 34 35 36 37 38 39 3a 3b 3c 3d 3e 3f |0123456789:;<=>?|
0040 40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f |@ABCDEFGHIJKLMNO|
0050 50 51 52 53 54 55 56 57 58 59 5a 5b 5c 5d 5e 5f |PQRSTUVWXYZ[\]^_|
0060 60 61 62 63 64 65 66 67 68 69 6a 6b 6c 6d 6e 6f |`abcdefghijklmno|
0070 70 71 72 73 74 75 76 77 78 79 7a 7b 7c 7d 7e 7f |pqrstuvwxyz{|}~.|