EVP_PKEY_sign gives signature length 0 unexpectedly - C++

I'm trying to sign a message with a private key in C++ using OpenSSL. I followed an example, but I can't get EVP_PKEY_sign to work: it sets the signature length to 0. Is there anything wrong with my code?
Function to sign a message:
void sign_buff(const string& priv_key_file_path, const unsigned char* buff, int buff_len, string& sig) {
    FILE* f = fopen(priv_key_file_path.c_str(), "r");
    EC_KEY* ec_key = PEM_read_ECPrivateKey(f, NULL, NULL, NULL);
    fclose(f);
    assert(1 == EC_KEY_check_key(ec_key));

    EVP_PKEY* key = EVP_PKEY_new();
    assert(1 == EVP_PKEY_assign_EC_KEY(key, ec_key));

    EVP_PKEY_CTX* key_ctx = EVP_PKEY_CTX_new(key, NULL);
    assert(1 == EVP_PKEY_sign_init(key_ctx));
    assert(1 == EVP_PKEY_CTX_set_signature_md(key_ctx, EVP_sha256()));

    size_t sig_len = 0;
    assert(1 == EVP_PKEY_sign(key_ctx, NULL, &sig_len, buff, buff_len));
    cout << sig_len; //gives 0

    sig.assign(sig_len, 0);
    assert(1 == EVP_PKEY_sign(key_ctx, (unsigned char*)&sig[0], &sig_len, buff, buff_len));

    EVP_PKEY_CTX_free(key_ctx);
    EVP_PKEY_free(key);
}
I call the function with
string priv="priv.pem";
string sig;
unsigned char buff[] = "Random message to be signed";
sign_buff(priv, &buff[0], sizeof(buff), sig);
where a private key is stored in priv.pem

Why is my OpenSSL/libcrypto signature incorrect?

I need to sign a string, publish the public key as a string, and then somewhere else use the public key to verify the signed message. This is the part where the message is signed:
// RSA keypair generation
EVP_PKEY *keypair = NULL;
EVP_PKEY_CTX *ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, NULL);
if (1 != EVP_PKEY_keygen_init(ctx)) {
initFail = true;
}
if (1 != EVP_PKEY_CTX_set_rsa_keygen_bits(ctx, 2048)) {
initFail = true;
}
if (1 != EVP_PKEY_keygen(ctx, &keypair)) {
initFail = true;
}
EVP_PKEY_CTX_free(ctx);
// Create public key string.
BIO* bo = BIO_new(BIO_s_mem());
PEM_write_bio_PUBKEY(bo, keypair);
char buff[1000];
BIO_read(bo, &buff[0], 1000);
BIO_free(bo);
pubkey = buff;
// Create signature
size_t *slen = new size_t;
unsigned char *sig = NULL;
std::string msg;
msg = stuffThatCreatesMessage();
EVP_MD_CTX *mdctx = EVP_MD_CTX_create();
if (1 != EVP_DigestSignInit(mdctx, NULL, EVP_sha256(),
NULL, keypair)) {
initFail = true;
}
if (1 != EVP_DigestSignUpdate(mdctx, msg.c_str(), msg.length())) {
initFail = true;
}
if (1 != EVP_DigestSignFinal(mdctx, NULL, slen)) {
initFail = true;
}
sig = (unsigned char *) OPENSSL_malloc(
sizeof(unsigned char) * (*slen));
if (1 != EVP_DigestSignFinal(mdctx, sig, slen)) {
initFail = true;
}
signature = *sig;
OPENSSL_free(sig);
bool isSuccess = verifySignature(signature, pubkey, msg);
Here's the code that takes the string message and key and actually verifies the signature:
bool verifySignature(std::string sig, std::string key_str, std::string msg) {
BIO* bo = BIO_new(BIO_s_mem());
BIO_write(bo, key_str.c_str(), key_str.length());
EVP_PKEY *key = EVP_PKEY_new();
PEM_read_bio_PUBKEY(bo, &key, 0, 0);
BIO_free(bo);
unsigned char *unsigned_sig = new unsigned char[sig.length()+1];
strcpy((char *) unsigned_sig, sig.c_str());
EVP_MD_CTX *mdctx = EVP_MD_CTX_create();
if (1 != EVP_DigestVerifyInit(mdctx, NULL, EVP_sha256(), NULL, key)) {
return false;
}
if (1 != EVP_DigestVerifyUpdate(mdctx, msg.c_str(), msg.length())) {
return false;
}
bool retval = (1 == EVP_DigestVerifyFinal(mdctx, unsigned_sig,
sig.length()));
delete unsigned_sig;
return retval;
}
Every time I do this, the latter function tells me the signature is invalid, with the isSuccess variable equal to 0. It's because EVP_DigestVerifyFinal is returning 0, indicating that the signature is incorrect. Where am I going wrong?
The problem, pointed out by @jww, was the line signature = *sig. I was ending up with only the first byte as my signature string. The correct way to do it is:
std::string temp_sig = reinterpret_cast<char*>(sig);
signature = temp_sig.substr(0, *slen);
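Note that sig points at raw binary from OPENSSL_malloc and is not guaranteed to be NUL-terminated, so building a std::string straight from the pointer still depends on finding a stray zero byte. A length-based assign avoids that; a one-line sketch using the same sig and slen variables, placed before the OPENSSL_free(sig) call:
// Copy exactly *slen bytes of the binary signature, without relying on a terminating NUL.
signature.assign(reinterpret_cast<char*>(sig), *slen);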

How to correctly convert char to byte in C++

I have a problem with the following code:
z=recv(conn,buff,512,0);//"Hi VahagnAAAAAAA" - but encrypted for example "zЖWЙЇ%ЂАЊ"S]яАAЧ0АбЯ.Щk5S¤Oц", length 32
BYTE messageLen = (BYTE)strlen(buff);// messageLen = 32
BYTE encryptedMessage[32];
memcpy(encryptedMessage, buff, messageLen);//!!!!!!!!!!!
DWORD encryptedMessageLen = messageLen;
CryptDecrypt(hSessionKeyRSA_2,NULL,TRUE,0,encryptedMessage, &encryptedMessageLen);
cout<<encryptedMessage<<endl;
I recv into a 512-byte char buffer, which receives the encrypted text
"zЖWЙЇ%ЂАЊ"S]яАAЧ0АбЯ.Щk5S¤Oц"
(32 bytes). After I copy it into the byte array, encryptedMessage holds
"zЖWЙЇ%ЂАЊ"S]яАAЧ0АбЯ.Щk5S¤OцMMMMMMMMMMMMMMMMMMM"
and when I decrypt it I don't get the original text back; I get
"Ik VqagnеAAcS]‰МММММММММММ ММММММММ"
How can I fix this? Please help me.
UPDATE
Client main()
int _tmain(int argc, TCHAR* argv[], TCHAR* envp[])
{
const char* servername="127.0.0.1";
Sleep(2000);
setlocale(LC_ALL, "Russian");
WSADATA wsaData;
struct hostent *hp;
unsigned int addr;
struct sockaddr_in server;
int wsaret=WSAStartup(0x101,&wsaData);
if(wsaret)
return 0;
SOCKET conn;
conn=socket(AF_INET,SOCK_STREAM,IPPROTO_TCP);
if(conn==INVALID_SOCKET)
return 0;
if(inet_addr(servername)==INADDR_NONE)
{
hp=gethostbyname(servername);
}
else
{
addr=inet_addr(servername);
hp=gethostbyaddr((char*)&addr,sizeof(addr),AF_INET);
}
if(hp==NULL)
{
closesocket(conn);
return 0;
}
server.sin_addr.s_addr=*((unsigned long*)hp->h_addr);
server.sin_family=AF_INET;
server.sin_port=htons(20248);
if(connect(conn,(struct sockaddr*)&server,sizeof(server)))
{
closesocket(conn);
return 0;
}
std::cout<<"Connected to server";
char buff[512];
memset(buff,'\0',512);
int z;
z=recv(conn,(char*)exportRSAKey,140,0);//Import RSA key
z=recv(conn,(char*)exportAESKey,140,0);//Import AES key
z=recv(conn,buff,512,0);//Get encryption text
importKey();//import key to client
BYTE messageLen = (BYTE)strlen(buff);
BYTE encryptedMessage[33];
memcpy(encryptedMessage, buff, messageLen);
DWORD encryptedMessageLen = messageLen;
CryptDecrypt(hSessionKeyRSA_2,NULL,FALSE,0,encryptedMessage, &encryptedMessageLen);
cout<<encryptedMessage<<endl;
// buff[z]=0;
}
Import key to client
if (CryptAcquireContext(&hCryptProv_RSA_2, NULL, MS_ENH_RSA_AES_PROV, PROV_RSA_AES, 0))
{
printf("A cryptographic provider has been acquired.\r\n");
}
else
{
DWORD d = GetLastError();
return -1;
}
int iii = CryptImportKey(hCryptProv_RSA_2,(BYTE *)&exportAESKey,140,NULL,NULL,&hSessionKeyRSA_2);
if(CryptSetKeyParam(hSessionKeyRSA_2, KP_IV, exportRSAKey, 0))
{
cout<<"ok";
}
Server main()
std::cout<<"Client connected... "<<pParam<<std::endl;
char buff[512];
CString cmd;
CString params;
int n;
int x;
BOOL auth=false;
SOCKET client=(SOCKET)pParam;
strcpy(buff,"#Server Ready.\r\n");
char keybuff[1024];
createRSAPublicKey();//create enc_dec key
//keybuff = exportRSAKey;
//memset(rec,'\0',512);
const char *p = reinterpret_cast<const char*>(exportRSAKey);
send(client,p,140,0);//send RSA
const char *pp = reinterpret_cast<const char*>(exportAESKey);
send(client,pp,140,0);//Send AES
const char *ppp = reinterpret_cast<const char*>(encryptedMessage);
send(client,ppp,512,0);//Send encrypt text
createRSAPublicKey()
BOOL createRSAPublicKey()
{
if (CryptAcquireContext(&hCryptProv_AES, NULL, MS_ENH_RSA_AES_PROV, PROV_RSA_AES, 0))
{
printf("A cryptographic provider has been acquired.\r\n");
}
else
{
DWORD d = GetLastError();
return -1;
}
HCRYPTKEY hSessionKey_AES;
if (!CryptGenKey(hCryptProv_AES, CALG_AES_256, CRYPT_EXPORTABLE, &hSessionKey_AES))
{
DWORD d = GetLastError();
return -1;
}
// Create RSA key to encrypt AES one
HCRYPTKEY hSessionKey;
if (!CryptGenKey(hCryptProv_AES, AT_KEYEXCHANGE, 1024 << 16, &hSessionKey))
{
DWORD d = GetLastError();
return -1;
}
// Export key
DWORD keylen;
BOOL ok = CryptExportKey(hSessionKey_AES, hSessionKey, SIMPLEBLOB, 0, exportRSAKey, &keylen);
if (ok == FALSE)
{
DWORD d = GetLastError();
return -1;
}
BYTE *encKey = (BYTE *)malloc(keylen);
ok = CryptExportKey(hSessionKey_AES, hSessionKey, SIMPLEBLOB, 0, exportAESKey, &keylen);
if (ok == FALSE)
{
DWORD d = GetLastError();
return -1;
}
else
printf("A cryptographic key export succeeded.\r\n");
BYTE messageLen = (BYTE)strlen(mess);
memcpy(encryptedMessage, mess, messageLen);
DWORD encryptedMessageLen = messageLen;
CryptEncrypt(hSessionKey_AES, NULL, TRUE, 0, encryptedMessage, &encryptedMessageLen, sizeof(encryptedMessage));
}
You are using strlen() to get the length of buff, but recv() does not null-terminate the buffer unless a null terminator was actually transmitted and read. You should instead be using the return value of recv(), which is the number of bytes actually read:
z=recv(conn,buff,512,0);
messageLen = z;//(BYTE)strlen(buff);
That being said, TCP is a byte stream; it has no concept of message boundaries. There is no 1-to-1 relationship between send() and recv() in TCP like there is in UDP, so the recv() above could read as little as 1 byte or as many as 512 bytes, and buff could contain a full message, a partial message, pieces of multiple messages, etc. You can't just blindly read and expect to receive everything in one go. You need to take all of that into account.
Design your TCP protocol to delimit messages, either with a preceding header that specifies the message length, or a trailing delimiter that never appears in the message body. Call recv() as many times as it takes, buffering any received data, and only process/decrypt complete messages that are in your buffer, leaving partial message data in the buffer to be completed by later reads.
Try something more like this:
Client main()
int readBuffer(SOCKET s, void *buffer, int buflen)
{
unsigned char *pbuf = (unsigned char*) buffer;
int total = 0;
while (total < buflen)
{
int num = recv(s, (char*)pbuf + total, buflen - total, 0);
if (num < 0)
return SOCKET_ERROR;
if (num == 0)
return 0;
total += num;
}
return total;
}
int _tmain(int argc, TCHAR* argv[], TCHAR* envp[])
{
const char* servername="127.0.0.1";
setlocale(LC_ALL, "Russian");
WSADATA wsaData;
memset(&wsaData, 0, sizeof(wsaData));
int wsaret = WSAStartup(0x101, &wsaData);
if (wsaret != 0)
return 0;
struct sockaddr_in server;
memset(&server, 0, sizeof(server));
server.sin_addr.s_addr = inet_addr(servername);
if (server.sin_addr.s_addr == INADDR_NONE)
{
struct hostent *hp = gethostbyname(servername);
if (hp == NULL)
return 0;
server.sin_addr = *((in_addr*)hp->h_addr);
}
server.sin_family = AF_INET;
server.sin_port = htons(20248);
SOCKET conn = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (conn == INVALID_SOCKET)
return 0;
if (connect(conn, (struct sockaddr*)&server, sizeof(server)) != 0)
{
closesocket(conn);
return 0;
}
std::cout << "Connected to server";
if (readBuffer(conn, exportRSAKey, 140) <= 0) //Import RSA key
{
closesocket(conn);
return 0;
}
if (readBuffer(conn, exportAESKey, 140) <= 0) //Import AES key
{
closesocket(conn);
return 0;
}
importKey();//import key to client
DWORD messageLen;
if (readBuffer(conn, &messageLen, sizeof(messageLen)) <= 0) //Get encryption text length
{
closesocket(conn);
return 0;
}
messageLen = ntohl(messageLen);
std::vector<BYTE> buff(messageLen);
if (messageLen > 0)
{
if (readBuffer(conn, &buff[0], messageLen) <= 0) //Get encryption text
{
closesocket(conn);
return 0;
}
if (!CryptDecrypt(hSessionKeyRSA_2, NULL, FALSE, 0, &buff[0], &messageLen))
{
closesocket(conn);
return 0;
}
}
std::cout << std::string((char*)buff.data(), messageLen) << std::endl;
}
Server main()
int sendBuffer(SOCKET s, void *buffer, int buflen)
{
unsigned char *pbuf = (unsigned char*) buffer;
int total = 0;
while (total < buflen)
{
int num = send(s, (const char*)pbuf + total, buflen - total, 0);
if (num < 0)
return SOCKET_ERROR;
if (num == 0)
return 0;
total += num;
}
return total;
}
...
SOCKET client = (SOCKET)pParam;
std::cout << "Client connected... " << pParam << std::endl;
...
createRSAPublicKey();//create enc_dec key
...
if (sendBuffer(client, exportRSAKey, 140) <= 0) //send RSA
{
closesocket(client);
return;
}
if (sendBuffer(client, exportAESKey, 140) <= 0) //Send AES
{
closesocket(client);
return;
}
...
DWORD tmpMessageLen = htonl(messageLen);
if (sendBuffer(client, &tmpMessageLen, sizeof(tmpMessageLen)) <= 0) //Send encrypt text length
{
closesocket(client);
return;
}
if (sendBuffer(client, encryptedMessage, messageLen) <= 0) //Send encrypt text
{
closesocket(client);
return;
}
...

error:FFFFFFFFFFFFFFFF:lib(255):func(4095):reason(4095) during RSA decryption

I'm trying to create a hybrid cryptography tool in C++ with a Qt GUI.
(The data will be encrypted with AES-256-CBC; the AES key will then be RSA-encrypted and saved.)
But the RSA part of this tool doesn't work.
I have rewritten the source code several times, but I always get the same error on decryption:
error:FFFFFFFFFFFFFFFF:lib(255):func(4095):reason(4095)
I hope someone can help me get a working RSA encrypt + decrypt implementation.
You can see the source code here or download a testing Qt project from my Dropbox.
Dropbox download: https://db.tt/6HKsYRTa
Source code, first implementation:
void MainWindow::rsaEncrypt()
{
EVP_PKEY *pk = NULL;
EVP_PKEY_CTX *ctx = NULL;
QByteArray encrypted = QByteArray();
//------------------------------------------------
//--- READ PUBLIC KEY ----------------------------
FILE *pkFile = fopen(ui->publicKeyPath->text().toStdString().c_str(), "r");
if(pkFile == NULL) throw NULL;
pk = PEM_read_PUBKEY(pkFile, NULL, NULL, NULL);
if(pk == NULL) throw NULL;
fclose(pkFile);
//------------------------------------------------
ctx = EVP_PKEY_CTX_new(pk, NULL);
//------------------------------------------------
//--- ENCRYPT DATA -------------------------------
int err;
err = EVP_PKEY_encrypt_init(ctx);
if(err <= 0) throw NULL;
err = EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PADDING);
if(err <= 0) throw NULL;
size_t outLen = 0;
err = EVP_PKEY_encrypt(
ctx,
NULL,
&outLen,
(uchar*) ui->plainTextEdit->document()->toPlainText().toStdString().c_str(),
ui->plainTextEdit->document()->toPlainText().size()
);
if(err <= 0) throw NULL;
encrypted.resize(outLen);
err = EVP_PKEY_encrypt(
ctx,
(uchar*) encrypted.data(),
&outLen,
(uchar*) ui->plainTextEdit->document()->toPlainText().toStdString().c_str(),
ui->plainTextEdit->document()->toPlainText().size()
);
//------------------------------------------------
EVP_PKEY_CTX_free(ctx);
EVP_PKEY_free(pk);
if(err > 0) ui->encryptedTextEdit->document()->setPlainText(QString(encrypted));
else {
QByteArray errStr = QByteArray();
errStr.resize(256);
ERR_load_ERR_strings();
ERR_error_string(err, errStr.data());
ui->encryptedTextEdit->document()->setPlainText( QString(errStr) );
}
}
void MainWindow::rsaDecrypt()
{
EVP_PKEY *pk = NULL;
EVP_PKEY_CTX *ctx = NULL;
QByteArray decrypted = QByteArray();
//------------------------------------------------
//--- READ PRIVATE KEY ---------------------------
FILE *pkFile = fopen(ui->privateKeyPath->text().toStdString().c_str(), "r");
if(pkFile == NULL) throw NULL;
pk = PEM_read_PrivateKey(pkFile, NULL, NULL, NULL);
if(pk == NULL) throw NULL;
fclose(pkFile);
//------------------------------------------------
ctx = EVP_PKEY_CTX_new(pk, NULL);
//------------------------------------------------
//--- DECRYPT DATA -------------------------------
int err;
err = EVP_PKEY_decrypt_init(ctx);
if(err <= 0) throw NULL;
err = EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PADDING);
if(err <= 0) throw NULL;
size_t outLen = 0;
err = EVP_PKEY_decrypt(
ctx,
NULL,
&outLen,
(uchar*) ui->encryptedTextEdit->document()->toPlainText().toStdString().c_str(),
ui->encryptedTextEdit->document()->toPlainText().size()
);
if(err <= 0) throw NULL;
decrypted.resize(outLen);
err = EVP_PKEY_decrypt(
ctx,
(uchar*) decrypted.data(),
&outLen,
(uchar*) ui->encryptedTextEdit->document()->toPlainText().toStdString().c_str(),
ui->encryptedTextEdit->document()->toPlainText().size()
);
//------------------------------------------------
EVP_PKEY_CTX_free(ctx);
EVP_PKEY_free(pk);
if(err > 0) ui->decryptedTextEdit->document()->setPlainText(QString(decrypted));
else {
QByteArray errStr = QByteArray();
errStr.resize(256);
ERR_load_ERR_strings();
ERR_error_string(err, errStr.data());
ui->decryptedTextEdit->document()->setPlainText( QString(errStr) );
}
}
Source code, second implementation:
void MainWindow::rsaEncrypt()
{
RSA *rsa = createRSAFromFile(ui->publicKeyPath->text().toStdString().c_str(), 1);
QByteArray encrypted = QByteArray();
encrypted.resize(2048);
int err = RSA_public_encrypt(
ui->plainTextEdit->document()->toPlainText().size(),
(uchar*) ui->plainTextEdit->document()->toPlainText().toStdString().c_str(),
(uchar*) encrypted.data(),
rsa,
RSA_PADDING
);
RSA_free(rsa);
if(err > 0) ui->encryptedTextEdit->document()->setPlainText( QString(encrypted) );
else {
QByteArray errStr = QByteArray();
errStr.resize(256);
ERR_load_ERR_strings();
ERR_error_string(err, errStr.data());
ui->encryptedTextEdit->document()->setPlainText( QString(errStr) );
}
}
void MainWindow::rsaDecrypt()
{
RSA *rsa = createRSAFromFile(ui->privateKeyPath->text().toStdString().c_str(), 0);
QByteArray decrypted = QByteArray();
decrypted.resize(2048);
int err = RSA_private_decrypt(
ui->encryptedTextEdit->document()->toPlainText().size(),
(uchar*) ui->encryptedTextEdit->document()->toPlainText().toStdString().c_str(),
(uchar*) decrypted.data(),
rsa,
RSA_PADDING
);
RSA_free(rsa);
if(err > 0) ui->decryptedTextEdit->document()->setPlainText( QString(decrypted) );
else {
QByteArray errStr = QByteArray();
errStr.resize(256);
ERR_load_ERR_strings();
ERR_error_string(err, errStr.data());
ui->decryptedTextEdit->document()->setPlainText( QString(errStr) );
}
}
RSA *MainWindow::createRSAFromFile(const char *keyPath, int pub)
{
FILE *keyFile = fopen(keyPath, "rb");
if(keyFile==NULL)
{
return 0;
}
RSA *rsa = RSA_new();
if(pub)
{
rsa = PEM_read_RSA_PUBKEY(keyFile, &rsa, NULL, NULL);
}
else
{
rsa = PEM_read_RSAPrivateKey(keyFile, &rsa, NULL, NULL);
}
fclose(keyFile);
return rsa;
}
Includes and defines for both implementations:
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QDebug>
#include <QByteArray>
#include <openssl/evp.h>
#include <openssl/rsa.h>
#include <openssl/pem.h>
#include <openssl/err.h>
#define RSA_PADDING RSA_PKCS1_OAEP_PADDING
Try using this code to create an RSA object; it definitely works. You should read the .pem file first and then call this function:
RSA *CryptClassRSA::createRSA(unsigned char *key, int isPublic){
RSA *rsa = NULL;
BIO *keybio;
keybio = BIO_new_mem_buf(key, -1);
if (keybio==NULL){
printf( "Failed to create key BIO");
return NULL;
}
if(isPublic){
rsa = PEM_read_bio_RSA_PUBKEY(keybio, &rsa,NULL, NULL);
}
else{
rsa = PEM_read_bio_RSAPrivateKey(keybio, &rsa,NULL, NULL);
}
if(rsa == NULL){
printf( "Failed to create RSA");
}
return rsa;
}
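The answer assumes the PEM data is already sitting in a NUL-terminated buffer; a minimal sketch of that preliminary step (the file name and the CryptClassRSA instance are placeholders, error handling trimmed) might look like this:
#include <fstream>
#include <iterator>
#include <string>

// Sketch, not from the original answer: slurp the whole .pem file into a
// std::string (whose buffer is NUL-terminated), then hand it to createRSA().
std::ifstream in("public.pem", std::ios::binary);
std::string pem((std::istreambuf_iterator<char>(in)), std::istreambuf_iterator<char>());

CryptClassRSA cryptRSA;
RSA *rsa = cryptRSA.createRSA(reinterpret_cast<unsigned char*>(&pem[0]), 1 /* public key */);
if (rsa == NULL) {
    // createRSA already printed a diagnostic; bail out here
}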
C++ RSA decrypt error:FFFFFFFFFFFFFFFF:lib(255):func(4095):reason(4095)
And
int err;
...
err = EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PADDING);
if(err <= 0) throw NULL;
...
It's not clear to me where the error is happening or how you are getting the errstr output of error:FFFFFFFFFFFFFFFF:lib(255):func(4095):reason(4095). I picked EVP_PKEY_CTX_set_rsa_padding as the snippet because I think the return value is -2 (and it has significance, as explained below).
err is just a return code. To get the actual error, you need to call ERR_get_error. Maybe something like:
int rc;
unsigned long err;
...
rc = EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PADDING);
err = ERR_get_error();
if(rc <= 0)
{
// err is valid
}
You should also visit the EVP_PKEY_CTX_set_rsa_padding and ERR_get_error man pages.
Often, OpenSSL will return 0 for success, so I'm not sure about err <= 0 in some places. Also, because your error is 0xffff...4095 (and not 4096), I think you are getting the -2 return value discussed in the man page:
EVP_PKEY_CTX_ctrl() and its macros return a positive value for success and 0 or a negative value for failure. In particular a return value of -2 indicates the operation is not supported by the public key algorithm.
Also note... If you gather your error and print it in hex:
rc = EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PADDING);
err = ERR_get_error();
if(rc <= 0)
{
// err is valid
std::cerr << std::hex << err << std::endl;
}
Then you can use openssl errstr 0xNNNN on the command line to print the corresponding message.
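If you would rather get the readable message inside the program than on the command line, a small sketch along the same lines, using ERR_get_error together with ERR_error_string_n, would be:
#include <openssl/err.h>
#include <iostream>

// Sketch: pop the oldest queued OpenSSL error and print its readable form,
// the in-program equivalent of running `openssl errstr 0xNNNN` by hand.
void printLastOpenSSLError()
{
    unsigned long e = ERR_get_error();
    if (e == 0)
        return; // nothing queued
    char buf[256];
    ERR_error_string_n(e, buf, sizeof(buf));
    std::cerr << std::hex << e << ": " << buf << std::endl;
}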

Loading a PEM format certificate

How do I load a PEM format certificate as an X509 in OpenSSL C++?
int SSL_use_certificate(SSL *ssl, X509 *x);
int SSL_use_certificate_ASN1(SSL *ssl, unsigned char *d, int len);
int SSL_use_certificate_file(SSL *ssl, const char *file, int type);
These are the 3 functions available to add a certificate to a handle. I have a certificate string inside the program (this is just PEM-formatted data). I want to add it to the handle. How do I proceed?
Will SSL_CTX_set_default_passwd_cb work with private keys that I am loading into an SSL handle and not a context?
How do I load a PEM format certificate as an X509 in OpenSSL C++?
You should probably use SSL_CTX_use_certificate_chain_file and SSL_CTX_use_PrivateKey_file. You can use them to build a client or server context. Sample code is shown below. There are some nuances to using them, so take a look at OpenSSL's documentation at SSL_CTX_use_certificate(3).
I'm not sure what "as an x509" means. The certificate will be X509, but the private key will be PKCS #8. There are PEM_read_bio_X509 and PEM_read_X509; they return an X509*, and they may do what you want.
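Since you already have the certificate as a PEM string inside the program, a minimal sketch of that route (a memory BIO, PEM_read_bio_X509, then SSL_use_certificate on the handle; the function name is mine and error reporting is trimmed) could look like this:
#include <openssl/bio.h>
#include <openssl/pem.h>
#include <openssl/ssl.h>
#include <string>

// Sketch: parse a PEM certificate held in a std::string into an X509* and
// attach it to an existing SSL* handle.
bool UseCertificateFromString(SSL *ssl, const std::string &pem)
{
    BIO *bio = BIO_new_mem_buf((void*)pem.data(), (int)pem.size());
    if (bio == NULL)
        return false;

    X509 *cert = PEM_read_bio_X509(bio, NULL, NULL, NULL);
    BIO_free(bio);
    if (cert == NULL)
        return false;

    int rc = SSL_use_certificate(ssl, cert); // the SSL object keeps its own reference
    X509_free(cert);
    return rc == 1;
}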
Will SSL_CTX_set_default_passwd_cb work with private keys that I am loading
It depends, but it should. The password callback is optional; use it if you password-protected the key. In the code below I call it PasswordCallback, and it's used for both reading and writing the key (only reading is shown below).
using SSL_CTX_ptr = std::unique_ptr<SSL_CTX, decltype(&::SSL_CTX_free)>;
SSL_CTX* CreateServerContext()
{
do
{
int rc;
unsigned long err;
const SSL_METHOD* method = SSLv23_server_method();
ASSERT(method != NULL);
if (method == NULL)
{
LogError("GetServerContext: SSLv23_server_method failed");
break; /* failed */
}
SSL_CTX_ptr t(SSL_CTX_new(method), ::SSL_CTX_free);
ASSERT(t.get() != NULL);
if (t.get() == NULL)
{
LogError("GetServerContext: SSL_CTX_new failed");
break; /* failed */
}
long flags = SSL_OP_NO_SSLv2 | SSL_OP_NO_SSLv3;
flags |= SSL_OP_NO_COMPRESSION;
flags |= SSL_OP_SAFARI_ECDHE_ECDSA_BUG;
flags |= SSL_OP_CIPHER_SERVER_PREFERENCE;
/* Cannot fail */
SSL_CTX_set_options(t.get(), flags);
string ciphers = GetServerCipherSuites();
ASSERT(!ciphers.empty());
rc = SSL_CTX_set_cipher_list(t.get(), ciphers.c_str());
err = ERR_get_error();
ASSERT(rc == 1);
if (rc != 1)
{
LogError("GetServerContext: SSL_CTX_set_cipher_list failed");
break; /* failed */
}
string certFile = config.GetServerCertFile();
ASSERT(!certFile.empty());
rc = SSL_CTX_use_certificate_chain_file(t.get(), certFile.c_str());
err = ERR_get_error();
ASSERT(rc == 1);
if (rc != 1)
{
LogError("GetServerContext: SSL_CTX_use_certificate_chain_file failed");
break; /* failed */
}
/* These two do not return a value... cannot fail? */
SSL_CTX_set_default_passwd_cb(t.get(), PasswordCallback);
SSL_CTX_set_default_passwd_cb_userdata(t.get(), (void*) SERVER_KEY_LABEL);
string keyFile = config.GetServerKeyFile();
ASSERT(!keyFile.empty());
rc = SSL_CTX_use_PrivateKey_file(t.get(), keyFile.c_str(), SSL_FILETYPE_PEM);
err = ERR_get_error();
ASSERT(rc == 1);
if (rc != 1)
{
LogError("GetServerContext: SSL_CTX_use_PrivateKey_file failed");
break; /* failed */
}
rc = SSL_CTX_check_private_key(t.get());
err = ERR_get_error();
ASSERT(rc == 1);
if (rc != 1)
{
LogError("GetServerContext: SSL_CTX_check_private_key failed");
/* non-fatal, but everything will probably break */
}
/* These three do not return a value... cannot fail? */
SSL_CTX_set_tmp_dh_callback(t.get(), DhCallback);
SSL_CTX_set_tmp_ecdh_callback(t.get(), EcdhCallback);
SSL_CTX_set_tlsext_servername_callback(t.get(), ServerNameCallback);
return t.release();
} while (0);
return NULL;
}
The idea with the password callback is: OpenSSL provides you a buffer and a size. You fill the buffer and return how much of it you filled.
My password callback is somewhat involved. It performs a single hash of the raw password before passing it on to the library. That ensures a "plain text" password is not used (but it does not slow down the customary attacks). Yours can prompt the user for a string, or it can return a hard-coded string.
My password callback uses a label. The label allows me to derive different keys depending on usage (even though the same 'base' secret is used). By specifying a different usage or label, I get a different derivation of key bits. The label is provided through arg below, and you can set it with SSL_CTX_set_default_passwd_cb_userdata.
using EVP_MD_CTX_ptr = std::unique_ptr<EVP_MD_CTX, decltype(&::EVP_MD_CTX_destroy)>;
int PasswordCallback(char *buffer, int size, int rwflag, void *arg)
{
UNUSED(rwflag);
int rc;
unsigned long err;
ostringstream oss;
const char* label = (char*) arg;
size_t lsize = (label ? strlen(label) : 0);
SecureVector sv = config.GetMasterKey();
AC_ASSERT(!sv.empty());
if (sv.empty())
{
...
throw runtime_error(oss.str().c_str());
}
EVP_MD_CTX_ptr ctx(EVP_MD_CTX_create(), ::EVP_MD_CTX_destroy);
AC_ASSERT(ctx.get() != NULL);
const EVP_MD* hash = EVP_sha512();
AC_ASSERT(hash != NULL);
rc = EVP_DigestInit_ex(ctx.get(), hash, NULL);
err = ERR_get_error();
AC_ASSERT(rc == 1);
if (rc != 1)
{
...
throw runtime_error(oss.str().c_str());
}
rc = EVP_DigestUpdate(ctx.get(), sv.data(), sv.size());
err = ERR_get_error();
AC_ASSERT(rc == 1);
if (rc != 1)
{
...
throw runtime_error(oss.str().c_str());
}
if (label && lsize)
{
rc = EVP_DigestUpdate(ctx.get(), label, lsize);
err = ERR_get_error();
AC_ASSERT(rc == 1);
if (rc != 1)
{
...
throw runtime_error(oss.str().c_str());
}
}
int n = std::min(size, EVP_MD_size(hash));
if (n <= 0)
return 0;
rc = EVP_DigestFinal_ex(ctx.get(), (unsigned char*) buffer, (unsigned int*) &n);
err = ERR_get_error();
AC_ASSERT(rc == 1);
if (rc != 1)
{
...
throw runtime_error(oss.str().c_str());
}
return n;
}
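For comparison, a callback that follows the same buffer-and-size contract but simply returns a hard-coded passphrase (the passphrase below is only a placeholder) can be as small as this sketch:
#include <algorithm>
#include <cstring>

// Sketch: minimal pem_password_cb. OpenSSL hands us a buffer and its size;
// we copy the passphrase in and return the number of bytes written.
int SimplePasswordCallback(char *buffer, int size, int rwflag, void *arg)
{
    (void)rwflag; (void)arg;
    static const char passphrase[] = "example-passphrase"; // placeholder
    int n = std::min(size, (int)std::strlen(passphrase));
    if (n <= 0)
        return 0;
    std::memcpy(buffer, passphrase, n);
    return n;
}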

File I/O garbled

The data between these two functions is becoming garbled; inspecting the variables on each side shows that the data is definitely different. The message-size exchange does work. It's the second read/write that has the problem.
Here are my write function and its counterpart read function:
string Pipe::RecvCompressedMessage()
{
int message_size = 0;
DWORD dwBytesRead = 0;
string buf, data;
byte size_in[sizeof(int)];
if (!ReadFile(_h, &message_size, sizeof(int), &dwBytesRead, NULL))
{
debug->DebugMessage(Error::GetErrorMessageW(GetLastError()));
Close();
return "";
}
if (message_size < 0)
return "";
buf.resize(message_size);
int total_bytes_read = 0;
while(total_bytes_read < message_size)
{
if (!ReadFile(_h, (LPVOID) &buf, message_size - total_bytes_read, &dwBytesRead, NULL))
{
int err = GetLastError();
debug->DebugMessage(Error::GetErrorMessageW(GetLastError()));
Close();
return "";
}
data.append(buf);
total_bytes_read += dwBytesRead;
}
std::string decompressed;
boost::iostreams::filtering_streambuf<boost::iostreams::output> out;
out.push(boost::iostreams::bzip2_decompressor());
out.push(boost::iostreams::back_inserter(decompressed));
boost::iostreams::copy(boost::make_iterator_range(buf), out);
return decompressed;
}
void Pipe::SendCompressedMessage(string message)
{
std::string compressed;
boost::iostreams::filtering_streambuf<boost::iostreams::output> out;
out.push(boost::iostreams::bzip2_compressor());
out.push(boost::iostreams::back_inserter(compressed));
boost::iostreams::copy(boost::make_iterator_range(message), out);
DWORD dwBytesWritten;
int message_size = compressed.length();
if (WriteFile(_h, &message_size, sizeof(int), &dwBytesWritten, NULL) == 0)
{
debug->DebugMessage(Error::GetErrorMessageW(GetLastError()));
Close();
return;
}
if (WriteFile(_h, (LPVOID *) &compressed, message_size, &dwBytesWritten, NULL) == 0)
{
debug->DebugMessage(Error::GetErrorMessageW(GetLastError()));
Close();
return;
}
}
string buf, data;
...
if (!ReadFile(_h, (LPVOID) &buf, message_size - total_bytes_read, &dwBytesRead, NULL))
You pass a pointer to the string object itself as lpBuffer to ReadFile. Replace the string with vector<char>:
vector<char> buf;
...
buf.resize(message_size - total_bytes_read);
if (!ReadFile(_h, &buf[0], message_size - total_bytes_read, &dwBytesRead, NULL))
You are writing the std::string object itself instead of a char *:
if (WriteFile(_h, (LPVOID *) (compressed.c_str()), message_size, &dwBytesWritten, NULL) == 0)
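Putting the first fix together, a sketch of the corrected receive loop (same _h handle and message_size as above; it advances by the bytes ReadFile actually returned and decompresses the accumulated buffer rather than a scratch string) might look like:
// Sketch, not from the original answer: read exactly message_size bytes
// into a vector<char>, advancing by dwBytesRead on each pass.
std::vector<char> buf(message_size);
int total_bytes_read = 0;
while (total_bytes_read < message_size)
{
    DWORD dwBytesRead = 0;
    if (!ReadFile(_h, &buf[total_bytes_read], message_size - total_bytes_read, &dwBytesRead, NULL) || dwBytesRead == 0)
    {
        Close();
        return "";
    }
    total_bytes_read += dwBytesRead;
}
// then decompress from buf (all message_size bytes), not from a partially
// filled std::string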