How to extract header information via CURLOPT_HEADERFUNCTION? - c++

I want to extract header information using CURLOPT_HEADERFUNCTION in my C++ program.
"How can I use CURLOPT_HEADERFUNCTION to read a single response header field?" shows how to get that header information, but I want to know why my code is not working, plus a possible solution with an example.
//readHeader function which extracts the specific header information
size_t readHeader(char* header, size_t size, size_t nitems, void *userdata) {
    Erza oprations; //class which contains string functions like startsWith etc.
    if (oprations.startsWith(header, "Content-Length:")) {
        std::string header_in_string = oprations.replaceAll(header, "Content-Length:", "");
        long length = atol(header_in_string.c_str());
        file_size = length;  // file_size is a global variable
        std::cout << length; // here it shows the correct file size
    }
    else if (oprations.startsWith(header, "Content-Type:")) {
        // do something
    }
    else {
        // do something
    }
    return size * nitems;
}
// part of main function
curl = curl_easy_init();
if (curl) {
    fp = fopen(path, "wb");
    curl_easy_setopt(curl, CURLOPT_URL, url);
    curl_easy_setopt(curl, CURLOPT_CAINFO, "./ca-bundle.crt");
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, false);
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, false);
    curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, readHeader);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
    res = curl_easy_perform(curl);
    curl_easy_cleanup(curl);
    fclose(fp);
    std::cout << file_size; // showing value 0
I get the correct file size inside the readHeader function, but 0 in the main function.

As shown in your GitHub repository, oprations (operations?) is a local variable and is destroyed at the end of the readHeader function. A way to have readHeader record the correct file size for a given Erza instance is to pass a pointer to that instance as the userdata value. The Erza class may be rewritten as:
// Forward declaration so the class can install it as the header callback.
size_t readHeader(char* header, size_t size, size_t nitems, void* userdata);

class Erza : public Endeavour {
    //... your class body (curl, fp, res and file_size are assumed to be members)
public:
    bool download(const char* url, const char* path) {
        curl = curl_easy_init();
        if (curl) {
            fp = fopen(path, "wb");
            curl_easy_setopt(curl, CURLOPT_URL, url);
            curl_easy_setopt(curl, CURLOPT_CAINFO, "./ca-bundle.crt");
            curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, false);
            curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, false);
            curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, readHeader);
            curl_easy_setopt(curl, CURLOPT_HEADERDATA, this); // <-- pass this pointer as the userdata value seen by the callback
            curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
            res = curl_easy_perform(curl);
            curl_easy_cleanup(curl);
            fclose(fp);
            return false;
        } else {
            return true;
        }
    }

    size_t analyseHeader(char* header, size_t size, size_t nitems) {
        if (startsWith(header, "Content-Length:")) {
            std::string header_in_string = replaceAll(header, "Content-Length:", "");
            long length = atol(header_in_string.c_str());
            file_size = length;  // file_size is a member variable
            std::cout << length; // here it shows the correct file size
        }
        else if (startsWith(header, "Content-Type:")) {
            // do something
        }
        else {
            // do something
        }
        return size * nitems;    // tell libcurl the whole header line was handled
    }
}; // EOF class Erza

size_t readHeader(char* header, size_t size, size_t nitems, void *userdata) {
    // get the calling context (the Erza instance pointer set as userdata)
    Erza* oprations = (Erza*)userdata;
    return oprations->analyseHeader(header, size, nitems);
}
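For completeness, a minimal usage sketch under the same assumptions (a default-constructible Erza whose file_size member is accessible from the caller; the URL and output path are just placeholders):

Erza downloader;
if (!downloader.download("https://example.com/file.bin", "./file.bin")) { // download() returns false on success
    std::cout << downloader.file_size << std::endl; // filled in by analyseHeader via CURLOPT_HEADERDATA
}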

Related

cURL write_callback does not pass userdata argument

I am trying to collect some data from a URL. If I do not define any CURLOPT_WRITEFUNCTION and CURLOPT_WRITEDATA, I can obviously see the output on the console. Then I tried to write that data to memory by copying the example code; however, the userdata argument of my callback function was NULL and I got the following exception on this line:
char* ptr = (char*)realloc(mem->memory, mem->size + realsize + 1);
Exception thrown: read access violation.
mem was nullptr.
Am I doing something wrong?
Here is my code:
struct MemoryStruct {
    char* memory;
    size_t size;
};

//-----------------
// Curl's callback
//-----------------
size_t CurlWrapper::curl_cb(char* data, size_t size, size_t nmemb, void* response)
{
    size_t realsize = size * nmemb;
    std::cout << "CALLBACK CALLED" << std::endl;
    MemoryStruct* mem = (struct MemoryStruct*)response;
    char* ptr = (char*)realloc(mem->memory, mem->size + realsize + 1);
    if (!ptr) {
        /* out of memory! */
        printf("not enough memory (realloc returned NULL)\n");
        return 0;
    }
    mem->memory = ptr;
    memcpy(&(mem->memory[mem->size]), data, realsize);
    mem->size += realsize;
    mem->memory[mem->size] = 0;
    return realsize;
}

//--------------------
// Do the curl
//--------------------
void CurlWrapper::curl_api(
    const std::string& url,
    std::string& str_result)
{
    MemoryStruct chunk;
    if (curl) {
        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
        curl_easy_setopt(curl, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4);
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &CurlWrapper::curl_cb);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void*)&chunk);
        // TODO: enable ssh certificate
        curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, false); // true
        curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, false); // 2
        curl_easy_setopt(curl, CURLOPT_ACCEPT_ENCODING, "zlib");
        auto res = curl_easy_perform(curl);
        /* Check for errors */
        if (res != CURLE_OK) {
            // nothing
            std::cout << "curl_easy_perform() failed: " << curl_easy_strerror(res) << std::endl;
        }
    }
}
libcurl version: 7.82.0
Since libcurl is a C library, it does not know anything about C++ member functions or objects. You can overcome this "limitation" with relative ease by using, for example, a static member function that is passed a pointer to the object.
See this example (from the everything curl book).
// f is the pointer to your object.
// (Declare func as static inside YourClass; "static" is omitted in the out-of-class definition.)
size_t YourClass::func(void *buffer, size_t sz, size_t n, void *f)
{
    // Call non-static member function.
    static_cast<YourClass*>(f)->nonStaticFunction();
    return sz * n; // tell libcurl the data was handled
}
// This is how you pass the pointer to the static function:
curl_easy_setopt(hcurl, CURLOPT_WRITEFUNCTION, YourClass::func);
curl_easy_setopt(hcurl, CURLOPT_WRITEDATA, this);
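To see the pattern end to end, here is a small self-contained sketch (not the asker's actual CurlWrapper; the class and function names are made up for illustration) where a static member callback appends the received data to a std::string passed through CURLOPT_WRITEDATA:

#include <curl/curl.h>
#include <string>

class Fetcher {
public:
    // A static member function behaves like a plain C function pointer,
    // so libcurl can call it as the CURLOPT_WRITEFUNCTION callback.
    static size_t write_cb(char* data, size_t size, size_t nmemb, void* userdata) {
        auto* out = static_cast<std::string*>(userdata); // whatever CURLOPT_WRITEDATA pointed to
        out->append(data, size * nmemb);
        return size * nmemb;                             // report all bytes as consumed
    }

    bool fetch(const std::string& url, std::string& body) {
        CURL* curl = curl_easy_init();
        if (!curl) return false;
        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &Fetcher::write_cb);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, &body); // delivered to the callback as userdata
        CURLcode res = curl_easy_perform(curl);
        curl_easy_cleanup(curl);
        return res == CURLE_OK;
    }
};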

download function with libcurl, but it works incomplete [closed]

Greetings to everyone reading this topic. My platform is Win32, and I'm using libcurl and running into a problem.
My goal is to write a download program with libcurl: request a URL, download the file, save it locally (fwrite), and show a progress bar while downloading.
The problem is that it downloads very small files fine, but when requesting a larger file, such as 30 MB, it stops before the download is done.
How can I debug this program so it works with files of any size?
I'm not familiar with libcurl, so any simple detail could help: how the curl_easy functions work when multiple callbacks are set, whether either of my two callback functions is coded improperly, or whether I'm missing some libcurl rule.
Feel free to tell me anything.
Things I've tried:
1. Re-compiling different versions of libcurl. I'm now using libcurl 7.64 compiled with "WITH_SSL=static".
2. Many different sites. The clue so far: very small files (around 80 KB) download completely, with the progress bar, but larger files (around 30 MB) come out incomplete. My guess is that the transfer stops because of some problem that only appears with larger files.
Code:
static FILE * fp;

static size_t write_callback(char *ptr, size_t size, size_t nmemb, void *userdata)
{
    size_t nWrite = fwrite(ptr, size, nmemb, fp);
    return nWrite;
}

static int progress_callback(void *clientp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow)
{
    (void)ultotal;
    (void)ulnow;
    int totaldotz = 40;
    double fractiondownloaded = (double)dlnow / (double)dltotal;
    int dotz = (int)(fractiondownloaded * totaldotz);
    printf("%3.0f%% [", fractiondownloaded * 100); //print the number percentage of the progress
    int i = 0;
    for (; i < dotz; i++) { //print "=" to show progress
        printf("=");
    }
    for (; i < totaldotz; i++) { //print space to occupy the rest
        printf(" ");
    }
    printf("]\r");
    fflush(stdout);
    return 0;
}

int download_function(CURL *curl, const char * url, const char * path)
{
    curl = curl_easy_init();
    curl_easy_setopt(curl, CURLOPT_URL, url);
    curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, progress_callback);
    curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L);
    fopen_s(&fp, path, "ab+");
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_callback);
    curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 5L);
    curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, false);
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, false);
    curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 3L);
    curl_easy_setopt(curl, CURLOPT_TIMEOUT, 3L);
    char * error = NULL;
    curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, error);
    CURLcode retcCode = curl_easy_perform(curl);
    fclose(fp);
    const char* pError = curl_easy_strerror(retcCode);
    if (curl) {
        curl_easy_cleanup(curl);
    }
    return 0;
}
@ccxxshow seems right. Setting the timeout option gives me a CURLE_OPERATION_TIMEDOUT error.
After removing this line I can download a roughly 9 MB PDF file successfully:
curl_easy_setopt(curl, CURLOPT_TIMEOUT, 3L);
My complete code:
#include <curl/curl.h>

static FILE * fp;

static size_t write_callback(char *ptr, size_t size, size_t nmemb, void *userdata)
{
    size_t nWrite = fwrite(ptr, size, nmemb, fp);
    return nWrite;
}

static int progress_callback(void *clientp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow)
{
    (void)ultotal;
    (void)ulnow;
    int totaldotz = 40;
    double fractiondownloaded = (double)dlnow / (double)dltotal;
    int dotz = (int)(fractiondownloaded * totaldotz);
    printf("%3.0f%% [", fractiondownloaded * 100); //print the number percentage of the progress
    int i = 0;
    for (; i < dotz; i++) { //print "=" to show progress
        printf("=");
    }
    for (; i < totaldotz; i++) { //print space to occupy the rest
        printf(" ");
    }
    printf("]\r");
    fflush(stdout);
    return 0;
}

int download_function(CURL *curl, const char * url, const char * path)
{
    curl = curl_easy_init();
    curl_easy_setopt(curl, CURLOPT_URL, url);
    curl_easy_setopt(curl, CURLOPT_XFERINFOFUNCTION, progress_callback);
    curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L);
    fopen_s(&fp, path, "ab+");
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_callback);
    curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 5L);
    curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, false);
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, false);
    curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 3L);
    //curl_easy_setopt(curl, CURLOPT_TIMEOUT, 3L);
    char * error = NULL;
    curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, error);
    CURLcode retcCode = curl_easy_perform(curl);
    fclose(fp);
    const char* pError = curl_easy_strerror(retcCode);
    if (curl) {
        curl_easy_cleanup(curl);
    }
    return 0;
}

int main()
{
    CURL *testCurl = NULL;
    const char *fileAddr = "https://gotocon.com/dl/goto-cph-2015/slides/AndersLybecker_and_SebastianBrandes_DevelopingIoTSolutionsWithWindows10AndAzure.pdf";
    download_function(testCurl, fileAddr, "my-9MB.pdf");
}
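If you still want some protection against a stalled transfer, a gentler alternative to a hard CURLOPT_TIMEOUT (which caps the whole transfer and therefore also kills long, healthy downloads) is the low-speed abort pair. A sketch, with the 1000 bytes/s and 30 s thresholds chosen arbitrarily:

// Abort only if the average speed stays below 1000 bytes/second
// for 30 consecutive seconds, instead of capping total transfer time.
curl_easy_setopt(curl, CURLOPT_LOW_SPEED_LIMIT, 1000L);
curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, 30L);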

Curl gives segmentation fault error

I am trying to download a .txt file from a server that I can access via the web browser on my Raspberry Pi.
The curl library gives a segmentation fault when I try to do this. Here is the code I am using.
size_t write_data(void *ptr, size_t size, size_t nmemb, FILE *stream) {
    size_t written = fwrite(ptr, size, nmemb, stream);
    return written;
}

int checkNewFiles() {
    CURL *curl;
    FILE *fp;
    CURLcode res;
    string url = "http://52.233.176.151:1880/files/device/software/text.txt";
    char outfilename[FILENAME_MAX] = "/home/pi/Desktop/project/cpp/ab.txt";
    curl = curl_easy_init();
    if (curl) {
        fp = fopen(outfilename, "wb");
        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
        res = curl_easy_perform(curl);
        curl_easy_cleanup(curl);
        fclose(fp);
    }
    return 0;
}
I found the problem: what is url.c_str() doing?
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
Change it to
curl_easy_setopt(curl, CURLOPT_URL, url);
Example: a curl program that downloads a text file.
Of course you need to add the necessary header files here.
size_t write_data(void *ptr, size_t size, size_t nmemb, FILE *stream) {
    size_t written = fwrite(ptr, size, nmemb, stream);
    return written;
}

int main(void) {
    CURL *curl;
    FILE *fp;
    CURLcode res;
    const char *url = "http://localhost/yourfile.txt";
    char outfilename[FILENAME_MAX] = "C:\\outfile.txt";
    curl = curl_easy_init();
    if (curl) {
        fp = fopen(outfilename, "wb");
        curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1); /* enable failure on http errors */
        curl_easy_setopt(curl, CURLOPT_URL, url);
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
        res = curl_easy_perform(curl);
        if (res != CURLE_OK) { /* check that the operation was successful */
            printf("curl_easy_perform(): %s\n", curl_easy_strerror(res));
        }
        /* always cleanup */
        curl_easy_cleanup(curl);
        fclose(fp);
    }
    return 0;
}
I noticed you're not checking for errors after fopen. If it fails, it returns a NULL pointer, which would cause a segfault when curl attempts to write to it.
I'm not convinced that c_str() was the culprit behind your segfault in the original question, as I have used it in numerous applications with no problems.
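A minimal sketch of that check, reusing the variables from the question (the early return value is just an arbitrary error indicator):

fp = fopen(outfilename, "wb");
if (fp == NULL) {   // fopen failed: bad path, missing directory, permissions, ...
    perror("fopen");
    curl_easy_cleanup(curl);
    return 1;
}
curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp); // only hand libcurl a valid FILE*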

Downloading multiple files from an FTP server files using Libcurl

I have used the following code to download all the files from an FTP server.
The steps followed are:
1. Create a list of the files on the FTP server:
void getFTPList(string sHost, string sUser, string sPass, string sUri)
{
    CURL *curl;
    CURLcode res;
    FILE *ftplister;
    string host = "ftp://";
    host += sHost;
    host += "/sample/";
    string furl = host + sUri;
    string usrpwd = sUser;
    usrpwd += ":";
    usrpwd += sPass;
    /* local file name to store the file as */
    ftplister = fopen("ftp-list", "wb"); /* b is binary, needed on win32 */
    curl = curl_easy_init();
    if (curl) {
        /* Get a file listing from sunet */
        curl_easy_setopt(curl, CURLOPT_URL, furl.c_str());
        curl_easy_setopt(curl, CURLOPT_USERPWD, usrpwd.c_str());
        curl_easy_setopt(curl, CURLOPT_FTPLISTONLY, TRUE);
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &write_list);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, ftplister);
        res = curl_easy_perform(curl);
        /* Check for errors */
        if (res != CURLE_OK)
            fprintf(stderr, "curl_easy_perform() failed: %s\n",
                    curl_easy_strerror(res));
        /* always cleanup */
        curl_easy_cleanup(curl);
    }
    fclose(ftplister); /* close the local file */
}
2. Use this list to download the files, calling the download function for each entry:
int main() {
    FILE *ftpfile;
    string line;
    ftpfile = fopen("ftp-list", "r");
    ifstream infile("ftp-list");
    while (getline(infile, line))
    {
        string url, ofname, surl = "ftp://myhost/uploader/", sfname = "C:\\CNAP\\";
        url = surl + line;
        ofname = sfname + line;
        cout << url << " " << ofname << endl;
        char* theVal;
        char* theStr;
        theVal = new char[url.size() + 1];
        theStr = new char[ofname.size() + 1];
        strcpy(theVal, url.c_str());
        strcpy(theStr, ofname.c_str());
        downloadFile(theVal, theStr);
        delete[] theVal; // free the temporary copies
        delete[] theStr;
    }
    return 0;
}
Now the download function:
size_t write_data(void *ptr, size_t size, size_t nmemb, FILE *stream) {
    size_t written;
    written = fwrite(ptr, size, nmemb, stream);
    return written;
}

void downloadFile(const char* url, const char* ofname)
{
    CURL *curl;
    FILE *fp;
    CURLcode res;
    curl = curl_easy_init();
    if (curl) {
        fp = fopen(ofname, "wb");
        curl_easy_setopt(curl, CURLOPT_URL, url);
        curl_easy_setopt(curl, CURLOPT_USERPWD, "user:pass");
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
        res = curl_easy_perform(curl);
        curl_easy_cleanup(curl);
        fclose(fp);
    }
}
It works, but only for downloading text files or files containing text. If I download an image, a docx, a zip, a rar, or for that matter any file that is not text, it fails to open after downloading (reported as an invalid file).
I am not sure what I am missing; any help would be appreciated.
I know this is an inefficient way of coding, but I just need the downloads to be right (for any file). Working on the efficiency is the next item on my agenda.
PS: I used the method from "Downloading multiple files with libcurl in C++".
Thank you
The file needs to be opened as a binary file when downloading:
fp = fopen(ofname, "wb");
Thank you for the help, Rob
This is a guess:
Try clearing the options CURLOPT_TRANSFERTEXT and CURLOPT_CRLF.
Reference:
http://curl.haxx.se/libcurl/c/curl_easy_setopt.html
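In code, that guess would look roughly like this (setting both options to 0L clears them, forcing a plain binary FTP transfer with no line-ending conversion):

curl_easy_setopt(curl, CURLOPT_TRANSFERTEXT, 0L); // do not request ASCII (text) mode for the FTP transfer
curl_easy_setopt(curl, CURLOPT_CRLF, 0L);         // do not convert line endings to CRLF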
#include <stdio.h>
#include <curl/curl.h>
#include <curl/easy.h>
#include <string>
#include <iostream>
#include <fstream>

using namespace std;

size_t write_data(void *ptr, size_t size, size_t nmemb, FILE *stream);

void getFileList(
    const string &strHost,
    const string &strUri,
    const string &strUser,
    const string &strPassWord,
    const string &strTargetFile
);

void downloadAllFiles(
    const string &strFtpListFile,
    const string &strHost,
    const string &strUri,
    const string &strUser,
    const string &strPassWord,
    const string &strSaveTargetFolder
);

int main(void)
{
    string strHost = "ftp://192.168.0.1:22";
    string strUri = "/cobus/test/";
    string strUser = "cobus";
    string strPassWord = "password";
    string strTargetFile = "c:\\cobus\\ftpList.txt";
    string strSaveDestFolder = "c:\\cobus\\";

    getFileList(strHost, strUri, strUser, strPassWord, strTargetFile);
    downloadAllFiles(
        strTargetFile,
        strHost,
        strUri,
        strUser,
        strPassWord,
        strSaveDestFolder
    );
}

size_t write_data(void *ptr, size_t size, size_t nmemb, FILE *stream)
{
    size_t written = fwrite(ptr, size, nmemb, stream);
    return written;
}

void getFileList(const string &strHost, const string &strUri,
    const string &strUser, const string &strPassWord, const string &strTargetFile)
{
    CURL *curl;
    CURLcode res;
    FILE *ftplister;
    string strSourceFullUri = strHost + strUri;
    string strUserPwInfo = strUser + ":" + strPassWord;
    /* local file name to store the file as */
    ftplister = fopen(strTargetFile.c_str(), "wb"); /* b is binary, needed on win32 */
    curl = curl_easy_init();
    if (curl)
    {
        /* Get a file listing from sunet */
        curl_easy_setopt(curl, CURLOPT_URL, strSourceFullUri.c_str());
        curl_easy_setopt(curl, CURLOPT_USERPWD, strUserPwInfo.c_str());
        curl_easy_setopt(curl, CURLOPT_FTPLISTONLY, TRUE);
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &write_data);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, ftplister);
        res = curl_easy_perform(curl);
        /* Check for errors */
        if (res != CURLE_OK)
            fprintf(stderr, "curl_easy_perform() failed: %s\n",
                    curl_easy_strerror(res));
        /* always cleanup */
        curl_easy_cleanup(curl);
    }
    fclose(ftplister); /* close the local file */
}

void downloadAllFiles(const string &strFtpListFile, const string &strHost,
    const string &strUri, const string &strUser, const string &strPassWord,
    const string &strSaveDestFolder)
{
    FILE *ftpFile = NULL;
    string strFileName = "";
    ifstream infile(strFtpListFile.c_str());
    if (!infile.is_open())
    {
        cerr << "can not open ftpList.txt" << endl;
        return;
    }
    /* ftpList.txt get data, line by line, processing */
    while (getline(infile, strFileName))
    {
        CURL *curl;
        FILE *destFilePath;
        CURLcode res;
        curl = curl_easy_init();
        if (curl)
        {
            destFilePath = fopen((strSaveDestFolder + strFileName).c_str(), "wb");
            curl_easy_setopt(curl, CURLOPT_URL, (strHost + strUri + strFileName).c_str());
            curl_easy_setopt(curl, CURLOPT_USERPWD, (strUser + ":" + strPassWord).c_str());
            curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
            curl_easy_setopt(curl, CURLOPT_WRITEDATA, destFilePath);
            res = curl_easy_perform(curl);
            curl_easy_cleanup(curl);
            fclose(destFilePath);
        }
    } // end while
}
I rewrote the code so the content of Vivian Lobo's answer can be seen at a glance.
I tested it as Vivian Lobo described, and it works.
Stack Overflow makes it hard to post code in a comment.
Also, how can I comment on Vivian Lobo's answer?

libcurl 404 detection

I'm doing a file download with libcurl in my C++ program. How can I detect whether the request returned a 404, and skip the file write in that case? The code is:
void GameImage::DownloadImage(string file_name) {
    string game_name;
    game_name = file_name.substr(file_name.find_last_of("/") + 1);
    CURL *curl;
    FILE *fp;
    CURLcode res;
    string url = "http://site/" + game_name + ".png";
    string outfilename = file_name + ".png";
    cout << "INFO; attempting to download " << url << "..." << endl;
    curl = curl_easy_init();
    if (curl) {
        cout << "INFO; downloading " << url << "..." << endl;
        fp = fopen(outfilename.c_str(), "wb");
        cout << "INFO; trying to open " << outfilename << " for file output" << endl;
        if (fp != NULL) {
            curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
            curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, GameImage::WriteData);
            curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
            curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);
            curl_easy_setopt(curl, CURLOPT_FAILONERROR, true);
            res = curl_easy_perform(curl);
            long http_code = 0;
            curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_code);
            curl_easy_cleanup(curl);
            fclose(fp);
        }
        else {
            cout << "GameImage::DownloadImage; Couldn't open output file" << endl;
        }
    }
}

size_t GameImage::WriteData(void *ptr, size_t size, size_t nmemb, FILE *stream) {
    size_t written;
    written = fwrite(ptr, size, nmemb, stream);
    return written;
}
I can delete the 404 response after the transfer occurs, but it would be good to not even save the response.
You can check against CURLE_HTTP_RETURNED_ERROR.
This is returned if CURLOPT_FAILONERROR is set to true and the HTTP server returns an error code that is >= 400. You can't grab the specific HTTP response code from it, but it should be enough to accomplish what you want.
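Roughly, using the variables from the question (a sketch of the relevant part, not the full function):

curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1L); // make HTTP codes >= 400 fail the transfer
res = curl_easy_perform(curl);
if (res == CURLE_HTTP_RETURNED_ERROR) {
    // the server answered with an HTTP error such as 404; do not keep the output file
    cout << "GameImage::DownloadImage; HTTP error, not keeping file" << endl;
}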
I know this is an old post, but the mistake you're making is that you're not checking the return value of curl_easy_perform. Setting CURLOPT_FAILONERROR will not crash the program; instead, it reports the error through the return value you named res. To get rid of the empty file, you could do something like this:
void GameImage::DownloadImage(string file_name) {
    string game_name;
    game_name = file_name.substr(file_name.find_last_of("/") + 1);
    CURL *curl;
    FILE *fp;
    CURLcode res;
    string url = "http://site/" + game_name + ".png";
    string outfilename = file_name + ".png";
    cout << "INFO; attempting to download " << url << "..." << endl;
    curl = curl_easy_init();
    if (curl) {
        cout << "INFO; downloading " << url << "..." << endl;
        fp = fopen(outfilename.c_str(), "wb");
        cout << "INFO; trying to open " << outfilename << " for file output" << endl;
        if (fp == NULL) {
            cout << "GameImage::DownloadImage; Couldn't open output file" << endl;
            curl_easy_cleanup(curl);
            return;
        }
        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, GameImage::WriteData);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
        curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);
        curl_easy_setopt(curl, CURLOPT_FAILONERROR, true);
        res = curl_easy_perform(curl);
        fclose(fp);
        if (res != CURLE_OK) {
            cout << "GameImage::DownloadImage; Failed to download file" << endl;
            remove(outfilename.c_str());
        }
        curl_easy_cleanup(curl);
    }
}

size_t GameImage::WriteData(void *ptr, size_t size, size_t nmemb, FILE *stream) {
    size_t written;
    written = fwrite(ptr, size, nmemb, stream);
    return written;
}