Writing a program that takes an LLVM IR file as a command-line argument - C++

I am writing a main.cpp file to test passing an LLVM IR file as a command-line argument.
I am using LLVM version 6.0.1.
#include <iostream>
#include <llvm/Bitcode/BitcodeReader.h>
#include <llvm/Bitcode/BitcodeWriter.h>
#include "llvm/IR/Function.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorOr.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/raw_ostream.h"
#include <llvm/Support/Error.h>
#include <llvm/IRReader/IRReader.h>
using namespace llvm;
using namespace std;
static cl::opt<string> input(cl::Positional, cl::desc("Bitcode file"), cl::Required);
int main(int argc, char** argv)
{
cl::ParseCommandLineOptions(argc, argv, "LLVM IR to Bytecode \n");
LLVMContext context;
ErrorOr<std::unique_ptr<MemoryBuffer>> mb = MemoryBuffer::getFile(input);
if (error_code ec = mb.getError()) {
errs() << ec.message();
return -1;
}
ErrorOr<std::unique_ptr<Module>> m = parseBitcodeFile(mb->get()->getMemBufferRef(), context);
if (error_code ec = m.getError())
{
errs() << "Error reading bitcode: " << ec.message() << "\n";
return -1;
}
return 0;
}
I got this error :
error: ‘class llvm::Expected<std::unique_ptr<llvm::Module> >’ has no member named ‘getError’
if (error_code ec = m.getError())
I have googled many times but have not found an answer anywhere. Please suggest a solution.

The above error is due to a change in the LLVM API over time: the module reader functions were changed to return llvm::Expected<T> (see https://reviews.llvm.org/D26562). This class parallels ErrorOr, but replaces error_code with Error. Since Error cannot be copied, it replaces getError() with takeError().
So the above code changes to:
Expected<std::unique_ptr<Module>> m = parseBitcodeFile(mb->get()->getMemBufferRef(), context);
if (std::error_code ec = errorToErrorCode(m.takeError())) {
errs() << "Error reading bitcode: " << ec.message() << "\n";
return -1;
}
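Equivalently, you can test the Expected<T> directly and render the Error with toString() (a sketch of the same fix; toString() is declared in llvm/Support/Error.h, which is already included above):
Expected<std::unique_ptr<Module>> m = parseBitcodeFile(mb->get()->getMemBufferRef(), context);
if (!m) {
// consume the Error; an unhandled Error aborts in debug builds
errs() << "Error reading bitcode: " << toString(m.takeError()) << "\n";
return -1;
}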

Related

Error in lexClient->StartConversationAsync (Aws::Auth::DefaultAuthSignerProvider::GetSigner(const String&) const: Assertion `false' failed.)

I am currently trying to establish a stream connection between my AWSClient and a Lex Bot.
I am getting the following error:
aws-sdk-cpp/aws-cpp-sdk-core/source/auth/signer-provider/DefaultAuthSignerProvider.cpp:48: virtual std::shared_ptr<Aws::Client::AWSAuthSigner> Aws::Auth::DefaultAuthSignerProvider::GetSigner(const String&) const: Assertion `false' failed.
Aborted (core dumped)
when the code runs through the following lines:
lexClient->StartConversationAsync(LexRequest, OnStreamReady, OnResponseCallback, nullptr);
starting.WaitOne();
The entire .cpp file follows:
#include <aws/core/Aws.h>
#include <aws/core/client/AsyncCallerContext.h>
#include <aws/core/utils/Outcome.h>
#include <aws/core/auth/AWSCredentialsProvider.h>
#include <aws/lexv2-runtime/LexRuntimeV2Client.h>
#include <aws/lexv2-runtime/model/AudioInputEvent.h>
#include <aws/lexv2-runtime/model/RecognizeUtteranceRequest.h>
#include <aws/lexv2-runtime/model/RecognizeUtteranceResult.h>
#include <aws/lexv2-runtime/model/StartConversationHandler.h>
#include <aws/lexv2-runtime/model/StartConversationRequest.h>
#include <aws/lexv2-runtime/model/StartConversationRequestEventStream.h>
#include <aws/lex/LexRuntimeServiceClient.h>
#include <aws/lex/LexRuntimeServiceRequest.h>
#include <aws/lex/model/PostContentRequest.h>
#include <aws/lex/model/PostContentResult.h>
#include <iterator>
#include <fstream>
#include <chrono>
#include <unistd.h>
using namespace Aws::LexRuntimeV2;
int SessionCount = 0;
int main(int argc, char* argv[])
{
std::string lexKey, lexSecret;
std::string botId, botAliasId, localeId, sessionId, regionId;
// bot credentials
botId = "_";
botAliasId = "_";
lexKey = "_";
lexSecret = "_";
localeId = "en_US";
// starting api
Aws::SDKOptions options;
options.loggingOptions.logLevel = Aws::Utils::Logging::LogLevel::Info;
Aws::InitAPI(options);
Aws::Client::ClientConfiguration config;
config.region = "us-east-1";
// creating a lex client
auto lexClient = Aws::MakeUnique<LexRuntimeV2Client>("MyClient", Aws::Auth::AWSCredentials(lexKey.c_str(), lexSecret.c_str()), config);
Model::StartConversationRequest LexRequest;
Model::StartConversationHandler requestHandler;
requestHandler.SetTranscriptEventCallback([](const Model::TranscriptEvent& ev)
{
std::cout << ev.GetTranscript() << std::endl;
});
requestHandler.SetOnErrorCallback([](const Aws::Client::AWSError<LexRuntimeV2Errors>& error)
{
std::cout << "Request handler: " << error.GetMessage() << std::endl;
});
LexRequest.WithLocaleId(localeId).WithBotId(botId.c_str()).WithBotAliasId(botAliasId.c_str()).WithSessionId("Blah")
.WithConversationMode(Model::ConversationMode::AUDIO).WithEventStreamHandler(requestHandler);
Aws::Utils::Threading::Semaphore signaling(0 /*initialCount*/, 1 /*maxCount*/);
auto OnResponseCallback = [&signaling](const LexRuntimeV2Client*,const Model::StartConversationRequest&,
const Model::StartConversationOutcome& outcome, const std::shared_ptr<const Aws::Client::AsyncCallerContext>&)
{
std::cout << "Response handler: " << outcome.GetError().GetMessage();
signaling.Release();
};
Model::StartConversationRequestEventStream* pStream = nullptr;
Aws::Utils::Threading::Semaphore starting(0 /*initialCount*/, 1 /*maxCount*/);
auto OnStreamReady = [&starting,&pStream](Model::StartConversationRequestEventStream& stream)
{
pStream = &stream;
pStream->SetSignatureSeed("blah");
starting.Release();
};
lexClient->StartConversationAsync(LexRequest, OnStreamReady, OnResponseCallback, nullptr);
starting.WaitOne();
std::ifstream audioFile(argv[1], std::ios_base::binary);
if (!audioFile)
{
std::cout << "Cannot open audio file " << argv[2] << std::endl;
return 0;
}
if (audioFile) {
std::cout << "Audio file is open" << std::endl;
}
while (!audioFile.eof())
{
unsigned char buf[320 + 1];
audioFile.read((char*)buf, 320);
Aws::Utils::ByteBuffer bytes(buf, audioFile.gcount());
Model::AudioInputEvent input;
auto millisec_since_epoch = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock().now().time_since_epoch()).count();
input.SetClientTimestampMillis(millisec_since_epoch);
input.SetAudioChunk(bytes);
input.SetContentType("audio/lpcm; sample-rate=8000; sample-size-bits=16; channel-count=1; is-big-endian=false");
pStream->WriteAudioInputEvent(input); // the error is here (when I comment out StartConversation)
sleep(20);
}
signaling.WaitOne(); // prevent the application from exiting until we're done
Aws::ShutdownAPI(options);
}
//g++ -o main main.cpp -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-cpp-sdk-lex -laws-cpp-sdk-lex-models -laws-cpp-sdk-lexv2-models -laws-cpp-sdk-lexv2-runtime
[debug log](https://i.stack.imgur.com/tpA9c.png)
I have no idea why this is happening, and without this call I obviously can't publish events to Lex, since the connection has not been started.

Debug Assertion when using Interpreter in TensorFlowLite

I am trying to use the Mediapipe Hand tflite file with TensorFlowLite, but I am getting an error when referring to the input tensor data and dimensions.
This is the main.cpp:
#include <iostream>
#include <cstdio>
#include <vector>
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>
#include <tensorflow/lite/model.h>
#include <tensorflow/lite/optional_debug_tools.h>
#include <flatbuffers/flatbuffers.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <tensorflow/lite/model_builder.h>
#include <tensorflow/lite/string_util.h>
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/model.h"
#include "tensorflow/lite/builtin_op_data.h"
#include "tensorflow/lite/kernels/register.h"
using namespace std;
int main()
{
const char* filename = "C:/Users/naren/OneDrive/Documents/MediapipeHands/hand_landmark_full.tflite";
const char* image_filename = "C:/Users/naren/OneDrive/Documents/MediapipeHands/hand.jpg";
std::unique_ptr<tflite::FlatBufferModel> model = tflite::FlatBufferModel::BuildFromFile(filename);
if (model == nullptr) {
cerr << "Fail to build FlatBufferModel from file: " << filename << std::endl;
exit(1);
}
tflite::ops::builtin::BuiltinOpResolver resolver;
std::unique_ptr<tflite::Interpreter> interpreter;
if (tflite::InterpreterBuilder(*model, resolver) (&interpreter) != kTfLiteOk) {
std::cerr << "Failed to build interpreter." << std::endl;
exit(1);
}
interpreter->SetNumThreads(-1);
if (interpreter->AllocateTensors() != kTfLiteOk) {
cerr << "Failed to allocate tensors." << endl;
exit(1);
}
cv::Mat image;
auto frame = cv::imread(image_filename);
if (frame.empty()) {
cout << "Image not loaded";
exit(1);
}
cv::cvtColor(frame, image, cv::COLOR_BGR2RGB);
cv::flip(image, image, 1);
cv::imshow("Image", image);
cv::waitKey(10);
const std::vector<int>& inputs = interpreter->inputs();
TfLiteTensor* inputTensor = interpreter->tensor(inputs[0]);
return 0;
}
The program is failing at the last line before the return 0 line. If we run only up until cv::waitKey(10), then a debug assertion window pops up. I also have the CMake file; please comment if you need it.
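A defensive rewrite of the failing lines can help narrow this down (a sketch, not a confirmed fix; it only surfaces whether inputs() is empty or tensor() returns null before the data and dims are touched):
const std::vector<int>& inputs = interpreter->inputs();
if (inputs.empty()) {
cerr << "Model reports no input tensors." << endl;
exit(1);
}
TfLiteTensor* inputTensor = interpreter->tensor(inputs[0]);
if (inputTensor == nullptr) {
cerr << "Failed to fetch input tensor " << inputs[0] << endl;
exit(1);
}
// inspect what the model expects before filling any data
cout << "Input type: " << inputTensor->type << ", dims: " << inputTensor->dims->size << endl;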

How can you get boost::program_options working with boost::posix_time::ptime?

I've made several attempts at getting this working, to no avail. The program compiles, but every timestamp format I can think of to supply to the program is invalid.
#include <boost/program_options.hpp>
#include <boost/optional.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <iostream>
#include <string>
using namespace boost::program_options;
using namespace std;
int main(int argc, const char** argv) {
boost::posix_time::ptime forceDate;
boost::optional<boost::posix_time::ptime> forceDateOptional;
options_description descr("Options");
descr.add_options()
("force-date,a", value(&forceDate)->value_name("<date>"), "...");
try {
variables_map args;
store(command_line_parser(argc, argv).options(descr).run(), args);
notify(args);
cout << "Done." << endl;
return 0;
} catch (std::exception& e) {
cerr << e.what() << endl;
return 1;
}
}
Compiled with g++ -I /usr/local/include -L /usr/local/lib -lboost_program_options -lboost_system x.cpp and run with ./a.out --force-date 2012-01-03.
Here is the error from the exception thrown:
the argument ('2012-01-03') for option '--force-date' is invalid
Let's first try to simplify your example by removing program_options and simply trying to parse posix_time::ptime from a stream.
boost::posix_time::ptime forceDate;
std::istringstream ss("2012-01-03");
if(ss >> forceDate) {
std::cout << forceDate << "\n";
}
The above example prints nothing, so clearly something's going wrong with the parsing. If you look up the documentation for operator>> it becomes evident that the expected format for the month is an abbreviated name instead of a numeric value.
If you change the above code to
boost::posix_time::ptime forceDate;
std::istringstream ss("2012-Jan-03");
if(ss >> forceDate) {
std::cout << forceDate << "\n";
}
it produces the desired output.
More digging through the documentation shows that the way to control the input format is boost::posix_time::time_input_facet. Its set_iso_extended_format member sets the numeric format you want to use.
boost::posix_time::ptime forceDate;
std::istringstream ss("2012-01-03");
auto facet = new boost::posix_time::time_input_facet();
facet->set_iso_extended_format();
ss.imbue(std::locale(ss.getloc(), facet));
if(ss >> forceDate) {
std::cout << forceDate << "\n";
}
Back to program_options now. It doesn't look like the library offers locale support directly, so the only way I can get the above solution to work is by messing with the global locale. If you add the following lines to your example, it behaves as you want it to.
auto facet = new boost::posix_time::time_input_facet();
facet->set_iso_extended_format();
std::locale::global(std::locale(std::locale(), facet));
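For completeness, here is a minimal sketch of the question's program with those lines added (the facet must be installed in the global locale before program_options parses the arguments):
#include <boost/program_options.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <iostream>
#include <locale>
using namespace boost::program_options;
using namespace std;
int main(int argc, const char** argv) {
// install a facet accepting the extended ISO format (YYYY-MM-DD)
// before any option parsing happens
auto facet = new boost::posix_time::time_input_facet();
facet->set_iso_extended_format();
std::locale::global(std::locale(std::locale(), facet));
boost::posix_time::ptime forceDate;
options_description descr("Options");
descr.add_options()
("force-date,a", value(&forceDate)->value_name("<date>"), "...");
try {
variables_map args;
store(command_line_parser(argc, argv).options(descr).run(), args);
notify(args);
cout << forceDate << endl;
return 0;
} catch (std::exception& e) {
cerr << e.what() << endl;
return 1;
}
}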
I was able to replicate your code and compile it on Ubuntu 14.04, and I got the same results you did. So I passed the date into a std::string and then attempted to construct a boost::posix_time::ptime from that:
#include <boost/program_options.hpp>
#include <boost/optional.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <iostream>
#include <string>
using namespace boost::program_options;
using namespace std;
int main (int argc, const char ** argv) {
auto dateStr = string {};
auto descr = options_description { "Options" };
descr.add_options ()
("help,h", "produce help message")
("date,a", value (&dateStr)->value_name ("<date>"), "...");
try {
auto args = variables_map {};
store (command_line_parser (argc, argv).options (descr).run (), args);
notify (args);
auto forceDate = boost::posix_time::ptime { dateStr };
cout << "Done." << endl;
return 0;
} catch (std::exception & e) {
cout << e.what () << endl;
return 1;
}
}
I had to dig around a little before finding out that according to this SO answer you must link boost_date_time. I used the following script to compile and re-run at the same time:
rerun.sh
#!/bin/bash
g++ -o main.x64 -std=c++11 -I . -L/usr/lib/x86_64-linux-gnu main.cpp -lboost_program_options -lboost_system -lboost_date_time
./main.x64 --date "2012-01-01"
and this is the final code I used to get it to run:
main.cpp
#include <boost/program_options.hpp>
#include <boost/optional.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <iostream>
#include <string>
using namespace boost::program_options;
using namespace boost::posix_time;
using namespace std;
int main (int argc, const char ** argv) {
auto dateStr = string {};
auto descr = options_description { "Options" };
descr.add_options ()
("date,a", value (&dateStr)->value_name ("<date>"), "...");
try {
auto args = variables_map {};
store (command_line_parser (argc, argv).options (descr).run (), args);
notify (args);
// this is the important part.
auto d = boost::gregorian::date {
boost::gregorian::from_simple_string (dateStr)
};
auto forceDate = boost::posix_time::ptime { d };
cout << "Done." << endl;
return 0;
} catch (std::exception & e) {
cout << e.what () << endl;
return 1;
}
}

How to portably compute a sha1 hash in C++?

The objective is to compute the SHA1 hash of a buffer or multiple buffers as part of a C++ program.
I'm not sure whether the version using Boost's UUID will handle leading zeros in hash values correctly (your string should always have the same length, afaik), so here's a simplified version of the example above that does:
#include <cstdio>
#include <string>
#include <boost/uuid/sha1.hpp>
std::string get_sha1(const std::string& p_arg)
{
boost::uuids::detail::sha1 sha1;
sha1.process_bytes(p_arg.data(), p_arg.size());
unsigned hash[5] = {0};
sha1.get_digest(hash);
// Back to string
char buf[41] = {0};
for (int i = 0; i < 5; i++)
{
std::sprintf(buf + (i << 3), "%08x", hash[i]);
}
return std::string(buf);
}
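A minimal usage sketch for the helper above (the expected output is the well-known SHA1 of "hello world"):
#include <iostream>
int main()
{
// prints 2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
std::cout << get_sha1("hello world") << '\n';
return 0;
}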
Since version 4.3, the Qt library has contained the class QCryptographicHash, which supports various hashing algorithms, including SHA1. Although Qt is arguably less portable than, say, OpenSSL, for projects that already depend on Qt, QCryptographicHash is the obvious way to compute a SHA1 hash.
Example program that computes the SHA1 hash of a file:
#include <QCryptographicHash>
#include <QByteArray>
#include <QFile>
#include <iostream>
#include <vector>
#include <stdexcept>
using namespace std;
int main(int argc, char **argv)
{
try {
if (argc < 2)
throw runtime_error(string("Call: ") + *argv + string(" FILE"));
const char *filename = argv[1];
QFile file(filename);
if (!file.open(QIODevice::ReadOnly | QIODevice::Unbuffered))
throw runtime_error("Could not open: " + string(filename));
QCryptographicHash hash(QCryptographicHash::Sha1);
vector<char> v(128*1024);
for (;;) {
qint64 n = file.read(v.data(), v.size());
if (!n)
break;
if (n == -1)
throw runtime_error("Read error");
hash.addData(v.data(), n);
}
QByteArray h(hash.result().toHex());
cout << h.data() << '\n';
} catch (const exception &e) {
cerr << "Error: " << e.what() << '\n';
return 1;
}
return 0;
}
The used Qt classes are all part of Qt core library. An example cmake build file:
cmake_minimum_required(VERSION 2.8.11)
project(hash_qt CXX)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -std=c++11")
find_package(Qt5Core)
add_executable(hash_qt hash_qt.cc)
target_link_libraries(hash_qt Qt5::Core)
Boost provides a simple API for computing the SHA1 hash of strings:
#include <iostream>
#include <string>
#include <boost/compute/detail/sha1.hpp>
int main(int argc, char **argv)
{
if (argc < 2) {
std::cerr << "Call: " << *argv << " STR\n";
return 1;
}
boost::compute::detail::sha1 sha1 { argv[1] };
std::string s { sha1 };
std::cout << s << '\n';
return 0;
}
That API is private to the Boost Compute library, though, because it's part of a detail namespace, meaning that it doesn't have any stability guarantees.
Boost also provides a SHA1 hashing class as part of the Boost Uuid Library, whose API is better suited for hashing arbitrary binary input, such as files. Although it is part of the detail namespace, meaning that it is kind of library-private, it has been there for many years and is stable.
A small example that computes the SHA1 hash of a file and prints it to stdout:
Prelude:
#include <boost/uuid/detail/sha1.hpp>
#include <boost/predef/other/endian.h>
#include <boost/endian/conversion.hpp>
#include <boost/algorithm/hex.hpp>
#include <boost/range/iterator_range_core.hpp>
#include <iostream>
#include <vector>
#include <iterator>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <string.h>
#include <errno.h>
using namespace std;
The main function:
int main(int argc, char **argv)
{
if (argc < 2) { cerr << "Call: " << *argv << " FILE\n"; return 1; }
const char *filename = argv[1];
int fd = open(filename, O_RDONLY);
if (fd == -1) { cerr << "open: " << strerror(errno) << '\n'; return 1; }
vector<char> v(128*1024);
boost::uuids::detail::sha1 sha1;
for (;;) {
ssize_t n = read(fd, v.data(), v.size());
if (n == -1) {
if (errno == EINTR) continue;
cerr << "read error: " << strerror(errno) << '\n';
return 1;
}
if (!n) break;
sha1.process_bytes(v.data(), n);
}
boost::uuids::detail::sha1::digest_type hash;
sha1.get_digest(hash);
// the digest words are in host byte order; convert them to big-endian
// (a no-op on big-endian hosts) so that hexing the raw bytes yields
// the canonical digest string
for (unsigned i = 0; i < sizeof hash / sizeof hash[0]; ++i)
boost::endian::native_to_big_inplace(hash[i]);
boost::algorithm::hex(boost::make_iterator_range(
reinterpret_cast<const char*>(hash),
reinterpret_cast<const char*>(hash) + sizeof hash),
std::ostream_iterator<char>(cout)); cout << '\n';
int r = close(fd);
if (r == -1) { cerr << "close error: " << strerror(errno) << '\n';
return 1; }
return 0;
}
The used parts of Boost don't create dependencies on any boost shared library. Since Boost is quite portable and available for various architectures, using Boost for computing SHA1 hashes is quite portable as well.
The OpenSSL library is portable and efficient, and implements SHA1 support among other useful features. It is available on most platforms:
https://www.openssl.org/docs/crypto/sha.html
The OpenSSL library contains an API to different hashing methods and is very portable and readily available on many systems.
A C++ example that uses the recommended EVP API of OpenSSL to compute the SHA1 hash of a file:
int main(int argc, char **argv)
{
try {
if (argc < 2) throw runtime_error(string("Call: ") + *argv
+ string(" FILE"));
const char *filename = argv[1];
int fd = open(filename, O_RDONLY);
if (fd == -1) throw runtime_error("Could not open " + string(filename)
+ " (" + string(strerror(errno)) + ")");
BOOST_SCOPE_EXIT(&fd) { close(fd); } BOOST_SCOPE_EXIT_END
const EVP_MD *md = EVP_sha1();
if (!md) throw logic_error("Couldn't get SHA1 md");
unique_ptr<EVP_MD_CTX, void (*)(EVP_MD_CTX*)> md_ctx(EVP_MD_CTX_create(),
EVP_MD_CTX_destroy);
if (!md_ctx) throw logic_error("Couldn't create md context");
int r = EVP_DigestInit_ex(md_ctx.get(), md, 0);
if (!r) throw logic_error("Could not init digest");
vector<char> v(128*1024);
for (;;) {
ssize_t n = read(fd, v.data(), v.size());
if (n == -1) {
if (errno == EINTR)
continue;
throw runtime_error(string("read error: ") + strerror(errno));
}
if (!n)
break;
int r = EVP_DigestUpdate(md_ctx.get(), v.data(), n);
if (!r) throw logic_error("Digest update failed");
}
array<unsigned char, EVP_MAX_MD_SIZE> hash;
unsigned int n = 0;
r = EVP_DigestFinal_ex(md_ctx.get(), hash.data(), &n);
if (!r) throw logic_error("Could not finalize digest");
boost::algorithm::hex(boost::make_iterator_range(
reinterpret_cast<const char*>(hash.data()),
reinterpret_cast<const char*>(hash.data()+n)),
std::ostream_iterator<char>(cout));
cout << '\n';
} catch (const exception &e) {
cerr << "Error: " << e.what() << '\n';
return 1;
}
return 0;
}
The prelude of the example:
#include <openssl/evp.h>
#include <boost/algorithm/hex.hpp>
#include <boost/range/iterator_range_core.hpp>
#include <boost/scope_exit.hpp>
#include <iostream>
#include <vector>
#include <array>
#include <memory>
#include <string>
#include <stdexcept>
#include <iterator>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <string.h>
#include <errno.h>
using namespace std;
For the EVP API, the program must be linked against libcrypto, e.g.:
g++ -g -std=c++11 sha1_example.cc -lcrypto
The Botan library implements 'a kitchen sink' of cryptographic algorithms, including SHA1 (of course). It is portable between various systems that provide a recent C++ compiler.
Computing a SHA1 hash value and obtaining it as a hexadecimal string is straightforward using Botan's high-level, stream-like C++ API for constructing pipes.
Example for computing the SHA1 hash of a file:
#include <botan/pipe.h>
#include <botan/basefilt.h>
#include <botan/filters.h>
#include <botan/data_snk.h>
using namespace Botan;
#include <fstream>
#include <iostream>
#include <stdexcept>
using namespace std;
int main(int argc, char **argv)
{
try {
if (argc < 2)
throw runtime_error(string("Call: ") + *argv + string(" FILE"));
const char *filename = argv[1];
ifstream in(filename, ios::binary);
in.exceptions(ifstream::badbit);
Pipe pipe(new Chain(new Hash_Filter("SHA-1"),
new Hex_Encoder(Hex_Encoder::Lowercase)),
new DataSink_Stream(cout));
pipe.start_msg();
in >> pipe;
pipe.end_msg();
} catch (const exception &e) {
cerr << "Error: " << e.what() << '\n';
return 1;
}
return 0;
}
When the hash value should be processed as a string, an ostringstream (instead of cout) can be used as the data sink stream.
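For example, a sketch of the same pipe writing into a std::ostringstream (add #include <sstream>; otherwise the same Botan Pipe API as above):
// capture the hex digest in a string instead of writing it to stdout
std::ostringstream digest_stream;
Pipe pipe(new Chain(new Hash_Filter("SHA-1"),
new Hex_Encoder(Hex_Encoder::Lowercase)),
new DataSink_Stream(digest_stream));
pipe.start_msg();
in >> pipe;
pipe.end_msg();
std::string digest = digest_stream.str();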
Depending on the target system/distribution, the header files might be placed at a slightly unusual location, and the library might have a slightly unexpected suffix (e.g. on Fedora 21). The following CMake snippet accounts for that:
cmake_minimum_required(VERSION 2.8.11)
project(hash CXX)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -std=c++11")
find_library(LIB_BOTAN NAMES botan botan-1.10)
find_path(HEADER_BOTAN NAMES botan/pipe.h PATH_SUFFIXES botan-1.10)
add_executable(hash_botan hash_botan.cc)
set_property(TARGET hash_botan PROPERTY INCLUDE_DIRECTORIES ${HEADER_BOTAN})
target_link_libraries(hash_botan ${LIB_BOTAN})
The Crypto++ library is a portable C++ library that includes several cryptographic algorithms, including several hashing algorithms like SHA1.
The API provides various source and sink classes, where a bunch of transformations can be attached in between.
An example for computing the SHA1 hash of a file:
#include <cryptopp/files.h>
#include <cryptopp/filters.h>
#include <cryptopp/hex.h>
#include <cryptopp/sha.h>
using namespace CryptoPP;
#include <iostream>
using namespace std;
int main(int argc, char **argv)
{
try {
if (argc < 2)
throw runtime_error(string("Call: ") + *argv + string(" FILE"));
const char *filename = argv[1];
SHA1 sha1;
FileSource source(filename, true,
new HashFilter(sha1,
new HexEncoder(new FileSink(cout), false, 0, ""),
false)
);
} catch (const exception &e) {
cerr << "Error: " << e.what() << '\n';
return 1;
}
return 0;
}
It can be compiled via e.g.: g++ -Wall -g -std=c++11 hash_cryptopp.cc -lcryptopp
Crypto++ 'pumps' the content from the source through several attached transformations into the sink. Instead of the FileSink, other sinks are available, e.g. StringSinkTemplate for writing directly into a string object (see the sketch below).
The attached objects are reference counted, such that they are automatically destructed on scope exit.
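A small sketch of the string-sink variant (StringSink is the std::string instantiation of StringSinkTemplate; the helper name sha1_hex is mine):
#include <cryptopp/filters.h>
#include <cryptopp/hex.h>
#include <cryptopp/sha.h>
#include <string>
// returns the lowercase hex SHA1 digest of an in-memory buffer
std::string sha1_hex(const std::string& data)
{
CryptoPP::SHA1 sha1;
std::string digest;
CryptoPP::StringSource source(data, true,
new CryptoPP::HashFilter(sha1,
new CryptoPP::HexEncoder(
new CryptoPP::StringSink(digest), false)));
return digest;
}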

Can any one resolve this exception in the below Thrift program?

I am new to Thrift.
I am trying to create a table ("sample") in HBase using the following Thrift program on Ubuntu 10.10. Can anyone tell me whether this is correct or not?
#include <stdio.h>
#include <unistd.h>
#include <sys/time.h>
#include <poll.h>
#include <iostream>
#include <boost/lexical_cast.hpp>
#include <protocol/TBinaryProtocol.h>
#include <transport/TSocket.h>
#include <transport/TTransportUtils.h>
#include "Hbase.h"
using namespace apache::thrift;
using namespace apache::thrift::protocol;
using namespace apache::thrift::transport;
using namespace apache::hadoop::hbase::thrift;
using namespace std;
int main()
{
boost::shared_ptr<TTransport> socket(new TSocket("localhost", 60010));
boost::shared_ptr<TTransport> transport(new TBufferedTransport(socket));
boost::shared_ptr<TProtocol> protocol(new TBinaryProtocol(transport));
HbaseClient client1(protocol);
try
{
transport->open();
string tableName("Sample");
vector<ColumnDescriptor> columns;
columns.push_back(ColumnDescriptor());
columns.back().name = "cf:";
columns.back().maxVersions = 10;
columns.push_back(ColumnDescriptor());
columns.back().name = "test";
try {
client1.createTable(tableName, columns);
} catch (const AlreadyExists &ae) {
std::cerr << "WARN: " << ae.message << std::endl;
}
}
catch (const TException &tx) {
std::cerr << "ERROR: " << tx.what() << std::endl;
}
return 0;
}
But I am getting the following exception at client1.createTable(tableName, columns):
ERROR: No more data to read.
Please help in resolving this.
Got it. You need to start the Thrift server on HBase:
<hbaseinstallationpath>/bin/hbase thrift -threadpool start
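With the Thrift server running, the client should also connect to the Thrift port (9090 by default) rather than 60010, which is the HBase master web UI port. A sketch of the corrected connection line:
// connect to the HBase Thrift server (default port 9090),
// not the master web UI on 60010
boost::shared_ptr<TTransport> socket(new TSocket("localhost", 9090));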