VTK: Extracting Cell Data from .vtu unstructured grids in C++

I need to extract all the cell data from a .vtu file (XML unstructured grid) for further manipulation in a C++ program. I am quite new to VTK...
//read all the data from the file
vtkSmartPointer<vtkXMLUnstructuredGridReader> reader =
vtkSmartPointer<vtkXMLUnstructuredGridReader>::New();
reader->SetFileName(filename.c_str());
reader->Update();
unsigned int cellNumber = reader->GetOutput()->GetNumberOfCells();
cout << "There are " << cellNumber << " input cells." << endl;
This is correct - the cell number is displayed correctly. How do I now access the names of the different cell data arrays stored in the .vtu file, and then their actual numeric values? Any help is appreciated!
Cheers,
Domanov

To access the cell data of the unstructured grid, you can do the following:
vtkUnstructuredGrid* ugrid = reader->GetOutput();
vtkCellData* cellData = ugrid->GetCellData();
for (int i = 0; i < cellData->GetNumberOfArrays(); i++)
{
    vtkDataArray* data = cellData->GetArray(i); // the i-th cell data array
    cout << "name " << data->GetName() << endl;
    for (vtkIdType j = 0; j < data->GetNumberOfTuples(); j++)
    {
        double value = data->GetTuple1(j); // assumes a single-component array
        cout << " value " << j << "th is " << value << endl;
    }
}
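Note that GetTuple1 assumes each cell array holds a single component per cell. If some of your arrays are vectors or tensors, you can query the component count and read every component explicitly. This is only a sketch continuing from the ugrid pointer above, using standard vtkDataArray calls:
vtkCellData* cellData = ugrid->GetCellData();
for (int a = 0; a < cellData->GetNumberOfArrays(); a++)
{
    vtkDataArray* data = cellData->GetArray(a);
    int numComp = data->GetNumberOfComponents();
    cout << "array " << data->GetName() << " has " << numComp << " component(s)" << endl;
    for (vtkIdType t = 0; t < data->GetNumberOfTuples(); t++)
    {
        for (int c = 0; c < numComp; c++)
        {
            double value = data->GetComponent(t, c); // component c of the tuple stored for cell t
            cout << "  cell " << t << ", component " << c << ": " << value << endl;
        }
    }
}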

Related

Trouble reading data from Parquet File

I am attempting to read Parquet data from a binary stream (via API posts). For example: I have a rather large Parquet file on the other side of a REST API and need to fetch parts of the file. I have been attempting to follow the file spec here: https://github.com/apache/parquet-format however, the pattern seems to be failing (or I am misunderstanding part of it).
For my test, I have moved a parquet file onto my local system and am reading in binary data from the file using ifstream. My steps are as follows:
Read in magic number from header
Read in magic number from footer
Read in FileMetaData length
Read in FileMetaData (from bottom of file)
Convert stream to FileMetaData Type using:
std::shared_ptr<parquet::FileMetaData> _metadata = parquet::FileMetaData::Make(metadataBuffer.data(), &metadataLength);
Read in RowGroup(0) and RowGroup(1) file_offset and total_byte_size from the FileMetaData like this:
_metadata->RowGroup(x)->file_offset();
_metadata->RowGroup(x)->total_byte_size();
After storing this data, I proceed to read in each RowGroup from the file using ifstream again. My start position is the file_offset from the beginning of the file.
Once my RowGroup data is read into a vector of objects, I attempt to convert the buffered data into RowGroupMetaData
std::shared_ptr<parquet::RowGroupMetaData> _rowGroupMetaData = parquet::RowGroupMetaData::Make(rowGroupData[x].rowGroupBuffer.data(), rowGroupData[x].schema);
This is where I get stuck. When I try to access parts of the _rowGroupMetaData, I am getting junk back. It seems I must be skipping a step or overlooking part of the file spec.
I noticed that there is data between the magic number PAR1 at the top of the file and the file_offset of RowGroup(0). The magic number is 4 characters long, but the RowGroup(0) file_offset = 113. I am not sure what the data between bytes 4 and 113 is, and I cannot find information on it in the spec.
My parquet file is rather simple. 2 RowGroups with 2 columns. Total of 5 rows across both RowGroups.
Code:
ifstream inFile("parquet-arrow-example.parquet", std::ifstream::binary | std::ios::ate);
std::streamsize fileSize = inFile.tellg();
inFile.seekg(0, std::ios::beg);
std::vector<char> headBuffer;
std::vector<char> tailBuffer;
std::vector<uint8_t> metadataBuffer;
headBuffer.resize(4);
tailBuffer.resize(4);
struct RowGroupData {
    int groupId;
    int64_t byteLength;
    int64_t offset;
    const parquet::SchemaDescriptor* schema;
    vector<uint8_t> rowGroupBuffer;
};
uint32_t metadataLength = 0;
string header;
string footer;
//Header
inFile.read((char*)&headBuffer[0], headBuffer.size()); //PAR1
header = string(headBuffer.begin(), headBuffer.end());
cout << header << endl;
//Footer
inFile.seekg(-4, std::ios::end);
inFile.read((char*)&tailBuffer[0], tailBuffer.size()); //PAR1
footer = string(tailBuffer.begin(), tailBuffer.end());
cout << footer << endl;
//Metadata Size
inFile.seekg(-8, std::ios::end);
inFile.read((char*)&metadataLength, 4);
cout << "Metadata Length: " << metadataLength << endl;
int len = -8 - static_cast<int>(metadataLength);
//Get MetaData
inFile.seekg(len, std::ios::end);
metadataBuffer.resize(metadataLength);
inFile.read((char*)&metadataBuffer[0], metadataBuffer.size());
cout << string(metadataBuffer.begin(), metadataBuffer.end()) << endl;
std::shared_ptr<parquet::FileMetaData> _metadata = parquet::FileMetaData::Make(metadataBuffer.data(), &metadataLength);
cout << "Num Rows: " << _metadata->num_rows() << endl;
cout << "Num Columns: " << _metadata->num_columns() << endl;
cout << "Num RowGroups: " << _metadata->num_row_groups() << endl;
vector<RowGroupData> rowGroupData;
//int seeqPos = 4;
for (int x = 0; x < _metadata->num_row_groups(); x++) {
    cout << "RowGroup " << x << " Byte Size: " << _metadata->RowGroup(x)->total_byte_size() << endl;
    cout << "RowGroup " << x << " File Offset: " << _metadata->RowGroup(x)->file_offset() << endl;
    cout << "RowGroup " << x << " Column 0 File Offset: " << _metadata->RowGroup(x)->ColumnChunk(0)->file_offset() << endl;
    cout << "RowGroup " << x << " Column 0 Byte Size: " << _metadata->RowGroup(x)->ColumnChunk(0)->total_compressed_size() << endl;
    cout << "RowGroup " << x << " Column 1 File Offset: " << _metadata->RowGroup(x)->ColumnChunk(1)->file_offset() << endl;
    cout << "RowGroup " << x << " Column 1 Byte Size: " << _metadata->RowGroup(x)->ColumnChunk(1)->total_compressed_size() << endl;
    RowGroupData rgData;
    rgData.groupId = x;
    rgData.byteLength = _metadata->RowGroup(x)->total_byte_size();
    rgData.offset = _metadata->RowGroup(x)->file_offset();
    rgData.schema = _metadata->RowGroup(x)->schema();
    rgData.rowGroupBuffer.resize(rgData.byteLength);
    //Store rowGroup Length
    //Store rowGroup Data
    inFile.seekg(rgData.offset, std::ios::beg);
    inFile.read((char*)&rgData.rowGroupBuffer[0], rgData.rowGroupBuffer.size());
    rowGroupData.push_back(rgData);
    //seeqPos = seeqPos + rgData.byteLength;
}
cout << endl;
for (int x = 0; x < rowGroupData.size(); x++) {
    vector<uint8_t> rgBuffer;
    //rgBuffer = rowGroupData[x].rowGroupBuffer;
    cout << "RowGroupId: " << rowGroupData[x].groupId << endl;
    cout << "RowGroupData: " << string(rowGroupData[x].rowGroupBuffer.begin(), rowGroupData[x].rowGroupBuffer.end()) << endl;
    std::shared_ptr<parquet::RowGroupMetaData> _rowGroupMetaData = parquet::RowGroupMetaData::Make(rowGroupData[x].rowGroupBuffer.data(), rowGroupData[x].schema);
    cout << "RowGroup Rows: " << _rowGroupMetaData->num_rows() << endl;
    cout << "Byte Size: " << _rowGroupMetaData->total_byte_size() << endl;
}
The data between the file header and the file_offset is the column_chunk metadata for the first column.
The Parquet spec is a little confusing because there are two different file offsets: the one on the RowGroup is an offset to the first page of data in the row group, while the column chunk's file_offset points to the column chunk's metadata.
To my knowledge the first offset is mostly used for splitting files; I think most other readers use the latter offset for parsing columns.
Also note that, in C++ at least, file_offset was being written out incorrectly prior to the release of Arrow 6.0 (it pointed to the same byte offset that the column chunk's offset did).
Lastly, Parquet is a non-trivial format and it is easy to have subtle bugs, so I'd strongly recommend using a standard implementation that has been battle-tested rather than creating your own. If something is missing from the API, it might be simpler to contribute it to an existing implementation instead of trying to build everything from scratch.
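For reference, here is a minimal sketch of reading the same metadata through the parquet-cpp (Apache Arrow) reader instead of parsing the footer by hand. It assumes a reasonably recent Arrow release (the include path and the RowGroup file_offset accessor can differ between versions):
#include <iostream>
#include <memory>
#include <parquet/api/reader.h> // parquet-cpp reader API

int main() {
    // The library locates and parses the footer (magic, length, FileMetaData) itself.
    std::unique_ptr<parquet::ParquetFileReader> reader =
        parquet::ParquetFileReader::OpenFile("parquet-arrow-example.parquet");
    std::shared_ptr<parquet::FileMetaData> metadata = reader->metadata();

    std::cout << "Num Rows: " << metadata->num_rows() << std::endl;
    std::cout << "Num Columns: " << metadata->num_columns() << std::endl;
    std::cout << "Num RowGroups: " << metadata->num_row_groups() << std::endl;

    for (int rg = 0; rg < metadata->num_row_groups(); rg++) {
        std::unique_ptr<parquet::RowGroupMetaData> rgMeta = metadata->RowGroup(rg);
        std::cout << "RowGroup " << rg
                  << " rows: " << rgMeta->num_rows()
                  << " total_byte_size: " << rgMeta->total_byte_size()
                  << " file_offset: " << rgMeta->file_offset() << std::endl;
        // The two offsets discussed above: the row-group offset vs. the per-column-chunk offsets.
        for (int c = 0; c < rgMeta->num_columns(); c++) {
            std::unique_ptr<parquet::ColumnChunkMetaData> col = rgMeta->ColumnChunk(c);
            std::cout << "  column " << c
                      << " file_offset: " << col->file_offset()
                      << " data_page_offset: " << col->data_page_offset() << std::endl;
        }
    }
    return 0;
}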

Field values for nested structs not getting set in protobuf

I am using Google protocol buffers for data deserialization, but my issue is that the field values for the nested structures are not getting set. I checked the bin file in a hex editor and the values look fine. Moreover, the first field in my nested struct is a float, and if I take the corresponding bytes and decode them into a float myself, the value seems fine.
Can someone advise what else I can do to check this, or what is perhaps happening? I have tried using ParseFromIstream as well.
The proto file is:
syntax = "proto3";
package generatedata;
message DataSample {
    DataSample_Safe DataSafe = 1;
    uint32 PassReSts = 2;
    uint32 Dir = 3;
}
message DataSample_Safe {
    float ALast = 1;
    uint32 ALastQf = 2;
}
message DataSampleMultiple {
    repeated DataSample finaldata = 1;
}
The C++ code is
fstream in("test.bin", ios::in | ios::binary);
generatedata::DataSample test_data;
while (in.read((char *) &test_data, sizeof(test_data)))
{
    const generatedata::DataSample_Safe& veh = test_data.datasafe();
    cout << "safe" << veh.alast() << endl;
    cout << "First " << test_data.passrests() << endl;
    cout << "Second " << test_data.dir() << endl;
}
Using ParseFromIstream, the code is
generatedata::DataSampleMultiple test_data;
test_data.ParseFromIstream(&in);
for (int i = 0; i < test_data.finaldata_size(); i++)
{
    const generatedata::DataSample& veh = test_data.finaldata(i);
    const generatedata::DataSample_Safe& check = veh.datasafe();
    cout << "safe" << check.alast() << endl;
    cout << "First " << veh.passrests() << endl;
    cout << "Second " << veh.dir() << endl;
}
In the above case, using ParseFromIstream, finaldata_size() is 0.
The first cout gives a segmentation fault, as the value of DataSafe has not been set. However, the second and third couts are output correctly when using istream::read.
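For what it's worth, a protobuf message object cannot be filled by copying raw bytes over it with istream::read; ParseFromIstream expects the stream to contain a message that was previously written with protobuf's own serialization (e.g. SerializeToOstream), and its boolean return value tells you whether parsing succeeded. A minimal round-trip sketch, assuming the generated header is named generatedata.pb.h:
#include <fstream>
#include <iostream>
#include "generatedata.pb.h" // assumed name of the header generated by protoc

int main() {
    // Build one DataSampleMultiple with a single sample and serialize it.
    generatedata::DataSampleMultiple out_data;
    generatedata::DataSample* sample = out_data.add_finaldata();
    sample->mutable_datasafe()->set_alast(1.5f);
    sample->mutable_datasafe()->set_alastqf(3);
    sample->set_passrests(1);
    sample->set_dir(2);
    {
        std::ofstream out("roundtrip.bin", std::ios::out | std::ios::binary);
        out_data.SerializeToOstream(&out);
    }

    // Parse it back, checking the return value.
    std::ifstream in("roundtrip.bin", std::ios::in | std::ios::binary);
    generatedata::DataSampleMultiple in_data;
    if (!in_data.ParseFromIstream(&in)) {
        std::cerr << "parse failed" << std::endl;
        return 1;
    }
    for (int i = 0; i < in_data.finaldata_size(); i++) {
        const generatedata::DataSample& veh = in_data.finaldata(i);
        std::cout << "safe " << veh.datasafe().alast() << std::endl;
        std::cout << "First " << veh.passrests() << std::endl;
        std::cout << "Second " << veh.dir() << std::endl;
    }
    return 0;
}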

Why does my program infinitely loop?

The program that I'm working on reads an input file's contents (.csv), creates an output file (.txt), and outputs the input file's content in the output file in a formatted fashion. Here's how it looks:
#include "stdafx.h"
#include <iostream> // standard input/output library
#include <string> // string data type and its associated functions
#include <fstream> // file input/output
using namespace std; // use standard namespaces
const int iRows = 1119; // input file contains 1,119 rows
const int iColumns = 11; // input file contains 11 columns
string strData[iRows][iColumns]; // 2-dimensional array that holds input file contents
// pads strings to make them the same width, for fixed-width output
string Align(string strIn, int iWidth)
{
    string strOut; // padding
    // add padding
    for (int i = 0; i < iWidth - strIn.length(); i++)
        strOut += " ";
    return strOut; // return padding
}
// main program entry point
int main()
{
ifstream inFile; // handle for input file
string strSourcePath = // input file path
"C:\\Users\\Logan\\Documents\\CIS022_S2017_Lab8b.csv";
ofstream outFile; // handle for output file
string strDestPath = // output file path
"C:\\Users\\Logan\\Documents\\out.txt";
inFile.open(strSourcePath); // open input file for read (ifstream)
for (int i = 0; i < iRows; i++) // loop for rows
    for (int j = 0; j < iColumns; j++) // embedded loop for column
    {
        if (j == iColumns - 1) // the last element in the row is newline delimited
            getline(inFile, strData[i][j], '\n');
        else // all other elements are comma delimited
            getline(inFile, strData[i][j], ',');
        /*cout << "i = " << i << " j = " << j << " " << strData[i][j] << endl;*/ // console dump for error checking
    }
inFile.close(); // done with input file, close it
outFile.open(strDestPath); // open output file for write (ofstream)
for (int i = 0; i < iRows; i++) // loop through each input row
{
outFile <<
strData[i][0] << Align(strData[i][0], 7) << // CRN
strData[i][1] << Align(strData[i][1], 6) << // Subject
strData[i][2] << Align(strData[i][2], 6) << // Number
strData[i][3] << Align(strData[i][3], 20) << // Title
strData[i][4] << Align(strData[i][4], 7) << // Days
strData[i][5] << Align(strData[i][5], 13) << // Meetdates
strData[i][6] << Align(strData[i][6], 17) << // Times
strData[i][7] << Align(strData[i][7], 6) << // Credits
strData[i][8] << Align(strData[i][8], 13) << // Instructor
strData[i][9] << Align(strData[i][9], 6) << // Room
strData[i][10] << endl; // Max Enroll
}
outFile.close(); // close output file
system("Pause"); // wait for user input
return 0; // exit program
}
However, whenever I run it, it loops infinitely here:
for (int i = 0; i < iRows; i++) // loop through each input row
{
outFile <<
strData[i][0] << Align(strData[i][0], 7) << // CRN
strData[i][1] << Align(strData[i][1], 6) << // Subject
strData[i][2] << Align(strData[i][2], 6) << // Number
strData[i][3] << Align(strData[i][3], 20) << // Title
strData[i][4] << Align(strData[i][4], 7) << // Days
strData[i][5] << Align(strData[i][5], 13) << // Meetdates
strData[i][6] << Align(strData[i][6], 17) << // Times
strData[i][7] << Align(strData[i][7], 6) << // Credits
strData[i][8] << Align(strData[i][8], 13) << // Instructor
strData[i][9] << Align(strData[i][9], 6) << // Room
strData[i][10] << endl; // Max Enroll
}
The input file contains 1119 rows of information, so I'll give you the first row:
CRN,Subj,Num,Title,Days,Meetdates,Times,Credits,Instructor,Room,Max Enroll
I let my program sit for a minute and nothing happened. Even adding this code at the beginning of the for loop only outputs the first row of information:
cout <<
strData[i][0] << " " <<
strData[i][1] << " " <<
strData[i][2] << " " <<
strData[i][3] << " " <<
strData[i][4] << " " <<
strData[i][5] << " " <<
strData[i][6] << " " <<
strData[i][7] << " " <<
strData[i][8] << " " <<
strData[i][9] << " " <<
strData[i][10] << endl;
Why does my program infinitely loop?
What happens if, in this code,
string Align(string strIn, int iWidth)
{
    string strOut; // padding
    // add padding
    for (int i = 0; i < iWidth - strIn.length(); i++)
        strOut += " ";
    return strOut; // return padding
}
strIn is longer than iWidth?
Because strIn.length() returns an unsigned size_t, the expression iWidth - strIn.length() is evaluated in unsigned arithmetic and wraps around to a huge positive value instead of becoming negative, so the loop tries to append that many spaces and never finishes.
Here is probably your issue.
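A possible fix, shown only as a sketch (it is not from the original answer): guard Align against inputs that are already at least iWidth characters wide, so the subtraction never wraps around:
// pads strings to make them the same width, for fixed-width output
string Align(string strIn, int iWidth)
{
    // if strIn is already wide enough there is nothing to pad
    if (strIn.length() >= (string::size_type)iWidth)
        return "";
    // otherwise return the remaining spaces
    return string(iWidth - strIn.length(), ' ');
}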

Head Pose Estimation on Random Forest in G Fanelli's paper

I have been working on head pose estimation from depth data, and I have read G. Fanelli's papers "Real Time Head Pose Estimation from Consumer Depth Cameras" and "Real Time Head Pose Estimation with Random Regression Forests". I tested the data and the code Fanelli published on the website (http://www.vision.ee.ethz.ch/~gfanelli/head_pose/head_forest.html). However, when I run the code there is a problem: the error message is "usage: ./head_pose_estimation config_file depth_image". I think it is about file reading, but I don't know how to fix it.
The code is like this:
int main(int argc, char* argv[])
{
if( argc != 3 )
{
cout << "usage: ./head_pose_estimation config_file depth_image" << endl;
exit(-1);
}
loadConfig(argv[1]);
CRForestEstimator estimator;
if( !estimator.loadForest(g_treepath.c_str(), g_ntrees) ){
cerr << "could not read forest!" << endl;
exit(-1);
}
string depth_fname(argv[2]);
//read calibration file (should be in the same directory as the depth image!)
string cal_filename = depth_fname.substr(0,depth_fname.find_last_of("/")+1);
cal_filename += "depth.cal";
ifstream is(cal_filename.c_str());
if (!is){
cerr << "depth.cal file not found in the same folder as the depth image! " << endl;
return -1;
}
//read intrinsics only
float depth_intrinsic[9]; for(int i =0; i<9; ++i) is >> depth_intrinsic[i];
is.close();
Mat depthImg;
//read depth image (compressed!)
if (!loadDepthImageCompressed( depthImg, depth_fname.c_str() ))
return -1;
Mat img3D;
img3D.create( depthImg.rows, depthImg.cols, CV_32FC3 );
//get 3D from depth
for(int y = 0; y < img3D.rows; y++)
{
    Vec3f* img3Di = img3D.ptr<Vec3f>(y);
    const int16_t* depthImgi = depthImg.ptr<int16_t>(y);
    for(int x = 0; x < img3D.cols; x++){
        float d = (float)depthImgi[x];
        if ( d < g_max_z && d > 0 ){
            img3Di[x][0] = d * (float(x) - depth_intrinsic[2])/depth_intrinsic[0];
            img3Di[x][1] = d * (float(y) - depth_intrinsic[5])/depth_intrinsic[4];
            img3Di[x][2] = d;
        }
        else{
            img3Di[x] = 0;
        }
    }
}
g_means.clear();
g_votes.clear();
g_clusters.clear();
string pose_filename(depth_fname.substr(0,depth_fname.find_last_of('_')));
pose_filename += "_pose.bin";
cv::Vec<float,POSE_SIZE> gt;
bool have_gt = false;
//try to read in the ground truth from a binary file
FILE* pFile = fopen(pose_filename.c_str(), "rb");
if(pFile){
have_gt = true;
have_gt &= ( fread( &gt[0], sizeof(float),POSE_SIZE, pFile) == POSE_SIZE );
fclose(pFile);
}
//do the actual estimate
estimator.estimate( img3D,
g_means,
g_clusters,
g_votes,
g_stride,
g_maxv,
g_prob_th,
g_larger_radius_ratio,
g_smaller_radius_ratio,
false,
g_th
);
cout << "Heads found : " << g_means.size() << endl;
//assuming there's only one head in the image!
if(g_means.size()>0){
cout << "Estimated: " << g_means[0][0] << " " << g_means[0][1] << " " << g_means[0][2] << " " << g_means[0][3] << " " << g_means[0][4] << " " << g_means[0][5] <<endl;
float pt2d_est[2];
float pt2d_gt[2];
if(have_gt){
cout << "Ground T.: " << gt[0] << " " << gt[1] << " " << gt[2] << " " << gt[3] << " " << gt[4] << " " << gt[5] <<endl;
cv::Vec<float,POSE_SIZE> err = (gt-g_means[0]);
//multiply(err,err,err);
for(int n=0;n<POSE_SIZE;++n)
err[n] = err[n]*err[n];
float h_err = sqrt(err[0]+err[1]+err[2]);
float a_err = sqrt(err[3]+err[4]+err[5]);
cout << "Head error : " << h_err << " mm " << endl;
cout << "Angle error : " << a_err <<" degrees " << endl;
pt2d_gt[0] = depth_intrinsic[0]*gt[0]/gt[2] + depth_intrinsic[2];
pt2d_gt[1] = depth_intrinsic[4]*gt[1]/gt[2] + depth_intrinsic[5];
}
pt2d_est[0] = depth_intrinsic[0]*g_means[0][0]/g_means[0][2] + depth_intrinsic[2];
pt2d_est[1] = depth_intrinsic[4]*g_means[0][1]/g_means[0][2] + depth_intrinsic[5];
}
return 0;
}
Can anyone tell me how to fix the problem? Thanks so much!
You should always read the readme.txt (attached here in head_pose_estimation.tgz) before testing an application:
To run the example code, type ./head_pose_estimation config.txt
data/frame_XXXX_depth.bin. The config.txt file contains all parameters
needed for the head pose estimation, e.g., the path to the forest, the
stride, and z threshold used to segment the person from the
background.

C++: how to load a 16-bit TIFF file into a container to perform math operations on its data?

I have written a small C++ console application with Code::Blocks that loads
an array of values from a CSV file, performs a special "inverted" random dithering on the values, and exports the result as a PBM file (a bitmap).
The density of black pixels on the final PBM picture depends on 3 independent variables: "Reflectance of the white", "Reflectance of the black", and the values of the CSV.
The reason I use a CSV file is that I don't know how I can directly load a TIFF file into my program. The values of my file "wall.csv" are produced by a Python script that transforms any TIFF file into a CSV...
Could you please check my code and advise for a solution to load a TIFF and detect automatically the size of the image in pixels?
The variables colo and lines define the size of the image contained as ASCII data in the CSV...
The image values are loaded into the deque<float> named CSV.
What library would you use to load the tiff?
Thanks!
code:
#include <deque>
#include <cmath>
#include <iostream>
#include <fstream>
#include <algorithm>
#include <random>
#include <cstdlib>
using namespace std;
deque <float> CSV; // CSV input values, "PHOTOMETRY"
deque <float> RND; // will contain random values from 0.0 to 1.0
int colo = 0; // variables inputed
int lines = 0; // lines
float YBK = 0; // Reflectance Black
float YW = 0; // Reflectance White
float Lmax = 0; // variables to be computed
float Lmin = 10000000; // arbitrarily high value
float NBK = 0; // will contain a normalized Black value
float NW = 1; // normalized white value
float CRATIO = 0; // Black to White dynamic ratio
float LRATIO = 0; // Lowest to Highest pixel value dynamic ratio
float Z = 0; // processing variables
float X = 0;
float aBK = 0; // computed density of black at each pixel
float vRND = 0; // random value container
float IO = 0;
int main(){
cout << "please put a file named wall.csv" << endl << "in the same forler as this executable" << endl << endl;
cout << "how many:" << endl << "columns does the CSV has?" << endl;
cin >> colo;
cout << "lines does the CSV has?" << endl;
cin >> lines;
cout << "reflectance of the WHITE (CIE Y)?" << endl;
cin >> YW;
cout << "reflectance of the BLACK (CIE Y)?" << endl;
cin >> YBK;
NBK = YBK / YW; // normalized BK
CRATIO = NW / NBK; // correction Ratio
int C = lines * colo; // cells
cout << endl << " there are: " << colo << " columns";
cout << endl << " and : " << lines << " lines " ;
cout << endl << " that makes " << C << " cells " << endl;
cout << endl << " correction ratio is: " << CRATIO << endl << endl;
///_____ IMPORT THE PHOTOMETRIC DATA
cout << "...importing the photometric data" << endl;
float x = 0; // a variable that will contain a value from the file
ifstream ifs ("wall.csv");
char dummy;
for (int i = 0; i < lines; ++i){
    for (int j = 0; j < colo; ++j){
        ifs >> x;
        if (x > Lmax) {
            Lmax = x; // determines the highest pixel value
        }
        if (x < Lmin) {
            Lmin = x; // determines the lowest pixel value
        }
        CSV.push_back(x);
        // so the dummy won't eat digits
        if (j < (colo - 1))
            ifs >> dummy;
    }
}
ifs.close(); // done with the CSV file
LRATIO = Lmax / Lmin;
cout << "...photometric data imported" << endl;
cout << endl << " maximum Luminance is: " << Lmax;
cout << endl << " minimum Luminance is: " << Lmin << endl;
cout << endl << "...luminance ratio is: " << LRATIO;
if (LRATIO > CRATIO) {
cout << endl << "...luminance ratio is: " << LRATIO;
cout << endl << "...this is too high, ending..." << '\a';
return(0);
}
cout << endl << "...luminance can be corrected :)" << endl;
///______ CREATE RANDOM VALUES BETWEEN 0 & 1
std::default_random_engine generator;
std::uniform_real_distribution <double> distribution(0.0,1.0);
for (int i=0; i<C; ++i) {
double number = distribution(generator);
RND.push_back(number);
}
cout << endl << "...random values created" << endl;
///_______ process & export to PBM
ofstream output_file("./wall.pbm");
output_file << "P1" << "\n" << colo << " " << lines << "\n"; /// PBM HEADER
cout << endl << "...file header written" << endl;
cout << endl << "...computing";
int CELLS = C; // copy the amount of cells
int LINEW = colo;
int PERCENT = 100;
while (CELLS > 0) {
    while (LINEW > 0) {
        Z = Lmin/CSV.front(); /// processing calculus
        X = (NBK - Z)/(NBK - NW);
        aBK = (1 - X);
        vRND = RND.front();
        if (aBK > (vRND)) {
            IO = 1;
        }
        else {
            IO = 0;
        }
        LINEW = LINEW - 1;
        CELLS = CELLS - 1;
        PERCENT = PERCENT - CELLS / C;
        output_file << IO << "\n";
        //cout << ERR << " "; /// fancy...
        CSV.erase(CSV.begin());
        RND.erase(RND.begin());
    }
    LINEW = colo;
}
cout << endl << "...computing done" << endl;
cout << "...file written";
output_file.close();
return(0);
}
Check out libtiff; OpenCV uses libtiff under the hood as well.
http://www.libtiff.org/
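For example, here is a minimal libtiff sketch that reads a single-channel 16-bit TIFF into a float container and picks up the image size from the file itself. The file name wall.tif is just a placeholder, and tiled or multi-sample images would need extra handling:
#include <cstdint>
#include <iostream>
#include <vector>
#include <tiffio.h> // libtiff

int main() {
    TIFF* tif = TIFFOpen("wall.tif", "r");
    if (!tif) { std::cerr << "cannot open wall.tif" << std::endl; return 1; }

    uint32_t width = 0, height = 0;
    uint16_t bitsPerSample = 0, samplesPerPixel = 1;
    TIFFGetField(tif, TIFFTAG_IMAGEWIDTH, &width);   // image size in pixels
    TIFFGetField(tif, TIFFTAG_IMAGELENGTH, &height);
    TIFFGetField(tif, TIFFTAG_BITSPERSAMPLE, &bitsPerSample);
    TIFFGetFieldDefaulted(tif, TIFFTAG_SAMPLESPERPIXEL, &samplesPerPixel);
    std::cout << width << " x " << height << " pixels, "
              << bitsPerSample << " bits per sample" << std::endl;
    if (bitsPerSample != 16) {
        std::cerr << "expected a 16-bit image" << std::endl;
        TIFFClose(tif);
        return 1;
    }

    // Read the image scanline by scanline into a float container.
    std::vector<float> pixels;
    pixels.reserve((size_t)width * height);
    std::vector<uint16_t> scanline((size_t)width * samplesPerPixel);
    for (uint32_t row = 0; row < height; ++row) {
        TIFFReadScanline(tif, scanline.data(), row);
        for (uint32_t col = 0; col < width; ++col)
            pixels.push_back((float)scanline[col * samplesPerPixel]); // first sample of each pixel
    }
    TIFFClose(tif);
    return 0;
}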