How to display a multi-dimensional matrix in OpenCV - C++

I want to display a multi-dimensional matrix B(1000,1000,3) in OpenCV so that I can see the values of this matrix at each (i,j,k) index.
Here is my declaration and how I fill this matrix:
Mat image1 = imread("C://images//PolarImage300915163358.bmp"); // image 800*600
int dim1[3] = {1000,1000,3};
Mat B(3, dim1, CV_64F, Scalar::all(-1));
// phi, theta, stepphi, steptetha, jj1 and jj2 are computed elsewhere in my code
for (int j1 = 0; j1 < 800; j1++)
{
    for (int j2 = 0; j2 < 600; j2++)
    {
        jj1 = round(((double)phi / stepphi) + 701.0);
        jj2 = round(((double)theta / steptetha) + 501.0);
        B.at<double>(1199 - jj1, jj2, 0) = image1.at<double>(j1, j2);
        B.at<double>(1199 - jj1, jj2, 1) = image1.at<double>(j1, j2);
        B.at<double>(1199 - jj1, jj2, 2) = image1.at<double>(j1, j2);
    }
}
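
Since imshow only handles 2-D images, one simple way to look at B is to copy one plane at a time into an ordinary 2-D Mat and scale it for display. A minimal sketch, continuing from the declarations in the snippet above:

// Copy plane k of the 3-D matrix B into a 2-D Mat so it can be inspected and shown.
int k = 0; // plane index to inspect (0, 1 or 2)
cv::Mat plane(dim1[0], dim1[1], CV_64F);
for (int i = 0; i < dim1[0]; i++)
    for (int j = 0; j < dim1[1]; j++)
        plane.at<double>(i, j) = B.at<double>(i, j, k);

std::cout << plane.at<double>(500, 500) << std::endl; // read any single (i,j,k) value

cv::Mat display;
cv::normalize(plane, display, 0, 255, cv::NORM_MINMAX); // map the values to [0,255]
display.convertTo(display, CV_8U);
cv::imshow("plane k of B", display);
cv::waitKey(0);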

Related

OpenCV Using a loop to sum a part of your image error?

I want to move through an image and take a 5x5 grid centered on each pixel. I then want to sum that grid and compare the sum to a threshold:
int main()
{
    Mat element = getStructuringElement(MORPH_RECT, Size(7, 7));
    Mat im = imread("blob.png", IMREAD_GRAYSCALE);
    bool fromCenter = false;
    namedWindow("Crop frame", WINDOW_NORMAL);
    Rect2d r = selectROI("Crop frame", im, fromCenter);
    im = im(r);
    erode(im, im, element);
    Mat clone = im;
    int sectionSize = 4;
    int width = im.cols - sectionSize / 2;
    int height = im.rows - sectionSize / 2;
    int sum = 0;
    int counter = 0;
    for (int i = sectionSize / 2; i < width; i++) {
        for (int j = sectionSize / 2; j < height; j++) {
            Rect rect = Rect(i, j, sectionSize, sectionSize);
            rect -= Point(rect.width / 2, rect.height / 2);
            Mat temp = im(rect);
            for (int x = 0; x < temp.cols; x++) {
                for (int y = 0; y < temp.rows; y++) {
                    int pixelValue = (int)temp.at<uchar>(y, x);
                    sum += pixelValue;
                }
            }
            cout << sum << endl;
            if (sum > 3800) {
                clone.at<uchar>(j, i) = 255;
            }
            else {
                clone.at<uchar>(j, i) = 0;
            }
            namedWindow("erode", WINDOW_NORMAL);
            imshow("erode", clone);
            waitKey(1);
            sum = 0;
        }
    }
}
I am getting fluctuations in the pixel sum depending on where I select my ROI in the image, even when both selections are over white space. Also, my pixel sum changes when I change the value of the clone pixel in this section of the code, which I do not understand at all:
if (sum > 3800) {
    clone.at<uchar>(j, i) = 255;
}
else {
    clone.at<uchar>(j, i) = 0;
}
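
One plausible cause, offered here as a guess rather than the thread's accepted answer: Mat clone = im; copies only the Mat header, so the 255 / 0 values written into clone also overwrite im, and the following windows then sum those freshly written pixels. A minimal sketch of a fix, keeping the rest of the loop unchanged, plus an optional shortcut that reuses the question's 5x5 window and 3800 threshold:

// Deep-copy the eroded image so that writing into `clone` cannot change the
// pixels the later window sums are computed from.
cv::Mat clone = im.clone();

// Optional shortcut: compute every 5x5 window sum in one call. boxFilter with
// normalize=false stores the raw neighborhood sum at each pixel, and the
// comparison yields a ready-made 8-bit mask.
cv::Mat sums;
cv::boxFilter(im, sums, CV_32S, cv::Size(5, 5), cv::Point(-1, -1), false);
cv::Mat mask = sums > 3800; // 255 where the window sum exceeds the threshold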

Subtract OpenCV matrix from 3-channel matrix

I have two matrices:
cv::Mat bgr(rows, cols, CV_16UC3);
cv::Mat ir(rows, cols, CV_16UC1);
and I want to subtract ir from each channel of bgr element-wise. I have not found an elegant solution yet.
EDIT
One possible solution might be:
// subtract IR from BGR
Vec3w tmp;
for (int i = 0; i < ir.rows; i++) {
    for (int j = 0; j < ir.cols; j++) {
        tmp = bgr.at<Vec3w>(i, j);
        tmp[0] = tmp[0] - ir.at<ushort>(i, j);
        tmp[1] = tmp[1] - ir.at<ushort>(i, j);
        tmp[2] = tmp[2] - ir.at<ushort>(i, j);
        bgr.at<Vec3w>(i, j) = tmp;
    }
}
The question is whether there is a faster solution.
If we're talking about an elegant way, it could be like this:
Mat mat = Mat::ones(2,2,CV_8UC1);
Mat mat1 = Mat::ones(2,2,CV_8UC2)*3;
Mat mats[2];
split(mat1,mats);
mats[0]-=mat;
mats[1]-=mat;
merge(mats,2,mat1);
You shouldn't use at() if you want your code to be more efficient. Use pointers and check the Mats for continuity:
int rows = mat.rows;
int cols = mat.cols;
if (mat.isContinuous() && mat1.isContinuous())
{
    cols *= rows;
    rows = 1;
}
for (int j = 0; j < rows; j++) {
    auto channel2img = mat1.ptr<Vec2b>(j);
    auto channelimg = mat.ptr<uchar>(j);
    for (int i = 0; i < cols; i++) {
        channel2img[i][0] -= channelimg[i];
        channel2img[i][1] -= channelimg[i];
    }
}
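
Applied to the original 16-bit matrices, a short sketch along the same idea but without a per-pixel loop (assuming bgr and ir from the question; note that cv::subtract saturates on underflow instead of wrapping like the hand-written loop):

// Replicate the single-channel IR image into three channels, then let
// cv::subtract do the element-wise subtraction of all three channels at once.
cv::Mat ir3;
cv::cvtColor(ir, ir3, cv::COLOR_GRAY2BGR); // CV_16UC1 -> CV_16UC3
cv::subtract(bgr, ir3, bgr);               // bgr = saturate(bgr - ir3)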

Variance of a sliding window in an image

I work on traffic sign detection. First, I applied a segmentation on the RGB image to obtain a red-channel image, as illustrated in image 1.
Secondly, I try to find homogeneous regions, in order to eliminate regions that are not of interest (not a traffic sign), by calculating the variance of a sliding window over the image.
I use this code, but I always get an exception:
int main(int argc, char** argv)
{
    IplImage *image1;
    if ((image1 = cvLoadImage("segmenter1/00051.jpg", 0)) == 0)
        return NULL;
    int rows = image1->width;
    int cols = image1->height;
    Mat image = Mat::zeros(cols, rows, CV_32FC1);
    double x = 0;
    double temp = 0;
    for (int i = 0; i < rows; i++){
        for (int j = 0; j < cols; j++){
            temp = cvGet2D(image1, j, i).val[0];
            x = temp / 255;
            image.at<float>(j, i) = x;
            x = image.at<float>(j, i);
        }
    }
    int k = 16;
    double seuil = 0.0013;
    CvScalar blanc; // white pixel
    blanc.val[0] = 255;
    cv::Scalar mean, stddev; // 0: 1st channel, 1: 2nd channel, 2: 3rd channel
    for (int j = 0; j < rows - k; j++)
    {
        for (int i = 0; i < cols - k; i++)
        {
            double som = 0;
            double var = 0;
            double t = 0;
            for (int jj = j; jj < k + j; jj++)
            {
                for (int ii = i; ii < k + i; ii++)
                {
                    t = image.at<float>(jj, ii);
                    som = som + t;
                    t = t * t;
                    var = var + t;
                }
            }
            som = som / (k * k);
            if (som > 0.18){
                var = (var / (k * k)) - (som * som);
                if (var < seuil)
                    cvSet2D(image1, j, i, blanc);
            }
        }
    }
    char stsave[80];
    cvSaveImage("variance/00051.jpg", image1);
    cv::waitKey(0);
    return 0;
}
Without the specific exception, I can only guess that it is out_of_range. According to the OpenCV docs, the cvGet2D and cvSet2D parameters are image, y, x, which effectively translates to image, row, col. You have flipped the definitions of rows and cols and have conflicting usage between the two loops. Maybe fix these and try again.
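
To illustrate the point, here is a sketch of the same window-variance test written against the C++ API only, with consistent row/col indexing. It is not the thread's code; the window size k = 16, the mean threshold 0.18 and the variance threshold seuil = 0.0013 are taken from the question, and cv::meanStdDev stands in for the hand-written sums:

#include <opencv2/opencv.hpp>

int main()
{
    cv::Mat gray = cv::imread("segmenter1/00051.jpg", cv::IMREAD_GRAYSCALE);
    if (gray.empty())
        return -1;

    cv::Mat image;
    gray.convertTo(image, CV_32F, 1.0 / 255.0); // normalize to [0,1]

    const int k = 16;
    const double seuil = 0.0013;

    for (int y = 0; y + k <= image.rows; y++)
    {
        for (int x = 0; x + k <= image.cols; x++)
        {
            cv::Scalar mean, stddev;
            cv::meanStdDev(image(cv::Rect(x, y, k, k)), mean, stddev);
            // Bright, homogeneous window: mark its top-left pixel white.
            if (mean[0] > 0.18 && stddev[0] * stddev[0] < seuil)
                gray.at<uchar>(y, x) = 255;
        }
    }
    cv::imwrite("variance/00051.jpg", gray);
    return 0;
}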

How to assign values to a 3-dimensional array in OpenCV

I want to assign values to a 3-dimensional array in OpenCV but don't know how to do it.
Here is the code in MATLAB that I want to write in OpenCV:
vv = zeros(800,600,2);
for j1 = 1:m1
    for j2 = 1:m2
        w = [-k; vv(j1,j2,1); vv(j1,j2,2)];
        w = w/norm(w);
    end
end
And this is what I tried in OpenCV, but it did not work:
int dim2[3] = {800, 600, 2};
Mat vv(3, dim2, CV_32F, Scalar::all(0));
for (int j1 = 0; j1 < 800; j1++)
{
    for (int j2 = 0; j2 < 600; j2++)
    {
        Mat w(3, dim2, CV_32F, Scalar(1, vv(j1,j2,1), vv(j1,j2,2)));
    }
}
Use the following syntax:
// initializes a matrix of zeros, of size 800x600x2
cv::Mat vv = cv::Mat::zeros(cv::Size(600, 800), CV_32FC2);
// do some calculations on vv
// opencv version of the for loop
for (int y = 0; y < vv.rows; y++)
{
    for (int x = 0; x < vv.cols; x++)
    {
        // access indices (y,x,1) and (y,x,2)
        cv::Vec2f wVec = vv.at<cv::Vec2f>(cv::Point(x, y));
        // calculate the norm
        cv::Point3f w(3, wVec[0], wVec[1]);
        double normW = cv::norm(w);
        // divide w by its norm; don't forget to verify that normW is not 0
        w = w / normW;
        // do something with the calculated w vector
    }
}
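
If a genuinely 3-dimensional Mat is preferred over the 2-D, two-channel one above, elements can also be addressed with a three-index at<>(). A minimal sketch along those lines (k is a placeholder here, since the question never defines it):

int dim2[3] = {800, 600, 2};
cv::Mat vv(3, dim2, CV_32F, cv::Scalar::all(0));
const float k = 1.0f; // placeholder: the question never defines k

for (int j1 = 0; j1 < dim2[0]; j1++)
{
    for (int j2 = 0; j2 < dim2[1]; j2++)
    {
        // Build the 3-vector [-k; vv(j1,j2,1); vv(j1,j2,2)] from the MATLAB code.
        cv::Vec3f w(-k, vv.at<float>(j1, j2, 0), vv.at<float>(j1, j2, 1));
        double n = cv::norm(w);
        if (n > 0)
            w *= static_cast<float>(1.0 / n); // w = w / norm(w)
        // use w...
    }
}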

How to make a color histogram based classifier using OpenCV 3.0.0

I am trying to make a classifier using OpenCV 3.0.0's CvSVM and a color histogram. I already tried to make my own, using the following code to build the datasets:
int labels[510];
if (label.compare("raw")){
    for (int i = 0; i < 509; i++){
        labels[i] = 1;
    }
}
else if (label.compare("ripe")){
    for (int i = 0; i < 509; i++){
        labels[i] = 2;
    }
}
else if (label.compare("rotten")){
    for (int i = 0; i < 509; i++){
        labels[i] = 3;
    }
}
float trainingData[510][2];
for (int i = 0; i < 254; i++){
    trainingData[i][1] = r_hist.at<float>(i - 1);
    trainingData[i][2] = i;
}
int j = 0;
for (int i = 255; i < 509; i++){
    trainingData[i][1] = g_hist.at<float>(j - 1);
    trainingData[i][2] = i;
    j++;
}
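
An aside on the indexing above, not part of the original question: a float[510][2] array only has columns 0 and 1, so writes to trainingData[i][2] go out of bounds, and r_hist.at<float>(i - 1) reads index -1 when i is 0 (std::string::compare also returns 0 on a match, so label.compare("raw") == 0 would be the equality test). A sketch of the same fill with in-bounds indices, assuming r_hist and g_hist each hold at least 255 bins:

// 510 samples, 2 features each: column 0 = histogram bin value, column 1 = bin index.
float trainingData[510][2];
for (int i = 0; i < 255; i++) {
    trainingData[i][0] = r_hist.at<float>(i);       // red-histogram bins in rows 0..254
    trainingData[i][1] = (float)i;
}
for (int i = 255; i < 510; i++) {
    trainingData[i][0] = g_hist.at<float>(i - 255); // green-histogram bins in rows 255..509
    trainingData[i][1] = (float)i;
}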
And this code for the SVM:
int width = 512, height = 512;
Mat image = Mat::zeros(height, width, CV_8UC3);
Mat labelsMat(510, 1, CV_32SC1, labels);
Mat trainingDataMat(510, 2, CV_32FC1, trainingData);
Ptr<cv::ml::SVM> svm = SVM::create();
svm = cv::Algorithm::load<ml::SVM>("svm.xml");
svm->setC(0.01);
svm->setType(ml::SVM::C_SVC);
svm->setKernel(ml::SVM::LINEAR);
svm->setTermCriteria(cvTermCriteria(TermCriteria::MAX_ITER, 100, 1e6));
svm->train(trainingDataMat, ROW_SAMPLE, labelsMat);
svm->save("svm.xml");
The problem with the code above is that it won't save properly. Is there a better way to do it?
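
One thing that stands out, offered as a guess rather than a confirmed answer: the freshly created SVM is immediately replaced by cv::Algorithm::load<ml::SVM>("svm.xml"), so if that file does not exist yet the pointer is empty and nothing useful gets trained or saved. A minimal train-then-save sketch for the 3.x ml::SVM API, assuming labelsMat and trainingDataMat are built as above:

// Create, configure, train and only then save the model.
cv::Ptr<cv::ml::SVM> svm = cv::ml::SVM::create();
svm->setType(cv::ml::SVM::C_SVC);
svm->setKernel(cv::ml::SVM::LINEAR);
svm->setC(0.01);
// With MAX_ITER alone the epsilon term is unused; 1e-6 is used here instead of 1e6.
svm->setTermCriteria(cv::TermCriteria(cv::TermCriteria::MAX_ITER, 100, 1e-6));
svm->train(trainingDataMat, cv::ml::ROW_SAMPLE, labelsMat);
svm->save("svm.xml");

// Later, to reuse the trained model instead of retraining:
cv::Ptr<cv::ml::SVM> loaded = cv::Algorithm::load<cv::ml::SVM>("svm.xml");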