How does glReadPixels work? - opengl

I have just started using OpenGL. I was writing a program for the boundary fill algorithm; I used glClearColor to change the background colour to red, but when I read a pixel color using glReadPixels it gives the RGB value 0,0,0 instead of 1,0,0.
#include <stdio.h>
#include <math.h>
#include <GL/glut.h>
#include <GL/freeglut.h>

int WIDTH = 500, HEIGHT = 500;

void boundaryfill(int x, int y)
{
    int pixel[4];
    glReadPixels(x, y, 1, 1, GL_RGB, GL_FLOAT, &pixel);
    printf("%d %d %d %d\n", pixel[0], pixel[1], pixel[2], pixel[4]);
    if (pixel[0] == (float)1 && pixel[1] == (float)0 && pixel[2] == (float)0)
    {
        printf("njerngwneroingoierngoineriongioerngiernogineiorngoiern");
    }
    else
    {
        glBegin(GL_POINTS);
        glColor3f(0, 1, 0);
        glVertex2i(x, y);
        glEnd();
        glFlush();
        //boundaryfill(x,y+1);
        //boundaryfill(x+1,y);
        //boundaryfill(x,y-1);
        //boundaryfill(x-1,y);
    }
}

void display()
{
    glClearColor(1.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    glColor3f(1.0, 0.0, 0.0);
    glBegin(GL_LINE_LOOP);
    glVertex2i(250, 250);
    glVertex2i(-250, 250);
    glVertex2i(-250, -250);
    glVertex2i(250, -250);
    glEnd();
    boundaryfill(250, 250);
    glFlush();
}

void myinit_func()
{
    glLoadIdentity();
    glMatrixMode(GL_PROJECTION);
    gluOrtho2D(-250, 250, -250, 250);
}

int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowPosition(0, 0);
    glutInitWindowSize(WIDTH, HEIGHT);
    glutCreateWindow("Boundary fill");
    myinit_func();
    glutDisplayFunc(display);
    glutMainLoop();
    return 0;
}

First of all, you call glReadPixels() with GL_FLOAT, yet pixel is an int[]. Additionally, you call it with GL_RGB; since you want the alpha channel as well, you need to pass GL_RGBA.
float pixel[4];
glReadPixels(x, y, 1, 1, GL_RGBA, GL_FLOAT, &pixel);
Next, in printf() you read the alpha as pixel[4]; I suspect this is a typo, as you need pixel[3]. You also use %d where %f is needed.
printf("%f %f %f %f\n", pixel[0], pixel[1], pixel[2], pixel[3]);
Last but not least, by default GLUT doesn't set up an alpha buffer, which you can check by doing:
int bits = 0;
glGetIntegerv(GL_ALPHA_BITS, &bits);
If no alpha buffer is present, bits will remain 0. So instead you need to call glutInitDisplayMode like this:
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB | GLUT_ALPHA);
Note that in glutInitDisplayMode you don't need to pass GLUT_RGBA, as it's the same as GLUT_RGB, which additionally is the default and can thus be omitted.
Additionally, I would discourage comparing with pixel[0] == (float)1 due to floating-point precision. I would instead recommend using:
GLubyte pixel[4];
glReadPixels(x, y, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, &pixel);
printf("%u %u %u %u\n", pixel[0], pixel[1], pixel[2], pixel[3]);
Then compare with pixel[0] == 255 instead, of course.
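Putting these fixes together, boundaryfill() could look like the sketch below (the rest of the program stays as in the question; note that glReadPixels expects window coordinates, so x and y are assumed to be pixel positions):
void boundaryfill(int x, int y)
{
    GLubyte pixel[4];
    glReadPixels(x, y, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixel);
    printf("%u %u %u %u\n", pixel[0], pixel[1], pixel[2], pixel[3]);
    if (pixel[0] == 255 && pixel[1] == 0 && pixel[2] == 0)
    {
        return; // red boundary reached, stop filling
    }
    glBegin(GL_POINTS);
    glColor3f(0, 1, 0);
    glVertex2i(x, y);
    glEnd();
    glFlush();
}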

Change int pixel[4]; to float pixel[4]; and the problem should be resolved.
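In other words, a minimal sketch of the changed lines:
float pixel[4]; // was: int pixel[4];
glReadPixels(x, y, 1, 1, GL_RGB, GL_FLOAT, pixel);
printf("%f %f %f\n", pixel[0], pixel[1], pixel[2]);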

Related

OpenGL texture cylinder trouble

I'm a little bit confused.
I want to texture a "cylinder", so my first approach was this (with n the number of faces and k the iteration index):
void cylindrer(double r, int n, double h){
    double x[n], y[n];
    for(int k = 0; k < n; k++){
        x[k] = r*cos(2*k*M_PI/n);
        y[k] = r*sin(2*k*M_PI/n);
    }
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, texObject[1]);
    for(int k = 0; k < n; k++){
        int m = (k+1)%n;
        glBegin(GL_POLYGON);
        glTexCoord2f(1.0/n*(k),   0.0); glVertex3f(x[k], y[k],  h/2);
        glTexCoord2f(1.0/n*(k),   1.0); glVertex3f(x[k], y[k], -h/2);
        glTexCoord2f(1.0/n*(k+1), 1.0); glVertex3f(x[m], y[m], -h/2);
        glTexCoord2f(1.0/n*(k+1), 0.0); glVertex3f(x[m], y[m],  h/2);
        glEnd();
    }
    glDisable(GL_TEXTURE_2D);
}
The texture was applied but mirrored, so I changed it to:
glBegin(GL_POLYGON);
glTexCoord2f(1.0/n*(n-k),   0.0); glVertex3f(x[k], y[k],  h/2);
glTexCoord2f(1.0/n*(n-k),   1.0); glVertex3f(x[k], y[k], -h/2);
glTexCoord2f(1.0/n*(n-k-1), 1.0); glVertex3f(x[m], y[m], -h/2);
glTexCoord2f(1.0/n*(n-k-1), 0.0); glVertex3f(x[m], y[m],  h/2);
glEnd();
And it works; with the original texture the cylinder looks as expected (screenshots omitted).
But now I want to rotate the texture by 90 degrees, so I created a new JPEG file and rotated it. When I use this rotated texture, the result is twisted around the cylinder and I don't understand why.
Any idea?
Here is how I load the texture:
#include <math.h>
#include <jpeglib.h>
#include <jerror.h>

GLuint texObject[2]; // textures

int main(int argc, char **argv){
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
    glutInitWindowPosition(200, 200);
    glutInitWindowSize(1366, 768);
    glutCreateWindow("");
    glClearColor(0.0, 0.0, 0.0, 0.0);
    //glColor3f(1.0,1.0,1.0);
    glShadeModel(GL_FLAT);
    glPointSize(2.0);
    glEnable(GL_DEPTH_TEST);
    glGenTextures(2, texObject);
    loadJpegImage("./textureCorps5.jpg", 1);
    glutMainLoop();
    return 0;
}

void loadJpegImage(char *fichier, int i)
{
    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;
    FILE *file;
    unsigned char *ligne;
    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_decompress(&cinfo);
#ifdef __WIN32
    if (fopen_s(&file, fichier, "rb") != 0)
    {
        fprintf(stderr, "Error\n");
        exit(1);
    }
#elif __GNUC__
    if ((file = fopen(fichier, "rb")) == 0)
    {
        fprintf(stderr, "Error\n");
        exit(1);
    }
#endif
    jpeg_stdio_src(&cinfo, file);
    jpeg_read_header(&cinfo, TRUE);
    unsigned char image[cinfo.image_width*cinfo.image_height*3];
    jpeg_start_decompress(&cinfo);
    ligne = image;
    while (cinfo.output_scanline < cinfo.output_height)
    {
        ligne = image + 3*cinfo.image_width*cinfo.output_scanline;
        jpeg_read_scanlines(&cinfo, &ligne, 1);
    }
    jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);
    glBindTexture(GL_TEXTURE_2D, texObject[i]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, cinfo.image_width, cinfo.image_height, 0,
                 GL_RGB, GL_UNSIGNED_BYTE, image);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
}
GL_UNPACK_ALIGNMENT specifies the alignment requirement for the start of each pixel row in memory. By default GL_UNPACK_ALIGNMENT is set to 4, which means each row of the texture is assumed to occupy 4*N bytes.
Your texture is an RGB texture, which needs 3 bytes (24 bits) per texel, and the texels, and especially the lines of the texture, are tightly packed. This means the start of a line may violate the alignment of 4 (unless 3 times the texture width happens to be divisible by 4 without remainder).
To deal with that you have to change the alignment to 1, i.e. set glPixelStorei(GL_UNPACK_ALIGNMENT, 1); before glTexImage2D when reading a tightly packed texture.
Otherwise each line is read with an offset of 0-3 bytes, which produces the continuously twisted texture.
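In the question's loadJpegImage(), that is a one-line addition right before the upload, along these lines:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1); // rows of the tightly packed RGB image may start at any byte
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, cinfo.image_width, cinfo.image_height, 0,
             GL_RGB, GL_UNSIGNED_BYTE, image);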
Alternatively, you can take care of the alignment yourself and pad each line to the aligned length when you read the rotated texture:
int bytesPerLine = cinfo.image_width * 3;
bytesPerLine += (4 - bytesPerLine % 4) % 4; // pad each line up to the next multiple of 4 bytes
unsigned char image[bytesPerLine * cinfo.image_height];
jpeg_start_decompress(&cinfo);
while (cinfo.output_scanline < cinfo.output_height)
{
    ligne = image + bytesPerLine*cinfo.output_scanline;
    jpeg_read_scanlines(&cinfo, &ligne, 1);
}
jpeg_finish_decompress(&cinfo);
jpeg_destroy_decompress(&cinfo);

OpenGL glDrawElements only Black Screen

I was trying to draw a cube using the glDrawElements function, but even the simple code below gives me only a black screen. If it helps, I'm programming in Xcode 6.
//
// main.cpp
// Copyright (c) 2014 Guilherme Cardoso. All rights reserved.
//
#include <iostream>
#include <OpenGL/OpenGL.h>
#include <GLUT/GLUT.h>
#include <vector>
#include <math.h>
const GLfloat width = 500;
const GLfloat height = 500;
GLubyte cubeIndices[24] = {0,3,2,1, 2,3,7,6, 0,4,7,3,
                           1,2,6,5, 4,5,6,7, 0,1,5,4};

GLfloat vertices[][3] =
    {{-1.0,-1.0,-1.0}, {1.0,-1.0,-1.0},
     {1.0,1.0,-1.0},   {-1.0,1.0,-1.0}, {-1.0,-1.0,1.0},
     {1.0,-1.0,1.0},   {1.0,1.0,1.0},   {-1.0,1.0,1.0}};

GLfloat colors[][3] =
    {{0.0,0.0,0.0}, {1.0,0.0,0.0},
     {1.0,1.0,0.0}, {0.0,1.0,0.0}, {0.0,0.0,1.0},
     {1.0,0.0,1.0}, {1.0,1.0,1.0}, {0.0,1.0,1.0}};

void display(){
    glEnableClientState(GL_COLOR_ARRAY);
    glEnableClientState(GL_VERTEX_ARRAY);
    glColorPointer(3, GL_FLOAT, 0, colors);
    glVertexPointer(3, GL_FLOAT, 0, vertices);
    //glDrawArrays(GL_QUADS, 0, 24);
    glDrawElements(GL_QUADS, 24, GL_UNSIGNED_BYTE, cubeIndices);
    glDisableClientState(GL_VERTEX_ARRAY);
    glutSwapBuffers();
}

void mouse(int button, int state, int x, int y){
}

void keyboard(unsigned char key, int x, int y){
    if(key == 'q' || key == 'Q') exit(0);
}

void init(){
    glEnable(GL_DEPTH_TEST);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0, width, height, 0);
    glMatrixMode(GL_MODELVIEW);
    glClearColor(1.0, 1.0, 1.0, 1.0);
    //glColor3f(0.0,0.0,0.0);
}

void idle(){
}

int main(int argc, char **argv) {
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
    glutInitWindowSize(width, height);
    glutCreateWindow("Assignment 3");
    glutPositionWindow(0, 0);
    glutDisplayFunc(display);
    glutMouseFunc(mouse);
    glutKeyboardFunc(keyboard);
    glutIdleFunc(idle);
    init();
    glutMainLoop();
}
I already checked some tutorials, and they are not much different from what I'm doing.
You never clear the color and especially the depth buffer. You should do that at the beginning of your display() function.
Also, your matrix setup is a bit weird. You set up an ortho projection in pixel units, but try to draw a cube in the range [-1,1], so it will be 2 pixels wide on the screen.
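For example, a minimal sketch of that first fix (only the top of display() changes):
void display(){
    // clear color and depth before drawing anything
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnableClientState(GL_COLOR_ARRAY);
    // ... rest of the original display() unchanged
}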
Actually your code is drawing the cube just fine. Look closely :P
The main issue is your projection. The initial GL viewing volume is a -1 to 1 cube, and the cube you're drawing spans exactly -1 to 1. The call to gluOrtho2D redefines the projection in OpenGL coordinates, which you make the same as pixels, but since your cube is -1 to 1 it is only two pixels big, with the rest offscreen.
Instead, drop the gluOrtho2D, which fixes the Z dimension at -1 to 1 and only lets you set X/Y, and create a slightly bigger projection...
glOrtho(-2, 2, -2, 2, -2, 2);
Note: As @derhass suggests, calling glClear is important, especially with depth testing enabled (without it, the cube from the last draw call will hide the updated cube).

GLUT OpenGL object won't animate

My polygon won't move. I tried many things, and I think glClear(GL_COLOR_BUFFER_BIT); or glutMainLoop(); won't do their job, so the first picture stays the same. There is no animation.
float x = 0;
float y = 0;
float b = 0;

void displayCB(void)
{
    glClear(GL_COLOR_BUFFER_BIT);
    kvadrat();
}

void kvadrat()
{
    glBegin(GL_POLYGON);
    glColor3f(1, 0, 0); glVertex2d( 0.5-x,  0.5-y);
    glColor3f(1, 0, 0); glVertex2d( 0.5-x, -0.5-y);
    glColor3f(1, 0, 0); glVertex2d(-0.5-x, -0.5-y);
    glColor3f(1, 0, 0); glVertex2d(-0.5-x,  0.5-y);
    glEnd();
    Sleep(1999);
    glFlush();
    x = x + 0.01; // I modified this value so it will always be between 0.5 and -0.5,
                  // this is just example
}

int main(int argc, char *argv[])
{
    int win;
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB);
    glutInitWindowSize(800, 600);
    win = glutCreateWindow("Elementi");
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glutDisplayFunc(displayCB);
    glutKeyboardFunc(keyCB);
    glutMainLoop();
    return 0;
}
I think you may be moving the position by only a very small increment every two seconds, so the image appears to be static, but it's not; it's just changing very slowly. Try taking out the Sleep(1999) and see whether your animation works any better.
If you need to animate over time, it is better to instead use glutGet(GLUT_ELAPSED_TIME) to figure out how much time has elapsed since the last frame and use that to scale your deltas.
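For instance, a minimal sketch of that approach, replacing the Sleep() call with an idle callback (idleCB is a hypothetical name; the speed constant is just an example):
void idleCB(void)
{
    static int last = 0;
    int now = glutGet(GLUT_ELAPSED_TIME); // milliseconds since glutInit
    float dt = (now - last) / 1000.0f;    // seconds since the last frame
    last = now;
    x = x + 0.25f * dt;                   // move at 0.25 units per second
    if (x > 0.5f) x = -0.5f;              // wrap around, as in the question
    glutPostRedisplay();                  // ask GLUT to call displayCB again
}
// in main(), after glutDisplayFunc(displayCB):
// glutIdleFunc(idleCB);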
I fixed the problem with glutPostRedisplay(); // Inform GLUT that the display has changed

glReadPixels reads wrong color in OpenGL

There is a simple program, actually extracted from a bigger one; putting that aside, I tried to do a simple pixel read/write in OpenGL.
The first section, which reads a pixel color, succeeds, but the second operation, which writes a pixel and then reads it back, fails!
Here is the program which demonstrates the problem:
#include <iostream>
#include <glut.h>
using namespace std;

void init(void)
{
    glClearColor(1.0, 0.0, 0.0, 0.0);
    glMatrixMode(GL_PROJECTION);
    gluOrtho2D(0.0, 100.0, 0.0, 100.0);
}

void displayfnc()
{
    glClear(GL_COLOR_BUFFER_BIT);
    unsigned char current[3];
    int r, g, b;
    //x=0 to 6 is right
    glBegin(GL_POINTS);
    glColor3f(0.0, 0.0, 1.0);
    glVertex2i(6, 0);
    glEnd();
    //works well.
    glReadPixels(6, 0, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, current);
    r = current[0];
    g = current[1];
    b = current[2];
    cout << "read from x=6::::" << "r:" << r << "g:" << g << "b:" << b << endl;
    //x=7 and other isnt right
    glBegin(GL_POINTS);
    glColor3f(0.0, 0.0, 1.0);
    glVertex2i(7, 0);
    glEnd();
    //the problem is here in the reading. why this happen?
    glReadPixels(7, 0, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, current);
    r = current[0];
    g = current[1];
    b = current[2];
    cout << "Read from x=7::::" << "r:" << r << "g:" << g << "b:" << b << endl;
    glFlush();
}

int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowSize(100, 100);
    glutInitWindowPosition(0, 0);
    glutCreateWindow("BoundaryFill");
    init();
    glutDisplayFunc(displayfnc);
    glutMainLoop();
    return 0;
}
Here is the output (screenshot omitted): the read at x=6 returns the expected color, but the read at x=7 does not.
What is causing the second read to fail?
At least on my system, glutInitWindowSize(100,100) is just a suggestion: glutGet(GLUT_WINDOW_WIDTH) actually returns 104, not the 100 your gluOrtho2D() call is assuming.
Try using a dynamic projection instead:
#include <GL/glut.h>
#include <iostream>
using namespace std;

void displayfnc()
{
    glClearColor(1.0, 0.0, 0.0, 0.0);
    glClear(GL_COLOR_BUFFER_BIT);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    double w = glutGet(GLUT_WINDOW_WIDTH);
    double h = glutGet(GLUT_WINDOW_HEIGHT);
    gluOrtho2D(0.0, w, 0.0, h);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    unsigned char current[3];
    int r, g, b;
    //x=0 to 6 is right
    glBegin(GL_POINTS);
    glColor3f(0.0, 0.0, 1.0);
    glVertex2i(6, 0);
    glEnd();
    //works well.
    glReadPixels(6, 0, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, current);
    r = current[0];
    g = current[1];
    b = current[2];
    cout << "read from x=6::::" << "r:" << r << "g:" << g << "b:" << b << endl;
    //x=7 and other isnt right
    glBegin(GL_POINTS);
    glColor3f(0.0, 0.0, 1.0);
    glVertex2i(7, 0);
    glEnd();
    //the problem is here in the reading. why this happen?
    glReadPixels(7, 0, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, current);
    r = current[0];
    g = current[1];
    b = current[2];
    cout << "Read from x=7::::" << "r:" << r << "g:" << g << "b:" << b << endl;
    glFlush();
}

int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowSize(100, 100);
    glutInitWindowPosition(0, 0);
    glutCreateWindow("BoundaryFill");
    glutDisplayFunc(displayfnc);
    glutMainLoop();
    return 0;
}

Simple VBO doesn't display

I'm using a vertex layout with three floats for the position (XYZ) and three more for the color (RGB):
XYZ RGB
XYZ RGB
...
I'm currently trying to plot a red triangle, but unfortunately I end up with a white window. I think there's a problem with the stride, but I can't figure it out. I've tried many values for the stride and the size, and it still doesn't seem to display anything.
//main.cpp
#include "data.h"

GLuint ID;
int size, el_size;

void init(){
    vector<float> data_vector(18);
    data_vector[0] = 0;  //x
    data_vector[1] = 0;  //y
    data_vector[2] = 0;  //z
    data_vector[3] = 1;
    data_vector[4] = 0;
    data_vector[5] = 0;
    data_vector[6] = 1;  //x
    data_vector[7] = 0;  //y
    data_vector[8] = 0;  //z
    data_vector[9] = 1;
    data_vector[10] = 0;
    data_vector[11] = 0;
    data_vector[12] = 0; //x
    data_vector[13] = 1; //y
    data_vector[14] = 0; //z
    data_vector[15] = 1;
    data_vector[16] = 0;
    data_vector[17] = 0;
    size = data_vector.size();
    // Init GLEW
    if (glewInit() != GLEW_OK){
        cerr << "Failed to initialize GLEW." << endl;
        exit(-1);
    }
    if (!glewIsSupported("GL_VERSION_1_5") && !glewIsSupported("GL_ARB_vertex_buffer_object")){
        cerr << "ARB_vertex_buffer_object not supported!" << endl;
        exit(-2);
    }
    glOrtho(-1, 1, 1, -1, -5.0f, 5.0f);
    glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
    glShadeModel(GL_SMOOTH);
    glEnableClientState(GL_VERTEX_ARRAY);
    glGenBuffers(1, &ID);
    glBindBuffer(GL_ARRAY_BUFFER, ID);
    glBufferData(GL_ARRAY_BUFFER, size*sizeof(float), &data_vector[0], GL_STATIC_DRAW);
    el_size = 3*sizeof(data_vector[0]);
}

void reshape(int w, int h){
    cout << "reshape" << endl;
    glViewport(0, 0, (GLsizei)w, (GLsizei)h);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0.0f, (GLdouble)w, 0.0f, (GLdouble)h);
}

void display(){
    cout << "display" << endl;
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glBindBuffer(GL_ARRAY_BUFFER, ID);
    glEnableClientState(GL_VERTEX_ARRAY);
    glVertexPointer(3, GL_FLOAT, el_size, 0);
    glEnableClientState(GL_COLOR_ARRAY);
    glColorPointer(3, GL_FLOAT, el_size, (void*)(el_size));
    glDrawArrays(GL_TRIANGLES, 0, size/6);
    glFlush();
}

int main(int argc, char **argv){
    cout << "main" << endl;
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowSize(500, 500);
    glutInitWindowPosition(300, 300);
    glutCreateWindow(argv[0]);
    init();
    glutDisplayFunc(display);
    // glutReshapeFunc(reshape);
    glutMainLoop();
    return 0;
}
You do appear to be using an incorrect stride. The stride should be the distance from the start of one vertex to the start of the next vertex, or in your case 6 floats.
You've set the stride to el_size, which is only 3 floats.
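With the interleaved XYZ RGB layout above, the fix would look something like:
el_size = 6 * sizeof(float); // from the start of one vertex to the start of the next
glVertexPointer(3, GL_FLOAT, el_size, 0);
glColorPointer(3, GL_FLOAT, el_size, (void*)(3 * sizeof(float)));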
Also take care that your reshape function sets an ortho matrix from 0 to the window width/height, while your init function sets it from -1 to 1. If reshape ever gets called, your scene will become radically different.
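If you do enable glutReshapeFunc(reshape) later, a sketch of a reshape that stays consistent with init() might be:
void reshape(int w, int h){
    glViewport(0, 0, (GLsizei)w, (GLsizei)h);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-1, 1, 1, -1, -5.0f, 5.0f); // same volume as set up in init()
    glMatrixMode(GL_MODELVIEW);
}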
One problem I see is the call to glOrtho in the init function. Whatever you intend to do, this surely doesn't do what you want. As a general rule, put all drawing-state related commands only into the display function, nowhere else; setting transformation matrices (and the viewport) should happen there. Doing so saves a lot of headaches later.
The other problem is that the stride you define is too short. The stride is the distance from vertex to vertex in an interleaved array, not the length of a single attribute, so el_size is calculated wrongly.