GL_TEXTURE_RECTANGLE_ARB - OpenGL

I'm trying to use a raw data texture with GL_TEXTURE_RECTANGLE_ARB:
void Display::tex(){
GLubyte Texture[16] =
{
0,0,0,0, 0xFF,0xFF,0xFF,0xFF,
0xFF,0xFF,0xFF,0xFF, 0,0,0,0
};
GLuint Nom;
glLoadIdentity();//load identity matrix
glTranslatef(0.0f,0.0f,-4.0f);//move forward 4 units
glEnable(GL_DEPTH_TEST); // enable depth testing
glDisable( GL_CULL_FACE );
glEnable (GL_TEXTURE_RECTANGLE_ARB);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 2);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1, &Nom);
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, Nom);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8, 2, 2,
0, GL_BGRA_EXT, GL_UNSIGNED_INT_8_8_8_8_REV, Texture);
angle = 0.01 * glutGet ( GLUT_ELAPSED_TIME );
glRotatef(angle,0,1,1);
glBegin(GL_QUADS); // now draw the cube!
glTexCoord2i(0,0);glVertex3i(-1,-1,-1);
glTexCoord2i(1,0);glVertex3i(+1,-1,-1);
glTexCoord2i(1,1);glVertex3i(+1,+1,-1);
glTexCoord2i(0,1);glVertex3i(-1,+1,-1);
// face 1
glTexCoord2i(0,0);glVertex3i(-1,-1,+1);
glTexCoord2i(1,0);glVertex3i(+1,-1,+1);
glTexCoord2i(1,1);glVertex3i(+1,+1,+1);
glTexCoord2i(0,1);glVertex3i(-1,+1,+1);
// face 2
glTexCoord2i(0,0);glVertex3i(+1,-1,-1);
glTexCoord2i(1,0);glVertex3i(+1,-1,+1);
glTexCoord2i(1,1);glVertex3i(+1,+1,+1);
glTexCoord2i(0,1);glVertex3i(+1,+1,-1);
// face 3
glTexCoord2i(0,0);glVertex3i(-1,-1,-1);
glTexCoord2i(1,0);glVertex3i(-1,-1,+1);
glTexCoord2i(1,1);glVertex3i(-1,+1,+1);
glTexCoord2i(0,1);glVertex3i(-1,+1,-1);
// face 4
glTexCoord2i(1,0);glVertex3i(-1,+1,-1);
glTexCoord2i(1,1);glVertex3i(+1,+1,-1);
glTexCoord2i(0,1);glVertex3i(+1,+1,+1);
glTexCoord2i(0,0);glVertex3i(-1,+1,+1);
// face 5
glTexCoord2i(1,0);glVertex3i(-1,-1,+1);
glTexCoord2i(1,1);glVertex3i(+1,-1,+1);
glTexCoord2i(0,1);glVertex3i(+1,-1,-1);
glTexCoord2i(0,0);glVertex3i(-1,-1,-1);
// face 6
glEnd();
glFlush();
}
The result is not very good:
http://shareimage.ro/images/xdkyd12oty44c0qpuo1b.png
Each face of the cube should show a 4-square checker texture (2 black and 2 white squares).
I don't know where the error is...

The GL_ARB_texture_rectangle extension uses dimension-dependent texture coordinates. Use the [0..W]x[0..H] range for texture coordinates instead of the normalized [0..1]x[0..1] range.
For example, to draw a full image on a quad:
glTexCoord2f(0, 0); glVertex3f(...); // top-left
glTexCoord2f(0, imageHeight); glVertex3f(...); // bottom-left
glTexCoord2f(imageWidth, imageHeight); glVertex3f(...); // bottom-right
glTexCoord2f(imageWidth, 0); glVertex3f(...); // top-right
Note that the GL_ARB_texture_rectangle extension has several limitations:
Mipmap filtering is not supported.
Texture borders are not supported.
The GL_REPEAT wrap mode is not supported.
Palette textures are not supported.
Texture coords are addressed as [0..w]x[0..h].
Luckily, OpenGL also provides the GL_ARB_texture_non_power_of_two extension, which resolves the above limitations while still supporting NPOT (non-power-of-two) textures.
The biggest advantages of GL_ARB_texture_non_power_of_two are:
GL_ARB_texture_non_power_of_two uses the conventional normalized texture coords, [0..1]x[0..1].
It does NOT require the additional texture target token GL_TEXTURE_RECTANGLE_ARB for glEnable(), glTexImage*D(), glBindTexture(), etc.; you can still use GL_TEXTURE_2D as usual for NPOT textures.
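As a rough sketch of that GL_TEXTURE_2D route (assuming a driver that exposes GL_ARB_texture_non_power_of_two or OpenGL 2.0+; the 2x2 Texture array is the one from the question, everything else is illustrative):
GLuint tex;
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 2, 2, 0, GL_BGRA_EXT, GL_UNSIGNED_INT_8_8_8_8_REV, Texture);
glBegin(GL_QUADS); // normalized coordinates, as with any GL_TEXTURE_2D texture
glTexCoord2f(0, 0); glVertex3i(-1, -1, -1);
glTexCoord2f(1, 0); glVertex3i(+1, -1, -1);
glTexCoord2f(1, 1); glVertex3i(+1, +1, -1);
glTexCoord2f(0, 1); glVertex3i(-1, +1, -1);
glEnd();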

With texture rectangles, the texture coordinates are absolute texel positions. In your case the texture coordinates would be (0,0), (2,0), (2,2), (0,2).
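A minimal sketch for one face, using the 2x2 texture from the question (a rectangle texture addresses texels directly, so the coordinates run from 0 to 2):
glTexCoord2i(0, 0); glVertex3i(-1, -1, -1);
glTexCoord2i(2, 0); glVertex3i(+1, -1, -1);
glTexCoord2i(2, 2); glVertex3i(+1, +1, -1);
glTexCoord2i(0, 2); glVertex3i(-1, +1, -1);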

It's working now; see below:
GLubyte Texture[16] =
{
0,0xFF,0,0xFF, 0xFF,0xFF,0xFF,0xFF,
0xFF,0xFF,0xFF,0xFF, 0,0,0,0
};
GLuint Nom;
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT ); // clear the framebuffer and the depth buffer
glMatrixMode(GL_MODELVIEW); // a little gluLookAt()...
glLoadIdentity();//load identity matrix
glTranslatef(0.0f,0.0f,-4.0f);//move forward 4 units
angle = 0.1 * glutGet ( GLUT_ELAPSED_TIME );
glRotatef(angle,0,2,2);
glEnable(GL_TEXTURE_2D);
glEnable (GL_TEXTURE_RECTANGLE_ARB);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 2);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1,&Nom); // generate a texture ID
glBindTexture(GL_TEXTURE_2D,Nom); // bind that ID
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 4, 4, 0, GL_RGB,GL_UNSIGNED_BYTE, Texture);
glTexParameterf(GL_TEXTURE_RECTANGLE_ARB, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); // GL_CLAMP_TO_EDGE
glTexParameterf(GL_TEXTURE_RECTANGLE_ARB, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); // GL_CLAMP_TO_EDGE
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8, 2, 2, 0, GL_BGRA_EXT, GL_UNSIGNED_INT_8_8_8_8_REV, Texture);
glBegin(GL_QUADS);
glTexCoord2i(0,0);glVertex3i(-1,-1,-1);
glTexCoord2i(2,0);glVertex3i(+1,-1,-1);
glTexCoord2i(2,2);glVertex3i(+1,+1,-1);
glTexCoord2i(0,2);glVertex3i(-1,+1,-1);
// face 1
glTexCoord2i(0,0);glVertex3i(-1,-1,+1);
glTexCoord2i(2,0);glVertex3i(+1,-1,+1);
glTexCoord2i(2,2);glVertex3i(+1,+1,+1);
glTexCoord2i(0,2);glVertex3i(-1,+1,+1);
// face 2
glTexCoord2i(0,0);glVertex3i(+1,-1,-1);
glTexCoord2i(2,0);glVertex3i(+1,-1,+1);
glTexCoord2i(2,2);glVertex3i(+1,+1,+1);
glTexCoord2i(0,2);glVertex3i(+1,+1,-1);
// face 3
glTexCoord2i(0,0);glVertex3i(-1,-1,-1);
glTexCoord2i(2,0);glVertex3i(-1,-1,+1);
glTexCoord2i(2,2);glVertex3i(-1,+1,+1);
glTexCoord2i(0,2);glVertex3i(-1,+1,-1);
glEnd();
glDisable(GL_TEXTURE_2D);

Related

OpenGL texture mapping - C

I'm trying to display a texture in my window using OpenGL. However, the texture is only mapped to the bottom left of my window and gets cut off!
Here is my code:
Texture:
GLuint textureID[1];
GLubyte Image[1024*768*4];
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1,textureID);
glBindTexture(GL_TEXTURE_2D, textureID[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, 3, 1024, 768, 0, GL_RGBA, GL_UNSIGNED_BYTE, Image);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glEnable(GL_TEXTURE_GEN_S);
glEnable(GL_TEXTURE_GEN_T);
Quad:
glPushMatrix ();
glTranslatef(0, 0.0, -1.1);
glMaterialf(GL_FRONT, GL_SHININESS, 30.0);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, textureID[0]);
glViewport(-511,-383,1025,768);
glBegin(GL_QUADS);
glTexCoord2d(0.0, 0.0); glVertex2f(0.0, 0.0);
glTexCoord2d(1.0, 0.0); glVertex2f(1024.0, 0.0);
glTexCoord2d(1.0, 1.0); glVertex2f(1024.0, 768.0);
glTexCoord2d(0.0, 1.0); glVertex2f(0.0, 768.0);
glEnd();
glDisable(GL_TEXTURE_2D);
glPopMatrix ();
glFlush ();
I'm trying to map the texture onto my window. Both my window and the texture have a size of 1024x768. What did I do wrong? If I comment out glViewport, the texture gets mapped to the top right.
The reason your image gets cut off is the values supplied to glViewport. This function specifies which area of the screen the rendering goes to. So if you set the x value to -511 and the width to 1025, the drawing happens from pixel -511 to 514, which is exactly what you see.
What you actually want, to get the image to your desired position, is a projection (most likely an orthographic one) that maps your input coordinates to the appropriate normalized device coordinates (NDC). Without a projection, OpenGL works in these NDC coordinates, which range from -1 to 1 on each axis, and not, as you assumed, in pixel coordinates.
Your viewport parameters are not what you want (negative values for x and y). You also likely didn't specify a projection/modelview matrix pair that maps local coordinates to pixels (at least not in the code shown), yet the coordinates you pass to glVertex suggest you want to address pixels.
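A minimal sketch of that setup, assuming a 1024x768 window: keep the viewport covering the whole window and let an orthographic projection map pixel coordinates to NDC.
glViewport(0, 0, 1024, 768); // full window
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, 1024.0, 0.0, 768.0, -1.0, 1.0); // x: 0..1024, y: 0..768, bottom-left origin
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// now glVertex2f(0, 0) addresses the bottom-left pixel and glVertex2f(1024, 768) the top-right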

Coordinates wrong when drawing to texture in framebuffer and then rendering texture to screen?

I'm using OpenGL to draw the graphics for a simple game like Space Invaders. So far I have it rendering moving meteorites and a GIF file quite nicely; I get the basics. But I just can't get the framebuffer working properly, which I intend to render a bitmap font into.
The first function is called inside the render function only when the score changes, and it produces a texture containing the score characters. The second function draws the texture containing the bitmap font characters to the screen every time the render function is called. I thought this would be a more efficient way to draw the score. Right now I'm just trying to get it drawing a square using the framebuffer, but it seems that the coordinates range from -1 to 0. I thought the coordinates for a texture went from 0 to 1? I commented which vertex affects which corner of the square, and it seems to be wrong.
void Score::UpdateScoreTexture(int* success)
{
int length = 8;
char* chars = LongToNumberDigits(count, &length, 0);
glDeleteTextures(1, &textureScore);//last texture containing previous score deleted to make room for new score
glGenTextures(1, &textureScore);
GLuint frameBufferScore;
glGenTextures(1, &textureScore);
glBindTexture(GL_TEXTURE_2D, textureScore);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 256, 256, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL);
glGenFramebuffers(1, &frameBufferScore);
glBindFramebuffer(GL_FRAMEBUFFER, frameBufferScore);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureScore, 0);
GLenum status;
status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
std::cout << "status is: ";
std::cout << "\n";
switch (status)
{
case GL_FRAMEBUFFER_COMPLETE:
std::cout << "good";
break;
default:
PrintGLStatus(status);
while (1 == 1);
}
glBindFramebuffer(GL_FRAMEBUFFER, frameBufferScore);
glBegin(GL_POLYGON);
glVertex3f(-1, -1, 0.0);//appears to be the bottom left,
glVertex3f(0, -1, 0.0);//appears to be the bottom right
glVertex3f(0, 0, 0.0);//appears to be the top right
glVertex3f(-1, 0, 0.0);//appears to be the top left
glEnd();
glDisable(GL_TEXTURE_2D);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D,0);
glDeleteFramebuffers(1, &frameBufferScore);
//glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, chars2);
}
void Score::DrawScore(void)
{
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glBindTexture(GL_TEXTURE_2D, textureScore);
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0); glVertex3f(0.7, 0.925, 0.0);
glTexCoord2f(0.0, 1.0); glVertex3f(0.7, 0.975, 0.0);
glTexCoord2f(1.0, 1.0); glVertex3f(0.975, 0.975, 0.0);
glTexCoord2f(1.0, 0.0); glVertex3f(0.975, 0.925, 0.0);
glEnd();
glFlush();
glDisable(GL_TEXTURE_2D);
}
Any ideas where I'm going wrong?
You have not set the viewport with glViewport; this may give you problems.
Another possibility is that you have a matrix set to something other than identity.
Ensure that you have reset the modelview and projection matrices to identity (or whatever you want them to be) before glBegin(GL_POLYGON) in UpdateScoreTexture() (you may wish to push the matrices onto the stack before you make changes):
glViewport(0, 0, framebufferWidth, framebufferHeight);
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
Then put them back at the end of the function:
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glPopMatrix();

How to transform SDL TTF Surface into GL Texture with Transparent background

I would like to draw text over content drawn in GL. I want the symbols themselves to be opaque and the rest transparent, so the drawn content underneath can be seen. The following code yields the text, which is the correct one, but on a perfectly white background; my drawn content is completely absent.
How do I solve this? I am using SDL 2.0 and VSC.
glPushMatrix();
/*Content drawn in GL*/
GLuint TextureID = 0;
SDL_Color Color = {30, 30, 30, 0};
TTF_Font * Font = TTF_OpenFont("Times.ttf", 30);
SDL_Surface * Message = TTF_RenderText_Blended(Font, "ASDASDASD", Color);
glGenTextures(1, &TextureID);
glBindTexture(GL_TEXTURE_2D, TextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, Message->w, Message->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, Message->pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, TextureID);
glBegin(GL_QUADS);
{
glColor4f(.5, .5, .5, 1);
glTexCoord2f(0,0); glVertex2f(MouseX/10, MouseY/10);
glTexCoord2f(1,0); glVertex2f((MouseX/10) + (Message->w / 10), MouseY/10);
glTexCoord2f(1,1); glVertex2f((MouseX/10) + (Message->w / 10), (MouseY/10) + (Message->h / 10));
glTexCoord2f(0,1); glVertex2f(MouseX/10, (MouseY/10) + (Message->h / 10));
}
glEnd();
glPopMatrix();
SDL_FreeSurface(Message);
SDL_GL_SwapWindow(GameWindow);
Think of it this way: when you create the texture above, you're drawing dark grey, fully transparent (30,30,30,0) text against a fully transparent background (0,0,0,0 due to TTF_RenderText_Blended). You're then using immediate-mode color blending to rasterize a quad of the same size (HINT: with no blend func and no mention of disabling the z buffer!).
P.S. You're leaking like crazy in the example:
glGenTextures every frame without a glDeleteTextures (you only need a new texture if it changes).
TTF_OpenFont without a TTF_CloseFont every frame (keep the font open only as long as you need to render with it).
The TTF_RenderText_Blended surface isn't leaked, but you could free it right after glTexImage2D.
I assume you mean to use the font as a mask to subtract from a solid color background? I generally create font textures as white (full alpha), then apply color or masking in the shader. To accomplish a similar effect in immediate mode you could:
// startup
glEnable(GL_BLEND);
glDisable(GL_DEPTH_TEST);
SDL_Color Color = {255, 255, 255, 255};
TTF_Font * Font = TTF_OpenFont("Times.ttf", 30);
SDL_Surface * Message = TTF_RenderText_Blended(Font, "ASDASDASD", Color);
TTF_CloseFont(Font);
glGenTextures(1, &TextureID);
glBindTexture(GL_TEXTURE_2D, TextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, Message->w, Message->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, Message->pixels);
SDL_FreeSurface(Message);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// frame render
glClear(GL_COLOR_BUFFER_BIT);
// you'll see this in many (immediate mode) 2D rendering examples
// without the blend eq below, this would render the white text on top of the quad's background color
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// use text as a mask to cut away from what is below
glBlendEquation(GL_FUNC_REVERSE_SUBTRACT);
glBindTexture(GL_TEXTURE_2D, TextureID);
glBegin(GL_QUADS);
{
glColor4f(.5, .5, .5, 1);
glTexCoord2f(0,0); glVertex2f(MouseX/10, MouseY/10);
glTexCoord2f(1,0); glVertex2f((MouseX/10) + (Message->w / 10), MouseY/10);
glTexCoord2f(1,1); glVertex2f((MouseX/10) + (Message->w / 10), (MouseY/10) + (Message->h / 10));
glTexCoord2f(0,1); glVertex2f(MouseX/10, (MouseY/10) + (Message->h / 10));
}
glEnd();
// if done every frame, make sure to: glDeleteTextures(1, &TextureID);
SDL_GL_SwapWindow(GameWindow);
Very handy site here: http://www.andersriggelsen.dk/glblendfunc.php

nVidia openGL fails to display simple COLOR_INDEX texture

I have my first simple OpenGL program to display 2D images. I'm using an index-based image and calling glTexImage2D(... GL_RGB, ... GL_COLOR_INDEX ...).
This is working as expected on an ATI card.
Having swapped to an nVidia card, I see a black quad instead of my image. Given that it works on the ATI, I guess the code is basically correct, but maybe I have missed a setting, or maybe the card doesn't support what I'm doing (?!)
First, the setup code (I'm using Qt, by the way, so there are probably some context calls I'm missing):
glClearColor( 0.1, 0.1, 0.25, 0); // background color
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glPixelStorei(GL_UNPACK_ALIGNMENT, 4); // 4-byte pixel alignment
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexEnvf( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
Here's the code to set up the texture:
GLfloat Greys[256];
GLfloat Ones[256];
for( int I(0); I < 256; ++I )
{
Greys[I] = (GLfloat)I/256;
Ones[I] = 1.0;
}
makeCurrent();
glPixelMapfv( GL_PIXEL_MAP_I_TO_R, 256, Greys );
glPixelMapfv( GL_PIXEL_MAP_I_TO_G, 256, Greys );
glPixelMapfv( GL_PIXEL_MAP_I_TO_A, 256, Ones );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_pImage->size().width(), m_pImage->size().height(), 0, GL_COLOR_INDEX, GL_UNSIGNED_BYTE, m_pImage->bits() );
Here's the display code:
glLoadIdentity();
// Get the camera in the right place
glRotatef( 180, 1, 0, 0 );
// Apply the Pan(Offset), and Zoom
glTranslatef( m_Offset.x(), m_Offset.y(), 0);
glScalef( m_Zoom, m_Zoom, 1 );
// Display the image texture mapped to a rectangle
glColor3f( 1,1,0 );
glEnable(GL_TEXTURE_2D);
glBegin(GL_QUADS);
glTexCoord2f( 0, 0 ); glVertex3f( 0, 0, 10 );
glTexCoord2f( 1, 0 ); glVertex3f( ImSize.width(), 0, 10 );
glTexCoord2f( 1, 1 ); glVertex3f( ImSize.width(), ImSize.height(), 10 );
glTexCoord2f( 0, 1 ); glVertex3f( 0, ImSize.height(), 10 );
glEnd();
glDisable(GL_TEXTURE_2D);
I also display the same image in full colour, in a separate window, using a straight RGB - RGB call to glTexImage2D. So I'm confident the dimensions are acceptable.
If I remove the call to glTexImage2D then I get a yellow quad as expected. Thus I suspect I have a problem with my calls to set the colour LUTs.
Board is an ASUS GeForce 210 silent
Windows XP 32 bit.
nVidia Drivers 6.14.13.681 (9-23-2012), R306.81 (branch: r304_70-122)
Did you test for OpenGL error codes? You may use this code: https://gist.github.com/4144988
Regarding color index formats: I wouldn't be surprised if they simply weren't supported by the driver. Nobody uses color index formats these days. If you want to draw a paletted texture, upload the palette into a 1D RGB texture, upload the color-indexed image into a single-channel (GL_RED or GL_LUMINANCE, depending on the OpenGL version) 2D texture, and use the value as an index into the palette texture.
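A hedged sketch of that palette-lookup approach (the names paletteTex, indexTex, paletteRGB and indices are illustrative, not from the question; paletteRGB is a 256x3 float array; assumes GLSL 1.20-era hardware where GL_LUMINANCE is still available):
// fragment shader: fetch the index, then look it up in the 1D palette
const char *fragSrc =
    "uniform sampler2D indexTex;\n"   // 8-bit indices stored as luminance
    "uniform sampler1D paletteTex;\n" // 256-entry RGB palette
    "void main() {\n"
    "    float index = texture2D(indexTex, gl_TexCoord[0].st).r;\n"
    "    gl_FragColor = texture1D(paletteTex, index);\n"
    "}\n";
// palette upload: 256 RGB entries
glBindTexture(GL_TEXTURE_1D, paletteTex);
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage1D(GL_TEXTURE_1D, 0, GL_RGB8, 256, 0, GL_RGB, GL_FLOAT, paletteRGB);
// index image upload: one byte per pixel
glBindTexture(GL_TEXTURE_2D, indexTex);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE8, width, height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, indices);
The shader still has to be compiled, linked and bound, with the two samplers pointing at the right texture units. If exact palette entries matter, remap the fetched value to texel centers in the shader (index * 255.0/256.0 + 0.5/256.0), since a stored byte i decodes to i/255 while palette entry i is centered at (i + 0.5)/256.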

OpenGL Nvidia Driver 259.12 texture not working

My OpenGL application, which was working fine on an ATI card, stopped working when I put in an NVIDIA Quadro card. Textures simply don't work at all! I've reduced my program to a single display function, which doesn't work:
void glutDispCallback()
{
//ALLOCATE TEXTURE
unsigned char * noise = new unsigned char [32 * 32 * 3];
memset(noise, 255, 32*32*3);
glEnable(GL_TEXTURE_2D);
GLuint textureID;
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glGenTextures(1, &textureID);
glBindTexture(GL_TEXTURE_2D, textureID);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 32, 32, 0, GL_RGB, GL_UNSIGNED_BYTE, noise);
delete [] noise;
//DRAW
glDrawBuffer(GL_BACK);
glViewport(0, 0, 1024, 1024);
setOrthographicProjection();
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glLoadIdentity();
glDisable(GL_BLEND);
glDisable(GL_LIGHTING);
glBindTexture(GL_TEXTURE_2D, textureID);
glColor4f(0,0,1,0);
glBegin(GL_QUADS);
glTexCoord2f(0,0);
glVertex2f(-0.4,-0.4);
glTexCoord2f(0, 1);
glVertex2f(-0.4, 0.4);
glTexCoord2f(1, 1);
glVertex2f(0.4, 0.4);
glTexCoord2f(1,0);
glVertex2f(0.4,-0.4);
glEnd();
glutSwapBuffers();
//CLEANUP
GL_ERROR();
glDeleteTextures(1, &textureID);
}
The result is a blue quad (or whatever is specified by glColor4f()), and not a white quad, which is what the texture is. I have followed the FAQ on the OpenGL site. I have disabled blending in case the texture was being blended out. I have disabled lighting. I have checked glGetError(): no errors. I've also set glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE); and GL_DECAL. Same result. I've also tried different polygon winding, CW and CCW.
Anyone else encounter this?
Can you try using GL_REPLACE in glTexEnvi? It could be a bug in the NV driver.
Your code is correct and does what it should.
memset(noise, 255, 32*32*3); makes the texture white, but you call glColor4f(0,0,1,0); so the final color will be (1,1,1)*(0,0,1) = (0,0,1) = blue.
What is the behavior you would like to have?
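A minimal sketch of the two usual fixes, assuming the goal is to see the texture's own color:
glColor4f(1, 1, 1, 1); // with GL_MODULATE, (1,1,1) * texture = texture
// ...or ignore the vertex color entirely:
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);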
I found the error. Somewhere else in my code I had initialized a GL_TEXTURE_3D object and had not called glDisable(GL_TEXTURE_3D);
Even though I had called glBindTexture(GL_TEXTURE_2D, textureID); I expected it to bind a 2D texture as the current texture and use that, since this code always worked on ATI cards. Apparently the nVidia driver wasn't doing that; it was using the 3D texture instead. So adding glDisable(GL_TEXTURE_3D); fixed the problem, and everything works as expected.
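For reference, in the fixed-function pipeline the highest enabled target on a texture unit takes priority (cube map over 3D over 2D over 1D), so a leftover glEnable(GL_TEXTURE_3D) silently wins over a 2D bind. A minimal defensive sketch, reusing textureID from the code above:
glDisable(GL_TEXTURE_3D);       // leftover enable from elsewhere in the program
glDisable(GL_TEXTURE_CUBE_MAP);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, textureID);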
Thanks all who tried to help.