OpenGL - texture not visible with blending enabled [closed] - c++

When I try to display a texture on an object it works, but only with GL_BLEND disabled. When I enable blending:
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
The texture is just not visible anymore. Black screen.
I really have no idea what's going on. It's the same for JPG and for PNG with an alpha channel.
EDIT (more details):
Well, it's hard to paste the code (objects, objects everywhere, and huge), but it goes something like this:
//initialization - i commented everything else
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
//preparing texture
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &texture_id);
glBindTexture(GL_TEXTURE_2D, texture_id);
int Mode = GL_BGR;
int nOfColors = image->format->BytesPerPixel;
if (nOfColors == 4) {
    if (image->format->Rmask == 0x000000ff)
        Mode = GL_RGBA;
    else
        Mode = GL_BGRA;
} else if (nOfColors == 3) {
    if (image->format->Rmask == 0x000000ff)
        Mode = GL_RGB;
    else
        Mode = GL_BGR;
}
// glTexEnvf( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glTexImage2D(GL_TEXTURE_2D, 0, nOfColors, image->w, image->h, 0, Mode, GL_UNSIGNED_BYTE, image->pixels);
glDisable(GL_TEXTURE_2D);
//drawing
glColor4f(1.0f,1.0f,1.0f,1.0f);
glEnable(GL_TEXTURE_2D);
glBindTexture( GL_TEXTURE_2D, _i );
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex2f(x, y);
glTexCoord2f(1, 0);
glVertex2f(x + width, y);
glTexCoord2f(1, 1);
glVertex2f(x + width, y + height);
glTexCoord2f(0, 1);
glVertex2f(x, y + height);
glEnd();
glBindTexture( GL_TEXTURE_2D, NULL );
glDisable(GL_TEXTURE_2D);
EDIT2
"black screen" may be little confusing - i meant that nothing is displayed (my background is black but it doesnt matter) - if i turn blending off i get nice texture on screen - with blending on nothing but background color

Did you try to call glTexEnvf?
glTexEnvf( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
Here is an example: http://unick-soft.ru/art/files/basicBlend.zip
For your case, you need to look at this code:
//draw texture
glPushMatrix();
glColor4f(1.0, 1.0, 0.0, 1.0);
texture.switchOffTexture();
Sphere.drawObject();
glTranslatef(0.0, 0.0, 7.0);
glTexEnvf( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE );
texture.bindTexture();
glBegin(GL_POLYGON);
glTexCoord2f(0.0, 0.0);
glVertex3f(-1.0, -1.0, 0.0);
glTexCoord2f(1.0, 0.0);
glVertex3f(1.0, -1.0, 0.0);
glTexCoord2f(1.0, 1.0);
glVertex3f(1.0, 1.0, 0.0);
glTexCoord2f(0.0, 1.0);
glVertex3f(-1.0, 1.0, 0.0);
glEnd();
glPopMatrix();
If you have problems compiling, you can comment out the include: #include <gl\glaux.h>
Press 1, 2, or 3 to select the blending type. In case 3 you will see this result: http://unick-soft.ru/art/img/blend/text_blend.png

It seems as if your alpha channel is all 0 - are you positive that you are reading your texture correctly (with the alpha channel)? Try filling it by hand, or check whether it is really filled by whatever software you are using to read your files into pixels.
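A minimal sketch of that "fill it by hand" check, assuming the tightly packed 32-bit surface from the question (image->pixels, 4 bytes per pixel, alpha in the last byte); forcing every alpha byte to 255 quickly tells you whether the loader is the culprit:
// Hypothetical debug helper: force the alpha channel to fully opaque.
// Assumes a tightly packed 32-bit RGBA/BGRA surface (no row padding/pitch).
unsigned char* pixels = static_cast<unsigned char*>(image->pixels);
for (int i = 0; i < image->w * image->h; ++i) {
    pixels[i * 4 + 3] = 255; // byte 3 is alpha for both RGBA and BGRA layouts
}
// If the quad now shows up with blending enabled, the loader's alpha was all 0.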

So it finally started to work after I changed my image loading library to DevIL. The previous one had problems with the alpha channel, it seems.
Thanks all for your help.
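For reference, a minimal DevIL loading sketch along the lines of what ended up working; the file name and texture_id are placeholders and error checking is omitted:
#include <IL/il.h>

ilInit();                                   // initialize DevIL once at startup
ILuint img;
ilGenImages(1, &img);
ilBindImage(img);
ilLoadImage("texture.png");                 // placeholder path; may expect a wide string on Unicode Windows builds
ilConvertImage(IL_RGBA, IL_UNSIGNED_BYTE);  // normalize to RGBA so glTexImage2D gets a known layout

glBindTexture(GL_TEXTURE_2D, texture_id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
             ilGetInteger(IL_IMAGE_WIDTH), ilGetInteger(IL_IMAGE_HEIGHT),
             0, GL_RGBA, GL_UNSIGNED_BYTE, ilGetData());
ilDeleteImages(1, &img);                    // the pixel data has been copied into the texture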

Related

In C++, OpenGL, Glut, how to bind image.c to a texture, where image.c comes from Gimp>Export>C source code

So I've spent the past two days looking through different kinds of 'solutions' to my question via Google; there aren't all that many, and the ones I've found don't seem to work.
I'm exporting a small test image as a .c resource file from Gimp; its size is 64x64 and it has an alpha channel.
Basically looks like:
static const struct {
unsigned int width;
unsigned int height;
unsigned int bytes_per_pixel; /* 2:RGB16, 3:RGB, 4:RGBA */
char *comment;
unsigned char pixel_data[64 * 64 * 4 + 1];
} ship = {
64, 64, 4,
(char*) 0,
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\237\237\237\377\237\237\237\377\237\237\237\377\237\237"
"\237\377\237\237\237\377\237\237\237\377\237\237\237\377vZI\0vZI\0vZI\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
and goes on like that for quite a while, as you might expect, until finally ending with
"\237\237\377\237\237\237\377\237\237\237\377",
};
So how can I actually use this resource file? If anyone could provide an example, a bare minimum that is needed to create a square with the texture stamped on it, I'd be most appreciative.
Looking at the reference page for glTexImage2D, it is done like this (from here):
GLuint texName1 = 0;
glGenTextures(1, &texName1);
glBindTexture(GL_TEXTURE_2D, texName1);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0, ship.bytes_per_pixel, ship.width, ship.height, 0, GetDataFormat(), GetDataType(), ship.pixel_data);
glColor3f(1, 1, 0);
glBindTexture(GL_TEXTURE_2D, texName1);
glBegin(GL_QUADS);
glTexCoord2f (0.0, 0.0);
glVertex3f (0.0, 0.0, -5.0f);
glTexCoord2f (1.0, 0.0);
glVertex3f (.5, 0.0, -5.0f);
glTexCoord2f (1.0, 1.0);
glVertex3f (.5, .5, -5.0f);
glTexCoord2f (0.0, 1.0);
glVertex3f (0.0, .5, -5.0f);
glEnd();
The key line is this :
glTexImage2D(GL_TEXTURE_2D, 0, ship.bytes_per_pixel, ship.width, ship.height, 0, GetDataFormat(), GetDataType(), ship.pixel_data);
You need to implement GetDataFormat() and GetDataType() yourself; they return the data format and type.
One possible implementation :
GLenum GetDataFormat(){
return GL_BGRA;
}
GLenum GetDataType(){
return GL_UNSIGNED_BYTE;
}
Add the file to your source code; the ship structure object is created with all the info needed.
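As an alternative sketch, based on the comment in the exported struct itself (3:RGB, 4:RGBA), you could derive the format from bytes_per_pixel instead of hard-coding it:
// Hypothetical helpers: pick the format from bytes_per_pixel in the exported struct.
GLenum GetDataFormat() {
    return (ship.bytes_per_pixel == 4) ? GL_RGBA : GL_RGB;
}
GLenum GetDataType() {
    return GL_UNSIGNED_BYTE;
}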

Partly Transparent HUD-style overlay in OpenGL

I'm trying to make a program showing a red rotating cube in the background, overlayed with a textured quad.
The texture is a simple 24-bit bitmap of the words "Hello World" in black over a white background. I want the white background to be transparent so that the cube can be seen behind the overlay. The image loader checks the value of each pixel and adds the relevant alpha value to convert the image into a 32-bit bitmap.
At the moment, my program displays the overlay with black text but a red background, same colour as the cube. Below is the code used for the initial texture set up:
if (bitmap->Load("test.bmp")) {
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, 3, bitmap->GetWidth(), bitmap->GetHeight(),
0, GL_RGBA, GL_UNSIGNED_BYTE, bitmap->GetPixelData());
}
And this is the whole of my display function, in case anything is interfering with anything else.
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(40, 1, 0.1, 27.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glColor3f(1.0, 0.0, 0.0);
glTranslatef(0.0, 0.0, -1.1);
glRotatef(angle, 1.0, 1.0, 0.0);
glutSolidCube(0.1);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, 640, 480, 0.0, -1.0, 10.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glDisable(GL_CULL_FACE);
glClear(GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, texture);
glBegin(GL_QUADS);
glTexCoord2d(0.0, 0.0); glVertex2f(0.0, 0.0);
glTexCoord2d(1.0, 0.0); glVertex2f(320.0, 0.0);
glTexCoord2d(1.0, 1.0); glVertex2f(320.0, 240.0);
glTexCoord2d(0.0, 1.0); glVertex2f(0.0, 240.0);
glEnd();
glDisable(GL_BLEND);
glDisable(GL_TEXTURE_2D);
glFlush();
glutSwapBuffers();
The default texture environment is GL_MODULATE, which multiplies the incoming texel value by the current color (red from your cube).
Switch to GL_DECAL or do a glColor3ub(255,255,255) before you render your text.
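A minimal sketch of that fix in the overlay part of the display function from the question (only the color/texture-environment lines are new):
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glColor3ub(255, 255, 255);  // neutral color, so GL_MODULATE no longer tints the overlay red
// or, alternatively, ignore the current color entirely:
// glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glBindTexture(GL_TEXTURE_2D, texture);
// ... draw the textured quad exactly as before ...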

Wrong colors when loading textures in OpenGL

I'm trying to understand how to load a texture in OpenGL and I wrote this very simple code:
GLuint texture;
void loadTexture() {
GLubyte data[] = { 255,0,0,
0,255,0,
0,255,0,
255,0,0 };
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
int chk = gluBuild2DMipmaps( GL_TEXTURE_2D, GL_RGB, 2, 2, GL_RGB, GL_UNSIGNED_BYTE, data );
if (chk!=0)
printf("error code = %d\n",chk);
else
printf("success\n");
}
I use loadTexture() to load the texture into memory. The texture, in this sample, is extremely simple, but at the moment that doesn't matter.
void drawTexturedSquare() {
glEnable( GL_TEXTURE_2D );
glBegin (GL_QUADS);
glTexCoord2f (0.0, 0.0);
glNormal3f(0, 0, 1);
glVertex3f (0.0, 0.0, 0.0);
glTexCoord2f (1.0, 0.0);
glNormal3f(0, 0, 1);
glVertex3f (10.0, 0.0, 0.0);
glTexCoord2f (1.0, 1.0);
glNormal3f(0, 0, 1);
glVertex3f (10, 10, 0.0);
glTexCoord2f (0.0, 1.0);
glNormal3f(0, 0, 1);
glVertex3f (0.0, 10, 0.0);
glEnd ();
glDisable( GL_TEXTURE_2D);
}
I would like to apply this simple texture to a square. I call drawTexturedSquare() from inside a draw() function where I have already called
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
The problem is that this is the result I get, while I expected the square to be green on the main diagonal (upper-left to lower-right) and red on the secondary diagonal (upper-right to lower-left). Can someone explain why?
Besides, every time I run the program I get a different result:
I do not understand where this blue comes from... Can someone help me?
Each row of your data needs to be 4-byte aligned. Either pad each row with 0 0, or use an RGBA texture.
If you don't want to do the aforementioned, you can use:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
The reason your colors change is that, since you don't pad your rows correctly, OpenGL reads past the end of the array.
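For illustration, a sketch of the padded variant of the same 2x2 RGB texture: each row is 6 bytes of pixel data, so two padding bytes bring it up to the default 4-byte unpack alignment:
// Each 6-byte RGB row is padded with 2 bytes to satisfy the default GL_UNPACK_ALIGNMENT of 4.
GLubyte data[] = { 255,0,0,   0,255,0,   0,0,    // row 0 + padding
                   0,255,0,   255,0,0,   0,0 };  // row 1 + padding
// ...or keep the tightly packed array and tell OpenGL about it instead:
// glPixelStorei(GL_UNPACK_ALIGNMENT, 1);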
You may want to try GL_NEAREST for GL_TEXTURE_MIN_FILTER and GL_TEXTURE_MAG_FILTER
As it is right now, OpenGL is interpolating between the colors, creating a gradient (since your texture is being stretched from 2x2 to however big your screen is)
This doesn't explain why you're getting different results for your texture each time though.

OpenGL texture blending and translation

I've got two textures mapping to a surface, one is a checkerboard (the pattern on the floor) and one is a lightmap, both of which are blending and presenting just fine. What I'm having trouble with is making the lightmap texture translate prior to blending. I need to do this so I can simulate a flashlight scanning across the floor....
Code:
//Floor - Checkerboard
float fF = 3.0; //Floor Factor
//Checkerboard texture
glActiveTexture(GL_TEXTURE0);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, textures[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);//turn on mipmapping
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
//Spotlight, modulate on checkerboard
glActiveTexture(GL_TEXTURE1);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, textures[4]);
**glPushMatrix();
glMatrixMode(GL_TEXTURE);
glLoadIdentity();
glTranslatef(5.0, 5.0, 0.0);
glMatrixMode(GL_MODELVIEW);
glPopMatrix();**
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_BLEND);
float vals[4] = {0.0, 0.0, 0.0, 0.5};
glTexEnvfv(GL_TEXTURE_ENV, GL_TEXTURE_ENV_COLOR, vals);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_COMBINE);
glTexEnvf(GL_TEXTURE_ENV, GL_COMBINE_RGB, GL_INTERPOLATE);
glTexEnvf(GL_TEXTURE_ENV, GL_SRC0_RGB, GL_PREVIOUS);
glTexEnvf(GL_TEXTURE_ENV, GL_OPERAND0_RGB, GL_SRC_COLOR);
glTexEnvf(GL_TEXTURE_ENV, GL_SRC1_RGB, GL_TEXTURE);
glTexEnvf(GL_TEXTURE_ENV, GL_OPERAND1_RGB, GL_SRC_COLOR);
glTexEnvf(GL_TEXTURE_ENV, GL_SRC2_RGB, GL_CONSTANT);
glTexEnvf(GL_TEXTURE_ENV, GL_OPERAND2_RGB, GL_SRC_ALPHA);
float spot_x = 1;
float spot_y = 1;
glBegin(GL_QUADS);
glNormal3d(0, 1, 0);
glMultiTexCoord2f(GL_TEXTURE0, 0.0, fF);
glMultiTexCoord2f(GL_TEXTURE1, 0.0, spot_y);
glVertex3fv(d);
glMultiTexCoord2f(GL_TEXTURE0, fF, fF);
glMultiTexCoord2f(GL_TEXTURE1, spot_x, spot_y);
glVertex3fv(c);
glMultiTexCoord2f(GL_TEXTURE0, fF, 0.0);
glMultiTexCoord2f(GL_TEXTURE1, spot_x, 0.0);
glVertex3fv(e);
glMultiTexCoord2f(GL_TEXTURE0, 0.0, 0.0);
glMultiTexCoord2f(GL_TEXTURE1, 0.0, 0.0);
glVertex3fv(g);
glEnd();
I've tried putting the six lines of code inside the **'s in several locations, thinking maybe the OpenGL state machine needed it somewhere specific, but nothing seems to give me the effect I want.
Anyone done this before? I can't find any information in either of my OpenGL books, or online about translating AND blending textures. I might be looking for the wrong thing, but I've been beating my head against a wall for two days now. Any help at all would be awesome.
Thanks
For completeness, the problem is that texture coordinates fall in the [0,1] interval, and I was translating by integer values, so it appeared as though it wasn't translating at all.
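Also for completeness, a sketch of the working translation under that constraint (the 0.25 offset is just an illustrative value): translate texture unit 1's texture matrix by a fraction of the texture, since with [0,1] coordinates and the default GL_REPEAT wrapping, integer offsets map back onto the same texels:
// Translate the lightmap's (texture unit 1) texture matrix by a fractional amount.
glActiveTexture(GL_TEXTURE1);
glMatrixMode(GL_TEXTURE);
glLoadIdentity();
glTranslatef(0.25f, 0.25f, 0.0f);   // e.g. 0.25, not 5.0
glMatrixMode(GL_MODELVIEW);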

How do I make textures transparent in OpenGL?

I've tried to research this on Google but there don't appear to be any coherent, simple answers. Is this because it's not simple, or because I'm not using the correct keywords?
Nevertheless, this is the progress I've made so far.
Created 8 vertices to form 2 squares.
Created a texture with an alpha value of 200 (so, about 20% transparent).
Assigned the same texture to each square, which shows correctly.
Noticed that when I use a texture with 255 alpha, it appears brighter.
The init is something like the following:
glClearColor(0.0, 0.0, 0.0, 0.0);
glShadeModel(GL_FLAT);
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1, textureIds);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
int i, j;
GLubyte pixel;
for (i = 0; i < TEXTURE_HEIGHT; i++)
{
for (j = 0; j < TEXTURE_WIDTH; j++)
{
pixel = ((((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255);
texture[i][j][0] = pixel;
texture[i][j][1] = pixel;
texture[i][j][2] = pixel;
texture[i][j][3] = 200;
}
}
glBindTexture(GL_TEXTURE_2D, textureIds[0]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(
GL_TEXTURE_2D, 0, GL_RGBA,
TEXTURE_WIDTH, TEXTURE_HEIGHT,
0, GL_RGBA, GL_UNSIGNED_BYTE, texture);
This is somewhat similar to the code snippet from page 417 in the book, OpenGL Programming Guide, and creates a check pattern.
And then, the display function contains...
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
// Use model view so that rotation value is literal, not added.
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
// ... translation, etc ...
glBindTexture(GL_TEXTURE_2D, textureIds[0]);
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0); glVertex3f(-1.0, +1.0, 0.0); // top left
glTexCoord2f(0.0, 1.0); glVertex3f(-1.0, -1.0, 0.0); // bottom left
glTexCoord2f(1.0, 1.0); glVertex3f(+1.0, -1.0, 0.0); // bottom right
glTexCoord2f(1.0, 0.0); glVertex3f(+1.0, +1.0, 0.0); // top right
glEnd();
// not necessary to repeat, just good practice
glBindTexture(GL_TEXTURE_2D, textureIds[0]);
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0); glVertex3f(-0.5, +1.0, -1.0); // top left
glTexCoord2f(0.0, 1.0); glVertex3f(-0.5, -1.0, -1.0); // bottom left
glTexCoord2f(1.0, 1.0); glVertex3f(+1.5, -1.0, -1.0); // bottom right
glTexCoord2f(1.0, 0.0); glVertex3f(+1.5, +1.0, -1.0); // top right
glEnd();
glFlush();
glDisable(GL_TEXTURE_2D);
glPopMatrix();
SwapBuffers();
So, this renders a second square in the background; I can see it, but it looks like the squares are being blended with the background (I assume this because they are darker with an alpha of 200 than with 255) instead of with the texture behind...
As you can see, no transparency... How can I fix this?
So the other answer which was here but was deleted mentioned this: generally, for alpha blending to work correctly, you need to sort the objects from far to near in the coordinate system of the camera.
This is why your polygons are blended with the background. You can confirm that this is indeed the problem by disabling the depth test. Without the depth test, all the fragments are displayed and you'll be able to see the alpha blending.
More on this in this page.
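A sketch of that ordering for the two quads in the question: drawing the far quad (z = -1.0) before the near one (z = 0.0) lets the near quad blend against colors that are already in the framebuffer rather than against the clear color:
// Draw back-to-front so each transparent quad blends over what is already behind it.
glBindTexture(GL_TEXTURE_2D, textureIds[0]);
glBegin(GL_QUADS);                                       // far quad first (z = -1.0)
glTexCoord2f(0.0, 0.0); glVertex3f(-0.5, +1.0, -1.0);
glTexCoord2f(0.0, 1.0); glVertex3f(-0.5, -1.0, -1.0);
glTexCoord2f(1.0, 1.0); glVertex3f(+1.5, -1.0, -1.0);
glTexCoord2f(1.0, 0.0); glVertex3f(+1.5, +1.0, -1.0);
glEnd();
glBegin(GL_QUADS);                                       // near quad second (z = 0.0)
glTexCoord2f(0.0, 0.0); glVertex3f(-1.0, +1.0, 0.0);
glTexCoord2f(0.0, 1.0); glVertex3f(-1.0, -1.0, 0.0);
glTexCoord2f(1.0, 1.0); glVertex3f(+1.0, -1.0, 0.0);
glTexCoord2f(1.0, 0.0); glVertex3f(+1.0, +1.0, 0.0);
glEnd();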