I'm trying to generate textures like so:
#define checkImageWidth 64
#define checkImageHeight 64
static GLubyte checkImage[checkImageHeight][checkImageWidth][4];
static GLubyte otherImage[checkImageHeight][checkImageWidth][4];
static GLuint texName[2];
void makeCheckImages(void)
{
    int i, j, c;
    for (i = 0; i < checkImageHeight; i++) {
        for (j = 0; j < checkImageWidth; j++) {
            /* 8x8-pixel checkerboard: white where exactly one of the
               row/column checks is set */
            c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
            checkImage[i][j][0] = (GLubyte) c;
            checkImage[i][j][1] = (GLubyte) c;
            checkImage[i][j][2] = (GLubyte) c;
            checkImage[i][j][3] = (GLubyte) 255;
            /* coarser 16x16 red/black checks for the second texture */
            c = (((i & 0x10) == 0) ^ ((j & 0x10) == 0)) * 255;
            otherImage[i][j][0] = (GLubyte) c;
            otherImage[i][j][1] = (GLubyte) 0;
            otherImage[i][j][2] = (GLubyte) 0;
            otherImage[i][j][3] = (GLubyte) 255;
        }
    }
}
void init(void)
{
    glClearColor(1.0, 0.0, 0.0, 0.0);
    glShadeModel(GL_FLAT);
    glEnable(GL_DEPTH_TEST);
    makeCheckImages();
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glGenTextures(2, texName);
    glBindTexture(GL_TEXTURE_2D, texName[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 checkImage);
    glBindTexture(GL_TEXTURE_2D, texName[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 otherImage);
    glEnable(GL_TEXTURE_2D);
    engineGL.current.tex = texName[1];
}
But when I check the values of texName[0] and texName[1], they are both 0. I do not understand why; what am I doing wrong? Thanks.
You are probably calling glGenTextures before creating the OpenGL context; calls made without a current context have no effect. Don't try to create textures before you've initialized OpenGL.
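A minimal sketch of the right ordering, assuming GLUT (which this code resembles; substitute your own windowing setup, and note that display stands in for whatever draw callback you have):

/* Sketch, assuming GLUT: glutCreateWindow creates the GL context,
   so init() and its glGenTextures call must come after it. */
int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    glutCreateWindow("checker");   /* context exists from here on */
    init();                        /* glGenTextures now works     */
    glutDisplayFunc(display);      /* display: your draw callback */
    glutMainLoop();
    return 0;
}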
I had this problem, and glGetError() was returning 0.
In my case it was caused by calling glGenTextures(...) on a different thread from the one the GL context was created on (because I was loading the textures asynchronously). Calling it from the main thread after the async load made glGenTextures(...) start working again.
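A sketch of that split: decode the file on a worker thread, and make every GL call on the thread that owns the context (loadPixelsFromDisk is a hypothetical stand-in for your loader; it must make no GL calls):

#include <future>
#include <vector>

// Kick off the decode asynchronously, with no GL calls involved.
std::future<std::vector<unsigned char>> pending =
    std::async(std::launch::async,
               [] { return loadPixelsFromDisk("tex.bmp"); });

// Later, on the thread the GL context was created on:
GLuint uploadTexture(int w, int h)
{
    std::vector<unsigned char> pixels = pending.get(); // wait for decode
    GLuint tex = 0;
    glGenTextures(1, &tex);             // safe: we're on the GL thread
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
    return tex;
}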
Try calling glGetError. It should tell you in more detail what went wrong. In general, if an OpenGL function fails, the first thing to do is ask OpenGL why it failed; it knows, because it just tried to execute the function.
It's much harder for us to guess at what might have gone wrong.
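For example, a small helper (error flags can queue up, so drain them in a loop):

#include <stdio.h>

/* Print and clear every pending GL error. */
void checkGLErrors(const char *where)
{
    GLenum err;
    while ((err = glGetError()) != GL_NO_ERROR)
        fprintf(stderr, "GL error 0x%04X after %s\n", err, where);
}

/* Usage, right after the suspect call: */
glGenTextures(2, texName);
checkGLErrors("glGenTextures");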
In my case, I was "lazy loading" my texture, so the glGenTextures call ended up inside a glBegin/glEnd block!
Before calling glGenTextures, your OpenGL context must be created and made current (wglMakeCurrent, glXMakeCurrent, or the equivalent for your platform).
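For instance, on Windows (a hedged sketch; it assumes the pixel format on hdc has already been set, and the GLX/EGL calls are analogous):

/* Create and bind the context before any texture calls. */
HGLRC ctx = wglCreateContext(hdc);  /* hdc: your window's device context */
wglMakeCurrent(hdc, ctx);           /* context is current from here on   */
glGenTextures(1, &texName);         /* now this can succeed              */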
Related
I created a class for OpenGL textures and for some applications, a static object would be useful.
The problem is, when I create a texture as a static or global object, my program crashes, while as a local object everything works fine. I have absolutely no idea what is going on.
This is my constructor:
Texture::Texture(std::string file, bool bitmap):
    textureName("tex"), transparent(false) {
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    int texWidth, texHeight;
    unsigned char *data = SOIL_load_image(file.c_str(), &texWidth, &texHeight, 0, SOIL_LOAD_RGBA);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texWidth, texHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    if(bitmap) {
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
        // the mag filter has no mipmap variants; only GL_NEAREST or GL_LINEAR is valid here
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glGenerateMipmap(GL_TEXTURE_2D);
    }
    else {
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    }
    glBindTexture(GL_TEXTURE_2D, 0);
    // scan the alpha channel for any non-opaque pixel
    for(int i = 3; i < texWidth * texHeight * 4; i += 4) {
        if(data[i] != 0xff) {
            transparent = true;
            break;
        }
    }
    textureSize = glm::vec2(texWidth, texHeight);
    SOIL_free_image_data(data);
}
OpenGL must be initialized before you can call any OpenGL functions. (More accurately: you need to have "made an OpenGL context current")
The way you do that depends on which libraries you are using.
One thing that all the libraries have in common is: there are some functions you need to call, which you usually call in main. (for example, glfwCreateWindow and glfwMakeContextCurrent in GLFW)
Since global objects are created before main is called, you are trying to call OpenGL functions before you have an OpenGL context. This doesn't work.
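If you still want something close to a global texture, one workaround is to defer construction until first use, after the context exists. A sketch using the Texture class from the question and GLFW (the file name is a made-up example):

// The Texture is constructed on the first call, which we arrange to
// happen only after glfwMakeContextCurrent has run.
Texture& defaultTexture()
{
    static Texture tex("default.png", true);
    return tex;
}

int main()
{
    glfwInit();
    GLFWwindow *win = glfwCreateWindow(640, 480, "demo", nullptr, nullptr);
    glfwMakeContextCurrent(win);       // context exists from here on
    Texture &tex = defaultTexture();   // the GL calls in the ctor now work
    // ... render loop ...
}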
I am creating a flat surface and applying a texture to it. But for some reason the texture is not being applied properly. I am getting something like this.
This is the code that I am using (I have a class for applying textures):
for(int i = 0; i < 512; i++) {
    for(int j = 0; j < 512; j++) {
        int c = ((((i&0x8)==0)^(((j&0x8))==0)))*255;
        checkImage[i][j][0] = (GLubyte) c;
        checkImage[i][j][1] = (GLubyte) c;
        checkImage[i][j][2] = (GLubyte) c;
        checkImage[i][j][3] = (GLubyte) 255;
        //cout<<"("<<(int)dataForPixel.rgbtRed<<","<<(int)dataForPixel.rgbtGreen<<","<<(int)dataForPixel.rgbtBlue<<")";
    }
}
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RGBA,
             imageX,
             imageY,
             0,
             GL_RGBA,
             GL_UNSIGNED_BYTE,
             checkImage);

glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glBindTexture(GL_TEXTURE_2D, texName);

glBegin(GL_QUADS);
    glTexCoord2d(0.0, 0.0); glVertex3d(-100, -100, 0.0);
    glTexCoord2d(0.0, 1.0); glVertex3d(-100,  100, 0.0);
    glTexCoord2d(1.0, 1.0); glVertex3d( 100,  100, 0.0);
    glTexCoord2d(1.0, 0.0); glVertex3d( 100, -100, 0.0);
glEnd();
The image is a 512 x 512 image.
Why is the texture not being applied properly?
UPDATE:
The c value just produces a chessboard pattern of alternating black and white squares, 8 pixels wide and 8 pixels high.
OK, I found the problem. It seems I was allocating the checkImage memory dynamically, but not in one go, so there were gaps in the memory that OpenGL did not account for.
Once I fixed it to allocate the memory as one big chunk, it worked.
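For reference, glTexImage2D reads width * height * 4 contiguous bytes for an RGBA texture (subject to GL_UNPACK_ALIGNMENT); an array of per-row allocations is not contiguous. A sketch of the difference:

// Broken: each row is a separate allocation, so the pixel data is
// not one contiguous block and OpenGL reads the wrong memory.
// GLubyte **img = new GLubyte*[512];
// for (int i = 0; i < 512; i++) img[i] = new GLubyte[512 * 4];

// Fixed: one contiguous chunk, indexed by hand.
GLubyte *img = new GLubyte[512 * 512 * 4];
for (int i = 0; i < 512; i++) {
    for (int j = 0; j < 512; j++) {
        GLubyte *px = img + (i * 512 + j) * 4;
        int c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
        px[0] = px[1] = px[2] = (GLubyte) c;
        px[3] = 255;
    }
}
// img can now be passed directly as the last argument of glTexImage2D.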
I'm programming in C++ with OpenGL.
I'm supposed to make a texture, and I wrote the following code:
void makeCheckImage(void){
    int i, j, c;
    for (i = 0; i < checkImageHeight; i++) {
        for (j = 0; j < checkImageWidth; j++) {
            c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
            checkImage[i][j][0] = (GLubyte) c;
            checkImage[i][j][1] = (GLubyte) c;
            checkImage[i][j][2] = (GLubyte) c;
            checkImage[i][j][3] = (GLubyte) 255;
        }
    }
}
void init(void){
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glShadeModel(GL_FLAT);
    glEnable(GL_DEPTH_TEST);
    makeCheckImage();
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glGenTextures(1, &texName);
    glBindTexture(GL_TEXTURE_2D, texName);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 checkImage);
}
My problem is that this beautiful piece of code (or maybe not) is applying the texture to the whole scene! I just want it to apply to one object...
Can anybody help me?
Texturing is global state in the fixed-function pipeline, so enable it only while drawing the object you want textured:

glEnable(GL_TEXTURE_2D);
//Draw object
glDisable(GL_TEXTURE_2D);
I'm working on a game engine that uses BMP texture files on MD3 model files. Unfortunately for me, it isn't working at all. I've tried everything, to no avail. Since I wrote the loaders myself, I looked at them and fixed a few bugs, but I still just get a gray, lit model.
Here's my rendering code:
void drawmd3(md3& model, GLuint tex)
{
    int i = 0;
    glLoadIdentity();
    glTranslatef(-1.5, 0, -24);
    glRotatef(270, 0, 0, 1);
    glRotatef(-90, 0, 1, 0);
    glRotatef(a, 0, 0, 1);
    while (i < model.surfnum)
    {
        glBindTexture(GL_TEXTURE_2D, tex);
        int j = 0;
        while (j < model.surfs.at(i).trinum)
        {
            int tmp  = model.surfs.at(i).tris.at(j).indexes[0];
            int tmp1 = model.surfs.at(i).tris.at(j).indexes[1];
            int tmp2 = model.surfs.at(i).tris.at(j).indexes[2];
            float norms[3];
            norms[0] = convert_normals0(model.surfs.at(i).verts.at(tmp).normal);
            norms[1] = convert_normals1(model.surfs.at(i).verts.at(tmp).normal);
            norms[2] = convert_normals2(model.surfs.at(i).verts.at(tmp).normal);
            float norms1[3];
            norms1[0] = convert_normals0(model.surfs.at(i).verts.at(tmp1).normal);
            norms1[1] = convert_normals1(model.surfs.at(i).verts.at(tmp1).normal);
            norms1[2] = convert_normals2(model.surfs.at(i).verts.at(tmp1).normal);
            float norms2[3];
            norms2[0] = convert_normals0(model.surfs.at(i).verts.at(tmp2).normal);
            norms2[1] = convert_normals1(model.surfs.at(i).verts.at(tmp2).normal);
            norms2[2] = convert_normals2(model.surfs.at(i).verts.at(tmp2).normal);
            glBegin(GL_TRIANGLES);
            glNormal3f(norms[0], norms[1], norms[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp).st[0], 1-model.surfs.at(i).st.at(tmp).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp).coord[0]/64, model.surfs.at(i).verts.at(tmp).coord[1]/64, model.surfs.at(i).verts.at(tmp).coord[2]/64);
            glNormal3f(norms1[0], norms1[1], norms1[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp1).st[0], 1-model.surfs.at(i).st.at(tmp1).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp1).coord[0]/64, model.surfs.at(i).verts.at(tmp1).coord[1]/64, model.surfs.at(i).verts.at(tmp1).coord[2]/64);
            glNormal3f(norms2[0], norms2[1], norms2[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp2).st[0], 1-model.surfs.at(i).st.at(tmp2).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp2).coord[0]/64, model.surfs.at(i).verts.at(tmp2).coord[1]/64, model.surfs.at(i).verts.at(tmp2).coord[2]/64);
            glEnd();
            j++;
        }
        glBindTexture(GL_TEXTURE_2D, 0);
        i++;
    }
}
Here's my init code:
glEnable(GL_TEXTURE_2D);
glGenTextures(1,&tex[0]);
glBindTexture(GL_TEXTURE_2D, tex[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, img.img.imgwidth, img.img.imgheight, 0, GL_RGBA8, GL_UNSIGNED_BYTE, img.data);
That's your problem right there:
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RGBA8,
img.img.imgwidth,
img.img.imgheight,
0,
GL_RGBA8, <<<<<<<<<<----------
GL_UNSIGNED_BYTE,
img.data);
The token you're using for the format parameter is not allowed. From the reference:
format
Specifies the format of the pixel data. The following symbolic values are accepted: GL_COLOR_INDEX, GL_RED, GL_GREEN, GL_BLUE, GL_ALPHA, GL_RGB, GL_BGR, GL_RGBA, GL_BGRA, GL_LUMINANCE, and GL_LUMINANCE_ALPHA.
Thus no texture will be loaded, and you end up with the default white of a texture that was never set up.
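The corrected call keeps GL_RGBA8 as the internal format but passes GL_RGBA for format:

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8,
             img.img.imgwidth, img.img.imgheight, 0,
             GL_RGBA,           /* external format: GL_RGBA, not GL_RGBA8 */
             GL_UNSIGNED_BYTE, img.data);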
Do I have to set up my GL context in a certain way to bind textures? I'm following a tutorial. I start by doing:
#define checkImageWidth 64
#define checkImageHeight 64
static GLubyte checkImage[checkImageHeight][checkImageWidth][4];
static GLuint texName;
void makeCheckImage(void)
{
    int i, j, c;
    for (i = 0; i < checkImageHeight; i++) {
        for (j = 0; j < checkImageWidth; j++) {
            c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
            checkImage[i][j][0] = (GLubyte) c;
            checkImage[i][j][1] = (GLubyte) c;
            checkImage[i][j][2] = (GLubyte) c;
            checkImage[i][j][3] = (GLubyte) 255;
        }
    }
}
void initt(void)
{
    glClearColor(0.0, 0.0, 0.0, 0.0);
    makeCheckImage();
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glGenTextures(1, &texName);
    glBindTexture(GL_TEXTURE_2D, texName);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 checkImage);
    engineGL.current.tex = texName;
}
In my rendering I do:
PolygonTesselator.Begin_Contour();
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glBindTexture(GL_TEXTURE_2D, current.tex);
if(layer[currentlayer].Shapes[i].Contour[c].DrawingPoints.size() > 0)
{
glColor4f(
layer[currentlayer].Shapes[i].Color.r,
layer[currentlayer].Shapes[i].Color.g,
layer[currentlayer].Shapes[i].Color.b,
layer[currentlayer].Shapes[i].Color.a);
}
for(unsigned int j = 0; j < layer[currentlayer].Shapes[i].Contour[c].DrawingPoints.size(); ++j)
{
gluTessVertex(PolygonTesselator.tobj,&layer[currentlayer].Shapes[i].Contour[c].DrawingPoints[j][0],
&layer[currentlayer].Shapes[i].Contour[c].DrawingPoints[j][0]);
}
PolygonTesselator.End_Contour();
}
glDisable(GL_TEXTURE_2D);
}
However, it still renders the color and not the texture at all. I'd at least expect to see black or something, but it's as if the bind fails. Am I missing something?
Thanks
From that code, it looks like you don't set any UVs (texture coordinates).
Edit: Does it make any difference using GL_MODULATE instead of GL_DECAL? (I'm taking guesses here, because I suspect the problem lies in code you haven't provided, or in gluTessVertex itself...)
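If the env mode isn't it, the missing UVs are the likelier culprit. One way to supply them is from the tessellator's vertex callback; a hedged sketch (the 0.01 planar-mapping scale is arbitrary, and on Windows the function needs the CALLBACK qualifier):

/* Emit a texture coordinate before each tessellated vertex. */
void tessVertex(void *data)
{
    GLdouble *v = (GLdouble *) data;         /* the pointer given to gluTessVertex */
    glTexCoord2d(v[0] * 0.01, v[1] * 0.01);  /* planar mapping from x,y */
    glVertex3dv(v);
}

/* Registered once on the tessellator object: */
gluTessCallback(PolygonTesselator.tobj, GLU_TESS_VERTEX,
                (void (*)()) tessVertex);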