Do I have to set up my GL context in a certain way to bind textures? I'm following a tutorial. I start by doing:
#define checkImageWidth 64
#define checkImageHeight 64
static GLubyte checkImage[checkImageHeight][checkImageWidth][4];
static GLuint texName;
void makeCheckImage(void)
{
    int i, j, c;
    for (i = 0; i < checkImageHeight; i++) {
        for (j = 0; j < checkImageWidth; j++) {
            c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
            checkImage[i][j][0] = (GLubyte) c;
            checkImage[i][j][1] = (GLubyte) c;
            checkImage[i][j][2] = (GLubyte) c;
            checkImage[i][j][3] = (GLubyte) 255;
        }
    }
}
void initt(void)
{
    glClearColor(0.0, 0.0, 0.0, 0.0);
    makeCheckImage();
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glGenTextures(1, &texName);
    glBindTexture(GL_TEXTURE_2D, texName);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 checkImage);
    engineGL.current.tex = texName;
}
In my rendering I do:
PolygonTesselator.Begin_Contour();
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glBindTexture(GL_TEXTURE_2D, current.tex);
if (layer[currentlayer].Shapes[i].Contour[c].DrawingPoints.size() > 0)
{
    glColor4f(
        layer[currentlayer].Shapes[i].Color.r,
        layer[currentlayer].Shapes[i].Color.g,
        layer[currentlayer].Shapes[i].Color.b,
        layer[currentlayer].Shapes[i].Color.a);
}
for (unsigned int j = 0; j < layer[currentlayer].Shapes[i].Contour[c].DrawingPoints.size(); ++j)
{
    gluTessVertex(PolygonTesselator.tobj,
                  &layer[currentlayer].Shapes[i].Contour[c].DrawingPoints[j][0],
                  &layer[currentlayer].Shapes[i].Contour[c].DrawingPoints[j][0]);
}
PolygonTesselator.End_Contour();
}
glDisable(GL_TEXTURE_2D);
}
However, it still renders the color and not the texture at all. I'd at least expect to see black or something, but it's as if the bind fails. Am I missing something?
Thanks
From that code, it looks like you don't set any UVs (texture coordinates).
Edit: Does it make any difference if you use GL_MODULATE instead of GL_DECAL? (I'm taking guesses here, because I suspect the problem lies in code you haven't provided, or in gluTessVertex itself...)
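For reference, here is a minimal sketch of how per-vertex UVs could be fed through the GLU tessellator. This is hypothetical: it assumes each DrawingPoints entry stores x, y, z followed by u, v as GLdouble, which the posted code doesn't show.

void tessVertexCallback(void *data)
{
    const GLdouble *v = (const GLdouble *) data;
    glTexCoord2d(v[3], v[4]);   // supply the UV before the position
    glVertex3dv(v);
}

// registered once, when the tessellator object is created:
gluTessCallback(PolygonTesselator.tobj, GLU_TESS_VERTEX,
                (GLvoid (*)()) tessVertexCallback);

Without per-vertex texture coordinates, every fragment samples the texture at the same spot, so you see a flat color instead of the image.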
So my problem is that I load multiple textures at startup and then store all of the IDs so that I can bind them when needed. I know the IDs are being stored correctly, because when I debug both the assignment and the usage, the IDs match. Yet every binding uses the last texture that I loaded. Here is my code:
GLuint TextureLoader::LoadTexture(const char* fileName, Material& material, int width, int height) {
    GLuint textureImage;
    FILE* textureFile = fopen(fileName, "rb");
    if (textureFile == NULL) {
        return 0;
    }
    unsigned char* imageData = (unsigned char*)malloc(width * height * 3);
    char header[54];
    fread(header, 1, 54, textureFile);                    // skip the 54-byte BMP header
    fread(imageData, width * height * 3, 1, textureFile); // raw 24-bit pixel data
    fclose(textureFile);
    for (int i = 0; i < width * height; ++i) {            // swap the red and blue channels
        int nextIndex = i * 3;
        unsigned char a = imageData[nextIndex];
        unsigned char b = imageData[nextIndex + 2];
        imageData[nextIndex] = b;
        imageData[nextIndex + 2] = a;
    }
    glEnable(GL_TEXTURE_2D);
    glGenTextures(1, &textureImage);
    glBindTexture(GL_TEXTURE_2D, textureImage);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    /*glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_LINEAR_MIPMAP_NEAREST );
    glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_LINEAR );
    glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,GL_REPEAT );
    glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,GL_REPEAT );
    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE);
    gluBuild2DMipmaps( GL_TEXTURE_2D, 3, width, height,GL_RGB, GL_UNSIGNED_BYTE, imageData );*/
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_BGR, GL_UNSIGNED_BYTE, imageData);
    glGenerateMipmap(GL_TEXTURE_2D);
    free(imageData);
    return textureImage;
}
Here is my usage of this code:
if (showTexture) {
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, material.texture);
}
Here is my load call:
green1.texture = TextureLoader::LoadTexture("/Users/hewitt/Desktop/OpenGLImages/face6.bmp",green1, 256, 256);
And then I just use:
glTexCoord2f(1.0f,0.0f);
where the coordinates change depending on the corner being drawn. This draws the one texture correctly; it just repeats that same texture even when I glBind a different ID.
Any help is greatly appreciated.
Edit ----------
glBegin(mode);
Spatial::Vector3 normal = Mesh::calculateNormal(pointGroup);
glNormal3f(normal.x, normal.y, normal.z);
Material material = mesh.getMaterial();
if (showTexture) {
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, material.texture);
}
int counter = 0;
for (Spatial::Vector3 point : pointGroup) {
    if (showTexture == false) {
        Material::Colour colour = material.getColour();
        glColor3f(colour.red, colour.green, colour.blue);
    } else {
        if (counter == 0) {
            glTexCoord2f(1.0f, 0.0f);
        } else if (counter == 1) {
            glTexCoord2f(1.0f, 1.0f);
        } else if (counter == 2) {
            glTexCoord2f(0.0f, 1.0f);
        } else if (counter == 3) {
            glTexCoord2f(0.0f, 0.0f);
        }
    }
    glVertex3f(point.x, point.y, point.z);
    counter++;
}
glEnd();
You cannot call glEnable or glBindTexture between glBegin and glEnd. You must bind your texture before glBegin. In fact, the set of OpenGL functions you can call between glBegin and glEnd is limited to those transferring vertex attributes (glVertex, glColor, glTexCoord, etc.).
Note that the immediate-mode API (that is, glBegin/glEnd and friends) is legacy and has been deprecated for more than 10 years. I suggest you switch to the modern OpenGL pipeline; that will sort many things out for you.
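A minimal sketch of the corrected ordering, reusing the names from the question's edit (the per-corner if/else chain is compressed into ternaries with the same result for the first four vertices):

Material material = mesh.getMaterial();
if (showTexture) {
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, material.texture); // bind BEFORE glBegin
}
glBegin(mode);                                      // from here on: vertex attributes only
glNormal3f(normal.x, normal.y, normal.z);
int counter = 0;
for (Spatial::Vector3 point : pointGroup) {
    // glTexCoord*, glColor*, glNormal*, glVertex* are legal inside glBegin/glEnd
    glTexCoord2f(counter == 0 || counter == 1 ? 1.0f : 0.0f,
                 counter == 1 || counter == 2 ? 1.0f : 0.0f);
    glVertex3f(point.x, point.y, point.z);
    counter++;
}
glEnd();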
I have a problem with FreeType and OpenGL. I need to draw all the loaded symbols onto a single texture. Here's the code:
FT_Init_FreeType(&lib);
FT_New_Face(lib, "C:\\verdana.ttf", 0, &face);
FT_Set_Pixel_Sizes(face, 0, size);
auto ww = 256 * size;
auto hh = size;
std::vector<unsigned char> buffer(ww * hh, 0);
int off = 0;
for (int c = 0; c < 256; c++)
{
    FT_UInt GlyphIndex;
    GlyphIndex = FT_Get_Char_Index(face, c);
    FT_Load_Char(face, GlyphIndex, FT_LOAD_RENDER);
    FT_Bitmap bmp = face->glyph->bitmap;
    int advance = (face->glyph->advance.x >> 6);
    int bW = bmp.width;
    int bH = bmp.rows;
    for (int h = 0; h < bH; ++h) {
        for (int w = 0; w < bW; ++w) {
            buffer[h * bW + off + w] = bmp.buffer[w + bW * h];
        }
    }
    off += advance;
}
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, ww, hh, 0, GL_ALPHA, GL_UNSIGNED_BYTE, &buffer[0]);
I tried many ways to do this, but all I get is an absolutely black texture...
What's wrong with my code?
I think the problem is that some values between 0 and 255 are not visible or drawable characters, and that's why you get nothing.
You should check GlyphIndex:
GlyphIndex = FT_Get_Char_Index(face, c);
if (!GlyphIndex) continue;
Then you can expect FreeType to render the rest of the characters for you.
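Applied to the loop from the question, the check sits right after the lookup. As an aside (this is a guess about intent, not the poster's code): since GlyphIndex is a glyph index rather than a character code, FT_Load_Glyph is the matching load call; FT_Load_Char expects the character code itself.

for (int c = 0; c < 256; c++)
{
    FT_UInt GlyphIndex = FT_Get_Char_Index(face, c);
    if (!GlyphIndex)
        continue;                                  // the face has no glyph for this code
    FT_Load_Glyph(face, GlyphIndex, FT_LOAD_RENDER);
    // ... copy face->glyph->bitmap into the atlas buffer as before ...
}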
Hallelujah, I got the solution!
The code should look like this:
for (int c = 0; c < 256; c++)
{
    FT_UInt GlyphIndex;
    GlyphIndex = FT_Get_Char_Index(face, c);
    FT_Load_Char(face, GlyphIndex, FT_LOAD_RENDER);
    FT_Bitmap bmp = face->glyph->bitmap;
    int advance = (face->glyph->advance.x >> 6);
    int bW = bmp.width;
    int bH = bmp.rows;
    for (int h = 0; h < bH; ++h) {
        for (int w = 0; w < bW; ++w) {
            // the destination stride is the atlas width ww, not the glyph width bW
            buffer[h * ww + off + w] = bmp.buffer[w + bW * h];
        }
    }
    off += advance;
}
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, ww, hh, 0, GL_RED, GL_UNSIGNED_BYTE, &buffer[0]);
I am creating a flat surface and applying a texture to it. But for some reason the texture is not being applied properly. I am getting something like this.
This is the code that I am using (I have a class for applying textures):
for (int i = 0; i < 512; i++) {
    for (int j = 0; j < 512; j++) {
        int c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
        checkImage[i][j][0] = (GLubyte) c;
        checkImage[i][j][1] = (GLubyte) c;
        checkImage[i][j][2] = (GLubyte) c;
        checkImage[i][j][3] = (GLubyte) 255;
        //cout<<"("<<(int)dataForPixel.rgbtRed<<","<<(int)dataForPixel.rgbtGreen<<","<<(int)dataForPixel.rgbtBlue<<")";
    }
}
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageX, imageY, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, checkImage);

glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glBindTexture(GL_TEXTURE_2D, texName);
glBegin(GL_QUADS);
glTexCoord2d(0.0, 0.0); glVertex3d(-100, -100, 0.0);
glTexCoord2d(0.0, 1.0); glVertex3d(-100,  100, 0.0);
glTexCoord2d(1.0, 1.0); glVertex3d( 100,  100, 0.0);
glTexCoord2d(1.0, 0.0); glVertex3d( 100, -100, 0.0);
glEnd();
The image is a 512 x 512 image.
Why is the texture not applying properly?
UPDATE:
The c value just produces a chessboard pattern of alternating black and white squares, 8 pixels on a side.
OK, I found the problem. It turns out I was allocating the checkImage memory dynamically, but not in one go, so there were gaps in the memory that OpenGL didn't know about.
Once I fixed it to allocate the memory as one big chunk, it worked.
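For reference, a minimal sketch of the difference; the variable names here are illustrative, not the actual code. glTexImage2D expects the pixels as one contiguous block of width * height * 4 bytes:

// Broken: an array of row pointers; each row is a separate allocation, so the
// pixels are NOT contiguous and OpenGL reads past the first row into garbage.
GLubyte **rows = new GLubyte*[512];
for (int i = 0; i < 512; ++i)
    rows[i] = new GLubyte[512 * 4];

// Works: one contiguous allocation, indexed by hand.
// Channel k of pixel (i, j) lives at pixels[(i * 512 + j) * 4 + k].
GLubyte *pixels = new GLubyte[512 * 512 * 4];
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 512, 512, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, pixels);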
I'm programming in C++ with OpenGL.
I'm supposed to make a texture, and I wrote the following code:
void makeCheckImage(void) {
    int i, j, c;
    for (i = 0; i < checkImageHeight; i++) {
        for (j = 0; j < checkImageWidth; j++) {
            c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
            checkImage[i][j][0] = (GLubyte) c;
            checkImage[i][j][1] = (GLubyte) c;
            checkImage[i][j][2] = (GLubyte) c;
            checkImage[i][j][3] = (GLubyte) 255;
        }
    }
}

void init(void) {
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glShadeModel(GL_FLAT);
    glEnable(GL_DEPTH_TEST);
    makeCheckImage();
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glGenTextures(1, &texName);
    glBindTexture(GL_TEXTURE_2D, texName);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 checkImage);
}
My problem is that this beautiful piece of code (or maybe not) applies the texture to the whole scene! I just want it applied to one object...
Can anybody help me?
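Texturing is global state: once enabled, it applies to everything you draw until it is disabled. Enable it only around the one object that should be textured: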
glEnable(GL_TEXTURE_2D);
//Draw object
glDisable(GL_TEXTURE_2D);
I'm trying to generate textures like so:
#define checkImageWidth 64
#define checkImageHeight 64
static GLubyte checkImage[checkImageHeight][checkImageWidth][4];
static GLubyte otherImage[checkImageHeight][checkImageWidth][4];
static GLuint texName[2];
void makeCheckImages(void)
{
    int i, j, c;
    for (i = 0; i < checkImageHeight; i++) {
        for (j = 0; j < checkImageWidth; j++) {
            c = (((i & 0x8) == 0) ^ ((j & 0x8) == 0)) * 255;
            checkImage[i][j][0] = (GLubyte) c;
            checkImage[i][j][1] = (GLubyte) c;
            checkImage[i][j][2] = (GLubyte) c;
            checkImage[i][j][3] = (GLubyte) 255;
            c = (((i & 0x10) == 0) ^ ((j & 0x10) == 0)) * 255;
            otherImage[i][j][0] = (GLubyte) c;
            otherImage[i][j][1] = (GLubyte) 0;
            otherImage[i][j][2] = (GLubyte) 0;
            otherImage[i][j][3] = (GLubyte) 255;
        }
    }
}

void init(void)
{
    glClearColor(1.0, 0.0, 0.0, 0.0);
    glShadeModel(GL_FLAT);
    glEnable(GL_DEPTH_TEST);
    makeCheckImages();
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glGenTextures(2, texName);

    glBindTexture(GL_TEXTURE_2D, texName[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 checkImage);

    glBindTexture(GL_TEXTURE_2D, texName[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,
                 checkImageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                 otherImage);

    glEnable(GL_TEXTURE_2D);
    engineGL.current.tex = texName[1];
}
But when I check the values of texName[0] and texName[1], they are both 0. I do not understand why; what am I doing wrong? Thanks.
You are probably calling glGenTextures before creating the OpenGL context, which will generate a GL error. Don't try to create textures before you've initialized OpenGL.
I had this problem, and glGetError() was returning 0.
In my case it was caused by calling glGenTextures(...) on a different thread to the one the GL context was created on (because I was loading the textures asynchronously). Calling it from the main thread after the async load made glGenTextures(...) start working again.
Try calling glGetError. It should tell you in more detail what went wrong. In general, if an OpenGL function fails, the first thing you do should be to ask OpenGL why it failed. It knows, because it just tried to execute the function.
It's much harder for us to guess at what might have gone wrong.
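For example, a minimal check right after the suspect call (a sketch; call glGetError in a loop if several errors may be queued):

GLenum err = glGetError();
if (err != GL_NO_ERROR)
    fprintf(stderr, "GL error after glGenTextures: 0x%04x\n", err); // e.g. 0x0502 = GL_INVALID_OPERATION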
In my case, I was "lazy loading" my texture, so the glGenTextures call was inside a glBegin/glEnd block!
Before calling glGenTextures, your OpenGL context must be created and made current (wglMakeCurrent, glXMakeCurrent, etc.).
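A minimal GLUT-style sketch of that ordering (GLUT is an assumption here; the same applies to any toolkit's context creation):

int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    glutCreateWindow("textures");   // creates the GL context and makes it current
    init();                         // safe: glGenTextures now has a current context
    glutDisplayFunc(display);       // 'display' is a hypothetical render callback
    glutMainLoop();
    return 0;
}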