OpenGL being stubborn, not allowing texture mapping - C++

I'm working on a game engine that applies BMP textures to MD3 models. Unfortunately for me, the texturing isn't working at all; I've tried everything, to no avail. Since I wrote the loaders myself, I went back through them and fixed a few bugs, but I still just get a gray, lit model.
Here's my rendering code:
void drawmd3(md3& model, GLuint tex)
{
    int i = 0;
    glLoadIdentity();
    glTranslatef(-1.5,0,-24);
    glRotatef(270,0,0,1);
    glRotatef(-90,0,1,0);
    glRotatef(a,0,0,1);
    while(i<model.surfnum)
    {
        glBindTexture(GL_TEXTURE_2D, tex);
        int j = 0;
        while(j<model.surfs.at(i).trinum)
        {
            int tmp = model.surfs.at(i).tris.at(j).indexes[0];
            int tmp1 = model.surfs.at(i).tris.at(j).indexes[1];
            int tmp2 = model.surfs.at(i).tris.at(j).indexes[2];
            float norms[3];
            norms[0] = convert_normals0(model.surfs.at(i).verts.at(tmp).normal);
            norms[1] = convert_normals1(model.surfs.at(i).verts.at(tmp).normal);
            norms[2] = convert_normals2(model.surfs.at(i).verts.at(tmp).normal);
            float norms1[3];
            norms1[0] = convert_normals0(model.surfs.at(i).verts.at(tmp1).normal);
            norms1[1] = convert_normals1(model.surfs.at(i).verts.at(tmp1).normal);
            norms1[2] = convert_normals2(model.surfs.at(i).verts.at(tmp1).normal);
            float norms2[3];
            norms2[0] = convert_normals0(model.surfs.at(i).verts.at(tmp2).normal);
            norms2[1] = convert_normals1(model.surfs.at(i).verts.at(tmp2).normal);
            norms2[2] = convert_normals2(model.surfs.at(i).verts.at(tmp2).normal);
            glBegin(GL_TRIANGLES);
            glNormal3f(norms[0],norms[1],norms[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp).st[0],1-model.surfs.at(i).st.at(tmp).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp).coord[0]/64, model.surfs.at(i).verts.at(tmp).coord[1]/64, model.surfs.at(i).verts.at(tmp).coord[2]/64);
            glNormal3f(norms1[0],norms1[1],norms1[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp1).st[0],1-model.surfs.at(i).st.at(tmp1).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp1).coord[0]/64, model.surfs.at(i).verts.at(tmp1).coord[1]/64, model.surfs.at(i).verts.at(tmp1).coord[2]/64);
            glNormal3f(norms2[0],norms2[1],norms2[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp2).st[0],1-model.surfs.at(i).st.at(tmp2).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp2).coord[0]/64, model.surfs.at(i).verts.at(tmp2).coord[1]/64, model.surfs.at(i).verts.at(tmp2).coord[2]/64);
            glEnd();
            j++;
        }
        glBindTexture(GL_TEXTURE_2D, 0);
        i++;
    }
}
Here's my init code:
glEnable(GL_TEXTURE_2D);
glGenTextures(1,&tex[0]);
glBindTexture(GL_TEXTURE_2D, tex[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, img.img.imgwidth, img.img.imgheight, 0, GL_RGBA8, GL_UNSIGNED_BYTE, img.data);

That's your problem right there:
glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RGBA8,
             img.img.imgwidth,
             img.img.imgheight,
             0,
             GL_RGBA8,          <<<<<<<<<<----------
             GL_UNSIGNED_BYTE,
             img.data);
The token you're using for the format parameter is not allowed. From the reference:
format
Specifies the format of the pixel data. The following symbolic values are accepted: GL_COLOR_INDEX, GL_RED, GL_GREEN, GL_BLUE, GL_ALPHA, GL_RGB, GL_BGR, GL_RGBA, GL_BGRA, GL_LUMINANCE, and GL_LUMINANCE_ALPHA.
The call is therefore rejected (it raises GL_INVALID_ENUM), no texture data is uploaded, and you end up with the default white of a texture that was never set up, which your lighting then turns into the flat gray you're seeing.
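For reference, a corrected upload could look like the following sketch (this assumes your BMP loader really hands back tightly packed 8-bit RGBA in img.data; if it produces 24-bit BGR instead, the format/type pair changes accordingly):
glTexImage2D(GL_TEXTURE_2D,
             0,                 // mipmap level
             GL_RGBA8,          // internal format: how OpenGL stores the texture
             img.img.imgwidth,
             img.img.imgheight,
             0,                 // border, must be 0
             GL_RGBA,           // format of the client-side pixel data
             GL_UNSIGNED_BYTE,  // type of each channel
             img.data);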

Related

OpenGL Texture DSA not showing texture

I'm attempting to use the newer DSA functions to create and display textures, but it isn't working at all.
Here is the code using the older, non-DSA approach.
unsigned int containerTexture = 0;
glGenTextures(1, &containerTexture);
glBindTexture(GL_TEXTURE_2D, containerTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
int width = 0, height = 0, channelCount = 0;
stbi_set_flip_vertically_on_load(true);
unsigned char* pixels = stbi_load("res/textures/container.jpg", &width, &height, &channelCount, 0);
if (pixels) {
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, pixels);
    glGenerateMipmap(GL_TEXTURE_2D);
} else {
    cerr << "Failed to load texture! \n";
}
stbi_image_free(pixels);
Here is the DSA version.
unsigned int containerTexture = 0;
int width = 0, height = 0, channelCount = 0;
stbi_set_flip_vertically_on_load(true);
unsigned char* pixels = stbi_load("res/textures/container.jpg", &width, &height, &channelCount, 0);
if (pixels) {
    glCreateTextures(GL_TEXTURE_2D, 1, &containerTexture);
    glTextureParameteri(containerTexture, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTextureParameteri(containerTexture, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTextureParameteri(containerTexture, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTextureParameteri(containerTexture, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTextureStorage2D(containerTexture, 1, GL_RGB8, width, height);
    glTextureSubImage2D(containerTexture, 1, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
    glGenerateTextureMipmap(containerTexture);
    glBindTextureUnit(0, containerTexture);
} else {
    cerr << "Failed to load texture! \n";
}
stbi_image_free(pixels);
The 2nd parameter of glTextureSubImage2D is the mipmap level to be set. Mipmap levels are zero-based, so the base level is 0, not 1 as in your code:
glTextureSubImage2D(containerTexture, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
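As a side note (this is an assumption about what's intended, not something stated in the question): glTextureStorage2D is only asked for a single level, so glGenerateTextureMipmap has nothing to fill beyond the base image. If mipmaps are actually wanted, a sketch allocating the full chain could look like this (it needs <cmath>/<algorithm>, plus a mipmapped min filter such as GL_LINEAR_MIPMAP_LINEAR to ever sample the extra levels):
// Hypothetical variant that allocates the whole mipmap chain.
int levels = 1 + (int)std::floor(std::log2(std::max(width, height)));
glTextureStorage2D(containerTexture, levels, GL_RGB8, width, height);
glTextureSubImage2D(containerTexture, 0, 0, 0, width, height,
                    GL_RGB, GL_UNSIGNED_BYTE, pixels);
glGenerateTextureMipmap(containerTexture);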

OpenGL 4.1 and lower Black Texture, Mac and Windows

I had this problem when running my OpenGL code on lower-end PCs and Macs that don't support OpenGL 4.5. In my regular code I use DSA functions such as glCreateTextures and glTextureStorage2D, but those aren't available on these versions, so I went down the older glGenTextures path instead.
Here's the image generation code:
Texture::Texture(const std::string& path)
    : m_Path(path)
{
    int width, height, channels;
    stbi_set_flip_vertically_on_load(1);
    unsigned char* data = stbi_load(path.c_str(), &width, &height, &channels, 0);
    RW_CORE_ASSERT(data, "Failed to load image!");
    m_Width = width;
    m_Height = height;
    GLenum internalFormat = 0, dataFormat = 0;
    if (channels == 4)
    {
        internalFormat = GL_RGBA8;
        dataFormat = GL_RGBA;
    }
    else if (channels == 3)
    {
        internalFormat = GL_RGB8;
        dataFormat = GL_RGB;
    }
    m_InternalFormat = internalFormat;
    m_DataFormat = dataFormat;
    RW_CORE_ASSERT(internalFormat & dataFormat, "Format not supported!");
    glGenTextures(1, &m_ID);
    glBindTexture(GL_TEXTURE_2D, m_ID);
    glTexParameteri(m_ID, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(m_ID, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(m_ID, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(m_ID, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexImage2D(m_ID, 1, internalFormat, m_Width, m_Height, 0, dataFormat, GL_UNSIGNED_BYTE, data);
    glBindTexture(GL_TEXTURE_2D, 0);
    stbi_image_free(data);
}
I want to bind my textures to specific texture units (slots), so I have this function:
void Texture::Bind(uint32_t slot) const
{
    glActiveTexture(GL_TEXTURE0 + slot);
    glBindTexture(GL_TEXTURE_2D, m_ID);
}
The texture draws as plain black. To make sure it wasn't a rendering problem on my side, I also displayed the texture through ImGui's renderer. The image imports correctly and I get no errors; the same loading code and paths work on a higher-end PC, and the only thing that's different is that the higher-end PC runs the OpenGL 4.5 texture-creation code.
It turns out I had to pass GL_TEXTURE_2D as the target in these calls instead of the texture ID (the non-DSA functions take a texture target, not a texture name), and use mipmap level 0, the base level, in glTexImage2D:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, m_Width, m_Height, 0, dataFormat, GL_UNSIGNED_BYTE, data);

Array texture in OpenGL 3.+ with mipmap

I have read this tutorial: Array Texture,
but I don't want to use glTexStorage3D() (it requires OpenGL 4.2). First of all, can someone check whether I have implemented this properly? (I'm using glTexImage3D instead of glTexStorage3D.)
unsigned int nrTextures = 6;
GLsizei width = 256;
GLsizei height = 256;
GLuint arrayTextureID;
std::vector<unsigned char*> textures(nrTextures);
//textures: Load textures here...
glGenTextures(1, &arrayTextureID);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D_ARRAY, arrayTextureID);
//Gamma to linear color space for each texture.
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB, width, height, nrTextures, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
for(unsigned int i = 0; i < nrTextures; i++)
    glTexSubImage3D(GL_TEXTURE_2D_ARRAY, 0, 0, 0, i, width, height, 1, GL_RGB, GL_UNSIGNED_BYTE, textures[i]);
/*glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);*/
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
/*glGenerateMipmap(GL_TEXTURE_2D_ARRAY);*/
glBindTexture(GL_TEXTURE_2D_ARRAY, 0);
How do I implement mipmapping with this implementation?
This mostly looks ok. The only problem I can see is that GL_SRGB is not a valid internal texture format. So the glTexImage3D() call needs to use GL_SRGB8 instead:
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB8, width, height, nrTextures, 0,
GL_RGB, GL_UNSIGNED_BYTE, NULL);
For generating mipmaps, you can call:
glGenerateMipmap(GL_TEXTURE_2D_ARRAY);
after you have filled the texture with data via the glTexSubImage3D() calls. Also switch the min filter back to a mipmapped mode such as GL_LINEAR_MIPMAP_LINEAR (currently commented out in your code), otherwise the generated levels are never sampled.
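Putting the pieces together, a sketch of the mipmapped setup could look like this (same variables as in your snippet; loading the textures vector is assumed to happen beforehand):
glGenTextures(1, &arrayTextureID);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D_ARRAY, arrayTextureID);
// Allocate level 0 of the whole array, then upload each layer.
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB8, width, height, nrTextures, 0,
             GL_RGB, GL_UNSIGNED_BYTE, NULL);
for (unsigned int i = 0; i < nrTextures; i++)
    glTexSubImage3D(GL_TEXTURE_2D_ARRAY, 0, 0, 0, i, width, height, 1,
                    GL_RGB, GL_UNSIGNED_BYTE, textures[i]);
// Mipmapped min filter, then let GL build the chain for every layer.
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glGenerateMipmap(GL_TEXTURE_2D_ARRAY);
glBindTexture(GL_TEXTURE_2D_ARRAY, 0);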

SOIL - Getting Image Dimensions

I have decided to use SOIL to load images for OpenGL in my project. I have this function, which loads an image and returns a GLTexture, a struct that holds a GLuint texture id and two ints, width and height:
GLTexture loadTexture(const char *filePath) {
    GLTexture texture = {};
    int width;
    int height;
    unsigned char *data;
    //Load Image File Directly into an OpenGL Texture
    texture.id = SOIL_load_OGL_texture
    (
        filePath,
        SOIL_LOAD_AUTO,
        SOIL_CREATE_NEW_ID,
        SOIL_FLAG_MIPMAPS | SOIL_FLAG_INVERT_Y | SOIL_FLAG_NTSC_SAFE_RGB | SOIL_FLAG_COMPRESS_TO_DXT
    );
    //Error Checking (Load Process)
    if (texture.id == 0) {
        fatalError("SOIL Loading Error!");
    }
    //Generate and Bind Texture
    glGenTextures(1, &(texture.id));
    glBindTexture(GL_TEXTURE_2D, texture.id);
    //Get Width, Height and Data of Image
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
    glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glGenerateMipmap(GL_TEXTURE_2D);
    //Unbind Texture
    glBindTexture(GL_TEXTURE_2D, 0);
    //Return Texture
    texture.width = width;
    texture.height = height;
    return texture;
}
As far as I know, glGetTexLevelParameteriv() should write the dimensions of the currently bound texture into width and height, but whenever I load an image it returns 0.
Should I pass width and height as parameters to the function, or is it possible to get them via OpenGL?
The texture id generated by SOIL_load_OGL_texture is overwritten by the
glGenTextures(1, &(texture.id));
line (glGenTextures creates a new texture and stores its id in texture.id). All operations afterwards work on this newly created texture. Since the new texture is empty, its width and height are 0.
I'm not sure what you want to achieve here, but if you only want to load the texture, then this code might work:
texture.id = SOIL_load_OGL_texture (...);
//Error Checking (Load Process)
if (texture.id == 0) {
fatalError("SOIL Loading Error!");
}
//Just bind and do not create a new texture
glBindTexture(GL_TEXTURE_2D, texture.id);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
//Unbind Texture
glBindTexture(GL_TEXTURE_2D, 0);
//Return Texture
texture.width = width;
texture.height = height;
return texture;
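Alternatively (a sketch based on how I recall SOIL's API, so treat the exact calls as an assumption): SOIL_load_image hands you the raw pixels together with the dimensions, and you can then upload and free them yourself:
int width = 0, height = 0, channels = 0;
unsigned char *pixels = SOIL_load_image(filePath, &width, &height, &channels, SOIL_LOAD_RGBA);
if (pixels == NULL) {
    fatalError("SOIL Loading Error!");
}
glGenTextures(1, &(texture.id));
glBindTexture(GL_TEXTURE_2D, texture.id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
glGenerateMipmap(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
SOIL_free_image_data(pixels);
texture.width = width;
texture.height = height;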

C++ Adding a texture to a GL_QUAD and it's coming out black

I have a series of rectangles of different colours and I'm trying to add a texture to one of them. However, when I apply the texture to the given rectangle, it just turns black. Below is the function I use to load the texture.
GLuint GLWidget::LoadTexture(const char * pic, int width, int height) {
    GLuint Texture;
    BYTE * data;
    FILE * picfile;
    picfile = fopen(pic, "rb");
    if (picfile == NULL)
        return 0;
    data = (BYTE *)malloc(width * height * 3);
    fread(data, width * height, 3, picfile);
    fclose(picfile);
    glGenTextures(1, &Texture);
    glBindTexture(GL_TEXTURE_2D, Texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB8, GL_UNSIGNED_BYTE, data);
    return Texture;
}
In another function where the GL_QUADS are drawn, I then have...
GLuint myTex = LoadTexture("texture.bmp", 500, 500);
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glBindTexture(GL_TEXTURE_2D, myTex);
glBegin(GL_QUADS);
glTexCoord2f(1, 1); glVertex3f(42, 10, 42);
glTexCoord2f(1, 0); glVertex3f(42, 10, -42);
glTexCoord2f(0, 0); glVertex3f(-42,10,-42);
glTexCoord2f(0, 1); glVertex3f(-42,10, 42);
glEnd();
If anyone could let me know where I am going wrong that would be great, thanks!
This call
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB8, GL_UNSIGNED_BYTE, data);
is invalid. GL_RGB8 is a valid internalFormat, but it is not a valid enum for format. Use GL_RGB as the format and GL_UNSIGNED_BYTE as the type if your client-side data is 3 channels of 8-bit unsigned data per channel.
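In other words, something along these lines (the glPixelStorei line is an extra precaution I'd add for 3-byte pixels whose row size is not a multiple of 4; with a width of 500 it happens not to matter):
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);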
Another thing is
LoadTexture("texture.bmp", 500, 500);
This suggests that you are dealing with BMP files, but your loader reads the file as if it were completely raw pixel data: it does not skip the BMP header, and it ignores the BGR channel order and the 4-byte row padding that BMP files use.
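A minimal sketch of a fix, assuming an uncompressed 24-bit BMP with the common 54-byte header (real BMPs can have other header sizes and bit depths, so a proper loader should parse the header fields rather than hard-code them):
GLuint GLWidget::LoadTexture(const char * pic, int width, int height) {
    FILE * picfile = fopen(pic, "rb");
    if (picfile == NULL)
        return 0;
    // Rows in a 24-bit BMP are padded to a multiple of 4 bytes and stored
    // bottom-up, which conveniently matches OpenGL's bottom-to-top row order.
    int rowSize = (width * 3 + 3) & ~3;
    BYTE * data = (BYTE *)malloc(rowSize * height);
    fseek(picfile, 54, SEEK_SET);              // skip BITMAPFILEHEADER + BITMAPINFOHEADER
    fread(data, 1, rowSize * height, picfile);
    fclose(picfile);
    GLuint Texture;
    glGenTextures(1, &Texture);
    glBindTexture(GL_TEXTURE_2D, Texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 4);     // matches the BMP row padding
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0,
                 GL_BGR, GL_UNSIGNED_BYTE, data);
    free(data);
    return Texture;
}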