OpenGL Texture DSA not showing texture - c++

I'm attempting to use the newer DSA (direct state access) functions to display a texture, but it's not working at all.
Here is the code using the older bind-to-edit approach:
unsigned int containerTexture = 0;
glGenTextures(1, &containerTexture);
glBindTexture(GL_TEXTURE_2D, containerTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
int width = 0, height = 0, channelCount = 0;
stbi_set_flip_vertically_on_load(true);
unsigned char* pixels = stbi_load("res/textures/container.jpg", &width, &height, &channelCount, 0);
if (pixels) {
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, pixels);
    glGenerateMipmap(GL_TEXTURE_2D);
} else {
    cerr << "Failed to load texture! \n";
}
stbi_image_free(pixels);
Here is the DSA version.
unsigned int containerTexture = 0;
int width = 0, height = 0, channelCount = 0;
stbi_set_flip_vertically_on_load(true);
unsigned char* pixels = stbi_load("res/textures/container.jpg", &width, &height, &channelCount, 0);
if (pixels) {
    glCreateTextures(GL_TEXTURE_2D, 1, &containerTexture);
    glTextureParameteri(containerTexture, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTextureParameteri(containerTexture, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTextureParameteri(containerTexture, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTextureParameteri(containerTexture, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTextureStorage2D(containerTexture, 1, GL_RGB8, width, height);
    glTextureSubImage2D(containerTexture, 1, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
    glGenerateTextureMipmap(containerTexture);
    glBindTextureUnit(0, containerTexture);
} else {
    cerr << "Failed to load texture! \n";
}
stbi_image_free(pixels);

The second parameter of glTextureSubImage2D is the mipmap level to write. Mipmap levels are zero-based, so the base level is 0, not 1 as in your code:
glTextureSubImage2D(containerTexture, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
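
Two side notes on the corrected version: the texture was allocated with only a single level (glTextureStorage2D(..., 1, ...)), so glGenerateTextureMipmap has no smaller levels to fill in, and a GL_NEAREST min filter never samples mipmaps anyway. A minimal sketch of a fully mipmapped variant (assuming a GL 4.5 context and <cmath>/<algorithm> for the level computation):
// Sketch: allocate the full mip chain, upload the base level, generate the rest.
int levels = 1 + (int)std::floor(std::log2(std::max(width, height)));
glCreateTextures(GL_TEXTURE_2D, 1, &containerTexture);
glTextureParameteri(containerTexture, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTextureParameteri(containerTexture, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTextureStorage2D(containerTexture, levels, GL_RGB8, width, height);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1); // stb_image rows are tightly packed
glTextureSubImage2D(containerTexture, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
glGenerateTextureMipmap(containerTexture);
glBindTextureUnit(0, containerTexture);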

Related

Array texture in OpenGL 3.+ with mipmap

I have read this tutorial: Array Texture,
but I don't want to use the glTexStorage3D() function (it requires OpenGL 4.2). First of all, can someone check whether I have implemented this code properly (I'm using glTexImage3D instead of glTexStorage3D):
unsigned int nrTextures = 6;
GLsizei width = 256;
GLsizei height = 256;
GLuint arrayTextureID;
std::vector<unsigned char*> textures(nrTextures);
//textures: Load textures here...
glGenTextures(1, &arrayTextureID);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D_ARRAY, arrayTextureID);
//Gamma to linear color space for each texture.
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB, width, height, nrTextures, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
for(unsigned int i = 0; i < nrTextures; i++)
    glTexSubImage3D(GL_TEXTURE_2D_ARRAY, 0, 0, 0, i, width, height, 1, GL_RGB, GL_UNSIGNED_BYTE, textures[i]);
/*glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);*/
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
/*glGenerateMipmap(GL_TEXTURE_2D_ARRAY);*/
glBindTexture(GL_TEXTURE_2D_ARRAY, 0);
How do I implement mipmapping with this implementation?
This mostly looks OK. The only problem I can see is that GL_SRGB is not a valid internal texture format here, so the glTexImage3D() call needs to use the sized GL_SRGB8 instead:
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB8, width, height, nrTextures, 0,
             GL_RGB, GL_UNSIGNED_BYTE, NULL);
For generating mipmaps, you can call:
glGenerateMipmap(GL_TEXTURE_2D_ARRAY);
after you have filled the texture with data via the glTexSubImage3D() calls.
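
Putting it together, a minimal sketch of the mipmapped variant (same variables as in the question); note that the min filter must also be switched to a mipmapped mode, otherwise the generated levels are never sampled:
// Sketch: upload all layers, generate the mip chain, then enable mipmapped filtering.
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB8, width, height, nrTextures, 0,
             GL_RGB, GL_UNSIGNED_BYTE, NULL);
for(unsigned int i = 0; i < nrTextures; i++)
    glTexSubImage3D(GL_TEXTURE_2D_ARRAY, 0, 0, 0, i, width, height, 1,
                    GL_RGB, GL_UNSIGNED_BYTE, textures[i]);
glGenerateMipmap(GL_TEXTURE_2D_ARRAY);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);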

SOIL - Getting Image Dimensions

I have decided to use SOIL to load images for use with OpenGL for my project. I have this method, which loads an image and returns a GLTexture, a struct that holds a GLuint texture id and two ints, width and height:
GLTexture loadTexture(const char *filePath) {
    GLTexture texture = {};
    int width;
    int height;
    unsigned char *data;
    //Load Image File Directly into an OpenGL Texture
    texture.id = SOIL_load_OGL_texture
    (
        filePath,
        SOIL_LOAD_AUTO,
        SOIL_CREATE_NEW_ID,
        SOIL_FLAG_MIPMAPS | SOIL_FLAG_INVERT_Y | SOIL_FLAG_NTSC_SAFE_RGB | SOIL_FLAG_COMPRESS_TO_DXT
    );
    //Error Checking (Load Process)
    if (texture.id == 0) {
        fatalError("SOIL Loading Error!");
    }
    //Generate and Bind Texture
    glGenTextures(1, &(texture.id));
    glBindTexture(GL_TEXTURE_2D, texture.id);
    //Get Width, Height and Data of Image
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
    glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glGenerateMipmap(GL_TEXTURE_2D);
    //Unbind Texture
    glBindTexture(GL_TEXTURE_2D, 0);
    //Return Texture
    texture.width = width;
    texture.height = height;
    return texture;
}
As far as I know, glGetTexLevelParameteriv() should return the width and height of the currently bound texture into width and height, but whenever I load an image, it returns 0.
Should I pass width and height in as parameters of the method, or is it possible to get them via OpenGL?
The texture id generated by SOIL_load_OGL_texture is overwritten by the
glGenTextures(1, &(texture.id));
line (glGenTextures creates a new texture and stores its id in texture.id). All operations afterwards work on the newly created texture. Since this new texture is empty, width and height are 0.
I'm not sure what you want to achieve here, but if you only want to load the texture, then this code might work:
texture.id = SOIL_load_OGL_texture (...);
//Error Checking (Load Process)
if (texture.id == 0) {
    fatalError("SOIL Loading Error!");
}
//Just bind and do not create a new texture
glBindTexture(GL_TEXTURE_2D, texture.id);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
//Unbind Texture
glBindTexture(GL_TEXTURE_2D, 0);
//Return Texture
texture.width = width;
texture.height = height;
return texture;
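
Alternatively, if you mainly need the dimensions, SOIL's lower-level SOIL_load_image returns the raw pixels together with width and height, so nothing has to be read back from GL. A sketch (creating the texture yourself instead of via SOIL_load_OGL_texture):
// Sketch: load raw pixels plus dimensions, then build the GL texture from them.
int width = 0, height = 0, channels = 0;
unsigned char *data = SOIL_load_image(filePath, &width, &height, &channels, SOIL_LOAD_RGBA);
if (data == NULL) {
    fatalError("SOIL Loading Error!");
}
glGenTextures(1, &(texture.id));
glBindTexture(GL_TEXTURE_2D, texture.id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
SOIL_free_image_data(data);
texture.width = width;
texture.height = height;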

Setting up Cube Map

Here is the code I am trying to use to set up a Cube Map in LWJGL.
public static int setUpCubeMap(String filename, int anisotropyLevel, boolean clamp, boolean pixelated, boolean mipmapped) {
    IntBuffer tmp = BufferUtils.createIntBuffer(1);
    glGenTextures(tmp);
    tmp.rewind();
    try {
        InputStream in = new FileInputStream(filename);
        PNGDecoder decoder = new PNGDecoder(in);
        glEnable(GL_TEXTURE_CUBE_MAP);
        ByteBuffer buf = ByteBuffer.allocateDirect(4 * decoder.getWidth() * decoder.getHeight());
        decoder.decode(buf, decoder.getWidth() * 4, PNGDecoder.Format.RGBA);
        buf.flip();
        glBindTexture(GL_TEXTURE_CUBE_MAP, tmp.get(0));
        org.lwjgl.opengl.ARBTextureStorage.glTexStorage2D(GL_TEXTURE_CUBE_MAP,
                (int)(Math.log(Math.max(decoder.getHeight(), decoder.getWidth()))/Math.log(2))+1,
                GL_RGBA8, decoder.getWidth(), decoder.getHeight());
        glTexSubImage2D(GL_TEXTURE_CUBE_MAP, 0, 0, 0, decoder.getWidth(), decoder.getHeight(), GL_RGBA, GL_UNSIGNED_BYTE, buf);
        if(mipmapped)
            org.lwjgl.opengl.GL30.glGenerateMipmap(GL_TEXTURE_CUBE_MAP);
        if(clamp) {
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        } else {
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_TEXTURE_WRAP_S);
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_TEXTURE_WRAP_T);
        }
        if(pixelated) {
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        } else {
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        }
        if(mipmapped)
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        else
            glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        if(anisotropyLevel > 1)
            glTexParameterf(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAX_ANISOTROPY_EXT, anisotropyLevel);
        glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
        glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
        glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
        glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
        glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
        glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL_RGBA, decoder.getWidth(), decoder.getHeight(), 0, GL_RGBA, GL_UNSIGNED_BYTE, buf);
        System.out.println("Loaded texture successfully from: " + filename + " with dimensions of " + decoder.getWidth() + "x" + decoder.getHeight());
    } catch (java.io.FileNotFoundException ex) {
        System.err.println("Error " + filename + " not found");
    } catch (java.io.IOException e) {
        System.err.println("Error decoding " + filename);
    }
    tmp.rewind();
    return tmp.get(0);
}
I then pass the texture to a samplerCube in a shader and call textureCube() to display it. Sadly, the screen shows up black, and when I call glGetError() I get GL_INVALID_OPERATION and GL_INVALID_ENUM. What am I doing wrong here?
There are several problems here; not all of them necessarily prevent the code from working, but I'll list them anyway.
The glTexSubImage2D call at the beginning is superfluous, and GL_TEXTURE_CUBE_MAP is not a valid target for glTexSubImage2D (each face has its own target), so that call raises GL_INVALID_ENUM.
You fill in the data down below anyway; note, however, that because the storage was allocated immutably with glTexStorage2D, those six per-face uploads must use glTexSubImage2D rather than glTexImage2D, which raises GL_INVALID_OPERATION on an immutable texture.
Also related to this, the call to glGenerateMipmap should happen after the six per-face uploads.
And in this section, in the else clause, the third parameter to both function calls is wrong: GL_TEXTURE_WRAP_S and GL_TEXTURE_WRAP_T are parameter names, not parameter values; you probably meant GL_REPEAT.
if(clamp) {
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
} else {
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_TEXTURE_WRAP_S);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_TEXTURE_WRAP_T);
}
I suggest you use gDEBugger; it gives a very understandable description of the problem and exactly when it occurs.
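
For reference, a sketch of the corrected per-face uploads (keeping the names from the question; the six face targets are consecutive enum values, and the storage already exists, so glTexSubImage2D is used):
// Sketch: fill each face of the immutable-storage cube map, then build mipmaps.
int w = decoder.getWidth(), h = decoder.getHeight();
for(int face = 0; face < 6; face++) {
    glTexSubImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X + face, 0, 0, 0, w, h,
            GL_RGBA, GL_UNSIGNED_BYTE, buf);
}
if(mipmapped)
    org.lwjgl.opengl.GL30.glGenerateMipmap(GL_TEXTURE_CUBE_MAP);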

Textures only draw if their dimensions are powers of two?

I am developing a project using GLUT, and when I try to add a texture, it only works if the height and width are powers of two (e.g. 128x64, 256x256, etc.).
Has anyone else experienced the same problem?
My Example:
GLuint textureID;
char caminho2[1000]= "C:\\....";
glEnable(GL_TEXTURE_2D);
ilInit();
unsigned int t[2], tw, th;
unsigned char *texData;
ilGenImages(2,t);
ilBindImage(t[0]);
ilLoadImage((ILstring) caminho2);
tw = ilGetInteger(IL_IMAGE_WIDTH);
th = ilGetInteger(IL_IMAGE_HEIGHT);
ilConvertImage(IL_RGBA, IL_UNSIGNED_BYTE);
texData = ilGetData();
glGenTextures(1,&textureID); // unsigned int texID - global variable;
glBindTexture(GL_TEXTURE_2D,textureID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tw, th, 0, GL_RGBA, GL_UNSIGNED_BYTE, texData);
glBindTexture(GL_TEXTURE_2D,textureID);
// draw..........
glBindTexture(GL_TEXTURE_2D,0);

OpenGL being stubborn, not allowing texture mapping

I'm working on a game engine that uses BMP texture files on MD3 models. Unfortunately for me, it isn't working at all. I've tried everything, to no avail. Since I wrote the loaders myself, I looked at them and fixed a few bugs, but I still just get a gray, lit model.
Here's my rendering code:
void drawmd3(md3& model, GLuint tex)
{
    int i = 0;
    glLoadIdentity();
    glTranslatef(-1.5,0,-24);
    glRotatef(270,0,0,1);
    glRotatef(-90,0,1,0);
    glRotatef(a,0,0,1);
    while(i<model.surfnum)
    {
        glBindTexture(GL_TEXTURE_2D, tex);
        int j = 0;
        while(j<model.surfs.at(i).trinum)
        {
            int tmp = model.surfs.at(i).tris.at(j).indexes[0];
            int tmp1 = model.surfs.at(i).tris.at(j).indexes[1];
            int tmp2 = model.surfs.at(i).tris.at(j).indexes[2];
            float norms[3];
            norms[0] = convert_normals0(model.surfs.at(i).verts.at(tmp).normal);
            norms[1] = convert_normals1(model.surfs.at(i).verts.at(tmp).normal);
            norms[2] = convert_normals2(model.surfs.at(i).verts.at(tmp).normal);
            float norms1[3];
            norms1[0] = convert_normals0(model.surfs.at(i).verts.at(tmp1).normal);
            norms1[1] = convert_normals1(model.surfs.at(i).verts.at(tmp1).normal);
            norms1[2] = convert_normals2(model.surfs.at(i).verts.at(tmp1).normal);
            float norms2[3];
            norms2[0] = convert_normals0(model.surfs.at(i).verts.at(tmp2).normal);
            norms2[1] = convert_normals1(model.surfs.at(i).verts.at(tmp2).normal);
            norms2[2] = convert_normals2(model.surfs.at(i).verts.at(tmp2).normal);
            glBegin(GL_TRIANGLES);
            glNormal3f(norms[0],norms[1],norms[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp).st[0],1-model.surfs.at(i).st.at(tmp).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp).coord[0]/64, model.surfs.at(i).verts.at(tmp).coord[1]/64, model.surfs.at(i).verts.at(tmp).coord[2]/64);
            glNormal3f(norms1[0],norms1[1],norms1[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp1).st[0],1-model.surfs.at(i).st.at(tmp1).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp1).coord[0]/64, model.surfs.at(i).verts.at(tmp1).coord[1]/64, model.surfs.at(i).verts.at(tmp1).coord[2]/64);
            glNormal3f(norms2[0],norms2[1],norms2[2]);
            glTexCoord2f(model.surfs.at(i).st.at(tmp2).st[0],1-model.surfs.at(i).st.at(tmp2).st[1]);
            glVertex3f(model.surfs.at(i).verts.at(tmp2).coord[0]/64, model.surfs.at(i).verts.at(tmp2).coord[1]/64, model.surfs.at(i).verts.at(tmp2).coord[2]/64);
            glEnd();
            j++;
        }
        glBindTexture(GL_TEXTURE_2D, 0);
        i++;
    }
}
Here's my init code:
glEnable(GL_TEXTURE_2D);
glGenTextures(1,&tex[0]);
glBindTexture(GL_TEXTURE_2D, tex[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, img.img.imgwidth, img.img.imgheight, 0, GL_RGBA8, GL_UNSIGNED_BYTE, img.data);
That's your problem right there:
glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RGBA8,
             img.img.imgwidth,
             img.img.imgheight,
             0,
             GL_RGBA8,   <<<<<<<<<<----------
             GL_UNSIGNED_BYTE,
             img.data);
The token you're using for the format parameter is not allowed. From the reference:
format
Specifies the format of the pixel data. The following symbolic values are accepted: GL_COLOR_INDEX, GL_RED, GL_GREEN, GL_BLUE, GL_ALPHA, GL_RGB, GL_BGR, GL_RGBA, GL_BGRA, GL_LUMINANCE, and GL_LUMINANCE_ALPHA.
Thus no texture will be loaded and you end up with the default white of an uninitialized texture.
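
For completeness, the corrected call keeps GL_RGBA8 as the internalFormat and uses the unsized GL_RGBA as the pixel-data format (this sketch reuses the variables from the question):
// format takes an unsized pixel format such as GL_RGBA; the sized GL_RGBA8
// belongs only in the internalFormat parameter.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, img.img.imgwidth, img.img.imgheight,
             0, GL_RGBA, GL_UNSIGNED_BYTE, img.data);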