Textures only draw if their dimensions are powers of two? - C++

I am developing a project using GLUT, and when I try to apply a GL texture, it only draws if the width and height are powers of 2 (e.g. 128x64, 256x256, etc.).
Has anyone else experienced the same problem?
My Example:
GLuint textureID;
char caminho2[1000]= "C:\\....";
glEnable(GL_TEXTURE_2D);
ilInit();                                    // initialise DevIL
unsigned int t[2], tw, th;
unsigned char *texData;
ilGenImages(2,t);
ilBindImage(t[0]);
ilLoadImage((ILstring) caminho2);            // load the image file from the path above
tw = ilGetInteger(IL_IMAGE_WIDTH);
th = ilGetInteger(IL_IMAGE_HEIGHT);
ilConvertImage(IL_RGBA, IL_UNSIGNED_BYTE);   // convert to tightly packed RGBA
texData = ilGetData();
glGenTextures(1,&textureID); // unsigned int textureID - global variable
glBindTexture(GL_TEXTURE_2D,textureID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tw, th, 0, GL_RGBA, GL_UNSIGNED_BYTE, texData);
glBindTexture(GL_TEXTURE_2D,textureID);
// draw..........
glBindTexture(GL_TEXTURE_2D,0);
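For what it's worth, this is usually down to the context only guaranteeing power-of-two textures: NPOT sizes are only required from OpenGL 2.0 onward (or with the GL_ARB_texture_non_power_of_two extension). Below is a rough sketch of a fallback, not verified against this project, that checks for the extension once a context exists and rescales the RGBA data from DevIL to power-of-two dimensions with gluScaleImage when the extension is missing:
#include <GL/glut.h>   // the project already uses GLUT; this pulls in gl.h and glu.h
#include <cstring>
#include <vector>
// Returns true if the driver advertises non-power-of-two texture support.
static bool hasNpotSupport()
{
    const char* ext = reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS));
    return ext && std::strstr(ext, "GL_ARB_texture_non_power_of_two") != nullptr;
}
static unsigned int nextPowerOfTwo(unsigned int v)
{
    unsigned int p = 1;
    while (p < v) p <<= 1;
    return p;
}
// Upload tw x th RGBA pixels (e.g. texData from ilGetData() above), rescaling to
// power-of-two dimensions when the hardware requires it.
static void uploadRgba(unsigned int tw, unsigned int th, const unsigned char* texData)
{
    if (hasNpotSupport()) {
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tw, th, 0, GL_RGBA, GL_UNSIGNED_BYTE, texData);
        return;
    }
    unsigned int pw = nextPowerOfTwo(tw), ph = nextPowerOfTwo(th);
    std::vector<unsigned char> scaled(pw * ph * 4);
    gluScaleImage(GL_RGBA, tw, th, GL_UNSIGNED_BYTE, texData,
                  pw, ph, GL_UNSIGNED_BYTE, scaled.data());
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, pw, ph, 0, GL_RGBA, GL_UNSIGNED_BYTE, scaled.data());
}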

Related

How to properly use glTexImage2D to render an object each frame?

This is the code for a ground object that gets rendered every frame in a game I'm making with C++ and OpenGL.
Ground::Ground() {
    this->loadedObject = loadObject("ground.obj");
    this->vertices = getVerticesFromLoadedObject(this->loadedObject);
    this->textureCoords = getTextureCoordsFromLoadedObject(this->loadedObject);
    this->textureData = loadTexture("ground.jpg");
    glGenTextures(1, &this->textureId);
    glBindTexture(GL_TEXTURE_2D, this->textureId);
}
void Ground::draw() {
    glEnable(GL_TEXTURE_2D);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, this->textureData.width, this->textureData.height, 0, GL_RGB, GL_UNSIGNED_BYTE, this->textureData.data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY_EXT);
    tinyobj::shape_t shape = this->loadedObject.shape;
    unsigned int vertexCount = shape.mesh.num_face_vertices.size() * 3;
    glTexCoordPointer(2, GL_FLOAT, 0, this->textureCoords);
    glVertexPointer(3, GL_FLOAT, 0, this->vertices);
    glDrawArrays(GL_TRIANGLES, 0, vertexCount);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY_EXT);
    glDisable(GL_TEXTURE_2D);
}
Even though this works, calling glTexImage2D every frame causes a memory leak, but I can't get the texture to show otherwise. I tried doing the call in Ground's constructor, but then it doesn't load the image at all. How do I load the image just once and avoid the memory leak?
Update: with glTexImage2D in the constructor
Ground::Ground() {
    this->loadedObject = loadObject("ground.obj");
    this->vertices = getVerticesFromLoadedObject(this->loadedObject);
    this->textureCoords = getTextureCoordsFromLoadedObject(this->loadedObject);
    this->textureData = loadTexture("ground.jpg");
    glGenTextures(1, &this->textureId);
    glBindTexture(GL_TEXTURE_2D, this->textureId);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, this->textureData.width, this->textureData.height, 0, GL_RGB, GL_UNSIGNED_BYTE, this->textureData.data);
}
void Ground::draw() {
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, this->textureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY_EXT);
    tinyobj::shape_t shape = this->loadedObject.shape;
    unsigned int vertexCount = shape.mesh.num_face_vertices.size() * 3;
    glTexCoordPointer(2, GL_FLOAT, 0, this->textureCoords);
    glVertexPointer(3, GL_FLOAT, 0, this->vertices);
    glDrawArrays(GL_TRIANGLES, 0, vertexCount);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY_EXT);
    glDisable(GL_TEXTURE_2D);
}
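For reference, the usual pattern is to upload the texture once and only bind it in draw(). Here is a minimal sketch of that pattern, not the poster's actual code, assuming the same members and helpers as above, that the GL context is already current when the constructor runs, and that glGenerateMipmap (OpenGL 3.0 / ARB_framebuffer_object) is available for the mipmapped min filter:
Ground::Ground() {
    this->loadedObject = loadObject("ground.obj");
    this->vertices = getVerticesFromLoadedObject(this->loadedObject);
    this->textureCoords = getTextureCoordsFromLoadedObject(this->loadedObject);
    this->textureData = loadTexture("ground.jpg");
    glGenTextures(1, &this->textureId);
    glBindTexture(GL_TEXTURE_2D, this->textureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // RGB rows are not always 4-byte aligned
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, this->textureData.width, this->textureData.height,
                 0, GL_RGB, GL_UNSIGNED_BYTE, this->textureData.data);
    glGenerateMipmap(GL_TEXTURE_2D);         // needed once GL_LINEAR_MIPMAP_LINEAR is used
    glBindTexture(GL_TEXTURE_2D, 0);         // parameters and data are stored in the texture object
}
void Ground::draw() {
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, this->textureId);   // reuse the uploaded texture, no re-upload
    // ... client-state setup, glTexCoordPointer/glVertexPointer/glDrawArrays as before ...
    glBindTexture(GL_TEXTURE_2D, 0);
    glDisable(GL_TEXTURE_2D);
}
If the constructor runs before the GLUT window (and thus the GL context) is created, all of these calls typically have no effect, which would explain the texture not loading at all in the constructor attempt.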

OpenGL Texture DSA not showing texture

I'm attempting to use the newer DSA (direct state access) functions to show textures, but it's not working at all.
Here is the code using the older approach.
unsigned int containerTexture = 0;
glGenTextures(1, &containerTexture);
glBindTexture(GL_TEXTURE_2D, containerTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
int width = 0, height = 0, channelCount = 0;
stbi_set_flip_vertically_on_load(true);
unsigned char* pixels = stbi_load("res/textures/container.jpg", &width, &height, &channelCount, 0);
if (pixels) {
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, pixels);
    glGenerateMipmap(GL_TEXTURE_2D);
} else {
    cerr << "Failed to load texture! \n";
}
stbi_image_free(pixels);
Here is the DSA version.
unsigned int containerTexture = 0;
int width = 0, height = 0, channelCount = 0;
stbi_set_flip_vertically_on_load(true);
unsigned char* pixels = stbi_load("res/textures/container.jpg", &width, &height, &channelCount, 0);
if (pixels) {
    glCreateTextures(GL_TEXTURE_2D, 1, &containerTexture);
    glTextureParameteri(containerTexture, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTextureParameteri(containerTexture, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTextureParameteri(containerTexture, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTextureParameteri(containerTexture, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTextureStorage2D(containerTexture, 1, GL_RGB8, width, height);
    glTextureSubImage2D(containerTexture, 1, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
    glGenerateTextureMipmap(containerTexture);
    glBindTextureUnit(0, containerTexture);
} else {
    cerr << "Failed to load texture! \n";
}
stbi_image_free(pixels);
The 2nd parameter of glTextureSubImage2D is the mipmap level to be set. Mipmap levels are zero-based, so the base level is 0, not 1 as in your code:
glTextureSubImage2D(containerTexture, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
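A further observation on the DSA snippet, separate from the fix above: glTextureStorage2D was asked for just one level, so glGenerateTextureMipmap has no extra levels to fill in. That is harmless with the GL_NEAREST min filter, but if mipmapping is wanted the whole chain has to be allocated up front. A rough sketch reusing containerTexture, width, height and pixels from the code above (needs <cmath> and <algorithm>):
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // rows of 3-channel images are not always 4-byte aligned
int levels = 1 + (int)std::floor(std::log2((double)std::max(width, height)));
glTextureStorage2D(containerTexture, levels, GL_RGB8, width, height);
glTextureSubImage2D(containerTexture, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels);
glGenerateTextureMipmap(containerTexture);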

OpenGL 4.1 and lower Black Texture, Mac and Windows

I had this problem when compiling my OpenGL code on lower-end PCs that don't support OpenGL 4.5, and on Macs. In my regular code I use functions like glCreateTextures and glTextureStorage2D, but they are not supported in those versions, so I went down the older glGenTextures path.
Here's the image generation code:
Texture::Texture(const std::string& path)
    : m_Path(path)
{
    int width, height, channels;
    stbi_set_flip_vertically_on_load(1);
    unsigned char* data = stbi_load(path.c_str(), &width, &height, &channels, 0);
    RW_CORE_ASSERT(data, "Failed to load image!");
    m_Width = width;
    m_Height = height;
    GLenum internalFormat = 0, dataFormat = 0;
    if (channels == 4)
    {
        internalFormat = GL_RGBA8;
        dataFormat = GL_RGBA;
    }
    else if (channels == 3)
    {
        internalFormat = GL_RGB8;
        dataFormat = GL_RGB;
    }
    m_InternalFormat = internalFormat;
    m_DataFormat = dataFormat;
    RW_CORE_ASSERT(internalFormat & dataFormat, "Format not supported!");
    glGenTextures(1, &m_ID);
    glBindTexture(GL_TEXTURE_2D, m_ID);
    glTexParameteri(m_ID, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(m_ID, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(m_ID, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(m_ID, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexImage2D(m_ID, 1, internalFormat, m_Width, m_Height, 0, dataFormat, GL_UNSIGNED_BYTE, data);
    glBindTexture(GL_TEXTURE_2D, 0);
    stbi_image_free(data);
}
And I want to bind my textures to specific slots on the GPU so I have this function:
void Texture::Bind(uint32_t slot) const
{
    glActiveTexture(GL_TEXTURE0 + slot);
    glBindTexture(GL_TEXTURE_2D, m_ID);
}
Here's the screenshot of what gets drawn:
To make sure it wasn't a rendering problem I decided to put it into ImGui's renderer.
And here's the picture I am supposed to get:
The image imports correctly and I get no errors; the same importing code and paths work on the higher-end PC, and the only thing that's changed is that the higher-end PC uses the OpenGL 4.5 texture-creation code.
It turns out I had to specify GL_TEXTURE_2D in these places instead of the texture ID:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexImage2D(GL_TEXTURE_2D, 1, internalFormat, m_Width, m_Height, 0, dataFormat, GL_UNSIGNED_BYTE, data);
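Note that the second argument of glTexImage2D is the mipmap level, so the base image normally goes to level 0; passing 1 leaves the base level undefined:
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, m_Width, m_Height, 0, dataFormat, GL_UNSIGNED_BYTE, data);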

OpenGL: Can not write to framebuffer

I've been struggling with this for a while without any positive result. I've implemented framebuffers with multiple render targets before without any issues, but right now it won't work for some reason.
Here's the code for initialization:
// LOAD TEXTURE 1
glGenTextures(1, &m_Texture1);
glBindTexture(GL_TEXTURE_2D, m_Texture1);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, size.x, size.y, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
// LOAD TEXTURE 2
glGenTextures(1, &m_Texture2);
glBindTexture(GL_TEXTURE_2D, m_Texture2);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16F, size.x, size.y, 0, GL_RGB, GL_FLOAT, NULL);
// LOAD TEXTURE 3
glGenTextures(1, &m_Texture3);
glBindTexture(GL_TEXTURE_2D, m_Texture3);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB32F_ARB, size.x, size.y, 0, GL_RGB, GL_FLOAT, NULL);
// Load FBO
glGenFramebuffers(1, &m_fbo);
glBindFramebuffer(GL_FRAMEBUFFER_EXT, m_fbo);
glFramebufferTexture2D(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, m_Texture1, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT1_EXT, GL_TEXTURE_2D, m_Texture2, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT2_EXT, GL_TEXTURE_2D, m_Texture3, 0);
glBindFramebuffer(GL_FRAMEBUFFER_EXT, 0);
And for drawing:
glBindFramebuffer(GL_FRAMEBUFFER_EXT, m_fbo);
/// Draw the buffers, 3 color textures
GLenum buffers[] = { GL_COLOR_ATTACHMENT0_EXT, GL_COLOR_ATTACHMENT1_EXT, GL_COLOR_ATTACHMENT2_EXT };
glDrawBuffers(3, buffers);
/// Clear all the buffers
GLfloat color1[] = { 1.0f, 1.0, 0.0, 1.0 };
GLfloat color2[] = { 0.0f, 1.0, 0.0, 1.0 };
GLfloat color3[] = { 0.0f, 0.0, 1.0, 1.0 };
glClearBufferfv(GL_COLOR, 0, color1);
glClearBufferfv(GL_COLOR, 1, color2);
glClearBufferfv(GL_COLOR, 2, color3);
glBindFramebuffer(GL_FRAMEBUFFER_EXT, 0);
I'm using gDEBugger to inspect the buffers, and the 3 textures should come out yellow, green and blue if I'm not mistaken, right? But they are black for some reason...
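One general check worth adding here (a sketch, not a confirmed fix for this particular code): query the framebuffer's completeness right after attaching the textures, because clears and draws into an incomplete FBO have no effect and raise GL_INVALID_FRAMEBUFFER_OPERATION.
// Needs <iostream>; run while the FBO is still bound, before the final unbind.
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
    std::cerr << "FBO incomplete, status = 0x" << std::hex << status << std::dec << std::endl;
}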

Awesomium with OpenGL Texture Shift

I'm trying to use Awesomium with OpenGL (GLUT and C++) and I'm having trouble rendering the web pages to textures. The textures are randomly shifted along the x and y axes (differently every time I run the program). Picture:
I thought GL_CLAMP_TO_EDGE would take care of that problem, but it did not. What am I doing wrong?
The Awesomium part:
web_core = WebCore::Initialize(WebConfig());
view = web_core->CreateWebView(WIDTH, HEIGHT);
WebURL url(WSLit(URL));
view->LoadURL(url);
BindMethods(view);
while (view->IsLoading())
    web_core->Update();
Sleep(300);
web_core->Update();
surface = (BitmapSurface*)view->surface();
The part loading the texture:
glBindTexture(GL_TEXTURE_2D, 13);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, WIDTH, HEIGHT, 0,
GL_BGRA_EXT, GL_UNSIGNED_BYTE, surface);
This is what I did. I know you solved the problem, but it might help someone in the future.
int w = surface->width();
int h = surface->height();
unsigned char *buffer = new unsigned char[w * h * 4];
surface->CopyTo(buffer, w * 4, 4, false, false);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_BGRA_EXT, GL_UNSIGNED_BYTE, buffer);
delete[] buffer;
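A plausible explanation for the random shift, inferred from the code rather than confirmed by the original poster: the BitmapSurface keeps its pixels with its own row pitch, which need not equal width * 4, and the original call also passed the surface object pointer itself to glTexImage2D instead of its pixel data. CopyTo with a destination row span of w * 4 produces a tightly packed BGRA buffer that glTexImage2D can read directly, which removes both problems.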