opengl - black textures, how to use glm::vec* on buffers? - c++

I'm currently learning some OpenGL. In the past, I stored my vertex data, texture position data and so on in arrays created with malloc.
Now I'm trying to achieve the same thing with std::vector. I also wrote a simple .obj loader based on this tutorial: http://www.opengl-tutorial.org/beginners-tutorials/tutorial-7-model-loading.
But my textures stay black for some reason. All of this lives in an "Object" class.
I'll post the code I think is relevant; if you want to see the whole source, let me know and I'll post it.
Object.hpp:
class Object {
public:
    Object(std::string objName);
    ~Object(void);
    bool loadTexture(std::string textureName);
    void setPos(glm::vec3 pos);
    void setDir(glm::vec3 dir);
    void draw(GLfloat time);
private:
    std::vector<glm::vec3> vertexData;
    std::vector<glm::vec3> elementData;
    std::vector<glm::vec2> texturePosData;
    std::vector<glm::vec3> vertexNormalData;
    GLuint vertexArrayObject;
    GLuint vertexBufferObject;
    GLuint elementBufferObject;
    GLuint texturePosBufferObject;
    ShaderProgram *shader;
    GLuint shaderProgram;
    GLuint textureObject;
    GLuint textureUnit;
    static GLuint globalTextureCount;
    glm::vec3 position;
    glm::vec3 direction;
    // Uniforms
    GLint model, uniView, uniProj, overrideColor;
};
There is currently some unused stuff in there (like setPos() and setDir()) that I will use as soon as the textures work.
Object.cpp:
I know the constructor and object loader are working correctly, because the vertex data and texture position data end up in the vectors.
bool Object::loadTexture(std::string textureName) {
    textureUnit = globalTextureCount;
    globalTextureCount++;
    glBindVertexArray(vertexArrayObject);
    // Create texture position buffer
    glGenBuffers(1, &texturePosBufferObject);
    glBindBuffer(GL_ARRAY_BUFFER, texturePosBufferObject);
    glBufferData(GL_ARRAY_BUFFER, texturePosData.size() * sizeof(glm::vec2), &texturePosData[0], GL_STATIC_DRAW);
    glGenTextures(1, &textureObject);
    glActiveTexture(GL_TEXTURE0 + textureUnit);
    glBindTexture(GL_TEXTURE_2D, textureObject);
    int width, height;
    unsigned char *image = NULL;
    image = SOIL_load_image(textureName.c_str(), &width, &height, 0, SOIL_LOAD_RGB);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, image);
    SOIL_free_image_data(image);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_MIRRORED_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glUniform1i(glGetUniformLocation(shaderProgram, "texKitten"), textureUnit);
    GLint texAttrib = glGetAttribLocation(shaderProgram, "texcoord");
    glEnableVertexAttribArray(texAttrib);
    glVertexAttribPointer(texAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
    return true;
}
Is the way I am using glBufferData with those vectors right?
And what could be the problem with the textures?
glGetError() doesn't report an error.
Thank you for your help!
David

The problem is that you're calling glBufferData before you've loaded the texture into your vector. The call to glBufferData copies the contents of your vector into GPU memory, but your vector doesn't have the texture loaded into it yet, so it's probably full of zeros (just initialized).
What I suggest is to wait until after SOIL_load_image returns before calling glBufferData.
[EDIT]
I was way off base in my answer there. I also didn't see the comment on your question saying that you found the root cause. What threw me off was that you were creating and loading your vertex array buffer inside the loadTexture method. The vertex array buffer doesn't really have anything to do with the texture or loading it. It might make more sense, and make your code more readable, to deal with the vertex array buffer somewhere other than in loadTexture. It certainly threw me off when reading your code! :)
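For example, the texture-coordinate buffer could be uploaded in a separate setup step so loadTexture deals only with the texture itself. A minimal sketch of that split (the method name setupTexCoordBuffer is hypothetical; it reuses the members from the Object class above, and uploading straight from a std::vector via data()/size() is fine, since its storage is contiguous):
// Hypothetical setup method: buffer work moved out of loadTexture.
void Object::setupTexCoordBuffer() {
    glBindVertexArray(vertexArrayObject);
    glGenBuffers(1, &texturePosBufferObject);
    glBindBuffer(GL_ARRAY_BUFFER, texturePosBufferObject);
    // std::vector is contiguous, so data() + size() gives glBufferData
    // exactly the block of memory it needs.
    glBufferData(GL_ARRAY_BUFFER, texturePosData.size() * sizeof(glm::vec2),
                 texturePosData.data(), GL_STATIC_DRAW);
    GLint texAttrib = glGetAttribLocation(shaderProgram, "texcoord");
    glEnableVertexAttribArray(texAttrib);
    glVertexAttribPointer(texAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
}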

Related

opengl - does VAO store texture unit calls as well

The very basic foundation for creating a VAO, VBO and applying a texture goes like this:
unsigned int VBO, VAO, EBO; //#1
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
// texture coord attribute
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(6 * sizeof(float)));
glEnableVertexAttribArray(2); //#2
And for creating a texture:
unsigned int texture1, texture2;
// texture 1
// ---------
glGenTextures(1, &texture1); //#3
glBindTexture(GL_TEXTURE_2D, texture1); //#4
// set the texture wrapping parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); // set texture wrapping to GL_REPEAT (default wrapping method)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
// set texture filtering parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// load image, create texture and generate mipmaps
int width, height, nrChannels;
stbi_set_flip_vertically_on_load(true); // tell stb_image.h to flip loaded textures on the y-axis.
// The FileSystem::getPath(...) is part of the GitHub repository so we can find files on any IDE/platform; replace it with your own image path.
unsigned char *data = stbi_load(FileSystem::getPath("resources/textures/container.jpg").c_str(), &width, &height, &nrChannels, 0);
if (data)
{
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
    glGenerateMipmap(GL_TEXTURE_2D);
}
else
{
    std::cout << "Failed to load texture" << std::endl;
}
stbi_image_free(data);
//some more code, inside the render loop
glActiveTexture(GL_TEXTURE0); //#5
glBindTexture(GL_TEXTURE_2D, texture1); //#6
glDrawElements(...);
glfwSwapBuffers(...);
//end of render loop
According to what I've understood from learnopengl, everything from line #1 to line #2 is stored inside the VAO, which is why we don't have to write that stuff over and over again; we only switch the VAO for drawing.
But is the code from line #3 to line #6 also stored in the VAO? If it is, why don't we write line #5 directly after line #3? And if it isn't, how do we link a specific texture unit to a specific VAO if we are using multiple VAOs?
EDIT:
After glBindTexture(GL_TEXTURE_2D, texture1), the texture calls that follow affect the currently bound texture, don't they? Then does that mean glActiveTexture(...) also affects the currently bound texture? And why do we bind the texture again with glBindTexture(...) after activating the unit?
The reason the code between #1 and #2 is "stored inside the VAO" is because that's what those functions do. Those functions set state within the currently bound vertex array object.
A vertex array object, as the name suggests, is an object that deals with vertex arrays. Texture management, texture storage, and using textures for rendering have nothing to do with vertex arrays. As such, none of those functions in any way affects VAO state, in much the same way that modifying the contents of a vector<int> won't modify the contents of some list<float> object.
how do we link a specific texture unit to a specific VAO if we are using multiple VAOs?
Again, VAOs deal with vertex data. Textures aren't vertex data, so VAOs don't care about them and vice-versa. You don't link textures and VAOs.
You use VAOs and textures (among other things) to perform rendering. VAOs and textures each do different things within the process of rendering and thus have no direct relationship to one another.
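To make that separation concrete, a typical render loop switches the two kinds of state independently. A sketch (vaoA, textureA and the other names are illustrative):
glUseProgram(program);
glUniform1i(glGetUniformLocation(program, "tex"), 0); // sampler reads unit 0, set once
glActiveTexture(GL_TEXTURE0);

// Object A: its vertex state, its texture.
glBindVertexArray(vaoA);
glBindTexture(GL_TEXTURE_2D, textureA);
glDrawElements(GL_TRIANGLES, countA, GL_UNSIGNED_INT, 0);

// Object B: different VAO, different texture, same unit.
glBindVertexArray(vaoB);
glBindTexture(GL_TEXTURE_2D, textureB);
glDrawElements(GL_TRIANGLES, countB, GL_UNSIGNED_INT, 0);
Neither glActiveTexture nor glBindTexture touches the bound VAO, and binding a different VAO leaves the texture bindings alone.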

How to debug default frame and depth buffers?

I'm trying to implement shadows in my custom renderer through dynamic shadow mapping and forward rendering (deferred rendering will be implemented later). Everything renders correctly to the Framebuffer used to generate the shadow map. However, when using the default Framebuffer to render the scene normally, only the skybox gets rendered (so the default Framebuffer is in use). My only hypothesis is that the problem is related to the depth buffer, since disabling the call to DrawActors(...) (in ForwardRenderShadows) appears to solve the problem, but then I can't generate my depth map. Any suggestions on the matter?
Code:
void Scene::DrawActors(const graphics::Shader& shader)
{
    for(const auto& actor : actors_)
        actor->Draw(shader);
}

template <typename T>
void ForwardRenderShadows(const graphics::Shader& shadow_shader, const std::vector<T>& lights)
{
    for(const auto& light : lights)
    {
        if(light->Shadow())
        {
            light->DrawShadows(shadow_shader);
            DrawActors(shadow_shader); // removing this line "solves the problem"
            glBindFramebuffer(GL_FRAMEBUFFER, 0);
        }
    }
}
/*
Shadow mapping is only implemented for directional lights at the moment; this is the
relevant code that gets called when the process starts. More code details at the end
of the snippet.
*/
void DirectionalLight::SetupShadows()
{
    glGenFramebuffers(1, &framebuffer_);
    glGenTextures(1, &shadow_map_);
    glBindTexture(GL_TEXTURE_2D, shadow_map_);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, constants::SHADOW_WIDTH, constants::SHADOW_HEIGHT, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, shadow_map_, 0);
    glDrawBuffer(GL_NONE);
    glReadBuffer(GL_NONE);
    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
        throw std::runtime_error("Directional light framebuffer is not complete \n");
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    ShadowSetup(true);
}

void DirectionalLight::DrawShadows(const graphics::Shader& shader)
{
    if(!ShadowSetup())
        SetupShadows();
    glViewport(0, 0, constants::SHADOW_WIDTH, constants::SHADOW_HEIGHT);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glClear(GL_DEPTH_BUFFER_BIT);
    shader.Use();
    projection_ = clutch::Orthopraphic(-10.0f, 10.0f, -10.0f, 10.0f, 1.0f, 100.0f);
    transform_  = clutch::LookAt(direction_ * -1.0f, {0.0f, 0.0f, 0.0f, 0.0f}, {0.0f, 1.0f, 0.0f, 0.0f});
    shader.SetUniformMat4("light_transform", projection_ * transform_);
}

void DirectionalLight::Draw(const graphics::Shader& shader)
{
    shader.SetUniform4f("light_dir", direction_);
    shader.SetUniform4f("light_color", color_);
    shader.SetUniformMat4("light_transform", transform_);
    shader.SetUniformMat4("light_projection", projection_);
    shader.SetUniformInt("cast_shadow", shadows_);
    glActiveTexture(GL_TEXTURE12);
    shader.SetUniformInt("shadow_map", 12);
    glBindTexture(GL_TEXTURE_2D, shadow_map_);
}
Code in the repo: https://github.com/rxwp5657/Nitro
Relevant files for the problem:
include/core/scene.hpp
include/core/directional_light.hpp
include/core/light_shadow.hpp
include/graphics/mesh.hpp
src/core/scene.cpp
src/core/directional_light.cpp
src/core/light_shadow.cpp
src/graphics/mesh.cpp
Finally, what I have tried so far:
Deactivating depth testing with glDepthMask(GL_FALSE) and glDisable(GL_DEPTH_TEST) // same problem
Changing the depth function to glDepthFunc(GL_ALWAYS) // no desired results
If you have an NVIDIA graphics card, you could have a look at Nsight. You can capture a frame and view all the GL calls that occurred.
Then you can select an event, for instance event 22 in my example, and inspect all textures, the color buffer, the depth buffer and the stencil buffer.
Furthermore, you can have a look at all GL state parameters at a given event.
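If a frame debugger isn't available, OpenGL 4.3's debug output can serve a similar purpose and reports errors as they happen. A minimal sketch of hooking it up (assuming a 4.3+ debug context was requested at window creation):
#include <iostream>

// Called by the driver for every debug message.
void GLAPIENTRY OnGlMessage(GLenum source, GLenum type, GLuint id,
                            GLenum severity, GLsizei length,
                            const GLchar* message, const void* userParam)
{
    std::cerr << "GL debug: " << message << std::endl;
}

// After context creation:
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS); // report at the offending call site
glDebugMessageCallback(OnGlMessage, nullptr);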
OK, after using apitrace I found out that the VBO changes when switching from a custom Framebuffer to the default one. Because of this, the solution to the problem is to set the VBO again after switching to the default Framebuffer.
Based on the code of the project, the solution is to call the Setup function again after switching to the default Framebuffer.
Setup function of the Mesh class:
void Mesh::Setup(const Shader& shader)
{
    glGenVertexArrays(1, &vao_);
    glBindVertexArray(vao_);
    glGenBuffers(1, &elementbuffer_);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer_);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices_.size() * sizeof(unsigned int), &indices_[0], GL_STATIC_DRAW);
    glGenBuffers(1, &vbo_);
    glBindBuffer(GL_ARRAY_BUFFER, vbo_);
    glBufferData(GL_ARRAY_BUFFER, vertices_.size() * sizeof(Vertex), &vertices_[0], GL_STATIC_DRAW);
    shader.PosAttrib("aPosition",  3, GL_FLOAT, sizeof(Vertex), 0);
    shader.PosAttrib("aNormal",    3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, normal));
    shader.PosAttrib("aTexCoord",  2, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, tex_coord));
    shader.PosAttrib("aTangent",   3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, tangent));
    shader.PosAttrib("aBitangent", 3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, bitangent));
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
    loaded_ = true;
}
[Screenshot: vertices buffer after switching to the default Framebuffer]
[Screenshot: vertices buffer with no Framebuffer switching (no shadow map generated)]
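Note that re-running Setup regenerates the VAO and buffers every time. If the underlying issue is just which vertex array (and viewport) is current when drawing to the default Framebuffer, re-binding the existing VAO before the draw should be enough. A sketch of that cheaper alternative (member names as in the Mesh class above; screen_width and screen_height are placeholders):
// Back to the default framebuffer for the normal scene pass.
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, screen_width, screen_height); // DrawShadows changed the viewport
glBindVertexArray(vao_);                       // restore vertex state, no re-upload
// ... issue draw calls ...
glBindVertexArray(0);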

C++ - OpenGL crash on glDrawElements()

So in my program I have a number of textures that I am trying to display. Earlier in my code I generate the VAO for the textures and the ibo (index buffer) for each texture. But when I run my code it crashes at the glDrawElements() call, inside nvoglv32.dll. I've read around that a bug in the NVIDIA driver might be causing it, but I doubt it. Something is probably wrong when I generate or bind the VAO or ibo, but I have no idea where. Here's the section of code where the error happens:
for (int i = 0; i < NUM_TEXTURES; i++){
    glBindVertexArray(VAO_T[i]);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo[i]);
    glBindTexture(GL_TEXTURE_2D, texture[i]);
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, BUFFER_OFFSET(0)); //error right here
}
This is the error I get when running in debug:
Unhandled exception at 0x0263FE4A in Comp465.exe: 0xC0000005: Access violation reading location 0x00000000.
Here's my code where I generate the VAO, ibo, and textures:
glGenVertexArrays(NUM_TEXTURES, VAO_T);
glGenBuffers(NUM_TEXTURES, VBO_T);
glGenBuffers(NUM_TEXTURES, ibo);
glGenTextures(NUM_TEXTURES, texture);
...
for (int i = 0; i < NUM_TEXTURES; i++){
    //Tell GL which VAO we are using
    glBindVertexArray(VAO_T[i]);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo[i]);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices[i]), indices[i], GL_STATIC_DRAW);
    //initialize a buffer object
    glEnableVertexAttribArray(VBO_T[i]);
    glBindBuffer(GL_ARRAY_BUFFER, VBO_T[i]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(point[i]) + sizeof(texCoords), NULL, GL_STATIC_DRAW);
    glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(point[i]), point[i]);
    glBufferSubData(GL_ARRAY_BUFFER, sizeof(point[i]), sizeof(texCoords), texCoords);
    GLuint vPosition = glGetAttribLocation(textureShader, "vPosition");
    glVertexAttribPointer(vPosition, 4, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
    glEnableVertexAttribArray(vPosition);
    GLuint vTexCoord = glGetAttribLocation(textureShader, "vTexCoord");
    glVertexAttribPointer(vTexCoord, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(sizeof(point[i])));
    glEnableVertexAttribArray(vTexCoord);
    //Get handles for the uniform structures in the texture shader program
    VP = glGetUniformLocation(textureShader, "ViewProjection");
    //Bind the texture that we want to use
    glBindTexture(GL_TEXTURE_2D, texture[i]);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
    // set texture parameters
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    //Load texture
    texture[i] = loadRawTexture(texture[i], TEX_FILE_NAME[i], PixelSizes[i][0], PixelSizes[i][1]);
    if (texture[i] != 0) {
        printf("texture loaded \n");
    }
    else
        printf("Error loading texture \n");
}
This statement certainly looks wrong:
glEnableVertexAttribArray(VBO_T[i]);
glEnableVertexAttribArray() takes an attribute location as its argument, not a buffer id. You actually use it correctly later:
GLuint vPosition = glGetAttribLocation(textureShader, "vPosition");
...
glEnableVertexAttribArray(vPosition);
GLuint vTexCoord = glGetAttribLocation(textureShader, "vTexCoord");
...
glEnableVertexAttribArray(vTexCoord);
So you should be able to simply delete that extra call with the invalid argument.
Apart from that, I noticed a couple of things that look slightly off, or at least suspicious:
The following call is meaningless if you use the programmable pipeline, which you are, based on what's shown in the rest of the code. It can be deleted.
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
This is probably just a naming issue, but textureShader needs to be a program object, i.e. the return value of a glCreateProgram(), not a shader object.
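That is, the id passed to glGetAttribLocation/glGetUniformLocation has to come from the link step. Roughly (a sketch; the compile calls are elided):
GLuint vs = glCreateShader(GL_VERTEX_SHADER);   // compile vertex shader source here
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER); // compile fragment shader source here
GLuint textureShader = glCreateProgram();       // a program object, not a shader object
glAttachShader(textureShader, vs);
glAttachShader(textureShader, fs);
glLinkProgram(textureShader);
// Only after a successful link do the glGetAttribLocation /
// glGetUniformLocation queries against textureShader make sense.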
While inconclusive without seeing the declaration, I have a bad feeling about this and a couple of other similar calls:
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices[i]), indices[i], GL_STATIC_DRAW);
If indices[i] is an array, i.e. the declaration looks something like this:
GLuint indices[NUM_TEXTURES][INDEX_COUNT];
then this is OK. But if indices[i] is a pointer, or decayed to a pointer when it was passed as a function argument, sizeof(indices[i]) will be the size of a pointer. You may want to double check that it gives the actual size of the index array. The same goes for other similar calls.
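To illustrate the pitfall with hypothetical declarations:
GLuint indices[NUM_TEXTURES][INDEX_COUNT]; // real 2D array:
                                           // sizeof(indices[0]) == INDEX_COUNT * sizeof(GLuint)
GLuint* indicesPtr[NUM_TEXTURES];          // array of pointers:
                                           // sizeof(indicesPtr[0]) == sizeof(GLuint*), e.g. 8 on
                                           // a 64-bit build -- far too small for glBufferData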

How to change the size of a texture when displaying it on the screen in OpenGL (version > 3.3)?

How can I change the size of a texture on the screen? I want to make a little 2D game to learn OpenGL. :-) I tried changing the vertices[] geometry, but the texture doesn't stretch with it. I use something like this code:
tempz = IMG_Load("graphics/menu_start.png");
SDL_InvertSurface(tempz);
//setup quad geometry
//setup quad vertices
vertices[0] = Vector2D(0.0,0.0);
vertices[1] = Vector2D(1.0,0.0);
vertices[2] = Vector2D(1.0,1.0);
vertices[3] = Vector2D(0.0,1.0);
//fill quad indices array
GLushort* id = &indices[0];
*id++ = 0;
*id++ = 1;
*id++ = 2;
*id++ = 0;
*id++ = 2;
*id++ = 3;
//setup quad vao and vbo stuff
glGenVertexArrays(1, &vaoID);
glGenBuffers(1, &vboVerticesID);
glGenBuffers(1, &vboIndicesID);
glBindVertexArray(vaoID);
glBindBuffer (GL_ARRAY_BUFFER, vboVerticesID);
//pass quad vertices to buffer object
glBufferData (GL_ARRAY_BUFFER, sizeof(vertices), &vertices[0], GL_STATIC_DRAW);
//enable vertex attribute array for position
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE,0,0);
//pass quad indices to element array buffer
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboIndicesID);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), &indices[0], GL_STATIC_DRAW);
int texture_width = 0, texture_height = 0, channels=0;
texture_width = tempz->w;
texture_height = tempz->h;
//setup OpenGL texture and bind to texture unit 0
glGenTextures(1, &textureID);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID);
//set texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//allocate texture
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texture_width, texture_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, tempz->pixels);
programID = LoadShaders( "shaders/shader.vert", "shaders/shader.frag" );
glUseProgram(programID);
It would help if you posted the rest, e.g. the shaders, but what I find strange is that you have no texture coordinates in your vertices. Usually they'd be (0,0), (0,1), (1,0) and (1,1) at the corners. These are attributes in your vertices just like the physical positions. Then you can wobble the physical position all over the place and still see the texture mapped the same way over the face.
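For instance, the quad above could carry texture coordinates as a second attribute. A minimal sketch of that (interleaved layout; attribute locations 0 and 1 are assumed to match the shader):
// Interleaved quad: x, y, u, v per vertex.
float quad[] = {
//   x     y     u     v
    0.0f, 0.0f, 0.0f, 0.0f,
    1.0f, 0.0f, 1.0f, 0.0f,
    1.0f, 1.0f, 1.0f, 1.0f,
    0.0f, 1.0f, 0.0f, 1.0f,
};
glBufferData(GL_ARRAY_BUFFER, sizeof(quad), quad, GL_STATIC_DRAW);
// position -> location 0, texcoord -> location 1
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)(2 * sizeof(float)));
glEnableVertexAttribArray(1);
// Now scaling the x/y positions stretches the quad on screen while the
// u/v coordinates keep mapping the full texture across the face.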

OpenGL - texelFetch fetches nothing

I'm trying to draw a textured plane following the OpenGL SuperBible, 6th ed., but for some reason I'm failing.
Here's my texture initialization code.
GLuint texture;
glGenTextures(1, &texture);
glActiveTexture(GL_TEXTURE0);
int w = 256;
int h = 256;
glBindTexture(GL_TEXTURE_2D, texture);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA32F, w, h);
float * data = new float[w * h * 4];
//This just creates some image data
generateTexture(data, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
delete [] data;
This is the plane object. The object itself is drawn, just untextured.
glGenBuffers(1, &planeBuffer);
glBindBuffer(GL_ARRAY_BUFFER, planeBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(planePositions), planePositions, GL_STATIC_DRAW);
These are my vertex and fragment shaders.
#version 430 core
layout (location = 0) in vec3 position;
uniform mat4 proj, view;
void main(void){
    gl_Position = proj * view * vec4(position, 1.0);
}

#version 430 core
uniform sampler2D s;
out vec4 frag_color;
void main () {
    frag_color = texelFetch(s, ivec2(gl_FragCoord.xy), 0);
}
I draw like this
glUseProgram(textureProgram);
GLuint projLocation = glGetUniformLocation (textureProgram, "proj");
glUniformMatrix4fv (projLocation, 1, GL_FALSE, projectionSource);
GLuint viewLocation = glGetUniformLocation (textureProgram, "view");
glUniformMatrix4fv (viewLocation, 1, GL_FALSE, viewSource);
glBindBuffer(GL_ARRAY_BUFFER, planeBuffer);
GLuint positionLocation = glGetAttribLocation(textureProgram, "position");
glVertexAttribPointer (positionLocation, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray (positionLocation);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
GLuint ts = glGetUniformLocation (textureProgram, "s");
glUniform1i(ts, 0);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray (positionLocation);
//Afterwards I draw more geometry with other shaders. This shows correctly
glUseProgram(shaderProgram);
//Bind buffers, matrices, drawarrays, etc
But I just get a black untextured plane. If I override the frag_color assignment by adding another line afterwards, like so
frag_color = vec4(1.0);
it works, i.e. I get a white plane, so the shaders seem to be working correctly.
I don't get any errors whatsoever from glGetError().
Compatibility:
OpenGL version supported: 4.2.12337 Compatibility Profile Context 13.101
GLSL version supported: 4.30
The data array does contain values between 0 and 1. I have also tried hard-coding some random coordinates into the texelFetch() function, but I always get a black plane. It looks as though the sampler2D contained nothing but zeroes. I have also tried hard-coding the values contained in data to 1.0, 255.0, but nothing.
Either the book fails to mention something or I am missing something stupid. Why does the texture refuse to show on the plane?
EDIT: I added some code to the drawing part. The rest of the geometry that I draw (with different shaders) shows perfectly. None of it uses textures.
I finally found a way around this, although it is not clear what exactly the problem was.
I got the texture to show using glTexImage2D instead of glTexStorage2D and glTexSubImage2D.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_FLOAT, data);
I also had to set the parameters explicitly, even though I'm using texelFetch().
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
Binding the uniform explicitly was not necessary.
What is strange is that, according to the docs, glTexStorage2D is equivalent to this:
for (i = 0; i < levels; i++) {
    glTexImage2D(target, i, internalformat, width, height, 0, format, type, NULL);
    width = max(1, (width / 2));
    height = max(1, (height / 2));
}
However, this combination does not work:
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
I still have to figure out why that is, but at least I got it to work. (A likely reason: unlike glTexImage2D, glTexStorage2D only accepts sized internal formats such as GL_RGBA8 or GL_RGBA32F; the unsized GL_RGBA is rejected with GL_INVALID_ENUM, so no storage is ever allocated.)
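For reference, the immutable-storage path should work once a sized internal format is used. A minimal sketch matching the texture setup at the top of this question:
// Allocate immutable storage with a *sized* internal format, then upload:
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA32F, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
// With a single mip level, pick a non-mipmapping min filter so the texture
// is complete (the default min filter, GL_NEAREST_MIPMAP_LINEAR, expects mipmaps):
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);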