Can't render to texture (anymore) - OpenGL

I implemented a volume rendering demo application a few months ago. Everything worked fine on Windows XP 32-bit. I used OpenGL with GLEW, and SFML 2.0-rc as the windowing and input library.
I recently moved to Windows 7 64-bit.
The program did not work out of the box; SFML seemed to crash. I changed the windowing library to GLFW, still using GLEW. Going through the code, I realized that the very basic render-to-texture technique did not work anymore.
So I broke everything down to a minimal case so I could present it here. (I also made a port to Qt 5.0.2 to cross-check my assumptions: same diagnosis.)
Here is the problem:
The program is supposed to render a simple unit cube with front-face culling to a texture in pass 1. Then, in pass 2, I switch to back-face culling and render the same cube again. In the fragment shader of pass 2 I have the option to read the texture from pass 1 and write it to the output, but I get a big black screen where I should see the front-face-culled cube.
Initialization code:
glGenFramebuffers(1, &raycastingFrameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, raycastingFrameBuffer);
glGenTextures(1, &cubeRenderTexture);
glBindTexture(GL_TEXTURE_2D, cubeRenderTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, viewWidth, viewHeight, 0, GL_RGBA, GL_FLOAT, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, cubeRenderTexture, 0);
GLenum DrawBuffers[1] = {GL_COLOR_ATTACHMENT0};
glDrawBuffers(1, DrawBuffers);
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
return false;
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Render passes:
// PASS ONE :
// render the unit cube (with front face culling) to texture
// we end up with a texture whose colors represent outgoing rays locations on the box
//
glBindFramebuffer(GL_FRAMEBUFFER, raycastingFrameBuffer);
glEnable(GL_CULL_FACE);
glDisable(GL_DEPTH_TEST);
glViewport(0, 0, viewWidth, viewHeight);
glCullFace(GL_FRONT);
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderRaycasting1.getProgramID());
glBindBuffer(GL_ARRAY_BUFFER, cube_VBO_ID);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (char*)NULL + 0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, (char*)NULL + 108*sizeof(float));
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glUniformMatrix4fv(glGetUniformLocation(shaderRaycasting1.getProgramID(), "modelview"), 1, GL_TRUE, modelview.getData());
glUniformMatrix4fv(glGetUniformLocation(shaderRaycasting1.getProgramID(), "projection"), 1, GL_TRUE, projection.getData());
glDrawArrays(GL_TRIANGLES, 0, 36);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(0);
glUseProgram(0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glEnable(GL_DEPTH_TEST);
// PASS TWO :
// render the unit cube (with back face culling this time)
// we get colors representing ray entrance locations on the box
//
glViewport(0, 0, viewWidth, viewHeight);
glCullFace(GL_BACK);
glClearColor(0.0, 0.0, 0.0, 0.0);
glClearDepth(1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderRaycasting2.getProgramID());
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, cubeRenderTexture);
glUniform1i(cubeRenderTextureID, 0);
glUniform1i(glGetUniformLocation(shaderRaycasting2.getProgramID(), "displayWidth"), (GLint) viewWidth);
glUniform1i(glGetUniformLocation(shaderRaycasting2.getProgramID(), "displayHeight"), (GLint) viewHeight);
glBindBuffer(GL_ARRAY_BUFFER, cube_VBO_ID);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (char*)NULL + 0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, (char*)NULL + 108*sizeof(float));
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glUniformMatrix4fv(glGetUniformLocation(shaderRaycasting1.getProgramID(), "modelview"), 1, GL_TRUE, modelview.getData());
glUniformMatrix4fv(glGetUniformLocation(shaderRaycasting1.getProgramID(), "projection"), 1, GL_TRUE, projection.getData());
glDrawArrays(GL_TRIANGLES, 0, 36);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(0);
glActiveTexture(0);
glUseProgram(0);
... and finally, the minimal fragment shader:
#version 330
in vec3 color;
uniform int displayWidth;
uniform int displayHeight;
uniform sampler2D cubeTex;
layout (location = 0) out vec4 outColor;
void main()
{
float viewWidth = displayWidth;
float viewHeight = displayHeight;
vec3 boxIn = color;
vec2 cubeCoord = vec2( (gl_FragCoord.x - 0.5) / viewWidth, (gl_FragCoord.y - 0.5) / viewHeight);
vec3 boxOut = texture(cubeTex, cubeCoord).rgb;
vec3 rayColor = boxOut;
outColor = vec4(rayColor, 1); // i get a black screen here ...
}
Some last words:
- Everything compiles with no warnings and no errors (same for the Qt 5.0.2 port of the demo).
- I tried every possible little tweak: glEnable(...) calls, changing the OpenGL version, using texelFetch, and so on. Obviously I can't find what's wrong with this code.
- The original code was much more complex and did run, but on XP and not on Windows 7.
- etc.
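For debugging, a quick way to see whether pass 1 writes anything at all is to read the pass-1 texture back and inspect a texel on the CPU. This is only a sketch using the names from the code above (glGetTexImage is standard desktop GL):
#include <iostream>   // file-scope includes
#include <vector>
// Debugging sketch: read back the pass-1 texture right after pass 1 and print
// the texel at the center of the viewport; if the front-face-culled cube was
// rendered, it should not be (0, 0, 0, 0).
std::vector<float> texels(static_cast<size_t>(viewWidth) * viewHeight * 4);
glBindTexture(GL_TEXTURE_2D, cubeRenderTexture);
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, texels.data());
glBindTexture(GL_TEXTURE_2D, 0);
const size_t c = (static_cast<size_t>(viewHeight / 2) * viewWidth + viewWidth / 2) * 4;
std::cout << texels[c] << " " << texels[c + 1] << " "
          << texels[c + 2] << " " << texels[c + 3] << std::endl;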

Did you install the original vendor drivers for your GPU, as downloaded from the vendor's driver support website, or are you still running the stripped-down versions that ship with Windows 7?
The drivers shipping with Windows 7 do not offer modern OpenGL support. Microsoft strips them of anything OpenGL, and the default OpenGL implementation of Windows 7 is just an OpenGL 1.4 emulation on top of Direct3D.
If you haven't already, download the original drivers from your GPU's vendor and install them, then report back whether this changes the outcome.
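A quick way to verify which OpenGL implementation the program actually ended up with is to print the context strings right after context creation. If the version string starts with 1.x or the renderer reports something like "GDI Generic", the vendor ICD is not being used. This is plain OpenGL, nothing specific to the demo:
// Print which OpenGL implementation the current context uses.
// A 1.x version / "GDI Generic" renderer means the Windows fallback, not the vendor driver.
std::cout << "Vendor:   " << reinterpret_cast<const char*>(glGetString(GL_VENDOR))   << std::endl;
std::cout << "Renderer: " << reinterpret_cast<const char*>(glGetString(GL_RENDERER)) << std::endl;
std::cout << "Version:  " << reinterpret_cast<const char*>(glGetString(GL_VERSION))  << std::endl;
std::cout << "GLSL:     " << reinterpret_cast<const char*>(glGetString(GL_SHADING_LANGUAGE_VERSION)) << std::endl;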


How to debug default frame and depth buffers?

I'm trying to implement shadows in my custom renderer using dynamic shadow mapping and forward rendering (deferred rendering will be implemented later). Everything renders correctly to the framebuffer used to generate the shadow map. However, when using the default framebuffer to render the scene normally, only the skybox gets rendered (which tells me the default framebuffer is in use). My only hypothesis is that the problem is related to the depth buffer, since disabling the call to DrawActors(...) (in ForwardRenderShadows) appears to solve the problem, but then I can't generate my depth map. Any suggestions on the matter?
Code:
void Scene::DrawActors(const graphics::Shader& shader)
{
    for(const auto& actor : actors_)
        actor->Draw(shader);
}

template <typename T>
void ForwardRenderShadows(const graphics::Shader& shadow_shader, const std::vector<T>& lights)
{
    for(const auto& light : lights)
    {
        if(light->Shadow())
        {
            light->DrawShadows(shadow_shader);
            DrawActors(shadow_shader); //removing this line "solves the problem"
            glBindFramebuffer(GL_FRAMEBUFFER, 0);
        }
    }
}
/*
shadow mapping is only implemented on directional lights for the moment, and that is the
relevant code that gets called when the process starts, more code details at the end of code
snippet.
*/
void DirectionalLight::SetupShadows()
{
glGenFramebuffers(1, &framebuffer_);
glGenTextures(1, &shadow_map_);
glBindTexture(GL_TEXTURE_2D, shadow_map_);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, constants::SHADOW_WIDTH, constants::SHADOW_HEIGHT, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, shadow_map_, 0);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
throw std::runtime_error("Directional light framebuffer is not complete \n");
glBindFramebuffer(GL_FRAMEBUFFER, 0);
ShadowSetup(true);
}
void DirectionalLight::DrawShadows(const graphics::Shader& shader)
{
if(!ShadowSetup())
SetupShadows();
glViewport(0, 0, constants::SHADOW_WIDTH, constants::SHADOW_HEIGHT);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
glClear(GL_DEPTH_BUFFER_BIT);
shader.Use();
projection_ = clutch::Orthopraphic(-10.0f, 10.0f, -10.0f, 10.0f, 1.0f, 100.0f);
transform_ = clutch::LookAt(direction_ * -1.0f, {0.0f, 0.0f, 0.0f, 0.0f}, {0.0f, 1.0f, 0.0f, 0.0f});
shader.SetUniformMat4("light_transform", projection_ * transform_);
}
void DirectionalLight::Draw(const graphics::Shader& shader)
{
shader.SetUniform4f("light_dir", direction_);
shader.SetUniform4f("light_color", color_);
shader.SetUniformMat4("light_transform", transform_);
shader.SetUniformMat4("light_projection", projection_);
shader.SetUniformInt("cast_shadow", shadows_);
glActiveTexture(GL_TEXTURE12);
shader.SetUniformInt("shadow_map", 12);
glBindTexture(GL_TEXTURE_2D, shadow_map_);
}
Code in the repo: https://github.com/rxwp5657/Nitro
Relevant Files for the problem:
include/core/scene.hpp
include/core/directional_light.hpp
include/core/light_shadow.hpp
include/core/directional_light.hpp
include/graphics/mesh.hpp
src/core/scene.cpp
src/core/directional_light.cpp
src/core/light_shadow.cpp
src/core/directional_light.cpp
src/graphics/mesh.cpp
Finally, what I have tried so far is:
- Deactivating depth testing with glDepthMask(GL_FALSE) and glDisable(GL_DEPTH_TEST) (same problem).
- Changing the depth function to glDepthFunc(GL_ALWAYS) (no desired results).
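For reference, a cheap sanity check one can drop before the problematic draw calls is to query which framebuffer is currently bound for drawing and whether it is complete (standard GL, nothing engine-specific; needs <iostream>):
// Sanity check: which framebuffer is bound for drawing right now, and is it complete?
GLint bound_fbo = 0;
glGetIntegerv(GL_DRAW_FRAMEBUFFER_BINDING, &bound_fbo);
const GLenum status = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
std::cout << "draw FBO: " << bound_fbo
          << ", complete: " << (status == GL_FRAMEBUFFER_COMPLETE ? "yes" : "no") << std::endl;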
If you have an NVIDIA graphics card, you could have a look at Nsight. You can capture a frame and view all the GL calls that occurred.
Then you can select an event, for instance event 22 in my example, and inspect all textures, the color buffer, the depth buffer and the stencil buffer.
Furthermore, you can have a look at all GL state parameters at a given event.
OK, after using apitrace I found out that the VBO state changes when switching from a custom framebuffer to the default one. Because of this, the solution to the problem is to set up the VBO again after switching to the default framebuffer.
Based on the code of the project, the solution is to call the Setup function again after switching to the default framebuffer.
Setup function of the Mesh class:
void Mesh::Setup(const Shader& shader)
{
glGenVertexArrays(1, &vao_);
glBindVertexArray(vao_);
glGenBuffers(1, &elementbuffer_);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer_);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices_.size() * sizeof(unsigned int), &indices_[0], GL_STATIC_DRAW);
glGenBuffers(1, &vbo_);
glBindBuffer(GL_ARRAY_BUFFER, vbo_);
glBufferData(GL_ARRAY_BUFFER, vertices_.size() * sizeof(Vertex), &vertices_[0], GL_STATIC_DRAW);
shader.PosAttrib("aPosition", 3, GL_FLOAT, sizeof(Vertex), 0);
shader.PosAttrib("aNormal", 3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, normal));
shader.PosAttrib("aTexCoord", 2, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, tex_coord));
shader.PosAttrib("aTangent", 3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, tangent));
shader.PosAttrib("aBitangent",3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, bitangent));
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
loaded_ = true;
}
(Screenshot) Vertices buffer after switching to the default framebuffer.
(Screenshot) Vertices buffer with no framebuffer switching (no shadow map generated).
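Regenerating all the buffers every time works, but the same apitrace finding suggests a lighter variant: since the VAO created in Setup already stores the attribute and element-buffer bindings, it should be enough to bind that existing vao_ again before drawing after the framebuffer switch. A rough sketch of what the draw path could look like under that assumption (Draw and its exact signature are hypothetical; vao_, indices_ and loaded_ are the members used in Setup above):
// Hypothetical sketch: rebind the stored VAO before drawing instead of
// recreating the buffers after every framebuffer switch.
void Mesh::Draw(const Shader& shader)
{
    if(!loaded_)
        Setup(shader);                       // create VAO/VBO/EBO once, on first use

    glBindVertexArray(vao_);                 // restores attribute + element buffer state
    glDrawElements(GL_TRIANGLES,
                   static_cast<GLsizei>(indices_.size()),
                   GL_UNSIGNED_INT,
                   nullptr);
    glBindVertexArray(0);
}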

Texture view, GL_DEPTH_STENCIL_TEXTURE_MODE parameter set to GL_STENCIL_INDEX and Intel GPU

I have a depth-stencil texture attached to a framebuffer. I need to access the stencil index of this texture from a shader.
I'm using a texture view to ease the access to the stencil index of this texture (as suggested in this answer). It works perfectly fine on the dedicated nVidia GPU, but doesn't work on the integrated Intel GPU. I know that the stencil values are there in the depth-stencil texture because I checked them via glGetTexImage(). But when I access these values in the shader (by the texture view) I get only zeroes.
I can't figure out if either my code is wrong/incomplete or the Intel driver is bugged.
I did some tests. If I set the parameter GL_DEPTH_STENCIL_TEXTURE_MODE to GL_STENCIL_INDEX directly on the depth-stencil texture, avoiding the use of the texture view, then it works fine. If I use the texture view to retrieve the depth component (leaving GL_DEPTH_STENCIL_TEXTURE_MODE to the default value, i.e. GL_DEPTH_COMPONENT), then the texture view works fine. It seems that only the combination of both produces an error.
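To make the working combination concrete, the workaround looks roughly like this: skip the texture view entirely, set GL_DEPTH_STENCIL_TEXTURE_MODE on the depth-stencil texture itself, and bind that texture when drawing the full-screen rectangle. This is a sketch of the variant that works on both GPUs, not of the failing one; the names match the main.cpp listing further down:
// Workaround sketch: set the stencil sampling mode on the depth-stencil texture
// itself instead of on a texture view.
glBindTexture(GL_TEXTURE_2D, offscreen_depthStencilTex);
glTexParameteri(GL_TEXTURE_2D, GL_DEPTH_STENCIL_TEXTURE_MODE, GL_STENCIL_INDEX);
glBindTexture(GL_TEXTURE_2D, 0);

// ... and in the draw loop, bind the depth-stencil texture (not the view) for filter.fs:
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, offscreen_depthStencilTex);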
- OpenGL 4.3
- Dedicated GPU: NVIDIA GeForce MX150 (supports OpenGL up to 4.6)
- Integrated GPU: Intel UHD Graphics 620 (supports OpenGL up to 4.4)
- OS: Windows 64-bit, build 10.0.18362
The full minimal code is pretty long, so I removed certain parts, but I cannot remove much more, since I'm not sure where the bug could be.
filter.fs
#version 430 core
out vec4 fs_frag_color;
in vec2 vs_tex_coords;
layout (binding = 0) uniform usampler2D u_stencil_tex;
void main()
{
    uint draw_count = texture(u_stencil_tex, vs_tex_coords).r;

    // Show stencil buffer
    if(draw_count == 1)
    {
        fs_frag_color = vec4(0, 1, 0, 1);
    }
    else if(draw_count > 1 && draw_count <= 10)
    {
        float orange_shade = float(draw_count + 5) / 15.0;
        fs_frag_color = vec4(orange_shade, orange_shade, 0, 1);
    }
    else if(draw_count > 10 && draw_count <= 50)
    {
        float red_shade = float(draw_count + 5) / 55.0;
        fs_frag_color = vec4(red_shade, 0, 0, 1);
    }
    else
    {
        fs_frag_color = vec4(0, 0, 0, 1);
    }
}
main.cpp
int main(void)
{
//Initialize GLFW and open a window with a 4.3 OpenGL context
// ...
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
glDebugMessageCallback(debugMessage_callback, nullptr);
Shader main_shader;
main_shader.load("shader.vs", "shader.fs");
Shader filter_shader;
filter_shader.load("filter.vs", "filter.fs");
//CUBE VAO
GLuint cube_VAO;
GLuint cube_VBO;
glGenVertexArrays(1, &cube_VAO);
glGenBuffers(1, &cube_VBO);
glBindVertexArray(cube_VAO);
// cube_VAO settings...
glBindVertexArray(0);
//OFFSCREEN FBO
GLuint offscreen_FBO = 0;
GLuint offscreen_colorTex = 0;
GLuint offscreen_depthStencilTex = 0;
GLuint offscreen_stencilTexView = 0;
int offscreen_width, offscreen_height;
glfwGetFramebufferSize(window1, &offscreen_width, &offscreen_height);
glGenFramebuffers(1, &offscreen_FBO);
glBindFramebuffer(GL_FRAMEBUFFER, offscreen_FBO);
glGenTextures(1, &offscreen_colorTex);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, offscreen_colorTex);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, offscreen_width,
offscreen_height);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, 0); //unbind
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D, offscreen_colorTex, 0);
static GLenum offscreen_drawBuffers[] = { GL_COLOR_ATTACHMENT0 };
glDrawBuffers(1, offscreen_drawBuffers);
glGenTextures(1, &offscreen_depthStencilTex);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, offscreen_depthStencilTex);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_DEPTH24_STENCIL8,
offscreen_width, offscreen_height);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, 0); //unbind
// Attach the depth and stencil texture to the framebuffer
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT,
GL_TEXTURE_2D, offscreen_depthStencilTex, 0);
glGenTextures(1, &offscreen_stencilTexView);
glTextureView(offscreen_stencilTexView, GL_TEXTURE_2D,
offscreen_depthStencilTex, GL_DEPTH24_STENCIL8, 0, 1, 0, 1);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, offscreen_stencilTexView);
glTexParameteri(GL_TEXTURE_2D, GL_DEPTH_STENCIL_TEXTURE_MODE,
GL_STENCIL_INDEX);
glBindTexture(GL_TEXTURE_2D, 0);
// ^^^^^^
// ^^^^^^ Here the important block
// ^^^^^^
// Check if the framebuffer is complete
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
throw std::runtime_error("The offscreen FBO is not complete.");
glBindFramebuffer(GL_FRAMEBUFFER, 0); //unbind
// WINDOW RECTANGLE VAO
GLuint wRect_VAO;
GLuint wRect_VBO;
glGenVertexArrays(1, &wRect_VAO);
glGenBuffers(1, &wRect_VBO);
glBindVertexArray(wRect_VAO);
// wRect_VAO settings ...
glBindVertexArray(0);
while (!glfwWindowShouldClose(window1))
{
float ratio;
int width, height;
glfwGetFramebufferSize(window1, &width, &height);
ratio = width / (float)height;
glViewport(0, 0, width, height);
// Drawing on the offscreen FBO
glBindFramebuffer(GL_FRAMEBUFFER, offscreen_FBO);
glEnable(GL_DEPTH_TEST);
glEnable(GL_STENCIL_TEST);
glStencilFunc(GL_ALWAYS, 1, 0xFF); //stencil test always pass
glStencilOp(GL_KEEP, GL_INCR, GL_INCR); //the value stored in the stencil buffer is increased every time a fragment is not discarded
glClearColor(1.f, 1.f, 1.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
main_shader.bind();
glm::mat4 view(1.0f); view = glm::translate(view, glm::vec3(0.f, 0.f, -3.f));
glm::mat4 projection = glm::perspective(glm::radians(45.f), 800.f / 600.f, 0.1f, 100.0f);
glm::mat4 model(1.f);
model = glm::rotate(model, glm::radians(static_cast<float>(glfwGetTime() * 20.f)), glm::vec3(1.f, 1.f, 0.f));
main_shader.set_mat4("model", model);
main_shader.set_mat4("view", view);
main_shader.set_mat4("projection", projection);
glBindVertexArray(cube_VAO);
glDrawArrays(GL_TRIANGLES, 0, 36);
glBindVertexArray(0);
// Drawing on the default FBO
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glDisable(GL_DEPTH_TEST);
glClearColor(0.f, 1.f, 1.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT);
filter_shader.bind();
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, offscreen_stencilTexView);
glBindVertexArray(wRect_VAO);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glfwSwapBuffers(window1);
glfwPollEvents();
}
//Destroy the window and shut down GLFW ...
}

Call to glDrawElements breaks display on some GPUs?

I'm working on a 3D PBR engine along with some colleagues (OpenGL 3.3 + GLFW3 and ImGui), and we have a peculiar error: the application enters its drawing loop normally and gives no OpenGL error whatsoever (as shown by extensive use of glCheckError()), but the screen is completely blank. The peculiar part is that this only happens on my colleagues' computers, which use an NVIDIA GeForce GTX 1060 as the graphics card (two of my friends have the exact same computer).
After some investigation, it turns out that this error is triggered by a function that is called a single time before the main loop. Long story short, it renders a quad to a texture bound via a framebuffer, using a couple of shaders. I narrowed the problem down to the glDrawElements call; on those computers, not changing anything except commenting out this line fixes the display (at the cost of the texture containing garbage instead of the expected result of the draw call), whereas all of the other computers run the app fine even with the line uncommented.
The culprit function is as follows:
/**
* Returns a texture identifier filled with a precomputed irradiance map calculated
* from a provided, already set up-environment map
*/
GLuint precomputeIrradianceMap(GLuint envMapId)
{
std::cerr << "Precomputing irradiance map ... " << std::endl;
GLuint irradianceVAO, irradianceVBO, irradianceMap,
irradianceVertShader, irradianceFragShader, irradianceProgram, captureFBO,
captureRBO;
GLint aPos_location, env_map;
glGenVertexArrays(1, &irradianceVAO);
glBindVertexArray(irradianceVAO);
glGenBuffers(1, &irradianceVBO);
glBindBuffer(GL_ARRAY_BUFFER, irradianceVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(quad_vertices), quad_vertices, GL_STATIC_DRAW);
glGenFramebuffers(1, &captureFBO);
glGenRenderbuffers(1, &captureRBO);
glBindFramebuffer(GL_FRAMEBUFFER, captureFBO);
glBindRenderbuffer(GL_RENDERBUFFER, captureRBO);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, 320, 320);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, captureRBO);
checkGLerror();
glGenTextures(1, &irradianceMap);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, irradianceMap);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16F, 320, 320, 0, GL_RGBA, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glBindTexture(GL_TEXTURE_2D, envMapId);
checkGLerror();
irradianceVertShader = createShaderFromSource(GL_VERTEX_SHADER, "shaders/irradiance_vert.glsl");
irradianceFragShader = createShaderFromSource(GL_FRAGMENT_SHADER, "shaders/irradiance_frag.glsl");
irradianceProgram = glCreateProgram();
glAttachShader(irradianceProgram, irradianceVertShader);
glAttachShader(irradianceProgram, irradianceFragShader);
printShaderLog(irradianceVertShader);
printShaderLog(irradianceFragShader);
glLinkProgram(irradianceProgram);
glUseProgram(irradianceProgram);
checkGLerror();
env_map = glGetUniformLocation(irradianceProgram, "environmentMap");
aPos_location = glGetAttribLocation(irradianceProgram, "aPos");
glEnableVertexAttribArray(aPos_location);
checkGLerror();
glVertexAttribPointer(aPos_location, 3, GL_FLOAT, GL_FALSE, 0, (void *) 0);
glUniform1i(env_map, 0);
checkGLerror();
glViewport(0, 0, 320, 320);
glBindFramebuffer(GL_FRAMEBUFFER, captureFBO);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, irradianceMap, 0);
checkGLerror();
glClearDepth(1.f);
glClearColor(0.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
checkGLerror();
// This is the culprit
// glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, quad_indices);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
checkGLerror();
// glDisableVertexAttribArray(aPos_location);
glDeleteShader(irradianceVertShader);
glDeleteShader(irradianceFragShader);
glDeleteProgram(irradianceProgram);
glDeleteFramebuffers(1, &captureFBO);
glDeleteRenderbuffers(1, &captureRBO);
glDeleteBuffers(1, &irradianceVBO);
glDeleteVertexArrays(1, &irradianceVAO);
checkGLerror();
std::cerr << "... done " << std::endl;
return irradianceMap;
}
You can safely replace checkGLerror() by std::cerr << glCheckError() << std::endl. As I said before, there is no OpenGL error whatsoever, all the shaders compile fine, and this only breaks on computers equipped with an Nvidia GeForce GTX 1060.
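For reference, a minimal stand-in for checkGLerror() that behaves equivalently for the purposes of this post (it just drains glGetError(); the project's helper is not necessarily byte-for-byte identical) would be:
// Minimal stand-in for checkGLerror(): drain and print all pending GL errors.
#include <iostream>
static void checkGLerror()
{
    for(GLenum err = glGetError(); err != GL_NO_ERROR; err = glGetError())
        std::cerr << "GL error 0x" << std::hex << err << std::dec << std::endl;
}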
The rest of the code is mostly setting up VAOs, VBOs and the like, and the render loop is as follows:
while (!glfwWindowShouldClose(window))
{
glfwGetFramebufferSize(window, &display_w, &display_h);
float newRatio = (float)display_w / display_h;
if(ratio != newRatio)
{
ratio = newRatio;
setAspectRatio(p, ratio);
invP = p.inverse();
glViewport(0, 0, display_w, display_h);
}
ImGui_ImplGlfwGL3_NewFrame();
// Rendering + OpenGL rendering
// Draw the skybox, then the model
ImGui::Begin("Physical parameters", NULL, ImGuiWindowFlags_AlwaysAutoResize);
ImGui::SliderFloat("Dielectric specularity", &ds, 0.f, 1.f, "%.3f");
ImGui::SliderFloat("Light intensity", &l0, 0.f, 10.f, "%.2f");
ImGui::Checkbox("Use irradiance map as skybox", &skyOrIrradiance);
ImGui::Checkbox("Debug draw irradiance map", &debugDraw);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, skyOrIrradiance ? irradianceTexture : skybox_texture);
ImGui::End();
Matrix4f v = camera->getMatrix(), invV = v.inverse();
// glClearColor(0.45f, 0.55f, 0.60f, 1.00f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArray(vertex_array_objs[0]);
glUseProgram(skybox_program);
glUniformMatrix4fv(skybox_v_location, 1, GL_FALSE, (const GLfloat *) invV.data());
glUniformMatrix4fv(skybox_p_location, 1, GL_FALSE, (const GLfloat *) invP.data());
glDisable(GL_DEPTH_TEST);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, quad_indices);
glBindVertexArray(vertex_array_objs[1]);
glUseProgram(program);
glUniformMatrix4fv(mv_location, 1, GL_FALSE, (const GLfloat *) v.data());
glUniformMatrix4fv(p_location, 1, GL_FALSE, (const GLfloat *)p.data());
glUniform1f(uDS_location, ds);
glUniform1f(L0_location, l0);
glActiveTexture(GL_TEXTURE0 + model.activeTexturesCount());
glBindTexture(GL_TEXTURE_2D, irradianceTexture);
glUniform1i(irradiance_location, model.activeTexturesCount());
glEnable(GL_DEPTH_TEST);
model.render();
if(debugDraw)
{
displayTexture(irradianceTexture);
displayTexture(skybox_texture, 0.f, -1.f);
}
camera->drawControls();
// Draw basic interface
basicInterface();
ImGui::Render();
ImGui_ImplGlfwGL3_RenderDrawData(ImGui::GetDrawData());
glfwSwapBuffers(window);
glfwPollEvents();
camera->update(window);
}
model.render() is as follows:
void GltfModel::render() const
{
    for(unsigned int i = 0; i < _activeTextures.size(); i++)
    {
        if(_textureLocations[i] > -1)
        {
            int j = _activeTextures[i];
            glActiveTexture(GL_TEXTURE0 + i);
            glBindTexture(GL_TEXTURE_2D, _textureIds[j]);
            glUniform1i(_textureLocations[i], i);
        }
    }
    glBindBuffer(GL_ARRAY_BUFFER, _buffers[ARRAY_BUFFER]);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _buffers[ELEMENT_ARRAY_BUFFER]);
    glDrawElements(_drawingMode, _indicesCount, _indicesType, NULL);
}
Thanks in advance for your time!
EDIT : putting a glClear(GL_COLOR_BUFFER_BIT) call right before glfwSwapBuffers(window) still displays a white screen with the culprit line uncommented even though the clear color has been set to light blue. Commenting the culprit line indeed displays a light blue screen, so this makes me think it's a framebuffer issue, but I can't say for sure.

OpenGL - texelFetch fetches nothing

I'm trying to draw a textured plane following the OpenGL SuperBible 6th ed. but for some reason I fail.
Here's my texture initialization code.
GLuint texture;
glGenTextures(1, &texture);
glActiveTexture(GL_TEXTURE0);
int w = 256;
int h = 256;
glBindTexture(GL_TEXTURE_2D, texture);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA32F, w, h);
float * data = new float[w * h * 4];
//This just creates some image data
generateTexture(data, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
delete [] data;
This is the plane object. The object itself is drawn, just untextured.
glGenBuffers(1, &planeBuffer);
glBindBuffer(GL_ARRAY_BUFFER, planeBuffer);
glBufferData(GL_ARRAY_BUFFER,
sizeof(planePositions),
planePositions,
GL_STATIC_DRAW);
These are my vertex and fragment shaders.
#version 430 core
layout (location = 0) in vec3 position;
uniform mat4 proj, view;
void main(void){
gl_Position = proj * view * vec4 (position, 1.0);
}
#version 430 core
uniform sampler2D s;
out vec4 frag_color;
void main () {
frag_color = texelFetch(s, ivec2(gl_FragCoord.xy), 0);
};
I draw like this
glUseProgram(textureProgram);
GLuint projLocation = glGetUniformLocation (textureProgram, "proj");
glUniformMatrix4fv (projLocation, 1, GL_FALSE, projectionSource);
GLuint viewLocation = glGetUniformLocation (textureProgram, "view");
glUniformMatrix4fv (viewLocation, 1, GL_FALSE, viewSource);
glBindBuffer(GL_ARRAY_BUFFER, planeBuffer);
GLuint positionLocation = glGetAttribLocation(textureProgram, "position");
glVertexAttribPointer (positionLocation, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray (positionLocation);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
GLuint ts = glGetUniformLocation (textureProgram, "s");
glUniform1i(ts, 0);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray (positionLocation);
//Afterwards I draw more geometry with other shaders. This shows correctly
glUseProgram(shaderProgram);
//Bind buffers, matrices, drawarrays, etc
But I just get a black untextured plane. If I override the frag_color assignment by adding another line afterwards, like so
frag_color = vec4(1.0);
it works, i.e. I get a white plane, so the shaders seem to be working correctly.
I don't get any errors whatsoever from glGetError().
Compatibility:
OpenGL version supported: 4.2.12337 Compatibility Profile Context 13.101
GLSL version supported: 4.30
The data array does contain values between 0 and 1. I have also tried hard-coding some random coordinates into the texelFetch() function, but I always get a black plane. It looks as though the sampler2D contained nothing but zeroes. I have also tried hard-coding the values contained in data to 1.0, 255.0, but nothing.
Either the book fails to mention something or I am missing something stupid. Why does the texture refuse to show on the plane?
EDIT: I added some code to the drawing part. The rest of the geometry that I draw (with different shaders) shows perfectly. None of it uses textures.
I finally found a way around this, although it is not clear what exactly the problem is.
I got the texture to show using glTexImage2D instead of glTexStorage2D and glTexSubImage2D.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_FLOAT, data);
I also had to set the parameters explicitly, even though I'm using texelFetch().
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
Binding the uniform explicitly was not necessary.
What is strange is that, according to the docs, glTexStorage2D is equivalent to this:
for (i = 0; i < levels; i++) {
    glTexImage2D(target, i, internalformat, width, height, 0, format, type, NULL);
    width = max(1, (width / 2));
    height = max(1, (height / 2));
}
However, this combination does not work.
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
I still have to figure out why that is, but at least I got it to work.
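One note on why that last combination fails: glTexStorage2D only accepts sized internal formats, so the unsized GL_RGBA is rejected with GL_INVALID_ENUM and the texture never gets storage, whereas glTexImage2D does accept unsized formats like GL_RGBA. (The very first listing already used the sized GL_RGBA32F, so the original failure likely had a different cause.) With a sized format the immutable-storage path would look like this; this is a sketch, untested against the original setup:
// glTexStorage2D requires a *sized* internal format (e.g. GL_RGBA32F or GL_RGBA8);
// the unsized GL_RGBA is only valid with glTexImage2D.
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA32F, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);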

OpenGL Render To Texture (To screen) blank

I'm drawing a lot of points to the screen, and I am trying to get them to render to a texture for post processing before then rendering the texture back to the screen. Currently I'm trying to just pass through and get the texture rendering but it appears to be rendering nothing.
My fragment shader works (without render to texture; it's used below in mEllipseTextureProgram) and the only change I have made is to include
out vec3 color;
to render to the texture itself (and obviously change to color= rather than gl_FragColor=). The second program referenced below (mScreenProgram) is as follows:
Vertex Shader:
#version 330 compatibility
in vec2 vUV;
out vec2 UV;
void main()
{
gl_Position = gl_Vertex;
UV = vUV;
}
Fragment Shader:
#version 330 core
in vec2 UV;
out vec3 color;
uniform sampler2D renderedTexture;
void main(){
color = texture( renderedTexture, UV ).xyz;
}
I set up my render to texture stuff like this:
glGenFramebuffers(1, &mGaussianFrameBuffer);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, mGaussianFrameBuffer);
glGenTextures(1, &mGaussianRenderTexture);
glBindTexture(GL_TEXTURE_2D, mGaussianRenderTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
1024,
768,
0, GL_RGBA, GL_UNSIGNED_BYTE,
NULL);
glBindTexture(GL_TEXTURE_2D, 0);
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, mGaussianRenderTexture, 0);
glGenTextures(1, &mGaussianDepthBuffer);
glBindTexture(GL_TEXTURE_2D, mGaussianDepthBuffer);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT,
1024,
768,
0, GL_DEPTH_COMPONENT, GL_FLOAT,
NULL);
glBindTexture(GL_TEXTURE_2D, 0);
glFramebufferTexture(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, mGaussianDepthBuffer, 0);
which returns a successful GL_FRAMEBUFFER_COMPLETE.
Here are the three relevant rendering functions:
void Draw()
{
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
//gluPerspective(45, 1024.0/768.0, 3.0, 20000);
perspectiveGL(45, 1024.0/768.0, 1.0, 20000);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glPushMatrix();
gluLookAt(cameraData[0],cameraData[1], cameraData[2],
cameraData[3],cameraData[4], cameraData[5],
0,1,0);
glEnable(GL_DEPTH_TEST);
glClearColor( 0.2f, 0.2f, 0.9f, 0.0f );
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// OTHER STUFF POTENTIALLY DRAW HERE IN DIFFERENT WAYS
glUseProgram(mEllipseTextureProgram);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, mGaussianFrameBuffer);
GLuint attachments[1] = {GL_COLOR_ATTACHMENT0};
glDrawBuffers(1, attachments);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
DrawEllipseToTexture();
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glUseProgram(mScreenProgram);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
DrawTextureToScreen();
}
void DrawEllipseToTexture()
{
glEnableClientState(GL_VERTEX_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, mVBO);
glVertexPointer(3, GL_FLOAT, 0, 0);
glEnable(GL_PROGRAM_POINT_SIZE);
glEnable(GL_POINT_SPRITE);
glEnable( GL_TEXTURE_2D );
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, mBMP);
glProgramUniform1i(mEllipseTextureProgram, mTextureLocation, 0);
glBindBuffer(GL_ARRAY_BUFFER, mUV);
glEnableVertexAttribArray(mTexCoordLocation);
glVertexAttribPointer(mTexCoordLocation, 2, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, mSpacial);
glEnableVertexAttribArray(mSpacialLocation);
glVertexAttribPointer(mSpacialLocation, 1, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, mNormals);
glEnableVertexAttribArray(mNormalLocation);
glVertexAttribPointer(mNormalLocation, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, mVerticalSpat);
glEnableVertexAttribArray(mMajorLocation);
glVertexAttribPointer(mMajorLocation, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, mHorizontalSpat);
glEnableVertexAttribArray(mMinorLocation);
glVertexAttribPointer(mMinorLocation, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable( GL_BLEND );
glDrawArrays(GL_POINTS, 0, mNumberPoints);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glDisable( GL_TEXTURE_2D );
glDisableVertexAttribArray(mSpacialLocation);
glDisable(GL_POINT_SPRITE);
glDisable( GL_POINT_SMOOTH );
glDisable(GL_PROGRAM_POINT_SIZE);
glDisableClientState(GL_VERTEX_ARRAY);
}
void DrawTextureToScreen()
{
glEnableClientState(GL_VERTEX_ARRAY);
glEnable( GL_TEXTURE_2D );
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, mGaussianRenderTexture);
glProgramUniform1i(mScreenProgram, mGaussianTextureLocation, 0);
GLfloat vertices[] = { -1, -1, 2,
1, -1, 2,
1, 1, 2,
-1, 1, 2 };
GLfloat uv[] = { 0, 0,
1, 0,
1, 1,
0, 1,};
glVertexPointer(3, GL_FLOAT, 0, vertices);
glEnableVertexAttribArray(mGaussianUV);
glVertexAttribPointer(mGaussianUV, 2, GL_FLOAT, GL_FALSE, 0, uv);
glDrawArrays(GL_TRIANGLES, 0, 4);
glDisable ( GL_TEXTURE_2D );
glDisableClientState(GL_VERTEX_ARRAY);
}
I still don't know what was wrong with my solution above (I tried a lot of variations of it), but following this tutorial got it working: Tutorial
I usually use "out vec4 color"; I am not sure whether it works to return a vec3 per fragment when your texture's format is GL_RGBA.
Also, I didn't see you set glViewport() in your code, which means there is no affine transformation specified to map positions (x, y) from NDC to screen space. If you don't have a proper viewport during the FBO render, that can definitely give you nothing.
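Concretely, I mean something along these lines: set the viewport to the texture size right after binding the FBO, and set it back to the window size before drawing the textured quad (1024 x 768 matches the texture allocation above; windowWidth/windowHeight are placeholder names for however you track the window size):
// Pass into the FBO: the viewport must match the attached texture size.
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, mGaussianFrameBuffer);
glViewport(0, 0, 1024, 768);
// ... DrawEllipseToTexture() ...

// Back to the default framebuffer: the viewport should match the window size.
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glViewport(0, 0, windowWidth, windowHeight);  // placeholder names
// ... DrawTextureToScreen() ...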
I was struggling in a similar situation myself. Reading up on the functions you don't know in the GL documentation, rather than in blogs, is a much better way to understand things correctly... :)