How to debug default frame and depth buffers? - c++

I'm trying to implement shadows in my custom renderer using dynamic shadow mapping and forward rendering (deferred rendering will be implemented later). Everything renders correctly to the framebuffer used to generate the shadow map. However, when I then render the scene normally to the default framebuffer, only the skybox appears (which indicates that the default framebuffer is in use). My only hypothesis is that the problem is related to the depth buffer, because disabling the call to DrawActors(...) in ForwardRenderShadows appears to solve the problem, but then I can't generate my depth map. Any suggestions?
Code:
void Scene::DrawActors(const graphics::Shader& shader)
{
    for(const auto& actor : actors_)
        actor->Draw(shader);
}

template <typename T>
void ForwardRenderShadows(const graphics::Shader& shadow_shader, const std::vector<T>& lights)
{
    for(const auto& light : lights)
    {
        if(light->Shadow())
        {
            light->DrawShadows(shadow_shader);
            DrawActors(shadow_shader); // removing this line "solves the problem"
            glBindFramebuffer(GL_FRAMEBUFFER, 0);
        }
    }
}
/*
  Shadow mapping is only implemented on directional lights for the moment; this is the
  relevant code that gets called when the process starts. More code details are at the
  end of the snippet.
*/
void DirectionalLight::SetupShadows()
{
    glGenFramebuffers(1, &framebuffer_);
    glGenTextures(1, &shadow_map_);
    glBindTexture(GL_TEXTURE_2D, shadow_map_);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, constants::SHADOW_WIDTH, constants::SHADOW_HEIGHT, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, shadow_map_, 0);
    glDrawBuffer(GL_NONE);
    glReadBuffer(GL_NONE);
    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
        throw std::runtime_error("Directional light framebuffer is not complete \n");
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    ShadowSetup(true);
}
void DirectionalLight::DrawShadows(const graphics::Shader& shader)
{
    if(!ShadowSetup())
        SetupShadows();
    glViewport(0, 0, constants::SHADOW_WIDTH, constants::SHADOW_HEIGHT);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glClear(GL_DEPTH_BUFFER_BIT);
    shader.Use();
    projection_ = clutch::Orthopraphic(-10.0f, 10.0f, -10.0f, 10.0f, 1.0f, 100.0f);
    transform_  = clutch::LookAt(direction_ * -1.0f, {0.0f, 0.0f, 0.0f, 0.0f}, {0.0f, 1.0f, 0.0f, 0.0f});
    shader.SetUniformMat4("light_transform", projection_ * transform_);
}
void DirectionalLight::Draw(const graphics::Shader& shader)
{
    shader.SetUniform4f("light_dir", direction_);
    shader.SetUniform4f("light_color", color_);
    shader.SetUniformMat4("light_transform", transform_);
    shader.SetUniformMat4("light_projection", projection_);
    shader.SetUniformInt("cast_shadow", shadows_);
    glActiveTexture(GL_TEXTURE12);
    shader.SetUniformInt("shadow_map", 12);
    glBindTexture(GL_TEXTURE_2D, shadow_map_);
}
Code repo: https://github.com/rxwp5657/Nitro
Relevant Files for the problem:
include/core/scene.hpp
include/core/directional_light.hpp
include/core/light_shadow.hpp
include/graphics/mesh.hpp
src/core/scene.cpp
src/core/directional_light.cpp
src/core/light_shadow.cpp
src/graphics/mesh.cpp
Finally, what I have tried so far is:
Deactivating depth testing with glDepthMask(GL_FALSE) and glDisable(GL_DEPTH_TEST) // same problem
Changing the depth function with glDepthFunc(GL_ALWAYS); // no desired results

If you have an NVIDIA graphics card, you could have a look at Nsight. You can capture a frame and view all the GL calls that occurred.
You can then select an event, for instance event 22 in my example, and inspect all textures, the color buffer, the depth buffer and the stencil buffer.
Furthermore, you can look at all the GL state parameters at a given event.
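If a frame debugger is not available, a debug message callback can also catch problems that glGetError() alone misses. A minimal sketch, assuming a context that supports GL 4.3 or the KHR_debug extension; the loader header and function names are just examples, not code from the project:

#include <cstdio>
#include <GL/glew.h> // or whichever loader the project already uses

// Example callback: print every message the driver emits.
void GLAPIENTRY DebugCallback(GLenum source, GLenum type, GLuint id, GLenum severity,
                              GLsizei length, const GLchar* message, const void* user_param)
{
    std::fprintf(stderr, "GL debug [type 0x%x, severity 0x%x]: %s\n", type, severity, message);
}

void EnableGLDebugOutput()
{
    glEnable(GL_DEBUG_OUTPUT);
    glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS); // report inside the offending call
    glDebugMessageCallback(DebugCallback, nullptr);
}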

OK, after using apitrace I found out that the VBO changes when switching from a custom framebuffer to the default one. Because of this, the solution to the problem is to set the VBO up again after switching to the default framebuffer.
Based on the code of the project, the solution is to call the Setup function again after switching to the default framebuffer (a usage sketch follows below).
Setup function of the Mesh class:
void Mesh::Setup(const Shader& shader)
{
    glGenVertexArrays(1, &vao_);
    glBindVertexArray(vao_);

    glGenBuffers(1, &elementbuffer_);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer_);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices_.size() * sizeof(unsigned int), &indices_[0], GL_STATIC_DRAW);

    glGenBuffers(1, &vbo_);
    glBindBuffer(GL_ARRAY_BUFFER, vbo_);
    glBufferData(GL_ARRAY_BUFFER, vertices_.size() * sizeof(Vertex), &vertices_[0], GL_STATIC_DRAW);

    shader.PosAttrib("aPosition", 3, GL_FLOAT, sizeof(Vertex), 0);
    shader.PosAttrib("aNormal",   3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, normal));
    shader.PosAttrib("aTexCoord", 2, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, tex_coord));
    shader.PosAttrib("aTangent",  3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, tangent));
    shader.PosAttrib("aBitangent",3, GL_FLOAT, sizeof(Vertex), offsetof(Vertex, bitangent));

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
    loaded_ = true;
}
Vertices buffer after switching to default frame buffer
Vertices buffer with no Framebuffer switching (no shadow map generated)
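For illustration only, the forward pass with this fix might look roughly like the sketch below. ForwardRender, GetMesh and the window size variables are placeholders rather than names from the repo, and re-running the full Setup every frame re-creates the buffers, so a leaner variant would only re-bind the VAO and re-specify the attribute pointers for the active shader:

// Hypothetical sketch of the pass that renders to the default framebuffer.
void Scene::ForwardRender(const graphics::Shader& shader)
{
    glBindFramebuffer(GL_FRAMEBUFFER, 0);             // back to the default framebuffer
    glViewport(0, 0, window_width_, window_height_);  // restore the window viewport
    for(const auto& actor : actors_)
    {
        actor->GetMesh().Setup(shader); // re-establish the vertex state, as described above
        actor->Draw(shader);
    }
}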

Related

Framebuffer texture coming out white (fragment shader doesn't affect it)

So I have an FBO and I am trying to output a depth texture from the light source's perspective. Unfortunately, the depth texture comes out pure white even if I hard-code black in the fragment shader.
This is my framebuffer initialization:
//Render frame to a texture
m_FrameBuffer = 0;
glGenFramebuffers(1, &m_FrameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, m_FrameBuffer); // once frame buffer is bound, must draw to it or black screen
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &m_depthTexture);
glBindTexture(GL_TEXTURE_2D, m_depthTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT16, 1024, 1024, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_FUNC, GL_LEQUAL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_R_TO_TEXTURE);
glFramebufferTexture(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, m_depthTexture, 0);
glDrawBuffer(GL_NONE);
//Always check that our framebuffer is ok
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
printf("frame buffer binding error");
glBindTexture(GL_TEXTURE_2D, 0);
This is how I render to the framebuffer:
glBindFramebuffer(GL_FRAMEBUFFER, m_FrameBuffer);
glViewport(0, 0, 1024, 1024);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glBindVertexArray(m_VAO);
glBindBuffer(GL_ARRAY_BUFFER, m_VBO);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, vertices.size());
glDisableVertexAttribArray(0);
glBindVertexArray(0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
This is my fragment shader
#version 330 core
// Ouput data
layout(location = 0) out float fragmentdepth;
void main()
{
//fragmentdepth = gl_FragCoord.z;
fragmentdepth = 0;
}
This is my main loop
while (!window.closed())
{
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glDepthFunc(GL_LESS);
window.clear();
CameraControls();
depth.enable();
//depthMVP = depthProjectionMatrix * depthViewMatrix * depthModelMatrix;
depth.setUniformMat4("projection", projection);
depth.setUniformMat4("view", camera);
depth.setUniformMat4("model", shape1->modelMatrix);
shape1->RenderToTexture();
depth.disable();
window.clear();
basic.enable();
basic.setUniformMat4("proj", projection);
//basic.setUniform3f("light_Pos", lightPos);
basic.setUniformMat4("view", camera);
basic.setUniformMat4("model", shape1->modelMatrix);
basic.setUniformMat4("DepthBiasMVP", biasMatrix);
//NEED TO CHANGE THIS TO BIAS MATRIX X DEPTHMVP
//MVP = projection * camera * shape1->modelMatrix;
//basic.setUniformMat4("MVP", MVP);
shape1->Render(basic);
basic.disable();
window.update();
}
I am following this tutorial http://www.opengl-tutorial.org/intermediate-tutorials/tutorial-16-shadow-mapping/
Just answering my own question after looking back at my old unresolved questions. If anyone is wondering, my problem was that I had fused two tutorials into one, and they used different near and far values for the frustum. The framebuffer was black because the object was too far from the camera.
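To make the mismatch concrete, here is a minimal sketch (assuming GLM; the numbers are arbitrary) of how inconsistent near/far planes lead to an all-white depth texture:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Depth pass projection with near = 0.1 and far = 100.
glm::mat4 depthProjection = glm::ortho(-10.0f, 10.0f, -10.0f, 10.0f, 0.1f, 100.0f);

// Any geometry placed beyond 100 units (e.g. positions taken from a tutorial
// that assumed a much larger far plane) is clipped away, nothing is written,
// and the depth texture keeps its cleared value of 1.0, i.e. pure white.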

Call to glDrawElements breaks display on some GPUs?

I'm working on a 3D PBR engine with some colleagues (OpenGL 3.3 + GLFW3 and ImGui), and we have a peculiar error: the application enters its drawing loop normally and reports no OpenGL error whatsoever (as shown by extensive use of glCheckError()), yet the screen is completely blank. The peculiar part is that this only happens on my colleagues' computers, which use an Nvidia GeForce GTX 1060 as their graphics card (two of my friends have the exact same machine).
After some investigation, it turns out the error is triggered by a function that is called a single time before the main loop. Long story short, it renders a quad to a texture bound via a framebuffer, using a couple of shaders. I narrowed the problem down to the glDrawElements call: on those computers, commenting out that line and changing nothing else fixes the display (at the cost of the texture containing garbage instead of the expected result of the draw call), whereas all the other computers run the app fine even with the line uncommented.
The culprit function is as follows :
/**
* Returns a texture identifier filled with a precomputed irradiance map calculated
* from a provided, already set up-environment map
*/
GLuint precomputeIrradianceMap(GLuint envMapId)
{
std::cerr << "Precomputing irradiance map ... " << std::endl;
GLuint irradianceVAO, irradianceVBO, irradianceMap,
irradianceVertShader, irradianceFragShader, irradianceProgram, captureFBO,
captureRBO;
GLint aPos_location, env_map;
glGenVertexArrays(1, &irradianceVAO);
glBindVertexArray(irradianceVAO);
glGenBuffers(1, &irradianceVBO);
glBindBuffer(GL_ARRAY_BUFFER, irradianceVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(quad_vertices), quad_vertices, GL_STATIC_DRAW);
glGenFramebuffers(1, &captureFBO);
glGenRenderbuffers(1, &captureRBO);
glBindFramebuffer(GL_FRAMEBUFFER, captureFBO);
glBindRenderbuffer(GL_RENDERBUFFER, captureRBO);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, 320, 320);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, captureRBO);
checkGLerror();
glGenTextures(1, &irradianceMap);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, irradianceMap);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16F, 320, 320, 0, GL_RGBA, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glBindTexture(GL_TEXTURE_2D, envMapId);
checkGLerror();
irradianceVertShader = createShaderFromSource(GL_VERTEX_SHADER, "shaders/irradiance_vert.glsl");
irradianceFragShader = createShaderFromSource(GL_FRAGMENT_SHADER, "shaders/irradiance_frag.glsl");
irradianceProgram = glCreateProgram();
glAttachShader(irradianceProgram, irradianceVertShader);
glAttachShader(irradianceProgram, irradianceFragShader);
printShaderLog(irradianceVertShader);
printShaderLog(irradianceFragShader);
glLinkProgram(irradianceProgram);
glUseProgram(irradianceProgram);
checkGLerror();
env_map = glGetUniformLocation(irradianceProgram, "environmentMap");
aPos_location = glGetAttribLocation(irradianceProgram, "aPos");
glEnableVertexAttribArray(aPos_location);
checkGLerror();
glVertexAttribPointer(aPos_location, 3, GL_FLOAT, GL_FALSE, 0, (void *) 0);
glUniform1i(env_map, 0);
checkGLerror();
glViewport(0, 0, 320, 320);
glBindFramebuffer(GL_FRAMEBUFFER, captureFBO);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, irradianceMap, 0);
checkGLerror();
glClearDepth(1.f);
glClearColor(0.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
checkGLerror();
// This is the culprit
// glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, quad_indices);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
checkGLerror();
// glDisableVertexAttribArray(aPos_location);
glDeleteShader(irradianceVertShader);
glDeleteShader(irradianceFragShader);
glDeleteProgram(irradianceProgram);
glDeleteFramebuffers(1, &captureFBO);
glDeleteRenderbuffers(1, &captureRBO);
glDeleteBuffers(1, &irradianceVBO);
glDeleteVertexArrays(1, &irradianceVAO);
checkGLerror();
std::cerr << "... done " << std::endl;
return irradianceMap;
}
You can safely replace checkGLerror() with std::cerr << glCheckError() << std::endl. As I said before, there is no OpenGL error whatsoever, all the shaders compile fine, and this only breaks on computers equipped with an Nvidia GeForce GTX 1060.
The rest of the code is mostly setting up VAOs, VBOs and the like, and the render loop is as follows :
while (!glfwWindowShouldClose(window))
{
glfwGetFramebufferSize(window, &display_w, &display_h);
float newRatio = (float)display_w / display_h;
if(ratio != newRatio)
{
ratio = newRatio;
setAspectRatio(p, ratio);
invP = p.inverse();
glViewport(0, 0, display_w, display_h);
}
ImGui_ImplGlfwGL3_NewFrame();
// Rendering + OpenGL rendering
// Draw the skybox, then the model
ImGui::Begin("Physical parameters", NULL, ImGuiWindowFlags_AlwaysAutoResize);
ImGui::SliderFloat("Dielectric specularity", &ds, 0.f, 1.f, "%.3f");
ImGui::SliderFloat("Light intensity", &l0, 0.f, 10.f, "%.2f");
ImGui::Checkbox("Use irradiance map as skybox", &skyOrIrradiance);
ImGui::Checkbox("Debug draw irradiance map", &debugDraw);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, skyOrIrradiance ? irradianceTexture : skybox_texture);
ImGui::End();
Matrix4f v = camera->getMatrix(), invV = v.inverse();
// glClearColor(0.45f, 0.55f, 0.60f, 1.00f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArray(vertex_array_objs[0]);
glUseProgram(skybox_program);
glUniformMatrix4fv(skybox_v_location, 1, GL_FALSE, (const GLfloat *) invV.data());
glUniformMatrix4fv(skybox_p_location, 1, GL_FALSE, (const GLfloat *) invP.data());
glDisable(GL_DEPTH_TEST);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, quad_indices);
glBindVertexArray(vertex_array_objs[1]);
glUseProgram(program);
glUniformMatrix4fv(mv_location, 1, GL_FALSE, (const GLfloat *) v.data());
glUniformMatrix4fv(p_location, 1, GL_FALSE, (const GLfloat *)p.data());
glUniform1f(uDS_location, ds);
glUniform1f(L0_location, l0);
glActiveTexture(GL_TEXTURE0 + model.activeTexturesCount());
glBindTexture(GL_TEXTURE_2D, irradianceTexture);
glUniform1i(irradiance_location, model.activeTexturesCount());
glEnable(GL_DEPTH_TEST);
model.render();
if(debugDraw)
{
displayTexture(irradianceTexture);
displayTexture(skybox_texture, 0.f, -1.f);
}
camera->drawControls();
// Draw basic interface
basicInterface();
ImGui::Render();
ImGui_ImplGlfwGL3_RenderDrawData(ImGui::GetDrawData());
glfwSwapBuffers(window);
glfwPollEvents();
camera->update(window);
}
model.render() is as follows :
void GltfModel::render() const
{
    for(unsigned int i = 0; i < _activeTextures.size(); i++)
    {
        if(_textureLocations[i] > -1)
        {
            int j = _activeTextures[i];
            glActiveTexture(GL_TEXTURE0 + i);
            glBindTexture(GL_TEXTURE_2D, _textureIds[j]);
            glUniform1i(_textureLocations[i], i);
        }
    }
    glBindBuffer(GL_ARRAY_BUFFER, _buffers[ARRAY_BUFFER]);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _buffers[ELEMENT_ARRAY_BUFFER]);
    glDrawElements(_drawingMode, _indicesCount, _indicesType, NULL);
}
Thanks in advance for your time!
EDIT: putting a glClear(GL_COLOR_BUFFER_BIT) call right before glfwSwapBuffers(window) still results in a white screen with the culprit line uncommented, even though the clear color has been set to light blue. With the culprit line commented out, the screen does turn light blue, so this makes me think it's a framebuffer issue, but I can't say for sure.

Framebuffer cutting out section of models when moved

I am having an issue implementing a framebuffer for post-processing. So far I can draw everything to a textured quad and apply a shader to change the screen, so in that respect it works.
My issue is that when I move my models or the camera, the models start to render incorrectly and parts of them get "cut off". See the following screenshots, as it is hard to explain.
To avoid pages and pages of code: all the rendering works fine without the framebuffer, and everything looks fine as long as I don't move the camera or the models.
I set up the framebuffer like this:
//Set up FBO
glGenFramebuffers(1, &m_FrameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, m_FrameBuffer);
glActiveTexture(GL_TEXTURE2);
glGenTextures(1, &m_TexColorBuffer);
glBindTexture(GL_TEXTURE_2D, m_TexColorBuffer);
glTexImage2D(
GL_TEXTURE_2D, 0, GL_RGB, 1280, 720, 0, GL_RGB, GL_UNSIGNED_BYTE, 0
);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glFramebufferTexture2D(
GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_TexColorBuffer, 0
);
glGenRenderbuffers(1, &m_RBODepthStencil);
glBindRenderbuffer(GL_RENDERBUFFER, m_RBODepthStencil);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, 1280, 720);
glFramebufferRenderbuffer(
GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, m_RBODepthStencil
);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, m_TexColorBuffer, 0);
GLenum DrawBuffers[1] = { GL_COLOR_ATTACHMENT0 };
glDrawBuffers(1, DrawBuffers); // "1" is the size of DrawBuffers
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
std::cout << "Frame Buffer: Contructor: Issue completing frame buffer" << std::endl;
//Set Up Shader
m_ScreenShader = new Shader("../Assets/Shaders/screen.vert", "../Assets/Shaders/screen.frag");
//Setup Quad VAO
glGenBuffers(1, &m_QuadVBO);
glBindBuffer(GL_ARRAY_BUFFER, m_QuadVBO);
glBufferData(GL_ARRAY_BUFFER, 24 * sizeof(float), points, GL_STATIC_DRAW);
glGenVertexArrays(1, &m_QuadVAO);
glBindVertexArray(m_QuadVAO);
//glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, m_QuadVBO);
//glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
GLint posAttrib = glGetAttribLocation(m_ScreenShader->getID(), "position");
glEnableVertexAttribArray(posAttrib);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), 0);
GLint texAttrib = glGetAttribLocation(m_ScreenShader->getID(), "texcoord");
glEnableVertexAttribArray(texAttrib);
glVertexAttribPointer(texAttrib, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
I then bind it prior to drawing the scene (skybox and models), and then draw it like this:
Unbind();
glBindVertexArray(m_QuadVAO);
glDisable(GL_DEPTH_TEST);
m_ScreenShader->enable();
m_ScreenShader->setUniform1f("time", glfwGetTime());
GLint baseImageLoc = glGetUniformLocation(m_ScreenShader->getID(), "texFramebuffer");
glUniform1i(baseImageLoc, 2);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, m_TexColorBuffer);
glDrawArrays(GL_TRIANGLES, 0, 6);
Any help is appreciated!
When rendering into a framebuffer that has a depth buffer and the depth test is enabled, the depth buffer must be cleared. It has to be cleared after binding the framebuffer and before drawing to it:
glBindFramebuffer( GL_FRAMEBUFFER, m_FrameBuffer );
glClear( GL_DEPTH_BUFFER_BIT );
Note that if you do not clear the depth buffer, it retains its contents. The depth test can then fail at positions where geometry was in previous frames, so parts of the geometry go missing, exactly as in the image in the question.
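Putting that together, a per-frame sequence along these lines (the draw function names are placeholders, not taken from the question) keeps the depth buffer fresh:

// First pass: render the scene into the off-screen framebuffer.
glBindFramebuffer(GL_FRAMEBUFFER, m_FrameBuffer);
glEnable(GL_DEPTH_TEST);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear color and depth every frame
DrawScene();                                        // skybox + models (placeholder)

// Second pass: draw the textured quad to the default framebuffer.
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glDisable(GL_DEPTH_TEST);
glClear(GL_COLOR_BUFFER_BIT);
DrawScreenQuad();                                   // placeholder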

Opengl Thread 1: EXC_BAD_ACCESS (code=1, address=0x0)

I am making an OpenGL video game using GLFW version 2, and I am getting an error I do not understand.
The code is as follows:
//
// GameWindow.cpp
// RocketGame
//
// Created by Vaibhav Malhotra on 12/5/17.
// Copyright © 2017 Vaibhav Malhotra. All rights reserved.
//
#include "GameWindow.hpp"
typedef struct
{
GLfloat positionCoordinates[3];
GLfloat textureCoordinates[2];
} vertexData;
#define Square_Size 100
vertexData vertices[] = {
{{0.0f,0.0f,0.0f}, {0.0f,0.0f}},
{{Square_Size,0.0f,0.0f},{1.0f,0.0f}},
{{Square_Size,Square_Size,0.0f},{1.0f,1.0f}},
{{0.0f,Square_Size,0.0f},{0.0f,1.0f}}
};
void GameWindow::setRunning(bool newRunning)
{
_running = newRunning;
}
bool GameWindow::getRunning()
{
return _running;
}
GLuint GameWindow::loadAndBufferImage(const char *filename)
{
GLFWimage imageData;
glfwReadImage(filename, &imageData, NULL);
GLuint textureBufferID;
glGenTextures(1, &textureBufferID);
glBindTexture(GL_TEXTURE_2D, textureBufferID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageData.Width, imageData.Height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData.Data);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glfwFreeImage(&imageData);
return textureBufferID;
}
GameWindow::GameWindow(bool running):_running(running),_height(800),_width(800*16/9),_vertexBufferID(0)
{
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glViewport(0.0f, 0.0f, _width, _height);
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glMatrixMode(GL_PROJECTION);
gluOrtho2D(0, _width, 0, _height);
glMatrixMode(GL_MODELVIEW);
glGenBuffers(1, &_vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, sizeof(vertexData), (GLvoid *) offsetof(vertexData, positionCoordinates));
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(2, GL_FLOAT, sizeof(vertexData), (GLvoid *) offsetof(vertexData, textureCoordinates));
_textureBufferID = loadAndBufferImage("rocket.tga");
}
void GameWindow::render()
{
glClear(GL_COLOR_BUFFER_BIT);
//glColor3f(1.0f, 0.0f, 0.0f);
glDrawArrays(GL_QUADS, 0, 4);
glfwSwapBuffers();
}
void GameWindow::update()
{
}
Under the render function, the call glDrawArrays(GL_QUADS, 0, 4); produces a runtime error:
Thread 1: EXC_BAD_ACCESS (code=1, address=0x0).
For output I am just getting a black screen.
Why am I getting this error?
Under the render function the code glDrawArrays(GL_QUADS, 0, 4); is returning a runtime error Thread 1: EXC_BAD_ACCESS (code=1, address=0x0).
A memory access violation in glDrawArrays generally hints at some sort of problem with VBOs or similar storage objects.
From what I can tell, there are three issues with your code:
You are only calling glEnableClientState and its corresponding gl*Pointer calls once, when they need to be called every frame before glDrawArrays. If you'd like to only call them once, look into Vertex Array Objects (VAOs), although these are only available since OpenGL 3. Bear in mind that when you move the glEnableClientState calls, you will have to ensure that your buffer is bound again with glBindBuffer.
Each client state has a corresponding gl*Pointer call. You correctly use glVertexPointer for GL_VERTEX_ARRAY, but for GL_TEXTURE_COORD_ARRAY you need glTexCoordPointer.
Your glEnableClientState calls are missing their corresponding glDisableClientState calls, which need to be called in reverse order of the enabling calls, after your glDrawArrays call. A combined sketch of these three points follows this list.
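The per-frame draw combining points 1-3 might look like the following sketch, reusing the vertexData layout from the question; this is only an illustration of the legacy-GL path, not a drop-in patch:

glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);

glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, sizeof(vertexData),
                (GLvoid *) offsetof(vertexData, positionCoordinates));

glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2, GL_FLOAT, sizeof(vertexData),
                  (GLvoid *) offsetof(vertexData, textureCoordinates));

glDrawArrays(GL_QUADS, 0, 4);

glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);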
Just as a side note, the GLU library is deprecated. Your gluOrtho2D call appears to be the only instance where you depend on it, and it can be easily replaced with:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, _width, 0, _height, -1.0f, 1.0f);
You are getting an exception in glDrawArrays because it tries to read memory at address 0, which is what your code actually specifies.
GL's texture coordinate array pointer is initialized to NULL, with no VBO. Since you use legacy GL, this is interpreted as a client memory address. You enable GL_TEXTURE_COORD_ARRAY but never actually set glTexCoordPointer, so the pointer stays NULL:
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(2, GL_FLOAT, sizeof(vertexData), (GLvoid *) offsetof(vertexData, textureCoordinates));
The quick solution is of course not to overwrite your vertex array pointer state by pointing it at the texture coords, but to specify the texcoord array pointer via glTexCoordPointer.
The real solution is to give up on legacy GL, which has been deprecated for a decade now, and switch to "modern" OpenGL using a GL 3.2 core profile or above.
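For completeness, a rough sketch of what the core-profile equivalent of this vertex setup could look like; attribute locations 0 and 1 are assumed to match a shader that is not shown, and the rest of the modern pipeline (shader compilation, drawing with GL_TRIANGLES instead of GL_QUADS) is omitted:

// Minimal sketch of the equivalent vertex setup in core-profile GL.
GLuint vao, vbo;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);

glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

glEnableVertexAttribArray(0); // layout(location = 0) in vec3 aPosition;
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vertexData),
                      (void *) offsetof(vertexData, positionCoordinates));
glEnableVertexAttribArray(1); // layout(location = 1) in vec2 aTexCoord;
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(vertexData),
                      (void *) offsetof(vertexData, textureCoordinates));

glBindVertexArray(0);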

glUseProgram doesn't change shader

I am currently working on an application with (not yet implemented) post effects. The 3D scene gets rendered into a buffer, and then another shader renders that buffer to the screen.
My problem is that in the second pass the program uses my basic shader for the 3D scene instead of the post-effect shader. Do you have an idea what could be causing this?
My approach: I have a class with two major functions:
- bindBuffer() sets the render target to a buffer
- renderSSAO() renders the buffer to the screen and hopefully will one day add an ambient occlusion effect. :)
BIND BUFFER FUNCTION
void SSAOShader::bindBuffer() {
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glViewport(0,0,windowWidth,windowHeight);
}
RENDER FUNCTION
void SSAOShader::renderSSAO(GLuint currentShader) {
// set shader
glUseProgram(shader);
//draw on Screen
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0,0,windowWidth,windowHeight);
// clear screen
glClearColor(0.4, 0.4, 0.4, 1.0);
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
// bind texture, pass to shader
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, renderTexture);
glUniform1i(textureID, 0);
glBindVertexArray(vertexarray);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindVertexArray(0);
glUseProgram(currentShader);
}
Render loop (the functions are called on the ssaoShader object):
while (!glfwWindowShouldClose(mainWindow)) {
ssaoShader->bindBuffer();
// clear the screen
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// draw cool stuff
ssaoShader->renderSSAO(programID);
// Swap buffers
glfwSwapBuffers(mainWindow);
glUseProgram(programID);
}
So basically, the render target gets set to my buffer with bindBuffer(), then I draw objects with my basic (Phong etc.) shader, and the renderSSAO function changes the target back to the screen. This function also switches to my post-effects shader and then changes back to whatever shader was used before. programID is the shader GLuint.
I know the problem is the shader being used, because the end result is a black screen with a quad at the world origin, and I can still move around as normal.
CONSTRUCTOR
SSAOShader::SSAOShader(float windowWidth, float windowHeight, GLuint currentShader) {
this->windowWidth = windowWidth;
this->windowHeight = windowHeight;
//shader
shader = LoadShaders("Passthrough.vertexshader", "SSAO.fragmentshader");
glUseProgram(shader);
textureID = glGetUniformLocation(shader, "renderTexture");
int status;
glGetProgramiv(shader, GL_COMPILE_STATUS, &status);
std::cout << (status == GL_TRUE) << std::endl;
GLfloat planeVertices[] = {
-1, -1,
1, -1,
-1, 1,
1, 1
};
// create vertex array
glGenVertexArrays(1, &vertexarray);
glBindVertexArray(vertexarray);
// create "uv" (vertex) buffer
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(planeVertices), planeVertices, GL_STATIC_DRAW);
// add vertecies to as attributes
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
glBindVertexArray(0);
// ---- set up framebuffer ----
glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glGenTextures(1, &renderTexture);
glBindTexture(GL_TEXTURE_2D, renderTexture);
glTexImage2D(GL_TEXTURE_2D, 0,GL_RGB, windowWidth, windowHeight, 0,GL_RGB, GL_UNSIGNED_BYTE, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glGenRenderbuffers(1, &depthrenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthrenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, windowWidth, windowHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthrenderbuffer);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, renderTexture, 0);
DrawBuffers[1] = GL_COLOR_ATTACHMENT0;
glDrawBuffers(1, DrawBuffers);
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE){
exit(0); // bad luck
}
glBindFramebuffer(GL_FRAMEBUFFER, 0);
//glUseProgram(currentShader);
}
VS2012 Project
https://studi.f4.htw-berlin.de/~s0539750/SSAOTest.zip
For whoever is mad enough to look at my code. ;)
I found the problem.
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, renderTexture, 0);
DrawBuffers[1] = GL_COLOR_ATTACHMENT0;
glDrawBuffers(1, DrawBuffers);
This part of the constructor was changing my GLuint shader;. I don't know exactly why, but the reason is that DrawBuffers is a fixed-size array with one element, so accessing DrawBuffers[1] isn't the best idea of the day. ;)
My guess would be that either DrawBuffers[1] overlapped my "shader" GLuint, or the problem was that glDrawBuffers never really attached GL_COLOR_ATTACHMENT0 to my framebuffer. Which is the case?
Thanks for all the help by the way, it helped a lot. :)
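For reference, a fixed version of that fragment would size and index the array consistently; a minimal sketch:

// The array has one element, so the only valid index is 0.
GLenum DrawBuffers[1] = { GL_COLOR_ATTACHMENT0 };
glDrawBuffers(1, DrawBuffers);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, renderTexture, 0);
// Writing DrawBuffers[1] on a one-element member array is an out-of-bounds write
// (undefined behaviour) and can clobber adjacent members such as the shader id.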