Render depth from framebuffer texture - OpenGL

I've been trying to add SSAO (based on the tutorial here: http://john-chapman-graphics.blogspot.co.nz/2013/01/ssao-tutorial.html) to a project of mine and I've gotten stuck on rendering the depth correctly.
I have created a framebuffer texture to draw the depth to each frame. The texture itself looks correct when I draw it to a quad but I can't figure out how to correctly apply it to the whole scene.
Here's what the regular scene looks like:
And here's the applied depth texture:
Currently in my fragment shader I just use my regular texture coordinates (TexCoord0) to draw the depth texture. I'm guessing I need to change this but I don't have a clue what to change it to.
Here's the relevant fragment shader code:
#version 330 core
in vec2 TexCoord0;
smooth in vec3 vNormal;
smooth in vec3 vWorldPos;
in mat4 ProjectionMatrix;
uniform sampler2D uDepthTex;
layout(location = 0) out vec4 FragColor;
void main()
{
    FragColor = texture(uDepthTex, TexCoord0);
}
I'm at a bit of a loss as to how to fix this. I've read a lot of sample code online that seems to use regular texture coordinates to draw the depth, but those samples draw onto a full-screen quad, where the quad's texture coordinates happen to coincide with screen position; that isn't the case when sampling the depth texture from arbitrary scene geometry.
EDIT:
Here's my framebuffer setup code:
glGenFramebuffers(1, &ssaoFramebufferID);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glGenTextures(1, &ssaoDepthTextureID);
glBindTexture(GL_TEXTURE_2D, ssaoDepthTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT24, 1024, 1024, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_NONE);
glFramebufferTexture(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, ssaoDepthTextureID, 0);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
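Side note (not part of the original code): just before that final glBindFramebuffer(GL_FRAMEBUFFER, 0), it's worth checking completeness, so a misconfigured attachment shows up immediately rather than as a silently black texture. A minimal sketch, assuming <iostream> is available:
// Verify the depth-only FBO while it is still bound.
// GL_FRAMEBUFFER_COMPLETE is the only status that means "usable".
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE)
    std::cerr << "SSAO depth FBO incomplete, status: 0x" << std::hex << status << std::endl;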
And the depth pass code I call each frame before the regular draw:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glViewport(0, 0, 1024, 1024);
depthShader.Bind();
depthShader.SetUniformMatrixfv("depthMVP", camera.GetViewMatrix() * Matrix4::Perspective(60, aspectRatio, 0.1f, 10.0f));
DrawScene(true);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, width, height);
My depth shaders, vertex:
#version 330 core
layout(location = 0) in vec3 Position;
uniform mat4 depthMVP;
void main() {
    gl_Position = depthMVP * vec4(Position, 1.0);
}
fragment:
#version 330 core
layout(location = 0) out float fragmentdepth;
void main() {
    fragmentdepth = gl_FragCoord.z;
}
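An aside that isn't in the original post: because the FBO was created with glDrawBuffer(GL_NONE), the fragmentdepth color output has no buffer to land in; the depth attachment is filled automatically from gl_FragCoord.z by the depth test. So an equivalent depth-only fragment shader can even be empty:
#version 330 core
// No color buffer is attached, so nothing needs to be output here;
// the hardware writes gl_FragCoord.z to the depth attachment itself.
void main() { }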
EDIT2:
The glClear call where it currently is gives me a completely black texture, which makes sense in hindsight: glClear operates on whatever framebuffer is bound at the time, so clearing before binding ssaoFramebufferID wipes the default framebuffer instead. Putting it below the first glViewport call gives the depth image I displayed above though:
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glViewport(0, 0, 1024, 1024);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

I managed to get it working thanks to some help in the comments:
My code is now as follows, fragment shader:
#version 330 core
in vec2 TexCoord0;
smooth in vec3 vNormal;
smooth in vec3 vWorldPos;
in mat4 ProjectionMatrix;
uniform sampler2D uDepthTex;
uniform float uWidth;
uniform float uHeight;
layout(location = 0) out vec4 FragColor;
void main()
{
    vec2 res = gl_FragCoord.xy / vec2(uWidth, uHeight);
    FragColor = texture(uDepthTex, res);
}
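For completeness, a hypothetical application-side sketch (not from the original post) of feeding uWidth/uHeight; sceneProgram is a placeholder name for whatever program object samples uDepthTex:
// Pass the window size so gl_FragCoord.xy can be normalized to [0, 1].
// 'sceneProgram' is a placeholder, not a name from the original code.
glUseProgram(sceneProgram);
glUniform1f(glGetUniformLocation(sceneProgram, "uWidth"), (float)width);
glUniform1f(glGetUniformLocation(sceneProgram, "uHeight"), (float)height);
Alternatively, textureSize(uDepthTex, 0) inside the shader would yield the same dimensions without the two uniforms, as long as the depth texture matches the window size.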
Framebuffer creation:
glGenFramebuffers(1, &ssaoFramebufferID);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glGenTextures(1, &ssaoDepthTextureID);
glBindTexture(GL_TEXTURE_2D, ssaoDepthTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, 1024, 768, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_FUNC, GL_LEQUAL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_NONE);
Depth draw code:
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glViewport(0, 0, 1024, 768);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
depthShader.Bind();
glDisable(GL_ALPHA_TEST);
depthShader.SetUniformMatrixfv("depthMVP", Matrix4::Perspective(60, aspectRatio, 0.1f, 10.0f) * camera.GetViewMatrix());
DrawScene();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, width, height);
glEnable(GL_ALPHA_TEST);
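Two things changed relative to the earlier attempt: the glClear now happens after the FBO is bound (so it clears the FBO rather than the default framebuffer), and the MVP is composed as projection * view instead of view * projection. One further caveat before wiring this into Chapman's SSAO: the texture stores the non-linear gl_FragCoord.z, while the tutorial reasons in view space, so the sampled value usually needs linearizing first. A sketch (uNear/uFar are assumed uniforms matching the 0.1f/10.0f planes above, not part of the original code):
uniform float uNear; // e.g. 0.1
uniform float uFar;  // e.g. 10.0

float linearizeDepth(float d)
{
    float zNdc = 2.0 * d - 1.0;                    // [0,1] depth -> NDC z
    return (2.0 * uNear * uFar) /
           (uFar + uNear - zNdc * (uFar - uNear)); // eye-space distance
}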

Related

OpenGL Texture still interpolated even with GL_NEAREST

I'm making a 3D game using OpenGL and I have encountered a problem I can't find the answer to. For some reason, when I load the textures they still come out interpolated.
My image loading:
glGenTextures(1, &chunkTex);
glBindTexture(GL_TEXTURE_2D, chunkTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, 0);
int width, height, chan;
stbi_set_flip_vertically_on_load(true);
unsigned char *data = stbi_load("res/images/atlas.png", &width, &height, &chan, 0);
if (data)
{
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
    glGenerateMipmap(GL_TEXTURE_2D);
} else {
    std::cout << "Failed to load texture" << std::endl;
}
stbi_image_free(data);
My vertex shader:
#version 330 core
in vec3 aPos;
in vec3 aColor;
in vec3 aNormal;
in vec2 uv;
uniform mat4 mvp;
out vec3 ourColor;
out vec2 puv;
out vec3 pNormal;
void main()
{
    gl_Position = mvp * vec4(aPos, 1.0);
    ourColor = aColor;
    puv = uv;
    pNormal = aNormal;
}
My fragment shader:
#version 330 core
out vec4 col;
in vec3 ourColor;
in vec2 puv;
in vec3 pNormal;
uniform sampler2D tex;
uniform vec3 lightDir;
void main()
{
    col = texture(tex, puv) * max(dot(pNormal, -lightDir), 0.1);
}
If you could help me fix this I would appreciate it!
Edit: Here are my window hints:
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#ifdef __APPLE__
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif
Here you create a new texture object and set the texture parameters for it:
glGenTextures(1, &chunkTex);
glBindTexture(GL_TEXTURE_2D, chunkTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
But here you unbind it and bind texture object 0 instead:
glBindTexture(GL_TEXTURE_2D, 0);
and load the texture image to that one:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
Now, in legacy and compatibility profile GL, texture object 0 does exist (in fact, there is a separate texture object named 0 for each texture type) and is usable. But you never set the texture filters for that one, so it uses the defaults. And it looks like you also keep texture object 0 bound for rendering, rather than the (empty and incomplete) chunkTex one.
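A sketch of the corrected ordering (reconstructed from the snippets above, not code from the thread): keep chunkTex bound while both the parameters are set and the image is uploaded, and only unbind afterwards:
glGenTextures(1, &chunkTex);
glBindTexture(GL_TEXTURE_2D, chunkTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
if (data)
{
    // Upload while chunkTex is still bound to GL_TEXTURE_2D.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, data);
    glGenerateMipmap(GL_TEXTURE_2D);
}
glBindTexture(GL_TEXTURE_2D, 0); // safe to unbind only after the upload
The same applies at draw time: chunkTex (not texture object 0) must be bound when the chunks are rendered.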

OpenGL - texture clamping not working in post processing

I'm having a problem with post processing in my deferred renderer.
When I'm applying an effect like bloom on top of my scene, the bottom of the bloom effect texture leaks into the top of my screen, the top of the texture leaks into the bottom of the screen, the left into the right of the screen and so on.
It looks like the UV coords don't get clamped correctly, although I thought I had set all of that up correctly.
The bloom effect in this case is created at a lower resolution and then gets upsampled using linear filtering.
This image shows the problem:
I render the postprocessing effects using a simple quad:
glm::vec3(-1.f, -1.f, 0.f),
glm::vec3(1.f, -1.f, 0.f),
glm::vec3(1.f, 1.f, 0.f),
glm::vec3(-1.f, 1.f, 0.f)
This is the vertex shader I use to render the quad:
#version 430 core
layout(location = 0) in vec3 POSITION;
noperspective out vec2 tex_coord;
void main(void)
{
    tex_coord = POSITION.xy * 0.5 + 0.5;
    gl_Position = vec4(POSITION.xy, 0.0, 1.0);
}
And of course in the fragment shader I'm doing something like this:
#version 430 core
uniform sampler2D uTexture;
noperspective in vec2 tex_coord;
out vec4 frag_color;
void main(void)
{
    frag_color = texture(uTexture, tex_coord);
}
The effect got rendered into a frame buffer object which got set up like this:
glGenFramebuffers(1, &_filterFbo);
glBindFramebuffer(GL_FRAMEBUFFER, _filterFbo);
// filter texture
glGenTextures(1, &_filterTexture);
glBindTexture(GL_TEXTURE_2D, _filterTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16F, _effectWidth, _effectHeight, 0, GL_RGB, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _filterTexture, 0);
I really don't know what the problem is here; I hope someone has an idea.
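The thread ends here without an accepted answer, but one detail in the FBO setup above stands out (an observation, not a confirmed resolution): the two wrap calls pass GL_TEXTURE_2D_ARRAY, while _filterTexture is bound to GL_TEXTURE_2D. glTexParameteri configures whatever texture is bound to the target you name, so _filterTexture keeps its default GL_REPEAT wrap mode, which would produce exactly this opposite-edge leaking under linear upsampling. The corrected calls would be:
// Must name the target _filterTexture is actually bound to (GL_TEXTURE_2D);
// GL_TEXTURE_2D_ARRAY configures a different texture object entirely.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);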

OpenGL FBO Returns Black Screen

I'm having a bit of trouble with my frame buffers in my OpenGL C++ application. There are no errors thrown (I get GL_FRAMEBUFFER_COMPLETE when I call glCheckFramebufferStatus); however, when I render my FBO texture all I see is a black screen. I've searched for numerous hours and redone it multiple times to no avail, so I'm finally turning to you guys for some help. Below is all the relevant code. I hope you guys can spot something that my tired eyes couldn't. Thank you.
Initialization code:
glGenFramebuffers(1, &m_fbo);
glBindFramebuffer(GL_FRAMEBUFFER, m_fbo);
m_texture = new Texture(width, height);
//glBindTexture(GL_TEXTURE_2D, m_texture->GetID());
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_texture->GetID(), 0);
glGenRenderbuffers(1, &m_depth);
glBindRenderbuffer(GL_RENDERBUFFER, m_depth);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, width, height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, m_depth);
GLuint status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
CheckError(status);
Texture init code:
unsigned char* imageData = fileName != "" ? stbi_load(fileName.c_str(), &width, &height, &numComponents, 4) : 0;
if (imageData == NULL && fileName != "")
std::cerr << "Texture loading failed for texture: " << fileName << std::endl;
glGenTextures(1, &m_texture);
glBindTexture(GL_TEXTURE_2D, m_texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
glBindTexture(GL_TEXTURE_2D, 0);
if (fileName != "") stbi_image_free(imageData);
Texture bind code:
glActiveTexture(GL_TEXTURE0 + unit);
glBindTexture(GL_TEXTURE_2D, m_texture);
Framebuffer bind code:
glBindFramebuffer(GL_FRAMEBUFFER, m_fbo);
glViewport(0, 0, m_width, m_height);
Main code:
fbo.BindFramebuffer();
display.Clear(1.0f, 0.0f, 0.0f, 1.0f);
fboShader.Bind();
fboShader.Update(transform2, camera);
texture.Bind(0);
mesh2.Draw();
display.Bind();
display.Clear(0.0f, 0.0f, 0.9f, 1.0f);
shader.Bind();
shader.Update(transform, camera);
fbo.BindFramebufferTexture(0);
mesh3.Draw();
FBO shader code
Fragment:
#version 330
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 position0;
uniform sampler2D diffuse;
out vec4 outputColor;
void main()
{
    outputColor = vec4(normal0, 1.0);
    //gl_FragColor = vec4(normal0, 1.0);
}
Vertex:
#version 330
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 position0;
uniform mat4 transform;
void main()
{
    gl_Position = transform * vec4(position, 1.0);
    texCoord0 = texCoord;
    normal0 = (transform * vec4(normal, 0.0)).xyz;
    position0 = gl_Position.xyz;
}
What I should see is a box (mesh3) textured with the FBO contents, a monkey head colored by its normals (mesh2) on a red background, but all I get is black. I really appreciate any help!
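This question also ends without a posted resolution. One thing worth ruling out (a guess, not a confirmed diagnosis): the shaders use attribute/varying with #version 330, which only compiles in a compatibility profile; in a core context they fail to compile, and an unchecked compile failure typically renders as plain black. A minimal sketch of the status checks (shaderID/programID are placeholder names):
GLint ok = GL_FALSE;
glGetShaderiv(shaderID, GL_COMPILE_STATUS, &ok);
if (ok != GL_TRUE) {
    char log[1024];
    glGetShaderInfoLog(shaderID, sizeof(log), NULL, log);
    std::cerr << "shader compile failed:\n" << log << std::endl;
}
glGetProgramiv(programID, GL_LINK_STATUS, &ok);
if (ok != GL_TRUE) {
    char log[1024];
    glGetProgramInfoLog(programID, sizeof(log), NULL, log);
    std::cerr << "program link failed:\n" << log << std::endl;
}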

Viewing depth buffer in OpenGL

I am having trouble viewing depth rendered to a texture. I can view it when I render depth as RGB to a texture, but when I try to render depth only, I get only black. I have searched for many hours and implemented fixes from other similar questions, but to no avail. I've set GL_TEXTURE_COMPARE_MODE to GL_NONE and added glDrawBuffer(GL_NONE) and glReadBuffer(GL_NONE).
I'm running on OSX 10.9 and OpenGL 4.1. I've removed gl error checking from the pasted code for conciseness.
The setup for the depth FBO/texture is this:
glGenFramebuffers(1, &FramebufferName);
glBindFramebuffer(GL_FRAMEBUFFER, FramebufferName);
glGenTextures(1, &renderedTexture);
glBindTexture(GL_TEXTURE_2D, renderedTexture);
glTexImage2D(GL_TEXTURE_2D, 0,
GL_DEPTH_COMPONENT,
fbWidth,
fbHeight,
0,
GL_DEPTH_COMPONENT,
GL_FLOAT,
0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_FUNC, GL_LEQUAL);
// glTexParameteri (GL_TEXTURE_2D, GL_DEPTH_TEXTURE_MODE, GL_INTENSITY);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_R_TO_TEXTURE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_NONE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, renderedTexture, 0);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
assert(false && "framebuffer NOT OK");
Depth fragment shader (I can see depth when rendering RGB):
#version 150 core
#if RENDER_RGB
out vec4 outColor;
void main(){
    vec3 color = vec3(gl_FragCoord.z);
    outColor = vec4(color, 1.0);
}
#else
void main(){
    gl_FragDepth = gl_FragCoord.z;
}
#endif
Reading back the depth texture, I am using this fragment shader:
#version 150 core
in vec2 UV;
uniform sampler2D renderedTexture;
out vec4 outColor;
void main(){
    vec3 color = vec3( texture(renderedTexture, UV).r );
    outColor = vec4(color, 1.0);
}
Never mind, it does work! My problem was that the depth-only FBO setup code for the depth texture was never being called.

glGetTexImage doesn't work with unsigned integer texture

I'm using an integer texture bound to a framebuffer to receive render data.
I've bound it to a quad to display it on the screen, and I'm sure the content of the texture is right.
But when I use glGetTexImage to get the content of the texture, I get random numbers.
Here is the code to create the framebuffer:
glEnable(GL_TEXTURE_2D);
glGenFramebuffers(1, &param.fbo);
glGenTextures(1,&param.triTex);
glBindTexture(GL_TEXTURE_2D, param.triTex);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_FALSE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R32UI, param.fboSize, param.fboSize, 0, GL_RED_INTEGER, GL_UNSIGNED_INT, 0);
glBindFramebuffer(GL_FRAMEBUFFER, param.fbo);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, param.triTex, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
The texture and framebuffer render without problems.
Here is the code that reads back the content:
glBindTexture(GL_TEXTURE_2D, param.triTex);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
triTexData = (unsigned int *)malloc(param.fboSize * param.fboSize * sizeof(unsigned int));
memset(triTexData, 0, sizeof(unsigned int) * param.fboSize * param.fboSize);
glGetTexImage(GL_TEXTURE_2D, 0, GL_RED_INTEGER, GL_UNSIGNED_INT, triTexData);
glBindTexture(GL_TEXTURE_2D,0);
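As a cross-check (a sketch, not from the original post), the same data can be read through the FBO with glReadPixels, which likewise requires the *_INTEGER pixel format for an integer attachment:
// Read the R32UI attachment directly from the framebuffer.
glBindFramebuffer(GL_FRAMEBUFFER, param.fbo);
glReadBuffer(GL_COLOR_ATTACHMENT0);
glReadPixels(0, 0, param.fboSize, param.fboSize,
             GL_RED_INTEGER, GL_UNSIGNED_INT, triTexData);
glBindFramebuffer(GL_FRAMEBUFFER, 0);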
Fragment shader and geometry shader
#version 330
flat in int color;
out vec4 fragColor;
void main(void)
{
    fragColor = vec4(color, 0.0, 0.0, 0.0);
}
#version 330
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;
flat out int color;
void main(void)
{
    gl_Position = gl_in[0].gl_Position;
    color = gl_PrimitiveIDIn;
    EmitVertex();
    gl_Position = gl_in[1].gl_Position;
    color = gl_PrimitiveIDIn;
    EmitVertex();
    gl_Position = gl_in[2].gl_Position;
    color = gl_PrimitiveIDIn;
    EmitVertex();
    EndPrimitive();
}
Your fragment shader should actually be written this way to output to a GL_R32UI image:
#version 330
flat in int color;
out uint fragColor;
void main (void) {
    fragColor = uint(color); // explicit cast: GLSL 3.30 has no implicit int -> uint
}
At present, you are not reading back "random" numbers, you are just seeing what happens when you interpret floating-point data as integers without the proper conversion. Not pleasant, is it?
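A related gotcha worth appending (my addition, not part of the original answer): clearing works differently for integer color buffers too. glClear/glClearColor are specified for fixed- and floating-point buffers, and per the spec the result of glClear on an integer color buffer is undefined, so the R32UI attachment should be cleared explicitly:
// Clear drawbuffer 0 (GL_COLOR_ATTACHMENT0) of the bound FBO to zero.
const GLuint zero[4] = { 0u, 0u, 0u, 0u };
glBindFramebuffer(GL_FRAMEBUFFER, param.fbo);
glClearBufferuiv(GL_COLOR, 0, zero);
glBindFramebuffer(GL_FRAMEBUFFER, 0);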