OpenGL - rendering to 3D Texture has no effect on texture - c++

I am trying to splat vertex information into a 3D Texture.
Corresponding C++ code:
GLuint color_tex;
GLuint fb;
glGenTextures(1, &color_tex);
glBindTexture(GL_TEXTURE_3D, color_tex);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_REPEAT);
glTexImage3D(GL_TEXTURE_3D, 0, GL_R32UI, 32, 32, 32, 0, GL_RED_INTEGER, GL_UNSIGNED_INT, NULL);
//-------------------------
glGenFramebuffers(1, &fb);
glBindFramebuffer(GL_FRAMEBUFFER, fb);
//Attach the 3D texture to this FBO as a layered color attachment
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, color_tex, 0);
//-------------------------
//Does the GPU support current FBO configuration?
GLenum status;
status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
switch(status)
{
case GL_FRAMEBUFFER_COMPLETE:
std::cout << "good" << std::endl;
break;
default:
break;
}
//-------------------------
glClearColor(0,0,0,0);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(splat_vertex_ids);
glBindVertexArray( mVAO );
glDrawArrays(mPrimitiveType, 0, mDrawSize);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
GLuint content[32*32*32];
glBindTexture(GL_TEXTURE_3D, color_tex);
glGetTexImage(GL_TEXTURE_3D, 0, GL_RED_INTEGER, GL_UNSIGNED_INT, &content);
for(int i = 0; i< 32*32*32; ++i)
if(content[i] != 0)
std::cout << content[i] << std::endl;
I read back the contents to check whether anything was rendered, but the texture contains only 0s.
My first thought was that my shaders weren't working correctly, so I replaced them with these:
Geometry Shader:
#version 330
layout(points) in;
layout(points, max_vertices = 1) out;
in vec3[] vPosition;
out vec2 oTexCoord;
void main(void)
{
oTexCoord = vec2(0.2);
gl_Position = vec4(0.2,0.2, 0.2,1);
gl_Layer = 0;
EmitVertex();
EndPrimitive();
}
Vertex Shader:
#version 330
layout(location = 0)in vec3 aPosition;
uniform vec3 uChunkStart;
uniform float uRes;
out vec3 vPosition;
void main(void)
{
vPosition = aPosition;
}
Fragment Shader:
#version 330
in vec2 oTexCoord;
out uint color;
void main(void)
{
color = 1337u;
}
I don't know what's wrong with this. Shouldn't it create a single dot? It's my first time doing something like this.
EDIT: Rendering to a 2D texture also fails
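For reference, drawing into an integer color attachment usually needs a viewport that matches the attachment size, and integer attachments are cleared with glClearBufferuiv rather than glClearColor/glClear. A minimal sketch, reusing the names from the code above (the 32x32 viewport is an assumption):
glBindFramebuffer(GL_FRAMEBUFFER, fb);
glViewport(0, 0, 32, 32); // the default viewport is the window size, not the attachment size
GLuint zeros[4] = {0, 0, 0, 0};
glClearBufferuiv(GL_COLOR, 0, zeros); // integer attachments should be cleared with glClearBuffer*, not glClear
glUseProgram(splat_vertex_ids);
glBindVertexArray(mVAO);
glDrawArrays(mPrimitiveType, 0, mDrawSize);
glBindFramebuffer(GL_FRAMEBUFFER, 0);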

Related

GLSL cannot get sampler3D value

I cannot get a sampler3D value in my GLSL fragment shader.
I am writing a shader in GLSL and want to use a sampler3D (3D texture) as volume data and do volume rendering. However, it seems I cannot bind the 3D texture to my shader's sampler3D: texture3D() always returns (0,0,0,0) on the shader side. On the application side, though, I can use glGetTexImage to read the data back.
Application Side (dS is a Shader class object):
dS.use();
dS.setInt("volumeData", 0);
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_3D, texture);
float data[64];
for (int i = 0; i < 64; ++i) data[i] = 1.0f;
glTexImage3D(GL_TEXTURE_3D, 0, GL_R16F, 4, 4, 4, 0, GL_RED, GL_FLOAT, data);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_3D, 0);
float read[64];
glBindTexture(GL_TEXTURE_3D, texture);
glGetTexImage(GL_TEXTURE_3D, 0, GL_RED, GL_FLOAT, read);
std::cout << read[0] << "," << read[63] << std::endl; // 1,1
glBindTexture(GL_TEXTURE_3D, 0);
// When drawing.
dS.use();
dS.setMat4("model", glm::mat4(1.0f));
dS.setMat4("view", view);
dS.setMat4("projection", projection);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_3D, texture);
cube.draw();
Vertex Shader
#version 330 core
// for snow rendering
layout (location = 0) in vec3 aPos;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main()
{
gl_Position = projection * view * model * vec4(aPos, 1.0);
}
Fragment Shader
#version 330
out vec4 FragColor;
uniform sampler3D volumeData;
void main()
{
ivec3 size = textureSize(volumeData, 0);
float c = texture3D(volumeData, vec3(0)).x;
if (size.x == 1)
FragColor = vec4(1.0, 0.0, c, 1.0);
else if (size.x > 2)
FragColor = vec4(0.0, 1.0, 0.0, 1.0);
else if (size.x == 0)
FragColor = vec4(0.0, 0.0, 1.0, 1.0);
else
FragColor = vec4(1.0, 1.0, 1.0, 1.0);
}
I only get a red cube, which means size.x == 1 and texture3D returns 0; I expected size.x == 4 and texture3D to return 1.0.
I used a similar method for a 2D texture and it works, so I assume my Shader class is correct. I also tried adding glEnable(GL_TEXTURE_3D) before doing any 3D texture operations.
I am not sure if this helps: I am using glfw-3.2.1 WIN64 with glad.
I solved it.
It's a really stupid typo.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
should use GL_TEXTURE_3D instead of GL_TEXTURE_2D.
And since the default GL_TEXTURE_MIN_FILTER state is GL_NEAREST_MIPMAP_LINEAR and only mip level 0 was uploaded, the texture was mipmap-incomplete.
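In code, the fix amounts to retargeting the two filter calls (everything else above stays the same):
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);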

OpenGL 4.3 - CubeMap only works when bound as a GL_TEXTURE_2D

I am trying to implement a skybox in OpenGL. It doesn't work unless I bind the texture as GL_TEXTURE_2D.
This is how I load my cube map:
// http://www.antongerdelan.net/opengl/cubemaps.html
void SkyBoxMaterial::CreateCubeMap(const char* front, const char* back, const char* top, const char* bottom, const char* left, const char* right, GLuint* tex_cube){
glGenTextures(ONE, tex_cube);
assert(LoadCubeMapSide(*tex_cube, GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, front));
assert(LoadCubeMapSide(*tex_cube, GL_TEXTURE_CUBE_MAP_POSITIVE_Z, back));
assert(LoadCubeMapSide(*tex_cube, GL_TEXTURE_CUBE_MAP_POSITIVE_Y, top));
assert(LoadCubeMapSide(*tex_cube, GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, bottom));
assert(LoadCubeMapSide(*tex_cube, GL_TEXTURE_CUBE_MAP_NEGATIVE_X, left));
assert(LoadCubeMapSide(*tex_cube, GL_TEXTURE_CUBE_MAP_POSITIVE_X, right));
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
checkGLErrors("SKYBOX_MATERIAL::CreateCubeMap", "END");
}
In the snippet below I bind the texture as a cube map. This does not work! I just get a black square. However, if I bind it as GL_TEXTURE_2D instead, I get a textured cube (?).
bool SkyBoxMaterial::LoadCubeMapSide(GLuint texture, GLenum side_target, const char* fileName){
glBindTexture(GL_TEXTURE_CUBE_MAP, texture);
int width, height, n;
int force_channels = 4;
unsigned char* image_data = stbi_load(fileName, &width, &height, &n, force_channels);
glTexImage2D(side_target, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image_data);
glBindTexture(GL_TEXTURE_CUBE_MAP, 0);
free(image_data);
return true;
}
Rendering of my material:
void SkyBoxMaterial::Draw(){
glDepthMask(GL_FALSE);
glUseProgram(programID);
glBindVertexArray(VAO);
glActiveTexture(GL_TEXTURE0);
glUniform1i(glGetUniformLocation(programID, "skybox"), 0);
glBindTexture(GL_TEXTURE_CUBE_MAP, cubeMapTexture);
glDrawArrays(geometry->getDrawMode(), 0, geometry->getNumVertices());
glBindVertexArray(CLEAR);
glUseProgram(CLEAR);
glDepthMask(GL_TRUE);
}
My vertex shader:
#version 430 core
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
layout (location = 0) in vec3 gl_vertex;
out vec3 texcoords;
void main(){
vec4 homogeneous_vertecies = vec4(gl_vertex,1.0);
gl_Position = projection*view*model*homogeneous_vertecies;
texcoords = gl_vertex;
}
Fragment Shader:
#version 430 core
in vec3 texcoords;
uniform samplerCube skybox;
// Output data
out vec4 color;
void main(){
color = texture(skybox,texcoords);
}
I am fairly new to OpenGL. Is this maybe a state flag issue?
These are the only three flags I set at the start.
glEnable(GL_DEPTH_TEST);
// Cull triangles whose normals do not face the camera
glEnable(GL_CULL_FACE);
// Accept a fragment if it is closer to the camera than the previous one
glDepthFunc(GL_LESS);
I don't really know what to go on here; this seems very obscure to me.
Thanks!
Marc
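For reference, a typical cube-map setup keeps the texture bound while all six faces are uploaded and sets the sampler state before unbinding. A sketch, where LoadFace and faces are hypothetical stand-ins for the stb_image loading shown above:
GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_CUBE_MAP, tex);
for (int i = 0; i < 6; ++i)
LoadFace(GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, faces[i]); // uploads one face with glTexImage2D
// sampler state is set while the cube map is still bound
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_CUBE_MAP, 0);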

OpenGL FBO Returns Black Screen

I'm having a bit of trouble with my frame buffers in my OpenGL C++ application. There are no errors thrown (I get GL_FRAMEBUFFER_COMPLETE when I call glCheckFramebufferStatus), but when I render my FBO texture all I see is a black screen. I've searched for numerous hours and redone it multiple times to no avail, so I'm finally turning to you guys for some help. Below is all the relevant code. I hope you can spot something that my tired eyes couldn't. Thank you.
Initialization code:
glGenFramebuffers(1, &m_fbo);
glBindFramebuffer(GL_FRAMEBUFFER, m_fbo);
m_texture = new Texture(width, height);
//glBindTexture(GL_TEXTURE_2D, m_texture->GetID());
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_texture->GetID(), 0);
glGenRenderbuffers(1, &m_depth);
glBindRenderbuffer(GL_RENDERBUFFER, m_depth);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, width, height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, m_depth);
GLuint status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
CheckError(status);
Texture init code:
unsigned char* imageData = fileName != "" ? stbi_load(fileName.c_str(), &width, &height, &numComponents, 4) : 0;
if (imageData == NULL && fileName != "")
std::cerr << "Texture loading failed for texture: " << fileName << std::endl;
glGenTextures(1, &m_texture);
glBindTexture(GL_TEXTURE_2D, m_texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
glBindTexture(GL_TEXTURE_2D, 0);
if (fileName != "") stbi_image_free(imageData);
Texture bind code:
glActiveTexture(GL_TEXTURE0 + unit);
glBindTexture(GL_TEXTURE_2D, m_texture);
Framebuffer bind code:
glBindFramebuffer(GL_FRAMEBUFFER, m_fbo);
glViewport(0, 0, m_width, m_height);
Main code:
fbo.BindFramebuffer();
display.Clear(1.0f, 0.0f, 0.0f, 1.0f);
fboShader.Bind();
fboShader.Update(transform2, camera);
texture.Bind(0);
mesh2.Draw();
display.Bind();
display.Clear(0.0f, 0.0f, 0.9f, 1.0f);
shader.Bind();
shader.Update(transform, camera);
fbo.BindFramebufferTexture(0);
mesh3.Draw();
FBO shader code
Fragment:
#version 330
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 position0;
uniform sampler2D diffuse;
out vec4 outputColor;
void main()
{
outputColor = vec4(normal0, 1.0);
//gl_FragColor = vec4(normal0, 1.0);
}
Vertex:
#version 330
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 position0;
uniform mat4 transform;
void main()
{
gl_Position = transform * vec4(position, 1.0);
texCoord0 = texCoord;
normal0 = (transform * vec4(normal, 0.0)).xyz;
position0 = gl_Position.xyz;
}
What I should see is a box (mesh3) textured with a monkey head colored by its normals (mesh2) on a red background, but all I get is black. I really appreciate any help!
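One debugging step that fits here (a sketch, not part of the original code; it assumes width and height are the FBO texture dimensions from the initialization code): read the color attachment back after the first pass to see whether the monkey head was drawn into it at all.
std::vector<unsigned char> pixels(width * height * 4);
glBindTexture(GL_TEXTURE_2D, m_texture->GetID());
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
glBindTexture(GL_TEXTURE_2D, 0);
// all-red pixels mean the FBO was cleared but nothing was drawn into it;
// all-zero pixels suggest the wrong texture or framebuffer was bound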

glGetTexImage doesn't work with unsigned integer texture

I'm using an integer texture bound to a framebuffer to receive render data.
I've also bound it to a quad to display it on the screen, and I'm sure the content of the texture is right.
But when I use glGetTexImage to get the content of the texture, I get random numbers.
Here is the code that creates the framebuffer:
glEnable(GL_TEXTURE_2D);
glGenFramebuffers(1, &param.fbo);
glGenTextures(1,&param.triTex);
glBindTexture(GL_TEXTURE_2D, param.triTex);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_FALSE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R32UI, param.fboSize, param.fboSize, 0, GL_RED_INTEGER, GL_UNSIGNED_INT, 0);
glBindFramebuffer(GL_FRAMEBUFFER, param.fbo);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, param.triTex, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
The texture and framebuffer render without problems.
Here is the code that reads the content back:
glBindTexture(GL_TEXTURE_2D, param.triTex);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
triTexData = (unsigned int *)malloc(param.fboSize * param.fboSize * sizeof(unsigned int));
memset(triTexData, 0, sizeof(unsigned int) * param.fboSize * param.fboSize);
glGetTexImage(GL_TEXTURE_2D, 0, GL_RED_INTEGER, GL_UNSIGNED_INT, triTexData);
glBindTexture(GL_TEXTURE_2D,0);
Fragment shader and geometry shader:
#version 330
flat in int color;
out vec4 fragColor;
void main(void)
{
fragColor = vec4(color,0.0,0.0,0.0);
}
#version 330
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;
flat out int color;
void main(void)
{
gl_Position = gl_in[0].gl_Position;
color = gl_PrimitiveIDIn;
EmitVertex();
gl_Position = gl_in[1].gl_Position;
color = gl_PrimitiveIDIn;
EmitVertex();
gl_Position = gl_in[2].gl_Position;
color = gl_PrimitiveIDIn;
EmitVertex();
EndPrimitive();
}
Your fragment shader should actually be written this way to output to a GL_R32UI image:
#version 330
flat in int color;
out uint fragColor;
void main (void) {
fragColor = uint(color); // explicit cast; GLSL 330 has no implicit int -> uint conversion
}
At present, you are not reading back "random" numbers, you are just seeing what happens when you interpret floating-point data as integers without the proper conversion. Not pleasant, is it?
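As a usage note (a sketch, not part of the answer): with the corrected shader, the same values can also be read back with glReadPixels while the FBO is still bound, using the matching integer format:
glBindFramebuffer(GL_FRAMEBUFFER, param.fbo);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
std::vector<unsigned int> ids(param.fboSize * param.fboSize);
glReadPixels(0, 0, param.fboSize, param.fboSize, GL_RED_INTEGER, GL_UNSIGNED_INT, ids.data());
glBindFramebuffer(GL_FRAMEBUFFER, 0);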

Render depth from framebuffer texture

I've been trying to add SSAO (based on the tutorial here: http://john-chapman-graphics.blogspot.co.nz/2013/01/ssao-tutorial.html) to a project of mine and I've gotten stuck on rendering the depth correctly.
I have created a framebuffer texture to draw the depth to each frame. The texture itself looks correct when I draw it to a quad but I can't figure out how to correctly apply it to the whole scene.
Here's what the regular scene looks like:
And here's the applied depth texture:
Currently in my fragment shader I just use my regular texture coordinates (TexCoord0) to draw the depth texture. I'm guessing I need to change this but I don't have a clue what to change it to.
Here's the relevant fragment shader code:
#version 330 core
in vec2 TexCoord0;
smooth in vec3 vNormal;
smooth in vec3 vWorldPos;
in mat4 ProjectionMatrix;
uniform sampler2D uDepthTex;
layout(location = 0) out vec4 FragColor;
void main()
{
FragColor = texture(uDepthTex, TexCoord0);
}
I'm at a bit of a loss as to how to fix this. I've read a lot of sample code online which seems to use regular texture coordinates to draw the depth.
EDIT:
Here's my framebuffer setup code:
glGenFramebuffers(1, &ssaoFramebufferID);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glGenTextures(1, &ssaoDepthTextureID);
glBindTexture(GL_TEXTURE_2D, ssaoDepthTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT24, 1024, 1024, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_NONE);
glFramebufferTexture(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, ssaoDepthTextureID, 0);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
And the depth pass code I call each frame before regular draw:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glViewport(0, 0, 1024, 1024);
depthShader.Bind();
depthShader.SetUniformMatrixfv("depthMVP", camera.GetViewMatrix() * Matrix4::Perspective(60, aspectRatio, 0.1f, 10.0f));
DrawScene(true);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, width, height);
My depth shaders, vertex:
#version 330 core
layout(location = 0) in vec3 Position;
uniform mat4 depthMVP;
void main() {
gl_Position = depthMVP * vec4(Position, 1.0);
}
fragment:
#version 330 core
layout(location = 0) out float fragmentdepth;
void main() {
fragmentdepth = gl_FragCoord.z;
}
EDIT2:
The glClear call where it currently sits gives me a completely black texture. Moving it below the first glViewport call gives the depth image I showed above, though:
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glViewport(0, 0, 1024, 1024);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
I managed to get it working thanks to some help in the comments:
My code is now as follows. Fragment shader:
#version 330 core
in vec2 TexCoord0;
smooth in vec3 vNormal;
smooth in vec3 vWorldPos;
in mat4 ProjectionMatrix;
uniform sampler2D uDepthTex;
uniform float uWidth;
uniform float uHeight;
layout(location = 0) out vec4 FragColor;
void main()
{
vec2 res = gl_FragCoord.xy / vec2(uWidth, uHeight);
FragColor = texture(uDepthTex, res);
}
Framebuffer creation:
glGenFramebuffers(1, &ssaoFramebufferID);
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glGenTextures(1, &ssaoDepthTextureID);
glBindTexture(GL_TEXTURE_2D, ssaoDepthTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, 1024, 768, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_FUNC, GL_LEQUAL);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_NONE);
Depth draw code:
glBindFramebuffer(GL_FRAMEBUFFER, ssaoFramebufferID);
glViewport(0, 0, 1024, 768);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
depthShader.Bind();
glDisable(GL_ALPHA_TEST);
depthShader.SetUniformMatrixfv("depthMVP", Matrix4::Perspective(60, aspectRatio, 0.1f, 10.0f) * camera.GetViewMatrix());
DrawScene();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, width, height);
glEnable(GL_ALPHA_TEST);
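For completeness, the two new uniforms have to be set from the application side before the main pass. A sketch, where sceneShader and SetUniform1f are assumed names (the post only shows the depth shader's SetUniformMatrixfv):
sceneShader.Bind();
sceneShader.SetUniform1f("uWidth", (float)width); // window width in pixels
sceneShader.SetUniform1f("uHeight", (float)height); // window height in pixels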