Separate stencil with FBO (NVIDIA) - OpenGL

I'm trying to set up an FBO with separate depth and stencil buffers.
I know that NVIDIA GPUs have historically only supported packed depth/stencil.
However, I stumbled upon the ARB_texture_stencil8 extension and wonder how to use it with an FBO.
This code gives GL error 1159 on glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_STENCIL_ATTACHMENT_EXT, GL_TEXTURE_2D, fboStencilTexture, 0):
static GLuint fboId, rboDepth, rboStencil, TEXTURE_WIDTH, TEXTURE_HEIGHT;
if (!fboColorTexture) {
    fboDepthTexture = fboId = rboDepth = rboStencil = TEXTURE_WIDTH = TEXTURE_HEIGHT = 0; // vid restart?
    glGenTextures(1, &fboColorTexture);
    glBindTexture(GL_TEXTURE_2D, fboColorTexture);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
if (!fboDepthTexture) {
    glGenTextures(1, &fboDepthTexture);
    glBindTexture(GL_TEXTURE_2D, fboDepthTexture);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
if (!fboStencilTexture) {
    glGenTextures(1, &fboStencilTexture);
    glBindTexture(GL_TEXTURE_2D, fboStencilTexture);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    qglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
GLuint curWidth = r_virtualResolution.GetFloat() * glConfig.vidWidth, curHeight = r_virtualResolution.GetFloat() * glConfig.vidHeight;
if (curWidth != TEXTURE_WIDTH || curHeight != TEXTURE_HEIGHT) {
    TEXTURE_WIDTH = curWidth;
    TEXTURE_HEIGHT = curHeight;
    glBindTexture(GL_TEXTURE_2D, fboColorTexture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB5_A1, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL); // NULL means reserve texture memory, but texels are undefined
    glBindTexture(GL_TEXTURE_2D, fboDepthTexture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT16, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
    glBindTexture(GL_TEXTURE_2D, fboStencilTexture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_STENCIL_INDEX8, TEXTURE_WIDTH, TEXTURE_HEIGHT, 0, GL_STENCIL_INDEX, GL_FLOAT, 0);
}
//-------------------------
if (!fboId) {
    // create a framebuffer object; you need to delete it when the program exits
    glGenFramebuffersEXT(1, &fboId);
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);
    // attach a texture to the FBO color attachment point
    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, fboColorTexture, 0);
    // attach textures to the depth and stencil attachment points
    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_TEXTURE_2D, fboDepthTexture, 0);
    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_STENCIL_ATTACHMENT_EXT, GL_TEXTURE_2D, fboStencilTexture, 0);
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);

The purpose of ARB_texture_stencil8 is to permit you to use stencil-only formats as textures. That is, reading from them in shaders.
This does not mean that you can separate your stencil and depth buffers. The idea behind stencil-8 textures is that you would generate their data by either copying the stencil portion of a depth/stencil texture, or you would render without a depth buffer entirely.
So you cannot use this extension to guarantee that you can render to separate depth and stencil images. That's still hardware dependent.
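As a rough sketch of the "render without a depth buffer entirely" case: on an implementation that does accept a stencil-only attachment, a GL 4.4 / ARB_texture_stencil8 setup could look roughly like this (width and height are placeholder variables, and completeness is still up to the driver):
GLuint stencilTex;
glGenTextures(1, &stencilTex);
glBindTexture(GL_TEXTURE_2D, stencilTex);
// stencil-only storage; requires GL 4.4 or ARB_texture_stencil8
glTexStorage2D(GL_TEXTURE_2D, 1, GL_STENCIL_INDEX8, width, height);
// with a framebuffer already bound, attach it as the stencil plane and skip depth entirely
glFramebufferTexture(GL_FRAMEBUFFER, GL_STENCIL_ATTACHMENT, stencilTex, 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); // the driver may still report incomplete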
Also, if you're going to use new features like ARB_texture_stencil8, you shouldn't be combining them with old EXT_framebuffer_object APIs. So stop using glFramebufferTexture2DEXT and start using glFramebufferTexture.
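For the packed route that NVIDIA hardware is known to support, a minimal sketch using the core (non-EXT) entry points would be along these lines; width and height are placeholders:
GLuint fbo, colorTex, depthStencilTex;
glGenTextures(1, &colorTex);
glBindTexture(GL_TEXTURE_2D, colorTex);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA8, width, height);
glGenTextures(1, &depthStencilTex);
glBindTexture(GL_TEXTURE_2D, depthStencilTex);
// one packed image provides both the depth and the stencil plane
glTexStorage2D(GL_TEXTURE_2D, 1, GL_DEPTH24_STENCIL8, width, height);
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, colorTex, 0);
// GL_DEPTH_STENCIL_ATTACHMENT attaches the same image to both the depth and stencil points
glFramebufferTexture(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, depthStencilTex, 0);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
    // handle the incomplete framebuffer
}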

Related

Attaching multiple textures to a single FBO (and) multiple FBOs in OpenGL ES 2.0

Initially, I wanted two texture attachments on one FBO, but OpenGL ES 2.0 does not support GL_COLOR_ATTACHMENTi (only GL_COLOR_ATTACHMENT0 is supported). Just to see what happens with two textures attached to the same GL_COLOR_ATTACHMENT0, I tried it, and only the second texture's content was visible.
I then switched to multiple FBOs, each with its own texture, but I still only see the second texture (the one attached to the second FBO).
GLuint fbo;
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1920, 1080, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
glBindTexture(GL_TEXTURE_2D, 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
    printf("Problem with OpenGL framebuffer : %x\n", status);
}
glClear(GL_COLOR_BUFFER_BIT | GL_STENCIL_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // This is needed
DrawRect();
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
RenderTexture("-- Hello texture 1");
glDisable(GL_BLEND);
glBindTexture(GL_TEXTURE_2D, 0);
GLuint fbo2;
glGenFramebuffers(1, &fbo2);
glBindFramebuffer(GL_FRAMEBUFFER, fbo2);
GLuint texture2;
glGenTextures(1, &texture2);
glBindTexture(GL_TEXTURE_2D, texture2);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1920, 1080, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture2, 0);
glBindTexture(GL_TEXTURE_2D, 0);
status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
    printf("Problem with OpenGL framebuffer2 : %x\n", status);
}
glClear(GL_COLOR_BUFFER_BIT | GL_STENCIL_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // This is needed
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
RenderTexture(" & texture 2 -- ");
glDisable(GL_BLEND);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, texture);
render_fboTexture();
glBindTexture(GL_TEXTURE_2D, texture2);
render_fboTexture();
glBindTexture(GL_TEXTURE_2D, 0);
eglSwapBuffers(egldisplay, eglsurface);
The full code can be seen at "Font rendering onto off-screen FBO not working". (Note: fonts are loaded by creating textures with the FreeType library and glTexImage2D.)

Why is glCheckFramebufferStatus always 36054 for GL_DEPTH_COMPONENT on OpenGL ES 3.1+?

I render the color (GL_COLOR_ATTACHMENT0) and depth (GL_DEPTH_ATTACHMENT) of my scene into an FBO, which works fine on PC with OpenGL 4+.
But on my smartphone with OpenGL ES 3.1+, glCheckFramebufferStatus(GL_FRAMEBUFFER) always returns status 36054 once the depth texture is attached.
glGenTextures(1, &m_textCol);
glBindTexture(GL_TEXTURE_2D, m_textCol);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, wth, hgt, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glBindTexture(GL_TEXTURE_2D, 0);
glGenTextures(1, &m_textDepth);
glBindTexture(GL_TEXTURE_2D, m_textDepth);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, wth, hgt, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glBindTexture(GL_TEXTURE_2D, 0);
GLuint framebuffer;
glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_textCol, 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); // always 36053 -> ok
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, m_textDepth, 0);
status = glCheckFramebufferStatus(GL_FRAMEBUFFER); // always 36054 -> not ok
I also tried
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, wth, hgt, 0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, NULL);
but no change.
Does anyone have an idea what I'm doing wrong? Thanks.
Status 36054 is GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT.
If we look at Section 9.4 of the OpenGL ES 3.1 specification, it lists the following valid internalFormat values for depth attachments:
DEPTH_COMPONENT16
DEPTH_COMPONENT24
DEPTH_COMPONENT32F
DEPTH24_STENCIL8
DEPTH32F_STENCIL8
Note that the last two also contain a stencil part. You can use one of these for the depth attachment and simply ignore the stencil part if you do not need it. DEPTH24_STENCIL8 in particular has a good chance of being widely supported.
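A minimal sketch of a depth attachment that satisfies this list, reusing the wth/hgt variables from the question and assuming the framebuffer is already bound; glTexStorage2D avoids having to pick a matching format/type pair for a NULL upload:
glGenTextures(1, &m_textDepth);
glBindTexture(GL_TEXTURE_2D, m_textDepth);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
// a sized, depth-renderable internal format from the list above
glTexStorage2D(GL_TEXTURE_2D, 1, GL_DEPTH24_STENCIL8, wth, hgt);
// attach only the depth aspect; the stencil part is simply left unused
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, m_textDepth, 0);
glBindTexture(GL_TEXTURE_2D, 0);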

What might be the issue with my pipeline to rendering the depth buffer to a texture?

The main steps for depth testing, as I understand them:
1) enable depth testing and set how we want to depth test
2) create the framebuffer object and make sure it has a depth attachment
3) bind our framebuffer object (and make sure to clear it before rendering)
4) draw stuff
And that should be it, no? Our framebuffer's depth attachment should then contain depth data. But I always get straight 1's, the default depth clear value.
step 1:
glEnable(GL_DEPTH_TEST);
glDepthFunc( GL_LEQUAL );
step 2:
//create the frame buffer object
glGenFramebuffers(1, &m_uifboHandle);
// Initialize FBO
glBindFramebuffer(GL_FRAMEBUFFER, m_uifboHandle);
//create 2 texture handles 1 for diffuse, 1 for depth
unsigned int m_uiTextureHandle[2];
glGenTextures( 2, m_uiTextureHandle );
//create the diffuse texture
glBindTexture( GL_TEXTURE_2D, m_uiTextureHandle[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, uiWidth, uiHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_uiTextureHandle[0], 0);
.
//create the depth buffer
glBindTexture(GL_TEXTURE_2D, m_uiTextureHandle[1]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP );
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, uiWidth, uiHeight, 0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, m_uiTextureHandle[1], 0);
//go back to default binding
glBindFramebuffer(GL_FRAMEBUFFER, 0);
step 3:
//bind the frame buffer object
glBindFramebuffer( GL_FRAMEBUFFER, m_uifboHandle );
//clear it
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
step 4:
//draw things
Are these not the steps?
Am I missing something?
I've tried a few different tutorials, but I can't get any depth to render to a texture; I keep getting straight 1's over and over.
The framebuffer is probably not complete; try checking it for completeness. Moreover, your code was:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, uiWidth, uiHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
However, it should be (note the RGB vs. RGBA mismatch):
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, uiWidth, uiHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
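And a short sketch of the completeness check mentioned above, placed right after the attachment calls (printing the raw status value is enough to map it back to constants such as GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT):
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
    // e.g. 0x8CD6 is GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT
    printf("FBO incomplete: 0x%x\n", status);
}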

Why does the framebuffer freeze in Derelict3?

I am using Derelict3, which uses the OpenGL 3 API, and I am having an issue where the screen freezes (does not update) if glEnable(GL_DEPTH_TEST) is called, and the depth buffer does not work if it is not. I have noticed that calls to set the clear color and enable depth testing are ignored if they are made before the SDL_GL context is set up and Derelict3 is reloaded. I had to make them in the following order:
win=SDL_CreateWindow("3Doodle", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, flags);
if(!win){
    writefln("Error creating SDL window");
    SDL_Quit();
}
context=SDL_GL_CreateContext(win);
SDL_GL_SetSwapInterval(1);
glVersion=DerelictGL3.reload();
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glClearColor(0.0, 0.0, 0.0, 1.0);
glClearDepth(1.0);
glCullFace(GL_BACK);
glFrontFace(GL_CCW);
While this sorted out depth testing when writing to the standard double buffer, when I implement a framebuffer object with a renderbuffer as the new depth buffer I get a freeze: the program compiles and runs without warnings, but the image rendered to the screen-aligned quad remains fixed. If I comment out glEnable(GL_DEPTH_TEST); the camera moves freely, but the rendering pays no mind to depth testing and simply draws objects in order.
The framebuffer initialization code is:
//fbo
glEnable (GL_FRAMEBUFFER_SRGB);
glGenFramebuffers(1, &fbo);
assert(fbo > 0);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glGenRenderbuffers(1, &rbo);
assert(rbo > 0);
glBindRenderbuffer(GL_RENDERBUFFER, rbo);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, projMat.w, projMat.h);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rbo);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
glGenTextures(1, &fbon);
assert(fbon > 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, fbon);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, projMat.w, projMat.h, 0, GL_RGBA, GL_UNSIGNED_BYTE, null);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, fbon, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glGenTextures(1, &fboc);
assert(fboc > 0);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, fboc);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, projMat.w, projMat.h, 0, GL_RGBA, GL_UNSIGNED_BYTE, null);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT1, GL_TEXTURE_2D, fboc, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glGenTextures(1, &fbop);
assert(fbop > 0);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, fbop);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, projMat.w, projMat.h, 0, GL_RGBA, GL_UNSIGNED_BYTE, null);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT2, GL_TEXTURE_2D, fbop, 0);
glBindTexture(GL_TEXTURE_2D, 0);
int status=glCheckFramebufferStatus(GL_FRAMEBUFFER);
assert(status == GL_FRAMEBUFFER_COMPLETE);
buffs=[GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1, GL_COLOR_ATTACHMENT2];
glDrawBuffers(3, buffs.ptr);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
The code calling the fbo in draw() is:
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shad);
glUniformMatrix4fv(viewLoc, 1, GL_TRUE, player.mat.ptr);
current.draw();
glUseProgram(0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
rend.draw();
I am at a loss, as the same code works as it ought in C#.
My stupidity: I forgot to specify glDepthFunc.
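In other words, the missing piece was a line or two of depth-test state before rendering into the FBO, something along these lines (GL_LEQUAL is just one reasonable choice):
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL); // the call the answer says was missing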

OpenGL: Render to FBO using multiple textures

I'm experimenting with a renderer. What I want is to write a color buffer and a normal buffer to two separate textures. I got that part figured out.
However, the color buffer is supposed to be a combination of two textures. This should do the trick:
glActiveTexture(GL_TEXTURE0_ARB);
glEnable(GL_TEXTURE_2D);
g_Tex->Bind();
glActiveTexture(GL_TEXTURE1_ARB);
glEnable(GL_TEXTURE_2D);
g_TexNormal->Bind();
g_Shader->Enable();
RenderScene();
g_Shader->Disable();
glActiveTexture(GL_TEXTURE1_ARB);
glDisable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0_ARB);
glDisable(GL_TEXTURE_2D);
And this is the fragment shader: (GLSL)
uniform sampler2D tex_diffuse;
uniform sampler2D tex_normal;
void main()
{
    gl_FragColor = texture2D(tex_diffuse, gl_TexCoord[0].st);
    //gl_FragColor = texture2D(tex_normal, gl_TexCoord[0].st);
}
However, the second texture comes out the same as the first! Whichever texture is bound to GL_TEXTURE0 is the one used for both samplers.
Initializing the FBO:
glGenFramebuffersEXT(1, &g_FBOColor);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, g_FBOColor);
glGenTextures(1, &g_FBOTexColor);
glBindTexture(GL_TEXTURE_2D, g_FBOTexColor);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, g_FBOTexColor, 0);
glGenTextures(1, &g_FBOTexNormal);
glBindTexture(GL_TEXTURE_2D, g_FBOTexNormal);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT1_EXT, GL_TEXTURE_2D, g_FBOTexNormal, 0);
glGenTextures(1, &g_FBOTexDepth);
glBindTexture(GL_TEXTURE_2D, g_FBOTexDepth);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT24, w, h, 0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, NULL);
glBindTexture(GL_TEXTURE_2D, 0);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_TEXTURE_2D, g_FBOTexDepth, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
Complete render section:
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, g_FBOColor);
glPushAttrib(GL_VIEWPORT_BIT | GL_COLOR_BUFFER_BIT);
glViewport(
0, 0,
Window::GetSingleton()->GetWidth(), Window::GetSingleton()->GetHeight()
);
GLenum buffers[] = { GL_COLOR_ATTACHMENT0_EXT, GL_COLOR_ATTACHMENT1_EXT };
glDrawBuffers(2, buffers);
//glReadBuffer(GL_COLOR_ATTACHMENT0_EXT | GL_COLOR_ATTACHMENT1_EXT);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glActiveTexture(GL_TEXTURE0_ARB);
glEnable(GL_TEXTURE_2D);
g_Tex->Bind();
glActiveTexture(GL_TEXTURE1_ARB);
glEnable(GL_TEXTURE_2D);
g_TexNormal->Bind();
g_Shader->Enable();
RenderScene();
g_Shader->Disable();
glActiveTexture(GL_TEXTURE1_ARB);
glDisable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0_ARB);
glDisable(GL_TEXTURE_2D);
glPopAttrib();
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
Thanks in advance.
Well, I figured it out. :)
The reason my textures didn't work was that I never set the sampler uniforms, so both samplers defaulted to texture unit 0. Fixed code:
g_Shader->Enable();
glActiveTexture(GL_TEXTURE0_ARB);
glEnable(GL_TEXTURE_2D);
g_Tex->Bind();
glUniform1i(glGetUniformLocation(g_Shader->GetProgram(), "tex_diffuse"), 0);
glActiveTexture(GL_TEXTURE1_ARB);
glEnable(GL_TEXTURE_2D);
g_TexNormal->Bind();
glUniform1i(glGetUniformLocation(g_Shader->GetProgram(), "tex_normal"), 1);
RenderScene();
glActiveTexture(GL_TEXTURE1_ARB);
glDisable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0_ARB);
glDisable(GL_TEXTURE_2D);
g_Shader->Disable();
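As a side note on the fix: the sampler uniforms stick to the program object once set, so a hedged variant would query the locations once after linking and reuse them, rather than calling glGetUniformLocation every frame:
// one-time setup after the program has been linked
GLint locDiffuse = glGetUniformLocation(g_Shader->GetProgram(), "tex_diffuse");
GLint locNormal  = glGetUniformLocation(g_Shader->GetProgram(), "tex_normal");
g_Shader->Enable();
glUniform1i(locDiffuse, 0); // sampler reads from texture unit 0
glUniform1i(locNormal, 1);  // sampler reads from texture unit 1
g_Shader->Disable();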