Draw texture from QGLFramebufferObject on full screen quad using custom shader - c++

Using Qt 4.7 and QGLWidget, I want QGLWidget::paintGL() to render a scene into a QGLFramebufferObject and then - using the texture generated by the QGLFramebufferObject - onto the screen. For the second step I render a full-screen quad with an orthographic projection and use my own shader to render the texture onto it.
Rendering into the QGLFramebufferObject seems to work fine (at least I can call QGLFramebufferObject::toImage().save(filename) and I get the correctly rendered image), but I can't get the rendered texture to be drawn onto the screen.
Here is the code I use to render into the framebuffer object:
//Draw into framebufferobject
_fbo->bind();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
_camera->applyModelviewMatrix();
_scene->glDraw();
glFlush();
_fbo->release();
_fbo->toImage().save("image.jpg");
As said, the image saved here contains the correctly rendered image.
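For context, the framebuffer object itself could be created roughly like this (a sketch under assumptions - the question doesn't show how _fbo is constructed; a depth attachment is assumed since the scene render clears GL_DEPTH_BUFFER_BIT):
//Sketch (assumption): create the FBO once, e.g. in initializeGL()
_fbo = new QGLFramebufferObject(width(), height(), QGLFramebufferObject::CombinedDepthStencil);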
Here is the code I use to try to render the framebuffer object onto the screen. I'm using my own shader to render it.
//Draw framebufferobject to a full-screen quad on the screen
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0,1.0,0.0,1.0,-1.0,1.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
_selectionShader->bind();
glBindTexture(GL_TEXTURE_2D, _fbo->texture());
_selectionShader->setUniformValue("renderedTexture", _fbo->texture());
glBegin(GL_QUADS);
glTexCoord2f(0.0,0.0);
glVertex3f(0.0f,0.0f,0.0f);
glTexCoord2f(1.0,0.0);
glVertex3f(1.0f,0.0f,0.0f);
glTexCoord2f(1.0,1.0);
glVertex3f(1.0f,1.0f,0.0f);
glTexCoord2f(0.0,1.0);
glVertex3f(0.0f,1.0f,0.0f);
glEnd();
glDisable(GL_TEXTURE_2D);
glFlush();
}
The vertex shader simply passes the position through and uses it as the texture coordinate:
varying vec2 position;
void main()
{
gl_Position = ftransform();
position = gl_Vertex.xy;
}
And the fragment shader draws the texture:
varying vec2 position;
uniform sampler2D renderedTexture;
void main()
{
gl_FragColor = texture2D(renderedTexture, position);
}
The projection I'm doing is correct, because when I exchange the fragment shader with the following one, it draws the expected color gradient:
varying vec2 position;
uniform sampler2D renderedTexture;
void main()
{
gl_FragColor = vec4(position.x, 0.0f, position.y, 1.0f);
}
But using the other fragment shader, which should render the texture, I only get a blank screen (made blank by the glClear() at the beginning of the rendering). So the fragment shader seems to draw either black or nothing.
Am I missing anything? Am I passing the texture correctly to the shader? Do I have to do anything else to prepare the texture?

_selectionShader->setUniformValue("renderedTexture", _fbo->texture());
This is the (or at least a) wrong part. A sampler uniform in a shader is not set to the texture object, but to the texture unit that this object is bound to as a texture (which you already did with glBindTexture(GL_TEXTURE_2D, _fbo->texture())). So since you seem to use GL_TEXTURE0 all the time, you just have to set it to texture unit 0:
_selectionShader->setUniformValue("renderedTexture", 0);
By the way, there is no need to glEnable(GL_TEXTURE_2D); that isn't necessary when using shaders. And why call glTexCoord2f if you're just using the vertex position as the texture coordinate anyway?
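Putting it together, the relevant part of the drawing code above would look something like this (a sketch; unit 0 is already the active texture unit by default, the glActiveTexture call is only shown to make that explicit):
_selectionShader->bind();
glActiveTexture(GL_TEXTURE0);                              // texture unit 0
glBindTexture(GL_TEXTURE_2D, _fbo->texture());             // bind the FBO texture to unit 0
_selectionShader->setUniformValue("renderedTexture", 0);   // sampler uniform = unit index, not texture id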

Related

Rendering Transparent Blocks in OpenGL

I'm rendering 3D terrain with cubes and I need to render water transparently.
My solution for this, after some research, was to separate the water cubes into one buffer and the solid blocks into another buffer; then in my fragment shader I'm using:
uniform float transparency;
in my rendering code:
shader.setFloat("transparency", 1.0f);
glBindVertexArray(solidVAO);
glDrawArrays(GL_TRIANGLES, 0, solidPoints.size());
shader.setFloat("transparency", 0.3f);
glBindVertexArray(transparentVAO);
glDrawArrays(GL_TRIANGLES, 0, transparentPoints.size());
and then in my Fragment Shader
out vec4 FragColor;
in vec2 TexCoord;
// texture sampler
uniform sampler2D atlas;
uniform float transparency;
void main()
{
FragColor = texture(atlas, TexCoord);
FragColor.w = transparency;
}
However, the water is still solid. Is there a better approach to drawing transparent shapes in front of solids? Or, what is going wrong here?
You need to set the blending mode for the transparent shapes as well:
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
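For example, inserted into the rendering code from the question (a sketch; the solid blocks are drawn first, then the water with blending enabled):
// opaque geometry first, no blending needed
shader.setFloat("transparency", 1.0f);
glBindVertexArray(solidVAO);
glDrawArrays(GL_TRIANGLES, 0, solidPoints.size());
// transparent water afterwards, with alpha blending enabled
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
shader.setFloat("transparency", 0.3f);
glBindVertexArray(transparentVAO);
glDrawArrays(GL_TRIANGLES, 0, transparentPoints.size());
glDisable(GL_BLEND);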

Render from fbo texture to another within same fbo

I'm trying to set up deferred rendering, and have successfully managed to output data to the various g-buffer textures (position, normal, albedo, specular).
I am now attempting to sample from the albedo texture into a 5th colour attachment in the same FBO (for the purposes of further potential post-process sampling), by rendering a full-screen quad with simple texture coordinates.
I have checked that the vertex/texcoord data is good via Nsight, and confirmed that the shader can "see" the texture to sample from, but all that I see in the target texture is the clear colour when I examine it in the Nsight debugger.
At the moment, the shader is basically just a simple pass through shader:
vertex shader:
#version 430
in vec3 MSVertex;
in vec2 MSTexCoord;
out xferBlock
{
vec3 VSVertex;
vec2 VSTexCoord;
} outdata;
void main()
{
outdata.VSVertex = MSVertex;
outdata.VSTexCoord = MSTexCoord;
gl_Position = vec4(MSVertex,1.0);
}
fragment shader:
#version 430
layout (location = 0) uniform sampler2D colourMap;
layout (location = 0) out vec4 colour;
in xferBlock
{
vec3 VSVertex;
vec2 VSTexCoord;
} indata;
void main()
{
colour = texture(colourMap, indata.VSTexCoord).rgba;
}
As you can see, there is nothing fancy about the shader.
The gl code is as follows:
//bind frame buffer for writing to texture #5
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glDrawBuffer(GL_COLOR_ATTACHMENT4); //5 textures total
glClear(GL_COLOR_BUFFER_BIT);
//activate shader
glUseProgram(second_stage_program);
//bind texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, fbo_buffers[2]); //third attachment: albedo
//bind and draw fs quad (two array buffers: vertices, and texture coordinates)
glBindVertexArray(quad_vao);
glDrawArrays(GL_TRIANGLES,0,6);
I'm trying to work out what is preventing rendering to the texture. I'm using a core context with OpenGL v4.3.
I've tried outputting a single white colour for all fragments, using the texture coordinates to generate a colour (colour = vec4(indata.VSTexCoord, 1.0, 1.0);), and sampling the texture itself, as you see in the shader code, but nothing changes the resultant texture, which just shows the clear colour.
What am I doing wrong?
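For reference, a fifth colour attachment like the one described above would typically be created and attached roughly as follows (a sketch under assumptions - the question does not show the FBO setup; fbo_buffers[4], width and height are placeholders):
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glGenTextures(1, &fbo_buffers[4]);
glBindTexture(GL_TEXTURE_2D, fbo_buffers[4]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT4, GL_TEXTURE_2D, fbo_buffers[4], 0);
// after attaching, glCheckFramebufferStatus(GL_FRAMEBUFFER) should return GL_FRAMEBUFFER_COMPLETE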

Does the draw order affect objects' position in depth? (images included)

I have a few objects in the scene, and even if I specify that object A has y = 10 (the highest object), from the TOP camera I can see the bottom objects through object A. Here is an image from my scene.
And only today I found an interesting property: the draw order of models seems to matter (I may be wrong). Here is another image where I change the draw order of "ship1". Attention: "ship1" is way below my scene; if I call ship1.draw(); first, the ship disappears (correct), but if I call ship1.draw(); last, it appears on top (incorrect).
Video: Opengl Depth Problem video
Q1) Does the draw order always matter?
Q2) How do I fix this, should I change draw order every time I change camera position?
Edit: I also compared my perspective projection class with the glm library, just to be sure the problem isn't my projection matrix. Everything is correct.
Edit1: I have my project on git: Arkanoid git repository (Windows; the project is ready to run on any computer with VS installed).
Edit2: I don't use normals or textures. Just vertices and indices.
Edit3: Is it a problem if every object in the scene uses (shares) vertices from the same file?
Edit4: I also changed my perspective projection values. I had the near plane at 0.0f; now I have near=20.0f, far=500.0f, angle=60°. But nothing changes; the view does, but the depth doesn't. =/
Edit5: Here are my vertex and fragment shaders.
Edit6: Contact me any time, I am here all day, so ask me anything. At the moment I am rewriting the whole project from scratch. I have two cubes which render well, one in front of the other. I have already added my classes for the camera, projections, and shader handling, and am moving on to the class which creates and draws the objects.
// Vertex shader
in vec4 in_Position;
out vec4 color;
uniform mat4 Model;
uniform mat4 View;
uniform mat4 Projection;
void main(void)
{
color = in_Position;
gl_Position = Projection * View * Model * in_Position;
}
// Fragment shader
#version 330 core
in vec4 color;
out vec4 out_Color;
void main(void)
{
out_Color = color;
}
Some code:
void setupOpenGL() {
std::cerr << "CONTEXT: OpenGL v" << glGetString(GL_VERSION) << std::endl;
glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glDepthMask(GL_TRUE);
glDepthRange(0.0, 1.0);
glClearDepth(1.0);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CCW);
}
void display()
{
++FrameCount;
glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
renderScene();
glutSwapBuffers();
}
void renderScene()
{
wallNorth.draw(shader);
obstacle1.draw(shader);
wallEast.draw(shader);
wallWest.draw(shader);
ship1.draw(shader);
plane.draw(shader);
}
I have cloned the repository you have linked to see if the issue was located somewhere else. In your most recent version the Object3D::draw function looks like this:
glBindVertexArray(this->vaoID);
glUseProgram(shader.getProgramID());
glUniformMatrix4fv(this->currentshader.getUniformID_Model(), 1, GL_TRUE, this->currentMatrix.getMatrix()); // PPmat is the identity matrix
glDrawElements(GL_TRIANGLES, 40, GL_UNSIGNED_INT, (GLvoid*)0);
glBindVertexArray(0);
glUseProgram(0);
glClear( GL_DEPTH_BUFFER_BIT); <<< clears the current depth buffer.
The last line clears the depth buffer after each object that is drawn, meaning that the next object drawn is not occluded properly. You should only clear the depth buffer once every frame.
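Concretely, Object3D::draw should end without the clear (a sketch, identical to your code minus the last line; the per-frame glClear in display() is enough):
glBindVertexArray(this->vaoID);
glUseProgram(shader.getProgramID());
glUniformMatrix4fv(this->currentshader.getUniformID_Model(), 1, GL_TRUE, this->currentMatrix.getMatrix());
glDrawElements(GL_TRIANGLES, 40, GL_UNSIGNED_INT, (GLvoid*)0);
glBindVertexArray(0);
glUseProgram(0);
// no glClear(GL_DEPTH_BUFFER_BIT) here - clearing per object defeats the depth test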

OpenGL how to render background

Hi I am doing an assignment and can't figure out how to render a background.
I've drawn the triangles and everything renders to the screen OK, but the background always ends up in the foreground and blocks everything else from view.
Here is my code for rendering the background.
void render(){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(bgShaderID);
glBindVertexArray(bgArrayID);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
// draw everything else
glutSwapBuffers();
glFlush();
}
In my vertex shader I have the following:
in vec3 a_vertex;
in vec3 a_colour;
out vec3 fragmentColor;
void main(){
gl_Position = vec4(a_vertex.xy, 0.0 ,1);
fragmentColor = a_colour;
}
It seems like you have the GL_DEPTH_TEST enabled. I don't know what projection matrix and z values you use for drawing your foreground objects, but
gl_Position = vec4(a_vertex.xy, 0.0 ,1);
is setting the clip-space z of the background to 0. Assuming a perspective projection, this is ridiculously close to the front plane. Assuming some orthographic projection, this is still in the middle of the depth range.
You could of course try to set z=1.0 in the shader to push it to the far plane. However, since you draw the background first, you might be better off just disabling GL_DEPTH_TEST (or disabling depth writes via glDepthMask(GL_FALSE)) temporarily while drawing your background.
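Applied to the render() function from the question, the depth-mask variant could look like this (a sketch):
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDepthMask(GL_FALSE);              // background: don't write to the depth buffer
glUseProgram(bgShaderID);
glBindVertexArray(bgArrayID);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
glDepthMask(GL_TRUE);               // restore depth writes for the rest of the scene
// draw everything else
glutSwapBuffers();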

GLSL and FBOs - glActiveTexture doesn't work?

I'm trying to write a simple shader which adds together textures attached to FBOs. There is no problem with the FBO initialization and such (I've tested it). The problem, I believe, is with glActiveTexture(GL_TEXTURE0); it doesn't seem to be doing anything. Here is my fragment shader (the shader is definitely being called - I've tested that by putting gl_FragColor = vec4(0,1,0,1); in it):
uniform sampler2D Texture0;
uniform sampler2D Texture1;
varying vec2 vTexCoord;
void main()
{
vec4 texel0 = texture2D(Texture0, gl_TexCoord[0].st);
vec4 vec = texel0;
gl_FragColor = texel0;
}
And in the C++ code I have:
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, iFrameBufferAccumulation);
glDrawBuffer(GL_COLOR_ATTACHMENT0_EXT);
// Render something - this works fine; it renders into the iTextureImgAccumulation texture attached to GL_COLOR_ATTACHMENT0_EXT
glClear (GL_COLOR_BUFFER_BIT );
glEnable(GL_TEXTURE_RECTANGLE_NV);
glActiveTexture(GL_TEXTURE0);
glBindTexture( GL_TEXTURE_RECTANGLE_NV, iTextureImgAccumulation ); // Bind our frame buffer texture
xShader.setUniform1i("Texture0", 0);
glLoadIdentity(); // Load the Identity Matrix to reset our drawing locations
glTranslatef(0.0f, 0.0f, -2.0f);
xShader.bind();
glBegin(GL_QUADS);
glTexCoord2f(0,OPT.m_nHeight);
glVertex3f(-1,-1,0);
glTexCoord2f(OPT.m_nWidth,OPT.m_nHeight);
glVertex3f(1,-1,0);
glTexCoord2f(OPT.m_nWidth,0);
glVertex3f(1,1,0);
glTexCoord2f(0,0);
glVertex3f(-1,1,0);
glEnd();
glBindTexture( GL_TEXTURE_RECTANGLE_NV, NULL );
xShader.unbind();
Result: black screen when displaying the second texture and using the shader (without the shader it's fine). I'm aware that this shader shouldn't do much, but it doesn't even display the first texture.
I'm in the middle of testing things, but the idea is that after rendering to the first texture, I would add the first texture to the second one. To do this I imagine that this fragment shader would work:
uniform sampler2D Texture0;
uniform sampler2D Texture1;
varying vec2 vTexCoord;
void main()
{
vec4 texel0 = texture2D(Texture0, gl_TexCoord[0].st);
vec4 texel1 = texture2D(Texture1, gl_TexCoord[0].st);
vec4 vec = texel0 + texel1;
vec.w = 1.0;
gl_FragColor = vec;
}
And the whole idea is that in a loop, tex2 = tex2 + tex1 (would it be possible to use tex2 in this shader while rendering to GL_COLOR_ATTACHMENT1_EXT, which is attached to tex2?).
I've tested calling xShader.bind(); both before and after initializing the uniform variables. In both cases: black screen.
Anyway, for the moment I'm pretty sure there is some problem with the initialization of the texture samplers (maybe because the textures are attached to an FBO)?
I've checked the rest and it works fine.
Also, another stupid problem:
How can I render a texture on the whole screen?
I've tried something like this, but it doesn't work (I have to translate this quad a bit):
glViewport(0,0 , OPT.m_nWidth, OPT.m_nHeight);
glBindTexture( GL_TEXTURE_RECTANGLE_NV, iTextureImg/*iTextureImgAccumulation*/ ); // Bind our frame buffer texture
glBegin(GL_QUADS);
glTexCoord2f(0,OPT.m_nHeight);
glVertex3f(-1,-1,0);
glTexCoord2f(OPT.m_nWidth,OPT.m_nHeight);
glVertex3f(1,-1,0);
glTexCoord2f(OPT.m_nWidth,0);
glVertex3f(1,1,0);
glTexCoord2f(0,0);
glVertex3f(-1,1,0);
glEnd();
It doesn't work with glVertex2f either.
Edit: I've checked and I can initialise some uniform variables; only the textures are problematic.
I've changed the order but it still doesn't work. :( By the way, the other uniform values are working well. I've also displayed the texture I want to pass to the shader; it looks fine. But for some unknown reason the texture sampler isn't initialized in the fragment shader. Maybe it has something to do with the fact that this texture is created with glTexImage2D(GL_TEXTURE_RECTANGLE_NV, 0, GL_RGB16F /*GL_FLOAT_R32_NV*/, OPT.m_nWidth, OPT.m_nHeight, 0, GL_RED, GL_FLOAT, NULL); (it's not GL_TEXTURE_2D)?
It's not clear what your xShader.bind() does; I guess you do glUseProgram(...) there. But uniform variables (the sampler index in your case) should be set up after glUseProgram(...) is called, in this order:
glUseProgram(your_shaders); //probably your xShader.bind() does it.
GLuint sampler_idx = 0;
GLint location = glGetUniformLocation(your_shaders, "Texture0");
if(location != -1) glUniform1i(location, sampler_idx);
else error("cant, get uniform location");
glActiveTexture(GL_TEXTURE0 + sampler_idx);
glBindTexture(GL_TEXTURE_2D, iTextureImg);
And yes, you can render to an FBO texture and then use it in a shader in another pass:
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, your_fbo_id);
// render to FBO there
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
then use your FBO texture the same way as you use regular textures.
glActiveTexture(GL_TEXTURE0);
glBindTexture( GL_TEXTURE_RECTANGLE_NV, iTextureImgAccumulation ); // Bind our frame buffer texture
xShader.setUniform1i("Texture0", 0);
This is a rectangle texture.
uniform sampler2D Texture0;
This is a 2D texture. They are not the same thing. The sampler type must match the texture type. You need to use a sampler2DRect, assuming your version of GLSL supports that.
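A minimal fragment shader matching the rectangle texture could look like this (a sketch, assuming an older GLSL version where the ARB_texture_rectangle extension directive is still needed; note that rectangle textures are sampled with unnormalized pixel coordinates, which matches the glTexCoord2f(0..width, 0..height) calls in the question):
#extension GL_ARB_texture_rectangle : enable
uniform sampler2DRect Texture0;
void main()
{
// rectangle textures use pixel coordinates, not 0..1 coordinates
gl_FragColor = texture2DRect(Texture0, gl_TexCoord[0].st);
}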