How to multisample FBOs - OpenGL

Notice: I am using LWJGL.
Here is my code for creating a new FBO:
/**
 * Creates a new FBO.
 * @param width The width of the FBO to create.
 * @param height The height of the FBO to create.
 * @return an int[] array containing the buffer IDs in the
 *         following order: {frameBufferID, colorBufferID (texture), depthBufferID}.
 */
public static int[] newFBO(int width, int height) {
    int[] out = new int[3];
    out[0] = glGenFramebuffersEXT();
    out[1] = glGenTextures();
    out[2] = glGenRenderbuffersEXT();
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, out[0]);
    glBindTexture(GL_TEXTURE_2D, out[1]);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, org.lwjgl.opengl.GL12.GL_TEXTURE_MAX_LEVEL, 20);
    glTexParameteri(GL_TEXTURE_2D, GL14.GL_GENERATE_MIPMAP, GL_TRUE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_INT, (java.nio.ByteBuffer) null);
    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, out[1], 0);
    glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, out[2]);
    glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL14.GL_DEPTH_COMPONENT24, width, height);
    glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, out[2]);
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
    return out;
}
Here is how I draw the FBO to the screen:
public static void rectOnScreen(int tex) {
    glBindTexture(GL_TEXTURE_2D, tex);
    glDisable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);
    glLoadIdentity();
    glBegin(GL_QUADS);
    glTexCoord2f(0, 0); glVertex2f(-1, -1);
    glTexCoord2f(0, 1); glVertex2f(-1, 1);
    glTexCoord2f(1, 1); glVertex2f(1, 1);
    glTexCoord2f(1, 0); glVertex2f(1, -1);
    glEnd();
    glDisable(GL_TEXTURE_2D);
    glEnable(GL_DEPTH_TEST);
}
Basically, I use out[2] as the argument for that function.
Now, how do I apply multisampling here? I don't really like the jagged look of the results I'm getting. I want to take multiple samples of the FBO when I draw it. I would be very happy to get the code written out, but a link to a tutorial or something is fine too.
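One common approach, sketched below on the assumption that GL_EXT_framebuffer_multisample and GL_EXT_framebuffer_blit (or OpenGL 3.0+) are available: render the scene into a second, multisampled FBO whose attachments are renderbuffers, then resolve it into the existing FBO with a framebuffer blit so that the texture in out[1] ends up anti-aliased. The sketch uses C-style GL calls (LWJGL exposes the same EXT functions); msFbo, msColor, msDepth, samples and sceneFbo are placeholder names, not part of the code above.

// one-time setup: a multisampled FBO with color + depth renderbuffers
GLuint msFbo, msColor, msDepth;
int samples = 4; // clamp against GL_MAX_SAMPLES in real code
glGenFramebuffersEXT(1, &msFbo);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, msFbo);
glGenRenderbuffersEXT(1, &msColor);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, msColor);
glRenderbufferStorageMultisampleEXT(GL_RENDERBUFFER_EXT, samples, GL_RGBA8, width, height);
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_RENDERBUFFER_EXT, msColor);
glGenRenderbuffersEXT(1, &msDepth);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, msDepth);
glRenderbufferStorageMultisampleEXT(GL_RENDERBUFFER_EXT, samples, GL_DEPTH_COMPONENT24, width, height);
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, msDepth);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);

// each frame: render the scene into msFbo instead of the FBO from newFBO(),
// then resolve (blit) the samples into that FBO (sceneFbo == out[0]), whose
// color attachment is the texture out[1]
glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, msFbo);
glBindFramebufferEXT(GL_DRAW_FRAMEBUFFER_EXT, sceneFbo);
glBlitFramebufferEXT(0, 0, width, height, 0, 0, width, height, GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);

After the blit, out[1] holds the resolved image and rectOnScreen(out[1]) can be used exactly as before.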

Related

Render fonts with freetype

I made an OpenGL rendering engine, and I can render shapes, 3D shapes and textures. Then I wanted to render fonts, using the FreeType library. But I have one serious problem with it: when I render a font it looks terrible. First I will explain how I initialized FreeType and loaded the font, then how I render textures, so you will be able to see if there are any mistakes.
I initialize FreeType and load the font like this:
FT_Library library;
FT_Face face;
FT_GlyphSlot slot;
FT_Init_FreeType(&library);
FT_New_Face(library, "arial.ttf", 0, &face);
FT_Set_Char_Size(face, 0, 20 * 64, 300, 300);
slot = face->glyph;
FT_Load_Char(face, 'a', FT_LOAD_RENDER);
Then I load it into my texture:
this->tex.LoadFromBuffer(slot->bitmap.buffer, slot->bitmap.width, slot->bitmap.rows);
The tex object is just a texture container, which looks like this:
class Texture
{
public:
    void LoadFromBuffer(unsigned char* buf, int w, int h);
    float sizex, sizey;
    GLuint texName;
};
And the LoadFromBuffer function looks like this:
void Texture::LoadFromBuffer(unsigned char* buf, int w, int h)
{
this->sizex = w;
this->sizey = h;
glGenTextures(1, &this->texName);
glBindTexture(GL_TEXTURE_2D, this->texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, buf);
glBindTexture(GL_TEXTURE_2D, 0);
stbi_image_free(buf);
}
Then the texture is created and I render it just normally with OpenGL on a quad:
void Renderer::Render(Quad& quadv)
{
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glBindTexture(GL_TEXTURE_2D, quadv.texture->texName);
glBegin(GL_QUADS);
glColor3f(quadv.color.r, quadv.color.g, quadv.color.b);
glTexCoord2f(0.0, 0.0); glVertex2f(quadv.vertices[0].x, quadv.vertices[0].y);
glTexCoord2f(1.0, 0.0); glVertex2f(quadv.vertices[1].x, quadv.vertices[1].y);
glTexCoord2f(1.0, 1.0); glVertex2f(quadv.vertices[2].x, quadv.vertices[2].y);
glTexCoord2f(0.0, 1.0); glVertex2f(quadv.vertices[3].x, quadv.vertices[3].y);
glEnd();
glDisable(GL_TEXTURE_2D);
}
Quadv is an object which contains the vertices of the quad, but it's not important here.
The texture in quadv is the texture with the letter 'a' that I loaded before.
The code looks normal to me, but when I run it, the texture looks like this:
The letter appears three times, no matter what size the texture or the quad is.
And the whole image looks terrible.
I don't recall what the default pixel mode is when using FT, but it looks to me like you are assuming that the bitmap provided by FT comes as 32-bit RGBA color, which is probably not the case.
I'm going to assume it comes as a single 8-bit channel. Try changing:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, buf);
to
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RED, GL_UNSIGNED_BYTE, buf);
We replaced GL_RGB, the input pixel format, with GL_RED, so that glTexImage2D expects one component per pixel. See the FT2 documentation for further information.
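If you would rather verify that assumption than guess, the glyph bitmap carries its pixel mode; a small sketch (FT_PIXEL_MODE_GRAY is what FT_LOAD_RENDER normally produces):

// after FT_Load_Char(face, 'a', FT_LOAD_RENDER)
if (slot->bitmap.pixel_mode == FT_PIXEL_MODE_GRAY) {
    // one byte per pixel, slot->bitmap.num_grays levels (normally 256);
    // note that each row is slot->bitmap.pitch bytes wide, which can differ from width
}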
The bitmap contains 1 channel per pixel, encoded in a single byte. Since you're using legacy OpenGL, I recommend using the internal texture format GL_ALPHA. Therefore each element of the texture is treated as a single alpha component.
Change:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, buf);
to:
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, w, h, 0, GL_ALPHA, GL_UNSIGNED_BYTE, buf);
Enable blending so that only the opaque part of the glyphs shows; with GL_MODULATE the glyph color then comes from glColor3f:
void Renderer::Render(Quad& quadv)
{
    glEnable(GL_TEXTURE_2D);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
    glBindTexture(GL_TEXTURE_2D, quadv.texture->texName);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glBegin(GL_QUADS);
    glColor3f(quadv.color.r, quadv.color.g, quadv.color.b);
    glTexCoord2f(0.0, 0.0); glVertex2f(quadv.vertices[0].x, quadv.vertices[0].y);
    glTexCoord2f(1.0, 0.0); glVertex2f(quadv.vertices[1].x, quadv.vertices[1].y);
    glTexCoord2f(1.0, 1.0); glVertex2f(quadv.vertices[2].x, quadv.vertices[2].y);
    glTexCoord2f(0.0, 1.0); glVertex2f(quadv.vertices[3].x, quadv.vertices[3].y);
    glEnd();
    glDisable(GL_TEXTURE_2D);
}
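Putting that together, LoadFromBuffer might then look like this (a sketch based on the question's code; the stbi_image_free call is dropped here on the assumption that buf is FreeType's glyph buffer and not something allocated by stb_image):

void Texture::LoadFromBuffer(unsigned char* buf, int w, int h)
{
    this->sizex = w;
    this->sizey = h;
    glGenTextures(1, &this->texName);
    glBindTexture(GL_TEXTURE_2D, this->texName);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    // glyph rows are tightly packed, one byte per pixel
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    // single-channel upload, stored and sampled as alpha
    glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, w, h, 0, GL_ALPHA, GL_UNSIGNED_BYTE, buf);
    glBindTexture(GL_TEXTURE_2D, 0);
}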

Deferred shader textures from FBO display as black

I am trying to use deferred shading to implement SSAO and I have problems to access my textures in the deferred fragment shader. The code is in C++/Qt5 and makes use of Coin3D to generate the rest of the UI (but this shouldn't really matter here).
The fragment shader of the deferred pass is:
#version 150 compatibility
uniform sampler2D color;
uniform sampler2D position;
uniform sampler2D normal;
uniform vec3 dim;
uniform vec3 camPos;
uniform vec3 camDir;
void main()
{
    // screen position
    vec2 t = gl_TexCoord[0].st;
    // the color
    vec4 c = texture2D(color, t);
    gl_FragColor = c + vec4(1.0, t.x, t.y, 1.0);
}
The code for running the deferred pass is
_geometryBuffer.Unbind();
// push state
{
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glLoadIdentity();
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    glPushAttrib(GL_DEPTH_BUFFER_BIT |
                 GL_COLOR_BUFFER_BIT |
                 GL_LIGHTING_BIT |
                 GL_SCISSOR_BIT |
                 GL_POLYGON_BIT |
                 GL_CURRENT_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_ALPHA_TEST);
    glDisable(GL_LIGHTING);
    glDisable(GL_COLOR_MATERIAL);
    glDisable(GL_SCISSOR_TEST);
    glDisable(GL_CULL_FACE);
}
// bind shader
// /!\ IMPORTANT to do before specifying locations
_deferredShader->bind();
_CheckGLErrors("deferred");
// specify positions
_deferredShader->setUniformValue("camPos", ...);
_deferredShader->setUniformValue("camDir", ...);
_geometryBuffer.Bind(GBuffer::TEXTURE_TYPE_NORMAL, 2);
_deferredShader->setUniformValue("normal", GLint(2));
_geometryBuffer.Bind(GBuffer::TEXTURE_TYPE_POSITION, 1);
_deferredShader->setUniformValue("position", GLint(1));
_geometryBuffer.Bind(GBuffer::TEXTURE_TYPE_DIFFUSE, 0);
_deferredShader->setUniformValue("color", GLint(0));
_CheckGLErrors("bind");
// draw screen quad
{
    glBegin(GL_QUADS);
    glTexCoord2f(0, 0); glColor3f(0, 0, 0); glVertex2f(-1, -1);
    glTexCoord2f(1, 0); glColor3f(0, 0, 0); glVertex2f( 1, -1);
    glTexCoord2f(1, 1); glColor3f(0, 0, 0); glVertex2f( 1, 1);
    glTexCoord2f(0, 1); glColor3f(0, 0, 0); glVertex2f(-1, 1);
    glEnd();
}
_deferredShader->release();
// for debug
_geometryBuffer.Unbind(2);
_geometryBuffer.Unbind(1);
_geometryBuffer.Unbind(0);
_geometryBuffer.DeferredPassBegin();
_geometryBuffer.DeferredPassDebug();
// pop state
{
    glPopAttrib();
    glMatrixMode(GL_PROJECTION);
    glPopMatrix();
    glMatrixMode(GL_MODELVIEW);
    glPopMatrix();
}
I know that the textures have been correctly processed in the geometry buffer creation because I can dump them into files and get the expected result.
The deferred pass doesn't work. The shader compiled correctly and I get the following result on screen:
And the last part of my code (DeferredPassBegin/Debug) draws the FBO to the screen (as shown in the screenshot) as proof that the GBuffer is correct.
The current result seems to mean that the textures are not correctly bound to their respective uniform, but I know that the content is valid as I dumped the textures to files and got the same results as shown above.
My binding functions in GBuffer are:
void GBuffer::Unbind()
{
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
}
void GBuffer::Bind(TextureType type, uint32_t idx)
{
glActiveTexture(GL_TEXTURE0 + idx);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, _textures[static_cast<uint32_t>(type)]);
}
void GBuffer::Unbind(uint32_t idx)
{
glActiveTexture(GL_TEXTURE0 + idx);
glDisable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
}
Finally, the textures are 512×512, and I created them in my GBuffer with:
WindowWidth = WindowHeight = 512;
// Create the FBO
glGenFramebuffers(1, &_fbo);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
const uint32_t NUM = static_cast<uint32_t>(NUM_TEXTURES);
// Create the gbuffer textures
glGenTextures(NUM, _textures);
glGenTextures(1, &_depthTexture);
for (unsigned int i = 0 ; i < NUM; i++) {
    glBindTexture(GL_TEXTURE_2D, _textures[i]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, WindowWidth, WindowHeight, 0, GL_RGBA, GL_FLOAT, NULL);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0 + i + _firstIndex, GL_TEXTURE_2D, _textures[i], 0);
}
// depth
glBindTexture(GL_TEXTURE_2D, _depthTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT32F, WindowWidth, WindowHeight, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, _depthTexture, 0);
GLenum buffers[NUM];
for(uint32_t i = 0; i < NUM; ++i){
    buffers[i] = GLenum(GL_COLOR_ATTACHMENT0 + i + _firstIndex);
}
glDrawBuffers(NUM, buffers);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
    printf("FB error, status: 0x%x\n", status);
    return _valid = false;
}
// unbind textures
glBindTexture(GL_TEXTURE_2D, 0);
// restore default FBO
glBindFramebuffer(GL_FRAMEBUFFER, 0);
How can I debug further at this stage?
I know that the texture data is valid, but I can't seem to bind it to the shader correctly (though I have other shaders that use textures loaded from files, and those work fine).
--- Edit 1 ---
As asked, the code for DeferredPassBegin/Debug (mostly coming from this tutorial):
void GBuffer::DeferredPassBegin() {
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, _fbo);
}
void GBuffer::DeferredPassDebug() {
GLsizei HalfWidth = GLsizei(_texWidth / 2.0f);
GLsizei HalfHeight = GLsizei(_texHeight / 2.0f);
SetReadBuffer(TEXTURE_TYPE_POSITION);
glBlitFramebuffer(0, 0, _texWidth, _texHeight,
0, 0, HalfWidth, HalfHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
SetReadBuffer(TEXTURE_TYPE_DIFFUSE);
glBlitFramebuffer(0, 0, _texWidth, _texHeight,
0, HalfHeight, HalfWidth, _texHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
SetReadBuffer(TEXTURE_TYPE_NORMAL);
glBlitFramebuffer(0, 0, _texWidth, _texHeight,
HalfWidth, HalfHeight, _texWidth, _texHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
}
Arghk!!!
So I expected that texture parameters were not mandatory, but after looking at some code I tried specifying mine anyway. When generating the FBO textures, I now use:
for (unsigned int i = 0 ; i < NUM; i++) {
    glBindTexture(GL_TEXTURE_2D, _textures[i]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, WindowWidth, WindowHeight, 0, GL_RGBA, GL_FLOAT, NULL);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0 + i + _firstIndex, GL_TEXTURE_2D, _textures[i], 0);
}
And with this change, I get the expected result (with only c in the fragment shader, and similar correct results if I switch to visualizing the normal / position).
Conclusion: one must specify the texture parameters for deferred shading to work (at least with the graphics setup of my application / machine). Most likely the default GL_TEXTURE_MIN_FILTER of GL_NEAREST_MIPMAP_LINEAR left the mipmap-less FBO textures incomplete, so sampling them returned black.

SDL2 with OpenGL texture displaying incorrectly

I have searched for this, but unfortunately the only answers I find deal with textures not being mapped at all; in this case the texture is mapped but does not look like the loaded surface.
Anyway, ignoring the fact that power-of-2 square images aren't actually required (which I've yet to properly look into), I've made a working routine for expanding the surface to power-of-2 dimensions and a structure for storing the resized surface, the old dimensions of the surface, and the ID of the texture.
My surface to texture code is (arguments are SDL_Surface* surf, antTex* tex):
tex->w=surf->w;
tex->h=surf->h;
tex->surf=resizeToNearestPow2(surf);
glGenTextures(1, &tex->id);
glBindTexture(GL_TEXTURE_2D, tex->id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex->surf->w, tex->surf->h, 0, GL_BGRA, GL_UNSIGNED_BYTE, tex->surf->pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, NULL);
The antTex struct is:
GLuint id; //Texture ID
int w, h; //Original Image Size
int rw, rh; //Resized Canvas Size
SDL_Surface* surf; //Resized Surface
Everything about resizing the image appears to be working fine (having used SDL's SDL_SaveBMP function to check the resulting image), I used SDL2_image.h's IMG_Load for loading the original image.
When I draw a plane with the texture mapped to it while GL_BLEND is enabled nothing shows up at all, when I disable it I get this (the red is the window's background colour):
http://i1200.photobucket.com/albums/bb336/DarknessXeagle/error_zps03e1e5a1.png
The original image was this:
http://i1200.photobucket.com/albums/bb336/DarknessXeagle/d_zpsc8b9af6f.png
This is my function for drawing a plane with the texture mapped to it (arguments are antTex* tex, int x, int y):
glBindTexture(GL_TEXTURE_2D, tex->id);
int w, h;
w=tex->w;
h=tex->h;
GLfloat tw, th;
th=(GLfloat)tex->h/(GLfloat)tex->rh;
tw=(GLfloat)tex->w/(GLfloat)tex->rw;
glPushMatrix();
glTranslatef(x, y, 0);
glBegin(GL_QUADS);
glNormal3f(0, 0, 1);
glTexCoord2f(0, th); glVertex2f(0, h);
glTexCoord2f(tw, th); glVertex2f(w, h);
glTexCoord2f(tw, 0); glVertex2f(w, 0);
glTexCoord2f(0, 0); glVertex2f(0, 0);
glEnd();
glPopMatrix();
glBindTexture(GL_TEXTURE_2D, NULL);
You have a problem with u and v coordinates passed to render the texture into the quad using glTexCoord2f.
You need to change:
glTexCoord2f(0, th); glVertex2f(0, h);
glTexCoord2f(tw, th); glVertex2f(w, h);
glTexCoord2f(tw, 0); glVertex2f(w, 0);
glTexCoord2f(0, 0); glVertex2f(0, 0);
into:
glTexCoord2f(0, 1); glVertex2f(0, h);
glTexCoord2f(1, 1); glVertex2f(w, h);
glTexCoord2f(1, 0); glVertex2f(w, 0);
glTexCoord2f(0, 0); glVertex2f(0, 0);

Problems rendering to a texture using FBO in OS X 10.7 but not Linux

I have a test program that programmatically renders a texture and then renders a simple quad that is skinned with the previously drawn texture. The program is extremely simple and everything is done in 2D. This program works great under Linux and everything looks like it should:
However, when I run the program on my Mac running 10.7, nothing shows up:
I'm at a complete loss as to why this program isn't working on my Mac.
Here are the relevant bits of the program.
Creating the texture:
void createTextureObject(){
cout<<"Creating a new texture of size:"<<textureSize<<endl;
//glDeleteTextures(1, &textureId);
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE); // automatic mipmap generation included in OpenGL v1.4
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, textureSize, textureSize, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
isTextureValid = true;
createFBObject();
}
Creating the Frame Buffer Object:
void createFBObject(){
cout<<"Creating a new frame buffer object"<<endl;
glDeleteFramebuffers(1, &fboId);
glGenFramebuffersEXT(1, &fboId);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);
glGenRenderbuffersEXT(1, &rboId);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, rboId);
glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT, textureSize, textureSize);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, 0);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, textureId, 0);
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, rboId);
bool status = checkFramebufferStatus();
if(!status){
    cout<<"FrameBufferObject not created! Quitting!"<<endl;
    exit(1);
    fboUsed = false;
}
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
Rendering to the texture:
void drawToTexture(){
if (!isTextureValid)
createTextureObject();
glViewport(0, 0, textureSize, textureSize);
glLoadIdentity();
// set the rendering destination to FBO
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);
// clear buffer
glClearColor(0, 0, 0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// draw to the texture
glColor3f(0.0, 1.0, 0.0);
for (int i=1; i<=5; i++){
    glBegin(GL_LINE_LOOP);
    glVertex2f(-1 * i/5.0f, -1 * i/5.0f);
    glVertex2f( 1 * i/5.0f, -1 * i/5.0f);
    glVertex2f( 1 * i/5.0f,  1 * i/5.0f);
    glVertex2f(-1 * i/5.0f,  1 * i/5.0f);
    glEnd();
}
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
Rendering the textured quad:
void drawTexturedQuad(){
glViewport(50, 50, textureSize, textureSize);
glLoadIdentity();
glClearColor(0.2, 0.2, 0.2, 0.2);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, textureId);
int size = 1;
int texS = 1;
glBegin(GL_QUADS);
glColor4f(1, 1, 1, 1);
glTexCoord2f(texS, texS); glVertex3f(size, size,0);
glTexCoord2f(0, texS); glVertex3f(-1 * size , size,0);
glTexCoord2f(0, 0); glVertex3f(-1 * size, -1 * size,0);
glTexCoord2f(texS, 0); glVertex3f(size, -1 * size,0);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
}
I'm using GLUT and my display call back is simply:
void displayCB(){
drawToTexture();
drawTexturedQuad();
glutSwapBuffers();
}
Additionally, I have other methods that check whether framebuffer objects are supported by OpenGL, and if they are not the program quits. I also have other logic that sets textureSize whenever the window is resized.
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE); // automatic mipmap generation included in OpenGL v1.4
This might be a problem. The GL specification doesn't say exactly at what point mipmaps should be generated, and of course this causes problems with render-to-texture. So GL_EXT_framebuffer_object (and the core versions too) introduced glGenerateMipmapEXT, which you call at exactly the point where you want mipmaps generated (usually right after rendering to the texture).
So, remove the GL_GENERATE_MIPMAP stuff and call glGenerateMipmap manually when needed. You can read more about this problem in the GL_EXT_framebuffer_object specification.
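A minimal sketch of where such a call could go, assuming the GL_GENERATE_MIPMAP parameter has been removed from createTextureObject (this is the tail of the question's drawToTexture with two lines added):

// at the end of drawToTexture(), after rendering into the FBO
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
// rebuild the mipmap chain from the freshly rendered base level
glBindTexture(GL_TEXTURE_2D, textureId);
glGenerateMipmapEXT(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);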
Are you sure that your buffer id is 0 and not 1?
Maybe another FBO was already created before.
Also check the texture dimensions; try making them a power of two.

OpenGL texturing (with mipmaps or not) from an FBO

I must be missing something obvious in using FBOs:
I call TMyForm::Init() once at the start of my application:
class TMyForm
{ ...
private:
    GLuint mTextureId, mFboId;
    int mWidth, mHeight;
};
void TMyForm::Init()
{
mWidth = 1920;
mHeight = 1080;
...
// create a texture object
glEnable(GL_TEXTURE_2D);
glClearColor ( 0.0, 0.0, 0.0, 1.0 );
glGenTextures(1, &mTextureId);
glBindTexture(GL_TEXTURE_2D, mTextureId);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE); // automatic mipmap
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1920, 1080, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glBindTexture(GL_TEXTURE_2D, 0);
// create a framebuffer object
glGenFramebuffersEXT(1, &mFboId);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mFboId);
// attach the texture to FBO color attachment point
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, mTextureId, 0);
// switch back to window-system-provided framebuffer
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
And in the Draw() function I send the buffer to a card (the rendered scene is OK) and to a preview window, which comes out all black or all white depending on whether I switch back to the default window context or stay on the FBO context:
void TMyForm::Draw()
{
// set rendering destination to FBO
glEnable(GL_TEXTURE_2D);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mFboId);
//---
glViewport(-1920, -1080, 1920 * 2, 1080 * 2);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(double(-iWidth)*viewport_ratio, double(iWidth)*viewport_ratio, double(-iHeight), double(iHeight), 1000.0, 100000.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Render();
glFlush();
//--- 1 : send to card
delete[] mBufferPlayout;
mBufferPlayout = NULL;
try
{
    mBufferPlayout = new GLubyte [mWidth * mHeight * 4];
    glReadBuffer(GL_COLOR_ATTACHMENT0_EXT);
    glReadPixels(0, 0, mWidth, mHeight, GL_BGRA_EXT, GL_UNSIGNED_BYTE, mBufferPlayout);
    DisplayFrame(mBufferPlayout);
}
catch (TSCDbException &e) { ShowMessage(GetLastError()); }
//--- 2 : Texturing to the preview window :
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
glClear(GL_COLOR_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, mTextureId);
//glGenerateMipmapEXT(GL_TEXTURE_2D);
int iWidthPreview = mWidth / 2; // ie 960
int iHeightPreview = mHeight / 2; // ie 540
glViewport(-iWidthPreview, -iHeightPreview, iWidthPreview * 2, iHeightPreview * 2);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(double(-iWidthPreview), double(iWidthPreview), double(-iHeightPreview), double(iHeightPreview), 1000.0, 100000.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0, 0.0, -50000.0);
mxIncrust0 = 0.0; myIncrust0 = 0.0;
mxIncrust1 = iWidthPreview; myIncrust1 = iHeightPreview;
glBegin(GL_QUADS);
glColor4ub(255,255,255,255);
glTexCoord2d(0.0, 0.0); glVertex2d(mxIncrust0, myIncrust0);
glTexCoord2d(0.0, 1.0); glVertex2d(mxIncrust0, myIncrust1);
glTexCoord2d(1.0,1.0); glVertex2d(mxIncrust1, myIncrust1);
glTexCoord2d(1.0,0.0); glVertex2d(mxIncrust1, myIncrust0);
glEnd();
glDisable(GL_TEXTURE_2D);
glFlush();
//---
SwapBuffers(ghDC);
}
So my problem is "part 2", which does not do what I want: texturing the FBO content onto the preview window.
I tried changing the glTexCoord2d values with no success.
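This looks like the same pitfall described in the previous question: the texture uses GL_LINEAR_MIPMAP_LINEAR with GL_GENERATE_MIPMAP, but rendering into the texture through the FBO does not reliably regenerate the mipmap levels, so the preview quad samples stale or empty levels. A sketch of the kind of change that usually helps, assuming nothing else about the setup (it reuses the glGenerateMipmapEXT call that is commented out above):

//--- 2 : Texturing to the preview window :
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
glClear(GL_COLOR_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, mTextureId);
glGenerateMipmapEXT(GL_TEXTURE_2D); // rebuild mipmaps from the frame just rendered
// ... rest of the preview drawing unchanged

Alternatively, setting GL_TEXTURE_MIN_FILTER to GL_LINEAR in Init() and dropping GL_GENERATE_MIPMAP avoids mipmaps for the preview entirely.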