Getting OpenGL to render texture to screen - c++

I am having trouble getting an image texture to render to the screen. Nothing is displayed other than a blank black screen, and there are no obvious errors. I believe the issue has something to do with the glOrtho call, but I'm not sure.
All relevant code is included below. (OnResize gets called after initialisation and before the first Render call.)
OpenGL 2.1 compatibility is required, with a view to adding shaders later. The texture stuff being hardcoded into the renderer is just temporary; I already have a texture class ready to go once I work out what's wrong.
I think I might need a framebuffer object somewhere? The tutorials I've found often differ drastically and usually target OpenGL 3.
Video::Video() : running(true)
{
if (SDL_Init(SDL_INIT_VIDEO) < 0) {
std::string err = "Error initialising video: ";
err += SDL_GetError();
throw std::runtime_error(err);
}
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
SDL_GL_SetAttribute(SDL_GL_ACCELERATED_VISUAL, 1);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
int flags = SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE | SDL_WINDOW_MAXIMIZED;
this->window = SDL_CreateWindow("lolpenGL", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 800, 600, flags);
if (this->window == nullptr) {
std::string err = "Error creating SDL window: ";
err += SDL_GetError();
throw std::runtime_error(err);
}
this->glcontext = SDL_GL_CreateContext(this->window);
if (this->glcontext == nullptr) {
std::string err = "Error creating GL context: ";
err += SDL_GetError();
throw std::runtime_error(err);
}
GLenum glew_err = glewInit();
if (glew_err != GLEW_OK) {
std::string err = "Error initialising GLEW: ";
err += (char *)glewGetErrorString(glew_err);
throw std::runtime_error(err);
}
if (!GLEW_VERSION_2_1) {
throw std::runtime_error("OpenGL 2.1 not available");
}
int max_tex_size;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_tex_size);
if (max_tex_size < 1024) {
throw std::runtime_error("Maximum supported texture size too small");
}
int max_tex_units;
glGetIntegerv(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, &max_tex_units);
if (max_tex_units < 2) {
throw std::runtime_error("GPU does not have enough texture unnits");
}
SDL_GL_SetSwapInterval(1); // vsync
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glDisable(GL_DEPTH_TEST); // what's drawn last is displayed on top.
// Temp texture load
glGenTextures(1, &this->texid);
glBindTexture(GL_TEXTURE_2D, this->texid);
// Set our texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
// Set texture filtering
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Load, create texture and generate mipmaps
int width, height;
unsigned char *image = SOIL_load_image("awesomeface.png", &width, &height, 0, SOIL_LOAD_RGB);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, image);
glGenerateMipmap(GL_TEXTURE_2D);
SOIL_free_image_data(image);
glBindTexture(GL_TEXTURE_2D, 0);
glEnable(GL_TEXTURE_2D);
}
void Video::OnResize(int w, int h)
{
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glViewport(0, 0, w, h);
glOrtho(0, w, 0, h, 9001, -1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
void Video::Render()
{
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
glPushMatrix();
{
glColor4f(1, 1, 1, 1);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, this->texid);
// do stuff
}
glPopMatrix();
SDL_GL_SwapWindow(this->window);
}
EDIT: OK, I'm an idiot, I wasn't actually drawing anything. I found that adding the following, where the // do stuff comment is, works.
glBegin(GL_QUADS);
glTexCoord2f(0, 0); glVertex3f(0, 0, 0);
glTexCoord2f(0, 1); glVertex3f(0, 100, 0);
glTexCoord2f(1, 1); glVertex3f(100, 100, 0);
glTexCoord2f(1, 0); glVertex3f(100, 0, 0);
glEnd();
Is this the best way to do this?
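For comparison, a client-side vertex-array version of the same quad is still valid in OpenGL 2.1 and is closer to how a later shader-based path will look. This is only a sketch (not code from the question), assuming the same 100×100 quad and an interleaved x, y, u, v layout:
// Same textured quad via client-side vertex arrays (GL 2.1 compatible).
const GLfloat quad[] = {
      0.0f,   0.0f,  0.0f, 0.0f,
      0.0f, 100.0f,  0.0f, 1.0f,
    100.0f, 100.0f,  1.0f, 1.0f,
    100.0f,   0.0f,  1.0f, 0.0f,
};
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glVertexPointer(2, GL_FLOAT, 4 * sizeof(GLfloat), quad);       // x, y
glTexCoordPointer(2, GL_FLOAT, 4 * sizeof(GLfloat), quad + 2); // u, v
glDrawArrays(GL_QUADS, 0, 4);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
Moving that array into a VBO later (glGenBuffers / glBufferData) is then only a small step.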

Related

SDL2 + OpenGL + SDL2_TTF: Displaying text

I'm having trouble getting a TTF font to draw in OpenGL plus SDL2.0.
I remember that before SDL version 2 I had no problems, but there seems to be a lack of documentation on the subject, perhaps because SDL2 is still relatively new.
I have included the code below to show generally what I am doing.
This code is very inefficient, as I recreate the texture every frame; take this into consideration if copy-pasting :)
void main_render() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//Set our color back to white (Full texture color)
glColor3f(1.0f, 1.0f, 1.0f);
mainCamera->perspective();
mainCamera->translate();
//Load the font
TTF_Font *font = TTF_OpenFont("../resource/font.ttf", 10);
if (font == nullptr) {
std::cout << "TTF_OpenFont error: " << std::endl;
return;
}
//Render font to a SDL_Surface
SDL_Color color = {0,0,255,80};
SDL_Surface *surface = TTF_RenderText_Blended(font, "Hi!", color);
if (surface == nullptr) {
TTF_CloseFont(font);
std::cout << "TTF_RenderText error: " << std::endl;
return;
}
//Create a SDL_Texture * from the surface
SDL_Texture * text = SDL_CreateTextureFromSurface(fontRender, surface);
if (text == nullptr){
std::cout << "SDL_CreateTextureFromSurface error: " << std::endl;
return;
}
//Bind the SDL_Texture in OpenGL
SDL_GL_BindTexture(text, NULL, NULL);
//Draw the SDL_Texture * as a Quad
glEnable(GL_TEXTURE_2D);
glBegin(GL_QUADS); {
glTexCoord2d(0, 0); glVertex3f(0, 0, 0);
glTexCoord2d(1, 0); glVertex3f(0 + surface->w, 0, 0);
glTexCoord2d(1, 1); glVertex3f(0 + surface->w, 0 + surface->h, 0);
glTexCoord2d(0, 1); glVertex3f(0, 0 + surface->h, 0);
} glEnd();
glDisable(GL_TEXTURE_2D);
//Cleanup
TTF_CloseFont(font);
SDL_DestroyTexture(text);
SDL_FreeSurface(surface);
//Swap the buffers to refresh the window
mainWindow->swap_buffers();
}
So OpenGL requires that all textures have power-of-two dimensions on my system (2, 4, 8, 16, 32, 64, ...).
I worked around this by incrementally searching for a power of two large enough to hold the original dimension:
unsigned int power_two_floor(unsigned int val) {
unsigned int power = 2, nextVal = power*2;
while((nextVal *= 2) <= val)
power*=2;
return power*2;
}
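(As an aside, a shorter sketch that computes the smallest power of two at or above a value directly, so no extra *2 is needed at the call site; this is an alternative, not the code used in the rest of this answer:)
// Alternative sketch: smallest power of two >= val.
unsigned int power_two_ceil(unsigned int val) {
    unsigned int power = 1;
    while (power < val)
        power *= 2;
    return power;
}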
I then ran into a snag: the texture was the correct color, yet the pixels were scrambled. This was fixed by copying the image to an RGB SDL_Surface, using the adjusted OpenGL dimensions.
//Find the first power of two for OpenGL image
int w = power_two_floor(surface->w)*2;
int h = power_two_floor(surface->h)*2;
//Create a surface to the correct size in RGB format, and copy the old image
SDL_Surface * s = SDL_CreateRGBSurface(0, w, h, 32, 0x00ff0000,0x0000ff00,0x000000ff,0xff000000);
SDL_BlitSurface(surface, NULL, s, NULL);
Adding filter parameters was required to stop OpenGL from trying to use mipmaps (which don't exist for this texture):
//Avoid mipmap filtering
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
Here is my answer:
unsigned int power_two_floor(unsigned int val) {
unsigned int power = 2, nextVal = power*2;
while((nextVal *= 2) <= val)
power*=2;
return power*2;
}
void main_render() {
//Load the font
TTF_Font *font = TTF_OpenFont("../resource/font.ttf", 10);
if (font == nullptr) {
std::cout << "TTF_OpenFont error: " << std::endl;
return;
}
SDL_Color colorFg = {0,0,255};
SDL_Surface *surface;
//Render font to a SDL_Surface
if ((surface = TTF_RenderText_Blended(font, "Hi!", colorFg)) == nullptr) {
TTF_CloseFont(font);
std::cout << "TTF_RenderText error: " << std::endl;
return;
}
GLuint texId;
//Generate OpenGL texture
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &texId);
glBindTexture(GL_TEXTURE_2D, texId);
//Avoid mipmap filtering
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
//Find the first power of two for OpenGL image
int w = power_two_floor(surface->w)*2;
int h = power_two_floor(surface->h)*2;
//Create a surface to the correct size in RGB format, and copy the old image
SDL_Surface * s = SDL_CreateRGBSurface(0, w, h, 32, 0x00ff0000,0x0000ff00,0x000000ff,0xff000000);
SDL_BlitSurface(surface, NULL, s, NULL);
//Copy the created image into OpenGL format
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, s->pixels);
//Draw the OpenGL texture as a Quad
glBegin(GL_QUADS); {
glTexCoord2d(0, 1); glVertex3f(0, 0, 0);
glTexCoord2d(1, 1); glVertex3f(0 + surface->w, 0, 0);
glTexCoord2d(1, 0); glVertex3f(0 + surface->w, 0 + surface->h, 0);
glTexCoord2d(0, 0); glVertex3f(0, 0 + surface->h, 0);
} glEnd();
glDisable(GL_TEXTURE_2D);
//Cleanup
TTF_CloseFont(font);
SDL_FreeSurface(s);
SDL_FreeSurface(surface);
glDeleteTextures(1, &texId);
}

Deferred shader textures from FBO display as black

I am trying to use deferred shading to implement SSAO, and I have problems accessing my textures in the deferred fragment shader. The code is in C++/Qt5 and makes use of Coin3D to generate the rest of the UI (but this shouldn't really matter here).
The fragment shader of the deferred pass is:
#version 150 compatibility
uniform sampler2D color;
uniform sampler2D position;
uniform sampler2D normal;
uniform vec3 dim;
uniform vec3 camPos;
uniform vec3 camDir;
void main()
{
// screen position
vec2 t = gl_TexCoord[0].st;
// the color
vec4 c = texture2D(color, t);
gl_FragColor = c + vec4(1.0, t.x, t.y, 1.0);
}
The code for running the deferred pass is
_geometryBuffer.Unbind();
// push state
{
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
glPushAttrib(GL_DEPTH_BUFFER_BIT |
GL_COLOR_BUFFER_BIT |
GL_LIGHTING_BIT |
GL_SCISSOR_BIT |
GL_POLYGON_BIT |
GL_CURRENT_BIT);
glDisable(GL_DEPTH_TEST);
glDisable(GL_ALPHA_TEST);
glDisable(GL_LIGHTING);
glDisable(GL_COLOR_MATERIAL);
glDisable(GL_SCISSOR_TEST);
glDisable(GL_CULL_FACE);
}
// bind shader
// /!\ IMPORTANT to do before specifying locations
_deferredShader->bind();
_CheckGLErrors("deferred");
// specify positions
_deferredShader->setUniformValue("camPos", ...);
_deferredShader->setUniformValue("camDir", ...);
_geometryBuffer.Bind(GBuffer::TEXTURE_TYPE_NORMAL, 2);
_deferredShader->setUniformValue("normal", GLint(2));
_geometryBuffer.Bind(GBuffer::TEXTURE_TYPE_POSITION, 1);
_deferredShader->setUniformValue("position", GLint(1));
_geometryBuffer.Bind(GBuffer::TEXTURE_TYPE_DIFFUSE, 0);
_deferredShader->setUniformValue("color", GLint(0));
_CheckGLErrors("bind");
// draw screen quad
{
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glColor3f(0, 0, 0);
glVertex2f(-1, -1);
glTexCoord2f(1, 0);
glColor3f(0, 0, 0);
glVertex2f( 1, -1);
glTexCoord2f(1, 1);
glColor3f(0, 0, 0);
glVertex2f( 1, 1);
glTexCoord2f(0, 1);
glColor3f(0, 0, 0);
glVertex2f(-1, 1);
glEnd();
}
_deferredShader->release();
// for debug
_geometryBuffer.Unbind(2);
_geometryBuffer.Unbind(1);
_geometryBuffer.Unbind(0);
_geometryBuffer.DeferredPassBegin();
_geometryBuffer.DeferredPassDebug();
// pop state
{
glPopAttrib();
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
}
I know that the textures have been correctly processed in the geometry buffer creation because I can dump them into files and get the expected result.
The deferred pass doesn't work. The shader compiles correctly, but the result I get on screen is wrong (screenshot omitted here).
And the last part of my code (DeferredPassBegin/Debug) draws the FBO to the screen (as shown in the screenshot) as proof that the GBuffer is correct.
The current result seems to mean that the textures are not correctly bound to their respective uniforms, but I know that the content is valid, as I dumped the textures to files and got the same results as shown above.
My binding functions in GBuffer are:
void GBuffer::Unbind()
{
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
}
void GBuffer::Bind(TextureType type, uint32_t idx)
{
glActiveTexture(GL_TEXTURE0 + idx);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, _textures[static_cast<uint32_t>(type)]);
}
void GBuffer::Unbind(uint32_t idx)
{
glActiveTexture(GL_TEXTURE0 + idx);
glDisable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
}
Finally, the textures are 512×512, and I created them in my GBuffer with:
WindowWidth = WindowHeight = 512;
// Create the FBO
glGenFramebuffers(1, &_fbo);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
const uint32_t NUM = static_cast<uint32_t>(NUM_TEXTURES);
// Create the gbuffer textures
glGenTextures(NUM, _textures);
glGenTextures(1, &_depthTexture);
for (unsigned int i = 0 ; i < NUM; i++) {
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, WindowWidth, WindowHeight, 0, GL_RGBA, GL_FLOAT, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0 + i + _firstIndex, GL_TEXTURE_2D, _textures[i], 0);
}
// depth
glBindTexture(GL_TEXTURE_2D, _depthTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT32F, WindowWidth, WindowHeight, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, _depthTexture, 0);
GLenum buffers[NUM];
for(uint32_t i = 0; i < NUM; ++i){
buffers[i] = GLenum(GL_COLOR_ATTACHMENT0 + i + _firstIndex);
}
glDrawBuffers(NUM, buffers);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
printf("FB error, status: 0x%x\n", status);
return _valid = false;
}
// unbind textures
glBindTexture(GL_TEXTURE_2D, 0);
// restore default FBO
glBindFramebuffer(GL_FRAMEBUFFER, 0);
How can I debug further at this stage?
I know that the texture data is valid, but I can't seem to bind it to the shader correctly (but I have other shaders that use textures loaded from files and which work fine).
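One generic way to dig further (a sketch, not from the original post; the sampler names are simply those declared in the deferred fragment shader above) is to dump the sampler uniform locations and the texture bound on each unit just before drawing the quad:
// Debugging sketch: check sampler uniform locations and per-unit texture bindings.
GLint prog = 0;
glGetIntegerv(GL_CURRENT_PROGRAM, &prog);
const char *samplers[] = { "color", "position", "normal" };
for (int i = 0; i < 3; ++i) {
    GLint loc = glGetUniformLocation(prog, samplers[i]);
    printf("uniform %s -> location %d\n", samplers[i], loc); // -1 means not found / optimized out
}
for (int unit = 0; unit < 3; ++unit) {
    GLint tex = 0;
    glActiveTexture(GL_TEXTURE0 + unit);
    glGetIntegerv(GL_TEXTURE_BINDING_2D, &tex);
    printf("texture unit %d: texture object %d bound\n", unit, tex);
}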
--- Edit 1 ---
As asked, the code for DeferredPassBegin/Debug (mostly coming from this tutorial):
void GBuffer::DeferredPassBegin() {
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindFramebuffer(GL_READ_FRAMEBUFFER, _fbo);
}
void GBuffer::DeferredPassDebug() {
GLsizei HalfWidth = GLsizei(_texWidth / 2.0f);
GLsizei HalfHeight = GLsizei(_texHeight / 2.0f);
SetReadBuffer(TEXTURE_TYPE_POSITION);
glBlitFramebuffer(0, 0, _texWidth, _texHeight,
0, 0, HalfWidth, HalfHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
SetReadBuffer(TEXTURE_TYPE_DIFFUSE);
glBlitFramebuffer(0, 0, _texWidth, _texHeight,
0, HalfHeight, HalfWidth, _texHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
SetReadBuffer(TEXTURE_TYPE_NORMAL);
glBlitFramebuffer(0, 0, _texWidth, _texHeight,
HalfWidth, HalfHeight, _texWidth, _texHeight, GL_COLOR_BUFFER_BIT, GL_LINEAR);
}
Arghk!!!
I had assumed that texture parameters were not mandatory, but after looking at some other code I tried specifying mine. When generating the FBO textures, I now use:
for (unsigned int i = 0 ; i < NUM; i++) {
glBindTexture(GL_TEXTURE_2D, _textures[i]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, WindowWidth, WindowHeight, 0, GL_RGBA, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0 + i + _firstIndex, GL_TEXTURE_2D, _textures[i], 0);
}
And with this change, I get the expected result (with only c in the fragment shader, and similar correct results if I switch to visualizing the normal / position).
Conclusion: one must specify the texture parameters for the FBO textures to be sampled correctly (at least with the graphics setup of my application / machine). Without them the default GL_TEXTURE_MIN_FILTER expects mipmap levels that were never generated, so the textures are incomplete and sample as black.

Render TTF SDL2.0 opengl 3.1

I'm working with SDL 2.0 and a (semi-modern) OpenGL (3.1). I'm looking to add a text overlay to my application and render TTF text in it. How would I go about this using modern OpenGL?
EDIT:
As per the suggestion of genpfault, I've tried using the SDL_TTF library, but all I'm getting is garbage on screen: http://i.stack.imgur.com/FqyCT.png
I've attached a gist of my shaders, which are very simple for this program, and also the snippet I'm using to load the text into a surface and bind it to the texture. I'm not trying to do anything crazy here at all. Is there anything I'm doing wrong that you can see? I'm not really sure how to debug shaders etc.
https://gist.github.com/anonymous/7284430
I spent too much time with a black screen before I figured out the actual text data was in the alpha channel.
GLuint shader_program_text;
void drawText()
{
//Render the message to an SDL_Surface, as that's what TTF_RenderText_X returns
text_font = TTF_OpenFont("bpl_binary/waltographUI.ttf", 50);
SDL_Color color = {255, 0, 0, 0};
SDL_Surface* sdl_surface = TTF_RenderText_Blended(text_font, "hello world", color);
GLuint texture_id;
glGenTextures(1, &texture_id);
glBindTexture(GL_TEXTURE_2D, texture_id);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sdl_surface->w, sdl_surface->h, 0, GL_BGRA, GL_UNSIGNED_BYTE, sdl_surface->pixels);
glBindTexture(GL_TEXTURE_2D, 0);
SDL_FreeSurface(sdl_surface);
TTF_CloseFont(text_font);
//glDrawPixels(sdl_surface->w, sdl_surface->h, GL_RGBA, GL_UNSIGNED_BYTE, sdl_surface->pixels);
//FILE* file = fopen("output.txt", "w+");
//for(int h=0; h<sdl_surface->h; h++)
//{
//
// for(int w=0; w<sdl_surface->w; w++)
// {
// unsigned int xxx = ((unsigned int*)sdl_surface->pixels)[h*sdl_surface->w + w];
// /*if(xxx != 0)
// fprintf(file, "x", xxx);
// else
// fprintf(file, " ", xxx);*/
// fprintf(file, "%08x ", xxx);
// }
// fprintf(file, "\n");
//}
//fclose(file);
//MULTIPLY WITH ALPHA TO ACTUALLY SEE SOMETHING
glUseProgram(shader_program_text);
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture_id);
glBegin(GL_TRIANGLE_STRIP);
//texcoord; position;
glVertexAttrib2f(1, 0, 1); glVertexAttrib2f(0, -1, -1); //top left
glVertexAttrib2f(1, 1, 1); glVertexAttrib2f(0, +1, -1); //top right
glVertexAttrib2f(1, 0, 0); glVertexAttrib2f(0, -1, +1); //bottom left
glVertexAttrib2f(1, 1, 0); glVertexAttrib2f(0, +1, +1); //bottom right
glEnd();
glDisable(GL_TEXTURE_2D);
glUseProgram(0);
}
Use the pixels member of the SDL_Surface returned by a TTF_Render*() call to populate a texture.
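A minimal sketch of that idea (not the poster's code; it assumes a little-endian machine, where the 32-bit ARGB surface produced by TTF_RenderText_Blended uploads as GL_BGRA):
// Sketch: build a GL texture from the pixels member of a TTF_Render*() surface.
SDL_Color white = { 255, 255, 255, 255 };
SDL_Surface *surf = TTF_RenderText_Blended(font, "overlay text", white);
GLuint tex = 0;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glPixelStorei(GL_UNPACK_ROW_LENGTH, surf->pitch / 4); // respect any row padding
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, surf->w, surf->h, 0,
             GL_BGRA, GL_UNSIGNED_BYTE, surf->pixels);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
SDL_FreeSurface(surf);
Then draw a textured quad with blending enabled (glEnable(GL_BLEND) plus glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)), since the glyph coverage lives in the alpha channel.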

C++ - SDL/OpenGL loaded image's colors are inverted

I'm trying to load an image into my game. It's being written in C++, with SDL and OpenGL, and the SDL_Image framework.
I've gotten the image in, and have rotated/inverted it to my needs, but there are two main problems.
Firstly, the image's colors are completely inverted (i.e. blue -> red).
I have the format set to either 'GL_RGB' or 'GL_RGBA', depending on the bytes per pixel. I've tried with PNGs, JPEGs, and BMPs, but they're all the same. I'm lost!
--Thanks to datenwolf for correcting my silly mistakes!
Secondly, the image draws once, and that's it. It doesn't get redrawn on the other buffer; it just gets displayed once and then is gone as soon as the next frame is drawn.
GLuint loadTexture()
{
SDL_Surface* image = IMG_Load( "/Users/<My name>/Pictures/pacman board.jpeg" );
SDL_DisplayFormatAlpha(image);
unsigned object(0);
glGenTextures(1, &object);
glBindTexture(GL_TEXTURE_2D, object);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
int Mode = NULL;
if(image->format->BytesPerPixel == 3) {Mode = GL_RGB;}
else if(image->format->BytesPerPixel == 4) {Mode = GL_RGBA;}
glTexImage2D(GL_TEXTURE_2D, 0, Mode, image->w, image->h, 0, Mode, GL_UNSIGNED_BYTE, image->pixels);
SDL_FreeSurface(image);
return object;
}
int main(int argc, char * argv[])
{
SDL_Init(SDL_INIT_EVERYTHING);
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BUFFER_SIZE, 32);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_WM_SetCaption("HI", NULL);
SDL_SetVideoMode(600, 600, 32, SDL_OPENGL);
glClearColor(0,0,0,1);
glViewport(0, 0, 600, 600);
glShadeModel(GL_SMOOTH);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
unsigned int pad_texture = 0;
pad_texture = loadTexture();
//---MAIN LOOP---
bool isRunning = true;
SDL_Event event;
while (isRunning)
{
while (SDL_PollEvent(&event))
{
if (event.type == SDL_QUIT) { isRunning = false; }
if (event.type == SDL_KEYUP && event.key.keysym.sym == SDLK_ESCAPE) {isRunning = false;}
}
glClear(GL_COLOR_BUFFER_BIT);
gluOrtho2D(0, 600, 0, 600); // For some reason, I prefer having '0,0' at the bottom left.
glPushMatrix();
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D,pad_texture);
glBegin(GL_QUADS);
glTexCoord2d(0, 1); glVertex2f(0, 0);
glTexCoord2d(1, 1); glVertex2f(600, 0);
glTexCoord2d(1, 0); glVertex2f(600, 600);
glTexCoord2d(0, 0); glVertex2f(0, 600);
glEnd();
glDisable(GL_TEXTURE_2D);
glPopMatrix(); //Stop drawing
SDL_GL_SwapBuffers();
}
SDL_Quit();
return 0;
}
Most image file formats have the pixel data in BGR or BGRA order. If you load such image data using an RGB ordering, the blue and red channels will be swapped, of course.
OpenGL does offer those pixel formats as well, since version 1.4. You'll have to either use an extension wrapper like GLEW or fetch and install a current glext.h to get access to the new format tokens. Have a look at http://www.opengl.org/sdk/docs/man/xhtml/glTexImage2D.xml for the complete list of supported formats in OpenGL-3 core and later. For older OpenGL, namely OpenGL-1.4 to OpenGL-2.1, look at http://www.opengl.org/sdk/docs/man2/xhtml/glTexImage2D.xml
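As an illustration of that advice (a sketch only, assuming a little-endian machine; it is not part of the original answer), the format passed to glTexImage2D can be derived from the surface's channel masks instead of being assumed:
// Sketch: pick the upload format from the SDL surface's red channel mask.
GLenum pickFormat(const SDL_Surface *img)
{
    const bool hasAlpha = (img->format->BytesPerPixel == 4);
    if (img->format->Rmask == 0x000000ff)      // bytes laid out R,G,B(,A)
        return hasAlpha ? GL_RGBA : GL_RGB;
    return hasAlpha ? GL_BGRA : GL_BGR;        // bytes laid out B,G,R(,A)
}
// usage in loadTexture():
//   glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0,
//                pickFormat(image), GL_UNSIGNED_BYTE, image->pixels);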
Update
That your image shows only once is due to the following:
while (isRunning)
{
/*...*/
glClear(GL_COLOR_BUFFER_BIT);
gluOrtho2D(0, 600, 0, 600); // For some reason, I prefer having '0,0' at the bottom left.
gluOrtho2D multiplies on top of what's already on the matrix, which is whatever gluOrtho2D left there in the previous iteration, so the projection keeps accumulating. You also left the matrix mode in a "dangling" state. As a general rule you should always freshly set up the whole OpenGL drawing state (viewport, clear color, clear depth, all the matrices) at the beginning of each drawing iteration. In your case:
while (isRunning)
{
/*...*/
glViewport(0, 0, win_width, win_height);
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, win_width, 0, win_height, -1, 1); // gluOrtho2D is the most useless wrapper ever..., just put a -1, 1 as additional parameters to glOrtho
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glShadeModel(GL_SMOOTH);

OpenGL texturing (with mipmaps or not) from an FBO

I must be missing something obvious in using FBOs:
I call TMyForm::Init() once at the start of my application:
class TMyForm
{ ...
private:
GLuint mTextureId, mFboId;
int mWidth, mHeight;
}
void TMyForm::Init()
{
mWidth = 1920;
mHeight = 1080;
...
// create a texture object
glEnable(GL_TEXTURE_2D);
glClearColor ( 0.0, 0.0, 0.0, 1.0 );
glGenTextures(1, &mTextureId);
glBindTexture(GL_TEXTURE_2D, mTextureId);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE); // automatic mipmap
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1920, 1080, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glBindTexture(GL_TEXTURE_2D, 0);
// create a framebuffer object
glGenFramebuffersEXT(1, &mFboId);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mFboId);
// attach the texture to FBO color attachment point
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, mTextureId, 0);
// switch back to window-system-provided framebuffer
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
And in the Draw() function I send the buffer to a card (the rendered scene is OK) and to a preview window (all black or all white, depending on whether I switch back to the default window framebuffer or stay on the FBO context):
void TMyForm::Draw()
{
// set rendering destination to FBO
glEnable(GL_TEXTURE_2D);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mFboId);
//---
glViewport(-1920, -1080, 1920 * 2, 1080 * 2);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(double(-iWidth)*viewport_ratio, double(iWidth)*viewport_ratio, double(-iHeight), double(iHeight), 1000.0, 100000.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Render();
glFlush();
//--- 1 : send to card
delete[] mBufferPlayout;
mBufferPlayout = NULL;
try
{
mBufferPlayout = new GLubyte [mWidth * mHeight * 4];
glReadBuffer(GL_COLOR_ATTACHMENT0_EXT);
glReadPixels(0,0, mWidth, mHeight, GL_BGRA_EXT, GL_UNSIGNED_BYTE, mBufferPlayout);
DisplayFrame(mBufferPlayout);
}
catch (TSCDbException &e) { ShowMessage(GetLastError()); }
//--- 2 : Texturing to the preview window :
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
glClear(GL_COLOR_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, mTextureId);
//glGenerateMipmapEXT(GL_TEXTURE_2D);
int iWidthPreview = mWidth / 2; // ie 960
int iHeightPreview = mHeight / 2; // ie 540
glViewport(-iWidthPreview, -iHeightPreview, iWidthPreview * 2, iHeightPreview * 2);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(double(-iWidthPreview), double(iWidthPreview), double(-iHeightPreview), double(iHeightPreview), 1000.0, 100000.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0, 0.0, -50000.0);
mxIncrust0 = 0.0; myIncrust0 = 0.0;
mxIncrust1 = iWidthPreview; myIncrust1 = iHeightPreview;
glBegin(GL_QUADS);
glColor4ub(255,255,255,255);
glTexCoord2d(0.0, 0.0); glVertex2d(mxIncrust0, myIncrust0);
glTexCoord2d(0.0, 1.0); glVertex2d(mxIncrust0, myIncrust1);
glTexCoord2d(1.0,1.0); glVertex2d(mxIncrust1, myIncrust1);
glTexCoord2d(1.0,0.0); glVertex2d(mxIncrust1, myIncrust0);
glEnd();
glDisable(GL_TEXTURE_2D);
glFlush();
//---
SwapBuffers(ghDC);
}
So my problem is "part 2", which does not do what I want: texturing the FBO content to the preview window.
I tried changing the "glTexCoord2d" calls with no success.