SDL2 with OpenGL texture displaying incorrectly - C++

I have searched for this, but unfortunately the only answers I find deal with textures not being mapped at all; in my case the texture is mapped, but it does not look like the loaded surface.
Anyway, ignoring the fact that power-of-2 square images aren't required (something I've yet to look into properly), I've made a working function for expanding a surface to power-of-2 dimensions, and a structure for storing the resized surface, the old dimensions of the surface, and the ID for the texture.
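The question doesn't show resizeToNearestPow2 itself; purely as a point of reference, a minimal sketch of such a helper could look like this (assuming a 32-bit RGBA surface):
static int nearestPow2(int n) {
    int p = 1;
    while (p < n) p <<= 1;
    return p;
}

SDL_Surface* resizeToNearestPow2(SDL_Surface* surf) {
    int w = nearestPow2(surf->w);
    int h = nearestPow2(surf->h);
    // Same pixel format as the source; the extra canvas stays transparent.
    SDL_Surface* resized = SDL_CreateRGBSurface(0, w, h, 32,
        surf->format->Rmask, surf->format->Gmask,
        surf->format->Bmask, surf->format->Amask);
    SDL_SetSurfaceBlendMode(surf, SDL_BLENDMODE_NONE); // raw copy, no alpha blending
    SDL_BlitSurface(surf, NULL, resized, NULL);
    return resized;
}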
My surface-to-texture code is as follows (the arguments are SDL_Surface* surf and antTex* tex):
tex->w=surf->w;
tex->h=surf->h;
tex->surf=resizeToNearestPow2(surf);
glGenTextures(1, &tex->id);
glBindTexture(GL_TEXTURE_2D, tex->id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex->surf->w, tex->surf->h, 0, GL_BGRA, GL_UNSIGNED_BYTE, tex->surf->pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, 0); // unbind; the second parameter is a GLuint, so 0 rather than NULL
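One thing to note in passing: nothing shown here assigns tex->rw and tex->rh, which the draw function below divides by. Presumably they are meant to be set from the resized surface, e.g.:
tex->rw = tex->surf->w;
tex->rh = tex->surf->h;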
The antTex struct is:
struct antTex {
    GLuint id;         // texture ID
    int w, h;          // original image size
    int rw, rh;        // resized canvas size
    SDL_Surface* surf; // resized surface
};
Everything about resizing the image appears to work fine (I used SDL's SDL_SaveBMP to check the resulting surface); the original image is loaded with SDL2_image's IMG_Load.
When I draw a plane with the texture mapped to it while GL_BLEND is enabled, nothing shows up at all; when I disable blending I get this (the red is the window's background colour):
http://i1200.photobucket.com/albums/bb336/DarknessXeagle/error_zps03e1e5a1.png
The original image was this:
http://i1200.photobucket.com/albums/bb336/DarknessXeagle/d_zpsc8b9af6f.png
This is my function for drawing a plane with the texture mapped to it (the arguments are antTex* tex, int x, int y):
glBindTexture(GL_TEXTURE_2D, tex->id);
int w, h;
w=tex->w;
h=tex->h;
GLfloat tw, th;
th=(GLfloat)tex->h/(GLfloat)tex->rh;
tw=(GLfloat)tex->w/(GLfloat)tex->rw;
glPushMatrix();
glTranslatef(x, y, 0);
glBegin(GL_QUADS);
glNormal3f(0, 0, 1);
glTexCoord2f(0, th); glVertex2f(0, h);
glTexCoord2f(tw, th); glVertex2f(w, h);
glTexCoord2f(tw, 0); glVertex2f(w, 0);
glTexCoord2f(0, 0); glVertex2f(0, 0);
glEnd();
glPopMatrix();
glBindTexture(GL_TEXTURE_2D, 0); // unbind (0, not NULL)

You have a problem with the u and v coordinates passed to glTexCoord2f when rendering the texture onto the quad. You need to change:
glTexCoord2f(0, th); glVertex2f(0, h);
glTexCoord2f(tw, th); glVertex2f(w, h);
glTexCoord2f(tw, 0); glVertex2f(w, 0);
glTexCoord2f(0, 0); glVertex2f(0, 0);
into:
glTexCoord2f(0, 1); glVertex2f(0, h);
glTexCoord2f(1, 1); glVertex2f(w, h);
glTexCoord2f(1, 0); glVertex2f(w, 0);
glTexCoord2f(0, 0); glVertex2f(0, 0);
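Worth checking separately: the question mentions that nothing shows up at all while GL_BLEND is enabled. As in the "Texture isn't mapped to quad" question further down, that is often a blend function left over from earlier rendering; setting one explicitly before drawing is a cheap safeguard:
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);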

Related

Render fonts with FreeType

I made an OpenGL rendering engine, and I can render shapes, 3D shapes and textures. Then I wanted to render fonts, using the FreeType library, but I have one serious problem with it: when I render a font, it looks terrible. First I will explain how I initialized FreeType and loaded the font, then how I render textures, so you will be able to see if there are any mistakes.
I initialize FreeType and load the font like this:
FT_Library library;
FT_Face face;
FT_GlyphSlot slot;
FT_Init_FreeType(&library);
FT_New_Face(library, "arial.ttf", 0, &face);
FT_Set_Char_Size(face, 0, 20 * 64, 300, 300);
slot = face->glyph;
FT_Load_Char(face, 'a', FT_LOAD_RENDER);
Then I load it into my texture:
this->tex.LoadFromBuffer(slot->bitmap.buffer, slot->bitmap.width, slot->bitmap.rows);
The tex object is just a texture container, which looks like this:
class Texture
{
public:
    void LoadFromBuffer(unsigned char* buf, int w, int h);
    float sizex, sizey;
    GLuint texName;
};
And the LoadFromBuffer function looks like this:
void Texture::LoadFromBuffer(unsigned char* buf, int w, int h)
{
    this->sizex = w;
    this->sizey = h;
    glGenTextures(1, &this->texName);
    glBindTexture(GL_TEXTURE_2D, this->texName);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, buf);
    glBindTexture(GL_TEXTURE_2D, 0);
    stbi_image_free(buf); // note: this buffer comes from FreeType (slot->bitmap.buffer), not stb_image, so freeing it here is suspect
}
Then the texture is created, and I render it normally with OpenGL on a quad:
void Renderer::Render(Quad& quadv)
{
    glEnable(GL_TEXTURE_2D);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
    glBindTexture(GL_TEXTURE_2D, quadv.texture->texName);
    glBegin(GL_QUADS);
    glColor3f(quadv.color.r, quadv.color.g, quadv.color.b);
    glTexCoord2f(0.0, 0.0); glVertex2f(quadv.vertices[0].x, quadv.vertices[0].y);
    glTexCoord2f(1.0, 0.0); glVertex2f(quadv.vertices[1].x, quadv.vertices[1].y);
    glTexCoord2f(1.0, 1.0); glVertex2f(quadv.vertices[2].x, quadv.vertices[2].y);
    glTexCoord2f(0.0, 1.0); glVertex2f(quadv.vertices[3].x, quadv.vertices[3].y);
    glEnd();
    glDisable(GL_TEXTURE_2D);
}
quadv is an object that contains the quad's vertices, but that's not important here. The texture in quadv is the texture with the letter 'a' that I loaded before.
The code looks normal to me, but when I run it the result is wrong: the letter appears three times, no matter the size of the texture or the quad, and the whole image looks terrible.
I don't recall what the default pixel mode is when using FT, but it looks to me like you are assuming the bitmap provided by FT comes as 32-bit RGBA color, which is probably not the case. I'll assume it comes as 8-bit, one channel per pixel. Try changing:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, buf);
to
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RED, GL_UNSIGNED_BYTE, buf);
We replaced the input format GL_RGB with GL_RED, so that glTexImage2D reads one component per pixel instead of three. Reading three bytes per texel from a one-byte-per-pixel bitmap is also why the letter showed up three times. See the FT2 documentation for further information.
The bitmap contains one channel per pixel, encoded in a single byte. Since you're using legacy OpenGL, I recommend the internal texture format GL_ALPHA, so that each element of the texture is treated as a single alpha component. Change:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, buf);
to:
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, w, h, 0, GL_RED, GL_UNSIGNED_BYTE, buf);
Enable blending to show only the opaque parts of the glyphs:
void Renderer::Render(Quad& quadv)
{
    glEnable(GL_TEXTURE_2D);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
    glBindTexture(GL_TEXTURE_2D, quadv.texture->texName);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glBegin(GL_QUADS);
    glColor3f(quadv.color.r, quadv.color.g, quadv.color.b);
    glTexCoord2f(0.0, 0.0); glVertex2f(quadv.vertices[0].x, quadv.vertices[0].y);
    glTexCoord2f(1.0, 0.0); glVertex2f(quadv.vertices[1].x, quadv.vertices[1].y);
    glTexCoord2f(1.0, 1.0); glVertex2f(quadv.vertices[2].x, quadv.vertices[2].y);
    glTexCoord2f(0.0, 1.0); glVertex2f(quadv.vertices[3].x, quadv.vertices[3].y);
    glEnd();
    glDisable(GL_TEXTURE_2D);
}
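A side note, not part of the answer above: GL_DECAL is not defined for alpha-only textures in the fixed-function pipeline, so if the glyph renders but ignores glColor3f, switching the texture environment to GL_MODULATE should help; it passes the vertex color through and multiplies the incoming alpha by the texture's alpha:
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);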

LWJGL and OpenGL - giving tiles IDs

I have this question: I want to give each tile a separate ID, but I don't know how, and there doesn't seem to be any post about it either. What I want is to be able to edit each tile independently, without all tiles doing the same thing. If you know the answer, please say what I can edit. Thanks in advance.
-bryan
Code (everything draws on the screen perfectly, BTW):
public static void drawQuadTex(Texture tex, float x, float y, float quadWidth, float quadHeight){
    tex.bind();
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTranslatef(x, y, 0);
    glBegin(GL_QUADS);
    glTexCoord2f(0, 0);
    glVertex2f(0, 0);
    glTexCoord2f(1, 0);
    glVertex2f(quadWidth, 0);
    glTexCoord2f(1, 1);
    glVertex2f(quadWidth, quadHeight);
    glTexCoord2f(0, 1);
    glVertex2f(0, quadHeight);
    glEnd();
    glLoadIdentity();
}
So how can I apply an ID to this?
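A common pattern is to keep each tile in its own record with an ID and per-tile state, draw the records in a loop, and look tiles up by ID when you want to change one. A minimal sketch (in C++, since most of this page is C++; the same structure carries straight over to Java/LWJGL, and all names here are illustrative):
#include <vector>

struct Texture; // stands in for whatever texture type drawQuadTex takes
void drawQuadTex(Texture* tex, float x, float y, float quadWidth, float quadHeight);

struct Tile {
    int id;        // this tile's own identifier
    float x, y;    // position
    float w, h;    // quad size
    Texture* tex;  // per-tile texture
};

// Find one tile by ID so it can be edited without touching the others.
Tile* findTile(std::vector<Tile>& tiles, int id) {
    for (Tile& t : tiles)
        if (t.id == id) return &t;
    return nullptr;
}

void drawTiles(const std::vector<Tile>& tiles) {
    for (const Tile& t : tiles)
        drawQuadTex(t.tex, t.x, t.y, t.w, t.h);
}
IDs can simply be assigned sequentially as the tile map is built; the important part is that each tile owns its state instead of sharing it.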

Texture isn't mapped to quad

I'm using the following to draw text in OpenGL (using SDL):
void renderText(const TTF_Font *font, const SDL_Color color,
                const double& x, const double& y, const double& z, const std::string& text) {
    bool textured = glIsEnabled(GL_TEXTURE_2D);
    glEnable(GL_TEXTURE_2D);
    //test color: black
    glColor3f(0,0,0);
    SDL_Surface *Message = TTF_RenderText_Blended(const_cast<TTF_Font*>(font), text.c_str(), color);
    GLuint Texture = 0;
    //Generate an OpenGL 2D texture from the SDL_Surface
    glGenTextures(1, &Texture);
    glBindTexture(GL_TEXTURE_2D, Texture);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, Message->w, Message->h, 0, GL_BGRA,
                 GL_UNSIGNED_BYTE, Message->pixels);
    //Draw this texture on a quad with the given xyz coordinates.
    glBegin(GL_QUADS);
    glTexCoord2d(0, 0); glVertex3d(x, y, z);
    glTexCoord2d(1, 0); glVertex3d(x+Message->w, y, z);
    glTexCoord2d(1, 1); glVertex3d(x+Message->w, y+Message->h, z);
    glTexCoord2d(0, 1); glVertex3d(x, y+Message->h, z);
    glEnd();
    //Clean up
    glDeleteTextures(1, &Texture);
    SDL_FreeSurface(Message);
    if (!textured)
        glDisable(GL_TEXTURE_2D);
}
However, the only thing shown is a rectangle at (x, y) (in this case (10, 10)) filled with the color set by glColor3f (black in this case). The texture created from the SDL_Surface isn't shown on the quad. The arguments passed to the function are all valid:
font: previously loaded TTF_Font != NULL
color: SDL_Color {255,255,255} (white)
x = 10
y = 10
z = 0
text = "test"
What's wrong here? For reference, the projection is set with:
glOrtho(0.0, SCREEN_WIDTH, SCREEN_HEIGHT, 0.0, -1.0, 10.0);
Environment: OpenGL v2.2, SDL v1.2.15, Kubuntu Raring x64
Update: when using ..._Solid instead of ..._Blended, calling glColor3f(1,1,1) and passing SDL_Color {255,255,255} results in some strange behaviour: the quad appears where it should, but shows garbled content. What's wrong?
The problem was that I didn't call glBlendFunc before calling the above method, so OpenGL had the wrong blending mode set. After using glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); it looks fine.
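For completeness, the fix amounts to two lines before renderText is called (the glEnable is only needed if blending isn't already switched on elsewhere):
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);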

How to multisample FBOs

Notice: I am using LWJGL.
Here is my code for creating a new FBO:
/**
 * Creates a new FBO.
 * @param width The width of the FBO to create.
 * @param height The height of the FBO to create.
 * @return an int[] array containing the buffer IDs in the
 * following order: {frameBufferID, colorBufferID (texture), depthBufferID}.
 */
public static int[] newFBO(int width, int height) {
    int[] out = new int[3];
    out[0] = glGenFramebuffersEXT();
    out[1] = glGenTextures();
    out[2] = glGenRenderbuffersEXT();
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, out[0]);
    glBindTexture(GL_TEXTURE_2D, out[1]);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, org.lwjgl.opengl.GL12.GL_TEXTURE_MAX_LEVEL, 20);
    glTexParameteri(GL_TEXTURE_2D, GL14.GL_GENERATE_MIPMAP, GL_TRUE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_INT, (java.nio.ByteBuffer) null);
    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, out[1], 0);
    glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, out[2]);
    glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL14.GL_DEPTH_COMPONENT24, width, height);
    glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, out[2]);
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
    return out;
}
Here is how I draw the FBO to the screen:
public static void rectOnScreen(int tex) {
    glBindTexture(GL_TEXTURE_2D, tex);
    glDisable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);
    glLoadIdentity();
    glBegin(GL_QUADS);
    glTexCoord2f(0, 0);
    glVertex2f(-1, -1);
    glTexCoord2f(0, 1);
    glVertex2f(-1, 1);
    glTexCoord2f(1, 1);
    glVertex2f(1, 1);
    glTexCoord2f(1, 0);
    glVertex2f(1, -1);
    glEnd();
    glDisable(GL_TEXTURE_2D);
    glEnable(GL_DEPTH_TEST);
}
Basically, I use out[2] as the argument for that function.
Now, how do I apply multisampling here? I don't really like the jagged look of the results I'm getting; I want to take multiple samples of the FBO when I draw it. I would be very happy to get the code written out, but I guess a link to a tutorial or something is fine too.
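One standard approach, sketched below in C++ against the core GL 3.0 framebuffer API (LWJGL exposes the same entry points, and the EXT variants used above are analogous; all names here are illustrative): render the scene into a second FBO whose attachments are multisampled renderbuffers, then blit it into the ordinary texture FBO, which resolves the samples down to one per pixel.
// Setup: a multisampled FBO with 4x color and depth renderbuffers.
GLuint msFbo, msColor, msDepth;
glGenFramebuffers(1, &msFbo);
glGenRenderbuffers(1, &msColor);
glGenRenderbuffers(1, &msDepth);
glBindRenderbuffer(GL_RENDERBUFFER, msColor);
glRenderbufferStorageMultisample(GL_RENDERBUFFER, 4, GL_RGBA8, width, height);
glBindRenderbuffer(GL_RENDERBUFFER, msDepth);
glRenderbufferStorageMultisample(GL_RENDERBUFFER, 4, GL_DEPTH_COMPONENT24, width, height);
glBindFramebuffer(GL_FRAMEBUFFER, msFbo);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, msColor);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, msDepth);

// Each frame: render the scene into msFbo instead of the texture FBO...

// ...then resolve by blitting into the texture FBO (out[0] from newFBO):
glBindFramebuffer(GL_READ_FRAMEBUFFER, msFbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, texFbo); // texFbo = out[0]
glBlitFramebuffer(0, 0, width, height, 0, 0, width, height,
                  GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
After the blit, the color texture (out[1]) contains the anti-aliased image and can be drawn with rectOnScreen as before.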

OpenGL Framebuffer is very slow

I'm currently programming a 2D side-scrolling game with C++ and OpenGL. To get the window I'm using SFML, but that shouldn't be relevant, as I draw all my quads directly with OpenGL.
The problem is that when I use a framebuffer object to draw some light textures on it and then use it as a mask over the scene, the framerate drops. Normally the game reaches its limit at 60 fps, but with the FBO it's about 40 fps, sometimes even 30.
The effect looks nice, though :D
I have never used a framebuffer before, and this:
Framebuffer FBO render to texture is very slow, using OpenGL ES 2.0 on Android, why?
doesn't seem to help.
So my question: what am I doing wrong? Or is there a much simpler way to achieve the same effect?
I'm running it on Ubuntu 12.04.
My code:
void init() {
    GLuint fbo, mask; // framebuffer object and texture
                      // (note: declared locally here, but render() uses them too,
                      // so in the real code they presumably live at file scope)
    glewInit();
    glGenTextures(1, &mask);
    glBindTexture(GL_TEXTURE_2D, mask);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1024, 600, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mask, 0);
    glClearColor(darkness, darkness, darkness, 1);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
        CMain::game.usefbo = false;
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
void render() {
    //render usual stuff, then
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
    glClear(GL_COLOR_BUFFER_BIT);
    glPushAttrib(GL_VIEWPORT_BIT);
    glViewport(0, 0, 1024, 600);
    glBlendFunc(GL_SRC_COLOR, GL_ONE);
    //render lights
    glPopAttrib();
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glBlendFunc(GL_DST_COLOR, GL_SRC_COLOR);
    //render fbo over the screen:
    glBindTexture(GL_TEXTURE_2D, mask);
    glTranslatef(0, 0, 9);
    glBegin(GL_QUADS);
    glTexCoord2f(0, 0); glVertex2i(0, 0);
    glTexCoord2f(0, 1); glVertex2i(0, 600);
    glTexCoord2f(1, 1); glVertex2i(1024, 600);
    glTexCoord2f(1, 0); glVertex2i(1024, 0);
    glEnd();
    glLoadIdentity();
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    //display
}
Edit:
At //render lights, three light textures (round, radial, black-to-white) are rendered to the FBO via glBegin(GL_QUADS) etc. Later, of course, I want to use many more lights.
I need to refresh the FBO every frame because the lights move and overlap each other.
render lights:
for (std::vector<LightSource*>::iterator it = lights.begin(); it != lights.end(); it++) {
    (*it)->draw();
}
where lights is a vector of LightSource pointers.
LightSource::draw():
void LightSource::draw() {
    if (visible()) {
        if (activated) {
            glTranslatef(this->x, this->y, 0);
            glBindTexture(GL_TEXTURE_2D, this->texture);
            glBegin(GL_QUADS);
            glTexCoord2f(0, 0); glVertex2i(0, 0);
            glTexCoord2f(0, 1); glVertex2i(0, height);
            glTexCoord2f(1, 1); glVertex2i(width, height);
            glTexCoord2f(1, 0); glVertex2i(width, 0);
            glEnd();
            glLoadIdentity();
        }
    }
}
visible() and activated are usually true.
What kind of GPU are you using?
The only thing I see right now is that you're using the pixel transfer format GL_RGBA instead of GL_BGRA, which is considered the fastest format on many GPUs. So if your GPU is very old, that might be the issue.
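If you want to try that suggestion, it's a one-line change to the texture upload in init() (GL_BGRA here is the external pixel-transfer format; the internal format stays GL_RGBA8):
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1024, 600, 0, GL_BGRA, GL_UNSIGNED_BYTE, 0);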