2D TTF Text Rendering with SDL & OpenGL Issues - opengl

I have looked around and haven't found a good way to implement this with SDL & OpenGL. At the moment I can display text on the screen from a TTF, but it isn't displaying the way it should. Attached is a screenshot of how the engine looks when displaying text. From what I can see in my code, I think the problem is that I am not blitting my text texture onto the main SDL_Surface I am using as my window; in the picture, my character (who has a red collision box around him) is being covered up by the texture I'm rendering the text with. Any ideas of what I can do?
In my game loop I call beginDraw() and endDraw() before and after I draw everything to the screen.
Picture: http://public.gamedev.net/uploads/monthly_07_2012/post-200874-0-25909300-1342404845_thumb.png
All the engine code can be found here: https://github.com/Jevi/SDL_GL_ENGINE
P.S.: I'm going to make another post for this, but I might as well ask here since you're already looking at my code. I have been noticing some memory issues with the engine: in Task Manager the memory allocated to my program climbs constantly from its starting point of around 11,000 K.
void graphics::SDL_GL_RenderText(float x1, float y1, int width, int height, const char* text, int ptsize, const char* ttfLoc, int r, int g, int b){
    SDL_Surface* temp;
    SDL_Surface* temp2;
    SDL_Rect rect;
    TTF_Font* font;
    SDL_Color textColor;
    unsigned int texture;

    font = TTF_OpenFont( ttfLoc , ptsize );
    textColor.r = r;
    textColor.g = g;
    textColor.b = b;

    temp = TTF_RenderText_Blended( font, text, textColor );
    // width = nextpoweroftwo(width);
    // height = nextpoweroftwo(height);
    temp2 = SDL_CreateRGBSurface(0, width, height, 32, r, g, b, 0);
    SDL_BlitSurface(temp, 0, temp2, 0);

    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, temp2->w, temp2->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, temp2->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    /* prepare to render our texture */
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, texture);
    glColor3f(1.0f, 1.0f, 1.0f);

    /* Draw a quad at location */
    glBegin(GL_QUADS);
    glTexCoord2d(0,0);
    glVertex2f(x1, y1);
    glTexCoord2d(1,0);
    glVertex2f(x1 + temp2->w, y1);
    glTexCoord2d(1,1);
    glVertex2f(x1 + temp2->w, y1 + temp2->h);
    glTexCoord2d(0,1);
    glVertex2f(x1, y1 + temp2->h);
    glEnd();
    glFinish();

    glDisable(GL_BLEND);
    glDisable(GL_TEXTURE_2D);

    SDL_FreeSurface(temp);
    SDL_FreeSurface(temp2);
    TTF_CloseFont(font);
    glDeleteTextures(1, &texture);
}
void graphics::GL_BeginDraw(){
    glClear(GL_COLOR_BUFFER_BIT);
    glPushMatrix(); // Start rendering phase
    glOrtho(0, width, height, 0, -1, 1); // Set the matrix
}
void graphics::GL_EndDraw(){
    glPopMatrix(); // End rendering phase
    SDL_GL_SwapBuffers();
    glFinish();
}

Related

Problems with updating a texture

I have a function that generates the object's texture and another that displays it. To avoid leaking memory, I tried to call the first function only once and then use glTexSubImage2D in the second one, but that call is not working for me: when I run the program, the texture is never updated.
Here is the generating texture function:
GLuint importText(const std::string &text, int font_size, int red, int green, int blue, texto_data *texto){
    SDL_Color font_color = {blue, green, red};
    TTF_Font* font = TTF_OpenFont("arial.ttf", font_size);
    SDL_Surface *image = TTF_RenderText_Blended(font, text.c_str(), font_color);
    SDL_DisplayFormatAlpha(image);
    glGenTextures(1, &texto->texture);
    glBindTexture(GL_TEXTURE_2D, texto->texture);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->pixels);
    SDL_FreeSurface(image);
    TTF_CloseFont(font);
    return texto->texture;
}
And here is the display function:
int carrega_texto(texto_data *texto) {
    SDL_Color font_color = {texto->b, texto->g, texto->r};
    TTF_Font* font = TTF_OpenFont("arial.ttf", texto->tamanho);
    SDL_Surface *image = TTF_RenderText_Blended(font, texto->string, font_color);
    SDL_DisplayFormatAlpha(image);
    glBindTexture(GL_TEXTURE_2D, texto->texture);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, image->w, image->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->pixels);
    SDL_FreeSurface(image);
    TTF_CloseFont(font);
    glBindTexture(GL_TEXTURE_2D, texto->texture);
    glColor4ub(255, 255, 255, 255);
    glBegin(GL_QUADS);
    glTexCoord2d(0,0); glVertex2f(texto->area.a.x, texto->area.a.y);
    glTexCoord2d(1,0); glVertex2f(texto->area.b.x, texto->area.b.y);
    glTexCoord2d(1,1); glVertex2f(texto->area.c.x, texto->area.c.y);
    glTexCoord2d(0,1); glVertex2f(texto->area.d.x, texto->area.d.y);
    glEnd();
}
Note: I also tried always calling the texture-generating function and deleting the texture with glDeleteTextures each time, so as not to use glTexSubImage2D. The problem is that glGenTextures keeps increasing the value of texto->texture until it eventually overflows.
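For reference, glTexSubImage2D takes a mipmap level, then separate x/y offsets, then the width and height of the region, and finally the format, type and pixel pointer. A minimal update sketch along those lines, reusing the names from the code above:
// Sketch only: replace a region of the already-allocated texture.
glBindTexture(GL_TEXTURE_2D, texto->texture);
glTexSubImage2D(GL_TEXTURE_2D,
                0,                   // mipmap level
                0, 0,                // x and y offset into the existing texture
                image->w, image->h,  // size of the region being replaced
                GL_RGBA, GL_UNSIGNED_BYTE, image->pixels);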

Immediate mode texturing weird output

I'm trying to simply draw an image with OpenGL's immediate mode functions.
However, my output is weird. I tried a few texture parameters, but I get the same result, sometimes with different colors. I can't figure out the problem, but I suspect it's either the image loading or the texture setup. To cut to the chase, here is how I generate my texture (in a Texture2D class):
glBindTexture(GL_TEXTURE_2D, id);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
A call to glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, _width, _height, 0, GL_RGB, GL_UNSIGNED_BYTE, data); follows, where data is an array of unsigned char filled with 255 (white). I then load a PNG file with CImg, like this:
CImg<unsigned char> image(_filename.c_str());
image.resize(m_Width, m_Height);
glBindTexture(GL_TEXTURE_2D, m_ID);
if(image.spectrum() == 4)
{
    unsigned char* data = image.data();
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_Width, m_Height, GL_RGBA, GL_UNSIGNED_BYTE, data);
}
else if(image.spectrum() == 3)
{
    unsigned char* data = image.data();
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_Width, m_Height, GL_RGB, GL_UNSIGNED_BYTE, data);
}
glBindTexture(GL_TEXTURE_2D, 0);
But when I try to draw the texture in immediate mode like this (the origin is the upper-left corner; note that the red rectangle around the texture is intentional):
void SimpleRenderer::drawTexture(Texture2D* _texture, float _x, float _y, float _width, float _height)
{
    _texture->setActive(0);
    glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f);
    glVertex2f(_x, _y);
    glTexCoord2f(1.0f, 0.0f);
    glVertex2f(_x + _width, _y);
    glTexCoord2f(1.0f, 1.0f);
    glVertex2f(_x + _width, _y + _height);
    glTexCoord2f(0.0f, 1.0f);
    glVertex2f(_x, _y + _height);
    glEnd();
    glBindTexture(GL_TEXTURE_2D, 0);
    glColor3ub(strokeR, strokeG, strokeB);
    glBegin(GL_LINE_LOOP);
    glVertex2f((GLfloat)_x, (GLfloat)_y);
    glVertex2f((GLfloat)_x+_width, (GLfloat)_y);
    glVertex2f((GLfloat)_x+_width, (GLfloat)_y+_height);
    glVertex2f((GLfloat)_x, (GLfloat)_y+_height);
    glEnd();
}
I expect an output like this within the rectangle (debugging the same PNG within the same program/thread with CImg):
But I get this:
Can anyone spot the problem with my code?
From "How pixel data are stored with CImg":
The values are not interleaved, and are ordered first along the X, Y, Z and V axes respectively (corresponding to the width, height, depth, dim dimensions), starting from the upper-left pixel to the bottom-right pixel of the instance image, with a classical scanline run. So a color image with dim=3 and depth=1 will be stored in memory as R1R2R3R4R5R6...G1G2G3G4G5G6...B1B2B3B4B5B6... (i.e. following a 'planar' structure) and not as R1G1B1R2G2B2R3G3B3... (interleaved channels).
OpenGL does not work with planar data; it expects interleaved pixel data. The easiest solution is to use a library other than CImg.
This does not fix the scaling issues, but it's a good place to start.
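To see what "interleaved" means in practice, the planar CImg buffer can also be repacked by hand before the upload. A minimal sketch (assuming an 8-bit, 3-channel image and reusing the _filename variable from the question):
// Sketch: repack CImg's planar R...G...B... layout into interleaved RGB
// before handing it to glTexImage2D. Needs <vector>.
CImg<unsigned char> image(_filename.c_str());
const int w = image.width(), h = image.height();
std::vector<unsigned char> interleaved(w * h * 3);
for (int y = 0; y < h; ++y)
    for (int x = 0; x < w; ++x)
        for (int c = 0; c < 3; ++c)
            interleaved[(y * w + x) * 3 + c] = image(x, y, 0, c);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // rows of 3-byte pixels are not 4-byte aligned
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0,
             GL_RGB, GL_UNSIGNED_BYTE, interleaved.data());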
Use CImg<T>::permute_axes() to transform your image buffer to interleaved format, like this:
CImg<unsigned char> img("imageRGB.png");
img.permute_axes("cxyz"); // Convert to interleaved representation
// Now, use img.data() here as a pointer for OpenGL functions.
img.permute_axes("yzcx"); // Go back to planar representation (if needed).
CImg is indeed a great library, so these kinds of transformations are already provided for.

OpenGL/ImageMagick Failing to Render Texture (Sometimes)

I have been having a very odd problem when trying to use OpenGL's C++ API. I am trying to load in a texture using ImageMagick, and then display it as a simple 2D textured square. I have a decent amount of experience with using OpenGL in Java, so I understand how to render a texture and bind it to a primitive. However, each time I attempt to draw it, the program either fails to render, or it renders it as a (properly sized) white square. I'm not entirely sure what is going on, but I believe it has to do with ImageMagick.
I have been using Ubuntu's terminal for compiling, and I've learned just how painful it can be to have to install libraries manually. ImageMagick first refused to compile when used in my program, and when I finally got the program to compile, it would seg-fault each time it ran. I've finally got it "working", but now, whenever I attempt to load in the image, the program will run without rendering. I haven't found anything like this on Google.
http://imgur.com/C7yKwDK
The odd thing is, very rarely it will work correctly and render the square as expected. However, when I then try to rerun the program, it fails as shown above. I've determined that the line that causes it to fail to render is the line where the image is loaded, which led me to believe the image was being loaded incorrectly and causing the program to fail. However, if I move the texture-loading code before the creation of the GL window, the program consistently renders successfully, but the textured square appears only as white (though the size of the square is correct, so I know the image loading is working).
Anyway, sorry for the long post. I've just given up solving this one on my own, and was hoping one of you could help me out.
OpenGL Initialization Code:
Texture* tx;
void GraphicsOGL :: initialize3D(int argc, char* argv[]) {
    Magick::InitializeMagick(*argv);
    glutInit(&argc, argv);

    // Loading Here ALWAYS Causes White Square
    /*glEnable(GL_TEXTURE_2D);
    tx = new Texture("Resources/Images/test.png");
    tx->load();*/

    glutInitDisplayMode(GLUT_DOUBLE|GLUT_RGBA);
    glutInitWindowSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    glutInitWindowPosition(100, 100);
    glutCreateWindow("OpenGL Game");

    glViewport(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT);
    glOrtho(0, SCREEN_WIDTH, SCREEN_HEIGHT, 0, -3, 1000);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_ALPHA_TEST);
    glEnable(GL_TEXTURE_2D);

    // Loading Here SOMETIMES Works, But Typically Fails
    tx = new Texture("Resources/Images/test.png");
    tx->load();

    glutDisplayFunc(displayCallback);
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glutMainLoop();
}
Texture Loading Code:
bool Texture::load() {
    try {
        m_image.read(m_fileName); // This Line Causes it to Fail to Render
        m_image.write(&m_blob, "RGBA");
    }
    catch (Magick::Error& Error) {
        std::cout << "Error loading texture '" << m_fileName << "': " << Error.what() << std::endl;
        return false;
    }
    width = m_image.columns();
    height = m_image.rows();
    glGenTextures(1, &m_textureObj);
    glBindTexture(m_textureTarget, m_textureObj);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
    glTexParameterf(m_textureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(m_textureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE);
    glTexImage2D(m_textureTarget, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, m_blob.data());
    //glBindTexture(m_textureTarget, 0);
    return true;
}
Texture Drawing Code:
void GraphicsOGL :: drawTexture(float x, float y, Texture* tex) {
    glEnable(GL_TEXTURE_2D);
    tex->bind();

    float depth = 0, w, h;
    w = tex->getWidth();
    h = tex->getHeight();

    glBegin(GL_QUADS);
    glVertex3f(x, y+h, depth);   glTexCoord2f(1,0);
    glVertex3f(x+w, y+h, depth); glTexCoord2f(1,1);
    glVertex3f(x+w, y, depth);   glTexCoord2f(0,1);
    glVertex3f(x, y, depth);     glTexCoord2f(0,0);
    glEnd();
}

Rendering SDL_TTF text onto OpenGL: red square instead of text

I've been attempting to render text onto an openGL window using SDL and the SDL_TTF library on windows XP, VS2010.
Versions:
SDL version 1.2.14
SDL TTF devel 1.2.10
openGL (version is at least 2-3 years old).
I have successfully created an openGL window using SDL / SDL_image and can render lines / polygons onto it with no problems.
However, moving on to text, it appears there is some flaw in my current program; I am getting the following result when trying this code here.
For those not willing to open the pastebin, here are only the crucial code segments:
void drawText(char * text) {
    glLoadIdentity();
    SDL_Color clrFg = {0, 0, 255, 0}; // set colour to blue (or 'red' for BGRA)
    SDL_Surface *sText = TTF_RenderUTF8_Blended( fntCourier, text, clrFg );
    GLuint * texture = create_texture(sText);
    glBindTexture(GL_TEXTURE_2D, *texture);
    // draw a polygon and map the texture to it, may be the source of error
    glBegin(GL_QUADS); {
        glTexCoord2i(0, 0); glVertex3f(0, 0, 0);
        glTexCoord2i(1, 0); glVertex3f(0 + sText->w, 0, 0);
        glTexCoord2i(1, 1); glVertex3f(0 + sText->w, 0 + sText->h, 0);
        glTexCoord2i(0, 1); glVertex3f(0, 0 + sText->h, 0);
    } glEnd();
    // free the surface and texture, removing this code has no effect
    SDL_FreeSurface( sText );
    glDeleteTextures( 1, texture );
}
segment 2:
// create a GL texture out of an SDL_Surface
GLuint * create_texture(SDL_Surface *surface) {
    GLuint texture = 0;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    // The SDL_Surface appears to have BGR_A formatting, however this ends up with a
    // white rectangle no matter which colour I set in the previous code.
    int Mode = GL_RGB;
    if(surface->format->BytesPerPixel == 4) {
        Mode = GL_RGBA;
    }
    glTexImage2D(GL_TEXTURE_2D, 0, Mode, surface->w, surface->h, 0, Mode,
                 GL_UNSIGNED_BYTE, surface->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    return &texture;
}
Is there an obvious bit of code I am missing?
Thank you for any help on this subject.
I've been trying to learn openGL and SDL for 3 days now, so please forgive any misinformation on my part.
EDIT:
I notice that using
TTF_RenderUTF8_Shaded
TTF_RenderUTF8_Solid
throws a null-pointer exception, which suggests the error is in the actual text-rendering call. I don't know how that explains TTF_RenderUTF8_Blended returning a red square, but I suspect all the trouble hinges on this.
I think the problem is with glEnable(GL_TEXTURE_2D) and glDisable(GL_TEXTURE_2D), which must be called every time the text is painted on the screen. Maybe the color conversion between the SDL surface and the GL texture is also not right.
I have combined create_texture and drawText into a single function that displays the text properly. Here is the code:
void drawText(char * text, TTF_Font* tmpfont) {
    SDL_Rect area;
    SDL_Color clrFg = {0, 0, 255, 0};
    SDL_Surface *sText = SDL_DisplayFormatAlpha(TTF_RenderUTF8_Blended( tmpfont, text, clrFg ));
    area.x = 0; area.y = 0; area.w = sText->w; area.h = sText->h;
    SDL_Surface* temp = SDL_CreateRGBSurface(SDL_HWSURFACE|SDL_SRCALPHA, sText->w, sText->h, 32,
                                             0x000000ff, 0x0000ff00, 0x00ff0000, 0x000000ff);
    SDL_BlitSurface(sText, &area, temp, NULL);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sText->w, sText->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, temp->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glEnable(GL_TEXTURE_2D);
    glBegin(GL_QUADS); {
        glTexCoord2d(0, 0); glVertex3f(0, 0, 0);
        glTexCoord2d(1, 0); glVertex3f(0 + sText->w, 0, 0);
        glTexCoord2d(1, 1); glVertex3f(0 + sText->w, 0 + sText->h, 0);
        glTexCoord2d(0, 1); glVertex3f(0, 0 + sText->h, 0);
    } glEnd();
    glDisable(GL_TEXTURE_2D);
    SDL_FreeSurface( sText );
    SDL_FreeSurface( temp );
}
screenshot
I'm initializing OpenGL as follows:
int Init(){
    glClearColor( 0.1, 0.2, 0.2, 1);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho( 0, 600, 300, 0, -1, 1 );
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    if( glGetError() != GL_NO_ERROR ){
        return false;
    }
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_COLOR, GL_ONE_MINUS_SRC_ALPHA);
}
I think you should just add glEnable(GL_BLEND): the text surface is created with TTF_RenderUTF8_Blended( fntCourier, text, clrFg ), so you have to enable OpenGL's blending.
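A minimal sketch of that state setup (the blend function shown here is the conventional choice for alpha-blended text, not taken from the original code):
// Sketch: typical state for drawing TTF_RenderUTF8_Blended output.
// GL_SRC_ALPHA (rather than GL_SRC_COLOR) is the usual source factor for text.
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);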
EDIT
Okay, I finally took the time to put your code through a compiler, most importantly with -Werror so that warnings turn into errors:
GLuint * create_texture(SDL_Surface *surface) {
    GLuint texture = 0;
    /*...*/
    return &texture;
}
I didn't see it at first because it's C coding 101 and quite unexpected: you must not return pointers to local variables! Once the function goes out of scope, the returned pointer points to nonsense. Why return a pointer at all? Just return the integer:
GLuint create_texture(SDL_Surface *surface) {
    GLuint texture = 0;
    /*...*/
    return texture;
}
Because of this you're also not able to delete the texture afterward: you upload it to OpenGL but then lose the reference to it.
Your code is missing a glEnable(GL_TEXTURE_2D); that's why you can't see any effect of the texture. However, your use of textures is also suboptimal. The way you did it, you recreate a whole new texture each time you're about to draw that text. If that happens in an animation loop, you'll
(1) run out of texture memory rather soon, and
(2) slow rendering down significantly.
(1) can be addressed by not generating a new texture name on each redraw; (2) can be addressed by uploading new texture data only when the text changes, and by using glTexSubImage2D instead of glTexImage2D (of course, if the dimensions of the texture change, it must be glTexImage2D). A sketch of this caching pattern follows below.
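A rough sketch of that caching pattern (names are illustrative, not from the original post, and the pixel-format handling is simplified):
// Sketch: keep one texture and re-upload only when the string changes.
// Assumes <string>, SDL_ttf and the OpenGL headers are available.
GLuint textTexture = 0;
std::string lastText;

void updateTextTexture(TTF_Font *font, const std::string &text, SDL_Color color)
{
    if (textTexture != 0 && text == lastText)
        return;                                    // nothing changed this frame

    SDL_Surface *surf = TTF_RenderUTF8_Blended(font, text.c_str(), color);
    if (textTexture == 0) {
        glGenTextures(1, &textTexture);            // allocate the name once
        glBindTexture(GL_TEXTURE_2D, textTexture);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, surf->w, surf->h, 0,
                     GL_RGBA, GL_UNSIGNED_BYTE, surf->pixels);
    } else {
        glBindTexture(GL_TEXTURE_2D, textTexture);
        // glTexSubImage2D is only valid if the new image fits inside the
        // originally allocated texture; otherwise fall back to glTexImage2D.
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, surf->w, surf->h,
                        GL_RGBA, GL_UNSIGNED_BYTE, surf->pixels);
    }
    lastText = text;
    SDL_FreeSurface(surf);
}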
EDIT: I found another possible issue, but first fix the pointer issue.
You should make sure that you're using the GL_REPLACE or GL_MODULATE texture environment mode. If you use GL_DECAL or GL_BLEND you end up with red text on a red quad.
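A one-line sketch of selecting that mode (GL_MODULATE is the default, so this only matters if it was changed elsewhere):
// Sketch: have the texture colour modulate the current colour instead of
// being combined with it as a decal/blend.
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);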
The function in my previous post was leaking memory and the program was crashing after some time...
I improved this by separating the texture loading from the displaying.
The first function must be called before the SDL loop; it loads a text string into memory. Every string loaded must use a different txtNum parameter:
GLuint texture[100];
SDL_Rect area[100];
void Load_string(char * text, SDL_Color clr, int txtNum, const char* file, int ptsize){
    TTF_Font* tmpfont;
    tmpfont = TTF_OpenFont(file, ptsize);
    SDL_Surface *sText = SDL_DisplayFormatAlpha(TTF_RenderUTF8_Solid( tmpfont, text, clr ));
    area[txtNum].x = 0;
    area[txtNum].y = 0;
    area[txtNum].w = sText->w;
    area[txtNum].h = sText->h;
    glGenTextures(1, &texture[txtNum]);
    glBindTexture(GL_TEXTURE_2D, texture[txtNum]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, sText->w, sText->h, 0, GL_BGRA, GL_UNSIGNED_BYTE, sText->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    SDL_FreeSurface( sText );
    TTF_CloseFont(tmpfont);
}
The second one displays the string and must be called inside the SDL loop:
void drawText(float coords[3], int txtNum) {
    glBindTexture(GL_TEXTURE_2D, texture[txtNum]);
    glEnable(GL_TEXTURE_2D);
    glBegin(GL_QUADS); {
        glTexCoord2f(0, 0); glVertex3f(coords[0], coords[1], coords[2]);
        glTexCoord2f(1, 0); glVertex3f(coords[0] + area[txtNum].w, coords[1], coords[2]);
        glTexCoord2f(1, 1); glVertex3f(coords[0] + area[txtNum].w, coords[1] + area[txtNum].h, coords[2]);
        glTexCoord2f(0, 1); glVertex3f(coords[0], coords[1] + area[txtNum].h, coords[2]);
    } glEnd();
    glDisable(GL_TEXTURE_2D);
}

OpenGL Tiles/Blitting/Clipping

In OpenGL, how can I select an area from an image-file that was loaded using IMG_Load()?
(I am working on a tilemap for a simple 2D game)
I'm using the following principle to load an image-file into a texture:
GLuint loadTexture( const std::string &fileName ) {
    SDL_Surface *image = IMG_Load(fileName.c_str());
    unsigned object(0);
    glGenTextures(1, &object);
    glBindTexture(GL_TEXTURE_2D, object);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->pixels);
    SDL_FreeSurface(image);
    return object;
}
I then use the following to actually draw the texture in my rendering-part:
glColor4ub(255,255,255,255);
glBindTexture(GL_TEXTURE_2D, texture);
glBegin(GL_QUADS);
glTexCoord2d(0,0); glVertex2f(x,y);
glTexCoord2d(1,0); glVertex2f(x+w,y);
glTexCoord2d(1,1); glVertex2f(x+w,y+h);
glTexCoord2d(0,1); glVertex2f(x,y+h);
glEnd();
Now what I need is a function that allows me to select certain rectangular parts from the GLuint that I get from calling loadTexture( const std::string &fileName ), such that I can then use the above code to bind these parts to rectangles and then draw them to the screen. Something like:
GLuint getTileTexture( GLuint spritesheet, int x, int y, int w, int h )
Go ahead and load the entire collage into a texture. Then select a subset of it using glTexCoord when you render your geometry.
glTexSubImage2D will not help in any way. It allows you to add more than one file to a single texture, not create multiple textures from a single file.
Example code:
void RenderSprite( GLuint spritesheet, unsigned spritex, unsigned spritey, unsigned texturew, unsigned textureh, int x, int y, int w, int h )
{
    glColor4ub(255, 255, 255, 255);
    glBindTexture(GL_TEXTURE_2D, spritesheet);
    glBegin(GL_QUADS);
    glTexCoord2d(spritex/(double)texturew, spritey/(double)textureh);
    glVertex2f(x, y);
    glTexCoord2d((spritex+w)/(double)texturew, spritey/(double)textureh);
    glVertex2f(x+w, y);
    glTexCoord2d((spritex+w)/(double)texturew, (spritey+h)/(double)textureh);
    glVertex2f(x+w, y+h);
    glTexCoord2d(spritex/(double)texturew, (spritey+h)/(double)textureh);
    glVertex2f(x, y+h);
    glEnd();
}
Although Ben Voigt's answer is the usual way to go, if you really want an extra texture for the tiles (which may help with filtering at the edges) you can use glGetTexImage and play a bit with the glPixelStore parameters:
GLuint getTileTexture(GLuint spritesheet, int x, int y, int w, int h)
{
    glBindTexture(GL_TEXTURE_2D, spritesheet);

    // first we fetch the complete texture
    GLint width, height;
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
    GLubyte *data = new GLubyte[width*height*4];
    glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);

    // now we take only a sub-rectangle from this data
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, /*filter+wrapping*/, /*whatever*/);
    glPixelStorei(GL_UNPACK_ROW_LENGTH, width);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, data + 4*(y*width + x));

    // clean up
    glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
    delete[] data;
    return texture;
}
But keep in mind that this function always reads the whole texture atlas back into CPU memory and then copies a sub-rectangle into the new, smaller texture. So it would be a good idea to create all the needed sprite textures in one go and read the data back only once. In that case you can also drop the atlas texture completely and just read the image into system memory with IMG_Load, distributing it into the individual sprite textures. Or, if you really need the large texture, at least use a PBO to copy its data into (with GL_DYNAMIC_COPY usage or the like), so the data never has to leave GPU memory.
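A rough sketch of that last suggestion (names are illustrative; it assumes a 32-bit RGBA surface loaded with IMG_Load, so each pixel is 4 bytes):
// Sketch: cut one sprite texture straight out of the SDL_Surface in system
// memory, using GL_UNPACK_ROW_LENGTH to skip over the rest of each atlas row.
GLuint tileFromSurface(SDL_Surface *atlas, int x, int y, int w, int h)
{
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    const unsigned char *pixels = static_cast<const unsigned char*>(atlas->pixels);
    glPixelStorei(GL_UNPACK_ROW_LENGTH, atlas->pitch / 4);    // atlas row length in pixels
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE,
                 pixels + y * atlas->pitch + x * 4);           // top-left corner of the tile
    glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);                    // restore the default
    return texture;
}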