In OpenGL, how can I select an area from an image file that was loaded using IMG_Load()?
(I am working on a tilemap for a simple 2D game.)
I'm using the following approach to load an image file into a texture:
GLuint loadTexture( const std::string &fileName ) {
    SDL_Surface *image = IMG_Load(fileName.c_str());

    GLuint object = 0;
    glGenTextures(1, &object);
    glBindTexture(GL_TEXTURE_2D, object);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->pixels);

    SDL_FreeSurface(image);
    return object;
}
I then use the following to actually draw the texture in my rendering code:
glColor4ub(255,255,255,255);
glBindTexture(GL_TEXTURE_2D, texture);
glBegin(GL_QUADS);
glTexCoord2d(0,0); glVertex2f(x,y);
glTexCoord2d(1,0); glVertex2f(x+w,y);
glTexCoord2d(1,1); glVertex2f(x+w,y+h);
glTexCoord2d(0,1); glVertex2f(x,y+h);
glEnd();
Now what I need is a function that lets me select rectangular parts of the GLuint I get from calling loadTexture( const std::string &fileName ), so that I can use the code above to bind those parts to rectangles and draw them to the screen. Something like:
GLuint getTileTexture( GLuint spritesheet, int x, int y, int w, int h )
Go ahead and load the entire collage into a texture. Then select a subset of it using glTexCoord when you render your geometry.
glTexSubImage2D will not help in any way. It allows you to add more than one file to a single texture, not create multiple textures from a single file.
Example code:
void RenderSprite( GLuint spritesheet, unsigned spritex, unsigned spritey, unsigned texturew, unsigned textureh, int x, int y, int w, int h )
{
    glColor4ub(255,255,255,255);
    glBindTexture(GL_TEXTURE_2D, spritesheet);
    glBegin(GL_QUADS);
        glTexCoord2d(spritex/(double)texturew, spritey/(double)textureh);
        glVertex2f(x, y);
        glTexCoord2d((spritex+w)/(double)texturew, spritey/(double)textureh);
        glVertex2f(x+w, y);
        glTexCoord2d((spritex+w)/(double)texturew, (spritey+h)/(double)textureh);
        glVertex2f(x+w, y+h);
        glTexCoord2d(spritex/(double)texturew, (spritey+h)/(double)textureh);
        glVertex2f(x, y+h);
    glEnd();
}
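For completeness, glTexSubImage2D is what you would use for the opposite task: building an atlas at runtime out of several files. A rough sketch, not part of the original answer (the file names and the 256x256 atlas / 64x64 tile sizes are made up for illustration, and the surfaces are assumed to already be 32-bit RGBA):

GLuint atlas = 0;
glGenTextures(1, &atlas);
glBindTexture(GL_TEXTURE_2D, atlas);
// allocate an empty 256x256 atlas, no pixel data yet
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 256, 256, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);

SDL_Surface *a = IMG_Load("tile_a.png");   // assumed 64x64 RGBA
SDL_Surface *b = IMG_Load("tile_b.png");   // assumed 64x64 RGBA
glTexSubImage2D(GL_TEXTURE_2D, 0,  0, 0, 64, 64, GL_RGBA, GL_UNSIGNED_BYTE, a->pixels);
glTexSubImage2D(GL_TEXTURE_2D, 0, 64, 0, 64, 64, GL_RGBA, GL_UNSIGNED_BYTE, b->pixels);
SDL_FreeSurface(a);
SDL_FreeSurface(b);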
Although Ben Voigt's answer is the usual way to go, if you really want an extra texture for the tiles (which may help with filtering at the edges) you can use glGetTexImage and play a bit with the glPixelStore parameters:
GLuint getTileTexture(GLuint spritesheet, int x, int y, int w, int h)
{
    glBindTexture(GL_TEXTURE_2D, spritesheet);

    // first we fetch the complete texture
    GLint width, height;
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
    GLubyte *data = new GLubyte[width*height*4];
    glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);

    // now we take only a sub-rectangle from this data
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);   // set filtering/wrapping as needed
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glPixelStorei(GL_UNPACK_ROW_LENGTH, width);    // row stride of the atlas, not of the tile
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, data + 4*(y*width + x));

    // clean up
    glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
    delete[] data;
    return texture;
}
But keep in mind that this function always reads the whole texture atlas back into CPU memory and then copies a sub-rectangle into the new, smaller texture. So it is a good idea to create all the sprite textures you need in one go and read the data back only once. In that case you can also drop the atlas texture completely and just read the image into system memory with IMG_Load, distributing it into the individual sprite textures from there. Or, if you really need the large texture, at least use a PBO to copy its data into (with GL_DYNAMIC_COPY usage or the like), so the data never has to leave GPU memory.
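A hedged sketch of that last variant, keeping the copy on the GPU by routing glGetTexImage through a pixel buffer object (variable names follow getTileTexture above; buffer objects and an RGBA8 atlas are assumptions):

// Sketch only: copy a w x h tile at (x, y) out of the atlas without a CPU round trip.
GLint width, height;
glBindTexture(GL_TEXTURE_2D, spritesheet);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);

GLuint pbo = 0, tile = 0;
glGenBuffers(1, &pbo);
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo);
glBufferData(GL_PIXEL_PACK_BUFFER, width * height * 4, NULL, GL_DYNAMIC_COPY);
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);   // writes into the PBO
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

glGenTextures(1, &tile);
glBindTexture(GL_TEXTURE_2D, tile);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo);
glPixelStorei(GL_UNPACK_ROW_LENGTH, width);                       // atlas row stride
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE,
             (const GLvoid *)(size_t)(4 * (y * width + x)));      // byte offset into the PBO
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
glDeleteBuffers(1, &pbo);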
Related
I'm trying to simply draw an image with OpenGL's immediate mode functions.
However, my output is kind of weird. I tried a few texture parameters, but I get the same result, sometimes with different colors. I can't quite figure out the problem, but I figure it's either the image loading or the texture setup. I'll cut to the chase; here is how I generate my texture (in a Texture2D class):
glBindTexture(GL_TEXTURE_2D, id);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
A call to glTexImage2D follows, like this: glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, _width, _height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);, where data is an array of unsigned char set to 255, white. I then load a PNG file with CImg, like this:
CImg<unsigned char> image(_filename.c_str());
image.resize(m_Width, m_Height);
glBindTexture(GL_TEXTURE_2D, m_ID);
if(image.spectrum() == 4)
{
    unsigned char* data = image.data();
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_Width, m_Height, GL_RGBA, GL_UNSIGNED_BYTE, data);
}
else if(image.spectrum() == 3)
{
    unsigned char* data = image.data();
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_Width, m_Height, GL_RGB, GL_UNSIGNED_BYTE, data);
}
glBindTexture(GL_TEXTURE_2D, 0);
But when I try to draw the texture in immediate mode like this (the origin is the upper left corner; note that the red rectangle around the texture is intentional):
void SimpleRenderer::drawTexture(Texture2D* _texture, float _x, float _y, float _width, float _height)
{
    _texture->setActive(0);

    glBegin(GL_QUADS);
        glTexCoord2f(0.0f, 0.0f);
        glVertex2f(_x, _y);
        glTexCoord2f(1.0f, 0.0f);
        glVertex2f(_x + _width, _y);
        glTexCoord2f(1.0f, 1.0f);
        glVertex2f(_x + _width, _y + _height);
        glTexCoord2f(0.0f, 1.0f);
        glVertex2f(_x, _y + _height);
    glEnd();

    glBindTexture(GL_TEXTURE_2D, 0);

    glColor3ub(strokeR, strokeG, strokeB);
    glBegin(GL_LINE_LOOP);
        glVertex2f((GLfloat)_x, (GLfloat)_y);
        glVertex2f((GLfloat)_x + _width, (GLfloat)_y);
        glVertex2f((GLfloat)_x + _width, (GLfloat)_y + _height);
        glVertex2f((GLfloat)_x, (GLfloat)_y + _height);
    glEnd();
}
I expect output like this within the rectangle (the same PNG displayed with CImg in the same program/thread):
But I get this:
Can anyone spot the problem with my code?
From How pixel data are stored with CImg:
The values are not interleaved, and are ordered first along the X, Y, Z and V axes respectively (corresponding to the width, height, depth, dim dimensions), starting from the upper-left pixel to the bottom-right pixel of the instance image, with a classical scanline run. So a color image with dim=3 and depth=1 will be stored in memory as R1R2R3R4R5R6...G1G2G3G4G5G6...B1B2B3B4B5B6... (i.e. following a 'planar' structure) and not as R1G1B1R2G2B2R3G3B3... (interleaved channels).
OpenGL does not work with planar data; it expects interleaved pixel data. The easiest solution is to use a library other than CImg.
This does not fix the scaling issues, but it's a good place to start.
Use CImg<T>::permute_axes() to transform your image buffer to interleaved format, like this:
CImg<unsigned char> img("imageRGB.png");
img.permute_axes("cxyz"); // Convert to interleaved representation
// Now, use img.data() here as a pointer for OpenGL functions.
img.permute_axes("yzcx"); // Go back to planar representation (if needed).
CImg is indeed a great library, so these kinds of transformations are already provided for in the library.
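For instance, the loading code from the question could look roughly like this after the conversion (a sketch only; it assumes the texture was created with a matching size and an RGB format, i.e. spectrum() == 3):

CImg<unsigned char> image(_filename.c_str());
image.resize(m_Width, m_Height);
image.permute_axes("cxyz");                        // planar -> interleaved RGBRGB...
glBindTexture(GL_TEXTURE_2D, m_ID);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);             // rows are tightly packed
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_Width, m_Height,
                GL_RGB, GL_UNSIGNED_BYTE, image.data());
glBindTexture(GL_TEXTURE_2D, 0);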
I have been trying to use OpenGL to render an image to the screen and, in order to learn more about texturing, my approach was to map the image to a texture and then draw it using GL_QUADS. However, all I get is a blank screen.
So, the texture was initialised as follows:
class Texture
{
public:
    void init_texture(int rows, int cols);
    void load_texture(const Image *frame);

    GLenum _texture_target;
    GLuint _texture_obj;
};

void Texture::init_texture(int rows, int cols)
{
    if (_texture_obj) glDeleteTextures(1, &_texture_obj);

    _texture_target = GL_TEXTURE_2D;
    glGenTextures(1, &_texture_obj);
    if (_texture_obj) {
        glBindTexture(_texture_target, _texture_obj);
        glTexImage2D(_texture_target, 0, GL_BGRA, cols, rows, 0,
                     GL_BGRA, GL_UNSIGNED_BYTE, (GLvoid *)NULL);
        glTexParameterf(_texture_target, GL_TEXTURE_MIN_FILTER,
                        GL_LINEAR);
        glTexParameterf(_texture_target, GL_TEXTURE_MAG_FILTER,
                        GL_LINEAR);
    }
    else {
        throw std::runtime_error("Could not create the OpenGL texture");
    }
}

void Texture::load_texture(const Image *frame)
{
    glBindTexture(GL_TEXTURE_2D, _texture_obj);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, frame->cols(),
                    frame->rows(), GL_BGRA, GL_UNSIGNED_BYTE,
                    frame->data());
}
The code above initializes and loads the texture.
To display the texture, I use a QOpenGLWidget-derived object whose initialization and paint methods look like this:
class GLWidget: public QOpenGLWidget
{
public:
    GLWidget(Image *image);
    void initializeGL();
    void resizeGL(int w, int h);
    void paintGL();
private:
    Texture texture;
};

GLWidget::GLWidget(Image *image)
{
    texture.init_texture(image->rows(), image->cols());
    texture.load_texture(image);
}
void GLWidget::initializeGL()
{
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);
}

void GLWidget::resizeGL(int w, int h)
{}

void GLWidget::paintGL()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();
    glEnable(GL_TEXTURE_2D);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
    glBindTexture(GL_TEXTURE_2D, texture._texture_obj);
    glBegin(GL_QUADS);
        // width(), height() give the widget width and height
        glTexCoord2i(0, 0); glVertex2i(0, 0);
        glTexCoord2i(0, 1); glVertex2i(0, this->width());
        glTexCoord2i(1, 1); glVertex2i(this->height(), this->width());
        glTexCoord2i(1, 0); glVertex2i(this->height(), 0);
    glEnd();
    glDisable(GL_TEXTURE_2D);
}
However, this only shows me a blank screen. Is there some viewport that I need to set up? I only started using OpenGL a few days ago, so I am quite green in this area. I am using Qt 5.4 and OpenGL 4.3 on Linux.
The call
glTexImage2D(_texture_target, 0, GL_BGRA, cols, rows, 0, GL_BGRA, GL_UNSIGNED_BYTE, (GLvoid *)NULL);
is invalid, because GL_BGRA is not a valid internalFormat. You should use GL_RGBA for that. Note that the internalFormat just defines the basic data type and the number of channels, the actual layout is totally implementation-specific. The format parameter on the other hand tells the GL how to interpret the data in client memory, so formats like GL_RGBA and GL_BGRA are actually defined.
I'm not really familiar with QOpenGLWidget. However, I think that creating your texture in the constructor might be a bad idea, as the GL context might not be available at that point (or at least not made current to the thread). You should move that stuff into initializeGL().
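A minimal sketch of both suggestions combined: GL_RGBA as the internal format, and the GL work moved out of the constructor into initializeGL(), where the context is guaranteed to be current. The m_image member holding the frame is an assumption made for illustration.

void GLWidget::initializeGL()
{
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);
    // The context is current here, so it is safe to create and fill the texture.
    texture.init_texture(m_image->rows(), m_image->cols());   // m_image: assumed member
    texture.load_texture(m_image);
}

void Texture::init_texture(int rows, int cols)
{
    glGenTextures(1, &_texture_obj);
    glBindTexture(GL_TEXTURE_2D, _texture_obj);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, cols, rows, 0,    // internalFormat: GL_RGBA
                 GL_BGRA, GL_UNSIGNED_BYTE, NULL);             // client format: GL_BGRA is fine here
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}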
I'm attempting to render a .png image as a texture. However, all that is being rendered is a white square.
I give my texture a unique int ID called texID and read the pixel data into a buffer 'image' (declared in the .h file). I load my pixel buffer, do all of my OpenGL stuff, and bind that pixel buffer to a texture for OpenGL. I then draw it all using glDrawElements.
Also, I initialize the texture with a size of 32x32 when its constructor is called, so I doubt it is related to a power-of-two size issue.
Can anybody see any mistakes in my OpenGL GL_TEXTURE_2D setup that might give me a blank white square?
#include "Texture.h"
Texture::Texture(int width, int height, string filename)
{
    const char* fnPtr = filename.c_str(); //our image loader accepts a ptr to a char, not a string
    printf(fnPtr);

    w = width;  //give our texture a width and height; we need to pass the width and height values in manually
    h = height; //UPDATE: these MUST be P.O.T.

    unsigned error = lodepng::decode(image, w, h, fnPtr); //lodepng's decode function will load the pixel data into the image vector
    //display any errors with the texture
    if(error)
    {
        cout << "\ndecoder error " << error << ": " << lodepng_error_text(error) << endl;
    }
    for(int i = 0; i < image.size(); i++)
    {
        printf("%i,", image.at(i));
    }
    printf("\nImage size is %i", image.size());
    //image now contains our pixel data. All ready for OpenGL to do its thing
    //let's get this texture up in the video memory
    texGLInit();
}
void Texture::texGLInit()
{
    //WHERE YOU LEFT OFF: glGenTextures isn't assigning an ID to textures; it stays at zero the whole time
    //I believe this is why it's been rendering white
    glGenTextures(1, &textures);
    printf("\ntexture = %u", textures);
    glBindTexture(GL_TEXTURE_2D, textures); //everything we're about to do is about this texture
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    //glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
    //glDisable(GL_COLOR_MATERIAL);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, &image);
    //we COULD free the image vector's memory right about now.
}
void Texture::draw(point centerPoint, point dimensions)
{
    glEnable(GL_TEXTURE_2D);
    printf("\nDrawing block at (%f, %f)", centerPoint.x, centerPoint.y);
    glBindTexture(GL_TEXTURE_2D, textures); //bind the texture
    //create a quick vertex array for the primitive we're going to bind the texture to
    printf("TexID = %u", textures);
    GLfloat vArray[8] =
    {
        centerPoint.x-(dimensions.x/2), centerPoint.y-(dimensions.y/2), //bottom left  i0
        centerPoint.x-(dimensions.x/2), centerPoint.y+(dimensions.y/2), //top left     i1
        centerPoint.x+(dimensions.x/2), centerPoint.y+(dimensions.y/2), //top right    i2
        centerPoint.x+(dimensions.x/2), centerPoint.y-(dimensions.y/2)  //bottom right i3
    };
    //create a quick texture array (we COULD create this on the heap rather than creating/destroying it every cycle)
    GLfloat tArray[8] =
    {
        0.0f,0.0f, //0
        0.0f,1.0f, //1
        1.0f,1.0f, //2
        1.0f,0.0f  //3
    };
    //and finally... the index array... remember, we draw in triangles (and we'll go CW)
    GLubyte iArray[6] =
    {
        0,1,2,
        0,2,3
    };
    //Activate arrays
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    //Give OpenGL a pointer to our vArray and tArray
    glVertexPointer(2, GL_FLOAT, 0, &vArray[0]);
    glTexCoordPointer(2, GL_FLOAT, 0, &tArray[0]);
    //Draw it all
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, &iArray[0]);
    //glDrawArrays(GL_TRIANGLES,0,6);
    //Disable the vertex arrays
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glDisable(GL_TEXTURE_2D);
    //done!
    /*glBegin(GL_QUADS);
        glTexCoord2f(0.0f,0.0f);
        glVertex2f(centerPoint.x-(dimensions.x/2), centerPoint.y-(dimensions.y/2));
        glTexCoord2f(0.0f,1.0f);
        glVertex2f(centerPoint.x-(dimensions.x/2), centerPoint.y+(dimensions.y/2));
        glTexCoord2f(1.0f,1.0f);
        glVertex2f(centerPoint.x+(dimensions.x/2), centerPoint.y+(dimensions.y/2));
        glTexCoord2f(1.0f,0.0f);
        glVertex2f(centerPoint.x+(dimensions.x/2), centerPoint.y-(dimensions.y/2));
    glEnd();*/
}
Texture::Texture(void)
{
}
Texture::~Texture(void)
{
}
I'll also include the main class' init, where I do a bit more OGL setup before this.
void init(void)
{
    printf("\n......Hello Guy. \n....\nInitilising");
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0, XSize, 0, YSize);
    glEnable(GL_TEXTURE_2D);
    myBlock = new Block(0, 0, offset);
    glClearColor(0, 0.4, 0.7, 1);
    glLineWidth(2); // Width of the drawing line
    glMatrixMode(GL_MODELVIEW);
    glDisable(GL_DEPTH_TEST);
    printf("\nInitialisation Complete");
}
Update: adding the main function where I first set up my OpenGL window.
int main(int argc, char** argv)
{
    glutInit(&argc, argv); // GLUT Initialization
    glutInitDisplayMode(GLUT_RGBA|GLUT_DOUBLE); // Initializing the Display mode
    glutInitWindowSize(800, 600); // Define the window size
    glutCreateWindow("Gem Miners"); // Create the window, with caption.
    printf("\n========== McLeanTech Systems =========\nBecoming Sentient\n...\n...\n....\nKILL\nHUMAN\nRACE \n");
    init(); // All OpenGL initialization
    //-- Callback functions ---------------------
    glutDisplayFunc(display);
    glutKeyboardFunc(mykey);
    glutSpecialFunc(processSpecialKeys);
    glutSpecialUpFunc(processSpecialUpKeys);
    //glutMouseFunc(mymouse);
    glutMainLoop(); // Loop waiting for event
}
Here's the usual checklist for whenever textures come out white:
OpenGL context created and made current on the calling thread when attempting to load the texture?
Allocated texture ID using glGenTextures?
Are the parameters format and internal format to glTex[Sub]Image… valid OpenGL tokens allowed as input for this function?
Is mipmapping being used?
YES: Supply all mipmap layers – ideally also set glTexParameteri GL_TEXTURE_BASE_LEVEL and GL_TEXTURE_MAX_LEVEL, as well as GL_TEXTURE_MIN_LOD and GL_TEXTURE_MAX_LOD.
NO: Turn off mipmap filtering by setting glTexParameteri GL_TEXTURE_MIN_FILTER to GL_NEAREST or GL_LINEAR.
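Put together, a minimal setup that passes the checklist might look roughly like this (a sketch only; w, h and image are assumed to be the decoded width, height and pixel vector from the question, and note the pixel pointer is image.data() rather than the address of the vector object itself):

GLuint tex = 0;
glGenTextures(1, &tex);                                            // allocate a texture ID
glBindTexture(GL_TEXTURE_2D, tex);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);  // no mipmaps supplied, so no mipmap filter
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0,                  // valid internal format / format tokens
             GL_RGBA, GL_UNSIGNED_BYTE, image.data());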
I'm trying to draw a simple texture in OpenGL. I made a simple Texture class:
class Texture {
public:
    unsigned int id;
    unsigned char image[256*256*3];
    int level;
    int border;
    int width;
    int height;

    Texture (int level = 0, int border = 0) : level(level), border(border) {
        glGenTextures(1, &id);
        width = 256, height = 256;
        glTexImage2D(GL_TEXTURE_2D, level, GL_RGB, width, height, border, GL_RGB, GL_UNSIGNED_BYTE, &image[0]);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
        for (int i = 0; i < width*height*3; i += 3) {
            image[i]   = 1; //i%255;
            image[i+1] = 1; // 255-i%255;
            image[i+2] = 1; // i%128;
        }
    }

    void useIt() {
        glBindTexture( GL_TEXTURE_2D, id );
    }
};
It creates an unsigned char array and fills it with some data. I'm trying to use it this way:
glEnable(GL_TEXTURE_2D);
texture->useIt();
glBegin(GL_TRIANGLES);
glNormal3d(0, 1, 0);
glTexCoord2d(0.0,0.0);
glVertex3f(width/-2.f,height/2.f,depth/2.f);
glTexCoord2d(1.0,1.0);
glVertex3f(width/2.f,height/2.f,depth/2.f);
glTexCoord2d(1.0,0.0);
glVertex3f(width/2.f,height/2.f,depth/-2.f);
glTexCoord2d(0.0,0.0);
glVertex3f(width/-2.f,height/2.f,depth/2.f);
glTexCoord2d(1.0,1.0);
glVertex3f(width/2.f,height/2.f,depth/-2.f);
glTexCoord2d(0.0,1.0);
glVertex3f(width/-2.f,height/2.f,depth/-2.f);
glEnd();
glDisable(GL_TEXTURE_2D);
It draws the plane, but without the texture (it draws with the previously used material). What am I doing wrong?
Three possible issues with your code. Brett Hale already told you that you need to bind a texture object before uploading data to it with glTexImage.
glTexImage creates a copy of the data you supply to it (this is different from the glVertex…Pointer functions, which only take a pointer or an offset into a buffer object). However, you're filling the image array with data after you have copied its contents to the texture. You may also safely delete the image array after copying the data to the texture.
Last but not least: those operations are in a constructor. If the texture class instance lives in a scope that is initialized before an OpenGL context has been created, nothing will happen at all, because there is no OpenGL context. So either make sure the texture object is created only after an OpenGL context is available, or put the texture creation and upload code into a separate method that you call once an OpenGL context is available.
glBindTexture is required in the Texture constructor, prior to the glTex* operations.
You might also require: glPixelStorei(GL_UNPACK_ALIGNMENT, 1) prior to glTexImage2D, since row memory addresses are not on 4-byte boundaries.
By the way, you need to set the image data before you 'upload' it via glTexImage2D. Right now you are just filling the texture with uninitialized data. Furthermore, the loop that sets the RGB byte data gives you values very close to black: all (1, 1, 1).
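Putting both answers together, a corrected version might look roughly like this (a sketch only; createGLTexture is a hypothetical method name, and it must be called only once an OpenGL context is current):

class Texture {
public:
    unsigned int id;
    unsigned char image[256*256*3];
    int width, height;

    Texture() : id(0), width(256), height(256) {
        for (int i = 0; i < width*height*3; i += 3) {   // fill the data *before* uploading
            image[i]   = i % 255;
            image[i+1] = 255 - i % 255;
            image[i+2] = i % 128;
        }
    }

    void createGLTexture() {                            // call after the GL context exists
        glGenTextures(1, &id);
        glBindTexture(GL_TEXTURE_2D, id);               // bind before any glTex* call
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);          // safe default for tightly packed rows
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0,
                     GL_RGB, GL_UNSIGNED_BYTE, image);  // now copies meaningful data
    }

    void useIt() { glBindTexture(GL_TEXTURE_2D, id); }
};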
So I have looked around and I haven't found the best way to implement this in SDL and OpenGL. At the moment I can display text on the screen with a TTF, but it's not really displaying the text the way it should. Attached is a screenshot of how the engine looks when displaying text. From what I can see in my code, I think what is happening is that I am not blitting my text texture with the main SDL_Surface that I am using as my window. I say that because in the picture my character, who has a red collision box around him, is being covered up by the text texture I'm rendering the text with. Any ideas of what I can do?
In my game loop I call beginDraw() and endDraw() before and after I draw everything to the screen.
Picture: http://public.gamedev.net/uploads/monthly_07_2012/post-200874-0-25909300-1342404845_thumb.png
All the engine code can be found here: https://github.com/Jevi/SDL_GL_ENGINE
P.S.: I'm going to make another post for this, but I might as well ask here too if you're already looking at my code. I have been noticing some memory issues with the engine: in Task Manager, the memory allocated to my program is constantly climbing after starting at around 11,000 K.
void graphics::SDL_GL_RenderText(float x1, float y1, int width, int height, const char* text, int ptsize, const char* ttfLoc, int r, int g, int b)
{
    SDL_Surface* temp;
    SDL_Surface* temp2;
    SDL_Rect rect;
    TTF_Font* font;
    SDL_Color textColor;
    unsigned int texture;

    font = TTF_OpenFont( ttfLoc , ptsize );
    textColor.r = r;
    textColor.g = g;
    textColor.b = b;
    temp = TTF_RenderText_Blended( font, text, textColor );
    // width = nextpoweroftwo(width);
    // height = nextpoweroftwo(height);
    temp2 = SDL_CreateRGBSurface(0, width, height, 32, r, g, b, 0);
    SDL_BlitSurface(temp, 0, temp2, 0);

    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, temp2->w, temp2->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, temp2->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    /* prepare to render our texture */
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, texture);
    glColor3f(1.0f, 1.0f, 1.0f);

    /* Draw a quad at location */
    glBegin(GL_QUADS);
        glTexCoord2d(0,0);
        glVertex2f(x1, y1);
        glTexCoord2d(1,0);
        glVertex2f(x1 + temp2->w, y1);
        glTexCoord2d(1,1);
        glVertex2f(x1 + temp2->w, y1 + temp2->h);
        glTexCoord2d(0,1);
        glVertex2f(x1, y1 + temp2->h);
    glEnd();
    glFinish();

    glDisable(GL_BLEND);
    glDisable(GL_TEXTURE_2D);

    SDL_FreeSurface(temp);
    SDL_FreeSurface(temp2);
    TTF_CloseFont(font);
    glDeleteTextures(1, &texture);
}
void graphics::GL_BeginDraw()
{
    glClear(GL_COLOR_BUFFER_BIT);
    glPushMatrix(); // Start rendering phase
    glOrtho(0, width, height, 0, -1, 1); // Set the matrix
}

void graphics::GL_EndDraw()
{
    glPopMatrix(); // End rendering phase
    SDL_GL_SwapBuffers();
    glFinish();
}