Resizing font size using glutBitmapString - opengl

I'm currently using Glut's function glutBitmapString to overlay text on my windows. I know that I can specify the font/size using different Bitmapped fonts. However, the largest text possible is GLUT_BITMAP_TIMES_ROMAN_24, is it possible to print 2D text with even bigger fonts? Or is there any way to resize the font displayed by glutBitmapString?

Render your text to a texture and then render a quad with that texture with whatever scaling factor you want.
Something like this:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <cmath>
GLuint tex = 0, fbo = 0, rbo = 0;
GLuint fbo_w = 0, fbo_h = 0;
bool SetFboSize(int width, int height)
{
int max_size;
glGetIntegerv( GL_MAX_RENDERBUFFER_SIZE_EXT, &max_size );
if( width > max_size || height > max_size ) return false;
fbo_w = width;
fbo_h = height;
// create FBO
if(fbo) glDeleteFramebuffersEXT( 1, &fbo );
glGenFramebuffersEXT( 1, &fbo );
glBindFramebufferEXT( GL_FRAMEBUFFER_EXT, fbo );
// create and attach a new texture as the FBO's color buffer
if(tex) glDeleteTextures( 1, &tex );
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_2D, tex );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL );
glFramebufferTexture2DEXT( GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, tex, 0 );
// create and attach a new depth buffer to currently bound FBO
if(rbo) glDeleteRenderbuffersEXT( 1, &rbo );
glGenRenderbuffersEXT( 1, &rbo );
glBindRenderbufferEXT( GL_RENDERBUFFER_EXT, rbo );
glRenderbufferStorageEXT( GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT, width, height );
glFramebufferRenderbufferEXT( GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, rbo );
if( GL_FRAMEBUFFER_COMPLETE_EXT != glCheckFramebufferStatusEXT( GL_FRAMEBUFFER_EXT ) )
{
return false;
}
glBindFramebufferEXT( GL_FRAMEBUFFER_EXT, 0 ); // unbind fbo
return true;
}
/*
 * Per-frame callback.
 * Pass 1: draws the bitmap string into the FBO's color texture.
 * Pass 2: draws a quad textured with that result, scaled over time.
 */
void display()
{
// render to texture
glBindFramebufferEXT( GL_FRAMEBUFFER_EXT, fbo );
// make sure the FBO's color texture is not bound while rendering into it
glBindTexture( GL_TEXTURE_2D, 0 );
{
glViewport( 0, 0, fbo_w, fbo_h );
// clear to transparent black so only the text carries color/alpha
glClearColor( 0, 0, 0, 0 );
glClear( GL_COLOR_BUFFER_BIT );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
glColor3ub( 255, 0, 0 );
// glWindowPos*() (GL 1.4) sets the raster position in window coordinates,
// ignoring the current transforms — text starts at the lower-left corner
glWindowPos2i( 0, 0 );
glutBitmapString( GLUT_BITMAP_TIMES_ROMAN_24, (unsigned char*)"Hello, world!" );
}
// render to screen
glBindFramebufferEXT( GL_FRAMEBUFFER_EXT, 0 );
{
int w = glutGet( GLUT_WINDOW_WIDTH );
int h = glutGet( GLUT_WINDOW_HEIGHT );
glViewport( 0, 0, w, h );
glClearColor( 0, 0, 0, 0 );
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
// aspect-ratio-corrected orthographic projection spanning [-2, 2] vertically
double ar = w / (double)h;
glOrtho( -2 * ar, 2 * ar, -2, 2, -1, 1);
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
glColor3ub( 255, 255, 255 );
glEnable( GL_TEXTURE_2D );
glBindTexture( GL_TEXTURE_2D, tex );
// animate the scale over time; note sin() goes negative, which mirrors the quad
float scale = sin( (double)glutGet( GLUT_ELAPSED_TIME ) / 1000.0f );
glScalef( scale, scale, 1 );
// unit quad, counter-clockwise from the bottom-left corner
glBegin( GL_QUADS );
glTexCoord2i( 0, 0 );
glVertex2i( -1, -1 );
glTexCoord2i( 1, 0 );
glVertex2i( 1, -1 );
glTexCoord2i( 1, 1 );
glVertex2i( 1, 1 );
glTexCoord2i( 0, 1 );
glVertex2i( -1, 1 );
glEnd();
}
glutSwapBuffers();
}
/* Timer callback: requests a redraw, then re-arms itself so the scene
 * animates at roughly 60 Hz (one tick every ~16 ms). */
void timer( int value )
{
    glutPostRedisplay();
    glutTimerFunc( 16, timer, 0 ); // schedule the next tick
}
int main( int argc, char **argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE );
glutInitWindowSize( 640, 480 );
glutCreateWindow( "GLUT" );
glewInit();
if( !GLEW_VERSION_1_4 )
return -1;
if( !GLEW_EXT_framebuffer_object )
return -1;
if( !SetFboSize( 200, 50 ) )
return -1;
glutDisplayFunc( display );
glutTimerFunc( 0, timer, 0 );
glutMainLoop();
return 0;
}

Related

glTexCoordPointer output not as expected [closed]

Closed. This question needs debugging details. It is not currently accepting answers.
Edit the question to include desired behavior, a specific problem or error, and the shortest code necessary to reproduce the problem. This will help others answer the question.
Closed 4 years ago.
Improve this question
I am trying to use glDrawElements , glTexCoordPointer and glTexImage1D to create a 1D texture, render a surface dataset and color the vertices according to their height value.
GLuint tex = 0;
My texture generation:
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_1D, tex );
unsigned char pixels[] =
{
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
};
glTexImage1D( GL_TEXTURE_1D, 0, GL_RGBA, 3, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels );
glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
vArray is an array which stores the vertices for rendering.
float vArray[12] = {
0.0, 1.0, 0.0,
0.0, 3.0, 1.0,
1.0, 2.0, 0.0,
1.0, 1.0, 1.0,
}
iArray is an array which stores the indices for rendering.
int iSize = 6;
int iArray[6] = {
0, 2, 1,
2, 1, 4,
}
tArray is an array which stores the normalised heights.
GLfloat tArray[4] = {
0.0, 0.3, 1.0, 0.0,
}
My render code:
glEnable(GL_TEXTURE_1D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_1D, tex);
glTexCoordPointer(1, GL_FLOAT, sizeof(GL_FLOAT), tArray);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3 ,GL_FLOAT,0, vArray);
glDrawElements(GL_TRIANGLES, iSize, GL_UNSIGNED_INT, iArray);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisable(GL_TEXTURE_1D);
The final output is not as I expected, hoping somebody can point out my mistakes.
RE: your glTexCoordPointer() call: Tightly-packed arrays usually use 0 for stride.
vArray and tArray only have four elements, not five, so the 4 in iArray will cause OpenGL to read off the ends of those arrays into garbage (if you're lucky) or segfaults (if you're not).
All together:
#include <GL/glew.h>
#include <GL/glut.h>
GLuint tex = 0;
void display()
{
glClear( GL_COLOR_BUFFER_BIT );
float vArray[] =
{
-0.5, -0.5,
0.5, -0.5,
0.5, 0.5,
-0.5, 0.5,
};
GLfloat tArray[] =
{
0.0, 0.3, 1.0, 0.0,
};
unsigned int iArray[] =
{
0, 1, 2,
2, 3, 0,
};
glEnable( GL_TEXTURE_1D );
glBindTexture( GL_TEXTURE_1D, tex );
glEnableClientState( GL_TEXTURE_COORD_ARRAY );
glTexCoordPointer( 1, GL_FLOAT, 0, tArray );
glEnableClientState( GL_VERTEX_ARRAY );
glVertexPointer( 2, GL_FLOAT, 0, vArray );
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_INT, iArray );
glDisableClientState( GL_TEXTURE_COORD_ARRAY );
glDisableClientState( GL_VERTEX_ARRAY );
glDisable( GL_TEXTURE_1D );
glutSwapBuffers();
}
int main( int argc, char** argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize( 400, 400 );
glutCreateWindow( "GLUT" );
glewInit();
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_1D, tex );
unsigned char pixels[] =
{
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
};
glTexImage1D( GL_TEXTURE_1D, 0, GL_RGBA, 3, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels );
glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glutDisplayFunc( display );
glutMainLoop();
return 0;
}
EDIT: 2D version:
#include <GL/glew.h>
#include <GL/glut.h>
GLuint tex = 0;
void display()
{
glClear( GL_COLOR_BUFFER_BIT );
float vArray[] =
{
-0.5, -0.5,
0.5, -0.5,
0.5, 0.5,
-0.5, 0.5,
};
GLfloat tArray[] =
{
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
};
unsigned int iArray[] =
{
0, 1, 2,
2, 3, 0,
};
glEnable( GL_TEXTURE_2D );
glBindTexture( GL_TEXTURE_2D, tex );
glEnableClientState( GL_TEXTURE_COORD_ARRAY );
glTexCoordPointer( 2, GL_FLOAT, 0, tArray );
glEnableClientState( GL_VERTEX_ARRAY );
glVertexPointer( 2, GL_FLOAT, 0, vArray );
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_INT, iArray );
glDisableClientState( GL_TEXTURE_COORD_ARRAY );
glDisableClientState( GL_VERTEX_ARRAY );
glDisable( GL_TEXTURE_2D );
glutSwapBuffers();
}
int main( int argc, char** argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize( 400, 400 );
glutCreateWindow( "GLUT" );
glewInit();
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_2D, tex );
unsigned char pixels[] =
{
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
0, 0, 255, 255,
};
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, 2, 2, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels );
glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glutDisplayFunc( display );
glutMainLoop();
return 0;
}

1D Texture lookup table with fixed function pipeline

I want to create a 1D texture with OpenGL's (old) fixed-function pipeline that interpolates between three colors.
After looking online: glTexImage1D is commonly referred to. I cannot find any good simple sample codes for this, and I wouldn't know how to return a (RGB) value from this.
Does OpenGL (fixed function) have any methods for creating a 1D Texture, and returning an RGB value from this?
Yup, glTexImage1D() + GL_TEXTURE_1D:
#include <GL/glew.h>
#include <GL/glut.h>
GLuint tex = 0;
/*
 * Draws one triangle in immediate mode; each corner samples a different
 * point along the 1D texture, so the ramp colors blend across the face.
 */
void display()
{
    glClear( GL_COLOR_BUFFER_BIT );
    glEnable( GL_TEXTURE_1D );
    glBindTexture( GL_TEXTURE_1D, tex );

    // Per-corner 1D texture coordinate and (x, y) position.
    const float coord[3] = {  0.0f,  0.5f, 1.0f };
    const float posX[3]  = { -0.5f,  0.5f, 0.0f };
    const float posY[3]  = { -0.5f, -0.5f, 0.5f };

    glBegin( GL_TRIANGLES );
    for( int i = 0; i < 3; ++i )
    {
        glTexCoord1f( coord[i] );
        glVertex2f( posX[i], posY[i] );
    }
    glEnd();

    glutSwapBuffers();
}
int main( int argc, char** argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize( 400, 400 );
glutCreateWindow( "GLUT" );
glewInit();
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_1D, tex );
unsigned char pixels[] =
{
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
};
glTexImage1D( GL_TEXTURE_1D, 0, GL_RGBA, 3, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels );
glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glutDisplayFunc( display );
glutMainLoop();
return 0;
}

Opening bmp image to texture in OpenGL

I'm having problems opening a bmp image to a texture using OpenGl
Here is the loadTexture func:
/*
 * Loads a 24-bit BMP from the file named by the (out-of-view) global
 * `filename` and uploads it as a 2D texture.  `dataPos`, `width`, `height`
 * and `data` are also globals defined elsewhere in the file.
 *
 * NOTE(review): every GL call below needs a current GL context — calling
 * this before glutCreateWindow() is what made it return 0 (see answer).
 */
GLuint loadTexture() {
FILE *f;
int imageSize,rd;
f = fopen(filename, "rb");
if(f == 0){
printf("Couldn't open file\n");
exit(-1);
}
// 54-byte BMP file header + info header
GLubyte header[54];
// NOTE(review): fread() return value is not checked (`rd` is never used);
// a short read would leave `header` partially uninitialized.
fread(header, 54,1,f);
if(header[0] != 'B' || header[1] != 'M'){
printf("File not bitmap\n");
exit(1);
}
// Pull fields straight out of the header: pixel-data offset (0x0A),
// image size in bytes (0x22), width (0x12), height (0x16).
dataPos = *(int*)&(header[0x0A]);
imageSize = *(int*)&(header[0x22]);
width = *(int*)&(header[0x12]);
height = *(int*)&(header[0x16]);
// Some writers leave these fields zero; fall back to computed defaults.
if (imageSize==0) imageSize=width*height*3;
if (dataPos==0) dataPos=54;
// NOTE(review): `new` means this is C++, not C; `data` is never deleted
// here (presumably freed by the caller — TODO confirm).
data = new unsigned char [imageSize];
fread(data,1,imageSize,f);
fclose(f);
GLuint textureID;
glGenTextures(1, &textureID);
glBindTexture(GL_TEXTURE_2D, textureID);
// NOTE(review): 3-byte BGR rows may not be 4-byte aligned; the answer
// suggests glPixelStorei(GL_UNPACK_ALIGNMENT, 1) before this call.
glTexImage2D(GL_TEXTURE_2D, 0,GL_RGB, width, height, 0, GL_BGR, GL_UNSIGNED_BYTE, data);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP); return textureID;
}
The function always returns 0, but there is no error (I looked at glGetError() as well)
When trying to load the texture anyway:
// Draw pass from the question: binds the texture and draws one quad.
glClear(GL_COLOR_BUFFER_BIT);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE,GL_REPLACE);
//BOTTOM LEFT - red
glBindTexture(GL_TEXTURE_2D,texture);
glViewport(0,0,256,256);
// NOTE(review): the texcoord/vertex pairings below do not track the corners
// consistently (e.g. (1,0) is paired with (-1,-1)); even once the texture
// loads, the image will appear rotated/mirrored — confirm intent.
glBegin(GL_QUADS);
glTexCoord2f(0,0);
glVertex2f(-1,1);
glTexCoord2f(1,0);
glVertex2f(-1,-1);
glTexCoord2f(0,1);
glVertex2f(1,-1);
glTexCoord2f(1,1);
glVertex2f(1,1);
glEnd();
I get a white square and not the picture..
This is my init func:
// Init sequence from the question: single-buffered RGB window.
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(512, 512);
glutCreateWindow("Sample");
glEnable(GL_TEXTURE_2D);
// NOTE(review): no glMatrixMode() call precedes this, so glOrtho() applies
// to whatever matrix stack is current (GL_MODELVIEW by default) — confirm.
glOrtho(-1.0,1.0,-1.0,1.0,2.0,-2.0);
glClearColor(0,0,0,0);
// loadTexture() runs after glutCreateWindow(), so a GL context exists here.
texture = loadTexture();
// NOTE(review): texture is a GLuint; %u would be the matching specifier.
printf("Texture: %d\n",texture);
glutDisplayFunc(mydisplay);
glutMainLoop();
Any thoughts?
You don't get a current GL context with GLUT until glutCreateWindow() successfully returns. glutInitDisplayMode() is not sufficient.
All the GL functions you call in loadTexture() require a current GL context to function.
Move texture = loadTexture(); to somewhere after glutCreateWindow() and before glutMainLoop();.
Also, if you're going to be using 3-component BGR/RGB make sure to use glPixelStorei() to set GL_UNPACK_ALIGNMENT to 1 (instead of the default 4) before your glTexImage2D() call.
Here's the simplest thing that works on my system:
// http://glew.sourceforge.net/
#include <GL/glew.h>
#include <GL/glut.h>
/*
 * Builds a 2x2 BGR test texture entirely in memory (no file I/O) and
 * returns its texture object name.  Requires a current GL context.
 */
GLuint loadTexture()
{
    // 2x2 texels, 3 bytes each, tightly packed, BGR order:
    // blue, green / red, white.
    const unsigned char texels[] =
    {
        255, 0, 0,   0, 255, 0,
        0, 0, 255,   255, 255, 255,
    };
    const GLsizei texW = 2;
    const GLsizei texH = 2;

    GLuint textureID = 0;
    glGenTextures(1, &textureID);
    glBindTexture(GL_TEXTURE_2D, textureID);
    // 3-byte texels mean rows are not 4-byte aligned, so the unpack
    // alignment must drop from the default 4 to 1.
    glPixelStorei( GL_UNPACK_ALIGNMENT, 1 );
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texW, texH, 0, GL_BGR, GL_UNSIGNED_BYTE, texels);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP);
    return textureID;
}
GLuint texture = 0;
/*
 * Clears the window, sets up a fixed orthographic view, and draws one quad
 * textured with the 2x2 test texture.
 */
void display()
{
    glClearColor( 0, 0, 0, 1 );
    glClear( GL_COLOR_BUFFER_BIT );

    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    glOrtho( -2, 2, -2, 2, 2, -2 );
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();

    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D,texture);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE,GL_REPLACE);

    // Quad corners, counter-clockwise from the bottom-left; each corner's
    // (s, t) texture coordinate follows it in lockstep.
    const float st[4][2] = { { 0, 0 }, { 1, 0 }, { 1, 1 }, { 0, 1 } };
    const int   xy[4][2] = { { -1, -1 }, { 1, -1 }, { 1, 1 }, { -1, 1 } };
    glBegin(GL_QUADS);
    for( int i = 0; i < 4; ++i )
    {
        glTexCoord2f( st[i][0], st[i][1] );
        glVertex2i( xy[i][0], xy[i][1] );
    }
    glEnd();

    glutSwapBuffers();
}
int main( int argc, char **argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize( 640, 480 );
glutCreateWindow( "GLUT" );
glewInit();
texture = loadTexture();
glutDisplayFunc( display );
glutMainLoop();
return 0;
}

Rendering Freetype glyph to OpenGL (3.3+) texture results in repeated texture with artifacts

I was simply trying to display a glyph (letter Ě) loaded from Freetype as a bitmap picture onto an OpenGL texture that is stretched across the whole window. Result is here:
To get to this result I had to set an extremely large FT_Set_Pixel_Sizes to 450 otherwise the resulting image looks even worse.
I am using GLSLShader loader class by M.M. Mobeen, GLEW, SDL2 and GLM.
shaders/shader.vert
#version 330 core
layout(location=0) in vec4 vVertex; //object space vertex
out vec2 vUV; // texture coordinates for texture lookup in the fragment shader
void main()
{
    // Pass the texture coordinate packed into zw through to the
    // fragment stage.
    vUV = vVertex.zw;
    // The quad position comes straight from xy, already in clip space.
    gl_Position = vec4( vVertex.xy, 0.0, 1.0 );
}
shaders/shader.frag
#version 330 core
layout (location=0) out vec4 vFragColor;
smooth in vec2 vUV;
uniform sampler2D textureMap;
void main()
{
// Sample the glyph texture at the interpolated UV.
vFragColor = texture(textureMap, vUV);
}
main.cpp
#include <glew.h>
#include <glm/glm.hpp>
#include "SDL2/SDL.h"
#include "SDL2/SDL_opengl.h"
#include <ft2build.h>
#include FT_FREETYPE_H
#include "GLSLShader.h" // https://github.com/bagobor/opengl33_dev_cookbook_2013/blob/master/Chapter3/src/GLSLShader.h
//shader reference
GLSLShader shader;
//vertex array and vertex buffer object IDs
GLuint vaoID;
GLuint vboVerticesID;
GLuint vboIndicesID;
//texture ID
GLuint textureID;
//quad vertices and indices
glm::vec4 vertices[4];
GLushort indices[6];
/*
 * Question code: creates an SDL2 window with a core-profile 3.2 context,
 * builds a fullscreen-ish quad VAO, renders one FreeType glyph into a
 * texture, and draws the quad each frame.  The answer's fix is the
 * GL_RGB/GL_RED mismatch in glTexImage2D() below.
 */
int main( int argc, const char* argv[] )
{
if( SDL_Init( SDL_INIT_EVERYTHING ) == -1 ) return EXIT_FAILURE;
// Request a 3.2 core-profile, double-buffered context with a depth buffer.
SDL_GL_SetAttribute( SDL_GL_CONTEXT_MAJOR_VERSION, 3 );
SDL_GL_SetAttribute( SDL_GL_CONTEXT_MINOR_VERSION, 2 );
SDL_GL_SetAttribute( SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE );
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );
SDL_GL_SetAttribute( SDL_GL_DEPTH_SIZE, 24 );
SDL_Window* window = SDL_CreateWindow( "OpenGL + Freetype",
SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
1024, 768,
SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN );
SDL_GLContext glcontext = SDL_GL_CreateContext( window );
if( glcontext == nullptr ) return EXIT_FAILURE;
// Core profiles need glewExperimental for GLEW to resolve entry points.
glewExperimental = GL_TRUE;
if( glewInit() != GLEW_OK ) return EXIT_FAILURE;
SDL_GL_SetSwapInterval( 1 ); // vsync
shader.LoadFromFile( GL_VERTEX_SHADER, "shaders/shader.vert" );
shader.LoadFromFile( GL_FRAGMENT_SHADER, "shaders/shader.frag" );
//compile and link shader
shader.CreateAndLinkProgram();
shader.Use();
shader.AddAttribute( "vVertex" );
shader.AddUniform( "textureMap" );
// The sampler reads from texture unit 0.
glUniform1i( shader( "textureMap" ), 0 );
shader.UnUse();
//setup quad geometry — xy = position, zw = UV packed into one vec4
vertices[0] = glm::vec4( -1.0, -1.0, 0, 1 );
vertices[1] = glm::vec4( 1.0, 1.0, 1, 0 );
vertices[2] = glm::vec4( 1.0, -1.0, 1, 1 );
vertices[3] = glm::vec4( -1.0, 1.0, 0, 0 );
//fill quad indices array
GLushort* id = &indices[0];
*id++ = 0;
*id++ = 1;
*id++ = 2;
*id++ = 0;
*id++ = 1;
*id++ = 3;
glGenVertexArrays( 1, &vaoID );
glGenBuffers( 1, &vboVerticesID );
glGenBuffers( 1, &vboIndicesID );
glBindVertexArray( vaoID );
glBindBuffer( GL_ARRAY_BUFFER, vboVerticesID );
glBufferData( GL_ARRAY_BUFFER, sizeof( vertices ), &vertices[0], GL_DYNAMIC_DRAW );
glEnableVertexAttribArray( shader["vVertex"] );
// NOTE(review): sizeof(shader["vVertex"]) is the size of the attribute
// *location* value, not a component count — it only works here by
// coincidence; this should simply be 4 (vec4).
glVertexAttribPointer( shader["vVertex"], sizeof( shader["vVertex"] ), GL_FLOAT, GL_FALSE, 0, 0 );
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, vboIndicesID );
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( indices ), &indices[0], GL_STATIC_DRAW );
glGenTextures( 1, &textureID );
glActiveTexture( GL_TEXTURE0 );
glBindTexture( GL_TEXTURE_2D, textureID );
// glyph bitmaps are tightly packed, so unpack alignment must be 1
glPixelStorei( GL_UNPACK_ALIGNMENT, 1 );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
FT_Library library;
FT_Face face;
bool error = FT_Init_FreeType( &library );
if( error ) return EXIT_FAILURE;
error = FT_New_Face( library,
"dejavu.ttf",
0,
&face );
if( error ) return EXIT_FAILURE;
error = FT_Set_Pixel_Sizes( face, 0, 16 );
if( error ) return EXIT_FAILURE;
// NOTE(review): this second call overrides the size-16 call just above —
// the 450 workaround exists only because of the format bug below.
FT_Set_Pixel_Sizes( face, 0, 450 );
FT_UInt glyph_index = FT_Get_Char_Index( face, 282 ); // letter Ě
error = FT_Load_Glyph( face, glyph_index, FT_LOAD_DEFAULT );
if( error ) return EXIT_FAILURE;
error = FT_Render_Glyph( face->glyph, FT_RENDER_MODE_NORMAL );
if( error ) return EXIT_FAILURE;
// NOTE(review): FT_RENDER_MODE_NORMAL produces an 8-bit single-channel
// bitmap, so the *format* argument here should be GL_RED, not GL_RGB —
// this mismatch is exactly what the answer below fixes.
glTexImage2D( GL_TEXTURE_2D,
0,
GL_RGB,
face->glyph->bitmap.width,
face->glyph->bitmap.rows,
0,
GL_RGB,
GL_UNSIGNED_BYTE,
face->glyph->bitmap.buffer
);
bool running = true;
while( running )
{
SDL_Event e;
if( SDL_PollEvent( &e ) )
{
switch( e.type )
{
case SDL_QUIT:
running = false;
break;
default:
break;
}
}
glClearColor( 1, 1, 1, 1 );
glClear( GL_COLOR_BUFFER_BIT );
shader.Use(); // bind shader
// NOTE(review): sizeof(indices) is 12 bytes (6 GLushorts), but this
// argument is an *element count* — it should be 6, not sizeof(indices).
glDrawElements( GL_TRIANGLES, sizeof( indices ), GL_UNSIGNED_SHORT, 0 );
shader.UnUse(); // unbind shader
SDL_GL_SwapWindow( window );
}
return 0;
}
FT_RENDER_MODE_NORMAL results in a single-channel bitmap, not RGB:
FT_RENDER_MODE_NORMAL This is the default render mode; it corresponds to 8-bit anti-aliased bitmaps.
Try GL_RED for format in your glTexImage2D() call instead of GL_RGB:
glTexImage2D( GL_TEXTURE_2D,
0,
GL_RGB,
face->glyph->bitmap.width,
face->glyph->bitmap.rows,
0,
GL_RED,
GL_UNSIGNED_BYTE,
face->glyph->bitmap.buffer
);

SDL/OpenGL Texture Transparency

I'm just wondering how I can convert an SDL_Surface (loaded from a png via IMG_Load) and stick it on a quad. Here is what I have (mostly just copy pasted from a tutorial I found).
#include "SDL.h"
#include "SDL_opengl.h"
#include "SDL_image.h"
#include <stdio.h>
/*
 * Question code: loads a PNG with SDL_image, uploads it as a GL texture,
 * draws an untextured red quad and a textured quad, then exits after 3 s.
 * The reported problem (black where transparency should be) comes from the
 * points flagged in the answers: no blending enabled, no framebuffer alpha
 * requested, and internalformat 3 (RGB) dropping the alpha channel.
 */
int main(int argc, char *argv[])
{
SDL_Surface *screen;
// Slightly different SDL initialization
if ( SDL_Init(SDL_INIT_VIDEO) != 0 ) {
printf("Unable to initialize SDL: %s\n", SDL_GetError());
return 1;
}
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 ); // *new*
screen = SDL_SetVideoMode( 640, 480, 16, SDL_OPENGL ); // *changed*
if ( !screen ) {
printf("Unable to set video mode: %s\n", SDL_GetError());
return 1;
}
// Set the OpenGL state after creating the context with SDL_SetVideoMode
glClearColor( 0, 0, 0, 0 );
glEnable( GL_TEXTURE_2D ); // Need this to display a texture
glViewport( 0, 0, 640, 480 );
// Pixel-ish coordinate system: origin top-left, y grows downward.
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glOrtho( 0, 640, 480, 0, -1, 1 );
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
// Load the OpenGL texture
GLuint texture; // Texture object handle
SDL_Surface *surface; // Gives us the information to make the texture
if ( (surface = IMG_Load("img/star.png")) ) {
// Check that the image's width is a power of 2
if ( (surface->w & (surface->w - 1)) != 0 ) {
printf("warning: image.bmp's width is not a power of 2\n");
}
// Also check if the height is a power of 2
if ( (surface->h & (surface->h - 1)) != 0 ) {
printf("warning: image.bmp's height is not a power of 2\n");
}
// Have OpenGL generate a texture object handle for us
glGenTextures( 1, &texture );
// Bind the texture object
glBindTexture( GL_TEXTURE_2D, texture );
// Set the texture's stretching properties
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
// Edit the texture object's image data using the information SDL_Surface gives us
// NOTE(review): internalformat 3 means GL_RGB — the alpha channel in the
// GL_RGBA source data is discarded here; one answer suggests 4/GL_RGBA.
// Also assumes the surface's pixel layout really is RGBA — confirm.
glTexImage2D( GL_TEXTURE_2D, 0, 3, surface->w, surface->h, 0,
GL_RGBA, GL_UNSIGNED_BYTE, surface->pixels );
}
else {
printf("SDL could not load image.bmp: %s\n", SDL_GetError());
SDL_Quit();
return 1;
}
// Free the SDL_Surface only if it was successfully created
if ( surface ) {
SDL_FreeSurface( surface );
}
// Clear the screen before drawing
glClear( GL_COLOR_BUFFER_BIT );
// Bind the texture to which subsequent calls refer to
glBindTexture( GL_TEXTURE_2D, texture );
// First quad: texturing disabled, drawn in flat red.
glColor3f(1,0,0);
glDisable( GL_TEXTURE_2D );
glBegin( GL_QUADS );
// Top-left vertex (corner)
glTexCoord2i( 0, 0 );
glVertex3f( 0, 0, 0 );
// Bottom-left vertex (corner)
glTexCoord2i( 1, 0 );
glVertex3f( 328, 0, 0 );
// Bottom-right vertex (corner)
glTexCoord2i( 1, 1 );
glVertex3f( 328, 328, 0 );
// Top-right vertex (corner)
glTexCoord2i( 0, 1 );
glVertex3f( 0, 328, 0 );
glEnd();
// Second quad: textured, color reset to white so the texels show as-is.
// NOTE(review): blending is never enabled, so texel alpha has no effect —
// this is what the accepted answer addresses.
glColor3f(1,1,1);
glEnable( GL_TEXTURE_2D );
glBegin( GL_QUADS );
// Top-left vertex (corner)
glTexCoord2i( 0, 0 );
glVertex3f( 100, 100, 0 );
// Bottom-left vertex (corner)
glTexCoord2i( 1, 0 );
glVertex3f( 228, 100, 0 );
// Bottom-right vertex (corner)
glTexCoord2i( 1, 1 );
glVertex3f( 228, 228, 0 );
// Top-right vertex (corner)
glTexCoord2i( 0, 1 );
glVertex3f( 100, 228, 0 );
glEnd();
SDL_GL_SwapBuffers();
// Wait for 3 seconds to give us a chance to see the image
SDL_Delay(3000);
// Now we can delete the OpenGL texture and close down SDL
glDeleteTextures( 1, &texture );
SDL_Quit();
return 0;
}
The texture renders, but where there should be transparency, there is just black.
I don't see you enabling blending anywhere. You'll need some variation of:
glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
See here: http://www.opengl.org/resources/faq/technical/transparency.htm
To have transparency, you need an alpha channel on your texture but, more importantly, in your framebuffer. This is done with SDL_GL_SetAttribute, for example:
SDL_GL_SetAttribute( SDL_GL_RED_SIZE, 8 );
SDL_GL_SetAttribute( SDL_GL_GREEN_SIZE, 8 );
SDL_GL_SetAttribute( SDL_GL_BLUE_SIZE, 8 );
SDL_GL_SetAttribute( SDL_GL_ALPHA_SIZE, 8 );
This will request a framebuffer with at least those sizes, and you'll be able to use blending. Remember to enable blending and set a blending function.
Have a look at your call glTexImage2D( GL_TEXTURE_2D, 0, 3, surface->w, surface->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, surface->pixels );. You specify 3 color components for the internal format. Try 4 (with alpha channel), or GL_RGBA (more about this).
I've checked your code on my computer and it runs correctly. Have you set 'opengl32.lib' in your linker?
Being the brilliant mind that I am, my issue was that I had glEnable(GL_DEPTH_TEST) on. Removing that line made everything work; I'll worry about later issues later. Have fun!