Do I pass the wrong data to glTexImage2D?

I'm trying to make an OpenGL texture by populating a pixel buffer with data from a baked font, essentially turning each value in the font array into a bitmap.
The problem is that when I display the full texture I get noise; however, an 8x8 texture built from a single glyph displays correctly.
The pixel buffer is 8-bit monochrome, so I pass GL_ALPHA as the buffer format.
I tried the 32bpp GL_RGBA format as well, and it yields the same result.
DebugFont
LoadBakedFont(void)
{
glEnable(GL_BLEND);
glEnable(GL_TEXTURE_2D);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
unsigned char baked_font[128][8] = {}; //In my source code this is not empty :)
unsigned char *pixels = (unsigned char*)malloc(sizeof(unsigned char) * 128 * 8 * 8);
memset(pixels, 0, sizeof(unsigned char) * 128 * 8 * 8);
int counter = 0;
for(int i = 0; i < 128; ++i)
{
for(int j = 0; j < 8; ++j)
{
for(int k = 0; k < 8; ++k)
{
unsigned char val = (baked_font[i][j] >> k & 1);
pixels[counter++] = val == 1 ? 0xff : 0x00;
}
}
}
//Renders the exclamation mark perfectly
for(int y = 0; y < 8; ++y)
{
for(int x = 0; x < 8; ++x)
{
unsigned char *test = pixels + (0x21 * 64);
if(test[y * 8 + x])
printf("#");
else
printf(".");
}
printf("\n");
}
//POD struct
DebugFont font;
glGenTextures(1, &font.tex);
glBindTexture(GL_TEXTURE_2D, font.tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, 8 * 128, 8, 0, GL_ALPHA, GL_UNSIGNED_BYTE, pixels);
glBindTexture(GL_TEXTURE_2D, 0);
free(pixels);
return font;
}
void
DrawTexture(DebugFont font)
{
glBindTexture(GL_TEXTURE_2D, font.tex);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex2f(0,0);
glTexCoord2f(1.0f, 0.0f); glVertex2f(8 * 128,0);
glTexCoord2f(1.0f, 1.0f); glVertex2f(8 * 128, 8);
glTexCoord2f(0.0f, 1.0f); glVertex2f(0, 8);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
}
[Screenshot: random noise when rendering the full texture]
[Screenshot: the exclamation mark rendered correctly]

The way you arrange the data makes sense for a tall 8x1024 image where each 8x8 block makes up a character.
But you load it as a 1024x8 image instead, which puts all the pixels in the wrong places.
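A minimal sketch of the fix, assuming you keep the 1024x8 upload: index the buffer directly so that row j of glyph i lands at j * (8 * 128) + i * 8, instead of writing each glyph as a contiguous 64-byte block:
// Row j of glyph i starts at j * (8 * 128) + i * 8 in a 1024-wide, 8-tall atlas.
for(int i = 0; i < 128; ++i)
{
    for(int j = 0; j < 8; ++j)
    {
        for(int k = 0; k < 8; ++k)
        {
            unsigned char val = (baked_font[i][j] >> k & 1);
            pixels[j * (8 * 128) + i * 8 + k] = val == 1 ? 0xff : 0x00;
        }
    }
}
Alternatively, keep the loop as it is and call glTexImage2D with a width of 8 and a height of 8 * 128, then pick out each glyph with per-character texture coordinates.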

Related

SDL_TTF rendering garbage to texture

I am attempting to use OpenGL and SDL, using SDL_ttf to render text to a texture, but the code is rendering garbage.
My "Render to texture code":
GLuint textToTexture(std::string & text, TTF_Font* font, glm::vec4 textColour, glm::vec4 bgColour)
{
if (!TTF_WasInit())
{
if (TTF_Init() == -1)
exit(6);
}
SDL_Color colour = { (Uint8)(textColour.r*255), (Uint8)(textColour.g*255), (Uint8)(textColour.b*255), (Uint8)(textColour.a*255) };
SDL_Color bg = { (Uint8)(bgColour.r*255), (Uint8)(bgColour.g*255), (Uint8)(bgColour.b*255), (Uint8)(bgColour.a*255) };
SDL_Surface *stringImage = NULL;
stringImage = TTF_RenderText_Blended(font, text.c_str(), colour);
if (stringImage == NULL)
{
exit(5);
}
GLuint trueH = powerofTwo(stringImage->h);
GLuint trueW = powerofTwo(stringImage->w);
unsigned char* pixels = NULL;
GLuint w = stringImage->w;
GLuint h = stringImage->h;
GLuint colours = stringImage->format->BytesPerPixel;
pixels = padTexture((unsigned char*)stringImage->pixels, w, h, pixels, trueW, trueH, colours);
GLuint format, internalFormat;
if (colours == 4) {
if (stringImage->format->Rmask == 0x000000ff)
format = GL_RGBA;
else
format = GL_BGRA;
}
else {
// no alpha
if (stringImage->format->Rmask == 0x000000ff)
format = GL_RGB;
else
format = GL_BGR;
}
internalFormat = (colours == 4) ? GL_RGBA : GL_RGB;
GLuint texId = 0;
//GLuint texture;
glGenTextures(1, &texId);
glBindTexture(GL_TEXTURE_2D, texId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, trueW, trueH, 0,format, GL_UNSIGNED_BYTE, pixels);
// SDL surface was used to generate the texture but is no longer
// required. Release it to free memory
SDL_FreeSurface(stringImage);
free(pixels);
return texId;
}
The code for computing the correct dimensions for padding:
int powerofTwo(int num)
{
if (num != 0)
{
num--;
num |= num >> 1; // Propagate the highest set bit into every lower bit,
num |= num >> 2; // so the increment below rounds num up to the next power of two.
num |= num >> 4;
num |= num >> 8;
num |= num >> 16;
num++;
}
return num;
}
and finally, the code that copies the bytes to a texture of the correct dimensions:
unsigned char* padTexture(unsigned char * src, int srcW, int srcH, unsigned char * dest, int width, int height, int bpp)
{
dest = (unsigned char*)calloc(1, width*height*bpp);
for (int i = 0; i < srcH; i++)
{
memcpy(dest + (width*i*bpp),src + (srcW*i*bpp), srcW*bpp);
}
return dest;
}
The result of this code is as follows: [screenshot: garbled texture]
I have confirmed and error checked that SDL_TTF is properly initialized elsewhere in the codebase, and that the font is also being loaded.
I have tested with three different ttf fonts, with the same results.
Also, if I use any other TTF rendering function (Shaded, Solid, etc.), a solid quad is rendered, and the "colours" variable in the textToTexture function also ends up as 1.
Additional:
As I previously stated, I tested with three ttf fonts:
MavenPro-Regular,
HelveticaNeueLTStd-Th
and another I found off the internet.
I was trying to render the string "Select Scenario".
The pre-padding image dimensions are 138x25 pixels.
The post-padding image dimensions are 256x32 pixels.
Update 1:
After fixing the bpp issue, the new texture is as follows:
This image changes every time I run the program.
Update 2:
After fixing the additional errors spotted in the padding code and in setting the pixel data on the texture itself, when I use TTF_RenderText_Blended all I get is a black quad, and when I use TTF_RenderText_Shaded I get:
Update 3:
I used SDL_SaveBMP immediately before calling the GL code and after calling TTF_RenderText_Blended; the result was a completely white image (matching the given text colour).
When I do the same using TTF_RenderText_Solid, the saved image is as it should be, but it is rendered by OpenGL like the images you see above.
SDL_TTF initialized fine, the fonts load without error, and the text rendering returns no errors, so I can't think what to do next.
Update 4:
I have since refactored all the TTF code into a single function and removed the padding code (as modern OpenGL doesn't seem to care about it). However, despite all project settings and code now being identical to a test project that is known to work on the same hardware, the problem persists.
GLuint textToTexture(const char * text, const char * font, glm::vec4 textColour, glm::vec4 bgColour, unsigned int & texID)
{
if (!TTF_WasInit()) {
if (TTF_Init() == -1)
exit(6);
}
SDL_Color colour = { (Uint8)(textColour.r * 255), (Uint8)(textColour.g * 255), (Uint8)(textColour.b * 255),(Uint8)(textColour.a * 255) };
SDL_Color bg = { (Uint8)(bgColour.r * 255), (Uint8)(bgColour.g * 255), (Uint8)(bgColour.b * 255),255 };
TTF_Font* fontObj = TTF_OpenFont(font, 24);
if (!fontObj)
{
SDL_ShowSimpleMessageBox(SDL_MESSAGEBOX_ERROR,
"Texture Error",
"Cannot load font to create texture.",
NULL);
return 0;
}
SDL_Surface *image = NULL;
image = TTF_RenderText_Blended(fontObj, text, colour);
if (image == NULL)
{
exit(5);
//exitFatalError("String surface not created.");
std::cout << "String surface not created." << std::endl;
}
unsigned char* pixels = NULL;
GLuint w = image->w;
GLuint h = image->h;
GLuint colours = image->format->BytesPerPixel;
GLuint externalFormat, internalFormat;
SDL_PixelFormat *format = image->format;
if (colours == 4) {
if (image->format->Rmask == 0x000000ff)
externalFormat = GL_RGBA;
else
externalFormat = GL_BGRA;
}
else {
// no alpha
if (image->format->Rmask == 0x000000ff)
externalFormat = GL_RGB;
else
externalFormat = GL_BGR;
}
internalFormat = (colours == 4) ? GL_RGBA : GL_RGB;
GLuint texId = 0;
//GLuint texture;
glGenTextures(1, &texID);
glBindTexture(GL_TEXTURE_2D, texID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, w, h, 0, externalFormat, GL_UNSIGNED_BYTE, image->pixels);
//glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, trueW, trueH, 0, externalFormat, GL_UNSIGNED_BYTE, pixels);
glGenerateMipmap(GL_TEXTURE_2D);
//// SDL surface was used to generate the texture but is no longer
//// required. Release it to free memory
SDL_FreeSurface(image);
TTF_CloseFont(fontObj);
return texID;
}
I have a workaround that saves the image to bmp, then reloads it and creates a texture, but it only works when I use TTF_RenderText_Shaded. If I use TTF_RenderText_Blended, I get a single-colour image which corresponds to the text colour.
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, trueH, trueW, 0,format, GL_UNSIGNED_BYTE, pixels);
The trueH and trueW arguments are swapped; glTexImage2D takes width before height.
memcpy(src + (srcW*i*bpp), dest + (width*i*bpp), srcW*bpp);
The source and destination arguments are reversed; memcpy copies from the second argument into the first.
dest = (unsigned char*)calloc(0, width*height*bpp);
This allocates 0 elements of size width*height*bpp, i.e. 0 bytes. The first argument should be 1, not 0.
Here is a complete example:
#include <SDL2/SDL.h>
#include <GL/gl.h>
#include <SDL2/SDL_ttf.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
static unsigned char* padTexture(unsigned char * src, int srcW, int srcH, unsigned char * dest, int width, int height, int bpp, const SDL_Palette *palette)
{
int dst_bpp = (bpp == 1) ? 4 : bpp;
dest = (unsigned char*)calloc(1, width*height*dst_bpp);
if(bpp != 1) {
for (int i = 0; i < srcH; i++)
{
memcpy(dest + (width*i*bpp), src + (srcW*i*bpp), srcW*bpp);
}
} else {
/* indexed - read colours from palette */
for(int i = 0; i < srcH; i++) {
for(int j = 0; j < srcW; j++) {
memcpy(dest + (width*i+j)*dst_bpp,
&palette->colors[src[srcW*i+j]], sizeof(SDL_Color));
}
}
}
return dest;
}
static int powerofTwo(int num) {
if (num != 0)
{
num--;
num |= num >> 1; // Propagate the highest set bit into every lower bit,
num |= num >> 2; // so the increment below rounds num up to the next power of two.
num |= num >> 4;
num |= num >> 8;
num |= num >> 16;
num++;
}
return num;
}
static GLuint textToTexture(const char *text, TTF_Font* font) {
if (!TTF_WasInit()) {
if (TTF_Init() == -1)
exit(6);
}
SDL_Color colour = { 255, 255, 255, 255 };
SDL_Color bg = { 0, 0, 0, 255 };
SDL_Surface *stringImage = NULL;
// stringImage = TTF_RenderText_Blended(font, text, colour);
stringImage = TTF_RenderText_Shaded(font, text, colour, bg);
if (stringImage == NULL) {
exit(5);
}
GLuint trueH = powerofTwo(stringImage->h);
GLuint trueW = powerofTwo(stringImage->w);
unsigned char* pixels = NULL;
GLuint w = stringImage->w;
GLuint h = stringImage->h;
GLuint colours = stringImage->format->BytesPerPixel;
pixels = padTexture((unsigned char*)stringImage->pixels, w, h, pixels, trueW, trueH,
colours, stringImage->format->palette);
GLuint format, internalFormat;
/* If indexed, want resulting image to be 32bit */
if(colours == 1) {
colours = 4;
}
if (colours == 4) {
if (stringImage->format->Rmask == 0x000000ff)
format = GL_RGBA;
else
format = GL_BGRA;
}
else {
// no alpha
if (stringImage->format->Rmask == 0x000000ff)
format = GL_RGB;
else
format = GL_BGR;
}
internalFormat = (colours == 4) ? GL_RGBA : GL_RGB;
GLuint texId = 0;
//GLuint texture;
glGenTextures(1, &texId);
glBindTexture(GL_TEXTURE_2D, texId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, trueW, trueH, 0,format, GL_UNSIGNED_BYTE, pixels);
// SDL surface was used to generate the texture but is no longer
// required. Release it to free memory
SDL_FreeSurface(stringImage);
free(pixels);
return texId;
}
int main(int argc, char* argv[])
{
SDL_Init(SDL_INIT_VIDEO);
TTF_Init();
SDL_Window *window = SDL_CreateWindow("SDL2 Example", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 600, 400, SDL_WINDOW_OPENGL);
SDL_GLContext gl_ctx = SDL_GL_CreateContext(window);
TTF_Font *font = TTF_OpenFont(".fonts/tahoma.ttf", 16);
if(font) {
printf("font loaded\n");
textToTexture("Select Scenario", font);
TTF_CloseFont(font);
}
int quit = 0;
while(!quit) {
SDL_Event ev;
while(SDL_PollEvent(&ev)) {
if(ev.type == SDL_QUIT || ev.type == SDL_KEYUP) {
quit = 1;
}
}
glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glColor3f(1.0f, 1.0f, 1.0f);
glBegin(GL_QUADS);
glTexCoord2f(0, 1);
glVertex2f(-0.5, -0.5);
glTexCoord2f(0, 0);
glVertex2f(-0.5, 0.5);
glTexCoord2f(1, 0);
glVertex2f(0.5, 0.5);
glTexCoord2f(1, 1);
glVertex2f(0.5, -0.5);
glEnd();
glFlush();
SDL_GL_SwapWindow(window);
}
SDL_GL_DeleteContext(gl_ctx);
SDL_DestroyWindow(window);
TTF_Quit();
SDL_Quit();
return 0;
}

OpenGL changing color of generated texture

I'm creating a sheet of characters and symbols from a font file, which works fine, except on the generated sheet all the pixels are black (with varying alpha). I would prefer them to be white so I can apply color multiplication and have different colored text. I realize that I can simply invert the color in the fragment shader, but I want to reuse the same shader for all my GUI elements.
I'm following this tutorial: http://en.wikibooks.org/wiki/OpenGL_Programming/Modern_OpenGL_Tutorial_Text_Rendering_02
Here's a snippet:
// Create map texture
glActiveTexture(GL_TEXTURE0);
glGenTextures(1, &map);
glBindTexture(GL_TEXTURE_2D, map);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, mapWidth, mapHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Draw bitmaps onto map
for (uint i = start; i < end; i++) {
charInfo curChar = character[i];
if (FT_Load_Char(face, i, FT_LOAD_RENDER)) {
cout << "Loading character " << (char)i << " failed!" << endl;
continue;
}
glTexSubImage2D(GL_TEXTURE_2D, 0, curChar.mapX, 0, curChar.width, curChar.height, GL_ALPHA, GL_UNSIGNED_BYTE, glyph->bitmap.buffer);
}
The buffer of each glyph contains values of 0-255 for the alpha of the pixels. My question is, how do I generate white colored pixels instead of black? Is there a setting for this? (I've tried some blend modes but without success)
Since you create the texture with
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, mapWidth, mapHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
you can either change the GL_RGBA to GL_RED (or GL_LUMINANCE for pre-3.0 OpenGL) or you can create the RGBA buffer and copy the glyph data there.
I.e., you have
glyph->bitmap.buffer
then you do
unsigned char* glyphRGBA = new unsigned char[curChar.width * curChar.height * 4];
for(int j = 0; j < curChar.height; j++)
    for(int i = 0; i < curChar.width; i++)
    {
        int ofs = j * curChar.width + i;
        // each destination pixel is 4 bytes, so scale the offset by 4
        for(int k = 0; k < 3; k++)
            glyphRGBA[4 * ofs + k] = YourTextColor[k];
        // set alpha
        glyphRGBA[4 * ofs + 3] = glyph->bitmap.buffer[ofs];
    }
In the code above, YourTextColor is an unsigned char[3] array with the RGB components of the text color. The glyphRGBA array can be fed to glTexSubImage2D.
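With that, the upload call from the question keeps the same shape, just with GL_RGBA as the source format (a sketch reusing the loop variables above):
// Upload the expanded RGBA glyph instead of the 8-bit alpha buffer.
glTexSubImage2D(GL_TEXTURE_2D, 0, curChar.mapX, 0, curChar.width, curChar.height, GL_RGBA, GL_UNSIGNED_BYTE, glyphRGBA);
delete[] glyphRGBA;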

OpenGL fails when resizing buffer

I need to update an array of pixels to the screen every frame. It works initially; however, when I try to resize the screen it glitches and eventually throws EXC_BAD_ACCESS. I have already checked that the buffer is allocated to the correct size before every frame, but that does not seem to affect the result.
#include <stdio.h>
#include <stdlib.h>
#include <GLUT/GLUT.h>
unsigned char *buffer = NULL;
int width = 400, height = 400;
unsigned int screenTexture;
void Display()
{
for (int y = 0; y < height; y+=4) {
for (int x = 0; x < width; x++) {
buffer[(x + y * width) * 3] = 255;
}
}
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
// This function results in EXC_BAD_ACCESS 1, although the buffer is always correctly allocated
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, buffer);
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, width, height, 0, 0, 1);
glMatrixMode(GL_MODELVIEW);
glBegin (GL_QUADS);
glTexCoord2f(0,0); glVertex2i(0, 0);
glTexCoord2f(1,0); glVertex2i(width,0);
glTexCoord2f(1,1); glVertex2i(width,height);
glTexCoord2f(0,1); glVertex2i(0, height);
glEnd ();
glFlush();
glutPostRedisplay();
}
void Resize(int w, int h)
{
width = w;
height = h;
buffer = (unsigned char *)realloc(buffer, sizeof(unsigned char) * width * height * 3);
if (!buffer) {
printf("Error Reallocating buffer\n");
exit(1);
}
}
int main(int argc, char **argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGB | GLUT_SINGLE);
glutInitWindowSize(width, height);
glutCreateWindow("Rasterizer");
glutDisplayFunc(Display);
glutReshapeFunc(Resize);
glGenTextures(1, &screenTexture);
glBindTexture(GL_TEXTURE_2D, screenTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
glDisable(GL_DEPTH_TEST);
buffer = (unsigned char *)malloc(sizeof(unsigned char) * width * height * 3);
glutMainLoop();
}
After resizing, the screen does not display properly either:
What is causing this problem? The code compiles and runs; you just have to link GLUT and OpenGL.
As #genpfault mentioned, OpenGL reads 4 bytes per pixel instead of your assumption of 3.
Instead of changing GL_UNPACK_ALIGNMENT, you can also change your code to the correct assumption of 4 bytes per pixel via a simple struct:
struct pixel {
unsigned char r, g, b;
unsigned char unused;
};
Then, instead of using the magic constant 3, you can use the much clearer sizeof(struct pixel). This makes it easier to read and to convey the intent of the code, and it doesn't result in any extra code (as the structure is "effectively" an array of 4 bytes).
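A sketch of how that might look with the existing code, assuming the source format is switched to GL_RGBA so OpenGL also consumes 4 bytes per pixel:
// 4 bytes per pixel; sizeof(struct pixel) documents the layout.
struct pixel *buffer = (struct pixel *)malloc(width * height * sizeof(struct pixel));
buffer[x + y * width].r = 255; // instead of buffer[(x + y * width) * 3]
// GL_RGBA makes OpenGL read 4 bytes per pixel, matching the struct; rows are
// then always 4-byte aligned, and the internal format can stay GL_RGB since
// the alpha byte is unused.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, buffer);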
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, buffer);
^^^^^^
GL_UNPACK_ALIGNMENT defaults to 4, not 1. So OpenGL will read 4 bytes for every pixel, not the 3 that you're assuming.
Set GL_UNPACK_ALIGNMENT to 1 using glPixelStorei().
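That is, before the glTexImage2D call:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);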
It sounds like you found something that works, but I don't think the problem was properly diagnosed. I believe the biggest issue is in the way you initialize your texture data here:
for (int y = 0; y < height; y+=4) {
for (int x = 0; x < width; x++) {
buffer[(x + y * width) * 3] = 255;
}
}
This only sets data in every 4th row, and then only for every 3rd byte within those rows. To initialize all the data to white, you need to increment the row number (y) by 1 instead of 4, and set all 3 components inside the loop:
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
buffer[(x + y * width) * 3 ] = 255;
buffer[(x + y * width) * 3 + 1] = 255;
buffer[(x + y * width) * 3 + 2] = 255;
}
}
You also need to set GL_UNPACK_ALIGNMENT to 1:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
This controls the row alignment (not the pixel alignment, as suggested in a couple other answers). The default value for GL_UNPACK_ALIGNMENT is 4. But with 3 bytes per pixel in the GL_RGB format you are using, the size of a row is only a multiple of 4 bytes if the number of pixels is a multiple of 4. So for tightly packed rows with 3 bytes/pixel, the value needs to be set to 1.
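Equivalently, if you keep the default alignment of 4, each source row has to be padded out to a multiple of 4 bytes; a sketch of the general stride computation:
// Bytes per row once padded up to the unpack alignment (4 by default).
int align = 4;
int stride = (width * 3 + align - 1) / align * align;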

Loading Images (using their RGB(A) pixel data) in OpenGL textures

In the main function:
img = cvLoadImage("test.JPG");
//openCV functions to load and create matrix
CvMat *mat = cvCreateMat(img->height,img->width,CV_8SC3 );
cvGetMat( img, mat,0,1);
//creating the 3-dimensional array during runtime
data = new float**[img->height];
for(int i=0;i<img->height;i++){
data[i] = new float*[img->width];
}
for(int i=0;i<img->height;i++){
for(int j=0;j<img->width;j++)
data[i][j] = new float[3];
}
//setting RGB values
for(int i=0;i<img->height;i++)
{
for(int j=0;j<img->width;j++)
{
CvScalar scal = cvGet2D( mat,i,j);
data[i][j][0] = scal.val[0];
data[i][j][1] = scal.val[1];
data[i][j][2] = scal.val[2];
}
}
I am using OpenCV to get the image pixel data, storing it in the dynamically created matrix "data". Now I generate textures and bind them:
glGenTextures(1,&texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV,GL_TEXTURE_ENV_MODE,GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0 ,GL_RGB, img->width,img->height,0,GL_RGB,GL_FLOAT,data);
glBindTexture(GL_TEXTURE_2D, texName);
glBegin(GL_QUADS);
glTexCoord2f(0,0);
glVertex3f(-1,-1,0);
glTexCoord2f(0,1);
glVertex3f(-1,1,0);
glTexCoord2f(1,1);
glVertex3f(1,1,0);
glTexCoord2f(1,0);
glVertex3f(1,-1,0);
glEnd();
There are no compilation errors, but at runtime the window displays the square I made, not the image I tried to convert into a texture.
How do I load an image into a texture using the pixel data I extract with OpenCV? I have printed the RGB values and they seem legitimate, and the number of triplets printed is as expected.
glBindTexture should be called before the glTexParameteri...glTexImage2D calls, so OpenGL knows which texture you're setting up.
glEnable(GL_TEXTURE_2D);
glGenTextures(1,&texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV,GL_TEXTURE_ENV_MODE,GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D, 0 ,GL_RGB, img->width,img->height,0,GL_RGB,GL_FLOAT,data);
More importantly, you are not setting up your data variable correctly:
float* data = new float[img->height * img->width * 3];
for (int i = 0; i < img->height; i++)
{
    for (int j = 0; j < img->width; j++)
    {
        CvScalar scal = cvGet2D(mat, i, j);
        // 3 floats per pixel, so scale the pixel index by 3
        data[(i * img->width + j) * 3 + 0] = scal.val[0];
        data[(i * img->width + j) * 3 + 1] = scal.val[1];
        data[(i * img->width + j) * 3 + 2] = scal.val[2];
    }
}
Also, you might need to swap the order of the color components and/or convert them to the 0..1 range; I don't know how OpenCV loads images.
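For what it's worth, cvLoadImage does return channels in BGR order, and CvScalar values are in the 0..255 range, while GL_FLOAT uploads expect 0..1 for fixed-point internal formats. A sketch of both fixes, keeping the float upload path:
for (int i = 0; i < img->height; i++)
{
    for (int j = 0; j < img->width; j++)
    {
        CvScalar scal = cvGet2D(mat, i, j);
        // OpenCV stores BGR, so reverse the channels and scale to 0..1
        data[(i * img->width + j) * 3 + 0] = (float)(scal.val[2] / 255.0); // R
        data[(i * img->width + j) * 3 + 1] = (float)(scal.val[1] / 255.0); // G
        data[(i * img->width + j) * 3 + 2] = (float)(scal.val[0] / 255.0); // B
    }
}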

Can load a single texture but having problems loading into an array

I am having trouble loading textures into an array of GLuints, but the problem doesn't occur when loading a single texture. When I debug it, I see that textureIDs isn't assigned any values and holds only 0s, even after setTexture(...) is called,
whereas a single texture loaded into textureID works fine. Either I'm missing something obvious or my understanding of OpenGL or C++ is lacking.
Relevant functions within this context
GLuint textureIDs[3];
GLuint textureID;
Ground Constructor
Ground::Ground(void) :
textureId(0),groundTextures()
{
setAllTexture();
}
draw(): the ground function that draws... well, the ground
void Ground::draw()
{
glPushMatrix();
//Ground
glPushMatrix();
glTranslatef(0,-1,0); //all buildings and ground
//ground
glPushMatrix();
glTranslatef(-5,0,-20);
glScalef(40,0.2,40);
setTexture("Textures/rue2.bmp", textureId, true, true);
drawRectangle(1.0f);
glPopMatrix();
glPopMatrix();
}
setTexture(): sets a texture
void Ground::setTexture(const char* textureName, GLuint& textId, bool stretchX, bool stretchZ)
{
if (textId == 0)
{
textId = (GLuint)createTexture(textureName);
}
if (textId != 0)
{
//enable texture coordinate generation
glEnable(GL_NORMALIZE);
glEnable(GL_DEPTH_TEST);
glEnable(GL_COLOR_MATERIAL);
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND); //Enable alpha blending
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); //Set the blend function
glBindTexture(GL_TEXTURE_2D, textId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, stretchX ? GL_REPEAT : GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, stretchZ ? GL_REPEAT : GL_CLAMP);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
}
}
setAllTexture(): loads textures into the array of GLuints
int Ground::setAllTexture()
{
setTexture("Textures/asphalt.bmp",groundTextures[0],false, false );
//set up textures for all of them and bind them to ground textures.
glBindTexture(GL_TEXTURE_2D, groundTextures[0]);
setTexture("Textures/concreteFloor.bmp",groundTextures[1],false, false);
//set up textures for all of them and bind them to ground textures.
glBindTexture(GL_TEXTURE_2D, groundTextures[1]);
setTexture("Textures/dirtyGrass.bmp",groundTextures[2],false, false);
//set up textures for all of them and bind them to ground texture
glBindTexture(GL_TEXTURE_2D, groundTextures[2]);
for ( unsigned int i =0 ; i < 3 ; i ++ )
//check to see if all textures loaded properly
{
if ( groundTextures[i] == 0 )
return false;
}
return true;
//if you're here, every texture was loaded correctly.
}
loadBMP(): loads a 24-bit RGB bitmap
Image* loadBMP(const char* filename) {
ifstream input;
input.open(filename, ifstream::binary);
assert(!input.fail() || !"Could not find file");
char buffer[2];
input.read(buffer, 2);
assert(buffer[0] == 'B' && buffer[1] == 'M' || !"Not a bitmap file");
input.ignore(8);
int dataOffset = readInt(input);
//Read the header
int headerSize = readInt(input);
int width;
int height;
switch(headerSize) {
case 40:
//V3
width = readInt(input);
height = readInt(input);
input.ignore(2);
assert(readShort(input) == 24 || !"Image is not 24 bits per pixel");
assert(readShort(input) == 0 || !"Image is compressed");
break;
case 12:
//OS/2 V1
width = readShort(input);
height = readShort(input);
input.ignore(2);
assert(readShort(input) == 24 || !"Image is not 24 bits per pixel");
break;
case 64:
//OS/2 V2
assert(!"Can't load OS/2 V2 bitmaps");
break;
case 108:
//Windows V4
assert(!"Can't load Windows V4 bitmaps");
break;
case 124:
//Windows V5
assert(!"Can't load Windows V5 bitmaps");
break;
default:
assert(!"Unknown bitmap format");
}
//Read the data
int bytesPerRow = ((width * 3 + 3) / 4) * 4 - (width * 3 % 4);
int size = bytesPerRow * height;
auto_array<char> pixels(new char[size]);
input.seekg(dataOffset, ios_base::beg);
input.read(pixels.get(), size);
//Get the data into the right format
auto_array<char> pixels2(new char[width * height * 3]);
for(int y = 0; y < height; y++) {
for(int x = 0; x < width; x++) {
for(int c = 0; c < 3; c++) {
pixels2[3 * (width * y + x) + c] =
pixels[bytesPerRow * y + 3 * x + (2 - c)];
}
}
}
input.close();
return new Image(pixels2.release(), width, height);
}
createTexture(): creates a texture and returns a GLuint
unsigned int createTexture( const char* imageName )
{
Image* image = loadBMP(imageName);
GLuint textureId = 0;
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGB,image->width, image->height,0,GL_RGB,GL_UNSIGNED_BYTE,image->pixels);
delete image;
return (unsigned int) textureId;
}