Green screen after applying texture from OpenCV Mat - C++

I am trying to render an image from an OpenCV cv::Mat using OpenGL ES. However, all I get is a green screen and nothing else.
I followed this pipeline:
cv::Mat --> CVPixelBufferRef --> CVOpenGLESTextureCacheCreateTextureFromImage --> glBindTexture.
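For reference, the step that copies the Mat's pixels into the two planes of the pixel buffer is not shown below; a minimal sketch of what it could look like, assuming a BGR cv::Mat and the bi-planar full-range YCbCr format used in the code (the helper name and conversion are illustrative, not my actual code):

#include <opencv2/imgproc.hpp>
#include <CoreVideo/CoreVideo.h>
#include <cstring>
// Sketch: copy a BGR cv::Mat into an NV12 (bi-planar YCbCr) CVPixelBuffer.
// Assumes the buffer has the same (even) width and height as the Mat.
static void copyMatToPixelBuffer(const cv::Mat& bgr, CVPixelBufferRef pixelBuffer)
{
    cv::Mat yuv; // I420 layout: Y plane, then U plane, then V plane
    cv::cvtColor(bgr, yuv, cv::COLOR_BGR2YUV_I420);
    const int w = bgr.cols, h = bgr.rows;
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    // Plane 0: luma, copied row by row because bytesPerRow may be padded.
    uint8_t* dstY = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    const size_t strideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    for (int r = 0; r < h; ++r)
        memcpy(dstY + r * strideY, yuv.ptr(r), w);
    // Plane 1: interleaved CbCr, rebuilt from the separate U and V planes of I420.
    const uint8_t* srcU = yuv.ptr(h);               // U starts right after Y
    const uint8_t* srcV = srcU + (w / 2) * (h / 2); // V follows U
    uint8_t* dstUV = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    const size_t strideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    for (int r = 0; r < h / 2; ++r)
        for (int c = 0; c < w / 2; ++c) {
            dstUV[r * strideUV + 2 * c]     = srcU[r * (w / 2) + c]; // Cb
            dstUV[r * strideUV + 2 * c + 1] = srcV[r * (w / 2) + c]; // Cr
        }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

The filled pixelBuffer is then handed to CVOpenGLESTextureCacheCreateTextureFromImage exactly as in the code below.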
// OpenGL Texture reference for y images.
CVOpenGLESTextureRef lumaTexture;
// OpenGL Texture reference for color images.
CVOpenGLESTextureRef chromaTexture;
// OpenGL Texture cache for the color camera.
CVOpenGLESTextureCacheRef videoTextureCache;
- (void) uploadGLColorTextureFromMat:(cv::Mat)image{
if(image.empty()){
NSLog(#"image empty.");
}else{
// Clear the previous color texture.
if (lumaTexture)
{
CFRelease (lumaTexture);
lumaTexture = NULL;
}
// Clear the previous color texture
if (chromaTexture)
{
CFRelease (chromaTexture);
chromaTexture = NULL;
}
// Displaying an image with width over 1280 is overkill. Downsample it to save bandwidth.
while( image.cols > 2560 )
cv::resize(image, image, cv::Size(image.cols/2,image.rows/2));
// Allow the texture cache to do internal cleanup.
CVOpenGLESTextureCacheFlush(_display.videoTextureCache, 0);
int height = image.rows;
int width = image.cols;
// set pixel buffer attributes so we get an iosurface
NSDictionary *pixelBufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSDictionary dictionary], kCVPixelBufferIOSurfacePropertiesKey,
nil];
// create planar pixel buffer
CVPixelBufferRef pixelBuffer = nil;
CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
(__bridge CFDictionaryRef)pixelBufferAttributes, &pixelBuffer);
// lock pixel buffer
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
// get image details
size_t widthB = CVPixelBufferGetWidth(pixelBuffer);
size_t heightB = CVPixelBufferGetHeight(pixelBuffer);
NSLog(#"%#",(pixelBuffer));
OSType pixelFormat = CVPixelBufferGetPixelFormatType (pixelBuffer);
NSAssert(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, @"YCbCr is expected!");
// Activate the default texture unit.
glActiveTexture (GL_TEXTURE0);
CVReturn err;
// Create a new Y texture from the video texture cache.
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,videoTextureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_RED_EXT,
(int)widthB,
(int)heightB,
GL_RED_EXT,
GL_UNSIGNED_BYTE,
0,
&lumaTexture);
if (err) {
NSLog(#"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
CVPixelBufferRelease(pixelBuffer);
return;
}
// Set good rendering properties for the new texture.
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture), CVOpenGLESTextureGetName(lumaTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// Activate the default texture unit.
glActiveTexture (GL_TEXTURE1);
// Create a new CbCr texture from the video texture cache.
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
videoTextureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_RG_EXT,
(int)widthB/2,
(int)heightB/2,
GL_RG_EXT,
GL_UNSIGNED_BYTE,
1,
&chromaTexture);
if (err)
{
NSLog(#"Error with CVOpenGLESTextureCacheCreateTextureFromImage: %d", err);
return;
}
// Set rendering properties for the new texture.
glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture), CVOpenGLESTextureGetName(_display.chromaTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
}
The render function:
- (void)renderCameraImage
{
if (_useColorCamera)
{
if (!lumaTexture || !chromaTexture)
return;
glActiveTexture(GL_TEXTURE0);
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture),
CVOpenGLESTextureGetName(lumaTexture));
glActiveTexture(GL_TEXTURE1);
glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture),
CVOpenGLESTextureGetName(chromaTexture));
glDisable(GL_BLEND);
[yCbCrTextureShader useShaderProgram];
[yCbCrTextureShader renderWithLumaTexture:GL_TEXTURE0 chromaTexture:GL_TEXTURE1];
}
else
{
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _display.depthAsRgbaTexture);
[rgbaTextureShader useShaderProgram];
[rgbaTextureShader renderTexture:GL_TEXTURE0];
}
glUseProgram (0);
}
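The shader behind yCbCrTextureShader is not shown here; for context, a typical fragment shader for this two-plane setup does a full-range BT.601 conversion, roughly like this sketch (the uniform and varying names are illustrative, not necessarily what the shader class uses):

// Sketch of a full-range BT.601 YCbCr -> RGB fragment shader for a GL_RED luma
// plane on unit 0 and a GL_RG CbCr plane on unit 1 (illustrative names only).
static const char* kYCbCrFragmentShaderSketch = R"(
    precision mediump float;
    varying vec2 v_texCoord;
    uniform sampler2D s_luma;    // texture unit 0: Y
    uniform sampler2D s_chroma;  // texture unit 1: CbCr
    void main()
    {
        float y  = texture2D(s_luma,   v_texCoord).r;
        vec2  uv = texture2D(s_chroma, v_texCoord).rg - vec2(0.5);
        gl_FragColor = vec4(y + 1.402 * uv.y,
                            y - 0.344 * uv.x - 0.714 * uv.y,
                            y + 1.772 * uv.x,
                            1.0);
    }
)";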
I don't get any errors, just a green screen, and I haven't figured out how to solve this problem.
As an output of pixelBuffer I get (the first lines) :
<CVPixelBuffer 0x13e6cd0b0 width=968 height=1296 pixelFormat=420f iosurface=0x140500a28 planes=2>
<Plane 0 width=968 height=1296 bytesPerRow=976>
<Plane 1 width=484 height=648 bytesPerRow=976>
I have already looked at this post, this one, and other posts on Stack Overflow, but they did not help me.
Could you help me figure out how to solve this?

Related

Surface poorly filled with sdl_ttf

I'm trying to make an OpenGL game in C++ and to implement a text system; to do this I'm trying to use SDL_ttf.
I already used SDL_ttf in another project, but with another API, so I wrote the same code, yet it does not fill the pixel data of the surface.
Here is my code:
void Text2Texture::setText(const char * text, size_t fontIndex){
SDL_Color c = {255, 255, 0, 255};
SDL_Surface * surface;
surface = TTF_RenderUTF8_Blended(loadedFonts_[fontIndex], text, c);
if(surface == nullptr) {
fprintf(stderr, "Error TTF_RenderText\n");
return;
}
GLenum texture_format;
GLint colors = surface->format->BytesPerPixel;
if (colors == 4) { // alpha
if (surface->format->Rmask == 0x000000ff)
texture_format = GL_RGBA;
else
texture_format = GL_BGRA_EXT;
} else { // no alpha
if (surface->format->Rmask == 0x000000ff)
texture_format = GL_RGB;
else
texture_format = GL_BGR_EXT;
}
glBindTexture(GL_TEXTURE_2D, textureId_);
glTexImage2D(GL_TEXTURE_2D, 0, colors, surface->w, surface->h, 0, texture_format, GL_UNSIGNED_BYTE, surface->pixels);
///This line tells me the pixel data is 8 bits, which isn't good?
std::cout << "pixel size : " << sizeof(surface->pixels) << std::endl;
///This line gives me the correct result
fprintf(stderr, "texture size : %d %d\n", surface->w, surface->h);
glBindTexture(GL_TEXTURE_2D, 0);
}
As you can see in the comment, the pixels pointer in surface has a size of 8 bits, which is way too low for a texture. I don't know why it does that.
In the end, the texture data appears to be entirely filled with 0 (resulting in a black quad with very basic shaders).
In this project I'm using GLFW to create an OpenGL context, so I'm not using SDL and I did not initialize it.
However, I did initialize SDL_ttf; here is everything I do before calling setText:
std::vector<TTF_Font *> Text2Texture::loadedFonts_;
void Text2Texture::init(){
if(TTF_Init() == -1) {
fprintf(stderr, "TTF_Init: %s\n", TTF_GetError());
}
}
int Text2Texture::loadFont(std::string const& fontPath){
loadedFonts_.emplace_back();
loadedFonts_.back() = TTF_OpenFont(fontPath.data(), 32);
if( loadedFonts_.back() == nullptr ) {
fprintf(stderr, "TTF_OpenFont: %s \n", TTF_GetError());
loadedFonts_.pop_back();
return -1;
}
return ((int)loadedFonts_.size() - 1);
}
///The constructor initializes the texture:
Text2Texture::Text2Texture(){
glGenTextures(1, &textureId_);
glBindTexture(GL_TEXTURE_2D, textureId_);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
My class has a static part; here is its body:
class Text2Texture {
public:
Text2Texture();
void setText(const char * text, size_t fontIndex = 0);
unsigned int getId() const;
// Static part
static void init();
static void quit();
static int loadFont(std::string const& fontPath);
private:
unsigned int textureId_;
// Static part
static std::vector<TTF_Font *> loadedFonts_;
};
I initialize SDL_ttf and load fonts with the static methods, then I create class instances to create specific textures.
If you can find my mistake I would be pleased to read your answer.
(By the way, I'm not really sure using SDL_ttf is the right approach; if you have a better idea I would take it too, but I would like to solve this problem first.)
The format and type parameters of glTexImage2D specify how a single pixel is encoded.
When the font texture is created, each pixel is encoded in a single byte. This means your texture consists of a single color channel, and each pixel is 1 byte.
I'm very sure that colors = surface->format->BytesPerPixel is 1.
Note that it is sufficient to encode the glyph in one color channel, because a glyph only carries information that fits in a single byte.
By default, OpenGL assumes that the start of each row of an image is aligned to 4 bytes. This is because the GL_UNPACK_ALIGNMENT parameter is 4 by default. Since the image has 1 (red) color channel and is tightly packed, the start of a row is possibly misaligned.
Change the GL_UNPACK_ALIGNMENT parameter to 1 before specifying the two-dimensional texture image (glTexImage2D).
Since the texture has only one (red) color channel, the green and blue channels will be 0 and the alpha channel will be 1 when the texture is looked up. But you can make the green, blue and even alpha channels read from the red color channel, too.
This can be achieved by setting the texture swizzle parameters GL_TEXTURE_SWIZZLE_G, GL_TEXTURE_SWIZZLE_B and GL_TEXTURE_SWIZZLE_A, respectively. See glTexParameter.
Further, note that the texture parameters are stored in the texture object. glTexParameter changes the texture object which is currently bound to the specified target of the current texture unit, so it is sufficient to set the parameters once when the texture image is created.
In comparison, glPixelStore changes global state and may have to be reset to its default value after specifying the texture image (if later calls to glTexImage2D rely on it).
The specification of the 2-dimensional texture image and setting the parameters may look as follows:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, surface->w, surface->h, 0,
GL_RED, GL_UNSIGNED_BYTE, surface->pixels);
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_G, GL_RED);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_B, GL_RED);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_A, GL_RED);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

OpenGL texture not rendered properly on model

In my OpenGL application, the texture is not rendered correctly on the model.
Here is a screenshot of the result:
Here is what the bunny should look like:
expected result
Here is the code to load the texture.
stbi_set_flip_vertically_on_load(1);
m_LocalBuffer = stbi_load(path.c_str(), &m_Width, &m_Height, &m_BPP, 0);
GLCall(glGenTextures(1, &m_RendererID));
GLCall(glBindTexture(GL_TEXTURE_2D, m_RendererID));
GLCall(glGenerateMipmap(GL_TEXTURE_2D));
GLenum format = GL_RGBA;
//..switching on m_BPP to set format, omitted here
GLCall(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
GLCall(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GLCall(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE));
GLCall(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE));
GLCall(glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, m_Width, m_Height, 0, format, GL_UNSIGNED_BYTE, m_LocalBuffer));
GLCall(glBindTexture(GL_TEXTURE_2D, 0));
if (m_LocalBuffer) {
stbi_image_free(m_LocalBuffer);
}
Here is the texture file I'm using
Texture File
I downloaded the asset from https://blenderartists.org/t/uv-unwrapped-stanford-bunny-happy-spring-equinox/1101297 (the 3.3Mb link)
Here is the code where I read in the texCoords
for (size_t i = 0; i < mesh->mNumVertices; i++) {
//..read in positions and normals
if (mesh->mTextureCoords[0]) {
vertex.TexCoords.x = mesh->mTextureCoords[0][i].x;
vertex.TexCoords.y = mesh->mTextureCoords[0][i].y;
}
}
I'm loading the model as an OBJ file using Assimp. I just read the texture coords from the result and pass them to the shader. (GLCall is just a debug macro I have in the renderer.)
What could potentially be the cause for this? Let me know if more info is needed. Thanks a lot!
The image seems to be flipped vertically (around the x-axis). To compensate for that, you have to flip the image manually after loading it; or, if you are already flipping the image, you have to omit that. Whether the image has to be flipped or not depends on the image format.
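A minimal sketch of the two options, reusing the names from the question (pick one, not both):

// Option 1: don't flip at load time; drop or disable the stbi flip.
stbi_set_flip_vertically_on_load(0);

// Option 2: keep the flipped load and mirror the V coordinate when importing the mesh.
vertex.TexCoords.x = mesh->mTextureCoords[0][i].x;
vertex.TexCoords.y = 1.0f - mesh->mTextureCoords[0][i].y;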

OpenGL/GLM 3D model texturing issue

I am using the OpenGL, GLM, ILU and GLUT libraries for loading and texturing 3D models. The models appear to load correctly; however, when it comes to texturing, the texture seems to repeat.
I have included two pictures below showing non-textured, textured.
non-textured:
textured:
If you look closely at the last image, the texture is applied at a tiny scale and repeated across the whole model.
For the code, I first start by loading the texture.
ILboolean success = false;
if (ilGetInteger(IL_VERSION_NUM) < IL_VERSION)
{
return false;
}
ilInit(); /*Initialize the DevIL library*/
ilGenImages(1, &ilTextureID); //Generate DevIL image objects
ilBindImage(ilTextureID); /* Binding of image object */
success = ilLoadImage((const ILstring)theFilename); /* Loading of image*/
if (!success)
{
ilDeleteImages(1, &ilTextureID);
return false;
}
success = ilConvertImage(IL_RGBA, IL_UNSIGNED_BYTE); // Convert every colour component into unsigned byte.
if (!success)
{
return false;
}
textureWidth = ilGetInteger(IL_IMAGE_WIDTH);
textureHeight = ilGetInteger(IL_IMAGE_HEIGHT);
glGenTextures(1, &GLTextureID); // GLTexture name generation
glBindTexture(GL_TEXTURE_2D, GLTextureID); // Binding of GLtexture name
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); // Use linear interpolation for magnification filter
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); // Use nearest-neighbour interpolation for minifying filter
glTexImage2D(GL_TEXTURE_2D, 0, ilGetInteger(IL_IMAGE_BPP), ilGetInteger(IL_IMAGE_WIDTH),
ilGetInteger(IL_IMAGE_HEIGHT), 0, ilGetInteger(IL_IMAGE_FORMAT), GL_UNSIGNED_BYTE,
ilGetData()); /* Texture specification */
glBindTexture(GL_TEXTURE_2D, GLTextureID); // Binding of GLtexture name
ilDeleteImages(1, &ilTextureID);
I have tried things like adding,
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
but this just seems to make the model non-textured.
Then I call the model loading method and apply the texture:
m_model = glmReadOBJ(mdlFilename);
glmFacetNormals(m_model);
glmVertexNormals(m_model, 180.0f, false);
m_TextureID = mdlTexture.getTexture();
m_model->textures[m_model->numtextures - 1].id = m_TextureID;
m_model->textures[m_model->numtextures - 1].width = mdlTexture.getTWidth();
m_model->textures[m_model->numtextures - 1].height =mdlTexture.getTHeight();
For the above code, while debugging I am getting negative values for "vertices", "normals" and "facetnorms" of the 3D model, but I am getting values for "numnormals", "numtexcoords" and "numfacetnorms". I'm not entirely sure whether this is normal.
And finally for the rendering of the model:
glPushMatrix();
//transformations here...
glTranslatef(mdlPosition.x, 0.0f, -mdlPosition.z);
glRotatef(mdlRotationAngle, 0, 1, 0);
glScalef(mdlScale.x, mdlScale.y, mdlScale.z);
glmDraw(m_model, GLM_SMOOTH | GLM_TEXTURE | GLM_MATERIAL);
glPopMatrix();

Why are my textures showing up as fractal patterns with my openGL renderer?

I'm working on getting textures to render using OpenGL. I'm part of the way there and stuck.
My goal is to get this picture: http://i.imgur.com/d3kZTsn.png
and this is where I'm at: http://i.imgur.com/uAV8q0W.png
Has anyone seen this issue before?
if (tObject == 0) // We don't yet have an OpenGL texture target
{
// This code counts the number of images and if there are none simply
// returns without doing anything
int nImages = 0;
while (tName[nImages][0] != '\0' && nImages < MAX_IMAGES)
nImages++;
if (nImages < 1)
return;
// To Do
//
// Generate a texture object and place the object's value in the "tObject"
// member, then bind the object to the 2D texture target
glGenTextures(nImages, &tObject);
glBindTexture(GL_TEXTURE_2D, tObject);
for (int nImage = 0; nImage < nImages; nImage++)
{
// This code loads the texture using the windows library's "BitmapFile" object
BitmapFile texture;
if (!texture.Read(tName[nImage]))
complain("Couldn't read texture %s", tName);
GLuint srcFormat, targFormat;
// To Do
//
// First decide which format the texture is. If the texture has 4 bytes
// per pixel then it should be an RGBA texture, if it is 3 bytes per pixel
// then it is an RGB image. Notice though that the byte order for the BitmapFile
// object is reversed, so you need to take that into account in the "source" format
if( texture.BytesPerPixel() == 3 )
{
srcFormat = GL_BGR;
targFormat = GL_RGB;
}
else
{
srcFormat = GL_BGRA;
targFormat = GL_RGBA;
}
// Then you need to set the unpack alignment to tell OpenGL about the structure
// of the data in the image and send the data to OpenGL. If there are multiple files
// then we are manually creating a mipmap here and you will use the "level" parameter
// of glTexImage2D to tell OpenGL which mipmap level is being set. The levels are
// set in the same order as they are stored in the image list.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
if( nImages > 1 )
{
glGenerateMipmap(GL_TEXTURE_2D);
}
glTexImage2D(GL_TEXTURE_2D, nImage, targFormat, texture.Width(), texture.Height(), 0, srcFormat, GL_UNSIGNED_BYTE, texture.ImageData());
}
// Finally, if there is only one image, you need to tell OpenGL to generate a mipmap
if( nImages == 1)
{
glGenerateMipmap(GL_TEXTURE_2D);
}
}
// Here you need to bind the texture to the 2D texture target and set the texture parameters
// You need to set the wrap mode, the minification and magnification filters.
glBindTexture(GL_TEXTURE_2D, tObject);
glTexParameteri(tObject, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(tObject, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(tObject, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(tObject, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
// To Do
//
// For advanced antialiasing set the number of anisotropic samples
GLERR;
I do not understand the logic you are using to call glGenerateMipmap (...). The second parameter to glTexImage2D (...) is the texture LOD - glGenerateMipmap will generate the entire mip pyramid starting with LOD 0. Essentially, you invalidate every one of the calls to glTexImage2D (...) except the first and last iterations of that loop by doing this. It really looks like you either want an array texture, or each one of those images should be a separate texture.
In fact, glGenTextures (...) does not work the way you think it does. You are supposed to pass an array if nImages is > 1. That array will hold nImages-many texture object names. You bind each one and upload image data individually to LOD 0, then you can generate mipmaps.
The following addresses everything I just mentioned:
GLuint* tObjects = NULL;
if (tObjects == NULL) // We don't yet have any OpenGL textures
{
// This code counts the number of images and if there are none simply
// returns without doing anything
int nImages = 0;
while (tName[nImages][0] != '\0' && nImages < MAX_IMAGES)
nImages++;
if (nImages < 1)
return;
tObjects = new GLuint [nImages];
// To Do
//
// Generate multiple texture objects and place the object's values in the "tObjects"
// member, then bind the object to the 2D texture target
glGenTextures (nImages, tObjects);
for (int nImage = 0; nImage < nImages; nImage++)
{
glBindTexture(GL_TEXTURE_2D, tObjects [nImage]);
// This code loads the texture using the windows library's "BitmapFile" object
BitmapFile texture;
if (!texture.Read(tName[nImage]))
complain("Couldn't read texture %s", tName);
GLuint srcFormat, targFormat;
// To Do
//
// First decide which format the texture is. If the texture has 4 bytes
// per pixel then it should be an RGBA texture, if it is 3 bytes per pixel
// then it is an RGB image. Notice though that the byte order for the BitmapFile
// object is reversed, so you need to take that into account in the "source" format
if( texture.BytesPerPixel() == 3 )
{
srcFormat = GL_BGR;
targFormat = GL_RGB;
}
else
{
srcFormat = GL_BGRA;
targFormat = GL_RGBA;
}
// Then you need to set the unpack alignment to tell OpenGL about the structure
// of the data in the image and send the data to OpenGL. If there are multiple files
// then we are manually creating a mipmap here and you will use the "level" parameter
// of glTexImage2D to tell OpenGL which mipmap level is being set. The levels are
// set in the same order as they are stored in the image list.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, targFormat, texture.Width(), texture.Height(), 0, srcFormat, GL_UNSIGNED_BYTE, texture.ImageData());
glGenerateMipmap (GL_TEXTURE_2D);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
}
}

Something wrong with converting SDL surface to GL texture

I can't find my mistake: why is the text not created? When using a texture instead of the text I get nothing, or a black background with colored points. Please help.
GLuint texture;
SDL_Surface *text = NULL;
TTF_Font *font = NULL;
SDL_Color color = {0, 0, 0};
font = TTF_OpenFont("../test.ttf", 20);
text = TTF_RenderText_Solid(font, "Hello, SDL !!!", color);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, text->w, text->h, 0, GL_RGB, GL_UNSIGNED_BYTE, text->pixels);
SDL_FreeSurface(text);
One thing you could add is to specify texture filters, e.g.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
A few things you have to check first:
- Is the font loaded properly? Check if font == NULL; maybe your font path is wrong.
- Is the shader (if you use a shader) set up properly?
My guess is that you set the wrong pixel format type in glTexImage2D, which causes random color dots to appear on your texture.
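For example, one way to sidestep that is to convert the rendered surface to a known 32-bit format before uploading; a sketch (not my exact code), given that TTF_RenderText_Solid returns an 8-bit palettized surface:

// Convert the palettized surface to RGBA so the glTexImage2D format is unambiguous.
// On little-endian machines SDL_PIXELFORMAT_ABGR8888 stores bytes as R,G,B,A,
// which matches GL_RGBA / GL_UNSIGNED_BYTE.
SDL_Surface* rgba = SDL_ConvertSurfaceFormat(text, SDL_PIXELFORMAT_ABGR8888, 0);
if (rgba != NULL) {
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rgba->w, rgba->h, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, rgba->pixels);
    SDL_FreeSurface(rgba);
}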
Below is my code that loads an image via SDL_image for OpenGL use; I think it would be a good start to figure out what step you missed or forgot.
BTW, this code is not perfect. There are more pixel format types than these (like indexed color) and I only handle some of them.
/*
* object_, originalWidth_ and originalHeight_ are private variables in
* this class, don't panic.
*/
void
Texture::Load(string filePath, GLint minMagFilter, GLint wrapMode)
{
SDL_Surface* image;
GLenum textureFormat;
GLint bpp; //Byte Per Pixel
/* Load image file */
image = IMG_Load(filePath.c_str());
if (image == nullptr) {
string msg("IMG error: ");
msg += IMG_GetError();
throw runtime_error(msg.c_str());
}
/* Find out pixel format type */
bpp = image->format->BytesPerPixel;
if (bpp == 4) {
if (image->format->Rmask == 0x000000ff)
textureFormat = GL_RGBA;
else
textureFormat = GL_BGRA;
} else if (bpp == 3) {
if (image->format->Rmask == 0x000000ff)
textureFormat = GL_RGB;
else
textureFormat = GL_BGR;
} else {
string msg("IMG error: Unknow pixel format, bpp = ");
msg += bpp;
throw runtime_error(msg.c_str());
}
/* Store width and height */
originalWidth_ = image->w;
originalHeight_ = image->h;
/* Make OpenGL texture */
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &object_);
glBindTexture(GL_TEXTURE_2D, object_);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, minMagFilter);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, minMagFilter);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, wrapMode);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, wrapMode);
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE);
glTexImage2D(
GL_TEXTURE_2D, // texture type
0, // level
bpp, // internal format
image->w, // width
image->h, // height
0, // border
textureFormat, // format(in this texture?)
GL_UNSIGNED_BYTE, // data type
image->pixels // pointer to data
);
/* Clean these mess up */
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
SDL_FreeSurface(image);
}
For more information, you should check out the SDL wiki or dig into its source code to fully understand the architecture of SDL_Surface.