How can I convert a .png image to an OpenGL texture with SDL? What I have now:
typedef GLuint texture;
texture load_texture(std::string fname){
SDL_Surface *tex_surf = IMG_Load(fname.c_str());
if(!tex_surf){
return 0;
}
texture ret;
glGenTextures(1, &ret);
glBindTexture(GL_TEXTURE_2D, ret);
glTexImage2D(GL_TEXTURE_2D, 0, 3, tex_surf->w, tex_surf->h, 0, GL_RGB, GL_UNSIGNED_BYTE, tex_surf->pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
SDL_FreeSurface(tex_surf);
return ret;
}
And here is my code to draw it:
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, tex);
//Use blurry texture mapping (replace GL_LINEAR with GL_NEAREST for blocky)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glColor4f( 1.0, 1.0, 1.0, 1.0 ); //Don't use special coloring
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f);
glVertex3f(0.0f, 0.0f, 0.0f);
glTexCoord2f(1.0f, 0.0f);
glVertex3f(128.0f, 0.0f, 0.0f);
glTexCoord2f(1.0f, 1.0f);
glVertex3f(128.0f, 128.0f, 0.0f);
glTexCoord2f(0.0f, 1.0f);
glVertex3f(0.0f, 128.0f, 0.0f);
glEnd();
glDisable(GL_TEXTURE_2D);
The problem is that it only works with .bmp files, and they turn bluish, so what is wrong?
Also, when I try to load a .png, it shows up really weird.
Wrong colors are usually caused by getting the channel order wrong. The code I have lying around for loading .bmp files passes GL_BGR instead of GL_RGB to glTexImage2D, so I think that will solve your problem with the bluish .bmp files.
The problem with your .png is more likely caused by the image being 32 bits per pixel. Probably the best solution for you is to inspect the format field of the SDL surface to determine the appropriate format/type values to pass to glTexImage2D.
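For example, here is a rough sketch of that inspection (one possible approach, assuming IMG_Load returned a tightly packed 24- or 32-bit surface; the field names come from SDL's SDL_PixelFormat):
// Pick the upload format from the surface instead of hard-coding GL_RGB
GLint  internal_format = GL_RGB;
GLenum pixel_format    = GL_RGB;
if (tex_surf->format->BytesPerPixel == 4) {
    internal_format = GL_RGBA;
    pixel_format = (tex_surf->format->Rmask == 0x000000ff) ? GL_RGBA : GL_BGRA;
} else {
    pixel_format = (tex_surf->format->Rmask == 0x000000ff) ? GL_RGB : GL_BGR;
}
glTexImage2D(GL_TEXTURE_2D, 0, internal_format, tex_surf->w, tex_surf->h, 0,
             pixel_format, GL_UNSIGNED_BYTE, tex_surf->pixels);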
I am following this article to render a video onto a texture using OpenGL and WinForms in C++. I have changed the code in the renderer as follows, but glutPostRedisplay() is not working. The same logic works well when I create an OpenGL window and render there, but it does not seem to work in WinForms.
From what I understand, glutPostRedisplay is trying to refresh my main WinForms window and not the OpenGL viewport. I am not sure how to refresh my viewport.
void OpenGLForm::COpenGL::Render(System::Void)
{
//glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
// These are necessary if using glTexImage2D instead of gluBuild2DMipmaps
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
// Draw a textured quad
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex2f(0.0f, 0.0f);
glTexCoord2f(1.0f, 0.0f); glVertex2f(frame_width, 0.0f);
glTexCoord2f(1.0f, 1.0f); glVertex2f(frame_width, frame_height);
glTexCoord2f(0.0f, 1.0f); glVertex2f(0.0f, frame_height);
glEnd();
glFlush();
//glutSwapBuffers();
OpenGLForm::COpenGL::SwapOpenGLBuffers();
//Get data from the camera
uint32_t* buffer = new uint32_t[frame_width * frame_height * 4];
if (display_mode == DISPLAY_ARGB) {
// Pass a pointer to the texture directly into Thermal_GetImage for maximum performance
status = Thermal_GetDisplayImage(camera, buffer, (uint32_t)frame_pixels);
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RGBA8,
frame_width,
frame_height,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
buffer);
// Clean up buffer
delete[] buffer;
// Update display
glutPostRedisplay();
} // end if (display_mode == DISPLAY_ARGB)
} // closes Render()
void OpenGLForm::COpenGL::SwapOpenGLBuffers(System::Void)
{
SwapBuffers(m_hDC);
}
glutPostRedisplay only works with a GLUT window (one created with glutCreateWindow). Since you are not using GLUT for the window, you have to use a Win32 API function to invalidate the client area of the window instead, e.g. InvalidateRect:
InvalidateRect(hwnd, NULL, TRUE); // hwnd: the handle of the window (or control) hosting the GL context
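A minimal sketch of what that can look like from the C++/CLI side, assuming you can get the HWND of the control that hosts the GL context (openGLPanel is a made-up name here):
// Trigger a repaint of the WinForms control hosting the GL context
HWND hwnd = (HWND)openGLPanel->Handle.ToPointer(); // Control::Handle is the native window handle
InvalidateRect(hwnd, NULL, FALSE); // schedule a WM_PAINT (FALSE = don't erase the background)
UpdateWindow(hwnd);                // optionally force the repaint immediately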
I've been trying to add a texture to a simple square for more than 5 hours now, but I still can't manage to do so.
Here's the code I'm using in paintGL():
glEnable(GL_TEXTURE_2D);
GLuint id;
glGenTextures(1, &id);
glBindTexture(GL_TEXTURE_2D, id);
float pixels[] = {
0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f
};
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 2, 2, 0, GL_RGB, GL_FLOAT, pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex3f(0,0,0);
glTexCoord2f(1.0f, 0.0f); glVertex3f(1,0,0);
glTexCoord2f(1.0f, 1.0f); glVertex3f(1,1,0);
glTexCoord2f(0.0f, 1.0f); glVertex3f(0,1,0);
glEnd();
GLenum err = glGetError(); // call glGetError() only once: a second call returns the next error, not this one
if (err != GL_NO_ERROR)
{
    qDebug() << err;
}
There are no errors, not even OpenGL ones. I initialized the clear color to grey.
When I try to change the color with glColor3f(...), that works.
I read all the tutorials I could find with Google, but none helped.
SOLUTION:
I never put
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
at the right place: just after the glTexImage2D call! The code above has been edited and now works like a charm.
You HAVE to specify filtering for your texture:
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
Because the default minification filter (GL_NEAREST_MIPMAP_LINEAR) uses mipmaps, but you don't generate them, so the texture is incomplete.
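A small sketch of the two usual fixes, either of which makes the texture complete; both go right after the glTexImage2D call:
// Option 1: switch to filters that do not need mipmaps
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Option 2: keep the default mipmapped filter and actually build the mipmaps
// (glGenerateMipmap needs OpenGL 3.0+ or GL_EXT_framebuffer_object)
glGenerateMipmap(GL_TEXTURE_2D);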
It looks to me like you're only using one quarter of your texture. It's only 4 pixels, and you've set the texture coordinates to be just the first pixel. If that pixel is white and your texture environment is set to multiply the quad's color by the texture, and you're set to use nearest neighbor sampling, then you'll get just the color. Try changing the texture coordinates to be (0,0) to (1,1) instead of (0,0) to (0.5,0.5) and see if that gives you the expected result. You can also try setting the various texture parameters and environments differently to see how that affects your drawing.
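If the texture environment is the suspect, one quick test (a sketch, not necessarily the poster's setup) is to let the texel color replace the vertex color entirely:
// GL_MODULATE is the default; GL_REPLACE ignores glColor and shows the raw texels
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);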
I've written a very simple OpenGL application. Its goal is to load a texture and draw it on a plane. If I use the function glTexSubImage2D, the plane is not textured and glGetError returns error 1281 (invalid value). However, if I use the function glTexImage2D, my plane is textured correctly (and I get no error).
Here's a piece of my code:
void core::RootDevice::LoadTexture(char const *pFileName)
{
    SDL_Surface *pSurface = IMG_Load(pFileName);
    char *pPixels = reinterpret_cast<char*>(pSurface->pixels);
    uint32_t bytePerPixel = pSurface->format->BitsPerPixel;

    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_2D, textureId);
    {
        glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pboID);
        {
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            //glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pSurface->w,  //NO ERROR : All is ok
            //             pSurface->h, 0, GL_RGB, GL_UNSIGNED_BYTE, pPixels);
            glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pSurface->w,   //ERROR : 1281
                            pSurface->h, GL_RGB, GL_UNSIGNED_BYTE, pPixels);

            std::cout << glGetError() << std::endl;
            getchar();
        }
        glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
    }
    glBindTexture(GL_TEXTURE_2D, 0);
}
And the rendering code:
void core::RootDevice::Render(void)
{
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureId);
{
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f);
glVertex3f(-1.0f, -1.0f, 0.0f);
glTexCoord2f(1.0f, 0.0f);
glVertex3f(1.0f, -1.0f, 0.0f);
glTexCoord2f(1.0f, 1.0f);
glVertex3f(1.0f, 1.0f, 0.0f);
glTexCoord2f(0.0f, 1.0f);
glVertex3f(-1.0f, 1.0f, 0.0f);
glEnd();
}
glBindTexture(GL_TEXTURE_2D, 0);
}
And the result is the following: the plane stays untextured.
Can anyone help me?
glTexSubImage2D() is used to replace part or all of a texture that already has image data allocated. You have to call glTexImage2D() on the texture at least once before you can use glTexSubImage2D() on it. Unlike glTexSubImage2D(), glTexImage2D() allocates the image data. You can pass NULL as the last (data) argument to glTexImage2D() if you only want to allocate the image data, and then set the data later with glTexSubImage2D().
Newer versions of OpenGL (4.2 and ES 3.0) have a new entry point, glTexStorage2D(), that can be used as an alternative to glTexImage2D() for allocating a texture's storage without specifying the data. It is similar to calling glTexImage2D() with data = NULL, but it also lets you specify ahead of time how many mipmap levels the texture will have.
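A minimal sketch of the allocate-then-update pattern described above, reusing the names from the question (textureId, pSurface, pPixels) and keeping its GL_RGB assumption:
glBindTexture(GL_TEXTURE_2D, textureId);
// Allocate the storage once with no initial data (data pointer = NULL)...
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pSurface->w, pSurface->h, 0,
             GL_RGB, GL_UNSIGNED_BYTE, NULL);
// ...after which glTexSubImage2D may legally fill (part of) that storage.
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pSurface->w, pSurface->h,
                GL_RGB, GL_UNSIGNED_BYTE, pPixels);
// With OpenGL 4.2+ the allocation could instead be immutable storage:
// glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, pSurface->w, pSurface->h);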
I linked, included, and did everything necessary to use SOIL and its functions.
I load the texture like so in init():
void initRendering() {
std::string vertexInputString="exampleVS.txt";
std::string fragmentInputString="exampleFS.txt";
//create new shader
InitializeProgram(shaderOne, readFiletoString(vertexInputString), readFiletoString(fragmentInputString));
//enables/disables
glEnable(GL_DEPTH_TEST);
glEnable(GL_TEXTURE_2D);
glEnable (GL_BLEND);
//glEnable(GL_LIGHTING);
//glEnable(GL_LIGHT0);
//states
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR_MIPMAP_LINEAR);
//load textures
image = SOIL_load_OGL_texture(
"resources/wood.png",
SOIL_LOAD_AUTO,
SOIL_CREATE_NEW_ID,
SOIL_FLAG_INVERT_Y
);
if( 0 == image ){printf( "SOIL loading error: '%s'\n", SOIL_last_result());}
//start shader
glUseProgram(shaderOne);
}
For simplicity, wood.png is a 256x256 PNG texture, and it loads without an error message.
Here is where I draw the textured quad:
glPushMatrix();
glColor4f(0.0, 0.0, 0.0,1.0f);
glBindTexture(GL_TEXTURE_2D, image);
//glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_DECAL);
//glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_DECAL);
//glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST); // when texture area is small, bilinear filter the closest mipmap
//glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); // when texture area is large, bilinear filter the first mipmap
//glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); // texture should tile
//glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glEnable(GL_TEXTURE_2D);
glBegin(GL_QUADS);
glNormal3f(0.0, 1.0f, 0.0f);
glTexCoord2f(0.0f,0.0f); glVertex3f(-50.0f, 10.0f, 50.0f);
glTexCoord2f(0.0f,1.0f); glVertex3f(-50.0f, 10.0f, -50.0f);
glTexCoord2f(1.0f,1.0f); glVertex3f(50.0f, 10.0f, -50.0f);
glTexCoord2f(1.0f,0.0f); glVertex3f(50.0f, 10.0f, 50.0f);
glEnd();
glPopMatrix();
The result is simply an all-black texture instead of the wood texture.
I have a 128 x 128 px, 24-bit BMP file for a texture. It's a green square with one vertical line. It loads OK with stb_image. In initialization, the following are enabled:
glEnable(GL_TEXTURE_2D);
glEnable(GL_TEXTURE_GEN_S);
glEnable(GL_TEXTURE_GEN_T);
and the texture parameters are:
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
I have an object which is made of cubes and cylinders (and some polygons built from vertices). I bind the texture to one of the GLUT solids like this:
glPushMatrix();
glBindTexture(GL_TEXTURE_2D, tex1_id);
glColor3f(0.0f, 1.0f, 0.0f);
glTranslatef(-0.5f, 0.2f, 0.05f);
glScalef(2.4f, 0.4f, 1.0f);
glutSolidCube(0.6f);
glBindTexture(GL_TEXTURE_2D, 0);
glPopMatrix();
or to a quad built from vertices:
glPushMatrix();
glTranslatef(-0.03f, 1.26f, -0.35f);
glRotatef(94, 1.0f, 0, 0);
glBindTexture(GL_TEXTURE_2D, tex1);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f);
glVertex3f(-0.3f, 0.4f, 0.0f);
glTexCoord2f(0.0f, 1.0f);
glVertex3f(-0.3f, 0.85f, 0.0f);
glTexCoord2f(1.0f, 1.0f);
glVertex3f(0.3f, 0.85f, 0.0f);
glTexCoord2f(1.0f, 0.0f);
glVertex3f(0.3f, 0.4f, 0.0f);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glPopMatrix();
I know that glutSolidCube does not generate texture coordinates, but apparently with the glEnables above (texture coordinate generation) it works just fine, and the texture shows up on the object. Everything looks good at this point.
The problem starts when I change the view with lookAt or translate and rotate the object along some path. Then it looks as if the texture does not move and rotate with the object but remains static.
First: initial view; Second: slightly rotated view (lookAt); Third: slightly translated object
The same thing happens if I draw that object with vertices. I use OpenGL 2.1 in C++ with GLUT.
You are doing it the wrong way: you have to define the texture coordinates yourself.
Draw the cube by specifying the vertices and texture coordinates explicitly:
//first face of cube
glTexCoord2f(0.0, 0.0);
glVertex3f(x, y, z);
glTexCoord2f(1.0, 0.0);
glVertex3f(x, y, z);
glTexCoord2f(1.0, 1.0);
glVertex3f(x, y, z);
glTexCoord2f(0.0, 1.0);
glVertex3f(x, y, z);
//and so on for all six faces of the cube.
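As a concrete sketch, here is what one face might look like: the +Z face of a unit cube centered at the origin, with the texture stretched once across it (the coordinates are illustrative, not from the original post):
glBindTexture(GL_TEXTURE_2D, tex1_id);
glBegin(GL_QUADS);
// +Z face: texture corners follow the vertex corners counter-clockwise
glNormal3f(0.0f, 0.0f, 1.0f);
glTexCoord2f(0.0f, 0.0f); glVertex3f(-0.5f, -0.5f, 0.5f);
glTexCoord2f(1.0f, 0.0f); glVertex3f( 0.5f, -0.5f, 0.5f);
glTexCoord2f(1.0f, 1.0f); glVertex3f( 0.5f,  0.5f, 0.5f);
glTexCoord2f(0.0f, 1.0f); glVertex3f(-0.5f,  0.5f, 0.5f);
glEnd();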
You must understand how texture coordinates work.
Now, if you want to draw the cylinder, use the gluCylinder function (look here for details). You can map a texture onto a cylinder rendered with gluCylinder by using the gluQuadricTexture function, as below. I assume you have already loaded the texture.
//Create the quadric object to render cylinder
GLUquadric *quad;
quad = gluNewQuadric();
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, _textureId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
gluQuadricTexture(quad,1);
gluCylinder(quad,2,2,3,20,20);
You can find a similar example here.