Here is what I have so far:
Rectangle::Rectangle(int x, int y)
{
    sizeX_ = x * CASE;
    sizeY_ = y * CASE;
    texture_ = gdl::Image::load("./ressources/floor.jpg");
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
}

void Rectangle::draw()
{
    texture_.bind();
    glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f);
    glVertex3f(0.0f, 0.0f, 0.0f);
    glTexCoord2f(sizeX_ / CASE, 0.0f);
    glVertex3f(sizeX_, 0.0f, 0.0f);
    glTexCoord2f(0.0f, sizeY_ / CASE);
    glVertex3f(sizeX_, 0.0f, sizeY_);
    glTexCoord2f(sizeX_ / CASE, sizeY_ / CASE);
    glVertex3f(0.0f, 0.0f, sizeY_);
    glEnd();
}
The constructor takes the size of the map in cases, for example (10, 10); the real size is then (10 * CASE, 10 * CASE), where CASE = 400.
But the texture is not repeating correctly. It appears scaled down (which is what I want), but squeezed into a single tile from the top-left corner to the bottom-right corner instead of tiling.
Am I doing something wrong?
glTexParameter works on the currently bound texture; it does not set parameters globally. This means you need to bind the texture before calling this function:
glBindTexture(GL_TEXTURE_2D, ...);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
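In your case, a minimal sketch of the fix, assuming gdl::Image::bind() binds the underlying GL texture (as draw() suggests it does):

Rectangle::Rectangle(int x, int y)
{
    sizeX_ = x * CASE;
    sizeY_ = y * CASE;
    texture_ = gdl::Image::load("./ressources/floor.jpg");
    texture_.bind(); // make this the currently bound texture first
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
}

Note also that in draw() the last two texture coordinates look swapped relative to their vertices: the vertex at (sizeX_, 0, sizeY_) should get (sizeX_ / CASE, sizeY_ / CASE), and the one at (0, 0, sizeY_) should get (0, sizeY_ / CASE).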
I have a texture in a game that clamps to the edge instead of linearly scaling up. These are the parameters I'm providing OpenGL with:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
How should I change them in order to make the texture scale up?
These are the vertices I'm giving OpenGL:
typedef struct {
    float positionX;
    float positionY;
    float textureX;
    float textureY;
} textureVertex;

textureVertex vertices[4] = {
    {-3.5f,  3.5f, -1.0f,  1.0f},
    {-3.5f, -3.5f, -1.0f, -1.0f},
    { 3.5f, -3.5f,  1.0f, -1.0f},
    { 3.5f,  3.5f,  1.0f,  1.0f}
};
You should specify texture coordinates in the 0 → 1 range for your vertices. CLAMP just tells OpenGL what to do when coordinates fall outside the 0 → 1 range; it does not say where that range is.
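For example, a sketch of the same vertex array with texture coordinates that stretch the image exactly once across the quad:

textureVertex vertices[4] = {
    // positionX, positionY, textureX, textureY
    {-3.5f,  3.5f, 0.0f, 1.0f},
    {-3.5f, -3.5f, 0.0f, 0.0f},
    { 3.5f, -3.5f, 1.0f, 0.0f},
    { 3.5f,  3.5f, 1.0f, 1.0f}
};

With the original -1 → 1 coordinates, everything outside 0 → 1 was clamped to the edge texels, which is why the texture appeared stuck to the edges instead of scaled up.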
I've been trying to add a texture to a simple square for more than five hours now, but I still can't manage it.
Here's the code I'm using in paintGL():
glEnable(GL_TEXTURE_2D);

GLuint id;
glGenTextures(1, &id);
glBindTexture(GL_TEXTURE_2D, id);

float pixels[] = {
    0.0f, 0.0f, 0.0f,   1.0f, 1.0f, 1.0f,
    1.0f, 1.0f, 1.0f,   0.0f, 0.0f, 0.0f
};
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 2, 2, 0, GL_RGB, GL_FLOAT, pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex3f(0, 0, 0);
glTexCoord2f(1.0f, 0.0f); glVertex3f(1, 0, 0);
glTexCoord2f(1.0f, 1.0f); glVertex3f(1, 1, 0);
glTexCoord2f(0.0f, 1.0f); glVertex3f(0, 1, 0);
glEnd();

// glGetError() clears the error flag, so read it once into a variable
GLenum err = glGetError();
if (err != GL_NO_ERROR)
{
    qDebug() << err;
}
There are no errors, not even OpenGL ones. I initialized the clear color to grey.
When I try to change the color with glColor3f(...), it works.
I have read every tutorial I could find with Google, but none of them helped.
SOLUTION:
I never put
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
in the right place, which is just after the glTexImage2D call! The code above has been edited and now works like a charm.
You HAVE to specify filtering for your texture:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
This is because the default minification filter uses mipmaps, and you don't generate them.
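If you'd rather keep the mipmap-based default filter, a minimal alternative sketch (glGenerateMipmap() requires OpenGL 3.0+ or the EXT_framebuffer_object extension):

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 2, 2, 0, GL_RGB, GL_FLOAT, pixels);
glGenerateMipmap(GL_TEXTURE_2D); // build the mip chain that the default GL_NEAREST_MIPMAP_LINEAR filter samples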
It looks to me like you're only using one quarter of your texture. It's only 4 pixels, and you've set the texture coordinates to be just the first pixel. If that pixel is white and your texture environment is set to multiply the quad's color by the texture, and you're set to use nearest neighbor sampling, then you'll get just the color. Try changing the texture coordinates to be (0,0) to (1,1) instead of (0,0) to (0.5,0.5) and see if that gives you the expected result. You can also try setting the various texture parameters and environments differently to see how that affects your drawing.
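If you want to experiment with the texture environment mentioned above, a fixed-function sketch:

// GL_MODULATE (the default) multiplies the quad's color by the texel;
// GL_REPLACE ignores glColor and shows the texture as-is.
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);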
I've written a very simple OpenGL application. Its goal is to load a texture and draw it on a plane. If I use the function glTexSubImage2D, the plane is not textured and glGetError returns the error 1281 (invalid value). However, if I use the function glTexImage2D, my plane is textured correctly (and I have no error).
Here's a piece of my code:
void core::RootDevice::LoadTexture(char const *pFileName)
{
    SDL_Surface *pSurface = IMG_Load(pFileName);
    char *pPixels = reinterpret_cast<char*>(pSurface->pixels);
    uint32_t bytePerPixel = pSurface->format->BitsPerPixel;

    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_2D, textureId);
    {
        glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pboID);
        {
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            //glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pSurface->w,        // NO ERROR: all is OK
            //             pSurface->h, 0, GL_RGB, GL_UNSIGNED_BYTE, pPixels);
            glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pSurface->w,         // ERROR: 1281
                            pSurface->h, GL_RGB, GL_UNSIGNED_BYTE, pPixels);

            std::cout << glGetError() << std::endl;
            getchar();
        }
        glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
    }
    glBindTexture(GL_TEXTURE_2D, 0);
}
And the rendering code:
void core::RootDevice::Render(void)
{
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId);
    {
        glBegin(GL_QUADS);
        glTexCoord2f(0.0f, 0.0f);
        glVertex3f(-1.0f, -1.0f, 0.0f);
        glTexCoord2f(1.0f, 0.0f);
        glVertex3f(1.0f, -1.0f, 0.0f);
        glTexCoord2f(1.0f, 1.0f);
        glVertex3f(1.0f, 1.0f, 0.0f);
        glTexCoord2f(0.0f, 1.0f);
        glVertex3f(-1.0f, 1.0f, 0.0f);
        glEnd();
    }
    glBindTexture(GL_TEXTURE_2D, 0);
}
The result is the untextured plane described above.
Can anyone help me?
glTexSubImage2D() is used to replace parts or all of a texture that already has image data allocated. You have to call glTexImage2D() on the texture at least once before you can use glTexSubImage2D() on it. Unlike glTexSubImage2D(), glTexImage2D() allocates image data. You can use NULL for the last (data) argument to glTexImage2D() if you only want to allocate image data, and later set the data with glTexSubImage2D().
Newer versions of OpenGL (4.2 and ES 3.0) have a new entry point glTexStorage2D() that can be used as an alternative to glTexImage2D() to allocate the image data for a texture without specifying the data. It is similar to calling glTexImage2D() with data = NULL, but also lets you specify ahead of time whether space for mipmaps will be needed.
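Applied to the loading code above, a minimal sketch of the allocate-then-update pattern (shown with the PBO unbound; with a non-zero GL_PIXEL_UNPACK_BUFFER bound, the data pointer is interpreted as an offset into that buffer):

// Allocate storage for the texture once; data == NULL only reserves memory.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pSurface->w, pSurface->h, 0,
             GL_RGB, GL_UNSIGNED_BYTE, NULL);

// Now glTexSubImage2D() is valid: it uploads into the allocated storage.
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pSurface->w, pSurface->h,
                GL_RGB, GL_UNSIGNED_BYTE, pPixels);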
I'm pretty new to ES 2.0, and I'm trying to draw a single texture in OpenGL ES 2.0.
Here is my code:
data: a pointer to the texture data (it works, I was already able to test it with ES 1.0)
w: texture width
h: texture height
The texture is 256x256 pixels.
glGetError() returns GL_NO_ERROR after every line…
Hope someone can help! All I can see is a black screen!
Thanks,
Wise
{
    {
        enum {
            ATTRIB_VERTEX = 0,
            ATTRIB_TEXTURE_POSITION,
            NUM_ATTRIB
        };

        glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        const GLfloat vertices[] = { -0.5f, -0.5f, 0.5f, -0.5f, -0.5f, 0.5f, 0.5f, 0.5f };
        const GLfloat texture_coord[] = { 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 0.0f };

        GLuint texName;
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texName);

        glEnableVertexAttribArray(ATTRIB_VERTEX);
        glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertices);
        glEnableVertexAttribArray(ATTRIB_TEXTURE_POSITION);
        glVertexAttribPointer(ATTRIB_TEXTURE_POSITION, 2, GL_FLOAT, 0, 0, texture_coord);

        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h,
                     0, GL_RGBA,
                     GL_UNSIGNED_BYTE, data);

        glDrawArrays(GL_TRIANGLES, 0, 4);
    }
}
Is that your full source? If so, several required pieces are missing, and you must set up your shaders before any of this.

Setting up the handles (these should be GLint variables rather than enum constants, since they are assigned at runtime):

ATTRIB_VERTEX = glGetAttribLocation(m_paintProgram, "position");
ATTRIB_TEXTURE_POSITION = glGetAttribLocation(m_paintProgram, "inputTextureCoordinate");
maintextureHandle = glGetUniformLocation(m_EffectProgram[displayMode], "texture0");

Passing values through the handles:
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texName);
glUniform1i(maintextureHandle, 0); // add this call
Also, you don't need to run the following block every frame; calling it once (e.g. at initialization) is enough:
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
Your code is also missing many other required calls, such as shader loading, shader compilation, glUseProgram(), glBindFramebuffer(), glViewport(), glBindRenderbuffer(), and so on. I can't cover all of them here, so I suggest you refer to this link and learn more about OpenGL ES 2.0:
http://examples.oreilly.com/9780596804831/readme.html#WireframeSkeleton
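For reference, a minimal sketch of the shader pair this setup assumes. The names "position", "inputTextureCoordinate", and "texture0" match the glGetAttribLocation()/glGetUniformLocation() calls above; m_paintProgram is assumed to be the program object compiled and linked from these sources:

// Vertex shader: pass the position through and hand the texture
// coordinate to the fragment shader.
const char *vertexSrc =
    "attribute vec4 position;\n"
    "attribute vec2 inputTextureCoordinate;\n"
    "varying vec2 texCoord;\n"
    "void main() {\n"
    "    texCoord = inputTextureCoordinate;\n"
    "    gl_Position = position;\n"
    "}\n";

// Fragment shader: sample texture unit 0 at the interpolated coordinate.
const char *fragmentSrc =
    "precision mediump float;\n"
    "varying vec2 texCoord;\n"
    "uniform sampler2D texture0;\n"
    "void main() {\n"
    "    gl_FragColor = texture2D(texture0, texCoord);\n"
    "}\n";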
How can I convert a .png image to an OpenGL texture with SDL? Here is what I have now:
typedef GLuint texture;

texture load_texture(std::string fname){
    SDL_Surface *tex_surf = IMG_Load(fname.c_str());
    if (!tex_surf) {
        return 0;
    }
    texture ret;
    glGenTextures(1, &ret);
    glBindTexture(GL_TEXTURE_2D, ret);
    glTexImage2D(GL_TEXTURE_2D, 0, 3, tex_surf->w, tex_surf->h, 0, GL_RGB, GL_UNSIGNED_BYTE, tex_surf->pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    SDL_FreeSurface(tex_surf);
    return ret;
}
and my code to draw the thing:
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, tex);
//Use blurry texture mapping (replace GL_LINEAR with GL_NEAREST for blocky)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glColor4f( 1.0, 1.0, 1.0, 1.0 ); //Don't use special coloring
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f);
glVertex3f(0.0f, 0.0f, 0.0f);
glTexCoord2f(1.0f, 0.0f);
glVertex3f(128.0f, 0.0f, 0.0f);
glTexCoord2f(1.0f, 1.0f);
glVertex3f(128.0f, 128.0f, 0.0f);
glTexCoord2f(0.0f, 1.0f);
glVertex3f(0.0f, 128.0f, 0.0f);
glEnd();
glDisable(GL_TEXTURE_2D);
The problem is that it only works with .bmp files, and even those turn bluish. What is wrong?
Also, when I try to load a .png, it shows up really weird.
Wrong colors can be caused by getting the channel order wrong. The code I have lying around for loading .bmp files uses GL_BGR instead of GL_RGB, so I think that will solve your problem with the .bmp files.
The problem with your .png image is more likely caused by the .png being 32 bits per pixel. Probably the best solution is to inspect the format field of the SDL surface to determine the appropriate flags/values to pass to glTexImage2D.
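A sketch of that inspection, using the SDL_PixelFormat fields BytesPerPixel and Rmask that the question's SDL surface already carries:

// Derive the upload format from the surface instead of hard-coding GL_RGB.
// (GL_BGR/GL_BGRA require OpenGL 1.2 or later.)
GLint internal;
GLenum format;
if (tex_surf->format->BytesPerPixel == 4) {      // 32-bit surface: RGBA or BGRA
    internal = GL_RGBA;
    format = (tex_surf->format->Rmask == 0x000000ff) ? GL_RGBA : GL_BGRA;
} else {                                         // 24-bit surface: RGB or BGR
    internal = GL_RGB;
    format = (tex_surf->format->Rmask == 0x000000ff) ? GL_RGB : GL_BGR;
}
glTexImage2D(GL_TEXTURE_2D, 0, internal, tex_surf->w, tex_surf->h,
             0, format, GL_UNSIGNED_BYTE, tex_surf->pixels);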