OpenGL textures: incorrect mapping onto 2D polygon

I have noticed unexpected behavior in OpenGL (on an nVidia card) when I try to map a texture onto a 2D polygon. The image seems to be "broken" into two pieces:
Here's the source texture:
The simplified code snippet:
GLuint textId;
glGenTextures( 1, &textId );
glBindTexture( GL_TEXTURE_2D, textId );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA8, img.width(), img.height(), 0, GL_BGRA, GL_UNSIGNED_BYTE, (void*)img.bits() );
glEnable( GL_TEXTURE_2D );
glBegin( GL_TRIANGLE_FAN ); // the same effect for GL_QUADS
glTexCoord2d( 0.0, 0.0 );
glVertex2d( -0.5, -0.3 );
glTexCoord2d( 1.0, 0.0 );
glVertex2d( 0.5, -0.8 );
glTexCoord2d( 1.0, 1.0 );
glVertex2d( 0.5, 0.8 );
glTexCoord2d( 0.0, 1.0 );
glVertex2d( -0.5, 0.3 );
glEnd();
What is the cause of such a strange effect? I expected to see the texture simply broadening from left to right, with the white lines remaining parallel.
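For what it's worth, a hedged sketch of the usual explanation and workaround (not part of the original question): OpenGL splits the quad into two triangles and interpolates texture coordinates affinely within each one, so on a trapezoid the shared diagonal shows up as a seam. Supplying a projective q coordinate per vertex, with each (s, t) pair scaled by a weight proportional to the length of its vertical edge, makes the interpolation projective across the whole quad:
glBegin( GL_TRIANGLE_FAN );
// left edge is 0.6 units tall, right edge 1.6; use those as q weights
glTexCoord4d( 0.0, 0.0, 0.0, 0.6 );
glVertex2d( -0.5, -0.3 );
glTexCoord4d( 1.6, 0.0, 0.0, 1.6 );
glVertex2d( 0.5, -0.8 );
glTexCoord4d( 1.6, 1.6, 0.0, 1.6 );
glVertex2d( 0.5, 0.8 );
glTexCoord4d( 0.0, 0.6, 0.0, 0.6 );
glVertex2d( -0.5, 0.3 );
glEnd();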

glTexCoordPointer output not as expected

I am trying to use glDrawElements, glTexCoordPointer, and glTexImage1D to create a 1D texture, render a surface dataset, and color the vertices according to their height values.
My texture generation:
GLuint tex = 0;
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_1D, tex );
unsigned char pixels[] =
{
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
};
glTexImage1D( GL_TEXTURE_1D, 0, GL_RGBA, 3, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels );
glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
vArray is an array which stores the vertices for rendering.
float vArray[12] = {
0.0, 1.0, 0.0,
0.0, 3.0, 1.0,
1.0, 2.0, 0.0,
1.0, 1.0, 1.0,
};
iArray is an array which stores the indices for rendering.
int iSize = 6;
int iArray[6] = {
0, 2, 1,
2, 1, 4,
};
tArray is an array which stores the normalised heights.
GLfloat tArray[4] = {
0.0, 0.3, 1.0, 0.0,
};
My render code:
glEnable(GL_TEXTURE_1D);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glBindTexture(GL_TEXTURE_1D, tex);
glTexCoordPointer(1, GL_FLOAT, sizeof(GL_FLOAT), tArray);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3 ,GL_FLOAT,0, vArray);
glDrawElements(GL_TRIANGLES, iSize, GL_UNSIGNED_INT, iArray);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisable(GL_TEXTURE_1D);
The final output is not as I expected; I'm hoping somebody can point out my mistakes.
RE: your glTexCoordPointer() call: Tightly-packed arrays usually use 0 for stride.
vArray and tArray only have four elements, not five, so the 4 in iArray will cause OpenGL to read off the ends of those arrays into garbage (if you're lucky) or segfaults (if you're not).
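Concretely, the two fixes look like this (a hedged restatement; the complete program below applies the same idea with its own index order):
glTexCoordPointer( 1, GL_FLOAT, 0, tArray ); // stride 0 = tightly packed
int iArray[6] = {
    0, 2, 1,
    2, 1, 3, // 3, not 4: only vertices 0..3 exist
};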
All together:
#include <GL/glew.h>
#include <GL/glut.h>
GLuint tex = 0;
void display()
{
glClear( GL_COLOR_BUFFER_BIT );
float vArray[] =
{
-0.5, -0.5,
0.5, -0.5,
0.5, 0.5,
-0.5, 0.5,
};
GLfloat tArray[] =
{
0.0, 0.3, 1.0, 0.0,
};
unsigned int iArray[] =
{
0, 1, 2,
2, 3, 0,
};
glEnable( GL_TEXTURE_1D );
glBindTexture( GL_TEXTURE_1D, tex );
glEnableClientState( GL_TEXTURE_COORD_ARRAY );
glTexCoordPointer( 1, GL_FLOAT, 0, tArray );
glEnableClientState( GL_VERTEX_ARRAY );
glVertexPointer( 2, GL_FLOAT, 0, vArray );
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_INT, iArray );
glDisableClientState( GL_TEXTURE_COORD_ARRAY );
glDisableClientState( GL_VERTEX_ARRAY );
glDisable( GL_TEXTURE_1D );
glutSwapBuffers();
}
int main( int argc, char** argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize( 400, 400 );
glutCreateWindow( "GLUT" );
glewInit();
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_1D, tex );
unsigned char pixels[] =
{
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
};
glTexImage1D( GL_TEXTURE_1D, 0, GL_RGBA, 3, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels );
glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glutDisplayFunc( display );
glutMainLoop();
return 0;
}
EDIT: 2D version:
#include <GL/glew.h>
#include <GL/glut.h>
GLuint tex = 0;
void display()
{
glClear( GL_COLOR_BUFFER_BIT );
float vArray[] =
{
-0.5, -0.5,
0.5, -0.5,
0.5, 0.5,
-0.5, 0.5,
};
GLfloat tArray[] =
{
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
};
unsigned int iArray[] =
{
0, 1, 2,
2, 3, 0,
};
glEnable( GL_TEXTURE_2D );
glBindTexture( GL_TEXTURE_2D, tex );
glEnableClientState( GL_TEXTURE_COORD_ARRAY );
glTexCoordPointer( 2, GL_FLOAT, 0, tArray );
glEnableClientState( GL_VERTEX_ARRAY );
glVertexPointer( 2, GL_FLOAT, 0, vArray );
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_INT, iArray );
glDisableClientState( GL_TEXTURE_COORD_ARRAY );
glDisableClientState( GL_VERTEX_ARRAY );
glDisable( GL_TEXTURE_2D );
glutSwapBuffers();
}
int main( int argc, char** argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize( 400, 400 );
glutCreateWindow( "GLUT" );
glewInit();
glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_2D, tex );
unsigned char pixels[] =
{
255, 0, 0, 255,
0, 255, 0, 255,
0, 0, 255, 255,
0, 0, 255, 255,
};
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, 2, 2, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels );
glTexEnvi( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glutDisplayFunc( display );
glutMainLoop();
return 0;
}

Using single channel texture (OpenGL 2)?

Short story:
When I render anything using a texture loaded like this,
glTexImage2D ( GL_TEXTURE_2D, 0, GL_R8, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, pixels );
I get only black.
Long story:
I can get an RGBA texture with an alpha channel to render correctly (e.g. text with a transparent background). This code works:
// === load
#define GL_ABGR 0x8000
SDL_Surface * surf = SDL_LoadBMP( "common_resources/dejvu_sans_mono_RGBA.bmp" );
glGenTextures ( 1, &itex );
glBindTexture ( GL_TEXTURE_2D, itex );
glTexImage2D ( GL_TEXTURE_2D, 0, GL_RGBA, surf->w, surf->h, 0, GL_ABGR, GL_UNSIGNED_BYTE, surf->pixels );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
// ....
// === render
glEnable( GL_TEXTURE_2D );
glBindTexture( GL_TEXTURE_2D, itex );
glColor3f(1.0f,1.0f,1.0f);
glEnable(GL_BLEND);
glEnable(GL_ALPHA_TEST);
glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);
drawString ( caption, xmin, ymin+12, 6 );
renders like this:
But I'm trying to use single-channel (8-bit grayscale) images/textures instead of RGBA, and I cannot get these to render either with or without transparency. Whatever I do, I get only a black image.
This code doesn't work:
// === load
#define GL_ABGR 0x8000
SDL_Surface * surf = SDL_LoadBMP( "common_resources/dejvu_sans_mono_Alpha.bmp" );
glGenTextures ( 1, &itex );
glBindTexture ( GL_TEXTURE_2D, itex );
glTexImage2D ( GL_TEXTURE_2D, 0, GL_R8, surf->w, surf->h, 0, GL_RED, GL_UNSIGNED_BYTE, surf->pixels );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
// ....
// === render
glEnable( GL_TEXTURE_2D );
glBindTexture( GL_TEXTURE_2D, itex );
glColor3f(1.0f,1.0f,1.0f);
//glEnable(GL_BLEND);
//glEnable(GL_ALPHA_TEST);
//glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA);
drawString ( caption, xmin, ymin+12, 6 );
renders like this:
Notes:
I know that I should somehow use glTexEnv (according to e.g. here), but my main problem is that apparently the monochrome texture does not render at all.
I also tried GL_LUMINANCE and GL_INTENSITY instead of GL_RED in glTexImage2D, with no difference.
There are other questions like here and here, but they mostly concern OpenGL > 3.0 and fragment shaders.
Also, is it possible that my graphics card or driver does not support this? I'm on Ubuntu 16.04:
GL_VENDOR: Intel Open Source Technology Center
GL_RENDERER: Mesa DRI Intel(R) HD Graphics 530 (Skylake GT2)
GL_VERSION: 3.0 Mesa 11.2.0
For completeness, although it is not important, drawString looks like this:
void drawString( const char * str, float x, float y, float sz ){
const int nchars = 95;
float persprite = 1.0f/nchars;
glBegin(GL_QUADS);
for(int i=0; i<65536; i++){
if( str[i] == 0 ) break; // 0-terminated string
int isprite = str[i] - 33; // 33 is offset of meaningful ASCII characters
float offset = isprite*persprite+(persprite*0.57);
float xi = i*sz + x;
glTexCoord2f( offset , 1.0f ); glVertex3f( xi, y, 3.0f );
glTexCoord2f( offset+persprite, 1.0f ); glVertex3f( xi+sz, y, 3.0f );
glTexCoord2f( offset+persprite, 0.0f ); glVertex3f( xi+sz, y+sz*2, 3.0f );
glTexCoord2f( offset , 0.0f ); glVertex3f( xi, y+sz*2, 3.0f );
}
glEnd();
}
I want to try to help you. In my projects I use these arguments for generating textures from grayscale source images:
glTexImage2D(GL_TEXTURE_2D, 0, 1, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, pixels);
As written in the documentation, the third argument is the number of color components (1 in our case). You need to check the integer value of GL_R8, or replace it explicitly.
GL_RED means that you place the luminance values in the red channel only (not in each of the red, green, and blue channels, as for a grayscale image).
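If GL_R8/GL_RED turns out to be unsupported on this fixed-function path, a minimal sketch of a commonly used legacy alternative (an assumption, not tested against the question's setup) is to upload the 8-bit image with GL_LUMINANCE as both the internal format and the format, so the single channel is replicated into R, G, and B when sampled:
// hedged sketch: legacy-GL upload of an 8-bit grayscale image
glTexImage2D( GL_TEXTURE_2D, 0, GL_LUMINANCE, surf->w, surf->h, 0,
              GL_LUMINANCE, GL_UNSIGNED_BYTE, surf->pixels );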

How is Texture Splatting in OpenGL implemented?

I have been reading for a while the different techniques used to texture terrains and came across texture splatting. I have found a lot of articles that discuss how to do this in OpenGL, but most only discuss it theoretically and provide little to no code that I can study. Does anyone know/have some code that illustrates this in OpenGL?
Just to clarify, I want to be able to load four different textures, and based on the height of the quad/vertices, change the texture from one gradually to the next.
Edit: Below is a quick bit of code to help show what I want to know
#include <windows.h>
#include <SFML/Graphics.hpp>
#include <gl/gl.h>
#include <gl/glu.h>
#define GL_CLAMP_TO_EDGE 0x812F
class Scene {
public:
void resize( int w, int h ) {
// OpenGL Reshape
glViewport( 0, 0, w, h );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
gluPerspective( 120.0, (GLdouble)w/(GLdouble)h, 0.5, 500.0 );
glMatrixMode( GL_MODELVIEW );
}
};
int main() {
sf::RenderWindow window(sf::VideoMode(800, 600, 32), "Test");
///Setup the scene, materials, lighting
Scene scene;
scene.resize(800,600);
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
glColorMaterial(GL_FRONT_AND_BACK, GL_EMISSION);
glEnable(GL_COLOR_MATERIAL);
glShadeModel(GL_SMOOTH);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_LIGHT0);
float XL = .5, YL = .1, ZL = 1;
GLfloat ambientLight[] = { 0.2f, 0.2f, 0.2f, 1.0f };
GLfloat diffuseLight[] = { 0.8f, 0.8f, 0.8, 1.0f };
GLfloat specularLight[] = { 0.5f, 0.5f, 0.5f, 1.0f };
GLfloat lightpos[] = {XL, YL, ZL, 0.};
glLightfv(GL_LIGHT0, GL_AMBIENT, ambientLight);
glLightfv(GL_LIGHT0, GL_DIFFUSE, diffuseLight);
glLightfv(GL_LIGHT0, GL_SPECULAR, specularLight);
glLightfv(GL_LIGHT0, GL_POSITION, lightpos);
///Test terrain texture splatting
///Load the textures
sf::Image tex1;
tex1.loadFromFile("texture1.png");
sf::Image tex2;
tex2.loadFromFile("texture2.png");
///Set the first texture
GLuint grass;
glGenTextures(1, &grass);
glBindTexture(GL_TEXTURE_2D, grass);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, tex1.getSize().x, tex1.getSize().y, 0, GL_RGBA, GL_UNSIGNED_BYTE, (const GLvoid*)tex1.getPixelsPtr() );
///Set the second texture
GLuint dirt;
glGenTextures(1, &dirt);
glBindTexture(GL_TEXTURE_2D, dirt);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, tex2.getSize().x, tex2.getSize().y, 0, GL_RGBA, GL_UNSIGNED_BYTE, (const GLvoid*)tex2.getPixelsPtr() );
///Start loop
while( window.isOpen() ) {
sf::Event event;
while( window.pollEvent( event ) ) {
if( event.type == sf::Event::Closed )
window.close();
}
///Clear buffer and set camera
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(50.0, 1.0, 1.0, 50);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(1, 0, 1, 0, 0, 0, 0, 1, 0);
///Begin rendering quad
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, grass);
///I know that around here I should enable blending in order to get my two textures to mix, but I am not certain
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-0.5, -0.5, 0.0);
glTexCoord2f(1, 0);
glVertex3f(-0.5, 0.5, 0.0);
glTexCoord2f(1, 1);
glVertex3f(0.5, 0.5, 0.0);
glTexCoord2f(0, 1);
glVertex3f(0.5, -0.5, 0.0);
glEnd();
///Reset env settings for SFML
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
window.display();
}
return 1;
}
As people mentioned above, use the programmable pipeline: use shaders. In the fragment shader you can sample all the textures and interpolate between them based on vertex data you receive from the vertex shader.
A quick search gave me this result; I am sure that is what you need. Also take a look at this post. And this paper explains the technique very well.
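As an illustration, here is a minimal sketch of such a fragment shader, written as a C string constant (the uniform and varying names are my assumptions, not from the question). It assumes a vertex shader that writes gl_TexCoord[0] = gl_MultiTexCoord0 and stores the normalized vertex height in the varying height; the fragment shader then blends two textures by that height:
const char* splatFragSrc =
    "#version 120\n"
    "uniform sampler2D grassTex;\n"
    "uniform sampler2D dirtTex;\n"
    "varying float height;                    // normalized 0..1 height\n"
    "void main() {\n"
    "    vec4 grass = texture2D( grassTex, gl_TexCoord[0].st );\n"
    "    vec4 dirt  = texture2D( dirtTex,  gl_TexCoord[0].st );\n"
    "    float t = clamp( height, 0.0, 1.0 ); // blend weight from height\n"
    "    gl_FragColor = mix( grass, dirt, t ); // gradual transition\n"
    "}\n";
With four textures, as the question asks, you would add two more samplers and compute piecewise weights from the height (or sample them from a splat map) instead of a single mix.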

texture disappears with gluSphere and glutPostRedisplay

I'm just starting with OpenGL. This is my first project, in fact.
I want to map a texture onto a quadrilateral in the background, draw a sphere using gluSphere in front of it, and animate this. So I map the texture first, then draw the sphere in the display function, and then call glutPostRedisplay. It does show the texture and the sphere correctly when display is first called, but as soon as glutPostRedisplay is called, the texture disappears and only the sphere is drawn. I have given my code below. I apologize for using any bad OpenGL practices.
void display() {
glClear(GL_COLOR_BUFFER_BIT);
drawTex();
glPushMatrix();
glTranslatef(SIZE/2, SIZE/2, 0);
glutSolidSphere(15.0, 20, 20);
glPopMatrix();
glutSwapBuffers();
glutPostRedisplay();
}
void LoadTextureRAW( const char * filename, int wrap ) {
GLuint texture;
int width, height;
unsigned char * data;
FILE * file;
// open texture data
file = fopen( filename, "rb" );
if ( file == NULL ) {
return;
}
// allocate buffer
width = 1073;
height = 918;
data = (unsigned char *)malloc( width * height * 3 );
// read texture data
fread( data, width * height * 3, 1, file );
fclose( file );
// select our current texture
glBindTexture( GL_TEXTURE_2D, 1 );
// select modulate to mix texture with color for shading
glTexEnvf( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE );
// when texture area is small, bilinear filter the closest mipmap
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST );
// when texture area is large, bilinear filter the first mipmap
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
// if wrap is true, the texture wraps over at the edges (repeat)
// ... false, the texture ends at the edges (clamp)
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, wrap ? GL_REPEAT : GL_CLAMP );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, wrap ? GL_REPEAT : GL_CLAMP );
// build our texture mipmaps
gluBuild2DMipmaps( GL_TEXTURE_2D, 3, width, height,
GL_RGB, GL_UNSIGNED_BYTE, data );
// free buffer
free( data );
}
void drawTex() {
glBegin(GL_QUADS);
glTexCoord2d(0.0,0.0);
glVertex3d(0.0,SIZE/2, -SIZE);
glTexCoord2d(1.0,0.0);
glVertex3d(SIZE, SIZE/2, -SIZE);
glTexCoord2d(1.0,1.0);
glVertex3d(SIZE, SIZE/2, SIZE);
glTexCoord2d(0.0,1.0);
glVertex3d(0.0, SIZE/2, SIZE);
glEnd();
glFlush();
}
void map() {
glClearColor(1.0, 1.0, 1.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glEnable( GL_TEXTURE_2D );
LoadTextureRAW("background.bmp", true);
glBindTexture( GL_TEXTURE_2D, 1 );
drawTex();
}
void init() {
glClearColor(1, 1, 1, 1);
glClearDepth( SIZE );
glOrtho(0, SIZE, 0, SIZE, -SIZE, SIZE);
GLfloat mat_ambient[] = { 1.0, 1.0, 1.0, 1.0 };
GLfloat mat_specular[] = { 1.0, 1.0, 1.0, 1.0 };
GLfloat mat_shininess[] = { 50.0 };
GLfloat light_position[] = { SIZE/2, 0, 1.0, 0.0 };
GLfloat model_ambient[] = { 0.5, 0.5, 0.5, 1.0 };
glClearColor(1.0, 1.0, 1.0, 1.0);
glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
glMaterialfv(GL_FRONT, GL_SPECULAR, mat_specular);
glMaterialfv(GL_FRONT, GL_SHININESS, mat_shininess);
glLightfv(GL_LIGHT0, GL_POSITION, light_position);
glLightModelfv(GL_LIGHT_MODEL_AMBIENT, model_ambient);
//glColor3f(0,0,0);
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
glRotatef(-40, 1, 0, 0);
glRotatef(-40, 0, 1, 0);
map();
}
Any help would be really appreciated.
A few notes: in the fixed-function pipeline the light position must be set after the view transformation (the camera) has been applied to the modelview matrix. In fact, the whole of the code in init() actually belongs in the display function.
glClearDepth should be set to 1 unless you know what you're doing (the clear depth works in NDC space, and as an OpenGL beginner this is something "too advanced" for a start). Just set it to 1 and be happy.
The map() function makes no sense. Textures are initialized once and then only bound before rendering textured geometry.
Last but not least, glutSolidSphere doesn't generate texture coordinates, but you do need to give OpenGL texture coordinates. My suggestion: ditch glutSolidSphere and build the sphere geometry yourself, like this: https://stackoverflow.com/a/5989676/524368 . A sketch of the first two points applied to the question's display function follows.
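To make the first two notes concrete, here is a minimal sketch (an assumption-laden reorganization, reusing SIZE, drawTex(), and the texture name 1 from the question) in which the view transform is applied first and the light position is set afterwards, every frame:
void display() {
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();
    // view transformation first...
    glRotatef( -40, 1, 0, 0 );
    glRotatef( -40, 0, 1, 0 );
    // ...then the light position, so it is transformed by the current view
    GLfloat light_position[] = { SIZE/2, 0, 1.0, 0.0 };
    glLightfv( GL_LIGHT0, GL_POSITION, light_position );
    // the texture was created once at startup; here it is only bound
    glBindTexture( GL_TEXTURE_2D, 1 );
    drawTex();
    glPushMatrix();
    glTranslatef( SIZE/2, SIZE/2, 0 );
    glutSolidSphere( 15.0, 20, 20 );
    glPopMatrix();
    glutSwapBuffers();
    glutPostRedisplay();
}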

I'm seeing artifacts when I attempt to rotate an image

This is the before:
http://img22.imageshack.us/img22/5310/beforedes.jpg
and after:
http://img189.imageshack.us/img189/8890/afterr.jpg
EDIT: Now that I look at ImageShack's upload, the artifacts are diminished a great deal, but trust me, they are more pronounced than that.
I don't understand why this is happening. ImageShack converts the uploads to JPEG, but in my program they are in the image folder as .tif files. (The reason for .tif is that I couldn't get ANY other format to keep its transparent parts.)
Anyway, these artifacts follow the original top edge of the image as it rotates to any position except the original one.
Here's the part of my code that loads the image:
GLuint texture;
GLenum texture_format;
GLint nofcolors;
GLfloat spin;
bool Game::loadImage()
{
SDL_Surface * surface; // this surface will tell us the details of the image
if ( surface = SM.load_image("Images/tri2.tif") )
{
//get number of channels in the SDL surface
nofcolors = surface->format->BytesPerPixel;
//contains an alpha channel
if ( nofcolors == 4 )
{
if ( surface->format->Rmask == 0x000000ff )
texture_format = GL_RGBA;
else texture_format = GL_BGRA;
}
else if ( nofcolors == 3 ) //no alpha channel
{
if ( surface->format->Rmask == 0x000000ff )
texture_format = GL_RGB;
else texture_format = GL_BGR;
}
// Have OpenGL generate a texture object handle for us
glGenTextures( 1, &texture );
// Bind the texture object
glBindTexture( GL_TEXTURE_2D, texture );
// Set the texture’s stretching properties
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexImage2D( GL_TEXTURE_2D, 0, nofcolors, surface->w, surface->h, 0, texture_format, GL_UNSIGNED_BYTE, surface->pixels );
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
}
else
{
SDL_Quit();
return false;
}
// Free the SDL_Surface only if it was successfully created
if ( surface )
{
SDL_FreeSurface( surface );
return true;
}
else return false;
}
void Game::drawImage()
{
// Clear the screen before drawing
glClear( GL_COLOR_BUFFER_BIT );
glTranslatef( float(S_WIDTH/2), float(S_HEIGHT/2), 0.0f );
glRotatef( spin, 0.0, 0.0, 1.0 );
// Bind the texture to which subsequent calls refer to
glBindTexture( GL_TEXTURE_2D, texture );
glBegin( GL_QUADS );
{
// Top-left vertex (corner)
glTexCoord2i( 0, 0 );
glVertex3f( -64, 0, 0 );
// Top-right vertex (corner)
glTexCoord2i( 1, 0 );
glVertex3f( 64, 0, 0 );
// Bottom-right vertex (corner)
glTexCoord2i( 1, 1 );
glVertex3f( 64, 128, 0 );
// Bottom-left vertex (corner)
glTexCoord2i( 0, 1 );
glVertex3f( -64, 128, 0 );
}
glEnd();
glLoadIdentity();
SDL_GL_SwapBuffers();
}
Looks like the texture wrap mode is set to GL_REPEAT. Try GL_CLAMP_TO_EDGE instead: with GL_REPEAT, GL_LINEAR filtering at the image borders blends in texels from the opposite edge, which produces exactly this kind of artifact along the edges as the image rotates.
In Game::loadImage, after your glBindTexture call:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
Your current setting is GL_REPEAT, which is the OpenGL default.