How to blend RGB & BGRA raw images with OpenGL? - opengl

I am very new to OpenGL and I am trying to blend an RGB and a BGRA raw image.
The RGB image is the background image.
The BGRA image is the foreground image.
With the source below I get only the "osd_raw" image as output; blending is not happening.
The code below loads the 2 raw images, creates 2 textures and binds the data to them, and uses glBlendFunc for blending, but the output does not get blended.
Please tell me where it's going wrong.
void display()
{
GLuint texture[2];
int width = 960;
int height = 540;
unsigned char *osd_raw = loadFile("./osd.raw");
unsigned char *video_raw = loadFile("./video.raw");
glClearColor(0.1, 0.1, 0.1, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
glGenTextures(2, texture);
glEnable( GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, video_raw);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glBindTexture(GL_TEXTURE_2D, texture[1]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, osd_raw);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-1, -1, 0);
glTexCoord2f(0, 1);
glVertex3f(-1, 1, 0);
glTexCoord2f(1, 1);
glVertex3f(1, 1, 0);
glTexCoord2f(1, 0);
glVertex3f(1, -1, 0);
glEnd();
glFlush();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGBA);
glutInitWindowSize(960, 540);
glutInitWindowPosition(0, 0);
glutCreateWindow("glut test");
glutDisplayFunc(display);
glutMainLoop();
return 0;
}

First of all, note that drawing with glBegin/glEnd sequences has been deprecated for more than 10 years.
Read about the Fixed Function Pipeline and see Vertex Specification for a state-of-the-art way of rendering.
If you want to blend 2 textures, you have to bind the first texture and draw the quad with the proper texture coordinate attributes set.
After that you have to bind the 2nd texture and draw the quad again, with blending enabled (see Blending):
glDisable( GL_BLEND);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-1, -1, 0);
glTexCoord2f(0, 1);
glVertex3f(-1, 1, 0);
glTexCoord2f(1, 1);
glVertex3f(1, 1, 0);
glTexCoord2f(1, 0);
glVertex3f(1, -1, 0);
glEnd();
glEnable( GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, texture[1]);
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-1, -1, 0);
glTexCoord2f(0, 1);
glVertex3f(-1, 1, 0);
glTexCoord2f(1, 1);
glVertex3f(1, 1, 0);
glTexCoord2f(1, 0);
glVertex3f(1, -1, 0);
glEnd();
Further note that the Depth Test has to be disabled, or set to e.g. GL_LEQUAL, when the 2nd quad is drawn. Otherwise the 2nd quad would be discarded by the depth test.
Since the blend function is glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);, some alpha values of the 2nd texture of course have to be less than 1.0.
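For completeness, a minimal sketch of the state that could be set right before the 2nd quad is drawn (standard OpenGL calls; both depth-test variants mentioned above are shown):
// Variant 1: ignore depth for the overlay pass
glDisable(GL_DEPTH_TEST);
// Variant 2: keep depth testing, but let fragments at equal depth pass
// glEnable(GL_DEPTH_TEST);
// glDepthFunc(GL_LEQUAL);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, texture[1]); // BGRA overlay; needs alpha < 1.0 where the background should show through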

As per Rabbid76's answer, I modified the code; below is the working source for reference.
void display()
{
GLuint texture[2];
int width = 960;
int height = 540;
unsigned char *osd_raw = loadFile("./osd.raw");
unsigned char *video_raw = loadFile("./video.raw");
glClearColor(0.1, 0.1, 0.1, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
glGenTextures(2, texture);
glDisable(GL_BLEND);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, video_raw);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-1, -1, 0);
glTexCoord2f(0, 1);
glVertex3f(-1, 1, 0);
glTexCoord2f(1, 1);
glVertex3f(1, 1, 0);
glTexCoord2f(1, 0);
glVertex3f(1, -1, 0);
glEnd();
glEnable( GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, texture[1]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, osd_raw);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-1, -1, 0);
glTexCoord2f(0, 1);
glVertex3f(-1, 1, 0);
glTexCoord2f(1, 1);
glVertex3f(1, 1, 0);
glTexCoord2f(1, 0);
glVertex3f(1, -1, 0);
glEnd();
glFlush();
}

Related

Texture mapping bug in OpenGL (with multiple textures)

I'm having trouble using more than one texture in OpenGL. I've written a test program that draws two squares, one bigger than the other. When I texture just one of them, everything works fine, but when I texture both, that's the result:
That's the code I use to initialize the textures:
unsigned int grass_height, grass_width, wall_height, wall_width;
unsigned char *grass = loadBMP("./minegrama.bmp", &grass_height, &grass_width);
unsigned char *wall = loadBMP("./mineleaves.bmp", &wall_height, &wall_width);
glShadeModel(GL_SMOOTH);
glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
glGenTextures(2, tex);
glBindTexture(GL_TEXTURE_2D, tex[0]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, grass_width, grass_height, 0, GL_BGR, GL_UNSIGNED_BYTE, grass);
gluBuild2DMipmaps(tex[0], GL_RGB, grass_width, grass_height, GL_RGB, GL_UNSIGNED_BYTE, grass);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST_MIPMAP_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glBindTexture(GL_TEXTURE_2D, tex[1]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, wall_width, wall_height, 0, GL_BGR, GL_UNSIGNED_BYTE, wall);
gluBuild2DMipmaps(tex[1], GL_RGB, grass_width, grass_height, GL_RGB, GL_UNSIGNED_BYTE, wall);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST_MIPMAP_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
That's the drawing code:
// Set the MatrixMode to use textures
glMatrixMode(GL_TEXTURE);
// START OF FLOOR
glPushMatrix();
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, tex[0]);
// Draw the floor
glColor3f(0, 0.39, 0);
glBegin(GL_QUADS);
glVertex3f(-30, -30, -0.001);
glTexCoord3f(-30, -30, -0.001);
glVertex3f(30, -30, -0.001);
glTexCoord3f(30, -30, -0.001);
glVertex3f(30, 30, -0.001);
glTexCoord3f(30, 30, -0.001);
glVertex3f(-30, 30, -0.001);
glTexCoord3f(-30, 30, -0.001);
glEnd();
glDisable(GL_TEXTURE_2D);
glPopMatrix();
// END OF FLOOR
glPushMatrix();
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, tex[1]);
glTranslatef(0, 0, 10);
glBegin(GL_QUADS);
glVertex3f(-1, -1, 0); glTexCoord3f(-1, -1, 0); // Specify each vertex
glVertex3f(1, -1, 0); glTexCoord3f(1, -1, 0);
glVertex3f(1, 1, 0); glTexCoord3f(1, 1, 0);
glVertex3f(-1, 1, 0); glTexCoord3f(-1, 1, 0);
glDisable(GL_TEXTURE_2D);
glEnd();
glPopMatrix();
The first parameter of gluBuild2DMipmaps is wrong. The documentation for the target parameter states:
Specifies the target texture. Must be GLU_TEXTURE_2D.
In addition, your texture coordinates don't look good. With the current values, the texture on the large rectangle will be repeated 60 times. Also, the z-value of the texture coordinates should be 0, not -0.001. Even better: use glTexCoord2f for 2-dimensional textures.
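For illustration, a sketch of the corrected calls for the floor texture, assuming it should simply cover the large quad once. The target is GL_TEXTURE_2D (the question passes the texture name tex[0] instead), and glTexCoord2f is issued before the vertex it applies to:
gluBuild2DMipmaps(GL_TEXTURE_2D, GL_RGB, grass_width, grass_height, GL_RGB, GL_UNSIGNED_BYTE, grass);
// ... later, in the drawing code ...
glBegin(GL_QUADS);
glTexCoord2f(0, 0); glVertex3f(-30, -30, -0.001);
glTexCoord2f(1, 0); glVertex3f( 30, -30, -0.001);
glTexCoord2f(1, 1); glVertex3f( 30,  30, -0.001);
glTexCoord2f(0, 1); glVertex3f(-30,  30, -0.001);
glEnd();
glTexCoord sets the current texture coordinate that is consumed by the next glVertex call, so issuing it after the vertex (as in the question) leaves each vertex with the previous coordinate.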

C++ Adding a texture to a GL_QUAD and it's coming out black

I have a series of rectangles of different colours and I'm trying to add a texture to one of them. However when I apply the texture to the given rectangle, it just turns black. Below is the function I use to load the texture.
GLuint GLWidget:: LoadTexture(const char * pic, int width, int height){
GLuint Texture;
BYTE * data;
FILE * picfile;
picfile = fopen(pic, "rb");
if (picfile == NULL)
return 0;
data = (BYTE *)malloc(width * height * 3);
fread(data, width * height, 3, picfile);
fclose(picfile);
glGenTextures(1, &Texture);
glBindTexture(GL_TEXTURE_2D, Texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB8, GL_UNSIGNED_BYTE, data);
return Texture;
}
In another function where the GL_QUADS are drawn, I then have...
GLuint myTex = LoadTexture("texture.bmp", 500, 500);
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glBindTexture(GL_TEXTURE_2D, myTex);
glBegin(GL_QUADS);
glTexCoord2f(1, 1); glVertex3f(42, 10, 42);
glTexCoord2f(1, 0); glVertex3f(42, 10, -42);
glTexCoord2f(0, 0); glVertex3f(-42,10,-42);
glTexCoord2f(0, 1); glVertex3f(-42,10, 42);
glEnd();
If anyone could let me know where I am going wrong that would be great, thanks!
This call
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB8, GL_UNSIGNED_BYTE, data);
is invalid. GL_RGB8 is a valid internalFormat, but it is not a valid enum for format. Use GL_RGB, GL_UNSIGNED_BYTE as format and type if your client-side data has 3 channels with 8-bit unsigned byte data per channel.
Another thing is
LoadTexture("texture.bmp", 500, 500);
This suggests that you are dealing with BMP files, but your loader only deals with completely raw image data.
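A minimal sketch of the corrected upload, assuming data really holds raw, tightly packed 8-bit RGB pixels (an actual .bmp file would additionally need its header parsed and its row padding and bottom-up row order handled):
glPixelStorei(GL_UNPACK_ALIGNMENT, 1); // width * 3 bytes per row is not necessarily 4-byte aligned
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);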

Using a framebuffer for a 2-pass Gaussian blur

I want to apply a Gaussian blur to a 256x256 texture, but I'm having problems: rendering the first pass into a framebuffer and reusing it actually results in only part of the image, which appears to be a transformed version of the texture.
Thanks for your attention.
vertex shader:
void main()
{
gl_Position = ftransform();
gl_TexCoord[0] = gl_MultiTexCoord0;
}
fragment shader:
I removed all the blurring code, just for testing.
uniform sampler2D texture_0;
uniform float switcher;
vec4 cor;
void main()
{
cor = vec4(0.0,0.0,0.0,0.0);
vec2 position = gl_TexCoord[0].xy;
cor = texture2D(texture_0,position);
gl_FragColor = cor;
}
initializations:
glGenTextures( 1, &tex1 );
glBindTexture( GL_TEXTURE_2D, tex1 );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img1->getWidth(),img1->getHeight(), 0,GL_RGB, GL_UNSIGNED_BYTE, data1);
glBindTexture(GL_TEXTURE_2D, 0);
glGenTextures( 1, &tex2 );
glBindTexture( GL_TEXTURE_2D, tex2 );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img1->getWidth(),img1->getHeight(), 0,GL_RGB, GL_UNSIGNED_BYTE, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture( GL_TEXTURE_2D, tex1 );
glActiveTexture(GL_TEXTURE1);
glBindTexture( GL_TEXTURE_2D, tex2 );
glGenFramebuffers(1,&framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER,framebuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER,GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex2,0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
render:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glUniform1i(loc_u_texture_0, 0);
glUniform1f(loc_switcher_1, 0);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glNormal3f(0, 1, 0);
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-2, -2, -10);
glTexCoord2f(1, 0);
glVertex3f(2, -2, -10);
glTexCoord2f(1, 1);
glVertex3f(2, 2, -10);
glTexCoord2f(0, 1);
glVertex3f(-2, 2, -10);
glEnd();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
usleep(50);
glUniform1i(loc_u_texture_0, 1);
glUniform1f(loc_switcher_1, 1);
glNormal3f(0, 1, 0);
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex3f(-2, -2, -10);
glTexCoord2f(1, 0);
glVertex3f(2, -2, -10);
glTexCoord2f(1, 1);
glVertex3f(2, 2, -10);
glTexCoord2f(0, 1);
glVertex3f(-2, 2, -10);
glEnd();
glutSwapBuffers();
Use the following lines to draw a quad:
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f);
glVertex3f(-1.0f, -1.0f, 0.0f);
glTexCoord2f(1.0f, 0.0f);
glVertex3f(1.0f, -1.0f, 0.0f);
glTexCoord2f(1.0f, 1.0f);
glVertex3f(1.0f, 1.0f, 0.0f);
glTexCoord2f(0.0f, 1.0f);
glVertex3f(-1.0f, 1.0f, 0.0f);
glEnd();
When you reset both the projection and the modelview matrices, your primitives are not transformed and you can directly draw objects in clipping space (which is a cube with corners (-1,-1,-1) and (1,1,1)).
Also, don't use the following:
glActiveTexture(GL_TEXTURE1);
glBindTexture( GL_TEXTURE_2D, tex2 );
If you are not going to use more than one input sampler per shader at the same time, you only need the first texture unit, GL_TEXTURE0, which is active by default.
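A sketch of how the two passes could then look using only unit GL_TEXTURE0 (names taken from the question; tex1 is the source image, tex2 is the FBO color attachment):
// Pass 1: read tex1, render into the FBO whose color attachment is tex2
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, tex1);
glUniform1i(loc_u_texture_0, 0); // the sampler always reads unit 0
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
// ... draw the full-screen quad ...
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// Pass 2: read tex2 (the result of pass 1), render to the default framebuffer
glBindTexture(GL_TEXTURE_2D, tex2); // rebind on the same unit 0
// ... draw the full-screen quad again ...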

Is there a way to use GL_ALPHA as the output format of an FBO texture?

I am using the FTGL font library.
The FTGL function that creates the glyph image has this code:
glBindTexture(GL_TEXTURE_2D, textID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, textureWidth, textureHeight,0, GL_ALPHA, GL_UNSIGNED_BYTE, textureMemory);
The format is GL_ALPHA.
When I render images (GL_RGBA) to an FBO, they look OK.
But when I render fonts to the FBO texture, there are problems with colour and alpha.
The image should be opened in an editor that doesn't touch the pixels (a browser applies anti-aliasing).
Code:
/// Step 1: create the FBO
// Create the color buffer
glGenTextures(1, &id_tex);
glBindTexture(GL_TEXTURE_2D, id_tex);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, wid, hei, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL); // GL_RGBA GL_ALPHA
// Create the frame buffer
glGenFramebuffersEXT(1, &id_buf);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, id_buf);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, id_tex, 0);
GLenum err = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT);
assert(err == GL_FRAMEBUFFER_COMPLETE_EXT);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
/// Step 2: render to the FBO
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, id_buf);
glEnable(GL_TEXTURE_2D);
glEnable(GL_ALPHA_TEST);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glClearColor (0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0,0, 1920 , 1080);
ftgl::render();
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
glDisable(GL_TEXTURE_2D);
glFlush ();
glDisable(GL_BLEND);
glDisable(GL_ALPHA_TEST);
glViewport (0, 0, 1920, 1080);
glClearColor (0.0f, 0.0f, 0.0f, 1.0f);
/// Step 3: render the texture
glEnable(GL_TEXTURE_2D);
glEnable(GL_ALPHA_TEST);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_COLOR);
glBindTexture(GL_TEXTURE_2D, id_tex);
glBegin(GL_QUADS);
glTexCoord2d(0,0); glVertex3f(point4.X , point4.Y , point4.Z);
glTexCoord2d(0,1); glVertex3f(point1.X , point1.Y , point1.Z);
glTexCoord2d(1,1); glVertex3f(point2.X , point2.Y , point2.Z);
glTexCoord2d(1,0); glVertex3f(point3.X , point3.Y , point3.Z);
glEnd();
glDisable(GL_BLEND);
glDisable(GL_ALPHA_TEST);
glDisable(GL_TEXTURE_2D);

How to determine which error I'm receiving when calling glTexSubImage2D

I'm trying to draw a texture with CAOpenGLLayer, but I receive GL_INVALID_OPERATION when I call glTexSubImage2D. According to this document: http://www.opengl.org/sdk/docs/man/xhtml/glTexSubImage2D.xml it should be one of the errors described there, but it seems I'm not breaking any of the rules described there and I don't understand what I'm doing wrong. Here is the code that I'm trying to run:
- (CGLContextObj)copyCGLContextForPixelFormat:(CGLPixelFormatObj)pixelFormat
{
uint32_t plugin_width = 32, plugin_height = 32;
texture_data = new uint8_t[plugin_width * plugin_height * 4];
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glScalef(1.0f, -1.0f, 1.0f);
glOrtho(0, plugin_width , 0, plugin_height , -1.0, 1.0);
glActiveTexture(GL_TEXTURE0);
glEnable(GL_TEXTURE_RECTANGLE_EXT);
glGenTextures(1, &textureName);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, textureName);
glTextureRangeAPPLE(GL_TEXTURE_RECTANGLE_EXT, plugin_width * plugin_height * 4, texture_data);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE , GL_STORAGE_SHARED_APPLE);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, GL_RGBA, plugin_width, plugin_height, 0, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8, texture_data);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 0);
return [super copyCGLContextForPixelFormat:pixelFormat];
}
- (void)drawInCGLContext:(CGLContextObj)ctx pixelFormat:(CGLPixelFormatObj)pf forLayerTime:(CFTimeInterval)t displayTime:(const CVTimeStamp *)ts
{
uint32_t plugin_width = 32, plugin_height = 32;
uint32_t width = plugin_width;
uint32_t height = plugin_height;
srand(time(NULL));
for (int i = 0; i < 32*32*4; i ++)
texture_data[i] = rand() % 255;
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, textureName);
glTexSubImage2D (GL_TEXTURE_RECTANGLE_EXT, 0, 0, 0, width, height,GL_BGRA, GL_UNSIGNED_INT_8_8_8_8 ,texture_data);
assert(glGetError() == GL_NO_ERROR); // here i'm getting GL_INVALID_OPERATION
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glBegin(GL_QUADS);
glTexCoord2f(0.0f,0.0f);
glVertex2f(0.0f, 0.0f);
glTexCoord2f(1.0f,0.0f);
glVertex2f(width, 0);
glTexCoord2f(0.0f,1.0f);
glVertex2f(0, height);
glTexCoord2f(1.0f,1.0f);
glVertex2f(width, height);
glEnd();
[super drawInCGLContext:ctx pixelFormat:pf forLayerTime:t displayTime:ts];
}
Did you try to replace the external format GL_UNSIGNED_INT_8_8_8_8 with GL_UNSIGNED_BYTE in both calls (glTexImage2D and glTexSubImage2D)?
That might help.
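For reference, the suggested calls would then look like this (same parameters as in the question, only the type changed):
glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, GL_RGBA, plugin_width, plugin_height, 0, GL_BGRA, GL_UNSIGNED_BYTE, texture_data);
glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, 0, 0, width, height, GL_BGRA, GL_UNSIGNED_BYTE, texture_data);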