I'm currently using a VBO to draw a series of cubes in OpenGL using a GLSL shader that performs multitexturing (with 5 textures). It works fine if I don't pass the textures in. But if I try to use texturing at all, the whole screen is only drawn white (presumably the last clear color). If I pass the textures in using immediate mode (without the VBO) then it's also fine there. I can't tell why there's a problem.
Code:
// create vertex/normal/color/texcoord VBO
// Layout: one buffer holding four packed sub-ranges, in order:
//   [vertices][normals][colors][texcoords]
// NOTE: the sizeof() arithmetic is only valid if vert_buf/norm_buf/col_buf/
// tex_buf are true arrays in this scope, not pointers — TODO confirm.
glGenBuffersARB(1, &vboId);
glBindBufferARB(GL_ARRAY_BUFFER_ARB, vboId);
// Allocate the full range with a null pointer, then fill each sub-range.
glBufferDataARB(GL_ARRAY_BUFFER_ARB, sizeof(vert_buf)+sizeof(norm_buf)+sizeof(col_buf)+sizeof(tex_buf), 0, GL_STREAM_DRAW);
glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, 0, sizeof(vert_buf), vert_buf);
glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, sizeof(vert_buf), sizeof(norm_buf), norm_buf);
glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, sizeof(vert_buf)+sizeof(norm_buf), sizeof(col_buf), col_buf);
glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, sizeof(vert_buf)+sizeof(norm_buf)+sizeof(col_buf), sizeof(tex_buf), tex_buf);
glBindBufferARB(GL_ARRAY_BUFFER_ARB, 0);
// create geometry indices VBO (element indices live in their own buffer,
// bound to the GL_ELEMENT_ARRAY_BUFFER target)
glGenBuffersARB(1, &vboId2);
glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, vboId2);
glBufferDataARB(GL_ELEMENT_ARRAY_BUFFER_ARB, sizeof(index_buf), index_buf, GL_STATIC_DRAW_ARB);
glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, 0);
// bind vertex/normal/color/texcoord VBO
glBindBufferARB(GL_ARRAY_BUFFER_ARB, vboId);
// enable vertex arrays
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_VERTEX_ARRAY);
// specify vertex and index arrays with their offsets
// (with a VBO bound, the pointer argument is a byte offset into the buffer)
glVertexPointer(3, GL_FLOAT, 0, 0);
glNormalPointer(GL_FLOAT, 0, (void*)sizeof(vert_buf));
glColorPointer(3, GL_FLOAT, 0, (void*)(sizeof(vert_buf)+sizeof(norm_buf)));
// bind geometry indices VBO
glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, vboId2);
// NOTE(review): glIndexPointer specifies the *color-index* array (paletted
// rendering), NOT element indices. glDrawElements sources its indices from
// the buffer bound to GL_ELEMENT_ARRAY_BUFFER, so this call is misleading
// and unnecessary here.
glIndexPointer(GL_UNSIGNED_INT, 0, 0);
// set the texture units
// All five client texture units point at the SAME texcoord data at `start`.
// The unit numbers are derived from tex_id values — presumably 1..5 as the
// inline comments claim; TODO confirm they never collide or exceed the
// implementation's client texture unit count.
GLvoid* start = (void*)(sizeof(vert_buf)+sizeof(norm_buf)+sizeof(col_buf));
glClientActiveTexture(GL_TEXTURE0 + (GLuint)vid_regions[0].tex_id); // same as GL_TEXTURE1
glTexCoordPointer(2, GL_FLOAT, 0, start);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + (GLuint)vid_regions[1].tex_id); // same as GL_TEXTURE2
glTexCoordPointer(2, GL_FLOAT, 0, start);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + (GLuint)vid_regions[2].tex_id); // same as GL_TEXTURE3
glTexCoordPointer(2, GL_FLOAT, 0, start);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + noiseTexID); // same as GL_TEXTURE4
glTexCoordPointer(2, GL_FLOAT, 0, start);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + lutTexID); // same as GL_TEXTURE5
glTexCoordPointer(2, GL_FLOAT, 0, start);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
// draw VBOs
glDrawElements(GL_TRIANGLES, 36*ROWS*COLS, GL_UNSIGNED_INT, 0);
// disable arrays
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
// disable texture arrays
glClientActiveTexture(GL_TEXTURE0 + (GLuint)vid_regions[0].tex_id);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + (GLuint)vid_regions[1].tex_id);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + (GLuint)vid_regions[2].tex_id);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + noiseTexID);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTexture(GL_TEXTURE0 + lutTexID);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
// NOTE(review): the client texture unit is left at GL_TEXTURE0 + lutTexID.
// Call glClientActiveTexture(GL_TEXTURE0) here so later fixed-function code
// isn't silently redirected to the wrong unit.
// unbind VBOs
glBindBufferARB(GL_ARRAY_BUFFER_ARB, 0);
glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, 0);
Note: the creation of the VBOs is called during the initial setup phase. The drawing happens in a drawing GLUT routine.
Try resetting your texture level back to 0:
glClientActiveTexture(GL_TEXTURE0);
It's not uncommon for weirdness to happen when the active texture level isn't reset (say if you're using FBOs, etc).
It might also be helpful to set your clear colour to something other than black/white/transparent to see if the screen is being drawn with the clear colour or if you're somehow seeing a texel stretched or something else.
Related
I've been getting back into OpenGL recently after transitioning to linux. For some reason or another, things like glGenBuffers, glBindBuffer, etc, aren't present with the normal GL headers, and cause my program to crash using GLEW. To get around this I've been using GLES2.
And now the problem:
#include <SFML/Graphics.hpp>
#include <GLES2/gl2.h>
#include <GL/gl.h>
int main() {
sf::RenderWindow window(sf::VideoMode(640, 480), "OpenGL Tests");
glOrtho(0, 640, 0, 480, -1, 1);
glEnable(GL_TEXTURE_2D);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glClearColor(0, 0, 0, 0);
//x, y, r, g, b
GLfloat triangleVertices[15] = {
10, 10, 1, 0, 0,
110, 10, 0, 1, 0,
60, 96.6f, 0, 0, 1
};
GLuint triangleBuf;
glGenBuffers(1, &triangleBuf);
glBindBuffer(GL_VERTEX_ARRAY, triangleBuf);
glBufferData(GL_VERTEX_ARRAY, sizeof(triangleVertices), triangleVertices, GL_STATIC_DRAW);
glBindBuffer(GL_VERTEX_ARRAY, 0);
while(window.isOpen()) {
sf::Event event;
while(window.pollEvent(event)) {
if(event.type == sf::Event::Closed) window.close();
}
glClear(GL_COLOR_BUFFER_BIT);
glBindBuffer(GL_VERTEX_ARRAY, triangleBuf);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(2, GL_FLOAT, 3, 0);
glEnableClientState(GL_COLOR_ARRAY);
glColorPointer(3, GL_FLOAT, 2, (GLfloat*) 2);
glDrawArrays(GL_TRIANGLES, 0, 3);
window.display();
sf::sleep(sf::milliseconds(50));
}
glDeleteBuffers(1, &triangleBuf);
}
This is how I remember using buffers, though to be honest I don't trust my memory. With good reason it seems, since this crashes immediately after the window opens.
I've tried changing the last argument to glDrawArrays to 1 instead of 3. I've also tried binding the buffer twice, first as GL_VERTEX_ARRAY, followed by glVertexPointer, second as GL_COLOR_ARRAY, followed by glColorPointer. No dice.
The 3rd parameter of glVertexPointer and glColorPointer is the byte offset between consecutive attributes (the stride). If a named buffer object is bound, then the 4th parameter is treated as a byte offset into that buffer, too.
The stride parameter has to be 5 * sizeof(GLfloat), because each attribute tuple consists of 5 (GLfloat) components (X, Y, R, G, B). The offset for the vertex coordinates is 0, because they are at the beginning. The offset for the color is 2 * sizeof(GLfloat), because the color comes after the 2 components of the vertex coordinate.
Furthermore, GL_VERTEX_ARRAY is not a valid buffer target. The buffer target for vertex attributes is GL_ARRAY_BUFFER. The wrong usage of GL_VERTEX_ARRAY will cause GL_INVALID_ENUM errors. Error information can be retrieved with glGetError.
Do the following changes:
glGenBuffers(1, &triangleBuf);
// GL_ARRAY_BUFFER is the correct binding target for vertex-attribute data
glBindBuffer(GL_ARRAY_BUFFER, triangleBuf);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangleVertices), triangleVertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// At draw time: rebind, then describe the interleaved layout in BYTES
glBindBuffer(GL_ARRAY_BUFFER, triangleBuf);
glEnableClientState(GL_VERTEX_ARRAY);
// 5 floats per vertex (x, y, r, g, b); position starts at byte offset 0
glVertexPointer(2, GL_FLOAT, 5*sizeof(GLfloat), 0);
glEnableClientState(GL_COLOR_ARRAY);
// color starts after the 2 position floats; the pointer argument is a
// byte offset into the bound VBO, cast to a pointer type
glColorPointer(3, GL_FLOAT, 5*sizeof(GLfloat), (GLfloat*)(2*sizeof(GLfloat)));
Since you do not bind a texture and have not even specified texture coordinates, do not enable two-dimensional texturing:
glEnable(GL_TEXTURE_2D);
Solved by adding glDepthFunc(GL_ALWAYS);
I'm trying to blend two textures on a heightfield with glDrawElements. Normalpointer and vertexpointer data are the same but the TexCoordPointer are different for the two textures. No matter which BlendFunc I try there's always only one of the textures visible although large parts of texture_two are transparent. Does glDrawElements work with gl_blend or am I doing it wrong?
// Two-pass texture blend over identical geometry: draw texture_one, then
// draw texture_two on top with blending.
glEnable(GL_BLEND);
// result = src + dst*(1 - src_alpha): texture_two must carry meaningful
// alpha for this to blend — presumably its transparent parts have a=0;
// TODO confirm the texture format includes alpha.
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glBlendEquation(GL_FUNC_ADD);
// GL_ALWAYS lets the second, co-planar pass through the depth test.
// NOTE(review): GL_LEQUAL also works here and keeps normal occlusion
// against other geometry (see the answer below).
glDepthFunc(GL_ALWAYS);
// Pass 1: base texture
glNormalPointer(GL_FLOAT, 0, normals);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texture_ind_one);
glBindTexture(GL_TEXTURE_2D, texture_one);
glDrawElements(GL_TRIANGLES, indicies_num, GL_UNSIGNED_INT, indices);
// Pass 2: overlay texture, same vertices/normals, different texcoords
glNormalPointer(GL_FLOAT, 0, normals);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texture_ind_two);
glBindTexture(GL_TEXTURE_2D, texture_two);
glDrawElements(GL_TRIANGLES, indicies_num, GL_UNSIGNED_INT, indices);
Thank you very much!
You need to set an appropriate depth function via glDepthFunc using something that has "equals" in it (probably GL_LEQUAL), since you are drawing twice at the same depth.
You can also consider blending the textures yourself inside a fragment shader.
I've been trying to get VBOs working in my latest project and the program segfaults when I try to call glDrawArrays.
Generating the VBO
bool Renderer::init()
{
GLfloat verticies[]=
{0,0,
0,32,
32,32,
32, 0};
glGenBuffersARB(1, &vboTest);
glBindBufferARB(GL_ARRAY_BUFFER_ARB, vboTest);
glBufferDataARB(GL_ARRAY_BUFFER_ARB, sizeof(verticies), verticies, GL_STATIC_DRAW);
}
And then the rendering the VBO
// Draws the quad stored in vboTest each frame.
// NOTE(review): per the follow-up below, the original segfault came from
// the GL function pointers not being loaded correctly (GL_EXT_PROTOTYPES
// defined instead of re-prototyping after SDL_GL_GetProcAddress), not from
// this call sequence itself.
void Renderer::renderScene()
{
glClear(GL_COLOR_BUFFER_BIT);
glBindBufferARB(GL_ARRAY_BUFFER_ARB, vboTest);
glEnableClientState(GL_VERTEX_ARRAY);
// 2 floats per vertex, tightly packed, starting at byte offset 0 of the VBO
glVertexPointer(2, GL_FLOAT, 0, 0);
// GL_QUADS: legacy primitive, fine in compatibility contexts
glDrawArrays(GL_QUADS, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glLoadIdentity();
SDL_GL_SwapWindow(window);
}
Nevermind, it was a stupid arbitrary error. I loaded the functions using SDL_GL_GetProcAddress, but then in the header for the renderer class defined GL_GLEXT_PROTOTYPES instead of re-prototyping the functions.
I'm drawing my points like that :
// 2 components (x, y) per vertex
TAB_PAS = 2;
// Each test[i] is one terrain patch: point its vertex data, then draw the
// line list for that patch from client-memory index arrays.
glVertexPointer(TAB_PAS,GL_FLOAT,0,test[0].send_terrain());
glDrawElements( GL_LINES, indice_degra_de.size(), GL_UNSIGNED_INT, indice_degra_de.constData());
glVertexPointer(TAB_PAS,GL_FLOAT,0,test[1].send_terrain());
glDrawElements( GL_LINES, indice_degra.size(), GL_UNSIGNED_INT, indice_degra.constData());
glVertexPointer(TAB_PAS,GL_FLOAT,0,test[2].send_terrain());
glDrawElements( GL_LINES, indice_degra_de.size(), GL_UNSIGNED_INT, indice_degra_de.constData());
// NOTE(review): this last pointer has no glDrawElements after it in this
// excerpt — presumably the draw call was cut off when posting.
glVertexPointer(TAB_PAS,GL_FLOAT,0,test[3].send_terrain());
It draws a big terrain.
So now, I want, for example, to apply a floor texture. I saw a function glTexCoordPointer but I don't know how to use it:
glTexCoordPointer(2, GL_FLOAT, 0, test[0].send_terrain());
// Something like that ?
I already uploaded my picture.tga, so now the problem is to apply it.
You use glTexCoordPointer in the same way as glVertexPointer:
glVertexPointer(TAB_PAS, GL_FLOAT, 0, test[0].send_terrain());
// One (s, t) pair per vertex, from a parallel array of the same length
glTexCoordPointer(2, GL_FLOAT, 0, test[0].send_texCoords());
glDrawElements(GL_LINES, indice_degra_de.size(), GL_UNSIGNED_INT, indice_degra_de.constData());
You need to specify the texture coordinates in another array, where every pair of texture coordinates correspond to one vertex in your terrain data.
To have OpenGL actually use your Texture Coordinates:
glEnableClientState(GL_TEXTURE_COORD_ARRAY); // Before calling DrawElements
And of course you must have the texture bound and enabled. Complete example, assuming you generate texture coordinates and have your .tga texture uploaded to OpenGL:
// Complete fixed-function texturing example for the terrain patches.
// FIX: the original mixed up `text[i]` (undeclared) with `test[i]`, and
// used send_texCoord() in some lines vs send_texCoords() in others; both
// are made consistent with the snippet earlier in this answer.
glEnable(GL_TEXTURE_2D);                      // fixed-function texturing on
glBindTexture(GL_TEXTURE_2D, textureHandle);  // the uploaded .tga texture
glEnableClientState(GL_TEXTURE_COORD_ARRAY);  // make DrawElements read texcoords
glVertexPointer(TAB_PAS, GL_FLOAT, 0, test[0].send_terrain());
glTexCoordPointer(2, GL_FLOAT, 0, test[0].send_texCoords());
glDrawElements(GL_LINES, indice_degra_de.size(), GL_UNSIGNED_INT, indice_degra_de.constData());
glVertexPointer(TAB_PAS, GL_FLOAT, 0, test[1].send_terrain());
glTexCoordPointer(2, GL_FLOAT, 0, test[1].send_texCoords());
glDrawElements(GL_LINES, indice_degra.size(), GL_UNSIGNED_INT, indice_degra.constData());
glVertexPointer(TAB_PAS, GL_FLOAT, 0, test[2].send_terrain());
glTexCoordPointer(2, GL_FLOAT, 0, test[2].send_texCoords());
glDrawElements(GL_LINES, indice_degra_de.size(), GL_UNSIGNED_INT, indice_degra_de.constData());
glVertexPointer(TAB_PAS, GL_FLOAT, 0, test[3].send_terrain());
glTexCoordPointer(2, GL_FLOAT, 0, test[3].send_texCoords());
// and so on...
I hope this is what you need
I'm attempting to implement this paper. I've have most of it down, but the part about sending arbitrary, non-geometric data to the shader for use in determining and displaying geometric edges is causing me problems. I've managed to successfully send most of my data just fine using what I know of VBOs. However, I need to send a large amount of data, which necessitates the use of multiple texture coordinates.
I've already implemented several variations of what I believe to be the correct way of setting up multiple sets of texture coordinates, and followed the instructions of many forum posters. No solutions work thus far.
For context, the program is sending 4 nearly identical copies of a set of 4 vertices, 2 normal vectors, a float, and an integer (stored as a float) for each unique edge in the model. I've laid out the data like this:
v0 is stored in gl_Vertex (vec3)
v1 is stored in gl_Color (vec3)
v2 is stored in gl_MultiTexCoord0 (vec3)
v3 is stored in gl_MultiTexCoord1 (vec3)
n0 is stored in gl_Normal (vec3)
n1 is stored in gl_SecondaryColor (vec3)
r and i are stored in gl_MultiTexCoord2 (vec2)
The only difference between the 4 copies is the i value, which helps determine how to organize the vertices if and when a drawable edge is found.
As you can see, I need at least 3 texture coordinates. I was able to get the first one working (gl_MultiTexCoord0) just fine, but any following texture coordinates, though on the graphics card, appear to have uncontrollable behavior, sometimes working, but usually not.
My rendering function used to look like this:
// Feeds per-edge data to the edge shader through the conventional
// attributes and three texcoord units, then draws one point per edge.
// FIX: GL_TEXTURE_COORD_ARRAY is per *client texture unit* state — the
// single glEnableClientState at the top only enabled unit 0, so
// gl_MultiTexCoord1 (v3) and gl_MultiTexCoord2 (r, i) were never sourced
// from the buffers. Each unit is now enabled while it is active, and all
// three are disabled (and the client unit reset to 0) after the draw.
void Mesh::RenderLineEdgesGPU()
{
    // Enable client state for the conventional arrays
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_COLOR_ARRAY);
    glEnableClientState(GL_SECONDARY_COLOR_ARRAY);
    // Turn on edge shader
    edgeProgram.Activate();
    // Link buffers
    // v0 -> gl_Vertex
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[0]);
    glVertexPointer(3, GL_FLOAT, 0, 0);
    // v1 -> gl_Color
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[1]);
    glColorPointer(3, GL_FLOAT, 0, 0);
    // v2 -> gl_MultiTexCoord0 (glClientActiveTextureARB selects which unit
    // the following Enable/TexCoordPointer calls affect)
    glClientActiveTextureARB(GL_TEXTURE0_ARB);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[2]);
    glTexCoordPointer(3, GL_FLOAT, 0, 0);
    // v3 -> gl_MultiTexCoord1
    glClientActiveTextureARB(GL_TEXTURE1_ARB);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[3]);
    glTexCoordPointer(3, GL_FLOAT, 0, 0);
    // n0 -> gl_Normal
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[4]);
    glNormalPointer(GL_FLOAT, 0, 0);
    // n1 -> gl_SecondaryColor
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[5]);
    glSecondaryColorPointer(3, GL_FLOAT, 0, 0);
    // r and i -> gl_MultiTexCoord2
    glClientActiveTextureARB(GL_TEXTURE2_ARB);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[6]);
    glTexCoordPointer(2, GL_FLOAT, 0, 0);
    // Indices
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER_ARB, edgeMeshHandles[7]);
    // Draw
    glDrawElements(GL_POINTS, EdgeVertexQuantity, GL_UNSIGNED_INT, 0);
    // Turn off edge shader
    edgeProgram.Deactivate();
    // Disable every texcoord unit we touched, leaving unit 0 active
    glClientActiveTextureARB(GL_TEXTURE2_ARB);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glClientActiveTextureARB(GL_TEXTURE1_ARB);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glClientActiveTextureARB(GL_TEXTURE0_ARB);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    // Disable the conventional arrays
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_COLOR_ARRAY);
    glDisableClientState(GL_SECONDARY_COLOR_ARRAY);
}
This was my original one. It definitely works for v0, v1, and v2. It appears to work for "r and i", but that could be an illusion. I don't have the ability to test n0 or n1 yet. v3 definitely DOES NOT work. As you can see, I'm drawing them as points, which tells me if they are there or not (via the shader). v0, v1, and v2 are all there. Attempting to do the same for v3 yields either a single point at the origin or nothing at all.
After looking at suggestions online, here's my new setup:
// Second attempt (kept for reference; see the answer below for why it
// fails). Two defects, per the answer:
//  1. Each texcoord unit's GL_TEXTURE_COORD_ARRAY is disabled again right
//     after glTexCoordPointer — i.e. BEFORE glDrawElements runs — so units
//     0 and 1 supply no data at draw time.
//  2. Unit 2 (GL_TEXTURE2_ARB) is never enabled at all.
// Also, glActiveTextureARB + glEnable/glDisable(GL_TEXTURE_2D) affect only
// fixed-function texturing and are ignored when a shader is active.
void Mesh::RenderLineEdgesGPU()
{
// Enable client state
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_SECONDARY_COLOR_ARRAY);
// Turn on edge shader
edgeProgram.Activate();
// Link buffers
// v0
glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[0]);
glVertexPointer(3, GL_FLOAT, 0, 0);
// v1
glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[1]);
glColorPointer(3, GL_FLOAT, 0, 0);
// v2
glClientActiveTextureARB(GL_TEXTURE0_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glActiveTextureARB(GL_TEXTURE0_ARB);
glEnable(GL_TEXTURE_2D);
glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[2]);
glTexCoordPointer(3, GL_FLOAT, 0, 0);
glDisable(GL_TEXTURE_2D);
// BUG: disables unit 0's array before the draw
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
// v3
glClientActiveTextureARB(GL_TEXTURE1_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glActiveTextureARB(GL_TEXTURE1_ARB);
glEnable(GL_TEXTURE_2D);
glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[3]);
glTexCoordPointer(3, GL_FLOAT, 0, 0);
glDisable(GL_TEXTURE_2D);
// BUG: disables unit 1's array before the draw
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
// n0
glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[4]);
glNormalPointer(GL_FLOAT, 0, 0);
// n1
glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[5]);
glSecondaryColorPointer(3, GL_FLOAT, 0, 0);
// r and i
glClientActiveTextureARB(GL_TEXTURE2_ARB);
// BUG: unit 2's GL_TEXTURE_COORD_ARRAY is never enabled
glBindBufferARB(GL_ARRAY_BUFFER_ARB, edgeMeshHandles[6]);
glTexCoordPointer(2, GL_FLOAT, 0, 0);
// Indices
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER_ARB, edgeMeshHandles[7]);
// Draw
glDrawElements(GL_POINTS, EdgeVertexQuantity, GL_UNSIGNED_INT, 0);
// Turn off edge shader
edgeProgram.Deactivate();
// Disable client state
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_SECONDARY_COLOR_ARRAY);
}
Notice how I've done the glEnableClientState / glDisableClientState calls around the actual "loading" of each texture coordinate list. I also use glActiveTextureARB and glEnable(GL_TEXTURE_2D). While I think I might understand why glActiveTextureARB is needed here, the other one baffles me. According to the GLSL Common Mistakes page, you're not supposed to use glEnable(GL_TEXTURE_2D) when you make your own shaders, since the use of shaders ignores this call anyway.
So that's it. I've gotten this far without being able to find any tutorials specifically addressing how to send non-texture coordinate data in texture coordinates. Perhaps if someone knows a tutorial on that, my problem would be alleviated. Thanks for your time!
glClientActiveTextureARB changes which specific texture coordinate unit following calls to glEnableClientState(GL_TEX_COORD_ARRAY) and glTexCoordPointer will alter.
glActiveTextureARB affects glEnable(GL_TEXTURE_2D), which, as you mentioned, you don't need for shaders.
If you look at your code closely, picking only those 5 calls (and their equivalent Disable), here is what you have:
// The texcoord-related calls extracted from the code above, in order:
glClientActiveTextureARB(GL_TEXTURE0_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glActiveTextureARB(GL_TEXTURE0_ARB);
glEnable(GL_TEXTURE_2D);
glTexCoordPointer(3, GL_FLOAT, 0, 0);
glDisable(GL_TEXTURE_2D);
glDisableClientState(GL_TEXTURE_COORD_ARRAY); // unit 0 off again before the draw
glClientActiveTextureARB(GL_TEXTURE1_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glActiveTextureARB(GL_TEXTURE1_ARB);
glEnable(GL_TEXTURE_2D);
glTexCoordPointer(3, GL_FLOAT, 0, 0);
glDisable(GL_TEXTURE_2D);
glDisableClientState(GL_TEXTURE_COORD_ARRAY); // unit 1 off again before the draw
glClientActiveTextureARB(GL_TEXTURE2_ARB);
glTexCoordPointer(2, GL_FLOAT, 0, 0); // unit 2 is never enabled
glDrawElements(GL_POINTS, EdgeVertexQuantity, GL_UNSIGNED_INT, 0);
Ok, we already said that glActiveTextureARB and glEnable are not useful (by the way, you enable/disable GL_TEXTURE_2D without any draw in between, that's not useful), so remove them:
// Same sequence with the shader-irrelevant glActiveTextureARB /
// glEnable(GL_TEXTURE_2D) pairs stripped out:
glClientActiveTextureARB(GL_TEXTURE0_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(3, GL_FLOAT, 0, 0);
glDisableClientState(GL_TEXTURE_COORD_ARRAY); // still disabled before the draw
glClientActiveTextureARB(GL_TEXTURE1_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(3, GL_FLOAT, 0, 0);
glDisableClientState(GL_TEXTURE_COORD_ARRAY); // still disabled before the draw
glClientActiveTextureARB(GL_TEXTURE2_ARB);
glTexCoordPointer(2, GL_FLOAT, 0, 0); // still never enabled
glDrawElements(GL_POINTS, EdgeVertexQuantity, GL_UNSIGNED_INT, 0);
What stands out now ? 2 issues:
You Disable your client state before ever executing Draw
You don't set the Enable bit for TEXTURE2
What should you write ? Something along those lines: (Beware, you'll need to add the BindBuffer calls back to each Pointer call):
// Recommended ordering: enable + point every unit BEFORE the draw,
// disable every unit AFTER the draw.
// texture coord 0
glClientActiveTextureARB(GL_TEXTURE0_ARB); // program texcoord unit 0
glEnableClientState(GL_TEXTURE_COORD_ARRAY); // enable array data to shader
glTexCoordPointer(3, GL_FLOAT, 0, 0); // say what data
// texture coord 1
glClientActiveTextureARB(GL_TEXTURE1_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(3, GL_FLOAT, 0, 0);
// texture coord 2
glClientActiveTextureARB(GL_TEXTURE2_ARB);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2, GL_FLOAT, 0, 0);
// All three units are enabled at draw time
glDrawElements(GL_POINTS, EdgeVertexQuantity, GL_UNSIGNED_INT, 0);
// done with those texcoord units, turn them off
glClientActiveTextureARB(GL_TEXTURE0_ARB);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTextureARB(GL_TEXTURE1_ARB);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glClientActiveTextureARB(GL_TEXTURE2_ARB);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
Some more comments on BindBuffer: glBindBufferARB(GL_ARRAY_BUFFER_ARB,...) does not get affected by the glClientActiveTextureARB, but it does affect the next glTexCoordPointer call. In essence, think of glClientActiveTextureARB and glBindBufferARB as providing extra arguments to glTexCoordPointer.
Last thing, you probably want to group some of those VBOs in less buffers. Something for another question maybe ? (Hint, the 2 arguments to glTexCoordPointer don't have to be 0)