How to use glDrawElements with glBindVertexArray properly - C++

What I'm trying to do is use glDrawElements to draw without duplicating vertices, as follows:
Model ModelManager::CreateModel(std::vector<glm::vec3>& vertices, std::vector<uint16_t>& vertexIndeces)
{
    // Vertices
    GLuint vertexArray;
    glGenVertexArrays(1, &vertexArray);
    glBindVertexArray(vertexArray);

    GLuint vBufferId;
    glGenBuffers(1, &vBufferId);
    glBindBuffer(GL_ARRAY_BUFFER, vBufferId);
    glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);

    GLuint iBufferId;
    glGenBuffers(1, &iBufferId);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, iBufferId);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, vertexIndeces.size(), vertexIndeces.data(), GL_STATIC_DRAW);

    glBindVertexArray(0);
    return Model(vertexArray, vBufferId, iBufferId, vertexIndeces.size());
}
and then when I draw:
void Model::Draw()
{
    if (vertexArray)
    {
        isFinishedIniting = true;
        glBindVertexArray(vertexArray);
        glDrawElements(GL_TRIANGLES, elementCount, GL_UNSIGNED_SHORT, 0);
        glBindVertexArray(0);
    }
}
Vertex shader:
#version 120
void main()
{
    gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}
Fragment shader:
#version 120
void main()
{
    gl_FragColor = vec4(1.0, 0.0, 0.0, 0.0);
}
The .obj file I'm trying to load is simple; I made it by hand:
v 0.0 0.0 0.0
v 1.0 1.0 0.0
v -1.0 1.0 0.0
v -1.0 -1.0 0.0
v 1.0 -1.0 0.0
f 1/1/1 2/1/1 3/1/1
f 1/1/1 4/1/1 5/1/1
So it should show two red triangles, but it's not drawing anything to the screen!

There are a couple of problems in this code:
The sizes passed to glBufferData() look wrong:
glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
...
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vertexIndeces.size(), vertexIndeces.data(), GL_STATIC_DRAW);
Both vertices and vertexIndeces are vectors. The .size() method on a vector gives the number of elements, while glBufferData() expects the size in bytes. To fix this, change the code to:
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(vertices[0]),
             vertices.data(), GL_STATIC_DRAW);
...
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vertexIndeces.size() * sizeof(vertexIndeces[0]),
             vertexIndeces.data(), GL_STATIC_DRAW);
The API calls use a mix of fixed-function attributes and generic vertex attributes. Based on the version in the shader code, and on the shader code itself (particularly the use of gl_Vertex), you're using OpenGL 2.1 level shaders with fixed-function attributes. Therefore, you need to use glEnableClientState() and glVertexPointer() instead of glEnableVertexAttribArray() and glVertexAttribPointer():
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, 0);
You can use generic vertex attributes, but then you need to declare an attribute variable in the vertex shader rather than using gl_Vertex.
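For example, a minimal sketch of that alternative (the attribute name position and the program variable are illustrative):
#version 120
attribute vec3 position;
void main()
{
    gl_Position = gl_ModelViewProjectionMatrix * vec4(position, 1.0);
}
On the C++ side, bind the attribute to location 0 before linking, so it matches the glVertexAttribPointer(0, ...) call:
glBindAttribLocation(program, 0, "position");
glLinkProgram(program);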


Opengl 3.1 GLSL 140 Outputting White In Fragment Shader in C++

I am stuck using a laptop with no support for OpenGL 3.3 or anything higher than 3.1. This is problematic, as I am not entirely familiar with this version, so I am learning how to do things over again. My problem is that, for some reason, my fragment shader only outputs white. I'm not quite sure what's wrong, but I get the feeling it has something to do with the way I set the VBO data, as this has recently changed in my code. Right now I'm trying to convert my old rendering engine (written for OpenGL 3.3) to OpenGL 3.1 because of the limitations of my old laptop, which is why I am using a struct for the Vertex that includes BiTangent and Tangent values.
My Code:
struct Vertex {
    // Position
    glm::vec3 Position;
    // Normals
    glm::vec3 Normal;
    // Texture Coordinates
    glm::vec2 TexCoords;
    // BiTangent
    glm::vec3 BiTangent;
    // Tangent
    glm::vec3 Tangent;
};
struct Texture {
    unsigned int id;
    int number;
};
class Mesh {
    std::vector<Vertex> vertices;
    std::vector<unsigned int> indices;
    std::vector<Texture> textures;
    unsigned int VAO, VBO[5], EBO;
public:
    Mesh(std::vector<Vertex> vertices, std::vector<unsigned int> indices, std::vector<Texture> textures) {
        this->vertices = vertices;
        this->indices = indices;
        this->textures = textures;
        setupMesh();
    };
    void setupMesh() {
        glGenVertexArrays(1, &VAO);
        glGenBuffers(4, VBO);
        glGenBuffers(1, &EBO);
        glBindVertexArray(VAO);
        int stride = 14;
        // Position
        glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
        glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)0);
        glEnableVertexAttribArray(0);
        // Normal
        glBindBuffer(GL_ARRAY_BUFFER, VBO[1]);
        glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
        glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)3);
        glEnableVertexAttribArray(1);
        // Texture Coordinates
        glBindBuffer(GL_ARRAY_BUFFER, VBO[2]);
        glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
        glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)6);
        glEnableVertexAttribArray(2);
        // BiTangent
        glBindBuffer(GL_ARRAY_BUFFER, VBO[3]);
        glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
        glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)8);
        glEnableVertexAttribArray(3);
        // Tangent
        glBindBuffer(GL_ARRAY_BUFFER, VBO[4]);
        glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
        glVertexAttribPointer(4, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)11);
        glEnableVertexAttribArray(4);
        // Element Buffer Object
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), &indices[0], GL_STATIC_DRAW);
        // Unbind
        glBindVertexArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, 0);
    };
    void Draw(ShaderProgram shader) {
        for (int i = 0; i < textures.size(); i++) {
            // Give textures the proper id and bind them
            glActiveTexture(GL_TEXTURE0 + textures[i].number);
            glBindTexture(GL_TEXTURE_2D, textures[i].id);
        }
        // Draw
        glBindVertexArray(VAO);
        glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, 0);
        // Unbind
        glBindVertexArray(0);
    }
};
Vertex Shader:
#version 140 core
in vec3 aPos;
in vec3 aColor;
in vec2 aTexCoords;
in vec3 aBiTangent;
in vec3 aTangent;
out vec3 Color;
void main(void) {
    Color = vec3(aColor);
    gl_Position = vec4(aPos, 1.0);
}
Fragment Shader:
#version 140 core
out vec4 FragColor;
//precision highp float;
in vec3 Color;
void main(void)
{
    FragColor = vec4(1.0, 0.8, 0.8, 1.0);
}
I tried manually setting a color to override displaying the normals, but the output is still white.
EDIT:
If I could generate one VBO instead of 4, that would be useful. My question then is how I'm supposed to access the vertex data from the shaders when using one VBO.
If a named buffer object is bound, then the last parameter of glVertexAttribPointer is treated as a byte offset into the buffer object's data store.
Hence the offset has to be sizeof(float) * number rather than just number. For instance:
// instead of:
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)3);
// use:
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE,
                      stride * sizeof(float), (void*)(sizeof(float) * 3));
// instead of:
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)6);
// use:
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE,
                      stride * sizeof(float), (void*)(sizeof(float) * 6));
...
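As an aside, this also relates to the edit about using one VBO: since every VBO here is filled with the same interleaved Vertex data anyway, a single buffer is enough, and the stride and offsets can be taken from the struct layout itself. A minimal sketch, assuming the Vertex struct and the attribute locations 0 to 4 from the question (offsetof needs <cstddef>; note that GLSL 1.40 has no layout qualifier, so those locations have to be set with glBindAttribLocation before linking or queried with glGetAttribLocation):
#include <cstddef> // offsetof

// One interleaved VBO; stride and offsets come from the struct layout.
glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, Position));
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, Normal));
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, TexCoords));
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, BiTangent));
glVertexAttribPointer(4, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, Tangent));
for (int i = 0; i < 5; ++i)
    glEnableVertexAttribArray(i);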
Furthermore, the version specification is invalid. There is no GLSL 1.40 core version; the core token was only introduced in OpenGL Shading Language 1.50. Remove it in both the vertex and the fragment shader:
// instead of:
#version 140 core
// use:
#version 140
I recommend checking whether the shader compilation succeeded and whether the program object linked successfully. See Shader Compilation.
Whether a shader compiled successfully can be checked with glGetShaderiv and the parameter GL_COMPILE_STATUS; whether a program linked successfully can be checked with glGetProgramiv and the parameter GL_LINK_STATUS.
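A minimal sketch of such checks (the function names are illustrative):
#include <iostream>

// Logs the info log and returns false if the shader failed to compile.
bool shaderCompiled(GLuint shader)
{
    GLint status = GL_FALSE;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
    if (status != GL_TRUE)
    {
        GLchar log[1024];
        glGetShaderInfoLog(shader, sizeof(log), NULL, log);
        std::cerr << "compile error: " << log << std::endl;
    }
    return status == GL_TRUE;
}

// Logs the info log and returns false if the program failed to link.
bool programLinked(GLuint program)
{
    GLint status = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &status);
    if (status != GL_TRUE)
    {
        GLchar log[1024];
        glGetProgramInfoLog(program, sizeof(log), NULL, log);
        std::cerr << "link error: " << log << std::endl;
    }
    return status == GL_TRUE;
}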

OpenGL indexbuffer not working when loading objects one by one

I'm learning OpenGL and ran into an "obstacle".
I drew some houses (blocks and pyramids) using an index buffer.
This works fine when loading all the vertices (from all houses) into the vertex buffer and using one big index buffer. Now I want to animate the objects, so I load them into the vertex buffer one by one in order to apply transformations per object. The code is not much different from the one-big-buffer version, but when I do this I just see some random shapes shooting around on the screen. My code is as follows:
I have a World class which holds the list of 3D objects, one big list of all vertices (for trial purposes), one big list of indices (also trial), and a method to add an Object3D object to the world.
class World
{
public:
    World();
    ~World();
    vector<Object3D> objects;
    vector<glm::vec3> Vertices;
    vector<GLushort> Indices;
    void AddObject(Object3D &object);
};
Object3D class:
class Object3D
{
public:
    Object3D();
    ~Object3D();
    glm::vec3 Position;
    vector<glm::vec3> Vertices;
    vector<unsigned int> Indices;
};
The World::AddObject method simply adds the object to the objects list and appends its vertices and indices to the Vertices and Indices lists to create one big buffer:
void World::AddObject(Object3D &object) {
    int oldVerticesSize = Vertices.size();
    objects.push_back(object);
    Vertices.insert(Vertices.end(), object.Vertices.begin(), object.Vertices.end());
    for each (GLushort index in object.Indices)
    {
        Indices.push_back(index + oldVerticesSize);
    }
}
When I render the big buffer with all the vertices and indices (as shown below) it works fine.
void WorldRenderer::Render()
{
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glBindVertexArray(Vao); // use vao
    glDrawElements(GL_TRIANGLES, World.Indices.size(), GL_UNSIGNED_SHORT, 0);
    //glDrawArrays(GL_TRIANGLES, 0, World.Vertices.size());
    glBindVertexArray(0); // release vao
    //Model = glm::rotate(Model, 0.01f, glm::vec3(0.0f, 1.0f, 0.0f));
    Mvp = Projection * View * Model;
    glUniformMatrix4fv(UniformMvp, 1, GL_FALSE, glm::value_ptr(Mvp));
    glutSwapBuffers();
    //glutPostRedisplay();
}
When I loop through the objects and load each object's vertices into the buffer one object at a time (as shown below), it shows some random "shapes" which keep "shooting around" or rapidly changing. What am I doing wrong here?
Thanks in advance for any advice.
void WorldRenderer::Render()
{
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    int index = 0;
    for each (Object3D mesh in World.objects)
    {
        Mvp = Projection * View * Model;
        UpdateBuffer(mesh.Vertices, mesh.Indices);
        glBindVertexArray(Vao); // use vao
        glDrawElements(GL_TRIANGLES, mesh.Indices.size(), GL_UNSIGNED_SHORT, 0);
        glBindVertexArray(0); // release vao
        glUniformMatrix4fv(UniformMvp, 1, GL_FALSE, glm::value_ptr(Mvp));
        index++;
    }
    glutSwapBuffers();
}
The UpdateBuffer method:
void WorldRenderer::UpdateBuffer(vector<glm::vec3> vertices, vector<unsigned int> indices) {
    //fill Vbo
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    //fill ibo
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), indices.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    glBindVertexArray(Vao);
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
}
For the first Render method (without the loop) the buffers are created once in the init method which looks like this:
void WorldRenderer::Init(int argc, char** argv) {
    InitGlutGlew(argc, argv);
    InitMatrices();
    glDisable(GL_CULL_FACE);
    //-------------- init buffers --------------
    // vbo vertices
    glGenBuffers(1, &Vbo);
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    glBufferData(GL_ARRAY_BUFFER, World.Vertices.size() * sizeof(glm::vec3),
                 &World.Vertices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    //ibo
    glGenBuffers(1, &Ibo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, World.Indices.size() * sizeof(unsigned int),
                 &World.Indices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    // VAO setup
    glGenVertexArrays(1, &Vao);
    glBindVertexArray(Vao);
    // Bind vertices to vao
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    //Bind elements
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
    //------ init shaders -----------
    InitShader();
}
Looking at your UpdateBuffer function, the glBufferData call for the VBO is not loading the data properly.
The second parameter, vertices.size(), returns just the number of elements in the vector, not the actual size of the data in bytes.
So the second parameter should be vertices.size() * sizeof(glm::vec3).
The third parameter is OK, as .data() returns a pointer to the underlying array. If that doesn't work, pass the address of the first element directly, as shown below.
Overall it should be something like as shown below.
glBufferData(
    GL_ARRAY_BUFFER,
    vertices.size() * sizeof(glm::vec3),
    &vertices[0],
    GL_STATIC_DRAW
);
Check if it works.
Why are you seeing different results?
First render: your buffer contains all the world's vertex data contiguously when glDrawElements is called, so mesh 1's last vertex is continued by mesh 2's first vertex, and you see a kind of closed shape.
Second render: your buffer contains only one mesh's data at a time when glDrawElements is called, so the shape ends for each mesh after its draw call.
To obtain the same result as the first render, first update a single vertex buffer with the data of all meshes (use glBufferSubData), then call glDrawElements once. Then you will see the same result.
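A minimal sketch of that approach, reusing the Vbo from the code above (totalVertexCount is an assumed, precomputed sum of all mesh vertex counts):
// Allocate one buffer large enough for every mesh, then fill it piece by piece.
glBindBuffer(GL_ARRAY_BUFFER, Vbo);
glBufferData(GL_ARRAY_BUFFER, totalVertexCount * sizeof(glm::vec3), NULL, GL_DYNAMIC_DRAW);
GLintptr offset = 0;
for (Object3D &mesh : World.objects)
{
    glBufferSubData(GL_ARRAY_BUFFER, offset,
                    mesh.Vertices.size() * sizeof(glm::vec3), mesh.Vertices.data());
    offset += mesh.Vertices.size() * sizeof(glm::vec3);
}
glBindBuffer(GL_ARRAY_BUFFER, 0);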

glDrawElements not drawing

I create my buffers with the following code:
//generate buffers
glGenVertexArrays(1, &VAO);
//glGenBuffers(1, &EBO);
glGenBuffers(1, &VBO_vertices);
glGenBuffers(1, &VBO_colors);
glGenBuffers(1, &VBO_normals);
// Bind the Vertex Array Object first, then bind and set vertex buffer(s) and attribute pointer(s).
glBindVertexArray(VAO);
// Copy our vertices array in a buffer for OpenGL to use
glBindBuffer(GL_ARRAY_BUFFER, VBO_vertices);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*3*vertices.size(), &vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vTable.size() * sizeof(int), &vTable[0], GL_STATIC_DRAW);
// Position attribute
glVertexAttribPointer((GLuint)0, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0); //size = 3 (X,Y,Z)
glEnableVertexAttribArray(0);
//Buffer for color
glBindBuffer(GL_ARRAY_BUFFER, VBO_colors);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*3*vertices.size(), &v_color[0], GL_STATIC_DRAW);
// Color attribute
glVertexAttribPointer((GLuint)1, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0); //size = 3 (R,G,B)
glEnableVertexAttribArray(1);
//Buffer for normals
glBindBuffer(GL_ARRAY_BUFFER, VBO_normals);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*3*vertices.size(), &v_normals[0], GL_STATIC_DRAW);
//normal attribute
glVertexAttribPointer((GLuint)2, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0); //size = 3 (R,G,B)
glEnableVertexAttribArray(2);
// Unbind the VAO
glBindVertexArray(0);
My data are :
vector<vec3> vertices, v_normals,v_color;
vector<int> vTable;
I have vertices, normals and colors per vertex, plus an index table with the vertex indices of each triangle.
When I try to render this, nothing appears on the window.
glBindVertexArray(VAO); //Bind VAO
glDrawElements(GL_TRIANGLES, vTable.size(), GL_UNSIGNED_INT, &vTable[0]);
glBindVertexArray(0); //Unbind VAO
If I use this:
glDrawArrays(GL_TRIANGLES,0,vTable.size());
It draws something, but an incomplete object, as in the linked image:
[image]
Does anybody know what's happening? Thanks in advance.
Your glDrawElements call is wrong: the last parameter should be a byte offset into the GL_ELEMENT_ARRAY_BUFFER that holds the indices, not a pointer to system memory.
glDrawElements(GL_TRIANGLES, vTable.size(), GL_UNSIGNED_INT, 0);
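With the index buffer bound, that offset can also select a sub-range of the indices; for instance, a sketch that skips the first triangle (assuming the int indices used above):
// Byte offset of three indices: start drawing at the second triangle.
glDrawElements(GL_TRIANGLES, vTable.size() - 3, GL_UNSIGNED_INT, (void*)(3 * sizeof(int)));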

OPENGL: Square Class Using VBO

So, I am trying to make a basic "Drawable" class that handles a lot of the drawing for me in the background, and I want to use modern OpenGL (no begin and end statements). I keep getting a blank screen when I run draw().
I have run the debugger and checked: my array is initialized properly with 3 floats for position and 4 floats for color. Any idea what is going wrong? I am very new to this library. My example tries to draw a red square at (±0.5, ±0.5, 0.0), so the indexData array is just { 0, 1, 2, 3 }.
#define DRAWABLE_VERTEX_DEPTH 3
#define SIZE_OF_VERTEX_ELEMENT sizeof(GLfloat)
#define VERTEX_SIZE (DRAWABLE_VERTEX_DEPTH * SIZE_OF_VERTEX_ELEMENT)
#define DRAWABLE_COLOR_DEPTH 4
#define SIZE_OF_COLOR_ELEMENT sizeof(GLfloat)
#define COLOR_SIZE (DRAWABLE_COLOR_DEPTH * SIZE_OF_COLOR_ELEMENT)
#define INDEX_SIZE sizeof(GLushort)
#define DRAWABLE_STRIDE (VERTEX_SIZE + COLOR_SIZE)
inline Drawable(/*Arguments omitted for brevity...*/)
{
    //Standard initialization omitted....
    glGenBuffers(1, &vboID);
    glGenBuffers(1, &vioID);
    glGenVertexArrays(1, &vaoID);
    glBindBuffer(GL_ARRAY_BUFFER, vboID);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vioID);
    glBufferData(GL_ARRAY_BUFFER, (VERTEX_SIZE + COLOR_SIZE) * vertexCount, vertexData, drawType);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, INDEX_SIZE * indexCount, indexData, drawType);
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    //Generate Vertex Array
    glBindVertexArray(vaoID);
    glBindBuffer(GL_ARRAY_BUFFER, vboID);
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(0, DRAWABLE_VERTEX_DEPTH, GL_FLOAT, GL_FALSE, DRAWABLE_STRIDE, 0);
    glVertexAttribPointer(1, DRAWABLE_COLOR_DEPTH, GL_FLOAT, GL_FALSE, DRAWABLE_STRIDE, (GLbyte*)VERTEX_SIZE);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vioID);
    glBindVertexArray(0);
}
inline void draw()
{
    glBindVertexArray(vaoID);
    glDrawElements(drawMode, indexCount, GL_UNSIGNED_SHORT, NULL);
    glBindVertexArray(0);
}
GLSL Vertex Shader:
#version 430\r\n
in layout(location=0) vec3 inPosition;
in layout(location=1) vec4 inColor;
out vec4 outVertexColor;
void main()
{
    gl_Position = vec4(inPosition, 1.0);
    outVertexColor = inColor;
}
GLSL Fragment Shader:
#version 430\r\n
in vec4 outVertexColor;
out vec4 outFragmentcolor;
void main()
{
    outFragmentcolor = outVertexColor;
}
Apart from the issues mentioned in the comments, your index array is GLushort (unsigned 16-bit), while your draw call specifies GL_UNSIGNED_INT (unsigned 32-bit). Replace it with GL_UNSIGNED_SHORT.
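In general, the type argument must match the element type of the index array; a minimal sketch (GL_TRIANGLE_FAN is an assumption for the four-index square):
std::vector<GLushort> indexData = { 0, 1, 2, 3 };
// GLushort indices, so the draw call uses GL_UNSIGNED_SHORT.
glDrawElements(GL_TRIANGLE_FAN, (GLsizei)indexData.size(), GL_UNSIGNED_SHORT, NULL);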

OpenGL VAO always drawing first Vertex from origin

The title sums up my issue: no matter what I set the first vertex to, OpenGL always draws it at the origin. I've tried this on a school computer, where it wasn't a problem, but I'm not at school now and it's possible something I've changed is causing the issue. Regardless, I see no reason why this should happen. In case the syntax seems weird, this code is written in D but should be an almost seamless port from C.
My code is:
class Mesh
{
    this(vec3[] vertices, uint[] indices)
    {
        draw_count = indices.length;
        glGenVertexArrays(1, &vertex_array_object);
        glBindVertexArray(vertex_array_object);
        glGenBuffers(NUM_BUFFERS, vertex_array_buffers.ptr);
        glBindBuffer(GL_ARRAY_BUFFER, vertex_array_buffers[POSITION_VB]);
        glBufferData(GL_ARRAY_BUFFER, vertices.length * vertices.sizeof, vertices.ptr, GL_STATIC_DRAW);
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(cast(GLuint)0, 3, GL_FLOAT, GL_FALSE, 0, cast(void*)0);
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vertex_array_buffers[INDEX_VB]);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.length * indices.sizeof, indices.ptr, GL_STATIC_DRAW);
        glBindVertexArray(0);
    }

    void draw()
    {
        glBindVertexArray(vertex_array_object);
        glDrawElements(GL_TRIANGLES, draw_count, GL_UNSIGNED_INT, cast(const(void)*)0);
        glBindVertexArray(0);
    }

private:
    enum
    {
        POSITION_VB,
        INDEX_VB,
        NORMAL_VB,
        NUM_BUFFERS
    };
    GLuint vertex_array_object;
    GLuint vertex_array_buffers[NUM_BUFFERS];
    vec3 normals;
    int draw_count;
}