Using Eigen::Vector3f with OpenGL ES 2.0 VBO - C++

I am having trouble getting Eigen::Vector3f and OpenGL ES 2.0 VBOs to work together.
My initial attempt was glVertexAttribPointer(VERTEX, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), 0).
While this draws nothing, if I start to play with the stride value I can see a broken mesh.
This is my current code, which leads to a crash. In my old code I was using a simple vector3 class made of 3 floats, which worked fine.
struct Vertex {
    Eigen::Vector3f pos;
    Eigen::Vector3f normal;
};
std::vector<Vertex> Vertices;
std::vector<ushort16> Indices;
...
GLuint vao;
uint32 vboID, vboID2;
glGenVertexArraysOES(1, &vao);
glBindVertexArrayOES(vao);
glGenBuffers(1, &vboID);
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, GL_STATIC_DRAW , sizeof(Vertex) * Vertices.size(), &Vertices[0]);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glGenBuffers(1, &vboID2);
glBindBuffer(GL_ARRAY_BUFFER, vboID2);
glBufferData(GL_ARRAY_BUFFER, GL_STATIC_DRAW , sizeof(ushort16) * Indices.size(), &Indices[0]);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glEnableVertexAttribArray(VERTEX);
glVertexAttribPointer(VERTEX, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), &Vertices[0].pos);
glEnableVertexAttribArray(NORMAL);
glVertexAttribPointer(NORMAL, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), &Vertices[0].normal);
glBindBuffer(GL_ARRAY_BUFFER, vboID2);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArrayOES(0);
...

This is what solved my problem:
I removed this call:
glBufferData(GL_ARRAY_BUFFER, GL_STATIC_DRAW , sizeof(Vertex) * Vertices.size(), &Vertices[0]);
and changed these lines to:
glVertexAttribPointer(VERTEX, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), Vertices[0].pos.data());
....
glVertexAttribPointer(NORMAL, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), Vertices[0].normal.data());
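For reference, the interleaved VBO approach can also be kept with Eigen types, because for this non-vectorizable size Eigen::Vector3f is just three tightly packed floats. A minimal sketch, assuming the Vertex struct and the attribute locations VERTEX and NORMAL from the code above, and the standard glBufferData argument order (target, size, data, usage):
// sanity check: no padding inside Vertex
static_assert(sizeof(Vertex) == 6 * sizeof(float), "Vertex must be tightly packed");
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * Vertices.size(), Vertices.data(), GL_STATIC_DRAW);
glEnableVertexAttribArray(VERTEX);
// pos sits at byte offset 0 inside each Vertex
glVertexAttribPointer(VERTEX, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)0);
glEnableVertexAttribArray(NORMAL);
// normal starts right after pos
glVertexAttribPointer(NORMAL, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)sizeof(Eigen::Vector3f));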

Related

Initializing an OpenGL VBO with a struct

I'm having problems adding a struct to an OpenGL VBO.
This is my struct:
struct Vertex {
    //vertices
    std::vector<glm::vec3> vertices;
    //texture coordinates
    std::vector<glm::vec3> texCord;
};
This is how I'm creating and initializing the VBO:
glGenVertexArrays(1, &buffer.VAO);
glGenBuffers(1, &buffer.VBO);
glGenBuffers(1, &buffer.EBO);
glBindBuffer(GL_ARRAY_BUFFER, buffer.VAO);
//Initialize buffer data
glBindBuffer(GL_ARRAY_BUFFER, buffer.VBO);
glBufferData(GL_ARRAY_BUFFER,
    vertex.vertices.size() * sizeof(glm::vec3) +
    vertex.texCord.size() * sizeof(glm::vec3),
    &vertex,
    GL_STATIC_DRAW);
//indices attribute
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffer.EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vertex.indices.size() * sizeof(glm::vec2), &vertex.indices[0], GL_STATIC_DRAW);
//vertices coordinates attribute
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, vertices));
//texture coordinates attribute
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, texCord));
And my draw call looks like this:
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(buffer.VAO);
glDrawElements(GL_TRIANGLES, vertex.indices.size(), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
Is it possible to initialize the buffer data with a raw struct, and if so, how?
A std::vector stores a pointer to dynamically allocated memory, so the element data does not live inside the struct itself. However, you must pass one contiguous block of memory to glBufferData.
Create the object's data store with glBufferData, but use glBufferSubData to initialize the buffer:
size_t vertexSize = vertex.vertices.size() * sizeof(glm::vec3);
size_t texCordSize = vertex.texCord.size() * sizeof(glm::vec3);
glBufferData(GL_ARRAY_BUFFER, vertexSize + texCordSize, nullptr, GL_STATIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, 0, vertexSize, vertex.vertices.data());
glBufferSubData(GL_ARRAY_BUFFER, vertexSize, texCordSize, vertex.texCord.data());
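The two glBufferSubData calls give a non-interleaved layout: all positions first, then all texture coordinates. The attribute pointers then have to use the per-block stride and the byte offset of the second block. A minimal sketch, assuming glm::vec2 texture coordinates as in the question's attribute setup (with glm::vec3 texture coordinates the second stride would be sizeof(glm::vec3)):
// positions: tightly packed block starting at byte 0
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), (void*)0);
glEnableVertexAttribArray(0);
// texture coordinates: tightly packed block starting right after the positions
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(glm::vec2), (void*)vertexSize);
glEnableVertexAttribArray(1);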
Thanks to @Rabbid76 I have now solved my problem. The code is below.
Note: the buffer's data store has to be created with glBufferData before it can be filled with glBufferSubData.
glGenVertexArrays(1, &buffer.VAO);
glGenBuffers(1, &buffer.VBO);
glGenBuffers(1, &buffer.EBO);
glBindVertexArray(buffer.VAO);
int vertexSize = sizeof(glm::vec3) * vertex.vertices.size();
int texCordSize = sizeof(glm::vec2) * vertex.texCord.size();
int totalSize = vertexSize + texCordSize;
int indicesSize = sizeof(unsigned int) * vertex.indices.size();
//Bind the VBO and create its data store
glBindBuffer(GL_ARRAY_BUFFER, buffer.VBO);
glBufferData(GL_ARRAY_BUFFER, totalSize, NULL, GL_DYNAMIC_DRAW);
//Fill the buffer with data
glBufferSubData(GL_ARRAY_BUFFER, 0, vertexSize, vertex.vertices.data());
glBufferSubData(GL_ARRAY_BUFFER, vertexSize, texCordSize, vertex.texCord.data());
//indices attribute
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffer.EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesSize, vertex.indices.data(), GL_STATIC_DRAW);
//position attribute (tightly packed vec3 block at the start of the buffer)
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), (void*)0);
//texture attribute (tightly packed vec2 block right after the positions)
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(glm::vec2), (void*)(size_t)vertexSize);
glBindVertexArray(0);

Problem displaying multiple objects in modern OpenGL [closed]

I'm trying to display two shapes in OpenGL.
First I read the vertices, uvs, normals and indices from an OBJ file and the texture from a DDS file, and stored them in an array of struct Shape.
Then I indexed the vertices, uvs, normals and indices of all the shapes into 4 respective arrays, also storing the running totals of vertices, uvs, normals and indices in another vector.
Then I initialized the VBOs.
Then I created vertex array objects for the two shapes and set them up with the respective glVertexAttribPointer calls. (I think the problem is in this step.)
Finally I bind the respective VAOs and draw them, but only one shape is displayed.
Where exactly am I going wrong?
Code for the VBO indexing:
std::vector<glm::vec4> elecount;
long long int endind = 0,endver=0,enduv=0,endnr=0;
std::vector<unsigned short> indices;
std::vector<glm::vec3> indexed_vertices;
std::vector<glm::vec2> indexed_uvs;
std::vector<glm::vec3> indexed_normals;
for (int i = 0;i < componentcount ;i++)
{
endver = endind = enduv = endnr = 0;
indexVBO(component[i].vertices, component[i].uvs, component[i].normals, indices, indexed_vertices, indexed_uvs, indexed_normals);
endind = indices.size();
endver = indexed_vertices.size();
enduv = indexed_uvs.size();
endnr = indexed_normals.size();
elecount.push_back(glm::vec4(endver, enduv, endnr, endind));
}
Code for the VBOs:
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, indexed_vertices.size() * sizeof(glm::vec3), &indexed_vertices[0], GL_STATIC_DRAW);
GLuint uvbuffer;
glGenBuffers(1, &uvbuffer);
glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
glBufferData(GL_ARRAY_BUFFER, indexed_uvs.size() * sizeof(glm::vec2), &indexed_uvs[0], GL_STATIC_DRAW);
GLuint normalbuffer;
glGenBuffers(1, &normalbuffer);
glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
glBufferData(GL_ARRAY_BUFFER, indexed_normals.size() * sizeof(glm::vec3), &indexed_normals[0], GL_STATIC_DRAW);
GLuint elementbuffer;
glGenBuffers(1, &elementbuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned short), &indices[0], GL_STATIC_DRAW);
Code for the VAOs:
GLuint CubeVertexArrayID, SphereVertexArrayID;
glGenVertexArrays(1, &CubeVertexArrayID);
glGenVertexArrays(1, &SphereVertexArrayID);
glBindVertexArray(CubeVertexArrayID);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
glBindVertexArray(SphereVertexArrayID);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void *)(sizeof(glm::vec3) * ((int)elecount[0][0])));
glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)(sizeof(glm::vec2) * ((int)elecount[0][1])));
glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (void*)(sizeof(glm::vec3) * ((int)elecount[0][2])));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
Code for displaying:
glBindVertexArray(CubeVertexArrayID);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, component[0].Texture);
glUniform1i(TextureID, 0);
glDrawElements(GL_TRIANGLES,(int)elecount[0][3],GL_UNSIGNED_SHORT,(void*)0);
glm::mat4 ModelMatrix2 = glm::mat4(1.0);
ModelMatrix2 = glm::translate(ModelMatrix2, glm::vec3(2.0f, 0.0f, 0.0f));
glm::mat4 MVP2 = ProjectionMatrix * ViewMatrix * ModelMatrix2;
glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &MVP2[0][0]);
glUniformMatrix4fv(ModelMatrixID, 1, GL_FALSE, &ModelMatrix2[0][0]);
glUseProgram(shaderProg);
glBindVertexArray(SphereVertexArrayID);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, component[1].Texture);
glUniform1i(TextureID, 0);
glDrawElements(GL_TRIANGLES, (int)(elecount[1][3]-elecount[0][3]), GL_UNSIGNED_SHORT,(void*)(sizeof(unsigned short) * ((int)elecount[0][3])));
There's nothing wrong with the code you posted, so it's hard to say for sure where the problem is. However, my best guess is that indexVBO (which you didn't show) pushes absolute indices into the indexed_* arrays. Combined with the offsets in the glVertexAttribPointer calls of SphereVertexArrayID, this causes out-of-bounds reads.
You could fix your indexVBO code. However, since both VAOs reference the same buffers, the simplest solution (and, I would say, the correct one) is to use a single VAO for both components. If my hypothesis is correct, it is as simple as changing the
glBindVertexArray(SphereVertexArrayID);
to
glBindVertexArray(CubeVertexArrayID);
when you're drawing the 2nd component. Then you can get rid of SphereVertexArrayID completely.
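If that is the case, the draw code for the 2nd component stays exactly as posted and only the VAO bind changes. A short sketch with the question's identifiers, assuming the indices written by indexVBO are absolute (i.e. already offset past the 1st component's vertices):
glBindVertexArray(CubeVertexArrayID); // same VAO for both components
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, component[1].Texture);
glUniform1i(TextureID, 0);
glDrawElements(GL_TRIANGLES,
    (int)(elecount[1][3] - elecount[0][3]),                 // index count of the 2nd component
    GL_UNSIGNED_SHORT,
    (void*)(sizeof(unsigned short) * (int)elecount[0][3])); // byte offset past the 1st component's indices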

glDrawElements not drawing

I create my buffers with the following code:
//generate buffers
glGenVertexArrays(1, &VAO);
//glGenBuffers(1, &EBO);
glGenBuffers(1, &VBO_vertices);
glGenBuffers(1, &VBO_colors);
glGenBuffers(1, &VBO_normals);
// Bind the Vertex Array Object first, then bind and set vertex buffer(s) and attribute pointer(s).
glBindVertexArray(VAO);
// Copy our vertices array in a buffer for OpenGL to use
glBindBuffer(GL_ARRAY_BUFFER, VBO_vertices);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*3*vertices.size(), &vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vTable.size() * sizeof(int), &vTable[0], GL_STATIC_DRAW);
// Position attribute
glVertexAttribPointer((GLuint)0, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0); //size = 3 (X,Y,Z)
glEnableVertexAttribArray(0);
//Buffer for color
glBindBuffer(GL_ARRAY_BUFFER, VBO_colors);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*3*vertices.size(), &v_color[0], GL_STATIC_DRAW);
// Color attribute
glVertexAttribPointer((GLuint)1, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0); //size = 3 (R,G,B)
glEnableVertexAttribArray(1);
//Buffer for normals
glBindBuffer(GL_ARRAY_BUFFER, VBO_normals);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*3*vertices.size(), &v_normals[0], GL_STATIC_DRAW);
//normal attribute
glVertexAttribPointer((GLuint)2, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0); //size = 3 (R,G,B)
glEnableVertexAttribArray(2);
// Unbind the VAO
glBindVertexArray(0);
My data are:
vector<vec3> vertices, v_normals, v_color;
vector<int> vTable;
I have vertices, normals and colors per vertex, and an index table with the vertex indices of each triangle.
When I try to render this, nothing appears on the window.
glBindVertexArray(VAO); //Bind VAO
glDrawElements(GL_TRIANGLES, vTable.size(), GL_UNSIGNED_INT, &vTable[0]);
glBindVertexArray(0); //Unbind VAO
If I use this:
glDrawArrays(GL_TRIANGLES, 0, vTable.size());
it draws something, but an incomplete object, as in the linked image.
Does anybody know what's happening? Thanks in advance.
Your glDrawElements call is wrong: the last parameter should be a byte offset into the GL_ELEMENT_ARRAY_BUFFER that holds the indices, not a pointer to system memory.
glDrawElements(GL_TRIANGLES, vTable.size(), GL_UNSIGNED_INT, 0);
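For reference, a minimal sketch of the whole indexed path, in which the element buffer object is generated and filled while the VAO is bound (so the binding is stored in the VAO) and the draw call only passes a byte offset (names follow the question):
glGenBuffers(1, &EBO);                        // the index data needs its own buffer object
glBindVertexArray(VAO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);   // this binding is recorded in the VAO
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vTable.size() * sizeof(int), vTable.data(), GL_STATIC_DRAW);
glBindVertexArray(0);
// ... later, when drawing:
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, (GLsizei)vTable.size(), GL_UNSIGNED_INT, 0); // 0 = byte offset into the EBO
glBindVertexArray(0);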

OpenGL does not display triangles using VBO

I am trying to display 2 simple triangles in OpenGL, but they don't appear, despite checking numerous sites. Did I forget anything in the code?
#define BUFFER_OFFSET(bytes) ((GLubyte*) NULL + (bytes))
// Prepare:
GLfloat vertices[]={0,0,1,
0.5,0,-0.5,
0,0,0,
0,0,1,
0,0,0,
-0.5,0,-0.5};
GLushort indices[]={0,1,2,3,4,5};
GLfloat *generatedVertices=new GLfloat[18];
GLushort *generatedIndices=new GLushort[6];
GLfloat *colors=new GLfloat[18];
glGenBuffers( 3, triangleBuffers ); // triangleBuffers = Global GLuint
for (int p=0;p<6;p++)
{
MVector V(vertices[p*3],vertices[p*3+1],vertices[p*3+2]); // Maya type
generatedVertices[p*3]=V.x;
generatedVertices[p*3+1]=V.y;
generatedVertices[p*3+2]=V.z;
generatedIndices[p]=p;
colors[p*3]=0.8f;
colors[p*3+1]=0.6f;
colors[p*3+2]=0.0f;
}
// vertices
glBindBuffer(GL_ARRAY_BUFFER, data->triangleBuffers[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*18, generatedVertices, GL_STATIC_DRAW);
// indices
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, data->triangleBuffers[1]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLushort)*6, generatedIndices, GL_STATIC_DRAW);
// colors
glBindBuffer(GL_ARRAY_BUFFER, data->triangleBuffers[2]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*18*data->totalSize, colors, GL_STATIC_DRAW);
// Render:
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, data->triangleBuffers[0]);
glVertexPointer(3, GL_FLOAT, 0, BUFFER_OFFSET(0));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, data->triangleBuffers[1]);
glBindBuffer(GL_ARRAY_BUFFER, data->triangleBuffers[2]);
glColorPointer(3, GL_FLOAT, 0, BUFFER_OFFSET(0));
glDrawElements(GL_TRIANGLES, totalSize*6, GL_UNSIGNED_INT, 0);
glDisableClientState( GL_VERTEX_ARRAY );
glDisableClientState( GL_COLOR_ARRAY );
The issue is coming from Maya: in OpenGL Legacy mode the mesh is displayed properly, but in OpenGL Strict mode it isn't. I will investigate in this direction.

Proper way to use indexed VBO

I am working on a simple 3D engine. I currently have a working setup with multiple VAOs that I can switch between during the render loop, but none of them uses an index buffer.
I'm now trying to add a new VAO composed of 4 VBOs: vertex positions, colors, normals and indices.
Everything compiles and runs, but the draw calls for the second VAO (with indexed vertices) do not render anything. I'm sure there is a problem with my setup somewhere, so I've included the code below, which shows all the VAO and VBO generation, setup and use. Does anything in this code seem wrong, and is this the correct way to set it all up?
VAO1 has 3 buffers: positions, colors, normals
VAO2 has 4 buffers: positions, colors, normals and vertex indices
//Initialize VAOs and VBOs
GLuint vao1, vbo1[3];
GLuint vao2, vbo2[4];
//Generate Vertex arrays:
glGenVertexArrays(1, &vao1);
glGenVertexArrays(1, &vao2);
//Generate Buffers:
glGenBuffers(3, vbo1);
glGenBuffers(4, vbo2);
//Initialize buffer data vectors:
vector<GLfloat> VertPosBuffer1Vector;
vector<GLfloat> VertNormalBuffer1Vector;
vector<GLfloat> VertColorBuffer1Vector;
vector<GLfloat> VertPosBuffer2Vector;
vector<GLfloat> VertNormalBuffer2Vector;
vector<GLfloat> VertColorBuffer2Vector;
vector<GLuint> VertIndexBuffer2Vector;
//Fill Buffers:
//(not included but all vectors are filled with data)
//VAO 1
glBindVertexArray(vao1);
//Vertex position buffer:
glBindBuffer(GL_ARRAY_BUFFER, vbo1[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*VertPosBuffer1Vector.size(), &VertPosBuffer1Vector[0], GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(0);
//Vertex color buffer:
glBindBuffer(GL_ARRAY_BUFFER, vbo1[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*VertColorBuffer1Vector.size(), &VertColorBuffer1Vector[0], GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(1);
//Vertex normal buffer:
glBindBuffer(GL_ARRAY_BUFFER, vbo1[2]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*VertNormalBuffer1Vector.size(), &VertNormalBuffer1Vector[0], GL_STATIC_DRAW);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(2);
//VAO 2
glBindVertexArray(vao2);
//Vertex position buffer:
glBindBuffer(GL_ARRAY_BUFFER, vbo2[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*VertPosBuffer2Vector.size(), &VertPosBuffer2Vector[0], GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(0);
//Vertex color buffer:
glBindBuffer(GL_ARRAY_BUFFER, vbo2[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*VertColorBuffer2Vector.size(), &VertColorBuffer2Vector[0], GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(1);
//Vertex normal buffer:
glBindBuffer(GL_ARRAY_BUFFER, vbo2[2]);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*VertNormalBuffer2Vector.size(), &VertNormalBuffer2Vector[0], GL_STATIC_DRAW);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(2);
//Vertex index buffer:
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo2[3]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint)*VertIndexBuffer2Vector.size(), &VertIndexBuffer2Vector[0], GL_STATIC_DRAW);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(3);
//unbind vao
glBindVertexArray(0);
//bind first vao
glBindVertexArray(vao1);
and
//RENDERLOOP
//render objects from vao1 using:
glDrawArrays(GL_TRIANGLES, start, size);
//switch vao
glBindVertexArray(0);
glBindVertexArray(vao2);
//render objects from vao2 using:
glDrawElements(
GL_TRIANGLES,
start,
GL_UNSIGNED_INT,
(void*)0
);
I have checked that the data in my buffers is correct.
Is it correct that the shader doesn't take in any index information? Will the shader be the same as if I didn't use an index buffer?
Thank you
The indices are not a vertex attribute. So what you need to do is remove these two lines:
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(3);
I also noticed that you are using the variable "start" as the count argument for glDrawElements. I don't know the values of start and size, but I assume you should use "size" as the second argument in glDrawElements.
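Putting both fixes together, the index-buffer part of the VAO 2 setup and its draw call would look roughly like this (identifiers as in the question; the count is simply the number of indices in VertIndexBuffer2Vector):
//Vertex index buffer: bind and fill it, no attribute pointer needed
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo2[3]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint)*VertIndexBuffer2Vector.size(), &VertIndexBuffer2Vector[0], GL_STATIC_DRAW);
glBindVertexArray(0);
//...
//render objects from vao2:
glBindVertexArray(vao2);
glDrawElements(GL_TRIANGLES,
    (GLsizei)VertIndexBuffer2Vector.size(),   // number of indices to draw, not a start offset
    GL_UNSIGNED_INT,
    (void*)0);
And to answer the last question: yes, the shader never sees the indices; indexing only changes which vertices the attribute arrays are read at, so the shader stays the same as without an index buffer.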