OpenGL indexbuffer not working when loading objects one by one - c++

I'm learning OpenGL and ran into an "obstacle".
I drew some houses (blocks and pyramids) using an indexbuffer.
This works fine when loading all the vertices (from all houses) into the vertexbuffer and using 1 big indexbuffer. Now I want to animate the objects and load them in the vertexbuffer one by one so I can execute transformation on the objects one by one. The code is not much different from one big buffer, but when I do this I just see some random shapes shooting around on the screen. My code is as follows:
I have a world class which holds the list of 3D objects, has one big list of all vertices (for trial purposes), one big list of indices (also trial) and a method to add an Object3D object to the world.
class World
{
public:
World();
~World();
vector<Object3D> objects;
vector<glm::vec3> Vertices;
vector<GLushort> Indices;
void AddObject(Object3D &object);
};
Object3D class:
class Object3D
{
public:
Object3D();
~Object3D();
glm::vec3 Position;
vector<glm::vec3> Vertices;
vector<unsigned int> Indices;
};
World AddObject method, simply adds the object to the "objects" list and adds the vertices and indices to the "Vertices" and "Indices" lists to create one big buffer:
void World::AddObject(Object3D &object) {
int oldVerticesSize = Vertices.size();
objects.push_back(object);
Vertices.insert(Vertices.end(), object.Vertices.begin(), object.Vertices.end());
for each (GLushort index in object.Indices)
{
Indices.push_back(index + oldVerticesSize);
}
}
When I render the big buffer with all the vertices and indices (as shown below) it works fine.
void WorldRenderer::Render()
{
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArray(Vao); //use vao
glDrawElements(GL_TRIANGLES, World.Indices.size(), GL_UNSIGNED_SHORT, 0);
//glDrawArrays(GL_TRIANGLES, 0, World.Vertices.size());
glBindVertexArray(0); //release vao
//Model = glm::rotate(Model, 0.01f, glm::vec3(0.0f, 1.0f, 0.0f));
Mvp = Projection * View * Model;
glUniformMatrix4fv(UniformMvp, 1, GL_FALSE, glm::value_ptr(Mvp));
glutSwapBuffers();
//glutPostRedisplay();
}
When I loop through the objects and load the vertices of the objects in the buffer one-object-at-a-time (as shown below) it shows some random "shapes" which keep "shooting around" or rapidly changing. What am I doing wrong here?
Thanks in advance for any advice.
void WorldRenderer::Render()
{
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
int index = 0;
for each (Object3D mesh in World.objects)
{
Mvp = Projection * View * Model;
UpdateBuffer(mesh.Vertices, mesh.Indices);
glBindVertexArray(Vao); //use vao
glDrawElements(GL_TRIANGLES, mesh.Indices.size() , GL_UNSIGNED_SHORT, 0);
glBindVertexArray(0); //release vao
glUniformMatrix4fv(UniformMvp, 1, GL_FALSE, glm::value_ptr(Mvp));
index++;
}
glutSwapBuffers();
}
UpdateBuffers method:
void WorldRenderer::UpdateBuffer(vector<glm::vec3> vertices, vector<unsigned int> indices) {
//fill Vbo
glBindBuffer(GL_ARRAY_BUFFER, Vbo);
glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//fill ibo
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), indices.data(), GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindVertexArray(Vao);
glBindBuffer(GL_ARRAY_BUFFER, Vbo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
}
For the first Render method (without the loop) the buffers are created once in the init method which looks like this:
void WorldRenderer::Init(int argc, char ** argv) {
InitGlutGlew(argc, argv);
InitMatrices();
glDisable(GL_CULL_FACE);
//-------------- init buffers --------------
// vbo vertices
glGenBuffers(1, &Vbo);
glBindBuffer(GL_ARRAY_BUFFER, Vbo);
glBufferData(GL_ARRAY_BUFFER, World.Vertices.size() * sizeof(glm::vec3),
&World.Vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//ibo
glGenBuffers(1, &Ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, World.Indices.size() * sizeof(unsigned int),
&World.Indices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// VAO setup
glGenVertexArrays(1, &Vao);
glBindVertexArray(Vao);
// Bind vertices to vao
glBindBuffer(GL_ARRAY_BUFFER, Vbo);
//Bind elements
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
//------ init shaders -----------
InitShader();
}

As soon as I see your UpdateBuffer function, I feel that the glBufferData for vbo is not loaded properly.
The second parameter vertices.size() returns just number of elements in vector but not the actual size of object.
So the second parameter should be vertices.size() * sizeof(glm::vec3)
And the third parameter is ok as it returns pointer to the underlying array. If not working ....directly pass the address of first element as shown below.
Overall it should be something like as shown below.
glBufferData(
GL_ARRAY_BUFFER,
vertices.size() * sizeof(glm::vec3),
&vertices[0],
GL_STATIC_DRAW
);
Check if it works.
Why you are seeing differences?
First render:
Your buffer contain all the world's data for vertices continuosly, when the glDrawElements is called.
So here the mesh1 last vertex is continued with mesh 2 fist vertex.... So you see a kind of closed shape.
Second render:
your buffer contains only one mesh data at a time, when the glDrawElements is called.
so your shape ends for each mesh after calling glDrawElements.
To obtain the same result as first render, you have to first update a single vertex buffer for all meshes ( use glBufferSubData).
Then call glDrawElements once. Then you will see same result.

Related

Trouble loading and drawing a .obj model using Assimp and OpenGL

After performing the basics of OpenGL (creating a window, making a 2D triangle, shaders, etc.) I decided to start trying to load simple .obj models. The most recommended library was Assimp so I followed some tutorials and modified my project to load models. But unfortunately, the models were displayed very strangely. I created the following code to show this:
#include <iostream>
#include <string>
#include <vector>
#include <memory>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/glm.hpp>
#include <assimp/Importer.hpp>
#include <assimp/scene.h>
#include <assimp/postprocess.h>
struct Vertex
{
glm::vec3 position;
glm::vec3 normal;
};
struct Mesh
{
//The vertex array object, vertex buffer object and element buffer object
GLuint VAO;
GLuint VBO;
GLuint EBO;
//Vectors for the vertices and indices to put in the buffers
std::vector<Vertex> vertices;
std::vector<GLuint> indices;
//Constructor
Mesh(const std::vector<Vertex>& vertices, const std::vector<GLuint>& indices)
{
this->vertices = vertices;
this->indices = indices;
//Generate the VAO
glGenVertexArrays(1, &VAO);
//Generate the buffer objects
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
//Bind the VAO
glBindVertexArray(VAO);
//Bind the VBO and set the vertices
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * vertices.size(), &vertices.at(0), GL_STATIC_DRAW);
//Enable the first attribute pointer
glEnableVertexAttribArray(0);
//Set the attribute pointer The stride is meant to be 'sizeof(Vertex)', but it doesn't work at all that way
// \/
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
//Enable the second attribute pointer
glEnableVertexAttribArray(1);
//Set the attribute pointer ditto
// \/
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, (void*) offsetof(Vertex, normal));
//Bind the EBO and set the indices
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * indices.size(), &indices.at(0), GL_STATIC_DRAW);
//Report any errors
GLenum error = glGetError();
if (error != GL_NO_ERROR)
{
std::cerr << "Error while creating mesh!" << std::endl;
}
glBindVertexArray(0);
}
void draw()
{
//Bind the VAO
glBindVertexArray(VAO);
//Bind the ELement Buffer Object
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
//Draw the mesh
glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, 0);
//Unbind the VAO
glBindVertexArray(0);
}
};
int main()
{
//Intialize GLFW (no error checking for brevity)
glfwInit();
//Set the OpenGL version to 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
//Create a new window
GLFWwindow* window = glfwCreateWindow(800, 600, "Model Testing", NULL, NULL);
glfwMakeContextCurrent(window);
//Initialize glew (no checking again)
glewInit();
glViewport(0, 0, 800, 600);
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
//Load the model
Assimp::Importer importer;
const aiScene* scene = importer.ReadFile("mymodel.obj", aiProcess_Triangulate | aiProcess_GenNormals);
//Check for errors
if ((!scene) || (scene->mFlags == AI_SCENE_FLAGS_INCOMPLETE) || (!scene->mRootNode))
{
std::cerr << "Error loading mymodel.obj: " << std::string(importer.GetErrorString()) << std::endl;
//Return fail
return -1;
}
//A vector to store the meshes
std::vector<std::unique_ptr<Mesh> > meshes;
//Iterate over the meshes
for (unsigned int i = 0; i < scene->mNumMeshes; ++i)
{
//Get the mesh
aiMesh* mesh = scene->mMeshes[i];
//Create vectors for the vertices and indices
std::vector<Vertex> vertices;
std::vector<GLuint> indices;
//Iterate over the vertices of the mesh
for (unsigned int j = 0; j < mesh->mNumVertices; ++j)
{
//Create a vertex to store the mesh's vertices temporarily
Vertex tempVertex;
//Set the positions
tempVertex.position.x = mesh->mVertices[j].x;
tempVertex.position.y = mesh->mVertices[j].y;
tempVertex.position.z = mesh->mVertices[j].z;
//Set the normals
tempVertex.normal.x = mesh->mNormals[j].x;
tempVertex.normal.y = mesh->mNormals[j].y;
tempVertex.normal.z = mesh->mNormals[j].z;
//Add the vertex to the vertices vector
vertices.push_back(tempVertex);
}
//Iterate over the faces of the mesh
for (unsigned int j = 0; j < mesh->mNumFaces; ++j)
{
//Get the face
aiFace face = mesh->mFaces[j];
//Add the indices of the face to the vector
for (unsigned int k = 0; k < face.mNumIndices; ++k) {indices.push_back(face.mIndices[k]);}
}
//Create a new mesh and add it to the vector
meshes.push_back(std::unique_ptr<Mesh>(new Mesh(std::move(vertices), std::move(indices))));
}
//While the window shouldn't be closed
while (!glfwWindowShouldClose(window))
{
//Clear the buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//Draw all the meshes
for (auto& mesh : meshes) {mesh.get()->draw();}
//Swap the buffers
glfwSwapBuffers(window);
}
//Close the window now that it's not needed anymore
glfwDestroyWindow(window);
return 0;
}
When the program loads this teapot my screen looks like this:
And from further away at another angle (using a more complex program than the one above):
In case it's useful, I'm running Ubuntu 16.04 with an Nvidia GTX 750 Ti, driver version 361.45
The stride should be sizeof(Vertex). If it doesn't work with that stride then something else is wrong!
Try moving the binding of the ebo to right after the vbo in the mesh constructor like this:
//Bind the VBO and set the vertices
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * vertices.size(), &vertices.at(0), GL_STATIC_DRAW);
//Bind the EBO and set the indices
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * indices.size(), &indices.at(0), GL_STATIC_DRAW);
Thats the way they have it on the mesh page you linked. I have had a similar problem with a different loader. Your indices are are not being loaded correctly so some verticies are positioned correctly while others are not.
My experience with OpenGL is pathetic so I can be mistaken. I see that your vertices are made as: x,y,z,nx,ny,nz where xyz is vertex coords and nxnynz is normal coords. Therefore stride is 6*sizeof(float).
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6*sizeof(float), 0);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6*sizeof(float), (void*) 3*sizeof(float);
just look at second answer here: Opengl Vertex attribute stride to learn more about stride calculation
if this doesn't help check if indices where formed correctly
and small advice: work with cubes not with teapots (just made cube in blender or write it yourself in notepad)

Trouble wrapping OpenGL in object-oriented way

I'm not very experience with the OpenGL library so I'm having trouble understanding why when I move some initialization code to a class or a function, GL stops drawing onto the screen. Some research indicates that the library is "global" or state-based rather than object based?
Anyway, here is some code that works
GLuint vertexArrayBuffer;
glGenVertexArrays(1, &vertexArrayBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexArrayBuffer);
// VBO is ready to accept vertex data
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindVertexArray(0);
while(!screen.isClosed()) {
// Give the screen a background color
screen.paint(0.0f, 0.0f, 0.5f, 1.0f);
glBindBuffer(GL_ARRAY_BUFFER, vertexArrayBuffer);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
// Switch to display buffer after drawing all of the above
screen.swapBuffers();
This is all enclosed in the main function, with not much programming structure. The output is a nice white triangle onto a blueish background.
This is the issue here, taking the exact code prior to the event loop and wrapping it into a function:
GLuint initVertexArray(vertex vertices[]) {
// Create file descriptor for the VBO for use as reference to gl vertex functions
GLuint vertexArrayBuffer;
glGenVertexArrays(1, &vertexArrayBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexArrayBuffer);
// VBO is ready to accept vertex data
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindVertexArray(0);
return vertexArrayBuffer;
}
and calling it GLuint vertexArrayBuffer = initVertexArray(vertices); in the main function, produces no output of any kind, no errors either, just the same blueish background.
Have you checked what sizeof(vertices) is returning. In this case vertices[] will decay into a pointer so I would imagine that sizeof(vertices) is sizeof(vertex*).
Try passing the size of the array alongside it like so:
GLuint initVertexArray(vertex vertices[], const unsigned int size);
Then you would use it like so:
glBufferData(GL_ARRAY_BUFFER, size, vertices, GL_STATIC_DRAW);
instead of:
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
You would then call it in the same scope as you declared your vertices array:
vertex vertices[100];
// sizeof(vertices) here will give the actual size of the vertices array
// eg: sizeof(vertex) * 100 instead of just giving sizeof(vertex*)
GLuint vertexArrayBuffer = initVertexArray(vertices, sizeof(vertices));

OpenGL VAO always drawing first Vertex from origin

The title sums up my issue, but no matter what I set the first vertex as, OpenGL always draws it at the origin. I've tried this on a school computer and it wasn't a problem but I'm not at school and it's possible something I've changed is causing the issue. Regardless, I see no reason why this should happen. In case syntax seems weird, this code is written in D but should be an almost seamless port from C.
My code is:
class Mesh
{
this(vec3[] vertices, uint[] indices)
{
draw_count = indices.length;
glGenVertexArrays(1, &vertex_array_object);
glBindVertexArray(vertex_array_object);
glGenBuffers(NUM_BUFFERS, vertex_array_buffers.ptr);
glBindBuffer(GL_ARRAY_BUFFER, vertex_array_buffers[POSITION_VB]);
glBufferData(GL_ARRAY_BUFFER, vertices.length * vertices.sizeof, vertices.ptr, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(cast(GLuint)0, 3, GL_FLOAT, GL_FALSE, 0, cast(void*)0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vertex_array_buffers[INDEX_VB]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.length * indices.sizeof, indices.ptr, GL_STATIC_DRAW);
glBindVertexArray(0);
}
void draw()
{
glBindVertexArray(vertex_array_object);
glDrawElements(GL_TRIANGLES, draw_count, GL_UNSIGNED_INT, cast(const(void)*)0);
glBindVertexArray(0);
}
private:
enum
{
POSITION_VB,
INDEX_VB,
NORMAL_VB,
NUM_BUFFERS
};
GLuint vertex_array_object;
GLuint vertex_array_buffers[NUM_BUFFERS];
vec3 normals;
int draw_count;
}

how to use glDrawElements with glBindVertexArray properly

what am trying to do is using glDrawElements to draw without redundancy of vertices as follow:
Model ModelManager::CreateModel(std::vector<glm::vec3>&vertices, std::vector<uint16_t>&vertexIndeces)
{
//Vertecies
GLuint vertexArray;
glGenVertexArrays(1, &vertexArray);
glBindVertexArray(vertexArray);
GLuint vBufferId;
glGenBuffers(1, &vBufferId);
glBindBuffer(GL_ARRAY_BUFFER, vBufferId);
glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
GLuint iBufferId;
glGenBuffers(1, &iBufferId);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, iBufferId);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vertexIndeces.size(), vertexIndeces.data(), GL_STATIC_DRAW);
glBindVertexArray(0);
//
return Model(vertexArray, vBufferId, iBufferId, vertexIndeces.size());
}
and then when I draw :
void Model::Draw()
{
if (vertexArray)
{
isFinishedIniting = true;
glBindVertexArray(vertexArray);
glDrawElements(GL_TRIANGLES, elementCount, GL_UNSIGNED_SHORT, 0);
glBindVertexArray(0);
}
}
shaders:
#version 120
void main()
{
gl_Position= gl_ModelViewProjectionMatrix*gl_Vertex;
}
#version 120
void main()
{
gl_FragColor=vec4(1.0,0.0,0.0,0.0);
}
the obj file am trying to load is easy I hand made it :
v 0.0 0.0 0.0
v 1.0 1.0 0.0
v -1.0 1.0 0.0
v -1.0 -1.0 0.0
v 1.0 -1.0 0.0
f 1/1/1 2/1/1 3/1/1
f 1/1/1 4/1/1 5/1/1
so it should show two red Triangles ,but it's not drawing anything to the screen!
There are a couple of problems in this code:
The sizes passed to glBufferData() look wrong:
glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
...
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vertexIndeces.size(), vertexIndeces.data(), GL_STATIC_DRAW);
Both vertices and vertexIndeces() are vectors. The .size() method on a vector gives the number of elements, while glBufferData() expects the size in bytes. To fix this, change the code to:
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(vertices[0]),
vertices.data(), GL_STATIC_DRAW);
...
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vertexIndeces.size() * sizeof(vertexIndeces[0]),
vertexIndeces.data(), GL_STATIC_DRAW);
The API calls use a mix of fixed function attributes and generic vertex attributes. Based on the version in the shader code, and the shader code itself (particularly the use of gl_Vertex), you're using OpenGL 2.1 level shaders with fixed function attributes. Therefore, you need to use glEnableClientState() and glVertexPointer() instead of glEnableVertexAttribArray() and glVertexAttribPointer():
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, 0);
You can use generic vertex attributes, but then you need to declare a variable of type attribute in the vertex shader, instead of using gl_Vertex.

OpenGL Vertex Buffer Object does not work

I have a bunny.h which contains the following data:
bunny[] | vertex coordinates xyz
NUM_TRIANGLES | the amount of triangles for the bunny
normals[] | the normals for each triangle
triangles[] | indices for the triangles
I want to use the data for a vertex buffer object.
here is how I try to load the data
GLfloat values[NUM_TRIANGLES*3];
for(int i = 0; i < NUM_TRIANGLES*3; i++)
values[i] = bunny[i];
// init and bind a VBO (vertex buffer object) //
glGenBuffers(1, &bunnyVBO);
glBindBuffer(GL_ARRAY_BUFFER, bunnyVBO);
// copy data into the VBO //
glBufferData(GL_ARRAY_BUFFER, sizeof(values), &values, GL_STATIC_DRAW);
// init and bind a IBO (index buffer object) //
glGenBuffers(1, &bunnyIBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bunnyIBO);
// copy data into the IBO //
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(triangles), &triangles, GL_STATIC_DRAW);
// unbind active buffers //
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
Later in the program I want to render the buffers using this code:
glBindBuffer(GL_ARRAY_BUFFER, bunnyVBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bunnyIBO);
glEnableClientState(GL_VERTEX_ARRAY);
glDrawElements(GL_TRIANGLES, NUM_TRIANGLES, GL_UNSIGNED_INT, triangles);
glDisableClientState(GL_VERTEX_ARRAY);
OpenGL is working fine, but I dont see the bunny... (the data is not corrupted or anything like that, the error is in my code)
Can some please help me?
I don't see any call to glVertexPointer. And if you want to use the elements from the VBO, it should be
glDrawElements(GL_TRIANGLES, NUM_TRIANGLES, GL_UNSIGNED_INT, 0);