The title sums up my issue: no matter what I set the first vertex to, OpenGL always draws it at the origin. I've tried this on a school computer, where it wasn't a problem, but I'm not at school now, so it's possible something I've changed is causing the issue. Regardless, I see no reason why this should happen. In case the syntax seems weird, this code is written in D, but it should be an almost seamless port from C.
My code is:
class Mesh
{
    this(vec3[] vertices, uint[] indices)
    {
        draw_count = indices.length;

        glGenVertexArrays(1, &vertex_array_object);
        glBindVertexArray(vertex_array_object);

        glGenBuffers(NUM_BUFFERS, vertex_array_buffers.ptr);

        glBindBuffer(GL_ARRAY_BUFFER, vertex_array_buffers[POSITION_VB]);
        glBufferData(GL_ARRAY_BUFFER, vertices.length * vertices.sizeof, vertices.ptr, GL_STATIC_DRAW);
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(cast(GLuint)0, 3, GL_FLOAT, GL_FALSE, 0, cast(void*)0);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vertex_array_buffers[INDEX_VB]);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.length * indices.sizeof, indices.ptr, GL_STATIC_DRAW);

        glBindVertexArray(0);
    }

    void draw()
    {
        glBindVertexArray(vertex_array_object);
        glDrawElements(GL_TRIANGLES, draw_count, GL_UNSIGNED_INT, cast(const(void)*)0);
        glBindVertexArray(0);
    }

private:
    enum
    {
        POSITION_VB,
        INDEX_VB,
        NORMAL_VB,
        NUM_BUFFERS
    };

    GLuint vertex_array_object;
    GLuint vertex_array_buffers[NUM_BUFFERS];
    vec3 normals;
    int draw_count;
}
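(For comparison, a minimal C++ sketch of the same setup with the buffer sizes spelled out per element; the Vec3 type, the fixed attribute location 0 and the function name are illustrative assumptions, not the original C code.)

#include <vector>
#include <GL/glew.h> // or any other OpenGL loader, assumed to be initialized

struct Vec3 { float x, y, z; }; // assumed: three tightly packed floats per vertex

GLuint vertex_array_object;
GLuint vertex_array_buffers[2]; // 0 = positions, 1 = indices

void createMesh(const std::vector<Vec3>& vertices, const std::vector<unsigned int>& indices)
{
    glGenVertexArrays(1, &vertex_array_object);
    glBindVertexArray(vertex_array_object);

    glGenBuffers(2, vertex_array_buffers);

    // positions: byte size = element count * size of one element
    glBindBuffer(GL_ARRAY_BUFFER, vertex_array_buffers[0]);
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vec3), vertices.data(), GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);

    // indices: same pattern, count * size of one index
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vertex_array_buffers[1]);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), indices.data(), GL_STATIC_DRAW);

    glBindVertexArray(0);
}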
Is there a way to share data between shaders?
Do I just create a VAO and then switch shader programs as needed?
I guess the shaders would probably need to have the same attribute layout then (there's a sketch of what I mean after my code below).
Or do I create the VBOs once and, for each object-shader pair, create a VAO that binds the data to that shader's variables?
This should upload most data to OpenGL only once, and I could reuse quite a lot.
I tried the second approach, but I'm not sure whether it has reduced memory usage or not. The code pieces in question are below; I hope it's enough.
Or is there some other, even better way?
The idea is to reduce memory usage and generally speed up the process.
void Mesh::bind()
{
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, verticesCount * sizeof(GLfloat), vertices, GL_STATIC_DRAW);

    glBindBuffer(GL_ARRAY_BUFFER, nbo);
    glBufferData(GL_ARRAY_BUFFER, normalCount * sizeof(GLfloat), normals, GL_STATIC_DRAW);

    glBindBuffer(GL_ARRAY_BUFFER, uvbo);
    glBufferData(GL_ARRAY_BUFFER, uvCount * sizeof(GLfloat), UVs, GL_STATIC_DRAW);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesCount * sizeof(GLuint), indices, GL_STATIC_DRAW);
}
void MeshRenderer::init()
{
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    glBindBuffer(GL_ARRAY_BUFFER, mesh->getVertexBufferId());
    GLuint posId = shaderProgram->getAttribute("LVertexPos2D");
    glVertexAttribPointer(posId, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(posId);

    glBindBuffer(GL_ARRAY_BUFFER, mesh->getNormalBufferId());
    GLuint normId = shaderProgram->getAttribute("LVertexNorm");
    glVertexAttribPointer(normId, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(normId);

    glBindBuffer(GL_ARRAY_BUFFER, mesh->getUVBufferId());
    GLuint uvId = shaderProgram->getAttribute("uv");
    glVertexAttribPointer(uvId, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), NULL);
    glEnableVertexAttribArray(uvId);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mesh->getIndexBufferId());

    glBindVertexArray(0);
}
GameObject* GameObject::createCustom(Mesh* mesh)
{
    Texture* texture = new Texture();
    texture->loadFromFile("UV.png", NULL);
    GenericShaderProgram* shader = new GenericShaderProgram("basic.vs", "basic.fs");
    shader->loadProgram();
    return new GameObject(texture, shader, mesh);
}

GameObject::GameObject(Texture* texture, ShaderProgram* program, Mesh* mesh)
{
    this->texture = texture;
    dir = new Vector3(1, 0, 0);
    meshRenderer = new MeshRenderer(mesh, program);
    transform.setRotation(1.57, 1, 0, 0);
    transform.translate(Vector3(0, -2, 0));
    transform.setScale(Vector3(1, 1, 1));
}
//In main.cpp
Mesh* mesh = new Mesh();
mesh->cubePrimitive();
mesh->init();
object = GameObject::createCustom(mesh);
object2 = GameObject::createCustom(mesh);
object3 = GameObject::createCustom(mesh);
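(As for the first approach mentioned above, one VAO switched between programs: it only works if every vertex shader pins its attributes to the same locations, for example with layout qualifiers. Below is a sketch under that assumption; the helper function and the program handles are hypothetical.)

#include <GL/glew.h> // assumes an OpenGL loader is already initialized

// Every vertex shader involved declares its inputs with fixed locations, e.g.:
//   layout(location = 0) in vec3 LVertexPos2D;
//   layout(location = 1) in vec3 LVertexNorm;
//   layout(location = 2) in vec2 uv;
// so the one VAO's attribute setup is valid for all of them.
void drawWithTwoPrograms(GLuint vao, GLuint programA, GLuint programB, GLsizei indexCount)
{
    glBindVertexArray(vao);

    glUseProgram(programA);
    glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_INT, 0);

    glUseProgram(programB); // same VAO, different shader program
    glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_INT, 0);

    glBindVertexArray(0);
}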
I'm writing a program which uses OpenGL for 3D rendering, and so far I've been programming on Ubuntu and all was good. However, I had to switch to Windows, and much to my surprise, the code that used to render a cube, a quad, or whatever else perfectly fine was now rendering absolutely nothing.
To check that the code was indeed the same, I took the copy I had on Windows, did a complete rebuild on Ubuntu, and it worked as intended there.
So, in an attempt to see where I could be going wrong, I fired up the debugger a few times to check whether anything strange was happening with memory, and I confirmed that the memory was fine.
The OpenGL context is being created fine, because the clear color shows up, so I really don't have a clue what might be going wrong.
The render code is as follows:
void render(Handle<Model> _model)
{
    if(!_model->created && _model->loaded)
    {
        glGenVertexArrays(1, &_model->VAO);
        glGenBuffers(1, &_model->VBO);
        glGenBuffers(1, &_model->EBO);

        glBindVertexArray(_model->VAO);

        glBindBuffer(GL_ARRAY_BUFFER, _model->VBO);
        glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * _model->vertices.size(), &_model->vertices[0], GL_STATIC_DRAW);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _model->EBO);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * _model->indices.size(), &_model->indices[0], GL_STATIC_DRAW);

        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)0);
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)(3*sizeof(float)));
        glEnableVertexAttribArray(1);
        glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)(6*sizeof(float)));
        glEnableVertexAttribArray(2);

        glBindBuffer(GL_ARRAY_BUFFER, 0);
        glBindVertexArray(0);

        _model->created = true;
    }

    if(_model->loaded && _model->created)
    {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texture);

        _model->shader.useProgram();
        _model->shader.setMat4("transform", _model->transform);
        _model->shader.setMat4("view", this->view);
        _model->shader.setMat4("projection", this->projection);

        glBindVertexArray(_model->VAO);
        glDrawElements(GL_TRIANGLES, _model->indices.size(), GL_UNSIGNED_INT, 0);
        glBindVertexArray(0);

        if(glGetError() != GL_NO_ERROR)
        {
            std::cout << "erro" << std::endl;
        }
    }
}
The Handle is more or less a pointer wrapper and the Model struct is just a bunch of data:
struct Model
{
    unsigned int VAO;
    unsigned int VBO;
    unsigned int EBO;
    std::vector<unsigned int> indices;
    Shader shader;
    glm::mat4 transform;
    std::vector<Vertex> vertices;
    bool created;
    bool loaded;
};
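(For context, the attribute offsets in render(), namely 0, 3*sizeof(float) and 6*sizeof(float) with a stride of sizeof(Vertex), imply a tightly packed vertex layout roughly like the sketch below; the actual Vertex definition isn't shown, so this is an assumption.)

#include <glm/glm.hpp>

// Assumed layout, matching the offsets used in render(): 8 tightly packed floats.
struct Vertex
{
    glm::vec3 position; // offset 0
    glm::vec3 normal;   // offset 3 * sizeof(float)
    glm::vec2 uv;       // offset 6 * sizeof(float)
};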
The fact that this code works on Ubuntu and not Windows is driving me insane.
I'm learning OpenGL and ran into an "obstacle".
I drew some houses (blocks and pyramids) using an index buffer.
This works fine when I load all the vertices (from all houses) into the vertex buffer and use one big index buffer. Now I want to animate the objects, so I load them into the vertex buffer one by one in order to apply a transformation to each object individually. The code is not much different from the one-big-buffer version, but when I do this I just see some random shapes shooting around on the screen. My code is as follows:
I have a World class which holds the list of 3D objects, one big list of all vertices (for trial purposes), one big list of indices (also for trial purposes), and a method to add an Object3D object to the world.
class World
{
public:
    World();
    ~World();

    vector<Object3D> objects;
    vector<glm::vec3> Vertices;
    vector<GLushort> Indices;

    void AddObject(Object3D &object);
};
Object3D class:
class Object3D
{
public:
    Object3D();
    ~Object3D();

    glm::vec3 Position;
    vector<glm::vec3> Vertices;
    vector<unsigned int> Indices;
};
The World's AddObject method simply adds the object to the objects list and appends the object's vertices and indices to the Vertices and Indices lists to create one big buffer:
void World::AddObject(Object3D &object) {
    int oldVerticesSize = Vertices.size();
    objects.push_back(object);
    Vertices.insert(Vertices.end(), object.Vertices.begin(), object.Vertices.end());

    for (GLushort index : object.Indices)
    {
        Indices.push_back(index + oldVerticesSize);
    }
}
When I render the big buffer with all the vertices and indices (as shown below) it works fine.
void WorldRenderer::Render()
{
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glBindVertexArray(Vao); //use vao
    glDrawElements(GL_TRIANGLES, World.Indices.size(), GL_UNSIGNED_SHORT, 0);
    //glDrawArrays(GL_TRIANGLES, 0, World.Vertices.size());
    glBindVertexArray(0); //release vao

    //Model = glm::rotate(Model, 0.01f, glm::vec3(0.0f, 1.0f, 0.0f));
    Mvp = Projection * View * Model;
    glUniformMatrix4fv(UniformMvp, 1, GL_FALSE, glm::value_ptr(Mvp));

    glutSwapBuffers();
    //glutPostRedisplay();
}
When I loop through the objects and load each object's vertices into the buffer one at a time (as shown below), it shows some random "shapes" which keep "shooting around" or rapidly changing. What am I doing wrong here?
Thanks in advance for any advice.
void WorldRenderer::Render()
{
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    int index = 0;
    for (Object3D mesh : World.objects)
    {
        Mvp = Projection * View * Model;
        UpdateBuffer(mesh.Vertices, mesh.Indices);

        glBindVertexArray(Vao); //use vao
        glDrawElements(GL_TRIANGLES, mesh.Indices.size(), GL_UNSIGNED_SHORT, 0);
        glBindVertexArray(0); //release vao

        glUniformMatrix4fv(UniformMvp, 1, GL_FALSE, glm::value_ptr(Mvp));
        index++;
    }

    glutSwapBuffers();
}
UpdateBuffers method:
void WorldRenderer::UpdateBuffer(vector<glm::vec3> vertices, vector<unsigned int> indices) {
    //fill Vbo
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    //fill ibo
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), indices.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    glBindVertexArray(Vao);
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
}
For the first Render method (without the loop) the buffers are created once in the init method which looks like this:
void WorldRenderer::Init(int argc, char ** argv) {
    InitGlutGlew(argc, argv);
    InitMatrices();
    glDisable(GL_CULL_FACE);

    //-------------- init buffers --------------
    // vbo vertices
    glGenBuffers(1, &Vbo);
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    glBufferData(GL_ARRAY_BUFFER, World.Vertices.size() * sizeof(glm::vec3),
        &World.Vertices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    //ibo
    glGenBuffers(1, &Ibo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, World.Indices.size() * sizeof(unsigned int),
        &World.Indices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    // VAO setup
    glGenVertexArrays(1, &Vao);
    glBindVertexArray(Vao);

    // Bind vertices to vao
    glBindBuffer(GL_ARRAY_BUFFER, Vbo);
    //Bind elements
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, Ibo);

    //------ init shaders -----------
    InitShader();
}
As soon as I see your UpdateBuffer function, I suspect that the glBufferData call for the VBO is not given the right size.
The second parameter, vertices.size(), returns just the number of elements in the vector, not the actual size of the data in bytes.
So the second parameter should be vertices.size() * sizeof(glm::vec3).
The third parameter is fine, as vertices.data() returns a pointer to the underlying array; if that doesn't work, pass the address of the first element directly, as shown below.
Overall it should look something like this:
glBufferData(
    GL_ARRAY_BUFFER,
    vertices.size() * sizeof(glm::vec3),
    &vertices[0],
    GL_STATIC_DRAW
);
Check if it works.
Why are you seeing a difference?
First render:
Your buffer contains all the world's vertex data contiguously when glDrawElements is called.
So mesh 1's last vertex is followed by mesh 2's first vertex, and so on, so you see a kind of closed shape.
Second render:
Your buffer contains only one mesh's data at a time when glDrawElements is called,
so each shape ends with its mesh's own glDrawElements call.
To obtain the same result as the first render, you have to first update a single vertex buffer with all the meshes (use glBufferSubData),
then call glDrawElements once. Then you will see the same result.
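Something like the following sketch (Vao, Vbo and World follow the naming in your code; totalVertexBytes stands for the combined size of all meshes, and the VAO's attribute pointer is assumed to already reference Vbo):
// Reserve the vertex buffer once, large enough for every mesh.
glBindBuffer(GL_ARRAY_BUFFER, Vbo);
glBufferData(GL_ARRAY_BUFFER, totalVertexBytes, nullptr, GL_DYNAMIC_DRAW);

// Stream each mesh into its slice of the buffer.
GLintptr offset = 0;
for (const Object3D& mesh : World.objects)
{
    GLsizeiptr bytes = mesh.Vertices.size() * sizeof(glm::vec3);
    glBufferSubData(GL_ARRAY_BUFFER, offset, bytes, mesh.Vertices.data());
    offset += bytes;
}

// The indices were already merged with per-object offsets in World::AddObject,
// so one draw call covers everything.
glBindVertexArray(Vao);
glDrawElements(GL_TRIANGLES, World.Indices.size(), GL_UNSIGNED_SHORT, 0);
glBindVertexArray(0);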
So I have some code that creates a buffer and puts some vertices in it:
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glEnableVertexAttribArray(0);
I also bind it to a shader attribute:
glBindAttribLocation(programID, 0, "pos");
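(For context, glBindAttribLocation only takes effect the next time the program is linked, so the binding is assumed to happen before glLinkProgram, roughly in this order:)
glBindAttribLocation(programID, 0, "pos"); // must come before linking to take effect
glLinkProgram(programID);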
And, finally, draw it:
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glDrawArrays(GL_TRIANGLES, 0, 3);
Of course, there is other code, but all of this runs fine (it displays a red triangle on the screen).
However, the instant I try to factor this stuff out into a struct, nothing will display (here is one of the methods):
void loadVerts(GLfloat verts[], int indices)
{
    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);
    glVertexAttribPointer(indice, indices, GL_FLOAT, GL_FALSE, 0, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glEnableVertexAttribArray(indice);
    indice++;
    buffers.push_back(vertexbuffer);
}
I've quadruple-checked this code, and I've also traced it to make sure it matches the code above whenever it's called. My draw call is almost the same as my original:
void draw()
{
    glBindBuffer(GL_ARRAY_BUFFER, buffers.at(0));
    glDrawArrays(GL_TRIANGLES, 0, 3);
}
I've also tried making this a class, and adding/changing many parts of the code. buffers and indice are just some vars to keep track of buffers and attribute indexes. buffers is an std::vector<GLuint> FWIW.
The main problem is here:
void loadVerts(GLfloat verts[], int indices)
{
...
glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);
The type of the verts argument is a pointer to GLfloat. Your function signature is equivalent to:
void loadVerts(GLfloat* verts, int indices)
So sizeof(verts), which is used as the second argument to glBufferData(), is 4 on a 32-bit architecture, 8 on a 64-bit architecture.
You will need to pass the size as an additional argument to this function, and use that value as the second argument to glBufferData().
These statements also look somewhat confusing:
glVertexAttribPointer(indice, indices, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(indice);
I can't tell if there's a real problem, but you have two variables with very similar names that are used very differently. indice needs to be the location of the attribute in your vertex shader, while indices needs to be the number of components in the attribute.
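Putting both points together, here is a sketch of how the function could look; the parameter names are mine, and indice and buffers are the members from your code:
void loadVerts(const GLfloat* verts, GLsizeiptr count, GLint components)
{
    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);

    // `count` is the number of floats, so the byte size is count * sizeof(GLfloat).
    glBufferData(GL_ARRAY_BUFFER, count * sizeof(GLfloat), verts, GL_STATIC_DRAW);

    // `indice` is the attribute location, `components` the number of floats per vertex.
    glVertexAttribPointer(indice, components, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(indice);
    indice++;

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    buffers.push_back(vertexbuffer);
}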
Whenever I try to draw a mesh using glDrawElements, my program gets no errors, but it doesn't draw anything. I'm pretty sure the issue is with glDrawElements, since if I comment glDrawElements out and replace it with glDrawArrays, it works perfectly. So, my question is, can anyone help me figure out why this is happening?
glGetError also returns no errors.
#include "Mesh.h"

Mesh::Mesh(glm::vec2* vertices, glm::vec2* texCoords, GLushort* elements, int size)
{
    m_size = size;
    m_vertices = vertices;
    m_elements = elements;
    movementVector = glm::vec2(0.0f, 0.0f);

    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, size * sizeof(float), vertices, GL_DYNAMIC_DRAW);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);

    glGenBuffers(1, &tex_vbo);
    glBindBuffer(GL_ARRAY_BUFFER, tex_vbo);
    glBufferData(GL_ARRAY_BUFFER, size * sizeof(float), texCoords, GL_STATIC_DRAW);
    // note: I assume that vertex positions are location 0
    int dimensions = 2; // 2d data for texture coords
    glVertexAttribPointer(1, dimensions, GL_FLOAT, GL_FALSE, 0, NULL);
    glEnableVertexAttribArray(1); // don't forget this!

    glGenBuffers(1, &ebo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(elements), elements, GL_STATIC_DRAW);
}
Mesh::~Mesh()
{
    glDeleteVertexArrays(1, &vbo);
}

void Mesh::Draw()
{
    glBindVertexArray(vao);
    glEnableVertexAttribArray(0);
    //glDrawArrays(GL_TRIANGLES, 0, 8);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, NULL);
    glDisableVertexAttribArray(0);
}
Here's a link to the code handling drawing objects to the screen. Also, I have tested my shaders and they are fully functional; unless drawing via glDrawElements requires changes to the shaders as well, they are not the cause of this.
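(As with the sizeof(verts) issue discussed in the earlier answer, note that sizeof(elements) in the constructor above is the size of a pointer. Below is a sketch of sizing the element-buffer upload from an explicit element count instead; the uploadIndices helper and its elementCount parameter are assumptions about how the count would be passed in.)

void uploadIndices(GLuint ebo, const GLushort* elements, GLsizei elementCount)
{
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
    // byte size comes from the element count, not from sizeof on the pointer
    glBufferData(GL_ELEMENT_ARRAY_BUFFER,
                 elementCount * sizeof(GLushort),
                 elements,
                 GL_STATIC_DRAW);
}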