OpenGL 3.3 failing to render - C++

I'm writing a program that uses OpenGL for 3D rendering, and so far I've been developing on Ubuntu, where everything worked fine. However, I had to switch to Windows, and much to my surprise the code that used to render a cube, a quad, or whatever really (it was working perfectly fine) now renders absolutely nothing.
To verify that the code was indeed the same, I took the copy I had on Windows, did a complete rebuild on Ubuntu, and it worked as intended there.
So, in an attempt to see where I could be going wrong, I fired up the debugger a few times to check for strange memory issues or anything like that, and I confirmed that the memory was fine.
The OpenGL context is being created fine, because the clear color is showing, so I really don't have a clue what might be going wrong.
The render code is as follows:
void render(Handle<Model> _model)
{
    // First use: create the GPU objects and upload the mesh data.
    if(!_model->created && _model->loaded)
    {
        glGenVertexArrays(1, &_model->VAO);
        glGenBuffers(1, &_model->VBO);
        glGenBuffers(1, &_model->EBO);

        glBindVertexArray(_model->VAO);

        glBindBuffer(GL_ARRAY_BUFFER, _model->VBO);
        glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * _model->vertices.size(), &_model->vertices[0], GL_STATIC_DRAW);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _model->EBO);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * _model->indices.size(), &_model->indices[0], GL_STATIC_DRAW);

        // Interleaved Vertex layout: 3 floats, 3 floats, 2 floats.
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)0);
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)(3*sizeof(float)));
        glEnableVertexAttribArray(1);
        glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)(6*sizeof(float)));
        glEnableVertexAttribArray(2);

        glBindBuffer(GL_ARRAY_BUFFER, 0);
        glBindVertexArray(0);

        _model->created = true;
    }

    // Regular draw path.
    if(_model->loaded && _model->created)
    {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texture);

        _model->shader.useProgram();
        _model->shader.setMat4("transform", _model->transform);
        _model->shader.setMat4("view", this->view);
        _model->shader.setMat4("projection", this->projection);

        glBindVertexArray(_model->VAO);
        glDrawElements(GL_TRIANGLES, _model->indices.size(), GL_UNSIGNED_INT, 0);
        glBindVertexArray(0);

        if(glGetError() != GL_NO_ERROR)
        {
            std::cout << "erro" << std::endl;
        }
    }
}
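(Side note: glGetError only pops one error flag per call, so a single check can miss queued errors. A draining loop is the more thorough check; the logging here is just a sketch:)
GLenum err;
while((err = glGetError()) != GL_NO_ERROR)
{
    std::cout << "GL error: 0x" << std::hex << err << std::dec << std::endl;
}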
The Handle is more or less a pointer wrapper and the Model struct is just a bunch of data:
struct Model
{
    unsigned int VAO;
    unsigned int VBO;
    unsigned int EBO;
    std::vector<unsigned int> indices;
    Shader shader;
    glm::mat4 transform;
    std::vector<Vertex> vertices;
    bool created;
    bool loaded;
};
The fact that this code works on Ubuntu and not Windows is driving me insane.
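One thing I'm wondering about (just a guess on my part, since I haven't shown where they're set): created and loaded are plain bools with no initializer, and reading uninitialized members is undefined behavior, which can easily appear to work on one platform and fail on another. For reference, Model with explicit in-class default initializers (C++11 and later) would look like this:
struct Model
{
    unsigned int VAO = 0;
    unsigned int VBO = 0;
    unsigned int EBO = 0;
    std::vector<unsigned int> indices;
    Shader shader;
    glm::mat4 transform = glm::mat4(1.0f); // identity
    std::vector<Vertex> vertices;
    bool created = false; // never read garbage on any platform
    bool loaded = false;
};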

Related

How to properly add OpenGL code into an external function

I am attempting to take some OpenGL code that draws objects from a vertex array and move it into a class file. However, the code currently only runs when I have it in my main.cpp file. I call init() from my main() function before heading into my draw loop.
void init(){
    GLuint containerVAO, VBO;
    glGenVertexArrays(1, &containerVAO);
    glGenBuffers(1, &VBO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    glBindVertexArray(containerVAO);
    // Position attribute
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
    glEnableVertexAttribArray(0);
    // Normal attribute
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
    glEnableVertexAttribArray(1);
    glBindVertexArray(0);
}
The relevant code in my draw loop:
glUseProgram(noTextureShaderID);
glBindVertexArray(containerVAO);
///many different uniforms added here
glDrawArrays(GL_TRIANGLES, 0, 36);
This creates a cube no problem.
Now, when I replace the code inside my init() function (which initialises all objects, not just this one), I change it to this:
void init(){
    square.init(noTextureShaderID, vertices, NULL, 36);
    // square is a global variable within my main.cpp file
}
And then I use this function:
void Mesh::init(const GLuint& shaderid, GLfloat vertices[], const char* tex_file, int num_vertices)
{
    GLuint VBO;
    vao = NULL; // This is a variable within the Mesh class
    g_point_count = num_vertices;
    glGenVertexArrays(1, &vao);
    glGenBuffers(1, &VBO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    glBindVertexArray(vao);
    // Position attribute
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
    glEnableVertexAttribArray(0);
    // Normal attribute
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
    glEnableVertexAttribArray(1);
    glBindVertexArray(0);
}
Then, in my draw function I call this instead:
glUseProgram(noTextureShaderID);
glBindVertexArray(square.vao);
///many different uniforms added here
glDrawArrays(GL_TRIANGLES, 0, g_point_count);
But even though both programs seem to have the same code, only the first version generates a cube. What am I missing in this regard?
Your code is not identical in both cases, and this has nothing to do with OpenGL:
void Mesh::init(const GLuint& shaderid, GLfloat vertices[], const char* tex_file, int num_vertices)
{
    // ...
    glBufferData(..., sizeof(vertices), ...);
}
Despite the array syntax in the declaration, vertices is just a pointer here: the function never sees the array itself, and sizeof(vertices) is identical to sizeof(GLfloat*), which is typically 4 or 8 on today's machines. Hence, your buffer contains only the first one or two floats.
You either have to explicitly provide the array size as an additional parameter, or you use a (reference to a) higher-level object like std::vector, which manages the array internally and allows you to query its size.
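For illustration, a minimal sketch of both options applied to the init() shown above (my adaptation, not the asker's code):
// Option 1: derive the byte size from the vertex count the function
// already receives (assuming the 6-floats-per-vertex layout shown above).
void Mesh::init(const GLuint& shaderid, const GLfloat* vertices, const char* tex_file, int num_vertices)
{
    // ...
    glBufferData(GL_ARRAY_BUFFER, num_vertices * 6 * sizeof(GLfloat), vertices, GL_STATIC_DRAW);
    // ...
}

// Option 2: take a std::vector, which knows its own size.
void Mesh::init(const GLuint& shaderid, const std::vector<GLfloat>& vertices, const char* tex_file)
{
    // ...
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(GLfloat), vertices.data(), GL_STATIC_DRAW);
    // ...
}
Note that sizeof still works at the original call site, where vertices is a real array whose type hasn't decayed yet.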

OpenGL VAO always drawing first Vertex from origin

The title sums up my issue: no matter what I set the first vertex to, OpenGL always draws it at the origin. I've tried this on a school computer, where it wasn't a problem, but I'm not at school now, and it's possible something I've changed is causing the issue. Regardless, I see no reason why this should happen. In case the syntax seems weird, this code is written in D, but it should be an almost seamless port from C.
My code is:
class Mesh
{
    this(vec3[] vertices, uint[] indices)
    {
        draw_count = indices.length;

        glGenVertexArrays(1, &vertex_array_object);
        glBindVertexArray(vertex_array_object);

        glGenBuffers(NUM_BUFFERS, vertex_array_buffers.ptr);

        glBindBuffer(GL_ARRAY_BUFFER, vertex_array_buffers[POSITION_VB]);
        glBufferData(GL_ARRAY_BUFFER, vertices.length * vertices.sizeof, vertices.ptr, GL_STATIC_DRAW);
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(cast(GLuint)0, 3, GL_FLOAT, GL_FALSE, 0, cast(void*)0);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vertex_array_buffers[INDEX_VB]);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.length * indices.sizeof, indices.ptr, GL_STATIC_DRAW);

        glBindVertexArray(0);
    }

    void draw()
    {
        glBindVertexArray(vertex_array_object);
        glDrawElements(GL_TRIANGLES, draw_count, GL_UNSIGNED_INT, cast(const(void)*)0);
        glBindVertexArray(0);
    }

private:
    enum
    {
        POSITION_VB,
        INDEX_VB,
        NORMAL_VB,
        NUM_BUFFERS
    };

    GLuint vertex_array_object;
    GLuint vertex_array_buffers[NUM_BUFFERS];
    vec3 normals;
    int draw_count;
}

Cannot create more than one VAO

I am attempting to create a simple graphics system using modern (3.3) OpenGL for use in a game. Dynamic objects will have dynamic geometry, and VBOs will be updated whenever there is a change. This part was very easy to implement. While everything works well as long as only one VAO is used, further calls to glGenVertexArrays don't create another object (printing the IDs returns 1 for both the first and the second), and something in the VAO initialization for the new objects renders the first one unable to perform any edits. Every "modern OpenGL" tutorial either uses only one object or has some pretty significant conflicts (most often in the use of glVertexAttribPointer). The following code is involved with the graphics system of the objects. (I don't know how important this is, but I am using glfw3 for window and context creation.)
Init Game Object (graphics snippet)
glGenVertexArrays(1, &vertexArrayID);
glBindVertexArray(vertexArrayID);
glGenBuffers(1, &vertexCoordBufferID);
glBindBuffer(GL_ARRAY_BUFFER, vertexCoordBufferID);
glBufferData(GL_ARRAY_BUFFER, 2000 * 3 * sizeof(float), nullptr, GL_DYNAMIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(0);
allocatedVerticies = 2000;
Update Mesh
//generate mesh
std::vector<float> vertices;
vertices.reserve(voxels.size() * 3 * 12);
//Vertices are added to the vector. This part works. Removed this code for clarity.
vertexCount = vertices.size() / 3;

glBindVertexArray(vertexArrayID);
glBindBuffer(GL_ARRAY_BUFFER, vertexCoordBufferID);
if (allocatedVerticies < vertexCount)
{
    //reallocate the buffers
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), vertices.data(), GL_DYNAMIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
    allocatedVerticies = vertexCount;
}
else
{
    /////////////////////////////
    //reallocate the buffers - originally a reallocation was not going to be used, but glBufferSubData does not work
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), vertices.data(), GL_DYNAMIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
    allocatedVerticies = vertexCount;
    //////////////////////////////////////// this does not work for some reason
    //reset data
    //glBufferSubData(GL_ARRAY_BUFFER, 0, vertices.size() * sizeof(float), vertices.data());
}
edited = false;
Render Object
glBindVertexArray(vertexArrayID);
glDrawArrays(GL_TRIANGLES, 0, vertexCount);
If anyone more experienced with OpenGL could point out my mistake (or some other problem), that would be fantastic.
In the code that you provided I see nothing that could cause such behaviour. Maybe you should provide the entire part of your solution connected with rendering, for a better understanding of what is going on in your program?
By the way: once you have called glVertexAttribPointer() for your buffer, you don't have to do it every time you update the data stored in vertexCoordBufferID, provided of course that you don't change the data layout or its type. Instead of:
vertexCount = vertices.size() / 3;
glBindVertexArray(vertexArrayID);
glBindBuffer(GL_ARRAY_BUFFER, vertexCoordBufferID);
if (allocatedVerticies < vertexCount)
{
    //reallocate the buffers
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), vertices.data(), GL_DYNAMIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
    allocatedVerticies = vertexCount;
}
You can use just:
vertexCount = vertices.size() / 3;
glBindBuffer(GL_ARRAY_BUFFER, vertexCoordBufferID);
if (allocatedVerticies < vertexCount)
{
    //reallocate the buffers
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), vertices.data(), GL_DYNAMIC_DRAW);
    allocatedVerticies = vertexCount;
}
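As for the commented-out glBufferSubData: it updates an existing allocation rather than creating a new one, so the usual pattern looks like the sketch below (my own illustration, assuming the attribute setup was done once at initialization):
glBindBuffer(GL_ARRAY_BUFFER, vertexCoordBufferID);
if (allocatedVerticies < vertexCount)
{
    // Buffer too small: allocate new, larger storage.
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), vertices.data(), GL_DYNAMIC_DRAW);
    allocatedVerticies = vertexCount;
}
else
{
    // Buffer large enough: overwrite the front of the existing storage in place.
    glBufferSubData(GL_ARRAY_BUFFER, 0, vertices.size() * sizeof(float), vertices.data());
}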

Weird OpenGL issue when factoring out code

So I have some code that creates a buffer and puts some vertices in it:
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glEnableVertexAttribArray(0);
I also bind it to a shader attribute:
glBindAttribLocation(programID, 0, "pos");
And, finally, draw it:
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glDrawArrays(GL_TRIANGLES, 0, 3);
Of course, there is other code, but all of this stuff runs fine (it displays a red triangle on the screen).
However, the instant I try to factor this stuff out into a struct, nothing displays (here is one of the methods):
void loadVerts(GLfloat verts[], int indices)
{
    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);
    glVertexAttribPointer(indice, indices, GL_FLOAT, GL_FALSE, 0, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glEnableVertexAttribArray(indice);
    indice++;
    buffers.push_back(vertexbuffer);
}
I've quadruple-checked this code, and I've also traced it to make sure it matches the code above whenever it's called. My draw call is almost the same as my original:
void draw()
{
    glBindBuffer(GL_ARRAY_BUFFER, buffers.at(0));
    glDrawArrays(GL_TRIANGLES, 0, 3);
}
I've also tried making this a class, and adding/changing many parts of the code. buffers and indice are just some variables to keep track of buffers and attribute indexes; buffers is a std::vector<GLuint> FWIW.
The main problem is here:
void loadVerts(GLfloat verts[], int indices)
{
    // ...
    glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);
The type of the verts argument is a pointer to GLfloat. Your function signature is equivalent to:
void loadVerts(GLfloat* verts, int indices)
So sizeof(verts), which is used as the second argument to glBufferData(), is 4 on a 32-bit architecture, 8 on a 64-bit architecture.
You will need to pass the size as an additional argument to this function, and use that value as the second argument to glBufferData().
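A sketch of one way to do that (vertCount, components, and the call-site computation are my naming, not from the question):
void loadVerts(GLfloat verts[], int vertCount, int components)
{
    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    // vertCount = number of floats in verts, supplied by the caller.
    glBufferData(GL_ARRAY_BUFFER, vertCount * sizeof(GLfloat), verts, GL_STATIC_DRAW);
    glVertexAttribPointer(indice, components, GL_FLOAT, GL_FALSE, 0, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glEnableVertexAttribArray(indice);
    indice++;
    buffers.push_back(vertexbuffer);
}

// At the call site the array type hasn't decayed yet, so sizeof works:
loadVerts(g_vertex_buffer_data, sizeof(g_vertex_buffer_data) / sizeof(GLfloat), 3);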
These statements also look somewhat confusing:
glVertexAttribPointer(indice, indices, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(indice);
I can't tell if there's a real problem, but you have two variables with very similar names that are used very differently. indice needs to be the location of the attribute in your vertex shader, while indices needs to be the number of components in the attribute.
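Renaming them would make the intent clearer, e.g. (purely illustrative):
// location = attribute slot in the vertex shader; components = values per vertex
glVertexAttribPointer(location, components, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(location);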

glDrawElements not drawing anything to screen

Whenever I try to draw a mesh using glDrawElements, my program gets no errors, but it doesn't draw anything. I'm pretty sure the issue is with glDrawElements, since if I comment glDrawElements out and replace it with glDrawArrays, it works perfectly. So, my question is: can anyone help me figure out why this is happening?
Also, glGetError returns no errors.
#include "Mesh.h"
Mesh::Mesh(glm::vec2* vertices, glm::vec2* texCoords, GLushort* elements, int size)
{
m_size = size;
m_vertices = vertices;
m_elements = elements;
movementVector = glm::vec2(0.0f, 0.0f);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, size * sizeof(float), vertices, GL_DYNAMIC_DRAW);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
glGenBuffers(1, &tex_vbo);
glBindBuffer(GL_ARRAY_BUFFER, tex_vbo);
glBufferData(GL_ARRAY_BUFFER, size * sizeof(float), texCoords, GL_STATIC_DRAW);
// note: I assume that vertex positions are location 0
int dimensions = 2; // 2d data for texture coords
glVertexAttribPointer(1, dimensions, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray (1); // don't forget this!
glGenBuffers(1, &ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(elements), elements, GL_STATIC_DRAW);
}
Mesh::~Mesh()
{
glDeleteVertexArrays(1, &vbo);
}
void Mesh::Draw()
{
glBindVertexArray(vao);
glEnableVertexAttribArray(0);
//glDrawArrays(GL_TRIANGLES, 0, 8);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, NULL);
glDisableVertexAttribArray(0);
}
Here's a link to the code handling drawing objects to the screen. Also, I have tested my shaders; they are fully functional, and unless drawing via glDrawElements requires changes to the shaders as well, there is nothing in them causing this.