Access violation when I use my index buffer to draw objects [duplicate]

This question already has answers here:
OpenGL object in C++ RAII class no longer works
(2 answers)
Closed 1 year ago.
I am learning OpenGL and I am trying to abstract it to make it more convenient to use, but I am getting access violations when I use my IndexBuffer class while rendering.
This is my code for IndexBuffer.h:
class IndexBuffer
{
public:
    IndexBuffer(void* data, int count);
    IndexBuffer(int count);
    IndexBuffer Bind();
    IndexBuffer UnBind();
    void AddData(void* data);
    ~IndexBuffer();
private:
    int count;
    size_t size;
    unsigned int id;
};
And this is my code for IndexBuffer.cpp:
#include "IndexBuffer.h"
#include "glew/glew.h"
IndexBuffer::IndexBuffer(void* data, int count) : id(0), count(count), size(sizeof(unsigned int) * count)
{
    glGenBuffers(1, &id);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * count, data, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}

IndexBuffer::IndexBuffer(int count) : id(0), count(count), size(sizeof(unsigned int) * count)
{
    glGenBuffers(1, &id);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * count, nullptr, GL_DYNAMIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}

IndexBuffer IndexBuffer::Bind()
{
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
    return *this;
}

IndexBuffer IndexBuffer::UnBind()
{
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    return *this;
}

void IndexBuffer::AddData(void* data)
{
    Bind();
    glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, 0, sizeof(unsigned int) * count, data);
    UnBind();
}

IndexBuffer::~IndexBuffer()
{
    glDeleteBuffers(1, &id);
}
I wrote my VertexBuffer the same way and it works fine, but my IndexBuffer just doesn't work.
This is my main.cpp:
#include <iostream>
#include "glew/glew.h"
#include "glfw/glfw3.h"
#include "VertexBuffer.h"
#include "IndexBuffer.h"
struct Vertex
{
    float aPos[2];
};

int main() {
    GLFWwindow* window;
    if (glfwInit() == GLFW_FALSE)
    {
        return -1;
    }
    //glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_COMPAT_PROFILE);
    window = glfwCreateWindow(600, 600, "Hello world", nullptr, nullptr);
    glfwMakeContextCurrent(window);
    if (glewInit() != GLEW_OK)
    {
        return -2;
    }
    unsigned int index[3] = {
        0, 1, 2
    };
    Vertex data[] = {
        Vertex({-0.5f, -0.5f}),
        Vertex({ 0.5f, -0.5f}),
        Vertex({ 0.0f,  0.5f})
    };
    unsigned int VertexArrayObject;
    glGenVertexArrays(1, &VertexArrayObject);
    VertexBuffer buffer = VertexBuffer(sizeof(Vertex) * 3);
    IndexBuffer Ibuffer = IndexBuffer(3);
    buffer.Bind();
    //glBufferData(GL_ARRAY_BUFFER, sizeof(float) * 6, data, GL_STATIC_DRAW);
    buffer.AddData(data);
    glBindVertexArray(VertexArrayObject);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const void*)offsetof(Vertex, aPos));
    //glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
    buffer.UnBind();
    Ibuffer.Bind();
    Ibuffer.AddData(index);
    Ibuffer.UnBind();
    while (!glfwWindowShouldClose(window))
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        buffer.Bind();
        Ibuffer.Bind();
        glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, nullptr);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glDeleteVertexArrays(1, &VertexArrayObject);
    glfwDestroyWindow(window);
    return 0;
}
Can anybody help me out?

In short, you have undefined behaviour.
Your classes don't support deep copy.
When a Bind function returns the object (i.e. itself) by value, like:
IndexBuffer IndexBuffer::Bind()
a temporary copy is created, and when that copy is destroyed the destructor of IndexBuffer runs, which deletes the previously allocated buffer, so the buffer's id is left dangling.
All Bind/UnBind functions should instead return a reference to the instance:
IndexBuffer& IndexBuffer::Bind()
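A minimal sketch of that fix, assuming the rest of the class stays as in the question: return references from Bind/UnBind and delete the copy operations, so no temporary copy can ever run the destructor and free the GL buffer (the same change applies to the VertexBuffer class).

class IndexBuffer
{
public:
    IndexBuffer(void* data, int count);
    IndexBuffer(int count);

    // Copies would let two objects delete the same GL buffer; forbid them.
    IndexBuffer(const IndexBuffer&) = delete;
    IndexBuffer& operator=(const IndexBuffer&) = delete;

    IndexBuffer& Bind();    // return a reference, not a copy
    IndexBuffer& UnBind();
    void AddData(void* data);
    ~IndexBuffer();
private:
    int count;
    size_t size;
    unsigned int id;
};

IndexBuffer& IndexBuffer::Bind()
{
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
    return *this;
}

IndexBuffer& IndexBuffer::UnBind()
{
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    return *this;
}

With the copy operations deleted, construct the buffers directly (e.g. IndexBuffer Ibuffer(3);); the IndexBuffer Ibuffer = IndexBuffer(3); form also still compiles under C++17 thanks to guaranteed copy elision.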

Related

gl_deletebuffers not working in a separate thread

In my rendering application, if I run the render function in the main loop everything works fine, but if I move the rendering function to another thread the destructors of the objects are no longer able to release their buffers when an object gets destroyed.
The destructors for the objects are called, but it seems as if glDeleteBuffers is not able to release the buffers.
How I came to this conclusion:
1) When I run everything in the main loop and create an object, the VAO number for the object is 1.
2) After destroying the object, the next object's VAO is also assigned number 1.
But when the rendering part goes to a separate thread:
1) The VAO number keeps incrementing with every object.
2) System RAM usage also keeps increasing, and the memory is only released when I close the application.
3) The destructors for the objects are definitely called when I delete an object, but it seems as if they are not able to release the buffers.
//#define GLEW_STATIC
#include <gl\glew.h>
#include <glfw3.h>
#include "TreeModel.h"
#include "ui_WavefrontRenderer.h"
#include <QtWidgets/QApplication>
#include <QMessageBox>
#include <thread>
#define FPS_WANTED 60
const double limitFPS = 1.0 / 50.0;
Container cont;
const unsigned int SCR_WIDTH = 800;
const unsigned int SCR_HEIGHT = 600;
void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void processInput(GLFWwindow *window);
GLFWwindow* window = nullptr;
void RenderThread(WavefrontRenderer* w)
{
glfwMakeContextCurrent(window);
GLenum GlewInitResult;
glewExperimental = GL_TRUE;
GlewInitResult = glewInit();
if (GLEW_OK != GlewInitResult) // Check if glew is initialized properly
{
QMessageBox msgBox;
msgBox.setText("Not able to Initialize Glew");
msgBox.exec();
glfwTerminate();
}
if (window == NULL)
{
QMessageBox msgBox;
msgBox.setText("Not able to create GL Window");
msgBox.exec();
glfwTerminate();
//return -1;
}
w->InitData();
glEnable(GL_MULTISAMPLE);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBlendEquation(GL_FUNC_ADD);
while (!glfwWindowShouldClose(window))
{
// input
// -----
processInput(window);
// - Measure time
glClearColor(0.3, 0.3, 0.3, 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
w->render(); // DO the Rendering
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
std::terminate();
}
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
cont.SetName("RootItem");
TreeModel* model = new TreeModel("RootElement", &cont);
WavefrontRenderer w(model);
w.show();
glfwInit();
glfwWindowHint(GLFW_RESIZABLE, GL_TRUE);
glfwWindowHint(GLFW_SAMPLES, 4);
window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "Renderer", nullptr, nullptr); // Create the render window
glfwMakeContextCurrent(0);
std::thread renderThread(RenderThread, &w);
renderThread.detach();
return a.exec();
return 0;
}
Class definition for an object:
The render function w->render() calls the draw() function of an object.
The base class has a virtual destructor.
#include "Triangle.h"
#include "qdebug.h"
#include "qmessagebox.h"
float verticesTriangle[] = {
-50.0f, -50.0f, 0.0f, 0.0f , 0.0f,1.0f ,0.0f, 0.0f,
50.0f, -50.0f, 0.0f, 0.0f , 0.0f,1.0f ,1.0f, 0.0f,
0.0f, 50.0f, 0.0f, 0.0f, 0.0f,1.0f ,0.5f, 1.0f
};
Triangle::Triangle() : Geometry("TRIANGLE", true)
{
this->isInited = 0;
this->m_VBO = 0;
this->m_VAO = 0;
this->iNumsToDraw = 0;
this->isChanged = true;
}
Triangle::Triangle(const Triangle& triangle) : Geometry( triangle )
{
CleanUp();
this->isInited = 0;
this->m_VBO = 0;
this->m_VAO = 0;
this->iNumsToDraw = triangle.iNumsToDraw;
this->isChanged = true;
this->shader = ResourceManager::GetShader("BasicShader");
iEntries = 3;
}
Triangle& Triangle::operator=(const Triangle& triangle)
{
CleanUp();
Geometry::operator=(triangle);
this->isInited = 0;
this->m_VBO = 0;
this->m_VAO = 0;
this->iNumsToDraw = triangle.iNumsToDraw;
this->isChanged = true;
this->shader = ResourceManager::GetShader("BasicShader");
return (*this);
}
void Triangle::init()
{
glGenVertexArrays(1, &m_VAO);
glGenBuffers(1, &m_VBO);
glBindVertexArray(m_VAO);
glBindBuffer(GL_ARRAY_BUFFER, m_VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(verticesTriangle), verticesTriangle, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(6 * sizeof(float)));
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
isInited = true;
}
void Triangle::CleanUp()
{
if (!this->isInited)
{
return;
}
if (this->m_VAO)
glDeleteVertexArrays(1, &this->m_VAO);
if (this->m_VBO)
glDeleteBuffers(1, &this->m_VBO);
this->isInited = false;
}
void Triangle::draw()
{
if (isChanged)
{
init();
isChanged = false;
}
this->shader.Use();
glBindVertexArray(m_VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
}
Triangle::~Triangle()
{
if (this->m_VAO)
glDeleteVertexArrays(1, &this->m_VAO);
if (this->m_VBO)
glDeleteBuffers(1, &this->m_VBO);
this->isInited = false;
}
OpenGL contexts are thread-local state:
Every thread has exactly one or no OpenGL context active in it at any given time.
Each OpenGL context can be active in either exactly one thread or no thread at any given time.
OpenGL contexts are not automatically migrated between threads.
I.e. if you don't explicitly make the OpenGL context in question not current on the thread where it's currently active, and subsequently make it current on the thread you're calling glDeleteBuffers on, that call will have no effect; at least not on the context you expected it to have an effect on.
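For illustration only (not part of the original answer), a minimal sketch with GLFW of handing the context over to the render thread, so that every GL call, including the glDeleteBuffers issued by the destructors, runs on the thread where that context is current:

#include <thread>
#include <glfw3.h>

void startRenderThread(GLFWwindow* window)
{
    glfwMakeContextCurrent(nullptr);       // release the context on the current thread first
    std::thread([window]() {
        glfwMakeContextCurrent(window);    // make it current on the render thread
        // From here on, create, draw and destroy all GL objects on this thread,
        // so glDeleteBuffers affects the context you expect it to.
    }).detach();
}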

Custom OpenGL buffer class doesn't display anything

I wanted to create a class to handle OpenGL buffers such as vertex buffer objects or color buffers.
Here is buffer.h:
#pragma once
#include <GL/glew.h>
class glBuffer
{
public:
glBuffer(GLenum target);
void setdata(const void *data, GLenum mode);
void bind(GLuint index, GLint valuePerVertex, GLenum variableType = GL_FLOAT, GLsizei stride = 0, int offset = 0);
void unbind();
GLuint getBufferID() const;
~glBuffer();
private:
bool m_active;
GLuint m_buffer;
GLuint m_index;
GLenum m_target;
};
And buffer.cpp:
#include "buffer.h"
#include <GL/glew.h>
#include <iostream>
glBuffer::glBuffer(GLenum target)
{
m_target = target;
m_active = false;
glGenBuffers(1, &m_buffer);
}
void glBuffer::setdata(const void *data, GLenum mode)
{
glBindBuffer(m_target, m_buffer);
glBufferData(m_target, sizeof(data), data, mode);
glBindBuffer(m_target, 0);
}
void glBuffer::bind(GLuint index, GLint valuePerVertex, GLenum variableType, GLsizei stride, int offset)
{
m_active = true;
m_index = index;
glEnableVertexAttribArray(m_index);
glBindBuffer(m_target, m_buffer);
glVertexAttribPointer(
m_index,
valuePerVertex,
variableType,
GL_FALSE, //normalized?
stride,
(void*)offset //buffer offset
);
}
void glBuffer::unbind()
{
m_active = false;
glBindBuffer(m_target, 0);
glDisableVertexAttribArray(m_index);
}
GLuint glBuffer::getBufferID() const
{
return m_buffer;
}
glBuffer::~glBuffer()
{
if (!m_active){
unbind();
}
glDeleteBuffers(1, &m_buffer);
}
Here is how I use it in my application, where I #include "buffer.h":
glBuffer vbo(GL_ARRAY_BUFFER);
vbo.setdata(color_buffer_data, GL_STATIC_DRAW);
vbo.bind(0, 3);
It replaces:
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(
0, // attribute 0. No particular reason for 0, but must match the layout in the shader.
3, // size
GL_FLOAT, // type
GL_FALSE, // normalized?
0, // stride
(void*)0 // array buffer offset
);
When I compile and run it, I get a black window without anything drawn.
What is happening?
PS: I'm using Visual Studio, GLFW3 and GLEW.
Here is your working code for setting the buffer data:
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
I am assuming that your vertex_buffer_data is an array, which is why this works. Since you cast this to a void* you cannot simply call sizeof on the pointer. What you need is the size of the entire array in bytes.
Here is the function in your class that does not work:
void glBuffer::setdata(const void *data, GLenum mode)
{
glBindBuffer(m_target, m_buffer);
glBufferData(m_target, sizeof(data), data, mode);
glBindBuffer(m_target, 0);
}
This is because sizeof(data) is not the same as in the first case. It is either 4 (32-bit) or 8 (64-bit), as pointed out by @genpfault. The simple solution would be to change your function as below.
void glBuffer::setdata(const void *data, int numElements, size_t elementSize, GLenum mode)
{
glBindBuffer(m_target, m_buffer);
glBufferData(m_target, numElements * elementSize, data, mode);
glBindBuffer(m_target, 0);
}
In this function, 'numElements' is the number of elements in the array that your void* data points to, and 'elementSize' is the size of each element.
Here is an example code for the above function
float vertex_buffer_data[9] = {0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f};
glBuffer vbo(GL_ARRAY_BUFFER);
vbo.setdata(vertex_buffer_data, 9, sizeof(float), GL_STATIC_DRAW);
vbo.bind(0, 3);
And it should work. If you are still confused, here is a small example program to demonstrate why your code did not work.
#include "stdafx.h"
int _tmain(int argc, _TCHAR* argv[])
{
int a[5] = {1, 2, 3, 4, 5};
void* ptr = a;
printf( " sizeof(a) = %d \n", sizeof(a));
printf( " sizeof(a[0]) = %d \n", sizeof(a[0]));
printf( " sizeof(ptr) = %d \n", sizeof(ptr));
getchar();
return 0;
}
Output:
sizeof(a) = 20
sizeof(a[0]) = 4
sizeof(ptr) = 4
Note: this was compiled as 32-bit on Windows in Visual Studio, hence the pointer size is 4 bytes. It would be 8 if it were compiled as 64-bit.
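As a side note (not part of the original answer), a small templated overload added to the glBuffer class from the question can deduce the byte size of a real array at compile time, so the sizeof(pointer) mistake cannot happen for array arguments; a sketch:

// Hypothetical overload, declared inside the existing glBuffer class in buffer.h:
// T and N are deduced from the array argument, so the byte size is N * sizeof(T)
// and no decay to a pointer takes place.
template <typename T, size_t N>
void setdata(const T (&data)[N], GLenum mode)
{
    glBindBuffer(m_target, m_buffer);
    glBufferData(m_target, N * sizeof(T), data, mode);
    glBindBuffer(m_target, 0);
}

Usage then becomes vbo.setdata(vertex_buffer_data, GL_STATIC_DRAW); with no size arguments to get wrong.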

Problems with initiating different models in openGl with c++

I'm learning OpenGL and I'm trying to do something very simple. I have a graphics object called Model which contains a vector of GLfloats for vertices and has a draw function. In addition to this, I have an addVertex function which takes in 3 floats and a draw function which binds the object's VAO and then unbinds it after drawing. I have no problem rendering one of these objects and adding points to it; however, I can't draw more than one of them at the same time without the program crashing and Visual Studio telling me "Frame not in module", which isn't helpful. Also, only the last Model object I create can be rendered; any object that comes before it will crash the program.
Here's the Model object's code:
#include "Model.h"
#include <iostream>
#include <string>
Model::Model() {
drawMode = 0;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, NULL, NULL, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, NULL, NULL, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
glBindVertexArray(0);
}
Model::~Model() {
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
}
void Model::addVertex(GLfloat x, GLfloat y, GLfloat z) {
vertices.push_back(x);
vertices.push_back(y);
vertices.push_back(z);
update();
}
void Model::addIndex(int i, int j, int k) {
indices.push_back(i);
indices.push_back(j);
indices.push_back(k);
}
void Model::setShader(GLuint& shader) {
shaderProgram = &shader;
}
void Model::clearModel() {
vertices.clear();
indices.clear();
}
int Model::sizeOfVertices() {
return sizeof(GLfloat)*vertices.size();
}
int Model::sizeOfIndices() {
return sizeof(GLuint)*(indices.size());
}
void Model::draw() {
glUseProgram(*shaderProgram);
GLuint transformLoc = glGetUniformLocation(*shaderProgram, "model");
glUniformMatrix4fv(transformLoc, 1, GL_FALSE, glm::value_ptr(model));
glBindVertexArray(VAO);
switch (drawMode) {
case 0: glDrawArrays(GL_POINTS, 0, vertices.size() / 3);
break;
case 1: glDrawArrays(GL_LINE_STRIP, 0, vertices.size() / 3);
break;
case 2: glDrawArrays(GL_TRIANGLES, 0, vertices.size() / 3);
break;
case 3: glDrawArrays(GL_TRIANGLE_STRIP, 0, vertices.size() / 3);
break;
default: break;
}
glBindVertexArray(0);
}
void Model::setDrawMode(int type) {
drawMode = type;
}
void Model::move(glm::vec3 translation) {
model = glm::translate(model, translation);
}
void Model::rotate(float degrees,glm::vec3 axis) {
model = glm::rotate(model, degrees,axis);
}
void Model::update() {
glBindVertexArray(0);
glBindVertexArray(VAO);
glBufferData(GL_ARRAY_BUFFER, NULL, NULL, GL_STATIC_DRAW);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, NULL, NULL, GL_STATIC_DRAW);
glBufferData(GL_ARRAY_BUFFER, sizeOfVertices(), &vertices[0], GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glBindVertexArray(0);
}
There is an EBO included in the Model class but I'm not using it yet as I want to isolate the problem.
Model.h:
#pragma once
#include <vector>;
#include <glew.h>;
#include "glm.hpp"
#include "gtc/matrix_transform.hpp"
#include "gtc/type_ptr.hpp"
#include "gtc/constants.hpp"
class Model {
public:
int drawMode;
GLuint VAO;
GLuint VBO;
GLuint EBO;
GLuint *shaderProgram;
std::vector<GLfloat> vertices;
std::vector<GLuint> indices;
glm::mat4 model;
Model();
~Model();
void addVertex(GLfloat, GLfloat, GLfloat);
void addIndex(int, int, int);
void setShader(GLuint&);
void clearModel();
int sizeOfVertices();
int sizeOfIndices();
void draw();
void setDrawMode(int);
void move(glm::vec3);
void rotate(float, glm::vec3);
void update();
};
Main class:
#include <iostream>
#include <string>
#include <fstream>
#include <vector>
#include "Callbacks.h"
#include "Shader.h"
#include "GlState.h"
#include "Model.h"
#include <glew.h>
#include <glfw3.h>
#include "glm.hpp"
#include "gtc/matrix_transform.hpp"
#include "gtc/type_ptr.hpp"
#include "gtc/constants.hpp"
Model *model;
Model *model2;
Model *model3;
void mainLoop(GLFWwindow* window) {
glfwPollEvents();
//RENDER UNDER HERE
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glPointSize(5);
Callbacks::update(model3);
model3->draw();
model2->draw();
model->draw();
glfwSwapBuffers(window);
}
int main() {
GLFWwindow* window = GlState::Initialize(800,600);
GLuint shaderProgram = Shader::makeProgram("vertex.vs", "fragment1.fs");
GLuint shaderProgram2 = Shader::makeProgram("vertex.vs", "fragment2.fs");
model = new Model();
model2 = new Model();
model3 = new Model();
model->setShader(shaderProgram);
model2->setShader(shaderProgram);
model3->setShader(shaderProgram2);
while (!glfwWindowShouldClose(window))
{
mainLoop(window);
}
delete(model);
delete(model2);
delete(model3);
glfwTerminate();
return 0;
}
I'll also include the important part of Callbacks, which is called by update(Model* m):
void Callbacks::checkMouse(Model* m) {
if (leftMousePressed) {
if (tics == 0) {
m->addVertex(2*(mouseXCurrent-0.5), 2*(0.5-mouseYCurrent), 0.0f);
}
tics++;
}
}
I haven't set the camera yet, so it's in the default orthographic view.
Since I can only add points to the last Model object initiated without crashing (model3 in this case), I assume it must be something to do with the way these models are being initiated, but I can't figure out what! It's driving me nuts.
So an example of it crashing:
after the code segment in main:
model->setShader(shaderProgram);
model2->setShader(shaderProgram);
model3->setShader(shaderProgram2);
If you input
model->addVertex(0.1f,0.2f,0.0f);
or
model2->addVertex(0.1f,0.2f,0.0f);
the program will crash; however,
model3->addVertex(0.1f,0.2f,0.0f);
works with no problems.
The code will crash because you are accessing data outside of the buffers. In the constructor, you create buffers with size NULL:
glBufferData(GL_ARRAY_BUFFER, NULL, NULL, GL_STATIC_DRAW)
Note that the pointer constant NULL is totally invalid for the size argument of that call, but that is not the issue here.
When you call the addVertex method, you append the data to your std::vector, but not to the VBO. You never call the update method which actually transfers the data into your buffer, so your attribute pointers still point to the 0-sized buffer, and when drawing more than 0 vertices from that, you're just in undefined-behavior land.
So I found what the problem is. Within the update method I would bind the VAO, and the problem was that I assumed this also bound the VBO for editing, which apparently never happened. So when I called
glBufferData(GL_ARRAY_BUFFER, sizeOfVertices(), &vertices[0], GL_STATIC_DRAW);
I thought I was modifying the VBO within the VAO, but I was actually modifying the last VBO bound. The last time the VBO was bound was in the constructor, which is why I was only able to modify the last Model object created; the other ones would crash because I would bind their VAO and attempt to modify another Model's VBO.
At least I assume this was the problem as there is no error now because I'm calling
glBindBuffer(GL_ARRAY_BUFFER, VBO);
before I modify it.
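A minimal sketch of the corrected update(), using the members from the question: bind both the VAO and this model's VBO before uploading, so the data always goes into the right buffer.

void Model::update() {
    glBindVertexArray(VAO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);   // bind *this* model's VBO, not whatever was bound last
    glBufferData(GL_ARRAY_BUFFER, sizeOfVertices(), vertices.data(), GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (GLvoid*)0);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
}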

glDrawArrays does not draw anything in GL_TRIANGLES mode

My problem is that the glDrawArrays command stops working. I've written a few programs in which I used that command; however, now I don't have any idea why it doesn't work.
But I can still draw lines to the screen with GL_LINES instead of GL_TRIANGLES.
Here is my code:
#include "Mesh.h"
#include <iostream>
Mesh::Mesh()
{
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
size = 0;
}
void Mesh::AddVertices(const Vertex vertices[], int length)
{
size = length;
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
}
void Mesh::Draw()
{
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, Vertex::SIZE * 4, 0);
glDrawArrays(GL_TRIANGLES, 0, size);
glDisableVertexAttribArray(0);
}
Draw is always called by the Game class. And here is my Vertex class:
#include "Vertex.h"
Vertex::Vertex(glm::vec3 pos)
{
this->pos = pos;
}
Vertex::Vertex(float x, float y, float z)
{
pos = glm::vec3(x, y, z);
}
glm::vec3 Vertex::GetPos() const
{
return pos;
}
void Vertex::SetPos(glm::vec3 pos)
{
this->pos = pos;
}
sizeof(vertices) is sizeof(void*), because in C and C++ arrays in function arguments decay to pointers. You should use the length provided in the second argument, if it is the size in bytes.
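A minimal sketch of that fix, assuming length is the number of vertices (which is how size is later used in glDrawArrays):

void Mesh::AddVertices(const Vertex vertices[], int length)
{
    size = length;
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    // length * sizeof(Vertex) is the real byte size; sizeof(vertices) is only the size of a pointer
    glBufferData(GL_ARRAY_BUFFER, length * sizeof(Vertex), vertices, GL_STATIC_DRAW);
}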
I've found the solution. When initializing my OpenGL state, I had accidentally written glFrontFace(GL_CW) instead of glFrontFace(GL_CCW).

glDrawElements throw GL_INVALID_VALUE error

I am trying to draw part of my tile image, but I am getting a GL_INVALID_VALUE error when I call the glDrawElements function. There is no problem when I replace this call with glDrawArrays. The indices count parameter is not a negative number either.
Here is the code:
#define BUFFER_OFFSET(i) ((char *)nullptr + (i))
#define VERTEX_ATTR_PTR(loc, count, member, type) \
glEnableVertexAttribArray(loc); \
glVertexAttribPointer(loc, count, GL_FLOAT, GL_FALSE, sizeof(type), BUFFER_OFFSET(offsetof(struct type, member)))
// ---------- TextRenderer
void TextRenderer::setText(const string& text) {
vector<Vertex2f> vertex_buffer;
vector<GLuint> index_buffer;
GLfloat cursor = 0.f;
FPoint2D cell_size = font->getCellSize();
for (char c : text) {
TILE_ITER iter = font->getCharacter(c);
{
// UV
for (GLuint i = 0; i < 4; ++i) {
TILE_ITER _v = iter + i;
vertex_buffer.push_back( {
{
_v->pos[0] + cursor,
_v->pos[1],
_v->pos[2]
},
{ _v->uv[0], _v->uv[1] }
});
}
// Index
for (GLuint i = 0; i < 6; ++i)
index_buffer.push_back(
Tile::indices[i] + vertex_buffer.size() - 4);
}
cursor += cell_size.X;
}
vertices_count = vertex_buffer.size();
indices_count = index_buffer.size();
glBindVertexArray(vao);
{
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indices);
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER,
0,
indices_count * sizeof(GLuint),
&index_buffer[0]);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferSubData(GL_ARRAY_BUFFER,
0,
vertices_count * sizeof(Vertex2f),
&vertex_buffer[0]);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
glBindVertexArray(0);
}
void TextRenderer::create() {
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
{
indices = genGLBuffer( {
nullptr,
BUFFER_SIZE / 2 * sizeof(GLuint),
GL_ELEMENT_ARRAY_BUFFER
}, true, GL_DYNAMIC_DRAW);
vbo = genGLBuffer( {
nullptr,
BUFFER_SIZE * sizeof(Vertex2f),
GL_ARRAY_BUFFER
}, true, GL_DYNAMIC_DRAW);
VERTEX_ATTR_PTR(0, 3, pos, Vertex2f); // Vertex
VERTEX_ATTR_PTR(1, 2, uv, Vertex2f); // UV
}
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindVertexArray(0);
setText("DUPA");
}
void TextRenderer::draw(MatrixStack& matrix, GLint) {
static Shader shader(
getFileContents("shaders/text_frag.glsl"),
getFileContents("shaders/text_vert.glsl"),
"");
shader.begin();
shader.setUniform(GL_TEXTURE_2D, "texture", 0,
font->getHandle());
shader.setUniform("matrix.mvp", matrix.vp_matrix * matrix.model);
shader.setUniform("col", col);
{
glBindVertexArray(vao);
//glDrawArrays(GL_LINE_STRIP, 0, vertices_count);
glDrawElements(GL_LINES, indices_count, GL_UNSIGNED_INT,
nullptr);
glBindVertexArray(0);
showGLErrors();
}
shader.end();
}
The problem is with the following (shortened) call sequence in your setText() method:
glBindVertexArray(vao);
{
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indices);
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, ...);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
...
}
glBindVertexArray(0);
The binding of the GL_ELEMENT_ARRAY_BUFFER is part of the VAO state. So by making this call while the VAO is bound:
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
you're setting the VAO state to have an element array buffer binding of 0. So when you later bind the VAO in your draw() method, you won't have a binding for GL_ELEMENT_ARRAY_BUFFER.
To avoid this, the simplest solution is to just remove that call. If you want to explicitly unbind it because you're worried that having it bound might have undesired side effects on other code, you need to move it after unbinding the VAO:
glBindVertexArray(vao);
{
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indices);
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, ...);
...
}
glBindVertexArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
Without seeing the whole code and knowing the exact GL version, I will attempt to give you a correct answer.
First, if you're using ES2, then GL_UNSIGNED_INT as the index type is not supported by default; however, I don't think that's your problem.
Your actual issue is that the element array is not stored in your VAO object, only the vertex data configuration. Therefore glDrawElements will give you this error, as it will think no element array is bound and that you passed NULL as the indices argument to the function.
To solve this, bind the appropriate element array buffer before you call glDrawElements.
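For completeness, a short sketch of that suggestion using the names from the question (vao, indices, indices_count); note that, per the first answer, the element array buffer should only be unbound after the VAO:

glBindVertexArray(vao);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indices);   // make sure an index buffer is bound for the draw
glDrawElements(GL_LINES, indices_count, GL_UNSIGNED_INT, nullptr);
glBindVertexArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);         // unbind only after the VAO, as explained above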