I recently rewrote the rendering portion of my hobby OpenGL graphics engine, and I can't figure out why an access violation exception is being thrown on glDrawElements.
From what I have researched, this is usually due to a misconfigured VAO, but as far as I can tell everything is set up correctly.
Below is the order of OpenGL calls I make before hitting the error. Unfortunately I can't log the values of each parameter, but if anyone notices a mistake in the ordering, please let me know.
Note that I am running OpenGL 4.1 on an Nvidia 1080 graphics card and the drivers are up to date.
glCreateShaderProgramv(GetShaderType(_desc.ShaderType), 1, &ptr)
glGetProgramiv(_id, GL_INFO_LOG_LENGTH, &logLength)
glGetProgramiv(_id, GL_LINK_STATUS, &linkStatus)
glCreateShaderProgramv(GetShaderType(_desc.ShaderType), 1, &ptr)
glGetProgramiv(_id, GL_INFO_LOG_LENGTH, &logLength)
glGetProgramiv(_id, GL_LINK_STATUS, &linkStatus)
glGenBuffers(1, &_id)
glBindBuffer(target, _id)
glBufferData(target, _desc.ByteCount, 0, GetBufferUsage(_desc.BufferUsage))
glBindBuffer(target, _id)
glMapBufferRange(target, byteOffset, byteCount, access)
glDisable(GL_POLYGON_OFFSET_FILL)
glDisable(GL_POLYGON_OFFSET_POINT)
glDisable(GL_POLYGON_OFFSET_LINE)
glDisable(GL_CULL_FACE)
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glDisable(GL_SCISSOR_TEST)
glScissor(0, 0, _desc.RenderWidth, _desc.RenderHeight)
glDisable(GL_MULTISAMPLE)
glDisable(GL_DEPTH_CLAMP)
glDisable(GL_LINE_SMOOTH)
glDisable(GL_STENCIL_TEST)
glStencilOpSeparate(GL_FRONT, GetStencilOp(stencilOperationDesc.FailOp), GetStencilOp(stencilOperationDesc.ZFailOp), GetStencilOp(stencilOperationDesc.PassOp))
glStencilOpSeparate(GL_FRONT, GetStencilOp(stencilOperationDesc.FailOp, true), GetStencilOp(stencilOperationDesc.ZFailOp, true), GetStencilOp(stencilOperationDesc.PassOp, true))
glStencilFuncSeparate(GL_FRONT, GetCompareFunc(comparisonFunc), _stencilRefValue, readMask)
glStencilFuncSeparate(GL_BACK, GetCompareFunc(comparisonFunc), _stencilRefValue, readMask)
glStencilMask(writeMask)
glEnable(GL_DEPTH_TEST)
glDepthMask(GL_FALSE)
glDepthFunc(GetCompareFunc(depthFunc))
glGenBuffers(1, &_id)
glBindBuffer(target, _id)
glBufferData(target, _desc.ByteCount, 0, GetBufferUsage(_desc.BufferUsage))
glBindBuffer(target, _id)
glMapBufferRange(target, byteOffset, byteCount, access)
glGenBuffers(1, &_id)
glBindBuffer(target, _id)
glBufferData(target, _desc.ByteCount, 0, GetBufferUsage(_desc.BufferUsage))
glBindBuffer(target, _id)
glMapBufferRange(target, byteOffset, byteCount, access)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, glIndexBuffer->GetId())
glGenProgramPipelines(1, &_id)
glUseProgramStages(_id, GL_VERTEX_SHADER_BIT, desc.VertexShaderId)
glUseProgramStages(_id, GL_FRAGMENT_SHADER_BIT, desc.PixelShaderId)
glUseProgramStages(_id, GL_GEOMETRY_SHADER_BIT, desc.GeometryShaderId)
glUseProgramStages(_id, GL_TESS_CONTROL_SHADER_BIT, desc.HullShaderId)
glUseProgramStages(_id, GL_TESS_EVALUATION_SHADER_BIT, desc.DomainShaderId)
glBindProgramPipeline(shaderPipeline->GetId())
glGenVertexArrays(1, &vao)
glBindVertexArray(vao)
glBindBuffer(GL_ARRAY_BUFFER, boundBuffers[inputSlot]->GetId())
glVertexAttribPointer(inputSlot, compSize, compType, GL_FALSE, stride, reinterpret_cast<GLvoid*>(offset))
glEnableVertexAttribArray(inputSlot)
glBindVertexArray(0)
glBindVertexArray(vao->GetId())
The problem was that I was passing an invalid size when creating a VBO.
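For anyone hitting the same crash: the size argument of glBufferData is a byte count, and allocating too little storage tends to crash inside glDrawElements in the driver rather than produce a GL error. A minimal sketch of that class of bug (the Vertex type and helper here are illustrative, not the actual engine code, and a GL loader header is assumed to be included):
#include <vector>

struct Vertex { float x, y, z; };

void uploadVertices(const std::vector<Vertex>& vertices)
{
    GLuint vbo = 0;
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);

    // The size must be in bytes. Passing vertices.size() (an element count)
    // allocates too little storage, and a later glDrawElements that reads
    // past the end can crash in the driver instead of setting a GL error.
    glBufferData(GL_ARRAY_BUFFER,
                 static_cast<GLsizeiptr>(vertices.size() * sizeof(Vertex)),
                 vertices.data(),
                 GL_STATIC_DRAW);
}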
The Description
I'm currently learning OpenGL and want to try out the direct state access feature from OpenGL 4.5. So I set up a simple example that should render a triangle in 3D. The code below is executed each frame and draws the triangle.
void Geometry::draw(glm::vec3 pos, glm::vec3 pos1, glm::vec3 pos2) {
    float vertices[9] = {
        pos.x, pos.y, pos.z,
        pos1.x, pos1.y, pos1.z,
        pos2.x, pos2.y, pos2.z
    };

    unsigned int m_VaoID;
    glGenVertexArrays(1, &m_VaoID);
    glBindVertexArray(m_VaoID);

    unsigned int m_VboID;
    glGenBuffers(1, &m_VboID);
    glBindBuffer(GL_ARRAY_BUFFER, m_VboID);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 3, 0);

    Shader shader = Shader("../../shader.glsl");
    shader.compileShader();
    shader.useShader();

    glDrawArrays(GL_TRIANGLES, 0, 3);

    glBindVertexArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glDeleteVertexArrays(1, &m_VaoID);
    glDeleteBuffers(1, &m_VboID);
}
However, as soon as I change the code to make use of the direct state access extension, it no longer renders a triangle and instead produces error messages.
void Geometry::drawDSA(glm::vec3 pos, glm::vec3 pos1, glm::vec3 pos2) {
    float vertices[9] = {
        pos.x, pos.y, pos.z,
        pos1.x, pos1.y, pos1.z,
        pos2.x, pos2.y, pos2.z
    };

    unsigned int m_VaoID;
    glCreateVertexArrays(1, &m_VaoID);
    glVertexArrayAttribFormat(m_VaoID, 0, 3, GL_FLOAT, GL_FALSE, 0);

    unsigned int m_VboID;
    glCreateBuffers(1, &m_VboID);
    glNamedBufferData(m_VboID, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glVertexArrayAttribBinding(m_VaoID, 0, m_VboID);

    Shader shader = Shader("../../shader.glsl");
    shader.compileShader();
    shader.useShader();

    glBindVertexArray(m_VaoID);
    glBindBuffer(GL_ARRAY_BUFFER, m_VboID);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glBindVertexArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glDeleteVertexArrays(1, &m_VaoID);
    glDeleteBuffers(1, &m_VboID);
}
Both methods get called in my update loop with the same values (between -1.f and 0.f).
The error output:
Here is the console output of the error I'm getting:
[OpenGL Debug HIGH] GL_INVALID_VALUE in glVertexArrayAttribBinding(bindingindex=16 >= GL_MAX_VERTEX_ATTRIB_BINDINGS)
[OpenGL Debug HIGH] GL_INVALID_VALUE in glVertexArrayAttribBinding(bindingindex=17 >= GL_MAX_VERTEX_ATTRIB_BINDINGS)
[OpenGL Debug HIGH] GL_INVALID_VALUE in glVertexArrayAttribBinding(bindingindex=18 >= GL_MAX_VERTEX_ATTRIB_BINDINGS)
[OpenGL Debug HIGH] GL_INVALID_VALUE in glVertexArrayAttribBinding(bindingindex=19 >= GL_MAX_VERTEX_ATTRIB_BINDINGS)
What I tried so far
I looked through the Khronos Wiki and through the docs.gl documentation, but I could not find out why my code produces the above error instead of rendering my triangle.
The third parameter of glVertexArrayAttribBinding is not the vertex buffer object; it is a binding index, an arbitrary id that you have to choose.
So this call does not associate the buffer with the attribute:
glVertexArrayAttribBinding(m_VaoID, 0, m_VboID);
Instead, the call should associate a binding index with attribute index 0 of the specified VAO:
glVertexArrayAttribBinding(m_VaoID, 0, binding_index);
Furthermore, you have to use glVertexArrayVertexBuffer to associate a buffer object with a binding index of the specified VAO:
glVertexArrayVertexBuffer(m_VaoID, binding_index, m_VboID, 0, sizeof(float) * 3);
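Putting it together, a corrected version of the DSA setup from the question might look roughly like this (a sketch only: binding_index is an arbitrary value you choose, vertices is the array from drawDSA above, and the glEnableVertexArrayAttrib call is added here because attribute 0 also has to be enabled, which the original snippet does not do):
const GLuint binding_index = 0;   // arbitrary, must be < GL_MAX_VERTEX_ATTRIB_BINDINGS

unsigned int m_VboID;
glCreateBuffers(1, &m_VboID);
glNamedBufferData(m_VboID, sizeof(vertices), vertices, GL_STATIC_DRAW);

unsigned int m_VaoID;
glCreateVertexArrays(1, &m_VaoID);
glEnableVertexArrayAttrib(m_VaoID, 0);                             // enable attribute 0
glVertexArrayAttribFormat(m_VaoID, 0, 3, GL_FLOAT, GL_FALSE, 0);   // layout of attribute 0
glVertexArrayVertexBuffer(m_VaoID, binding_index, m_VboID, 0, sizeof(float) * 3); // buffer -> binding index
glVertexArrayAttribBinding(m_VaoID, 0, binding_index);             // attribute 0 -> binding index

glBindVertexArray(m_VaoID);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);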
I am trying to draw a simple triangle and set up the buffers as follows:
triangle t;
point3f vertices[] = { t.p1(), t.p2(), t.p3() };
GLushort indices[] = { 0, 1, 2 };

gl_vertex_array vao{ 3 };
vao.bind_vertex_array();

gl_vertex_buffer position_vbo{ buffer_type::array_buf };
position_vbo.bind_vertex_buffer();
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), &vertices[0], GL_STATIC_DRAW);
position_vbo.unbind_vertex_buffer();

gl_vertex_buffer index_vbo{ buffer_type::element_array_buf };
index_vbo.bind_vertex_buffer();
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), &indices[0], GL_STATIC_DRAW);
index_vbo.unbind_vertex_buffer();

vao.unbind_vertex_array();
The setup of the buffers and the VAO seems fine, I think; I checked with glGetError at each stage and everything works. In my render function, I do the following:
glClearColor(0.4f, 0.3f, 0.6f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
o.vao.bind_vertex_array();
o.sp.use_program();
GLenum error = glGetError();
assert(error == GL_NO_ERROR);
//glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_SHORT, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
error = glGetError();
assert(error == GL_NO_ERROR);
o.sp.unuse_program();
o.vao.unbind_vertex_array();
This render call with glDrawArrays works just fine, but when I try to render with glDrawElements instead, an exception is thrown. Moreover, it is a hard exception: I can't step to the next line to see an error code. I didn't know that OpenGL calls could throw. I am stuck here. What might be the problem?
Here is a similar discussion: nvoglv32.dll throws the exception
The problem lies in the VAO setup code. The index buffer gets unbound before the VAO is unbound:
index_vbo.unbind_vertex_buffer();
vao.unbind_vertex_array();
Since the VAO always stores the last GL_ELEMENT_ARRAY_BUFFER binding, this effectively removes the index buffer from the VAO again. The exception then happens because the draw call tries to read from an index buffer that is not bound. The solution is to swap these two lines and unbind the VAO first:
vao.unbind_vertex_array();
index_vbo.unbind_vertex_buffer();
As Nicol Bolas mentioned in the comments, you can actually skip unbinding the element buffer entirely: once the VAO is unbound, no element buffer is bound anymore.
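In raw GL calls (leaving the wrapper classes aside), the corrected setup order would be roughly as follows. This is a sketch that assumes vao, position_vbo and index_vbo are the ids behind the wrapper objects, and it omits the vertex attribute setup:
glBindVertexArray(vao);

glBindBuffer(GL_ARRAY_BUFFER, position_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), &vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);          // harmless: the VAO does not track the GL_ARRAY_BUFFER binding itself

glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_vbo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), &indices[0], GL_STATIC_DRAW);

glBindVertexArray(0);                      // unbind the VAO first...
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);  // ...and only then, optionally, the index buffer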
I tried to draw a textured square using OpenGL and indices. At first, I drew a simple white square using a VAO and a VBO. After that, I tried to create an index buffer object to draw the same simple white square, but it doesn't draw anything and throws the error code GL_INVALID_ENUM (0x0500). The error is raised after calling glDrawElements.
Here are the relevant parts of my code:
Function that creates the index buffer object, VAO, and VBO:
void Object::loadObject(const float *lpfVertices, size_t uVerticesSize, const char *lpbElementsList, size_t uNumElements) {
    this->uNumElements = uNumElements;

    glGenVertexArrays(1, &uVertexArrayID);
    glBindVertexArray(uVertexArrayID);

    glGenBuffers(1, &uVertexBufferID);
    glBindBuffer(GL_ARRAY_BUFFER, uVertexBufferID);
    glBufferData(GL_ARRAY_BUFFER, uVerticesSize, (void *)lpfVertices, GL_STATIC_DRAW);

    glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, NULL);
    glEnableVertexAttribArray(0);

    glGenBuffers(1, &uElemetsListID);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, uElemetsListID);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, uNumElements, lpbElementsList, GL_STATIC_DRAW);
}
Function that renders my object:
void Object::renderObject() {
    glBindVertexArray(uVertexArrayID);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, uElemetsListID);
    glDrawElements(GL_TRIANGLES, uNumElements, GL_BYTE, NULL);
}
Part of the main code:
object.loadObject(lpfTriangleVertices, sizeof(lpfTriangleVertices), lpbElementsList, sizeof(lpbElementsList));
uProgID = loadShader("default.vs", "default.fs");

while (!glfwWindowShouldClose(lpstWndID)) {
    glfwPollEvents();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glUseProgram(uProgID);
    object.renderObject();
    glfwSwapBuffers(lpstWndID);
}
glDrawElements(GL_TRIANGLES, uNumElements, GL_BYTE, NULL);
                                           ^^^^^^^
GL_BYTE is not a valid argument for type in glDrawElements(). From the reference documentation:
type: Specifies the type of the values in indices. Must be one of GL_UNSIGNED_BYTE, GL_UNSIGNED_SHORT, or GL_UNSIGNED_INT.
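Since lpbElementsList is declared as an array of char (i.e. bytes) in the question's code, the matching fix would presumably be:
glDrawElements(GL_TRIANGLES, uNumElements, GL_UNSIGNED_BYTE, NULL);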
I'm trying to put together a very basic OpenGL 3.2 (core profile) application. In the following code, which is supposed to create a VBO containing the vertex positions for a triangle, the call to glVertexAttribPointer fails and raises the OpenGL error GL_INVALID_OPERATION. What does this mean, and how might I go about fixing it?
GLuint vbo, attribLocation = glGetAttribLocation(...);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
GLfloat vertices[] = { 0, 1, 0, 1, 0, 0, -1, 0, 0 };
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(attribLocation);
// At this point, glGetError() returns GL_NO_ERROR.
glVertexAttribPointer(attribLocation, 3, GL_FLOAT, GL_FALSE, 0, 0);
// At this point, glGetError() returns GL_INVALID_OPERATION.
glEnableVertexAttribArray(program.getAttrib("in_Position"));
// A call to getGLError() at this point prints nothing.
glVertexAttribPointer(program.getAttrib("in_Position"), 3, GL_FLOAT, GL_FALSE, 0, 0);
// A call to getGLError() at this point prints "OpenGL error 1282".
First, there's an obvious driver bug here, because glEnableVertexAttribArray should also have issued a GL_INVALID_OPERATION error. Or you made a mistake when you checked it.
Why should both functions error? Because you didn't use a Vertex Array Object. glEnableVertexAttribArray sets state in the current VAO. There is no current VAO, so... error. Same goes for glVertexAttribPointer. It's even in the list of errors for both on those pages.
You don't need a VAO in a compatibility context, but you do in a core context. Which you asked for. So... you need one:
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
Put that somewhere in your setup and your program will work.
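Applied to the question's snippet, the ordering would look roughly like this (an illustrative sketch; program stands for the linked shader program, and "in_Position" is the attribute name used later in the question):
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);                 // a VAO must be bound before the attribute calls below in a core profile

GLuint vbo;
GLint attribLocation = glGetAttribLocation(program, "in_Position");
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
GLfloat vertices[] = { 0, 1, 0, 1, 0, 0, -1, 0, 0 };
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(attribLocation);
glVertexAttribPointer(attribLocation, 3, GL_FLOAT, GL_FALSE, 0, 0);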
As an aside, this:
glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
is only necessary if you intend your code to run on MacOS's GL 3.2+ implementation. Unless you have that as a goal, it is unneeded and can be disruptive, as a small number of features are available in a core context that are not part of forward compatibility (wide lines, for example).
I've been trying to convert some of my code to modern OpenGL. I've gotten it to the point where I don't get any OpenGL errors, but nothing shows up when I try to draw an object. Here's my code (minus context creation and error checking):
//Compile shaders and create/link program
//I very highly doubt the problem's here (all my tests say it worked fine),
//so I'm leaving this out for now, but I'll dig it out of my classes if
//there's no obvious problem with the VBO code.
//Create VAO, VBO
unsigned vaoId, vboId;
int positionAttributeLocation;
float vertices[] = {...vertex data here...};
unsigned indices[] = {...index data here...};
positionAttributeLocation = glGetAttribLocation(programId, "position");
glGenVertexArrays(1, &vaoId);
glGenBuffers(1, &vboId);
glBindVertexArray(vaoId);
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(positionAttributeLocation, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
glEnableVertexAttribArray(positionAttributeLocation);
//Create index buffer
unsigned indexId;
glGenBuffers(1, &indexId);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexId);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glUseProgram(programId);
glDrawElements(GL_TRIANGLES, sizeof(indices)/sizeof(unsigned int), GL_UNSIGNED_INT, nullptr);
Not quite SSCCE, but I think that's all the code that could possibly be causing an issue and it's pretty much self-contained.
Try glUseProgram() before your glGetAttribLocation()/glEnableVertexAttribArray() calls.
I figured it out. With some of my refactoring, I forgot to set my width and height variables properly, creating a 0 by 0 viewport. Oops...
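For reference, a minimal sketch of what the fix amounts to (the callback name and width/height variables are hypothetical):
// Called whenever the framebuffer size changes (and once at startup).
void onFramebufferResize(int width, int height)
{
    glViewport(0, 0, width, height);   // a 0 x 0 viewport clips away everything that is drawn
}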
Your problem more than likely lies with your Cg program and model-view space.
Add cgGLSetStateMatrixParameter(modelViewMatrix, CG_GL_MODELVIEW_PROJECTION_MATRIX, CG_GL_MATRIX_IDENTITY); to your program just before glDrawArrays, and in your Cg file add OUT.HPos = mul(ModelViewProj, IN.position);.
Also add modelViewMatrix as a CGparameter in your Cg initialization section.
I worked this out from the basic OpenGL samples in the Cg toolkit; my render function is very similar to yours and now works after I hit the same problem.
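In sketch form, that amounts to something like the following (the parameter and program names are illustrative; ModelViewProj must match the uniform name in your .cg file):
// During Cg initialization, after loading the vertex program:
CGparameter modelViewMatrix = cgGetNamedParameter(vertexProgram, "ModelViewProj");

// Each frame, just before drawing:
cgGLSetStateMatrixParameter(modelViewMatrix, CG_GL_MODELVIEW_PROJECTION_MATRIX, CG_GL_MATRIX_IDENTITY);
glDrawArrays(GL_TRIANGLES, 0, vertexCount);

// In the .cg vertex program:
// OUT.HPos = mul(ModelViewProj, IN.position);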