So, I am trying to make a basic "Drawable" class that handles a lot of the drawing for me in the background, and I want to use modern OpenGL (no glBegin/glEnd statements). I keep getting a blank screen whenever I call draw().
I have run the debugger and checked: my array is initialized properly, with 3 x GLfloat for position and 4 x GLfloat for color. Any idea what is going wrong? I am very new to this library. My example tries to draw a red quad with corners at (±0.5, ±0.5, 0.0), so the indexData array is just { 0, 1, 2, 3 }.
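For reference, given the 3 x float position plus 4 x float color layout described above, the interleaved data for such a red quad would presumably look something like this (my reconstruction, not the asker's actual arrays; GL_TRIANGLE_FAN is assumed for drawMode):
GLfloat vertexData[] = {
//   x      y     z     r     g     b     a
    -0.5f, -0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f,
     0.5f, -0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f,
     0.5f,  0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f,
    -0.5f,  0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f,
};
GLushort indexData[] = { 0, 1, 2, 3 }; // vertexCount = 4, indexCount = 4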
#define DRAWABLE_VERTEX_DEPTH 3
#define SIZE_OF_VERTEX_ELEMENT sizeof(GLfloat)
#define VERTEX_SIZE (DRAWABLE_VERTEX_DEPTH * SIZE_OF_VERTEX_ELEMENT)
#define DRAWABLE_COLOR_DEPTH 4
#define SIZE_OF_COLOR_ELEMENT sizeof(GLfloat)
#define COLOR_SIZE (DRAWABLE_COLOR_DEPTH * SIZE_OF_COLOR_ELEMENT)
#define INDEX_SIZE sizeof(GLushort)
#define DRAWABLE_STRIDE (VERTEX_SIZE + COLOR_SIZE)
inline Drawable(/*Arguments omitted for brevity...*/)
{
//Standard initialization omitted....
glGenBuffers(1, &vboID);
glGenBuffers(1, &vioID);
glGenVertexArrays(1, &vaoID);
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vioID);
glBufferData(GL_ARRAY_BUFFER, (VERTEX_SIZE + COLOR_SIZE) * vertexCount, vertexData, drawType);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, INDEX_SIZE * indexCount, indexData, drawType);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//Generate Vertex Array
glBindVertexArray(vaoID);
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, DRAWABLE_VERTEX_DEPTH, GL_FLOAT, GL_FALSE, DRAWABLE_STRIDE, 0);
glVertexAttribPointer(1, DRAWABLE_COLOR_DEPTH, GL_FLOAT, GL_FALSE, DRAWABLE_STRIDE, (GLbyte*)VERTEX_SIZE);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vioID);
glBindVertexArray(0);
}
inline void draw()
{
glBindVertexArray(vaoID);
glDrawElements(drawMode, indexCount, GL_UNSIGNED_INT, NULL);
glBindVertexArray(0);
}
GLSL Vertex Shader:
#version 430
in layout(location=0) vec3 inPosition;
in layout(location=1) vec4 inColor;
out vec4 outVertexColor;
void main()
{
gl_Position = vec4(inPosition, 1.0);
outVertexColor = inColor;
}
GLSL Fragment Shader:
#version 430
in vec4 outVertexColor;
out vec4 outFragmentcolor;
void main()
{
outFragmentcolor = outVertexColor;
}
Apart from the issues mentioned in the comments, your index array is GLushort (unsigned 16-bit), while your draw call specifies GL_UNSIGNED_INT (unsigned 32-bit). Replace it with GL_UNSIGNED_SHORT.
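Apart from that, the construction order can bite here: the GL_ELEMENT_ARRAY_BUFFER binding is part of the VAO state, so the index buffer is best bound and filled while the VAO is bound, and the glEnableVertexAttribArray calls made before the VAO is bound have no effect on it. A minimal sketch of a tightened constructor body, assuming the same member names as in the question (vaoID, vboID, vioID, vertexCount, indexCount, drawType):
// Create the VAO first and bind it before any state it should capture.
glGenVertexArrays(1, &vaoID);
glGenBuffers(1, &vboID);
glGenBuffers(1, &vioID);
glBindVertexArray(vaoID);

glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, DRAWABLE_STRIDE * vertexCount, vertexData, drawType);

// The element buffer binding is recorded in the VAO.
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vioID);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, INDEX_SIZE * indexCount, indexData, drawType);

glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, DRAWABLE_VERTEX_DEPTH, GL_FLOAT, GL_FALSE, DRAWABLE_STRIDE, (GLvoid*)0);
glVertexAttribPointer(1, DRAWABLE_COLOR_DEPTH, GL_FLOAT, GL_FALSE, DRAWABLE_STRIDE, (GLvoid*)VERTEX_SIZE);

glBindVertexArray(0);

// ...and in draw(), the type must match the GLushort indices:
glDrawElements(drawMode, indexCount, GL_UNSIGNED_SHORT, NULL);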
I was trying to implement normal mapping on a simple cube, but since I had trouble with my normals I wanted to try to display them using a geometry shader. Following the learnopengl tutorial, it basically calls mesh.render() twice, the first time to draw the model and the second time to display the normals. When I try to do the same, I get a strange result:
The cube seems well drawn, but it has a weird white rectangle in front of it, and I don't know why. I don't know if it's a problem with the drawing or with the geometry shader, so I'll post both.
My code:
glutDisplayFunc(MyRenderScene);
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
glGenVertexArrays(1,&global.VAO);
glGenBuffers(1, &global.VBO);
glBindVertexArray(global.VAO);
glBindBuffer(GL_ARRAY_BUFFER, global.VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glGenBuffers(1, &global.IBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.IBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glGenVertexArrays(1,&global.gVAO);
glGenBuffers(1, &global.gVBO);
glBindVertexArray(global.gVAO);
glBindBuffer(GL_ARRAY_BUFFER, global.gVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glGenBuffers(1, &global.gIBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.gIBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
Here I'm basically passing the same data to both sets of buffers.
The buffers are instantiated in a global struct.
This is MyRenderScene():
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
global.shaders.enable(); // glUseProgram
global.shaders.set_sampler(0); // setting textures
global.shaders.set_sampler(1);
global.sceneT.set_camera(
global.camera.position(),
global.camera.lookat(),
global.camera.up()
);
glm::mat4 model = glm::mat4(1.0f);
glm::vec3 vaxis = glm::vec3(0.0,1.0,0.0);
glm::vec3 haxis = glm::vec3(1.0,0.0,0.0);
model = glm::rotate(model,glm::radians(global.gradX),haxis);
model = glm::rotate(model,glm::radians(global.gradY),vaxis);
glm::mat4 projection = glm::perspective(glm::radians(40.0f), (float)global.WINDOW_WIDTH/(float)global.WINDOW_HEIGHT, 0.1f, 100.0f);
glm::mat4 view = glm::lookAt(global.camera.position(),global.camera.lookat()+ global.camera.position(),global.camera.up());
global.shaders.set_model(model);
global.shaders.set_view(view);
global.shaders.set_projection(projection);
global.shaders.set_viewPos(global.camera.position());
global.shaders.set_lightPos(lightPos);
global.shaders.update_uniforms();
glBindVertexArray(global.VAO);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, position)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, textcoord)));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, normal)));
glEnableVertexAttribArray(3);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, tangent)));
glBindBuffer(GL_ARRAY_BUFFER, global.VBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.IBO);
global.brickwall.Bind(GL_TEXTURE0+0); // binding textures
global.brickwall_normals.Bind(GL_TEXTURE0+1);
glDrawElements(GL_TRIANGLES,36,GL_UNSIGNED_INT,0);
glBindVertexArray(0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glDisableVertexAttribArray(3);
global.geometryShader.enable(); // setting up geometry shader
global.geometryShader.set_projection(projection);
global.geometryShader.set_model(model);
global.geometryShader.set_view(view);
global.geometryShader.update_uniforms();
glBindVertexArray(global.gVAO);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, position)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, textcoord)));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, normal)));
glEnableVertexAttribArray(3);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, tangent)));
glBindBuffer(GL_ARRAY_BUFFER, global.gVBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.gIBO);
glDrawElements(GL_TRIANGLES,36,GL_UNSIGNED_INT,0);
glBindVertexArray(0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glDisableVertexAttribArray(3);
glutSwapBuffers();
glutPostRedisplay();
I even tried using the same vertex array and vertex buffer for both draw calls, but I get the same result.
Here is the vertex shader:
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 textcoord;
layout (location = 2) in vec3 normal;
layout (location = 3) in vec3 tangent;
out VS_OUT {
vec3 newNormal;
} vs_out;
uniform mat4 view;
uniform mat4 model;
void main() {
mat3 normalMatrix = mat3(transpose(inverse(view * model)));
newNormal = vec3(vec4(normalMatrix * normal, 0.0));
gl_Position = view * model * vec4(position, 1.0);
}
And the geometry shader:
#version 330 core
layout (triangles) in;
layout (line_strip, max_vertices = 6) out;
in VS_OUT {
vec3 newNormal;
} gs_in[];
const float MAGNITUDE = 0.2;
uniform mat4 projection;
void GenerateLine(int index) {
gl_Position = projection * gl_in[index].gl_Position;
EmitVertex();
gl_Position = projection * (gl_in[index].gl_Position + vec4(gs_in[index].newNormal,0.0) * MAGNITUDE);
EmitVertex();
EndPrimitive();
}
void main() {
GenerateLine(0); // first vertex normal
GenerateLine(1); // second vertex normal
GenerateLine(2); // third vertex normal
}
Feel free to correct me on everything possible and imaginable.
The reason for the issue is that the shader program with the geometry shader fails to compile or link. Hence the geometry is drawn by the default shader program instead of the program with the geometry shader.
There is (at least) one error in the vertex shader. The line
newNormal = vec3(vec4(normalMatrix * normal, 0.0));
has to be
vs_out.newNormal = vec3(vec4(normalMatrix * normal, 0.0));
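Since a failed compile or link is otherwise silent, it is worth querying the status explicitly when the program is built. A minimal sketch, where prog stands for whatever raw handle the geometryShader wrapper holds (name assumed):
GLint linked = GL_FALSE;
glGetProgramiv(prog, GL_LINK_STATUS, &linked);
if (linked != GL_TRUE) {
    GLchar infoLog[1024];
    glGetProgramInfoLog(prog, sizeof(infoLog), NULL, infoLog); // fetch the driver's error text
    fprintf(stderr, "program link failed: %s\n", infoLog);
}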
I am stuck using a laptop with no support for OpenGL 3.3 or anything higher than 3.1. This is problematic, as I am not entirely familiar with this version, so I am learning how to do things all over again. My problem is that for some reason my fragment shader only outputs white. I'm not quite sure what's wrong, but I get the feeling it has something to do with the way I set the VBO data, as this has recently changed in my code. Right now I'm trying to convert my old rendering engine (written against OpenGL 3.3) to OpenGL 3.1, because of the limitations of my old laptop. This is why I am using a struct for the Vertex which includes BiTangent and Tangent values.
My Code:
struct Vertex {
// Position
glm::vec3 Position;
// Normals
glm::vec3 Normal;
// Texture Coordinates
glm::vec2 TexCoords;
// BiTangent
glm::vec3 BiTangent;
// Tangent
glm::vec3 Tangent;
};
struct Texture {
unsigned int id;
int number;
};
class Mesh {
std::vector<Vertex> vertices;
std::vector<unsigned int> indices;
std::vector<Texture> textures;
unsigned int VAO, VBO[5], EBO;
public:
Mesh(std::vector<Vertex> vertices, std::vector<unsigned int> indices, std::vector<Texture> textures) {
this->vertices = vertices;
this->indices = indices;
this->textures = textures;
setupMesh();
};
void setupMesh() {
glGenVertexArrays(1, &VAO);
glGenBuffers(4, VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
int stride = 14;
// Position
glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// Normal
glBindBuffer(GL_ARRAY_BUFFER, VBO[1]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)3);
glEnableVertexAttribArray(1);
// Texture Coordinates
glBindBuffer(GL_ARRAY_BUFFER, VBO[2]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)6);
glEnableVertexAttribArray(2);
// BiTangent
glBindBuffer(GL_ARRAY_BUFFER, VBO[3]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)8);
glEnableVertexAttribArray(3);
// Tangent
glBindBuffer(GL_ARRAY_BUFFER, VBO[4]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(4, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)11);
glEnableVertexAttribArray(4);
// Element Buffer Object
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), &indices[0], GL_STATIC_DRAW);
// Unbind
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
};
void Draw(ShaderProgram shader) {
for (int i = 0; i < textures.size(); i++) {
// Give textures the proper id and bind them
glActiveTexture(GL_TEXTURE0 + textures[i].number);
glBindTexture(GL_TEXTURE_2D, textures[i].id);
}
// Draw
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, 0);
// Unbind
glBindVertexArray(0);
}
};
Vertex Shader:
#version 140 core
in vec3 aPos;
in vec3 aColor;
in vec2 aTexCoords;
in vec3 aBiTangent;
in vec3 aTangent;
out vec3 Color;
void main(void){
Color = vec3(aColor);
gl_Position = vec4(aPos, 1.0);
}
Fragment Shader:
#version 140 core
out vec4 FragColor;
//precision highp float;
in vec3 Color;
void main(void)
{
FragColor = vec4(1.0, 0.8, 0.8, 1.0);
}
I tried manually setting a color to override displaying the normals, but the output is still white.
EDIT:
If I can generate one VBO instead of 4, that would be useful. My question for that is how I'm supposed to access the vertex data from the shaders when using one VBO.
If a named buffer object is bound, then the last parameter of glVertexAttribPointer is treated as a byte offset into the buffer object's data store.
Hence the offset has to be sizeof(float) * number rather than number. For instance,
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)3);
has to be
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE,
    stride * sizeof(float), (void*)(sizeof(float) * 3));
and
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)6);
has to be
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE,
    stride * sizeof(float), (void*)(sizeof(float) * 6));
and so on.
Furthermore, the version specification is invalid. There is no GLSL 1.40 core version; the core token was only introduced in OpenGL Shading Language 1.50. Remove the token core in both the vertex and the fragment shader:
#version 140 core
has to be
#version 140
I recommend checking whether the shader compilation succeeded and whether the program object linked successfully; see Shader Compilation. Whether a shader compiled can be checked with glGetShaderiv and the parameter GL_COMPILE_STATUS; whether a program linked can be checked with glGetProgramiv and the parameter GL_LINK_STATUS.
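A minimal sketch of those checks (shader and program are assumed handle names):
GLint ok = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &ok);
if (ok != GL_TRUE) {
    GLchar infoLog[1024];
    glGetShaderInfoLog(shader, sizeof(infoLog), NULL, infoLog);
    fprintf(stderr, "compile error: %s\n", infoLog);
}
glGetProgramiv(program, GL_LINK_STATUS, &ok);
if (ok != GL_TRUE) {
    GLchar infoLog[1024];
    glGetProgramInfoLog(program, sizeof(infoLog), NULL, infoLog);
    fprintf(stderr, "link error: %s\n", infoLog);
}
As for the EDIT: the shaders never see buffers directly, only attribute locations, so a single interleaved VBO needs no shader-side changes; only the glVertexAttribPointer calls change, using sizeof(Vertex) as the stride and byte offsets in the style of (void*)offsetof(Vertex, Normal).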
I have this vertex shader. When I remove in vec3 LVertexNorm and everything related to it, it renders fine. But if I add it back in, nothing renders anymore.
#version 140
in vec3 LVertexNorm;
in vec3 LVertexPos2D;
uniform mat4 MVP;
out vec3 norm;
void main() {
norm = LVertexNorm;
gl_Position = MVP * vec4( LVertexPos2D.x, LVertexPos2D.y, LVertexPos2D.z, 1 );
}
Fragment shader
#version 140
in vec3 norm;
out vec4 LFragment;
void main() {
LFragment = vec4( 1.0,1.0,1.0, 1.0 );
}
And the code for building the VAO:
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, verticesCount * sizeof(GLfloat), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(program->getAttribute("LVertexPos2D"), 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, nbo);
glBufferData(GL_ARRAY_BUFFER, normalCount * sizeof(GLfloat), normals, GL_STATIC_DRAW);
glVertexAttribPointer(program->getAttribute("LVertexNorm"), 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesCount * sizeof(GLuint), indices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBindVertexArray(0);
I tried different ways, and always with the same result: when LVertexNorm gets added, the shader stops working. I can't figure out why. What might be wrong?
The argument to glEnableVertexAttribArray has to be the vertex attribute index:
GLuint pos_inx = program->getAttribute("LVertexPos2D");
glVertexAttribPointer(pos_inx, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(pos_inx);
GLuint norm_inx = program->getAttribute("LVertexNorm");
glVertexAttribPointer(norm_inx, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(norm_inx);
When you have the vertex shader input variables in vec3 LVertexNorm; and in vec3 LVertexPos2D;, then possibly LVertexNorm gets the attribute index 0 and LVertexPos2D gets the attribute index 1. Since the vertex attribute 1 is not enabled, the vertex positions are not specified.
In fact, the attribute indexes are not specified and can be any number. Most drivers, however, use ascending indexes that start at 0.
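If you want the indexes to be deterministic, you can also pin them yourself with glBindAttribLocation before the program is linked. A short sketch, where programID is an assumed name for the raw program handle:
glBindAttribLocation(programID, 0, "LVertexPos2D"); // position fixed at index 0
glBindAttribLocation(programID, 1, "LVertexNorm");  // normal fixed at index 1
glLinkProgram(programID);                           // bindings take effect at link time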
I'm working on a 3D object viewer, a really basic OpenGL program, I think. But I'm new to OpenGL and I've hit a problem that I can't resolve.
The last tutorial that I used is here: LearnOpenGL
The code I'll show is from there (the Model Loading part); I use Assimp to load the object.
Here is some code:
Mesh Setup:
struct Vertex {
glm::vec3 position;
glm::vec3 normal;
glm::vec2 texCoords;
};
glGenVertexArrays(1, &this->m_VAO);
glGenBuffers(1, &this->m_VBO);
glGenBuffers(1, &this->m_EBO);
glBindVertexArray(this->m_VAO);
glBindBuffer(GL_ARRAY_BUFFER, this->m_VBO);
glBufferData(GL_ARRAY_BUFFER, this->m_vertices.size() * sizeof(Vertex),
&this->m_vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->m_EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, this->m_indices.size() * sizeof(GLuint),
&this->m_indices[0], GL_STATIC_DRAW);
// Vertex Positions
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)0);
// Vertex Normals
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)offsetof(Vertex, normal));
// Vertex Texture Coords
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)offsetof(Vertex, texCoords));
glBindVertexArray(0);
You can check the whole code on the site.
Draw:
// Draw mesh
glBindVertexArray(model.get_VAO());
glDrawElements(GL_TRIANGLES, model.get_Indices_Size(), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
Shaders:
Vertex ->
#version 130
in vec4 gxl3d_Position;
in vec4 gxl3d_Color;
smooth out vec4 VertexColor;
void main() {
gl_Position = gxl3d_Position;
VertexColor = vec4(0.5f, 0.0f, 0.0f, 1.0f);
}
Fragment ->
#version 130
smooth in vec4 VertexColor;
void main() {
gl_FragColor = VertexColor;
}
When I run this, I can see that the last vertex from Assimp is not the same as when I read the .obj directly; maybe that's normal?
In the end I run into a "nouveau failed idel channel 0xcccc0000 Xorg 677" error.
If someone has any idea, thanks!
One thing is wrong in the mesh setup.
This:
// Vertex Positions
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)0);
// Vertex Normals
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)offsetof(Vertex, normal));
// Vertex Texture Coords
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
(GLvoid*)offsetof(Vertex, texCoords));
Should be after:
glBindBuffer(GL_ARRAY_BUFFER, this->m_VBO);
glBufferData(GL_ARRAY_BUFFER, this->m_vertices.size() * sizeof(Vertex),
&this->m_vertices[0], GL_STATIC_DRAW);
while the VBO is bound; otherwise you're setting the attrib pointers on the EBO instead of the VBO.
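Put together, the setup could look like this (same names as the question's code; only the order of operations changes):
glBindVertexArray(this->m_VAO);

glBindBuffer(GL_ARRAY_BUFFER, this->m_VBO);
glBufferData(GL_ARRAY_BUFFER, this->m_vertices.size() * sizeof(Vertex),
    &this->m_vertices[0], GL_STATIC_DRAW);

glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->m_EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, this->m_indices.size() * sizeof(GLuint),
    &this->m_indices[0], GL_STATIC_DRAW);

// set the attrib pointers while m_VBO is still bound to GL_ARRAY_BUFFER
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*)0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*)offsetof(Vertex, normal));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*)offsetof(Vertex, texCoords));

glBindVertexArray(0);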
And if you have no reason to use that old GLSL version, you should really bump it up to something like 330 and use layout(location = 0) in vec3 position.
For instance, to access the normals:
#version 330
layout(location = 1) in vec3 normals;
etc.
About Assimp, I have no idea.
Good luck :)
The title sums up my issue: no matter what I set the first vertex to, OpenGL always draws it at the origin. I've tried this on a school computer, where it wasn't a problem, but I'm not at school now, and it's possible something I've changed is causing the issue. Regardless, I see no reason why this should happen. In case the syntax seems weird: this code is written in D, but it should be an almost seamless port from C.
My code is:
class Mesh
{
this(vec3[] vertices, uint[] indices)
{
draw_count = indices.length;
glGenVertexArrays(1, &vertex_array_object);
glBindVertexArray(vertex_array_object);
glGenBuffers(NUM_BUFFERS, vertex_array_buffers.ptr);
glBindBuffer(GL_ARRAY_BUFFER, vertex_array_buffers[POSITION_VB]);
glBufferData(GL_ARRAY_BUFFER, vertices.length * vertices.sizeof, vertices.ptr, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(cast(GLuint)0, 3, GL_FLOAT, GL_FALSE, 0, cast(void*)0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vertex_array_buffers[INDEX_VB]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.length * indices.sizeof, indices.ptr, GL_STATIC_DRAW);
glBindVertexArray(0);
}
void draw()
{
glBindVertexArray(vertex_array_object);
glDrawElements(GL_TRIANGLES, draw_count, GL_UNSIGNED_INT, cast(const(void)*)0);
glBindVertexArray(0);
}
private:
enum
{
POSITION_VB,
INDEX_VB,
NORMAL_VB,
NUM_BUFFERS
};
GLuint vertex_array_object;
GLuint vertex_array_buffers[NUM_BUFFERS];
vec3 normals;
int draw_count;
}