OpenGL VBO data seems to get corrupted - C++

I've uploaded vertices, colors, normals, and texture coordinates into a single VBO, which is associated with a VAO. I also have an EBO associated with the same VAO that stores indices. I am also using SDL and OpenGL 3.3 (Core Profile context, which is set using SDL).
At first, my model seems to render fine. Then after maybe 8 or so seconds, it looks like the data gets corrupted.
Here is a video: https://youtu.be/eEiH3EFTPFk
Every frame I am pulling the data out of OpenGL (using glGetNamedBufferSubData) and comparing to what it should be, and everything seems to check out.
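A minimal sketch of that check, assuming the Vao struct and the vertices vector from the loading code below (glGetNamedBufferSubData comes from GL 4.5 / ARB_direct_state_access):

// needs <cassert>, <cstring>, <vector>
std::vector<glm::vec3> readback(vertices.size());
glGetNamedBufferSubData(vao.vbo[0].id, 0, readback.size() * sizeof(glm::vec3), readback.data());
assert(std::memcmp(readback.data(), vertices.data(), readback.size() * sizeof(glm::vec3)) == 0);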
Does anyone have any idea what might be happening here? I appreciate any insight you guys might be able to provide.
Here is my code for loading the model data:
struct Vbo
{
    GLuint id;
};

struct Ebo
{
    GLuint id;
    GLenum mode;
    GLsizei count;
    GLenum type;
};

struct Vao
{
    GLuint id;
    Vbo vbo[4];
    Ebo ebo;
};
// ...
MeshId GraphicsEngine::createStaticMesh(
    std::vector<glm::vec3> vertices,
    std::vector<glm::detail::uint32> indices,
    std::vector<glm::vec4> colors,
    std::vector<glm::vec3> normals,
    std::vector<glm::vec2> textureCoordinates
)
{
    Vao vao;
    glGenVertexArrays(1, &vao.id);
    glGenBuffers(1, &vao.vbo[0].id);
    glGenBuffers(1, &vao.ebo.id);

    auto size = vertices.size() * sizeof(glm::vec3);
    size += colors.size() * sizeof(glm::vec4);
    size += normals.size() * sizeof(glm::vec3);
    size += textureCoordinates.size() * sizeof(glm::vec2);

    glBindVertexArray(vao.id);
    glBindBuffer(GL_ARRAY_BUFFER, vao.vbo[0].id);
    glBufferData(GL_ARRAY_BUFFER, size, nullptr, GL_STATIC_DRAW);

    auto offset = 0;
    glBufferSubData(GL_ARRAY_BUFFER, offset, vertices.size() * sizeof(glm::vec3), &vertices[0]);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(0);

    offset += vertices.size() * sizeof(glm::vec3);
    glBufferSubData(GL_ARRAY_BUFFER, offset, colors.size() * sizeof(glm::vec4), &colors[0]);
    glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (GLvoid*)(offset));
    glEnableVertexAttribArray(1);

    offset += colors.size() * sizeof(glm::vec4);
    glBufferSubData(GL_ARRAY_BUFFER, offset, normals.size() * sizeof(glm::vec3), &normals[0]);
    glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)(offset));
    glEnableVertexAttribArray(2);

    offset += normals.size() * sizeof(glm::vec3);
    glBufferSubData(GL_ARRAY_BUFFER, offset, textureCoordinates.size() * sizeof(glm::vec2), &textureCoordinates[0]);
    glVertexAttribPointer(3, 2, GL_FLOAT, GL_FALSE, 0, (GLvoid*)(offset));
    glEnableVertexAttribArray(3);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vao.ebo.id);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(glm::detail::uint32), &indices[0], GL_STATIC_DRAW);

    glBindVertexArray(0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    vao.ebo.count = indices.size();
    vao.ebo.mode = GL_TRIANGLES;
    vao.ebo.type = GL_UNSIGNED_INT;

    vertexArrayObjects_.push_back(vao);
    auto index = vertexArrayObjects_.size() - 1;
    return MeshId(index);
}
Here is my code that does the rendering:
// Setup camera
const glm::quat temp = glm::conjugate(camera_.orientation);
view_ = glm::mat4_cast(temp);
view_ = glm::translate(view_, glm::vec3(-camera_.position.x, -camera_.position.y, -camera_.position.z));
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderProgram_);
const int modelMatrixLocation = glGetUniformLocation(shaderProgram_, "modelMatrix");
const int pvmMatrixLocation = glGetUniformLocation(shaderProgram_, "pvmMatrix");
const int normalMatrixLocation = glGetUniformLocation(shaderProgram_, "normalMatrix");
glm::detail::uint32 i = 0;
for ( const auto& r : renderables_ )
{
    const auto& graphicsData = graphicsData_[i];

    glm::mat4 newModel = glm::translate(model_, graphicsData.position);
    newModel = newModel * glm::mat4_cast( graphicsData.orientation );
    newModel = glm::scale(newModel, graphicsData.scale);

    // Send uniform variable values to the shader
    const glm::mat4 pvmMatrix(projection_ * view_ * newModel);
    glUniformMatrix4fv(pvmMatrixLocation, 1, GL_FALSE, &pvmMatrix[0][0]);

    glm::mat3 normalMatrix = glm::inverse(glm::transpose(glm::mat3(view_ * newModel)));
    glUniformMatrix3fv(normalMatrixLocation, 1, GL_FALSE, &normalMatrix[0][0]);

    glUniformMatrix4fv(modelMatrixLocation, 1, GL_FALSE, &newModel[0][0]);

    glBindTexture(GL_TEXTURE_2D, r.texture.id);
    glBindVertexArray(r.vao.id);
    glDrawElements(r.vao.ebo.mode, r.vao.ebo.count, r.vao.ebo.type, 0);
    glBindVertexArray(0);

    i++;
}
Fragment shader:
#version 330 core
in vec4 ourColor;
in vec2 texCoord;
out vec4 color;
uniform sampler2D ourTexture;
void main()
{
    color = texture(ourTexture, texCoord);
}
Vertex shader:
#version 330 core
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
uniform mat4 pvmMatrix;
uniform mat3 normalMatrix;
layout (location = 0) in vec3 position;
layout (location = 1) in vec4 color;
layout (location = 2) in vec3 normal;
layout (location = 3) in vec2 textureCoordinate;
out vec4 ourColor;
out vec2 texCoord;
void main()
{
    //gl_Position = vec4(position, 1.0);
    gl_Position = pvmMatrix * vec4(position, 1.0);
    ourColor = color;
    texCoord = textureCoordinate;
}

As per @MichaelNastenko's comment above, I added glEnable(GL_DEPTH_TEST); before my rendering code and it fixed the problem.
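For anyone hitting the same symptom: without depth testing, triangles are drawn in the order they are submitted rather than sorted by depth, so far faces can overwrite near ones as the view changes, which looks a lot like corrupted vertex data. A minimal sketch of the fix, done once during initialization:

glEnable(GL_DEPTH_TEST); // discard fragments that fail the depth comparison
glDepthFunc(GL_LESS);    // the default: keep the fragment nearest the camera

The render code above already clears GL_DEPTH_BUFFER_BIT every frame, so nothing else needs to change.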

Related

Corrupted data in vertex shader attribute location

My model looks stretched because of corrupted data in vertex shader attribute location
Here's the vertex shader code:
#version 330 core
layout (location = 0) in vec3 vertPos;
layout (location = 1) in vec3 vertNormal;
layout (location = 2) in vec2 texCoord;
layout (location = 3) in vec4 boneWeigths;
layout (location = 4) in ivec4 boneIDs;
out vec3 vNormal;
out vec3 fragPos;
out vec2 fragTexCoord;
const int MAX_BONES = 100;
uniform mat4 MVP;
uniform mat4 M;
uniform mat4 boneTransforms[MAX_BONES];
void main()
{
    mat4 boneTx = boneTransforms[boneIDs[0]] * boneWeigths[0]
                + boneTransforms[boneIDs[1]] * boneWeigths[1]
                + boneTransforms[boneIDs[2]] * boneWeigths[2]
                + boneTransforms[boneIDs[3]] * boneWeigths[3];
    vec4 pos = boneTx * vec4(vertPos, 1.0f);
    gl_Position = MVP * pos;
    vec4 normal = boneTx * vec4(vertNormal, 0.0f);
    vNormal = normalize(vec3(M * normal));
    fragPos = vec3(M * pos);
    fragTexCoord = vec2(texCoord.x, texCoord.y);
}
The problem seems to be corrupted data in boneIDs (the boneIDs data is fine on the CPU, but the shader receives corrupted values). I tried hard-coding the boneIDs data in the shader, and that works fine.
Here's the code for VAO:
// create buffers/arrays
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
// load data into vertex buffers
glBindBuffer(GL_ARRAY_BUFFER, VBO);
unsigned int sz = sizeof(BoneVertex);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(BoneVertex), &vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), &indices[0], GL_STATIC_DRAW);
// set the vertex attribute pointers
// vertex Positions
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)0);
// vertex normals
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)(3 * sizeof(float)));
// vertex texture coords
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)(6 * sizeof(float)));
// bone weights
glEnableVertexAttribArray(3);
glVertexAttribPointer(3, 4, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)(8 * sizeof(float)));
// bone ids
glEnableVertexAttribArray(4);
glVertexAttribPointer(4, 4, GL_INT, GL_FALSE, sizeof(BoneVertex), (void*)(12 * sizeof(float)));
glBindVertexArray(0);
BoneVertex structure:
struct BoneVertex
{
    glm::vec3 position;
    glm::vec3 normal;
    glm::vec2 textureCoords;
    glm::vec4 boneWeights;
    glm::ivec4 boneIDs;
};
This is weird, because the data for the first 3 attributes seems fine. The problem is with boneIDs and boneWeights.
Is this somehow related to padding, and how data is arranged in a structure? Or am I missing something else?
Thanks
boneIDs is a vertex shader input with an integral data type:
layout (location = 4) in ivec4 boneIDs;
If you want to specify generic vertex attribute data for an integral attribute, then you have to use glVertexAttribIPointer (focus on the I) rather than glVertexAttribPointer (see glVertexAttribPointer).
Note, the type argument doesn't specify the type of the target attribute; it specifies the element type of the source data array. glVertexAttribPointer converts the source data array to floating-point values, whereas glVertexAttribIPointer specifies the array for integral target attributes:
// wrong: the data is converted to floating point
glVertexAttribPointer(4, 4, GL_INT, GL_FALSE, sizeof(BoneVertex), (void*)(12 * sizeof(float)));

// correct: the data stays integral (note there is no "normalized" parameter)
glVertexAttribIPointer(4, 4, GL_INT, sizeof(BoneVertex), (void*)(12 * sizeof(float)));
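As to the padding concern raised in the question: rather than hand-counting floats, the offsets can be derived from the struct itself with offsetof, which stays correct even if the compiler inserts padding between members. A sketch using the BoneVertex type above:

#include <cstddef> // offsetof

glVertexAttribPointer (3, 4, GL_FLOAT, GL_FALSE, sizeof(BoneVertex),
                       (void*)offsetof(BoneVertex, boneWeights));
glVertexAttribIPointer(4, 4, GL_INT, sizeof(BoneVertex),
                       (void*)offsetof(BoneVertex, boneIDs));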

OpenGL drawing GL_LINES giving incorrect result

I am trying to draw a grid of velocity vectors; I expect the velocity at each grid point to be a line with a slope of 1 (a slanting line), but I always end up with a vertical line. I'm not sure what I'm doing wrong. Is there something I'm overlooking?
Here is how my vertex buffer looks:
float vel_pos[6*(N+2)*(N+2)];
int index1 = 0;
for (int i = 0; i < N+2; i++)
{
    for (int j = 0; j < N+2; j++)
    {
        vel_pos[index1]     = float(i);
        vel_pos[index1 + 1] = float(j);
        vel_pos[index1 + 2] = 0.0f;
        vel_pos[index1 + 3] = float(i) + 0.5f;
        vel_pos[index1 + 4] = float(j) + 0.5f;
        vel_pos[index1 + 5] = 0.0f;
        index1 += 6;
    }
}
Here is how I am creating my VBO and VAO:
unsigned int VBO, VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
// Bind vertex array object first and then bind the vertex buffer objects
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vel_pos), vel_pos, GL_STREAM_DRAW);
GLint velAttrib = glGetAttribLocation(ourShader.ID, "aPos");
// interpreting data from the buffer
glVertexAttribPointer(velAttrib, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
Here is my vertex shader:
out vec4 vertexColor;
layout (location = 0) in vec3 aPos;
layout (location = 1) in float densitySource; /* source of density */
uniform mat4 transform;
uniform mat4 projection;
void main()
{
    gl_Position = projection * transform * vec4(aPos, 1.0);
    vertexColor = vec4(1.0, 0.0, 0.0, 1.0);
}
And here's my drawing code:
ourShader.use();
glm::mat4 trans = glm::mat4(1.0f);
trans = glm::translate(trans, glm::vec3(-0.5f, -0.5f, 0.0f));
unsigned int transformMatrixLocation = glGetUniformLocation(ourShader.ID, "transform");
glUniformMatrix4fv(transformMatrixLocation, 1, GL_FALSE, glm::value_ptr(trans));
glm::mat4 projection = glm::ortho(-10.0f, 110.0f, -1.0f, 110.0f, -1.0f, 100.0f);
unsigned int projectionMatrixLocation = glGetUniformLocation(ourShader.ID, "projection");
glUniformMatrix4fv(projectionMatrixLocation, 1, GL_FALSE, glm::value_ptr(projection));
glBindVertexArray(VAO);
glLineWidth(1.0f);
glDrawArrays(GL_LINES, 0, (N+2)*(N+2));
This is the image I get:
resulting image
The 5th parameter (stride) of glVertexAttribPointer is the byte offset between two consecutive vertex coordinates, not between two primitives. Since your vertex coordinates have 3 components of type float, the stride has to be 3 * sizeof(float):
glVertexAttribPointer(velAttrib, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
Because you set a stride of 6 * sizeof(float), every second coordinate was skipped, so the lines were drawn between the grid points themselves rather than from each grid point to its offset endpoint, which is why they come out vertical.
But note, if stride is 0, the generic vertex attributes are understood to be tightly packed in the array. That is the case here, so you can use a stride of 0:
glVertexAttribPointer(velAttrib, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
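One more detail worth checking, independent of the stride: the count argument of glDrawArrays is the number of vertices, not the number of lines. The buffer above stores two vertices per grid point, so drawing every velocity vector takes:

glDrawArrays(GL_LINES, 0, 2 * (N+2) * (N+2));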

Sending a single unsigned int to a VBO

I am trying to send a single unsigned int per vertex to a VBO, but when I test its value in the vertex shader (where it should be 1), the value differs from the expected one.
The variable that contains my unsigned ints is textureIds.
The code that loads the VBO:
std::vector<unsigned int> textureIds;
glGenBuffers(1, &vboID_m);
glBindBuffer(GL_ARRAY_BUFFER, vboID_m);
{
    glBufferData(GL_ARRAY_BUFFER,
                 (vertices.size() + texture.size() + normals.size()) * sizeof(float)
                     + textureIds.size() * sizeof(unsigned int),
                 0, GL_STATIC_DRAW);
    glBufferSubData(GL_ARRAY_BUFFER, 0,
                    vertices.size() * sizeof(float), vertices.data());
    glBufferSubData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float),
                    texture.size() * sizeof(float), texture.data());
    glBufferSubData(GL_ARRAY_BUFFER,
                    (vertices.size() + texture.size()) * sizeof(float),
                    normals.size() * sizeof(float), normals.data());
    glBufferSubData(GL_ARRAY_BUFFER,
                    (vertices.size() + texture.size() + normals.size()) * sizeof(float),
                    textureIds.size() * sizeof(unsigned int), textureIds.data());
}
glBindBuffer(GL_ARRAY_BUFFER, 0);
The code that draws the VBO:
void Chunck::draw() const {
    glBindBuffer(GL_ARRAY_BUFFER, vboID_m);

    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float),
                          BUFFER_OFFSET(0));
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float),
                          BUFFER_OFFSET(verticeSize_m * sizeof(float)));
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float),
                          BUFFER_OFFSET((verticeSize_m + textureSize_m) * sizeof(float)));
    glEnableVertexAttribArray(2);
    glVertexAttribPointer(3, 1, GL_UNSIGNED_INT, GL_FALSE, sizeof(unsigned int),
                          BUFFER_OFFSET((verticeSize_m + textureSize_m + normalSize_m) * sizeof(float)));
    glEnableVertexAttribArray(3);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, grassTexture_m.getID());
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, dirtTexture_m.getID());

    glDrawArrays(GL_TRIANGLES, 0, verticeSize_m / 3);

    glBindTexture(GL_TEXTURE_2D, 0);
    glDisableVertexAttribArray(2);
    glDisableVertexAttribArray(1);
    glDisableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
}
The vertex shader:
#version 330 core
in vec3 in_Vertex;
in vec2 in_TexCoord0;
in vec3 in_normal;
in int in_textureId;
uniform mat4 projection;
uniform mat4 model;
uniform mat4 view;
out vec2 coordTexture;
out vec3 normal;
out vec3 FragPos;
out vec3 rayDir;
out float grassAmount;
out float dirtAmount;
void main()
{
    vec4 pos = model * vec4(in_Vertex, 1.0);
    vec2 mu = vec2(0., 0.);
    //pos.y = max(pos.y, 10 * exp(-((pos.x - mu.x) * (pos.x - mu.x) + (pos.z - mu.y) * (pos.z - mu.y)) / 100.));
    gl_Position = projection * view * pos;
    FragPos = vec3(pos);
    coordTexture = in_TexCoord0;
    normal = in_normal;
    rayDir = (view * model * vec4(FragPos, 1.)).xyz;
    grassAmount = 0;
    dirtAmount = 0;
    if (in_textureId == 0)
        grassAmount = 1;
    else if (in_textureId == 1)
        dirtAmount = 1;
}
Execution should enter the second if, but it doesn't.
You have to use glVertexAttribIPointer (focus on the I) when defining the array of generic vertex attribute data for the vertex attribute in int in_textureId;.
Vertex attribute data defined by glVertexAttribPointer will be converted to floating point.
See OpenGL 4.6 API Core Profile Specification; 10.2. CURRENT VERTEX ATTRIBUTE VALUES; page 344
The VertexAttribI* commands specify signed or unsigned fixed-point values
that are stored as signed or unsigned integers, respectively. Such values are referred to as pure integers.
...
All other VertexAttrib* commands specify values that are converted directly to the internal floating-point representation.
Note, you should either use a layout qualifier to specify the attribute index in the vertex shader:
layout(location = 0) in vec3 in_Vertex;
layout(location = 1) in vec2 in_TexCoord0;
layout(location = 2) in vec3 in_normal;
layout(location = 3) in int in_textureId;
or you should query the attribute index with glGetAttribLocation after the program has been linked:
e.g.:
GLuint progObj = ...;
glLinkProgram( progObj );

GLint texIdInx = glGetAttribLocation( progObj, "in_textureId" );
// note: glVertexAttribIPointer has no "normalized" parameter
glVertexAttribIPointer(
    texIdInx, 1, GL_UNSIGNED_INT, sizeof(unsigned int),
    BUFFER_OFFSET((verticeSize_m + textureSize_m + normalSize_m) * sizeof(float)));
glEnableVertexAttribArray(texIdInx);
Edit:
Of course, glBindAttribLocation is a proper solution too.
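For completeness, a sketch of that variant (reusing progObj from above); the binding has to be issued before the program is linked:

glBindAttribLocation( progObj, 3, "in_textureId" );
glLinkProgram( progObj );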

OpenGL texture shader not displaying to screen

I'm trying to texture the ground with grass. Would someone mind looking through my steps to get a 2D texture to show up in 3D space? I haven't had any luck with online tutorials.
Here are my steps:
1) Call the sprite class initialization to set the vertex coordinates and UV data:
void Sprite::init(vec3 bottomleft, vec3 topLeft, vec3 topRight, vec3 bottomRight, std::string texturePath)
{
    _texture = ImageLoader::loadPNG(texturePath);
    _points = { bottomleft.x,  bottomleft.y,  bottomleft.z,
                topLeft.x,     topLeft.y,     topLeft.z,
                topRight.x,    topRight.y,    topRight.z,
                topRight.x,    topRight.y,    topRight.z,
                bottomRight.x, bottomRight.y, bottomRight.z,
                bottomleft.x,  bottomleft.y,  bottomleft.z };
    _uv = { 0.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f,
            1.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 0.0f };
}
2) Set the sampler uniform and activate the texture:
//draw ground texture
glUniform1i(samplerLocation, 0);
glActiveTexture(GL_TEXTURE0);
_grassTexture.draw();
The draw function is implemented as follows; it puts the data in the buffer and draws the two triangles:
void Sprite::draw()
{
    if (_vboID == 0)
    {
        glGenBuffers(1, &_vboID);
    }

    glBindTexture(GL_TEXTURE_2D, _texture.id);
    glBindBuffer(GL_ARRAY_BUFFER, _vboID);
    glBufferData(GL_ARRAY_BUFFER, _points.size() * sizeof(float) + _uv.size() * sizeof(float), nullptr, GL_STATIC_DRAW);
    glBufferSubData(GL_ARRAY_BUFFER, 0, _points.size() * sizeof(float), &_points.front());
    glBufferSubData(GL_ARRAY_BUFFER, _points.size() * sizeof(float), _uv.size() * sizeof(float), &_uv.front());

    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);
    //position attribute pointer
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, &_points.front());
    //uv attribute pointer
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, &_uv.front());

    glDrawArrays(GL_TRIANGLES, 0, 6);

    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
}
The vertex shader:
#version 400
in vec4 vertexPosition;
in vec2 vertexUV;
out vec4 fragmentColor;
out vec2 fragmentUV;
uniform mat4 MVP;
uniform vec4 COLOR;
void main()
{
    gl_Position = MVP * vertexPosition;
    fragmentColor = COLOR;
    fragmentUV = vec2(vertexUV.x, 1.0 - vertexUV.y);
}
and fragment shader:
#version 400
in vec4 fragmentColor;
in vec2 fragmentUV;
out vec4 fragColor;
uniform sampler2D SAMPLER;
void main()
{
    vec4 textureColor = texture(SAMPLER, fragmentUV);
    fragColor = textureColor * fragmentColor;
}
At this point, nothing displays on the screen. I do know that textureColor in the fragment shader is green, so the uniform appears to be set correctly. My best guess is that I'm either missing a texture initialization step or not filling the buffer correctly.
I think there's a problem with the pointer parameter in your calls to glVertexAttribPointer. You're passing the address of your _points and _uv arrays, but you should be passing the offset relative to the base of your VBO.
Try:
//position attribute pointer
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
//uv attribute pointer
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)(_points.size() * sizeof(float)));

Simple GLSL example not showing anything on screen

I am trying to render a simple cube using GLSL, but I only get an empty screen. I don't know what I am doing wrong. The vertices, normals, and triangles are exported from Blender.
void InitBuffers() {
    // monkey vertices, normals
    readVerticesNormals();

    // cube vertices
    glGenVertexArraysAPPLE(1, &CubeVao);
    glBindVertexArrayAPPLE(CubeVao);

    glGenBuffers(1, &CubeVboPositions);
    glBindBuffer(GL_ARRAY_BUFFER, CubeVboPositions);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);

    glGenBuffers(1, &CubeVboColors);
    glBindBuffer(GL_ARRAY_BUFFER, CubeVboColors);
    glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, 0);

    glGenBuffers(1, &CubeNormals);
    glBindBuffer(GL_ARRAY_BUFFER, CubeNormals);
    glBufferData(GL_ARRAY_BUFFER, sizeof(normals), normals, GL_STATIC_DRAW);
    glEnableVertexAttribArray(2);
    glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, 0);

    glGenBuffers(1, &CubeIbo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, CubeIbo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(triangles), triangles, GL_STATIC_DRAW);
}
I bind the vertex data to the shader.
glBindAttribLocation(ProgramShader, 0, "position");
glBindAttribLocation(ProgramShader, 1, "color");
glBindAttribLocation(ProgramShader, 2, "normal");
The camera is positioned at (0,0,0) looking towards (0,0,-1). The object, in this case the cube, is positioned at (0,0,-4).
The render function is:
void display() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glColor3d(1, 0, 0);

    // set view matrix
    ViewMatrix.setView(0, 0, 0, 0, 0, -1, 0, 1, 0);

    // use shader program
    glUseProgram(ProgramShader);

    // send uniforms to shader
    glUniformMatrix4fv(ProjectionMatrixLocation, 1, false, ProjectionMatrix.m);
    glUniformMatrix4fv(ViewMatrixLocation, 1, false, ViewMatrix.m);
    glUniformMatrix4fv(ModelMatrixLocation, 1, false, ModelMatrix.m);

    glBindVertexArrayAPPLE(CubeVao);
    glDrawElements(GL_TRIANGLES, 3 * tri_num, GL_UNSIGNED_INT, (void*)0);

    glutSwapBuffers();
}
Vertex shader:
attribute vec3 position;
attribute vec3 color;
attribute vec3 normal;
uniform mat4 modelMatrix,viewMatrix,projMatrix;
varying vec4 Normal;
varying vec4 Position;
varying vec4 Color;
void main() {
    // position in view space
    Position = viewMatrix * modelMatrix * vec4(position, 1.0);
    // normal in view space
    Normal = normalize(viewMatrix * modelMatrix * vec4(normal, 1.0));
    Color = vec4(color, 1.0);
    // final position
    gl_Position = projMatrix * viewMatrix * modelMatrix * vec4(position, 1.0);
}
Fragment shader:
varying vec4 Normal;
varying vec4 Position;
varying vec4 Color;
void main() {
    gl_FragColor = Color;
}
Depending on how vertices, colors, etc. are defined, sizeof(vertices) may just return the size of a pointer instead of the size of the data. Try:
3 * numVertices * sizeof(float)
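To illustrate the pitfall (a sketch; numVertices stands in for however many vertices were read):

float stackVertices[24];              // complete array type visible here, so
sizeof(stackVertices);                // == 24 * sizeof(float): the full data size

float* heapVertices = new float[24];  // only a pointer visible here, so
sizeof(heapVertices);                 // == sizeof(float*): typically 8 bytes

// for heap-allocated data (or arrays passed as parameters, which decay to
// pointers), pass the size explicitly:
glBufferData(GL_ARRAY_BUFFER, 3 * numVertices * sizeof(float), heapVertices, GL_STATIC_DRAW);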