So I'm trying to render a rectangle in OpenGL using an index buffer, but instead I'm getting a triangle with one vertex at the origin (even though no vertex in my rectangle is supposed to be at the origin).
void Renderer::drawRect(int x, int y, int width, int height)
{
    // (Ignoring method arguments for debugging)
    float vertices[12] = {200.f, 300.f, 0.f,
                          200.f, 100.f, 0.f,
                          600.f, 100.f, 0.f,
                          600.f, 300.f, 0.f};
    unsigned int indices[6] = {0, 1, 3, 1, 2, 3};
    glBindVertexArray(this->flat_shape_VAO);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->element_buffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_DYNAMIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, this->render_buffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_DYNAMIC_DRAW);
    glEnableVertexAttribArray(0);
    glUseProgram(this->shader_program);
    glUniformMatrix4fv(this->model_view_projection_uniform, 1, GL_FALSE, glm::value_ptr(this->model_view_projection_mat));
    glUniform3f(this->color_uniform, (float)this->color.r, (float)this->color.g, (float)this->color.b);
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
}
My projection matrix is working fine; I can still render a triangle at the correct screen coordinates. I suspect I did the index buffering wrong. Transformation matrices also work fine, at least on my triangles.
Edit:
The VAO's attributes are set up in the class constructor with glVertexAttribPointer();
Edit 2:
I disabled shaders completely and something interesting happened.
Here is the shader source code:
(vertex shader)
#version 330 core
layout (location = 0) in vec3 aPos;
uniform mat4 mvp;
uniform vec3 aColor;
out vec3 color;
void main()
{
gl_Position = mvp * vec4(aPos, 1.0);
color = aColor;
}
(fragment shader)
#version 330 core
in vec3 color;
out vec4 FragColor;
void main()
{
FragColor = vec4(color,1.0f);
}
My projection matrix shouldn't work with the shaders disabled, yet I still see a triangle rendering on the screen.
What is the stride argument of glVertexAttribPointer? stride specifies the byte offset between consecutive generic vertex attributes. In your case it should be 0 or 12 (3 * sizeof(float)), but judging from your images it seems to be 24, because the triangle you get has the 1st (200, 300) and 3rd (600, 100) vertices plus one extra vertex at (0, 0), most likely from reading past the end of the buffer. In other words, your constructor probably contains something like
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(float), nullptr);
when it should be
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), nullptr);
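For reference, a minimal sketch of what the constructor-side setup could look like so that it matches the tightly packed vec3 positions used in drawRect (member names taken from the question's code; the actual constructor isn't shown, so treat this as an assumption):
// Sketch only: assumes the members used in drawRect() above.
glGenVertexArrays(1, &this->flat_shape_VAO);
glGenBuffers(1, &this->render_buffer);
glGenBuffers(1, &this->element_buffer);
glBindVertexArray(this->flat_shape_VAO);
glBindBuffer(GL_ARRAY_BUFFER, this->render_buffer);
// Each vertex is three tightly packed floats, so the stride is 3 * sizeof(float) (or 0).
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), nullptr);
glEnableVertexAttribArray(0);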
Related
I'm trying to have 4 integers represent the color of all the vertices in a VBO by setting the stride on the color vertex attribute pointer. However, it seems to read the color value only once and, as a result, renders the rest of the vertices black, as in the picture. The expected result is that all the vertices will be white.
Here is the relevant pieces of code:
int triangleData[18] =
{
2147483647,2147483647,2147483647,2147483647,//opaque white
0,100, //top
100,-100, //bottom right
-100,-100 //bottom left
};
unsigned int colorVAO, colorVBO;
glGenVertexArrays(1, &colorVAO);
glGenBuffers(1, &colorVBO);
glBindVertexArray(colorVAO);
glBindBuffer(GL_ARRAY_BUFFER, colorVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangleData), triangleData, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_INT, GL_FALSE, 2 * sizeof(int), (void*)(4*sizeof(int)));
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 4, GL_INT, GL_TRUE, 0, (void*)0);
glEnableVertexAttribArray(1);
Vertex shader:
#version 330 core
layout (location = 0) in vec2 aPos;
layout (location = 1) in vec4 aColor;
out vec4 Color;
uniform mat4 model;
uniform mat4 view;
uniform mat4 ortho;
void main()
{
gl_Position = ortho * view * model * vec4(aPos, 1.0, 1.0);
Color = aColor;
}
Fragment shader:
#version 330 core
out vec4 FragColor;
in vec4 Color;
void main()
{
FragColor = Color;
}
From the documentation of glVertexAttribPointer:
stride
Specifies the byte offset between consecutive generic vertex attributes. If stride is 0, the generic vertex attributes are understood to be tightly packed in the array.
Setting the stride to 0 does not mean that the same data is read for each vertex. It means that the data is packed one after the other in the buffer.
If you want all the vertices to use the same data, you can either disable the attribute and use glVertexAttrib, or you can use the separate vertex format (available starting from OpenGL 4.3 or with ARB_vertex_attrib_binding) similar to:
glBindVertexBuffer(index, buffer, offset, 0);
where a stride of 0 really means no stride.
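Applied to the code in the question (attribute 1 is the color), the first option is the simplest; a minimal sketch, passing the constant directly as floats instead of normalized ints:
// Stop sourcing attribute 1 from the VBO and give it one constant value instead.
glDisableVertexAttribArray(1);
glVertexAttrib4f(1, 1.0f, 1.0f, 1.0f, 1.0f); // opaque white used for every vertex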
I have created a program that renders a 3D cube, and now I'd like to change the position of the cube. The matrix multiplication I'm doing now seems to distort the cube instead of changing its position. The distortion is small for values in the 0.1 - 0.4 range and fills the whole screen for larger values.
The vertex shader:
#version 330 core
layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec3 vertexColor;
uniform mat4 projectionMatrix;
uniform mat4 cameraMatrix;
uniform mat4 modelMatrix;
out vec3 fragmentColor;
void main()
{
gl_Position = projectionMatrix * cameraMatrix * modelMatrix * vec4(vertexPosition_modelspace,1);
fragmentColor = vertexColor;
}
Model.cpp (note that modelMatrix is initialized to an identity matrix and I'm using glm)
void Model::SetPos(glm::vec3 coords)
{
modelMatrix[0][3] = coords[0];
modelMatrix[1][3] = coords[1];
modelMatrix[2][3] = coords[2];
}
void Model::Render()
{
// Select the right program
glUseProgram(program);
// Set the model matrix in the shader
GLuint MatrixID = glGetUniformLocation(program, "modelMatrix");
glUniformMatrix4fv(MatrixID, 1, GL_FALSE, value_ptr(modelMatrix));
// Setup the shader color attributes
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
// Setup the shader vertex attributes
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
// Draw the model
glDrawArrays(GL_TRIANGLES, 0, triangles);
// Now disable the attributes
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
}
The other matrices are initialized like this and remain unchanged:
cameraMatrix = glm::lookAt(pos, target, orient);
projectionMatrix = glm::perspective(45.0f, 1280.0f / 720.0f, 0.1f, 100.0f);
The glm library produces column-major matrices. You've also specified GL_FALSE for the transpose argument of glUniformMatrix4fv, which is correct for column-major matrices. However, when you are setting the position you are setting the wrong values. This code:
void Model::SetPos(glm::vec3 coords)
{
modelMatrix[0][3] = coords[0];
modelMatrix[1][3] = coords[1];
modelMatrix[2][3] = coords[2];
}
This causes the matrix to produce values other than 1.0 in the w component after multiplication, which can result in some weird distortions. You should change SetPos to this:
void Model::SetPos(glm::vec3 coords)
{
modelMatrix[3][0] = coords[0];
modelMatrix[3][1] = coords[1];
modelMatrix[3][2] = coords[2];
}
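Equivalently, you could let glm build the translation instead of writing individual elements; a minimal sketch, assuming the model matrix should just be a translation rebuilt from identity each call:
void Model::SetPos(glm::vec3 coords)
{
    // glm::translate puts the offset into the fourth column (modelMatrix[3]),
    // which is where a column-major matrix keeps its translation.
    modelMatrix = glm::translate(glm::mat4(1.0f), coords);
}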
I'm having trouble figuring out how to position a 2D object in my scene using screen coordinates. At the moment I have something working (code below), but it wants NDC coordinates, which aren't easy to work with. I can't figure out where it's going wrong. I think I've used everything the way it should be used, so I'm probably forgetting something.
Here's the code that handles the drawing of the objects in my scene:
glClearDepth(1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// RENDERING HERE
colorProgram.bind();
for (size_t t = 0; t < objectsWithGraphicsComponentInThisScene.size(); ++t)
{
// set texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, objectsWithGraphicsComponentInThisScene[t]->getComponent<GraphicsComponent>()->getTexture());
GLint texLocation = colorProgram.getUniformLocation("texSampler");
glUniform1i(texLocation, 0);
glm::mat4 trans;
trans = glm::translate(glm::mat4x4(1.0f), glm::vec3(objectsWithGraphicsComponentInThisScene[t]->getPosition().x, objectsWithGraphicsComponentInThisScene[t]->getPosition().y, 0));
GLint transMatLocation = colorProgram.getUniformLocation("transformMatrix");
glUniformMatrix4fv(transMatLocation, 1, GL_FALSE, glm::value_ptr(trans));
// set camera Matrix
GLint projMatLocation = colorProgram.getUniformLocation("projectionMatrix");
glm::mat4 cameraMatrix = camera->getCameraMatrix();
glUniformMatrix4fv(projMatLocation, 1, GL_FALSE, glm::value_ptr(cameraMatrix));
objectsWithGraphicsComponentInThisScene[t]->getComponent<GraphicsComponent>()->getSprite()->draw();
// unbind all
glBindTexture(GL_TEXTURE_2D, 0);
}
colorProgram.unbind();
where colorProgram is the shader my sprites use and getPosition() simply returns a value I've set (where the x, y and z values should be given as screen coordinates). So, for example, getPosition might return [100, 50, 0], but that renders the object outside of the screen (the screen is 1280x720).
Now the code that renders the sprite (objectsWithGraphicsComponentInThisScene[t]->getComponent<GraphicsComponent>()->getSprite()->draw();):
void Sprite::draw()
{
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glEnableVertexAttribArray(0);
// position
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, position));
//color
glVertexAttribPointer(1, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(Vertex), (void*)offsetof(Vertex, color));
// uv
glVertexAttribPointer(2, 2, GL_FLOAT, GL_TRUE, sizeof(Vertex), (void*)offsetof(Vertex, uv));
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
And here's the code in the shader (colorProgram):
VERTEX SHADER:
#version 130
// per vertex
// input data from VBO
in vec2 vertexPosition;
in vec4 vertexColor;
in vec2 vertexUV;
// output to fragment shader
out vec4 fragmentColor;
out vec2 fragmentUV;
uniform mat4 projectionMatrix;
uniform mat4 transformMatrix;
void main()
{
mat4 resultMatrix = transformMatrix * projectionMatrix;
gl_Position.xy = (resultMatrix * vec4(vertexPosition, 0.0, 1.0)).xy;
gl_Position.z = 0.0;
// Indicate that the coordinates are normalized
gl_Position.w = 1.0;
fragmentColor = vertexColor;
fragmentUV = vec2(vertexUV.x, 1.0 - vertexUV.y);
}
FRAGMENT SHADER
#version 130
// per pixel
// input from vertex shader
in vec4 fragmentColor;
in vec2 fragmentUV;
out vec4 color;
uniform sampler2D texSampler;
void main()
{
vec4 textureColor = texture(texSampler, fragmentUV);
if (textureColor.a < 0.5) discard;
color = fragmentColor * textureColor;
}
If you need more code I'd be happy to add more although I think this is everything that is needed.
This sequence in your vertex shader
mat4 resultMatrix = transformMatrix * projectionMatrix;
gl_Position.xy = (resultMatrix * vec4(vertexPosition, 0.0, 1.0)).xy;
is very unlikely to be what you actually want. Since you use the matrix * vector convention, you'll end up with
position = transform * projection * v
= transform * (projection * v)
In other words: you apply the transformation after the projection. After the projection, the viewing volume is the [-1,1]^3 range (in Euclidean NDC space after the perspective divide; strictly speaking we are working in clip space here, where it is [-w,w]^3, but that is not important in this context), so translating the object by values like 100 units will certainly move it out of the viewing frustum.
You should just reverse the order of your matrix multiplication.
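Applied to the vertex shader above, that would look roughly like this (the z and w assignments that follow can stay as they are):
// Transform into world/camera space first, then project.
mat4 resultMatrix = projectionMatrix * transformMatrix;
gl_Position.xy = (resultMatrix * vec4(vertexPosition, 0.0, 1.0)).xy;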
I'm having an issue drawing multiple point lights in my scene. I am working on a simple maze-style game in OpenGL, where the maze is randomly generated. Each "room" in the maze is represented by a Room struct, like so:
struct Room
{
int x, z;
bool already_visited, n, s, e, w;
GLuint vertex_buffer, texture, uv_buffer, normal_buffer;
std::vector<glm::vec3>vertices, normals;
std::vector<glm::vec2>uvs;
glm::vec3 light_pos; //Stores the position of a light in the room
};
Each room has a light in it; the position of this light is stored in light_pos. This light is used in a simple per-vertex shader, like so:
layout(location = 0) in vec3 pos;
layout(location = 1) in vec2 uv_coords;
layout(location = 2) in vec3 normal;
uniform mat4 mvpMatrix;
uniform mat4 mvMatrix;
uniform vec3 lightpos;
out vec2 vs_uv;
out vec3 vs_normal;
out vec3 color;
void main()
{
gl_Position = mvpMatrix * vec4(pos,1.0);
vs_normal = normal;
vs_uv = uv_coords;
vec3 lightVector = normalize(lightpos - pos);
float diffuse = clamp(dot(normal,lightVector),0.0,1.0);
color = vec3(diffuse,diffuse,diffuse);
}
My fragment shader looks like this (ignore the "vs_normal", it is unused for now):
in vec2 vs_uv;
in vec3 vs_normal;
in vec3 color;
uniform sampler2D tex;
out vec4 frag_color;
void main()
{
frag_color = vec4(color,1.0) * texture(tex,vs_uv).rgba;
}
And my drawing code looks like this:
mat4 mvMatrix = view_matrix*model_matrix;
mat4 mvpMatrix = projection_matrix * mvMatrix;
glBindVertexArray(vertexBufferObjectID);
glUseProgram(shaderProgram);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
for (int x = 0; x < NUM_ROOMS_X; x++)
{
for (int z = 0; z < NUM_ROOMS_Z; z++)
{
//int x = int(std::round(position.x / ROOM_SIZE_X_MAX));
//int z = int(std::round(position.z / ROOM_SIZE_Z_MAX));
Room rm = room_array[x][z];
glBindBuffer(GL_ARRAY_BUFFER, rm.vertex_buffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*) 0);
glBindBuffer(GL_ARRAY_BUFFER, rm.uv_buffer);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*) 0);
glBindBuffer(GL_ARRAY_BUFFER, rm.normal_buffer);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (void*) 0);
glUniformMatrix4fv(mvpMatrixID, 1, GL_FALSE, &mvpMatrix[0][0]);
glUniformMatrix4fv(mvMatrixID, 1, GL_FALSE, &mvMatrix[0][0]);
glUniform3fv(light_ID, 3, &rm.light_pos[0]); //Here is where I'm setting the new light position. It looks like this is ignored
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, rm.texture);
glUniform1i(texture_ID, 0);
glDrawArrays(GL_QUADS, 0, rm.vertices.size());
}
}
glUseProgram(0);
glBindVertexArray(0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glBindTexture(GL_TEXTURE_2D, 0);
However, here is what the result looks like (I've modified my drawing code to draw a box where each light is located, and I've circled the room at position (0,0)):
http://imgur.com/w4uPMD6
As you can see, it looks like only the light at position (0,0) affects any of the rooms on the map; the other lights are simply ignored. I know that the lights are positioned correctly, because the boxes I use to show the positions are correct. It seems that even though I'm setting the new light_pos, it isn't going through for some reason. Any ideas?
One thing that you are doing, which is not very common, is passing the light position as a vertex attribute. Optimally, you should pass it to the shader as a uniform variable, just as you do with the matrices. But I doubt that is the problem here.
I believe your problem is that you are doing the lighting calculations in different spaces. The vertices of the surfaces you draw are in object/model space while, I'm guessing, your light is located at a point defined in world space. Try multiplying your light position by the inverse of the model matrix you are applying to the vertices. I'm not familiar with GLM, but I figure there must be an inverse() function in it:
glm::vec3 light_pos_object_space = glm::vec3(glm::inverse(model_matrix) * glm::vec4(rm.light_pos, 1.0f));
glVertexAttrib3fv(light_ID, &light_pos_object_space[0]);
Figured out my problem. I was calling this function:
glUniform3fv(light_ID, 3, &rm.light_pos[0]);
When I should have been calling this:
glUniform3fv(light_ID, 1, &rm.light_pos[0]);
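Note that the second argument of glUniform3fv is the number of vec3 elements to upload, not the number of components, so a count of 3 would only be appropriate if the uniform were declared as an array. A minimal sketch of that hypothetical array case (not what the shader in the question declares):
// Hypothetical shader declaration: uniform vec3 lightpos[3];
glm::vec3 lights[3] = { rm.light_pos, rm.light_pos, rm.light_pos }; // placeholder values
glUniform3fv(light_ID, 3, glm::value_ptr(lights[0]));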
I have been successful in rendering primitives with a colour component via the shader and also translating them. However, upon attempting to load a texture and render it for the primitive via the shader, the primitives glitch; they should be squares:
As you can see, it successfully loads and applies the texture with the colour component to the single primitive in the scene.
If I then remove the color component, I again have primitives, but oddly they are scaled by changing the UVs. This should not be the case; only the UVs should scale! (Also, their origin is offset.)
My shader init code:
void renderer::initRendererGfx()
{
    shader->compilerShaders();
    shader->loadAttribute(@"Position");
    shader->loadAttribute(@"SourceColor");
    shader->loadAttribute(@"TexCoordIn");
}
Here is my object handler rendering function code:
void renderer::drawRender(glm::mat4 &view, glm::mat4 &projection)
{
    // Loop through all objects of base type OBJECT
    for (int i = 0; i < SceneObjects.size(); i++) {
        if (SceneObjects.size() > 0) {
            shader->bind(); // Bind the shader for the rendering of this object
            SceneObjects[i]->mv = view * SceneObjects[i]->model;
            shader->setUniform(@"modelViewMatrix", SceneObjects[i]->mv); // Calculate object model view
            shader->setUniform(@"MVP", projection * SceneObjects[i]->mv); // Apply projection transforms to object
            glActiveTexture(GL_TEXTURE0); // unnecessary in practice
            glBindTexture(GL_TEXTURE_2D, SceneObjects[i]->_texture);
            shader->setUniform(@"Texture", 0); // Apply the uniform for this instance
            SceneObjects[i]->draw(); // Draw this object
            shader->unbind(); // Release the shader for the next object
        }
    }
}
Here is my sprite buffer initialisation and draw code:
void spriteObject::draw()
{
glVertexAttribPointer((GLuint)0, 3, GL_FLOAT, GL_FALSE, sizeof(SpriteVertex), NULL);
glVertexAttribPointer((GLuint)1, 4, GL_FLOAT, GL_FALSE, sizeof(SpriteVertex) , (GLvoid*) (sizeof(GL_FLOAT) * 3));
glVertexAttribPointer((GLuint)2, 2, GL_FLOAT, GL_FALSE, sizeof(SpriteVertex) , (GLvoid*)(sizeof(GL_FLOAT) * 7));
glDrawElements(GL_TRIANGLE_STRIP, sizeof(SpriteIndices)/sizeof(SpriteIndices[0]), GL_UNSIGNED_BYTE, 0);
}
void spriteObject::initBuffers()
{
glGenBuffers(1, &vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(SpriteVertices), SpriteVertices, GL_STATIC_DRAW);
glGenBuffers(1, &indexBufferID);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBufferID);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(SpriteIndices), SpriteIndices, GL_STATIC_DRAW);
}
Here is the vertex shader:
attribute vec3 Position;
attribute vec4 SourceColor;
varying vec4 DestinationColor;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 MVP;
attribute vec2 TexCoordIn;
varying vec2 TexCoordOut;
void main(void) {
DestinationColor = SourceColor;
gl_Position = MVP * vec4(Position,1.0);
TexCoordOut = TexCoordIn;
}
And finally the fragment shader:
varying lowp vec4 DestinationColor;
varying lowp vec2 TexCoordOut;
uniform sampler2D Texture;
void main(void) {
gl_FragColor = DestinationColor * texture2D(Texture, TexCoordOut);
}
If you want to see any more specifics of certain elements, just ask.
Many thanks.
Are you sure your triangles have the same winding? The winding is the order in which the triangle points are listed (either clockwise or counter-clockwise). The winding is used in face culling to determine whether a triangle is front-facing or back-facing.
You can easily check whether your triangles are wrongly wound by disabling face culling:
glDisable(GL_CULL_FACE);
More information here: http://db-in.com/blog/2011/02/all-about-opengl-es-2-x-part-23/#face_culling
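For completeness, the usual convention is counter-clockwise front faces; you can either disable culling while debugging or state the convention explicitly and make sure all triangles are wound to match (a minimal sketch, using plain GL/GL ES calls):
// Option 1: disable culling entirely while debugging.
glDisable(GL_CULL_FACE);
// Option 2: state the convention explicitly and wind all triangles CCW.
glEnable(GL_CULL_FACE);
glFrontFace(GL_CCW); // counter-clockwise triangles are front-facing
glCullFace(GL_BACK); // discard back-facing triangles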