OpenGL 3.2 trouble setting up matrices - c++

I am using GLM to manage my matrices, but I am running into some problems that make no sense to me. When I set the projection matrix to anything other than an identity matrix, I can't see the square I am trying to draw. If it is an identity matrix, it works. Something similar happens with my view matrix: if I translate past -1 or +1 the square disappears; otherwise the translation seems to have no effect.
There are no OpenGL errors and no GLSL compiler/linker errors, and glGetUniformLocation returns a valid location. The shader program is also being used correctly.
I have also tested that the shader is receiving the correct values for each of the matrices (by changing the color of the square when the value is correct).
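(The question doesn't show that test; in the vertex shader below, a check of that kind might look roughly like this:)
// illustration only: turn the square red when the uniform carries a non-identity value
if (projectionMatrix[1][1] != 1.0)
    pass_Color = vec3(1.0, 0.0, 0.0);
else
    pass_Color = in_Color;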
Here's how I set up the projection matrix:
projectionMatrix = glm::perspective(60.0f, (float)windowWidth / (float)windowHeight, 0.1f, 100.0f);
And here's my draw function:
void OpenGLContext::render(void) {
glViewport(0, 0, windowWidth, windowHeight); // Set the viewport size to fill the window
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT); // Clear required buffers
//Set up matrices
viewMatrix = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.0f, -5.0f));
modelMatrix = glm::scale(glm::mat4(1.0f), glm::vec3(.5f));
shader->bind();
int projectionMatrixLocation = glGetUniformLocation(shader->id(), "projectionMatrix");
int viewMatrixLocation = glGetUniformLocation(shader->id(), "viewMatrix");
int modelMatrixLocation = glGetUniformLocation(shader->id(), "modelMatrix");
glUniformMatrix4fv(projectionMatrixLocation, 1, GL_FALSE, &projectionMatrix[0][0]);
glUniformMatrix4fv(viewMatrixLocation, 1, GL_FALSE, &viewMatrix[0][0]);
glUniformMatrix4fv(modelMatrixLocation, 1, GL_FALSE, &modelMatrix[0][0]);
glBindVertexArray(vaoID[0]);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
shader->unbind();
SwapBuffers(hdc);
}
Here's the shader.vert
#version 150 core
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
in vec3 in_Position;
in vec3 in_Color;
out vec3 pass_Color;
void main(void)
{
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(in_Position, 1.0);
pass_Color = in_Color;
}
Here's shader.frag
#version 150 core
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
in vec3 pass_Color;
out vec4 out_Color;
void main(void)
{
out_Color = vec4(pass_Color, 1.0);
}
Sorry, I forgot to include what I'm drawing:
void OpenGLContext::createSquare(void)
{
float* vertices = new float[18];
vertices[0] = -0.5; vertices[1] = -0.5; vertices[2] = 0.0; // Bottom left corner
vertices[3] = -0.5; vertices[4] = 0.5; vertices[5] = 0.0; // Top left corner
vertices[6] = 0.5; vertices[7] = 0.5; vertices[8] = 0.0; // Top Right corner
vertices[9] = 0.5; vertices[10] = -0.5; vertices[11] = 0.0; // Bottom right corner
vertices[12] = -0.5; vertices[13] = -0.5; vertices[14] = 0.0; // Bottom left corner
vertices[15] = 0.5; vertices[16] = 0.5; vertices[17] = 0.0; // Top Right corner
glGenVertexArrays(1, &vaoID[0]);
glBindVertexArray(vaoID[0]);
glGenBuffers(1, vboID);
glBindBuffer(GL_ARRAY_BUFFER, vboID[0]);
glBufferData(GL_ARRAY_BUFFER, 18 * sizeof(GLfloat), vertices, GL_STATIC_DRAW);
glVertexAttribPointer((GLuint) 0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0); // Enable the first vertex attribute array
glBindVertexArray(0);
delete [] vertices;
}
Setting my matrices like this results in nothing being drawn on the screen. Like I said, if I set the projection and view matrices to identity it works. The scaling on the modelMatrix seems to always work as well.

There is no attribute at location 1 (in_Color). If you just left it out of this question, then the problem is the attribute locations, which you are not defining in the shaders. I've never actually tested it without explicit locations, but I think they are necessary, at least when there are multiple attributes: you should use e.g. layout(location = 0) in vec3 in_Position;.
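With #version 150 core the layout(location = ...) qualifier on vertex inputs requires the ARB_explicit_attrib_location extension, so the other common option is to bind the locations from the application before linking. A minimal sketch (raw GL calls rather than the question's Shader wrapper, with vertexShader and fragmentShader assumed to be already-compiled shader objects):
// Option A (GL 3.3+ or ARB_explicit_attrib_location), in shader.vert:
//   layout(location = 0) in vec3 in_Position;
//   layout(location = 1) in vec3 in_Color;
// Option B (plain #version 150): bind the locations before linking.
GLuint program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glBindAttribLocation(program, 0, "in_Position"); // must be called before glLinkProgram
glBindAttribLocation(program, 1, "in_Color");
glLinkProgram(program);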

Related

light bugs on 3D model [duplicate]

This question already has an answer here: light source is not set correctly (1 answer)
I have the picture below showing the 3D model, and the lighting is not what I expected. I've already tried many things, but I can't figure out how to fix it. My normal vectors are fine.
The geometry:
glGenVertexArrays(1, &VAOArray);
glBindVertexArray(VAOArray);
/* GENERATE THE BUFFERS */
glGenBuffers(1, &bufferArray);
/* SELECT THAT BUFFER TO WORK WITH */
glBindBuffer(GL_ARRAY_BUFFER, bufferArray);
glBufferData(GL_ARRAY_BUFFER, myMeshes.at(j).realPositions.size() * sizeof(float), (GLfloat*)RealPos, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0);
glGenBuffers(1, &normalArray);
glBindBuffer(GL_ARRAY_BUFFER, normalArray);
glBufferData(GL_ARRAY_BUFFER, myMeshes.at(j).realNormals.size()*sizeof(float), (GLfloat*)RealNor, GL_STATIC_DRAW);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(3);
this is my code:
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glm::mat4 view = glm::mat4(1.0f);
glm::mat4 projection = glm::mat4(1.0f);
projection = glm::perspective(glm::radians(45.0f), (float)width / (float)heigh, 0.1f, 100.0f);
view = glm::translate(view, glm::vec3(0.0f, 0.0f, transZ)); //this is for scroll mouse
view = glm::translate(view, glm::vec3(0.0f, 0.0f, -2.0f));
view = glm::translate(view, glm::vec3(0.0f, 0.0f, -5.0f));
glUseProgram(programT);
int lightColorLoc = glGetUniformLocation(programT, "lightColor");
glUniformMatrix4fv(lightColorLoc, 1, GL_FALSE, glm::value_ptr(glm::vec3(1.0f, 0.0f, 0.0f)));
int objectColorLoc = glGetUniformLocation(programT, "objectColor");
glUniformMatrix4fv(objectColorLoc, 1, GL_FALSE, glm::value_ptr(glm::vec3(1.0f, 0.5f, 0.31f)));
glm::vec3 lightPos(2.0f, 4.0f, 5.0f);
int lightPosLoc = glGetUniformLocation(programT, "lightPos");
glUniformMatrix4fv(lightPosLoc, 1, GL_FALSE, glm::value_ptr(lightPos));
int projectionLocLight = glGetUniformLocation(programT, "projection");
glUniformMatrix4fv(projectionLocLight, 1, GL_FALSE, glm::value_ptr(projection));
glm::mat4 modelLight = glm::mat4(1.0f);
int modelLocLight = glGetUniformLocation(programT, "model");
glUniformMatrix4fv(modelLocLight, 1, GL_FALSE, glm::value_ptr(modelLight));
glm::mat4 viewLight = glm::mat4(1.0f);
int viewLocLight = glGetUniformLocation(programT, "view");
glUniformMatrix4fv(viewLocLight, 1, GL_FALSE, glm::value_ptr(viewLight));
int viewLoc = glGetUniformLocation(programT, "view");
glUniformMatrix4fv(viewLoc, 1, GL_FALSE, glm::value_ptr(view));
int projectionLoc = glGetUniformLocation(programT, "projection");
glUniformMatrix4fv(projectionLoc, 1, GL_FALSE, glm::value_ptr(projection));
model = glm::rotate(model, glm::radians(rotX), glm::vec3(1.0f, 0.0f, 0.0f));
model = glm::rotate(model, glm::radians(rotY), glm::vec3(.0f, 1.0f, .0f));
model = glm::rotate(model, glm::radians(rotZ), glm::vec3(.0f, 0.0f, 1.0f));
int modelLoc = glGetUniformLocation(programT, "model");
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));
glGenerateMipmap(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureArray[0]);
glUniform1i(glGetUniformLocation(programT, "ourTexture"), 0);
glBindVertexArray(VAOArray[0]);
glDrawArrays(GL_TRIANGLES, 0, myMeshes.at(0).Indices.size());
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
glfwSetKeyCallback(window, key_callback);
glfwSetScrollCallback(window, scroll_callback);
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
}
vertex shader used for this project:
#version 330 core
layout (location = 0) in vec3 RealPos;
layout (location = 1) in vec3 vertex_color;
layout (location = 2) in vec2 vertex_textcoord;
layout (location = 3) in vec3 RealNor;
out vec3 vs_pos;
out vec3 vs_color;
out vec2 vs_text;
out vec3 normal;
out vec3 FragPos;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main()
{
vs_pos = RealPos;
vs_color = vertex_color;
vs_text=vertex_textcoord;
FragPos = vec3(model * vec4(vs_pos, 1.0));
mat3 normalMat = mat3(inverse(transpose(model)));
normal = RealNor * normalMat;
gl_Position = projection * view * model * vec4(vs_pos, 1.0);
}
fragment shader used:
#version 330 core
in vec2 vs_text;
in vec3 normal;
in vec3 FragPos;
uniform vec3 lightPos;
out vec4 gl_FragColor;
uniform vec3 objectColor;
uniform vec3 lightColor;
uniform sampler2D ourTexture;
void main()
{
vec3 norm = normalize(normal);
vec3 lightDir = normalize(lightPos - FragPos);
float ambientStrength = 0.1;
vec3 ambient = ambientStrength * lightColor;
float diff = max(abs(dot(norm, lightDir)), 0.0);
vec3 diffuse = diff * lightColor;
vec3 result = (ambient + diffuse) * objectColor;
gl_FragColor = texture(ourTexture, vs_text) * diff;
}
Can someone help me fix this, or maybe guide me toward a solution? Thanks.
Note that matrix * vector == vector * transpose(matrix).
You are calculating normals via normal = RealNor * normalMat;
This is not the same as normalMat * RealNor, so it produces incorrect normals. Swap the expression to normalMat * RealNor and see if that fixes the issue.
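In the question's vertex shader the fix is a one-line change (same names as above):
mat3 normalMat = mat3(inverse(transpose(model))); // unchanged; this equals transpose(inverse(model)) for an invertible matrix
normal = normalMat * RealNor; // matrix on the left, vector on the right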

OpenGL camera movement program vertex shader issue

I'm a beginner learning graphics programming. I'm working on a program for camera movement, and I think there's something wrong with the vertex shader. The program runs with no errors, but the screen is completely blank. Here is the vertex shader I'm using:
#version 330
in vec4 vPosition;
out vec4 vColor;
uniform mat4 model_view;
uniform mat4 projection;
void main()
{
vec4 pos = projection * model_view * vPosition / vPosition.w;
gl_Position = pos;
vColor = vPosition;
}
If I switch the shader back to a basic version:
#version 330
in vec4 vPosition;
out vec4 vColor;
void
main()
{
gl_Position = vPosition;
vColor = vPosition;
}
The program runs and renders a triangle successfully. So, I'm pretty sure the error is with the shader.
The shader program is set up in the initialize function:
void initialize(void)
{
glClearColor(1.0, 1.0, 1.0, 1.0); // white background
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
// Create and initialize a buffer object
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
// Load shaders and use the resulting shader program
GLuint program = InitShader("res/shaders/vshader21.glsl", "res/shaders/fshader21.glsl");
model_view = glGetUniformLocation(program, "model_view");
projection = glGetUniformLocation(program, "projection");
glUseProgram(program);
// Initialize the vertex position attribute from the vertex shader
GLuint loc = glGetAttribLocation(program, "vPosition");
glEnableVertexAttribArray(loc);
glVertexAttribPointer(loc, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
}
The 'points' array passed to glBufferData is as follows:
const int WIDTH = 500, HEIGHT = 500;
/* Positions */
vec4 points[] = {
vec4(0.5,0.5, 1, 1),
vec4(-0.5,0.5, 1, 1),
vec4(0.5,-0.5, 1, 1) ,
vec4(-0.5,-0.5, 1, 1)
};
model_view and projection are of GLuint type and are globals in the main application.
I set the uniform variables (projection, model_view) in the display function.
void display(void)
{
glClear(GL_COLOR_BUFFER_BIT); // clear the window
glPointSize(20.0);
// Projection transformation parameters
GLfloat left = -1.0, right = 1.0;
GLfloat bottom = -1.0, top = 1.0;
GLfloat zNear = 0, zFar = 3.0;
mat4 p = Ortho(left, right, bottom, top, zNear, zFar);
glUniformMatrix4fv(projection, 1, GL_TRUE, p);
vec4 eye(0.0, 0.0, -1.0, 1.0);
vec4 at(0.0, 0.0, 0.0, 1.0);
vec4 up(0.0, 1.0, 0.0, 0.0);
mat4 mv = LookAt(eye, at, up);
glUniformMatrix4fv(model_view, 1, GL_TRUE, mv);
glDrawArrays(GL_TRIANGLES, 0, 3); // draw the points
glFlush();
}
What could possibly be going wrong?
The explicit division by the .w component is superfluous. Change
vec4 pos = projection * model_view * vPosition / vPosition.w;
to
vec4 pos = projection * model_view * vPosition;
Note that the perspective divide is performed automatically after clipping.
Since the vector is multiplied by the matrices from the right, you do not have to transpose them. Change
glUniformMatrix4fv(projection, 1, GL_TRUE, p);
glUniformMatrix4fv(model_view, 1, GL_TRUE, mv);
to
glUniformMatrix4fv(projection, 1, GL_FALSE, p);
glUniformMatrix4fv(model_view, 1, GL_FALSE, mv);
See GLSL Programming/Vector and Matrix Operations
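With the superfluous divide removed, the question's vertex shader would read:
#version 330
in vec4 vPosition;
out vec4 vColor;
uniform mat4 model_view;
uniform mat4 projection;
void main()
{
gl_Position = projection * model_view * vPosition; // the perspective divide happens automatically after clipping
vColor = vPosition;
}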

Additional sprite UVs when rendering sprites in OpenGL

I have sprites in an atlas rendering properly in OpenGL with the code below. My problem comes from trying to add a secondary "texture" to sample from so I can do some multitexturing magic. I think the problem is that the second sprite is also in an atlas and is affected by the UV offsets baked into the VAO, so I can't pick the right UVs to get the exact point I need. I've tried adding some calculations to reverse-engineer the correct UVs for this sprite inside the other texture (you can see my attempt at the bottom), but this doesn't seem to work. What would be the best approach to do this?
Preparation:
glm::vec4 fRect;
fRect.x = static_cast<float>(iRect.x) / textureWidth;
fRect.y = static_cast<float>(iRect.y) / textureHeight;
fRect.z = (static_cast<float>(iRect.z) / textureWidth) + fRect.x;
fRect.w = (static_cast<float>(iRect.w) / textureHeight) + fRect.y;
// Configure VAO/VBO
GLuint VBO;
GLfloat vertices[] = {
// Pos // Tex
0.0f, 1.0f, fRect.x, fRect.w,
1.0f, 0.0f, fRect.z, fRect.y,
0.0f, 0.0f, fRect.x, fRect.y,
0.0f, 1.0f, fRect.x, fRect.w,
1.0f, 1.0f, fRect.z, fRect.w,
1.0f, 0.0f, fRect.z, fRect.y
};
GLuint VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindVertexArray(VAO);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (GLvoid*)0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
Rendering:
// Prepare transformations
glm::mat4 modelMatrix;
modelMatrix = glm::translate(modelMatrix, position);
// modelMatrix = glm::translate(modelMatrix, -glm::vec3(spriteOffset.x, spriteOffset.y, 0.0f));
modelMatrix = glm::rotate(modelMatrix, rotate.x, glm::vec3(1.0f, 0.0f, 0.0f));
// modelMatrix = glm::rotate(modelMatrix, rotate.y, glm::vec3(0.0f, 1.0f, 0.0f));
modelMatrix = glm::rotate(modelMatrix, rotate.z, glm::vec3(0.0f, 0.0f, 1.0f));
modelMatrix = glm::translate(modelMatrix, glm::vec3(-spriteOffset.x, -spriteOffset.y, 0.0f));
modelMatrix = glm::scale(modelMatrix, glm::vec3(size, 1.0f));
//(...)
glUniformMatrix4fv(modelMatrixLocation, 1, false, glm::value_ptr( modelMatrix ) );
glUniformMatrix4fv(viewMatrixLocation, 1, false, glm::value_ptr(viewMatrix));
glUniformMatrix4fv(projectionMatrixLocation, 1, false, glm::value_ptr(projectionMatrix));
ASSERT( !HasOpenGLErrors(), "OpenGL error!" );
glUniform3f(multColorLocation, multColour.x, multColour.y, multColour.z );
glUniform3f(addColorLocation, addColour.x, addColour.y, addColour.z );
ASSERT( !HasOpenGLErrors(), "OpenGL error!" );
// Bind Texture, etc
glDrawArrays(GL_TRIANGLES, 0, 6);
Vertex shader:
#version 330 core
layout (location = 0) in vec4 vertex; // <vec2 position, vec2 texCoords>
out vec2 TexCoords;
uniform mat4 model_matrix, view_matrix, projection_matrix;
void main()
{
TexCoords = vec2(vertex.z, 1.0 - vertex.w);
gl_Position = projection_matrix*view_matrix*model_matrix*vec4(vertex.xyz,1);
}
Fragment shader:
#version 330 core
in vec2 TexCoords;
out vec4 color;
uniform sampler2D texture;
uniform vec3 multColour;
uniform vec3 addColour;
void main()
{
vec4 minColour = vec4(0.0, 0.0, 0.0, 0.0);
vec4 maxColour = vec4(1.0, 1.0, 1.0, 1.0);
vec4 texColour = texture(texture, TexCoords);
if(texColour.a < 0.01)
discard;
color = clamp(vec4(multColour, 1.0) * texColour + vec4(addColour,0.0), minColour, maxColour);
}
Failed attempt at reading the right UVs in fragment shader:
float normU = (TexCoords.x - currUVs.x) / (currUVs.z - currUVs.x);
float icU = mix(icUVs.x, icUVs.z, normU);
float normV = (TexCoords.y - currUVs.y) / (currUVs.w - currUVs.y);
float icV = mix(icUVs.y, icUVs.w, normV);
vec2 UV = vec2(icU, icV );
vec4 initial = texture(initialColor, UV);
Where currUVs are the values of fRect passed in the VAO above and the icUVs are the UV bounds (min and max values) for the second sprite within the atlas texture.
So far it seems like all sprites with no offset applied render properly, but if I pass any kind of spriteOffset into the rendering, it renders incorrectly.
How can I solve this? Is there a way of applying the VAO rects in the shaders and then be able to get the second sprite correctly?

Visual issue using Framebuffer Object as texture

My OpenGL engine draws a given scene into a Framebuffer Object, then uses its color attachment as a texture. It is then put on a square in the viewport.
The problem is that I see a strange visual artifact:
The square is built with
glm::vec2 square[4];
square[0] = glm::vec2(0.f, 0.f);
square[1] = glm::vec2(engWidth, 0.f);
square[2] = glm::vec2(0.f, engHeight);
square[3] = glm::vec2(engWidth, engHeight);
glm::vec2 texcoords[4];
texcoords[0] = glm::vec2(0.f, 0.f);
texcoords[1] = glm::vec2(1.f, 0.f);
texcoords[2] = glm::vec2(0.f, 1.f);
texcoords[3] = glm::vec2(1.f, 1.f);
glGenBuffers(2, Buffer2D);
glBindBuffer(GL_ARRAY_BUFFER, Buffer2D[0]);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(glm::vec2), square, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, Buffer2D[1]);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(glm::vec2), texcoords, GL_STATIC_DRAW);
orthographicProj = glm::ortho(0.f, (float)engWidth, 0.f, (float)engHeight, -1.f, 1.f);
Where engWidth and engHeight are the actual window size.
Then the frame is rendered with
shaderProgram->setMatrix("Clip", orthographicProj);
glBindBuffer(GL_ARRAY_BUFFER, Buffer2D[0]);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, Buffer2D[1]);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glEnableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
Vertex and fragment shaders for the final phase are simple
const char *frontVertexShader = R"(
#version 440 core
uniform mat4 Clip;
layout(location = 0) in vec2 Position;
layout(location = 1) in vec2 TexCoord;
out vec2 Tex_Coord;
void main() {
gl_Position = Clip * vec4(Position, 0.0, 1.0);
Tex_Coord = TexCoord;
}
)";
const char *frontFragmentShader = R"(
#version 440 core
in vec2 Tex_Coord;
uniform sampler2D sampler;
out vec4 Fragment;
void main() {
Fragment = texture(sampler, Tex_Coord);
}
)";
I tried using triangles instead of a triangle strip, but with the same result.
Any idea? I can post more code if needed.
I don't know if this will help, but for what it's worth this is what I do for a full screen quad:
(Note the vertex has no texture component - it's just -1 to 1).
v[0].x = -1.f; v[0].y = -1.f; v[0].z = 0.f;
v[1].x = -1.f; v[1].y = 1.f; v[1].z = 0.f;
v[2].x = 1.f; v[2].y = -1.f; v[2].z = 0.f;
v[3].x = 1.f; v[3].y = 1.f; v[3].z = 0.f;
The vertex shader is simply:
void main()
{
attrib_Fragment_Texture = attrib_Position.xy * 0.5 + 0.5;
gl_Position = vec4(attrib_Position.xy, 0.0, 1.0);
}
With the following pixel shader:
void main(void)
{
Out_Colour = texture2D(Map_Diffuse, attrib_Fragment_Texture);
}
You don't need an orthographic projection matrix.
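For reference, a self-contained version of that full-screen pass could look like the sketch below (assuming GLSL 3.30 core; the attribute, varying, sampler, and output names are taken from the snippets above, the rest is filled in):
// vertex shader
#version 330 core
layout(location = 0) in vec3 attrib_Position; // full-screen quad vertices in the -1..1 range
out vec2 attrib_Fragment_Texture;
void main()
{
attrib_Fragment_Texture = attrib_Position.xy * 0.5 + 0.5; // map NDC to 0..1 texture coordinates
gl_Position = vec4(attrib_Position.xy, 0.0, 1.0);
}
// fragment shader
#version 330 core
in vec2 attrib_Fragment_Texture;
uniform sampler2D Map_Diffuse; // the FBO colour attachment
out vec4 Out_Colour;
void main()
{
Out_Colour = texture(Map_Diffuse, attrib_Fragment_Texture);
}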
Found it! I forgot to call glClear on the back buffer, and for some strange reason only some pixels presented the issue.

My shaders stopped working

I seem to have broken the shaders in my program; here is their code:
vertex shader
#version 330 core
uniform mat4 camera;
uniform mat4 model;
layout(location = 0) in vec3 vert;
layout(location = 1) in vec3 vertNormal;
out vec3 fragVert;
out vec3 fragNormal;
void main() {
// Pass some variables to the fragment shader
fragNormal = vertNormal;
fragVert = vert;
// Apply all matrix transformations to vert
gl_Position = camera * model * vec4(vert, 1);
}
fragment shader
#version 150 core
uniform mat4 model;
uniform vec3 cameraPosition;
// material settings
uniform float materialShininess;
uniform vec3 materialSpecularColor;
uniform vec3 materialColor;
uniform struct Light {
vec3 position;
vec3 intensities; //a.k.a the color of the light
float attenuation;
float ambientCoefficient;
} light;
in vec3 fragNormal;
in vec3 fragVert;
out vec4 finalColor;
void main() {
vec3 normal = normalize(transpose(inverse(mat3(model))) * fragNormal);
vec3 surfacePos = vec3(model * vec4(fragVert, 1));
vec4 surfaceColor = vec4(materialColor, 1);
vec3 surfaceToLight = normalize(light.position - surfacePos);
vec3 surfaceToCamera = normalize(cameraPosition - surfacePos);
//ambient
vec3 ambient = light.ambientCoefficient * surfaceColor.rgb * light.intensities;
//diffuse
float diffuseCoefficient = max(0.0, dot(normal, surfaceToLight));
vec3 diffuse = diffuseCoefficient * surfaceColor.rgb * light.intensities;
//specular
float specularCoefficient = 0.0;
if(diffuseCoefficient > 0.0)
specularCoefficient = pow(max(0.0, dot(surfaceToCamera, reflect(-surfaceToLight, normal))), materialShininess);
vec3 specular = specularCoefficient * materialSpecularColor * light.intensities;
//attenuation
float distanceToLight = length(light.position - surfacePos);
float attenuation = 1.0 / (1.0 + light.attenuation * pow(distanceToLight, 2));
//linear color (color before gamma correction)
vec3 linearColor = ambient + attenuation*(diffuse + specular);
//final color (after gamma correction)
vec3 gamma = vec3(1.0/2.2);
finalColor = vec4(pow(linearColor, gamma), surfaceColor.a);
}
I have an asset that I am loading from an obj file, then drawing it like such:
void OpenGLView::run()
{
initializeAndSetupWindow(WINDOW_WIDTH, WINDOW_HEIGHT, "PhongBunny");
glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
loadBunnyAsset();
AssetInstance bunny1;
bunny1.asset = bunny;
bunny1.position = glm::vec3(2.0f, 2.0f, 2.0f);
bunny1.scale = glm::vec3(1.0f, 1.0f, 1.0f);
do{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
loadUniforms(bunny1);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, bunny.vertexBuffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, bunny.normalBuffer);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bunny.elementBuffer);
glDrawElements(GL_TRIANGLES, bunny.elementsSize, GL_UNSIGNED_INT, (void*)0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glfwSwapBuffers(window);
glfwPollEvents();
} while (!glfwWindowShouldClose(window));
glfwDestroyWindow(window);
glfwTerminate();
}
with this being the function to load uniforms:
void OpenGLView::loadUniforms(AssetInstance assetInstance)
{
Asset* asset = &assetInstance.asset;
glUseProgram(asset->shaderProgramID);
glm::mat4 Projection = glm::perspective(45.0f, 4.0f / 3.0f, 0.1f, 1000.0f);
glm::mat4 camera = Projection * getViewMatrix();
glm::mat4 model = translate(assetInstance.position) * scale(assetInstance.position);
GLuint cameraID = glGetUniformLocation(asset->shaderProgramID, "camera");
GLuint modelID = glGetUniformLocation(asset->shaderProgramID, "model");
GLuint cameraPositionID = glGetUniformLocation(asset->shaderProgramID, "cameraPosition");
GLuint lightPositionID = glGetUniformLocation(asset->shaderProgramID, "light.position");
GLuint lightIntensitiesID = glGetUniformLocation(asset->shaderProgramID, "light.intensities");
GLuint lightAttenuationID = glGetUniformLocation(asset->shaderProgramID, "light.attenuation");
GLuint lightAmbientCoefficientID = glGetUniformLocation(asset->shaderProgramID, "light.ambientCoefficient");
GLuint materialColorID = glGetUniformLocation(asset->shaderProgramID, "materialColor");
GLuint materialShininessID = glGetUniformLocation(asset->shaderProgramID, "materialShininess");
GLuint materialSpecularColorID = glGetUniformLocation(asset->shaderProgramID, "materialSpecularColor");
glUniformMatrix4fv(cameraID, 1, GL_FALSE, &camera[0][0]);
glUniformMatrix4fv(modelID, 1, GL_FALSE, &model[0][0]);
glUniform3fv(cameraPositionID, 1, &cameraPosition[0]);
glUniform3fv(lightPositionID, 1, &light.position[0]);
glUniform3fv(lightIntensitiesID, 1, &light.intensities[0]);
glUniform1f(lightAttenuationID, light.attenuation);
glUniform1f(lightAmbientCoefficientID, light.ambientCoefficient);
glUniform3fv(materialColorID, 1, &assetInstance.materialColor[0]);
glUniform1f(materialShininessID, assetInstance.materialShininess);
glUniform3fv(materialSpecularColorID, 1, &assetInstance.materialSpecularColor[0]);
}
and some setup being done here:
OpenGLView::OpenGLView()
{
light.position = glm::vec3(0.0f, 7.0f, 3.0f);
light.intensities = glm::vec3(0.3f, 0.3, 0.3f);
light.attenuation = 0.3f;
light.ambientCoefficient = 0.005f;
cameraPosition = glm::vec3(5.0f, 3.0f, 8.0f);
}
For a while I had bunny1's position set to (0, 0, 0), which caused it to not be drawn at all; I can't figure out why that is. Then when I changed it to (1, 1, 1) it started to draw, but now my key_callback function (which rotates and scales the bunny) has stopped working. Also, here are my translate and scale functions:
glm::mat4 OpenGLView::translate(glm::vec3 position)
{
return glm::translate(glm::mat4(), position);
}
glm::mat4 OpenGLView::scale(glm::vec3 size)
{
return glm::scale(glm::mat4(), size);
}
I also can't figure out why changing bunny1.position seems to scale the bunny instead of translating its position.
The reason your bunny's scale changes when you change bunny1.position is that you scale the bunny by bunny1.position:
glm::mat4 model = translate(assetInstance.position) * scale(assetInstance.position);
That is probably also why the bunny disappears when you set its position to (0, 0, 0), since you then scale it by zero.
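A likely fix is to scale by the scale field that the question already sets on the instance (bunny1.scale above):
// translate by the instance's position, scale by its size
glm::mat4 model = translate(assetInstance.position) * scale(assetInstance.scale);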