OpenGL diffuse light

I ran into some problems while implementing diffuse lighting.
The correct result should look like the picture on the left: diffuse light picture
The right side is my incorrect result.
The problems are:
1. There is no lighting effect at first; I have to rotate the object by some amount before the lighting effect appears.
2. The object has some triangles mixed into it. (I have already checked the part that reads the files (.obj & .mtl); it is correct.)
3. I only turn on the diffuse light, but the lighting effect looks like ambient light.
My light source position is (0, 0, 5) and my eye position is (0, 0, 0).
My vertex shader:
attribute vec4 vertexPosition;
attribute vec3 vertexNormal_objectSpace;
varying vec4 vv4color;
uniform mat4 mvp; //model, viewing, projection transformation matrix
uniform mat4 NormalMatrix;
struct LightSourceParameters
{
vec4 ambient;
vec4 diffuse;
vec4 specular;
vec4 position;
vec4 halfVector;
vec3 spotDirection;
float spotExponent;
float spotCutoff; // (range: [0.0,90.0], 180.0)
float spotCosCutoff; // (range: [1.0,0.0],-1.0)
float constantAttenuation;
float linearAttenuation;
float quadraticAttenuation;
};
struct MaterialParameters
{
vec4 ambient;
vec4 diffuse;
vec4 specular;
float shininess;
};
uniform MaterialParameters Material;
uniform LightSourceParameters LightSource[3]; //because I have to implement three light sources(directional, point, specular light)
void main()
{
vec3 normal, TransformedNormal, lightDirection;
vec4 ambient, diffuse;
float NdotL;
ambient = LightSource[0].ambient * Material.ambient;
TransformedNormal = vec3(vec4(vertexNormal_objectSpace, 0.0) * NormalMatrix);
normal = normalize(TransformedNormal);
lightDirection = normalize(vec3(LightSource[0].position));
NdotL = max(dot(normal, lightDirection), 0.0);
diffuse = LightSource[0].diffuse * Material.diffuse * NdotL;
vv4color = ambient + diffuse; //vv4color pass to fragment shader
gl_Position = mvp * vertexPosition;
}
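For reference, here is a minimal sketch of the same vertex shader written with the usual column-vector convention (matrix on the left of the vector) and with the light direction computed per vertex for a point light. It assumes the matrices are uploaded in the standard column-major layout, that LightSource[0].position is already given in eye space, and it adds a modelView uniform (view * model) that the original shader does not have:
attribute vec4 vertexPosition;
attribute vec3 vertexNormal_objectSpace;
varying vec4 vv4color;
uniform mat4 mvp;
uniform mat4 modelView; //assumed extra uniform: view * model
uniform mat4 NormalMatrix; //transpose(inverse(modelView))
//LightSourceParameters and MaterialParameters structs unchanged from above
uniform MaterialParameters Material;
uniform LightSourceParameters LightSource[3];
void main()
{
vec3 normal = normalize(vec3(NormalMatrix * vec4(vertexNormal_objectSpace, 0.0))); //matrix * column vector
vec3 positionEye = vec3(modelView * vertexPosition); //vertex position in eye space
vec3 lightDirection = normalize(vec3(LightSource[0].position) - positionEye); //surface-to-light direction for a point light
float NdotL = max(dot(normal, lightDirection), 0.0);
vv4color = LightSource[0].ambient * Material.ambient + LightSource[0].diffuse * Material.diffuse * NdotL;
gl_Position = mvp * vertexPosition;
}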
My display function:
void onDisplay(void)
{
Matrix4 MVP, modelView, NormalMatrix;
int i=0;
// clear canvas
glClearColor(0.5f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnableVertexAttribArray(iLocPosition);
glEnableVertexAttribArray(iLocNormal);
geo_rotate = geo_rotate_x * geo_rotate_y * geo_rotate_z;
Geo = geo_rotate * geo_scale * geo_trans;
modelView = View * Geo;
modelView = modelView.transpose(); //row-major -> column-major
modelView = modelView.invert(); //normal transformation(transpose after inverse)
NormalMatrix = modelView.transpose();
MVP = Proj * View * Geo * Norm;
MVP = MVP.transpose();
glUniformMatrix3fv(iLocEyePosition, 1, GL_FALSE, &eye[0]);
glUniformMatrix4fv(iLocNormalMatrix, 1, GL_FALSE, &NormalMatrix[0]); //bind uniform matrix to shader
glUniformMatrix4fv(iLocMVP, 1, GL_FALSE, &MVP[0]);
group = OBJ->groups;
for(i=0; i<OBJ->numgroups-1; i++)
{
//pass model material value to the shader
glUniform4fv(iLocMAmbient, 1, material[i].ambient);
glUniform4fv(iLocMDiffuse, 1, material[i].diffuse);
glUniform4fv(iLocMSpecular, 1, material[i].specular);
glUniform1f(iLocMShininess, material[i].shininess);
glVertexAttribPointer(iLocPosition, 3, GL_FLOAT, GL_FALSE, 0, V[i]); //bind array pointers to shader
glVertexAttribPointer(iLocNormal, 3, GL_FLOAT, GL_FALSE, 0, N[i]);
glDrawArrays(GL_TRIANGLES, 0, group->numtriangles*3); //draw the array we just bound
group = group->next;
}
glutSwapBuffers();
}
Thank you all.

Related

GLSL Geometry Shader causes "No vertex shader bound at draw" in RenderDoc

I am trying to get a basic geometry shader to work, but I am completely failing. After checking numerous resources, I still cannot find a solution to my problem.
Here is my code for my vertex, geometry, and fragment shaders.
Vertex Shader:
#version 330 core
// Vertex Shader Inputs
layout (location = 0) in vec3 Pos;
layout (location = 1) in vec3 Norm;
layout (location = 2) in vec3 Color;
// Vertex to Fragment Shader Outputs
out DATA {
vec3 vsPos;
vec3 vsNorm;
vec4 vsColor;
} data_out;
// Main.cpp Imports
uniform mat4 camMatrix; // viewProjection Matrix
uniform mat4 model;
void main()
{
vec3 vsPos = vec3(model * vec4(Pos, 1.0f));
vec3 vsNorm = mat3(transpose(inverse(model))) * Norm; // Normal vector correction
vec4 vsColor = vec4(Color, 1.0f);
gl_Position = camMatrix * vec4(vsPos, 1.0f);
}
Geometry Shader:
#version 330 core
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;
out vec3 gsPos;
out vec3 gsNorm;
out vec3 gsColor;
in DATA {
vec3 vsPos;
vec3 vsNorm;
vec4 vsColor;
} data_in[];
uniform mat4 camMatrix;
void main()
{
for (int i=0; i<3; i++)
{
gsPos = data_in[i].vsPos;
gsNorm = data_in[i].vsNorm;
gsColor = data_in[i].vsColor;
gl_Position = camMatrix * vec4(data_in[i].vsPos, 1.0f);
EmitVertex();
}
EndPrimitive();
}
Fragment Shader:
#version 330 core
out vec4 FragColor;
// Fragment Shader Inputs
in vec3 gsPos;
in vec3 gsNorm;
in vec4 gsColor;
// Fragment Shader Uniforms
uniform sampler2D diffuse0;
uniform sampler2D specular0;
uniform vec4 lightColor;
uniform vec3 lightPos;
uniform vec3 camPos;
vec4 pointLight()
{
vec3 lightVec = (lightPos - vsPos);
// intensity of light with respect to distance
float dist = length(lightVec);
float a = 0.7;
float b = 0.4;
float c = 1.0;
float inten = 1.0f / (a * dist * dist + b * dist + c);
// ambient lighting
float ambient = 0.75f;
// diffuse lighting
vec3 fsNorm = normalize(gsNorm);
vec3 lightDirection = normalize(lightVec);
float diffuse = max(dot(fsNorm, lightDirection), 0.0f);
// specular lighting
float specular = 0.0f;
if (diffuse != 0.0f)
{
float specularLight = 0.50f;
vec3 viewDirection = normalize(gsNorm - gsPos);
vec3 halfwayVec = normalize(viewDirection + lightDirection);
float specAmount = pow(max(dot(fsNorm, halfwayVec), 0.0f), 32);
specular = specAmount * specularLight;
};
return inten * (gsColor * (diffuse + ambient) + gsColor * specular) * lightColor;
}
void main()
{// outputs final color
FragColor = pointLight();
}
My mesh generation function:
void genMesh()
{
VAO.Bind();
VBO VBO(vtx);
EBO EBO(idx);
VAO.LinkAttrib(VBO, 0, 3, GL_FLOAT, sizeof(Vertex), (void*)0);
VAO.LinkAttrib(VBO, 1, 3, GL_FLOAT, sizeof(Vertex), (void*)(3 * sizeof(float)));
VAO.LinkAttrib(VBO, 2, 4, GL_FLOAT, sizeof(Vertex), (void*)(6 * sizeof(float)));
VAO.Unbind();
VBO.Unbind();
EBO.Unbind();
};
My mesh draw function:
void Mesh::Draw(Shader& shader, Camera& camera)
{
shader.Activate();
VAO.Bind();
// Take care of the camera Matrix
glUniform3f(glGetUniformLocation(shader.ID, "camPos"),
camera.Position.x,
camera.Position.y,
camera.Position.z);
camera.Matrix(shader, "camMatrix");
// Draw the actual mesh
glDrawElements(GL_TRIANGLES, idx.size() * sizeof(GLuint), GL_UNSIGNED_INT, 0);
};
I call my mesh generation function outside of the main while loop, then I draw the mesh in my main while loop.
Debugging my program through RenderDoc gives me the error, "No vertex shader bound at draw!" Without the geometry shader (keeping everything else roughly the same), I do not get any errors in RenderDoc. I tried updating my graphics drivers, but I am just getting the same error. Please help me, I feel like I am losing my mind.
In the fragment shader, gsColor is declared as a vec4 variable, but in the geometry shader it is declared as a vec3 variable.
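Under that diagnosis, a minimal sketch of the geometry shader with the types made consistent: gsColor is declared as vec4 to match both the interface block and the fragment shader input, everything else is unchanged (and the sketch still assumes the vertex shader actually writes data_out):
#version 330 core
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;
out vec3 gsPos;
out vec3 gsNorm;
out vec4 gsColor; //now matches the vec4 input in the fragment shader
in DATA {
vec3 vsPos;
vec3 vsNorm;
vec4 vsColor;
} data_in[];
uniform mat4 camMatrix;
void main()
{
for (int i = 0; i < 3; i++)
{
gsPos = data_in[i].vsPos;
gsNorm = data_in[i].vsNorm;
gsColor = data_in[i].vsColor; //vec4 assigned to vec4
gl_Position = camMatrix * vec4(data_in[i].vsPos, 1.0f);
EmitVertex();
}
EndPrimitive();
}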

SSAO & Shadow mapping | Shadows do not work with the SSAO

The SSAO in our engine seems to be working; however, I cannot get the SSAO to work with shadow mapping. Here is a screenshot of the bug I am currently having when shadows are applied...
With shadows applied
But also, depending on the camera view and camera position, random shadows sometimes appear...
Random Shadows depending on camera view and position
Here is the gbuffer vertex shader..
#version 330 core
layout (location = 0) in vec3 positions;
layout (location = 1) in vec2 texCoords;
layout (location = 2) in vec3 normals;
out vec3 FragPos;
out vec3 ShadowFragPos;
out vec2 TexCoords;
out vec3 Normal;
uniform mat4 model;
uniform mat4 view;
uniform mat4 proj;
void main()
{
vec4 viewPos = view * model * vec4(positions, 1.0);
FragPos = viewPos.xyz;
TexCoords = texCoords;
mat3 normalMatrix = transpose(inverse(mat3(view * model)));
Normal = normalMatrix * normals;
gl_Position = proj * viewPos;
}
Here is the lighting shader..
#version 330 core
out vec4 FragColor;
in vec2 TexCoords;
uniform sampler2D gPosition;
uniform sampler2D gNormal;
uniform sampler2D gAlbedoSpec;
uniform sampler2D gShadowmap;
uniform sampler2D gSsao;
uniform vec3 cameraPos;
uniform mat4 lightSpaceMatrix;
vec3 Normal;
vec3 FragPos;
uniform vec3 lightPos;
float calculate_shadows(vec4 light_space_pos)
{
// perform perspective divide
vec3 projCoords = light_space_pos.xyz / light_space_pos.w;
// transform to [0,1] range
projCoords = projCoords * 0.5 + 0.5;
// get closest depth value from light's perspective (using [0,1] range fragPosLight as coords)
float closestDepth = texture(gShadowmap, projCoords.xy).r;
// get depth of current fragment from light's perspective
float currentDepth = projCoords.z;
// check whether current frag pos is in shadow
vec3 lightDir = normalize(vec3(2.0f, 4.0f, 1.0f) - FragPos);
float bias = max(0.05 * (1.0 - dot(Normal, lightDir)), 0.005);
float shadow = 0.0;
vec2 texelSize = 1.0 / textureSize(gShadowmap, 0);
// 8x8 kernel PCF
float x;
float y;
for (y = -3.5; y <= 3.5 ; y += 1.0)
{
for (x = -3.5; x <= 3.5 ; x += 1.0)
{
float pcfDepth = texture(gShadowmap, projCoords.xy + vec2(x, y) * texelSize).r;
shadow += currentDepth - bias > pcfDepth ? 1.0 : 0.0;
}
}
shadow /= 64.0;
return shadow;
}
void main(void)
{
FragPos = texture(gPosition, TexCoords).rgb;
Normal = texture(gNormal, TexCoords).rgb;
vec3 Diffuse = texture(gAlbedoSpec, TexCoords).rgb;
float Specular = texture(gAlbedoSpec, TexCoords).a;
float AmbientOcclusion = texture(gSsao, TexCoords).r;
vec3 lighting = vec3(0.3 * Diffuse * AmbientOcclusion);
vec3 viewDir = normalize(-FragPos);
vec3 lightDir = normalize(lightPos - FragPos);
vec3 diffuse = max(dot(Normal, lightDir), 0.0) * Diffuse * vec3(1.0f, 0.5f, 0.3f);
vec3 halfwayDir = normalize(lightDir + viewDir);
float spec = pow(max(dot(Normal, halfwayDir), 0.0), 8.0);
vec3 specular = vec3(1.0f, 0.5f, 0.3f) * spec * Specular;
float shadow = calculate_shadows(lightSpaceMatrix * vec4(FragPos, 1.0));
lighting += ((1.0 - shadow) * (diffuse + specular));
FragColor = vec4(lighting, 1.0f);
}
The textures are bound in the light pass as follows..
// bind the positions texture and store in the first texture slot/unit
glActiveTexture(GL_TEXTURE0); // texture unit 0
glBindTexture(GL_TEXTURE_2D, gbuffer.gPositions); // geometry positions
// bind the normals texture and store in the second texture slot/unit
glActiveTexture(GL_TEXTURE1); // texture unit 1
glBindTexture(GL_TEXTURE_2D, gbuffer.gNormals); // geometry normals
// bind the albedo & specular texture and store in the third texture slot/unit
glActiveTexture(GL_TEXTURE2); // texture unit 2
glBindTexture(GL_TEXTURE_2D, gbuffer.gAlbedoSpec); // geometry albedospec
// bind the shadow map texture and store in the fourth texture slot/unit
glActiveTexture(GL_TEXTURE3); // texture unit 3
glBindTexture(GL_TEXTURE_2D, gbuffer.gShadowmap); // shadow map
// bind the SSAO texture and store in the fifth texture slot/unit
glActiveTexture(GL_TEXTURE4); // texture unit 4
glBindTexture(GL_TEXTURE_2D, gbuffer.ssaoColorBuffer); // SSAO occlusion
Finally, here is the calculation of the lightSpaceMatrix..
light_projection = glm::ortho(-10.0f, 10.0f, -10.0f, 10.0f, 1.0f, 7.5f);
light_view = glm::lookAt(glm::vec3(0.0f, 4.0f, 5.0f), glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 1.0f, 0.0f));
light_space_matrix = light_projection * light_view;
Any ideas why this could be happening? How do I get shadows to work with SSAO?
Any help is much appreciated.
FragPos is a camera view space position.
light_space_pos, the input parameter to calculate_shadows has to be a clip space coordinate, as seen from the light source.
This means that when you do
float shadow = calculate_shadows(lightSpaceMatrix * vec4(FragPos, 1.0));
lightSpaceMatrix has to be the transformation from the camera view space to the clip space of the light source.
To do so, you have to do 3 transformations:
camera view space to world space. This can be done with the inverse view matrix.
world space to light space, which is the transformation by light_view.
light view space to light clip space, is the transformation by light_projection.
So the setting of light_space_matrix = light_projection * light_view; is not sufficient; it has to be
light_space_matrix = light_projection * light_view * glm::inverse(view);
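Equivalently, the view-space-to-world-space step can be done in the lighting shader instead, at the cost of one extra uniform. A sketch, assuming a new invView uniform is set from the application to glm::inverse(view), while lightSpaceMatrix stays light_projection * light_view:
uniform mat4 invView; //assumed new uniform: inverse of the camera view matrix
//view-space fragment position -> light clip space, following the three steps above
vec4 viewToLightClip(vec3 fragPosView)
{
vec4 worldPos = invView * vec4(fragPosView, 1.0); //1. camera view space -> world space
return lightSpaceMatrix * worldPos; //2. + 3. world space -> light view space -> light clip space
}
//usage in main(): float shadow = calculate_shadows(viewToLightClip(FragPos));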

Stuck on rendering lights after binding FBO using deferred rendering in OpenGL

First of all, I am sorry for this long post after trying to make this work the whole day.
I have many questions about this, especially because I use inheritance in C++ to build the lights.
I use the directional light as my core light model, since I can give it the light direction and it will calculate the lighting; on top of it I build the point light, where I just calculate the vector from the light to the fragment position; and finally, for the spot light I use the point light with the addition of a cut-off angle (just ignore whatever is outside the cone). I have tested the lights and they work fine with forward rendering, but now I would like to change my lighting model to PBR (basically just change how I calculate light in the directional light) and move to deferred rendering.
Today I started working on deferred rendering and I can get the position, texture, normal and depth buffers; however, I have a problem when trying to render the lights.
That was the first problem. The second: since each type of light has its own shader and I build them using polymorphism, can I loop through each light in C++ and have each light rendered, or is there another way to solve this in the shaders?
The prototypes of the lights are:
EDIT: I fixed a small issue where I was transforming the render quad with the VP projection, but I still cannot draw anything and I have no idea if the FBOs are working correctly now. The Nvidia OpenGL debugger just crashes.
Light(glm::vec3& color, float intensity, float ambient, ShaderProgram& lightShader);
DirectionalLight(glm::vec3& color = glm::vec3(1.0f, 1.0f, 1.0f), glm::vec3& position = glm::vec3(0.0f, 0.0f, 0.0f), float intensity = 1.0f, float ambient = 0.0f, ShaderProgram& lightShader = ShaderProgram("Directional Light"));
PointLight(glm::vec3& color = glm::vec3(1.0f, 1.0f, 1.0f), glm::vec3& position = glm::vec3(0.0f, 0.0f, 0.0f), float intensity = 1.0f, float ambient = 0.0f, LightAttenuation& lightAttenuation = LightAttenuation(), ShaderProgram& lightShader = ShaderProgram("Point Light"));
SpotLight(glm::vec3& color = glm::vec3(1.0f, 1.0f, 1.0f), glm::vec3& position = glm::vec3(0.0f, 0.0f, 0.0f),
My render path looks like this.
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
defferedShader_.startProgram();
defferedShader_.setUniformMat4("VP", camera.getVP());
glBindFramebuffer(GL_FRAMEBUFFER, deferredFbo);
//the scene is small and does not need culling.
for (auto* mesh : world.getMeshes()) {
//mesh->draw(light->getLightShader());
//mesh->draw(activeLight_->getLightShader());
mesh->draw(defferedShader_);
drawCallCounter += mesh->getMeshObjectSize();
}
glBindFramebuffer(GL_FRAMEBUFFER, 0);
defferedShader_.stopProgram();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, positionFbo);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, normalFbo);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, albedoFbo);
//This is where I got stuck. I would like to make the directional light work, then test the other lights, then test the whole program with more than one light
//for (auto* light : world.getLights()) {
// //glEnable(GL_CULL_FACE);
// //glCullFace(GL_FRONT);
glDisable(GL_DEPTH_TEST);
activeLight_->getLightShader().startProgram();
activeLight_->getLightShader().setUniformMat4("VP", camera.getVP());
activeLight_->getLightShader().setUniformVec3("eyePosition", camera.getCameraPosition());
//activeLight_->getLightShader();
RenderQuad();
activeLight_->getLightShader().stopProgram();
//}
The shader code that I started building is (PS: I removed the shadows for now):
Vertex Shader
#version 410 core
#include "../Global/GlobalShader.inc"
#include "../Global/GlobalMesh.inc"
out vec3 Position;
out vec2 TexCoord;
//out vec4 ShadowCoord;
//uniform mat4 ShadowMatrix;
void main() {
Position = position;
TexCoord = texCoord;
//ShadowCoord = ShadowMatrix * vec4(position, 1.0);
gl_Position = VP * vec4(position, 1.0);
}
Fragment shader
One thing that is bothering me is that I cannot set the uniform values for gPosition, gNormal and gAlbedoSpec even though I use them, and no matter what I change in the shader the output stays the same.
#version 410 core
#include "../Global/GlobalShader.inc"
#include "../Global/GlobalMesh.inc"
#include "../Global/GlobalLight.inc"
//#include "../Global/ShadowSampling.inc"
in vec3 Position;
in vec2 TexCoord;
//in vec4 ShadowCoord;
uniform sampler2D gPosition;
uniform sampler2D gNormal;
uniform sampler2D gAlbedoSpec;
float specularStrength = 32.0f; // to be implemented
out vec4 gl_FragColor;
void main() {
//vec4 lightning = vec4(0.0f);
////vec4 shadowMapping = vec4(0.0f);
//
vec3 FragPos = texture(gPosition, TexCoord).rgb;
vec3 Normal = texture(gNormal, TexCoord).rgb;
vec3 Diffuse = texture(gAlbedoSpec, TexCoord).rgb;
float Specular = texture(gAlbedoSpec, TexCoord).a;
//vec3 Diffuse = texture(gAlbedoSpec, TexCoord).rgb;
//lightning = calculateDirectionalLight(directionalLight.light, directionalLight.position, Normal, Position, specularStrength, eyePosition, material, TexCoord);
//gl_fragColor = vec3(Position, 1.0);
//shadowMapping = calculateShadow(shadowMap, ShadowCoord, directionalLight.light.ambient);
//gl_FragColor = vec4(Diffuse, 1.0);
gl_FragColor = vec4(1.0); //vec4(Diffuse, 1.0);// lightning;//g * shadowMapping;
//gl_FragColor = lightning;// * shadowMapping;
}
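For comparison, a minimal, self-contained directional light pass over the G-buffer, without the #include files. Here lightDirection, lightColor and eyePosition are placeholder uniforms, all positions are assumed to be expressed in one common space, and there is no Material lookup because in a deferred pass the material/texture terms are normally already baked into gAlbedoSpec:
#version 410 core
in vec2 TexCoord;
out vec4 fragColor;
uniform sampler2D gPosition;
uniform sampler2D gNormal;
uniform sampler2D gAlbedoSpec;
uniform vec3 lightDirection; //placeholder: normalized, pointing from the surface towards the light
uniform vec3 lightColor; //placeholder
uniform vec3 eyePosition; //placeholder
void main()
{
vec3 fragPos = texture(gPosition, TexCoord).rgb;
vec3 normal = normalize(texture(gNormal, TexCoord).rgb);
vec3 albedo = texture(gAlbedoSpec, TexCoord).rgb;
float specMask = texture(gAlbedoSpec, TexCoord).a;
float NdotL = max(dot(normal, lightDirection), 0.0);
vec3 viewDir = normalize(eyePosition - fragPos);
vec3 halfway = normalize(lightDirection + viewDir);
float spec = pow(max(dot(normal, halfway), 0.0), 32.0) * specMask;
fragColor = vec4(albedo * lightColor * NdotL + lightColor * spec, 1.0);
}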
In case you want to see the global light code:
struct Light
{
vec3 color;
float intensity;
float ambient;
};
struct DirectionalLight
{
Light light;
vec3 position;
};
struct Attenuation
{
float constant;
float linear;
float quadratic;
};
struct PointLight
{
Light light;
Attenuation atten;
vec3 position;
float range;
};
struct SpotLight
{
PointLight pointLight;
//vec3 lookAt;
vec3 direction;
float cutOff;
};
vec3 GAMMA = vec3(1.0/2.2);
vec4 calculateDirectionalLight(Light light, vec3 direction, vec3 normal, vec3 worldPosition, float specularIntensity, vec3 eyePosition, Material material, vec2 texCoord)
{
vec3 diffuseFactor = ( light.color * material.diffuse * vec3(texture(material.texture.diffuse, texCoord.st)) )
* (light.intensity * clamp(dot(normal, direction), 0.0, 1.0) ) ;
vec3 viewDir = normalize(eyePosition - worldPosition);
vec3 reflectDir = normalize(reflect(-direction, normal));
float specularFactor = pow(clamp(dot(viewDir, reflectDir), 0.0, 1.0), specularIntensity);
vec3 specularColor = ( light.color * material.specular * vec3(texture(material.texture.specular, texCoord.st)) ) * (specularFactor * material.shininess);
return vec4(pow((diffuseFactor + specularColor + light.ambient + material.ambient), GAMMA), 1.0);
}
vec4 calculatePointLight(PointLight pointLight, vec3 normal, vec3 worldPosition, float specularIntensity, vec3 eyePosition, Material material, vec2 texCoord)
{
// DO NOT NORMALIZE lightDirection, WE NEED IT TO CALCULATE THE DISTANCE TO COMPARE RANGE OF LIGHT
vec3 lightDirection = pointLight.position - worldPosition;
float distanceToPoint = length(lightDirection);
// I don't like conditionals in shaders, but since this is fragment-based lighting I believe
// this will speed things up instead of always calculating the light
if(distanceToPoint > pointLight.range)
return vec4(0.0,0.0,0.0,0.0);
vec4 light = calculateDirectionalLight(pointLight.light, lightDirection, normal, worldPosition, specularIntensity, eyePosition, material, texCoord);
// light attenuation explained https://developer.valvesoftware.com/wiki/Constant-Linear-Quadratic_Falloff
// http://www.ogre3d.org/tikiwiki/tiki-index.php?page=Light+Attenuation+Shortcut
float attenuation = max(pointLight.atten.constant
+ pointLight.atten.linear * distanceToPoint
+ pointLight.atten.quadratic * distanceToPoint * distanceToPoint,
1.0);
return light / attenuation;
}
vec4 calculateSpotLight(SpotLight spotLight, vec3 normal, vec3 worldPosition, float specularIntensity, vec3 eyePosition, Material material, vec2 texCoord)
{
vec3 lightDirection = normalize(spotLight.pointLight.position - worldPosition);
float spotFactor = dot(lightDirection, spotLight.direction);
vec4 light = vec4(0.0f);
if(spotFactor > spotLight.cutOff)
{
light = calculatePointLight(spotLight.pointLight, normal, worldPosition, specularIntensity, eyePosition, material, texCoord) * (1.0 - (1.0 - spotFactor)/(1.0 - spotLight.cutOff));
}
return light;
}
Global mesh
struct Texture {
sampler2D diffuse;
sampler2D specular;
sampler2D normal;
sampler2D ambient;
sampler2D height;
//vec2 texCoord;
};
struct Material {
vec3 ambient; // Ka
vec3 diffuse; // Kd
vec3 specular; // Ks
vec3 transmittance; // Tr
vec3 emission; // Ke
float shininess; // Ns
float ior; // Ni
float dissolve; // Dissolve
int illum; // Illum
Texture texture;
};
uniform Material material;
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 texCoord;
layout (location = 2) in vec3 normal;
Global Shader
uniform mat4 VP;
uniform mat4 P;
What I am getting now after binding the buffers and running the directional shader is
and just as an example to see the scene, this is the position buffer
Fixed it. I had the clear-buffer and clear-color calls in the wrong place. They should come after I bind the framebuffer, not at the beginning of each frame.
glEnable(GL_DEPTH_TEST);
glBindFramebuffer(GL_FRAMEBUFFER, deferredFbo);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
defferedShader_.startProgram();
defferedShader_.setUniformMat4("VP", camera.getVP());
.
.
. rest of the code

Problems with drawing billboards

I am currently trying to draw billboards and some geometry with the "modern OpenGL approach". The problem is that I cannot force the billboards to keep their positions in space.
I need to link text positions to the positions of other objects. The coordinates of the text position are (3,3,3), and the end of the black line has the same coordinates. In some positions I have exactly what I need: the text is drawn at the end of the line, but in others it is too far from the end of the line.
My render code:
public void Draw()
{
//Set up matrices
projectionMatrix = Matrix4.CreateOrthographic(_width, _height, -10000, 10000);
modelMatrix = Matrix4.Identity;
viewMatrix = Matrix4.CreateRotationY((float)xrot) *
Matrix4.CreateRotationX((float)yrot) *
Matrix4.CreateScale((float)scale);
var viewPort = new Rectangle(-(_width / 2), -(_height / 2), _width, _height);
var viewportTransformationMatrix = ComputeViewportTransformationMatrix(viewPort, -100, 100);
var viewportOrthographicMatrix = ComputeViewportOrthographicMatrix(viewPort);
worldViewProj = modelMatrix * viewMatrix * projectionMatrix;
//DRAW AXISES
GL.UseProgram(axisesProgramID);
axisesProgram.Uniforms["worldViewProj"].SetValue(worldViewProj);
axisesVAO.Bind();
for (int i = 0; i < 4; i++)
{
GL.DrawArrays(PrimitiveType.Lines, i * 2, 2);
}
//DRAW TEXT WITH PRE-CREATED TEXTURE
GL.UseProgram(textProgramID);
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, textureID);
//set-up uniforms
textProgram.Uniforms["og_viewportOrthographicMatrix"].SetValue(viewportOrthographicMatrix);
textProgram.Uniforms["og_viewportTransformationMatrix"].SetValue(viewportTransformationMatrix);
textProgram.Uniforms["Position"].SetValue(new float[] { 3.0f, 3.0f, 3.0f });
textProgram.Uniforms["projectionMatrix"].SetValue(projectionMatrix);
textProgram.Uniforms["modelViewMatrix"].SetValue(modelViewMatrix);
textProgram.Uniforms["og_texture0"].SetValue(0);
GL.DrawArrays(PrimitiveType.Points, 0, 1);
GL.BindTexture(TextureTarget.Texture2D, 0);
}
public Matrix4 ComputeViewportTransformationMatrix(Rectangle viewport, float nearDepthRange, float farDepthRange)
{
double halfWidth = viewport.Width * 0.5;
double halfHeight = viewport.Height * 0.5;
double halfDepth = (farDepthRange - nearDepthRange) * 0.5;
//
// Bottom and top swapped: MS -> OpenGL
//
return new Matrix4(
(float)halfWidth, 0.0f, 0.0f, (float)viewport.Left + (float)halfWidth,
0.0f, (float)halfHeight, 0.0f, (float)viewport.Top + (float)halfHeight,
0.0f, 0.0f, (float)halfDepth, (float)nearDepthRange + (float)halfDepth,
0.0f, 0.0f, 0.0f, 1.0f);
}
public static Matrix4 ComputeViewportOrthographicMatrix(Rectangle viewport)
{
//
// Bottom and top swapped: MS -> OpenGL
//
return Matrix4.CreateOrthographicOffCenter(
(float)viewport.Left, (float)viewport.Right,
(float)viewport.Top, (float)viewport.Bottom,
0.0f, 1.0f);
}
My axis shaders are a really simple pass-through.
//VERTEX SHADER
#version 150 core
in vec3 in_Position;
in vec3 in_Color;
out vec4 color;
uniform mat4 worldViewProj;
void main(void) {
gl_Position = worldViewProj * vec4(in_Position, 1.0);
color = vec4(in_Color, 1.0f);
}
//FRAGMENT SHADER
#version 150 core
in vec4 color;
out vec4 out_Color;
void main(void)
{
out_Color = color;
}
Here are text (texture) shaders:
//VERTEX SHADER
#version 330
out float gsOrigin;
out vec2 gsPixelOffset;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 og_viewportTransformationMatrix;
uniform float origin = 6; // TODO: Why does this not work when float is int?
uniform vec2 pixelOffset = vec2(0,0);
uniform vec3 Position;
vec4 ModelToWindowCoordinates(
vec4 v,
mat4 modelViewPerspectiveMatrix,
mat4 viewportTransformationMatrix)
{
v = modelViewPerspectiveMatrix * v; // clip coordinates
v.xyz /= v.w; // normalized device coordinates
v.xyz = (viewportTransformationMatrix * vec4(v.xyz, 1.0)).xyz; // window coordinates
return v;
}
void main()
{
gl_Position = ModelToWindowCoordinates ( vec4(Position, 1.0f) , modelViewMatrix * projectionMatrix , og_viewportTransformationMatrix ) ;
gsOrigin = origin;
gsPixelOffset = pixelOffset;
}
//GEOMETRY SHADER
#version 330
layout(points) in;
layout(triangle_strip, max_vertices = 4) out;
in float gsOrigin[];
in vec2 gsPixelOffset[];
out vec2 fsTextureCoordinates;
uniform sampler2D og_texture0;
uniform float og_highResolutionSnapScale;
uniform mat4 og_viewportOrthographicMatrix;
void main()
{
float originScales[3] = float[](0.0, 1.0, -1.0);
vec2 halfSize = vec2(textureSize(og_texture0, 0)) * 0.5 * og_highResolutionSnapScale;
vec4 center = gl_in[0].gl_Position;
int horizontalOrigin = int(gsOrigin[0]) & 3; // bits 0-1
int verticalOrigin = (int(gsOrigin[0]) & 12) >> 2; // bits 2-3
center.xy += (vec2(originScales[horizontalOrigin], originScales[verticalOrigin]) * halfSize);
center.xy += (gsPixelOffset[0] * og_highResolutionSnapScale);
vec4 v0 = vec4(center.xy - halfSize, 0, 1.0);
vec4 v1 = vec4(center.xy + vec2(halfSize.x, -halfSize.y), 0, 1.0);
vec4 v2 = vec4(center.xy + vec2(-halfSize.x, halfSize.y), 0, 1.0);
vec4 v3 = vec4(center.xy + halfSize, 0, 1.0);
gl_Position = og_viewportOrthographicMatrix * v0;
fsTextureCoordinates = vec2(0.0, 0.0);
EmitVertex();
gl_Position = og_viewportOrthographicMatrix * v1;
fsTextureCoordinates = vec2(1.0, 0.0);
EmitVertex();
gl_Position = og_viewportOrthographicMatrix * v2;
fsTextureCoordinates = vec2(0.0, 1.0);
EmitVertex();
gl_Position = og_viewportOrthographicMatrix * v3;
fsTextureCoordinates = vec2(1.0, 1.0);
EmitVertex();
}
//FRAGMENT SHADER
#version 330
in vec2 fsTextureCoordinates;
out vec4 fragmentColor;
uniform sampler2D og_texture0;
uniform vec3 u_color;
void main()
{
vec4 color = texture(og_texture0, fsTextureCoordinates);
if (color.a == 0.0)
{
discard;
}
fragmentColor = vec4(color.rgb * u_color.rgb, color.a);
}
To me it looks like there is some basic coordinate system confusion. I have not checked everything here, but to me,
worldViewProj = modelMatrix * viewMatrix * projectionMatrix;
looks like the wrong way round, as vertices should be multiplied from the right like
projection*view*model*vertex
The same issue is within your shaders.
Also, I am not entirely sure, but it seems you are computing pixel coordinates for gl_Position in the shader (as you are applying some viewport transform in the function ModelToWindowCoordinates). Since pixel coordinates may, for example, range from (0,0) to (1920,1080), they are not correct for gl_Position, which should be in clip coordinates.
I think you should read a good tutorial about 3D billboarding and the math; for example,
this one looks quite interesting. Then modify the sample code to fit your needs step by step.
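As a concrete starting point for the first issue, here is a minimal vertex-shader sketch that applies the matrices in projection * view * model order and leaves gl_Position in clip coordinates; it drops the window-coordinate conversion and the origin/pixel-offset outputs, so the geometry shader would have to be adjusted to work in clip space as well:
#version 330
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
uniform vec3 Position;
void main()
{
//projection * view * model * vertex, applied right to left
gl_Position = projectionMatrix * modelViewMatrix * vec4(Position, 1.0);
}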

Diffuse lighting error on parallel surfaces

As a test, I created a simple quad. Here are its attributes:
Vertex vertices[] =
{
// Positions Normals
{vec3(-1,-1, 0), vec3(-1,-1, 1)}, // v0
{vec3( 1,-1, 0), vec3( 1,-1, 1)}, // v1
{vec3(-1, 1, 0), vec3(-1, 1, 1)}, // v2
{vec3( 1, 1, 0), vec3( 1, 1, 1)}, // v3
};
And I put it in my world space at (0.0, 0.0, -9.5). Then I put my point light position at (0.0, 0.0, -8.0). My camera is at the origin (0.0, 0.0, 0.0). When I run my program, this works as expected:
But then, when I replace this quad with 9 scaled-down quads and put them all at -9.5 on Z (in other words, they are all parallel to each other on Z), my diffuse lighting gets a little weird.
It looks like the corners are showing too much lighting, breaking the nice diffuse circle that we see on a regular quad.
Here is my shader program:
precision mediump int;
precision mediump float;
varying vec3 v_position;
varying vec3 v_normal;
#if defined(VERTEX)
uniform mat4 u_mvpMatrix;
uniform mat4 u_mvMatrix;
uniform mat3 u_normalMatrix;
attribute vec4 a_position;
attribute vec3 a_normal;
void main()
{
vec4 position = u_mvMatrix * a_position;
v_position = position.xyz / position.w;
v_normal = normalize(u_normalMatrix * a_normal);
gl_Position = u_mvpMatrix * a_position;
}
#endif // VERTEX
#if defined(FRAGMENT)
uniform vec3 u_pointLightPosition;
void main()
{
vec3 viewDir = normalize(-v_position);
vec3 normal = normalize(v_normal);
vec3 lightPosition = u_pointLightPosition - v_position;
vec3 pointLightDir = normalize(lightPosition);
float distance = length(lightPosition);
float pointLightAttenuation = 1.0 / (1.0 + (0.25 * distance * distance));
float diffuseTerm = max(dot(pointLightDir, normal), 0.15);
gl_FragColor = vec4(diffuseTerm * pointLightAttenuation);
}
#endif // FRAGMENT
My uniforms are uploaded as follows (I'm using GLM):
const mat4 &view_matrix = getViewMatrix();
mat4 mv_matrix = view * getModelMatrix();
mat4 mvp_matrix = getProjectionMatrix() * mv_matrix;
mat3 normal_matrix = inverseTranspose(mat3(mv_matrix));
vec3 pointLightPos = vec3(view_matrix * vec4(getPointLightPos(), 1.0f));
glUniformMatrix4fv( mvpMatrixUniformID, 1, GL_FALSE, (GLfloat*)&mvp_matrix);
glUniformMatrix4fv( vpMatrixUniformID, 1, GL_FALSE, (GLfloat*)&mv_matrix);
glUniformMatrix3fv(normalMatrixUniformID, 1, GL_FALSE, (GLfloat*)&normal_matrix);
glUniform3f(pointLightPosUniformID, pointLightPos.x, pointLightPos.y, pointLightPos.z);
Am I doing anything wrong?
Thanks!
Without going too much into your code, I think everything is working just fine. I see a very similar result with a quick Blender setup:
The issue is the interpolation of the normal doesn't produce a spherical bump.
It ends up being a patch like this (I simply subdivided a smooth shaded cube)...
If you want a more spherical bump, you could generate the normals implicitly in a fragment shader (for example as is done here (bottom image)), use a normal map, or use more tessellated geometry such as an actual sphere.
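As an illustration of the first option (generating the normal implicitly in the fragment shader), here is a small sketch that fakes a hemispherical normal over each quad. v_quadPos is an assumed extra varying giving the position within the quad in [-1, 1], and for brevity the light direction is assumed to already be expressed in the quad's local frame:
precision mediump float;
varying vec2 v_quadPos; //assumed varying: position within the quad, in [-1, 1]
uniform vec3 u_lightDir; //assumed uniform: normalized surface-to-light direction, in the quad's local frame
void main()
{
float r2 = dot(v_quadPos, v_quadPos);
if (r2 > 1.0)
discard; //keep only the unit disc
vec3 normal = vec3(v_quadPos, sqrt(1.0 - r2)); //implicit hemisphere normal
float diffuseTerm = max(dot(normal, u_lightDir), 0.15);
gl_FragColor = vec4(vec3(diffuseTerm), 1.0);
}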