I have been trying to imploment parallax corrected local cubemaps in OpenGL for a little while now, but I've not really managed to get anywhere, does anybody know where to start?
Here is my current shader code:
Fragment:
#version 330
in vec2 TexCoord;
in vec3 Normal;
in vec3 Position;
in vec3 Color;
out vec4 color;
uniform samplerCube CubeMap;
uniform vec3 CameraPosition;
void main() {
vec4 OutColor = vec4(Color,1.0);
vec3 normal = normalize(Normal);
vec3 view = normalize(Position-CameraPosition);
vec3 ReflectionVector = reflect(view,normal);
vec4 ReflectionColor = texture(CubeMap,ReflectionVector);
OutColor = mix(OutColor,ReflectionColor,0.5);
color = OutColor;
}
Vertex:
#version 330
layout (location = 0) in vec3 position;
layout (location = 2) in vec3 normal;
layout (location = 1) in vec2 texCoord;
layout (location = 3) in vec3 color;
out vec2 TexCoord;
out vec3 Normal;
out vec3 Position;
out vec3 Color;
uniform mat4 ModelMatrix;
uniform mat4 ViewMatrix;
uniform mat4 ProjectionMatrix;
void main()
{
gl_Position = ProjectionMatrix * ViewMatrix * ModelMatrix * vec4(position, 1.0f);
TexCoord=texCoord;
Normal = normal;
Position = vec4(ModelMatrix * vec4(position,1.0)).xyz;
Color = color;
}
Position = vec4(ModelMatrix * vec4(position,1.0)).xyz;
This puts the Position output variable into world space. Assuming the CameraPosition uniform is also in world space, then:
vec3 view = normalize(Position-CameraPosition);
view will also be in world space.
However, Normal comes directly from the vertex attribute. So unless you are updating each vertex's Normal value every time you rotate the object, that value will probably be in model space. And therefore:
vec3 ReflectionVector = reflect(view,normal);
This statement is incoherent. view is in one space, while normal is in another. And you can't really perform reasonable operations on vectors that are in different coordinate systems.
You need to make sure that Normal, Position, and CameraPosition are all in the same space. If you want that space to be world space, then you need to transform Normal into world space. Which in the general case requires computing the inverse/transpose of your model-to-world matrix.
Related
I've been trying for some time now to debug this simple Phong-Shader and just came up short. The diffuse part is fine but the specular is not. It doesn't move with the camera.
Here's from one side:
And here from the other:
As far as I can see, I did convert the Position to Viewspace, but, apparently, I made another mistake of some kind.
Vertex Shader:
#version 330
#extension GL_ARB_explicit_attrib_location : enable
layout(location=0) in vec3 aPosition;
layout(location=1) in vec3 aNormal;
out vec3 vPosition;
out vec3 vNormal;
uniform mat4 uModel;
uniform mat4 uView;
uniform mat4 uProjection;
void main(void)
{
vPosition = vec3(uModel * vec4(aPosition,1.0f));
vNormal = vec3(uModel * vec4(aNormal, 0.0f));
gl_Position = uProjection * uView * uModel * vec4(aPosition, 1.0);
}
And my Fragment Shader
#version 330
out vec4 FragColor;
in vec3 vPosition;
in vec3 vNormal;
uniform mat4 uView;
uniform vec3 uColor;
uniform vec3 uLightpositions[10];
uniform vec3 uLightcolors[10];
uniform float uPhongSpecular;
void main(void)
{
FragColor = vec4(0.4*uColor, 1.0);//Ambient Value
for(int i = 0; i < 5; i++){
vec3 lVec = normalize(uLightpositions[i] - vPosition);
vec3 nVec = normalize(vNormal);
float diffuse = max(dot(lVec,nVec), 0);
FragColor += 0.5* vec4(uLightcolors[i] * diffuse,0.0f);
vec3 rVec = normalize(reflect(lVec,nVec));
vec3 vVec = -normalize(vec3(uView * vec4(vPosition,1.0)));
float specular = 0;
if(dot(rVec,vVec) < 0.0)
{
specular = pow(max(dot(-rVec,vVec),0),uPhongSpecular);
}
FragColor += 0.2*vec4(uLightcolors[i] * specular,0.0f);
}
}
The problem is dot(-rVec, vVec). vVec is a vector in view space, however, rVec is a vector in world space. Convert rVec from world space to view space:
vec3 rVec = normalize(reflect(lVec, nVec));
vec3 rVec = normalize(mat3(uView) * reflect(lVec, nVec));
One thing wrong in addition to Rabbid76's answer is if(dot(rVec,vVec) < 0.0) is testing if the angle between the two is greater than 90 degrees. In other words, you are testing if the specular reflection is visible behind the model. You have to flip that from less-than to greater-than >.
C++ , OpenGL , Glad -> I have a light which is supposed to cause some diffuse lighting in the scene.The problem is that when i rotate my object that is being render on the Y (UP) axis , it seems that the light is also moving with the object.
The movement of the light is not synchronized with the rotation of the object.
WHy is this happening and how do i fix this?
This is the Shader.
The Vertex Shader
#version 330 core
layout (location = 0) in vec3 pos;
layout (location = 1) in vec2 coords;
layout (location = 2) in vec3 normals;
out vec2 Texture_Coords;
out vec3 normal;
out vec3 toLightVector;
uniform mat4 p;
uniform mat4 m;
uniform mat4 v;
uniform vec3 light_position;
void main(){
vec4 world_position = m * vec4(pos,1.0);
gl_Position = p * v * world_position;
Texture_Coords = coords;
normal = (vec4(normals,1.0) * m).xyz;
toLightVector = light_position - world_position.xyz;
}
The Fragment Shader
#version 330 core
out vec4 Pixel;
in vec2 Texture_Coords;
in vec3 normal;
in vec3 toLightVector;
uniform vec4 color;
uniform sampler2D Texture;
uniform float ambient;
uniform vec3 light_color;
void main(){
vec3 unitNormal = normalize(normal);
vec3 unitToLightVector = normalize(toLightVector);
float light_factor = dot(unitNormal, unitToLightVector);
float brightness = max(light_factor, ambient);
vec3 diffuse = brightness * light_color;
Pixel = vec4(diffuse,1.0) * texture(Texture, Texture_Coords);
}
Matrix multiplications are not commutative v * m is not the same as m * v:
normal = (vec4(normals,1.0) * m).xyz;
normal = mat3(m) * normals;
I also recommend reading Why is the transposed inverse of the model view matrix used to transform the normal vectors? and Why transforming normals with the transpose of the inverse of the modelview matrix?:
normal = transpose(inverse(mat3(m))) * normals;
I'm quite a rookie in GLSL and I've been struggling to try to find a way to color all the vertices of a cube in different color. Each face has 4 vertices and a cube has 6 faces, so 6 * 4 = 24. But I can only draw 1 color per corner.
Vertex Shader:
#version 330
uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;
layout (location=0) in vec3 a_position;
layout (location=1) in vec3 a_normal;
out vec3 normal;
void main()
{
normal = a_position;
gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
Fragment Shader:
#version 330
in vec3 normal;
out vec4 fragColor;
void main() {
fragColor = vec4(normal, 1.0);
}
Result:
If you want to color each face in a different color and you want to find a in-shader solution, the a possibility would be to color the fragments of the cube, dependent on the component of the vertex coordinate with the maximum amount.
Pass the vertex coordinate to the fragment shader:
#version 330
uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;
layout (location=0) in vec3 a_position;
//layout (location=1) in vec3 a_normal;
out vec3 vertPos;
void main()
{
vertPos = a_position;
gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
Find the component of the vertex coordinate withe maximum absolut value and chose a color:
#version 330
in vec3 vertPos;
out vec4 fragColor;
void main() {
vec3 posAbs = abs(vertPos);
vec3 color = step(posAbs.yzx, posAbs) * step(posAbs.zxy, posAbs);
color += (1.0 - step(color.zxy * vertPos.zxy, vec3(0.0)));
fragColor = vec4(color, 1.0);
}
If the normal vectors are face normals, then there is even a simpler solution using the normal vectors:
Pass the normal vector to the fragment shader:
#version 330
uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;
layout (location=0) in vec3 a_position;
layout (location=1) in vec3 a_normal;
out vec3 normal;
void main()
{
normal = a_normal;
gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
Compute the color dependent on the normal vector:
#version 330
in vec3 normal;
out vec4 fragColor;
void main() {
vec3 color = abs(normal.xyz) + max(normal.zxy, 0.0);
fragColor = vec4(color, 1.0);
}
[...] so I need 24 colors. [...]
In that case I suggest the following solution.
#version 330
in vec3 vertPos;
out vec4 fragColor;
void main() {
vec3 posAbs = abs(vertPos);
vec3 color = (step(posAbs.yzx, posAbs) * step(posAbs.zxy, posAbs) +
step(0.0, vertPos.yzx)) * 0.5;
fragColor = vec4(color, 1.0);
}
Currently i try to implement Bump Mapping for my OpenGL Project.
The Problem is that same parts of my cube is black. Like shown in this picture :
I am almost certain that i just dont understand how the shaders works, so i used the shader from OpenGL Superbible .
Here is my code :
Vertex Shader
#version 330
layout(location = 0) in vec4 position;
layout(location = 1) in vec3 normal;
layout(location = 2) in vec2 texCoord;
layout(location = 3) in vec3 tangent;
layout(location = 4) in vec3 bitangent;
uniform mat4 matModel;
uniform mat4 matNormal;
uniform mat4 matMVP;
uniform mat4 matMV;
uniform vec3 light_pos = vec3(0.0,0.0,100.0);
out VS_OUT{
vec3 eyeDir;
vec3 lightDir;
vec2 fragTexCoord;
} vs_out;
void main()
{
vec4 P = matMV*position;
vec3 V = P.xyz;
vec3 L = normalize(light_pos - P.xyz);
vec3 N = normalize(mat3(matNormal)*normal);
vec3 T = normalize(mat3(matNormal)*tangent);
vec3 B = cross(N,T);
vs_out.lightDir = normalize(vec3(dot(L,T),
dot(L,B),
dot(L,N)));
V = -P.xyz;
vs_out.eyeDir = normalize(vec3(dot(V,T),
dot(V,B),
dot(V,N)));
vs_out.fragTexCoord = texCoord;
gl_Position = matMVP * position;
}
And the fragment shader :
#version 330
uniform sampler2D diffuseTex;
uniform sampler2D heightTex;
uniform vec3 heightColor;
in vec3 fragNormal;
in vec2 fragTexCoord;
in vec3 tangent_out_normalized;
in vec3 bitangent_out;
in vec3 normal_out_normalized;
in VS_OUT{
vec3 eyeDir;
vec3 lightDir;
vec2 fragTexCoord;
}fs_in;
out vec4 outputColor;
void main()
{
vec3 V = normalize(fs_in.eyeDir);
vec3 L = normalize(fs_in.lightDir);
vec3 N = normalize(texture(heightTex,fs_in.fragTexCoord).rgb*2-vec3(1.0));
vec3 R = reflect(-L,N);
vec3 diffuse_albedo = texture(diffuseTex,fs_in.fragTexCoord).rgb;
vec3 diffuse =max(dot(N,L),0)*diffuse_albedo;
vec3 specular_albedo = vec3(1.0);
vec3 specular = max(pow(dot(R,V),25.0),0.0)*specular_albedo;
outputColor = vec4(diffuse+specular,1);
}
What am i missing?
It seems very wrong that you don't use fragNormal anywhere. You should use it to rotate the texture normal. To make it obvious, if the bump is flat you should still get the usual surface lighting.
The next strange thing is that you need to multiply you bump normal by 2 and subtract {1,1,1}. The normal should never flip and I suspect this is going on in your case. When it flips you will suddenly go from in light to in shadow, and that might cause the black areas.
I know that same questions were asked many times, but unfortunately I am unable to find the source of my problem.
With help of tutorials I've written a small GLSL shader. Right now it can work with ambient light and load normals from normal map. The issue is that directional light seems to be dependent on my viewing angle.
Here are my shaders:
//Vertex Shader
#version 120
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
attribute vec3 tangent;
varying vec2 texCoord0;
varying mat3 tbnMatrix;
uniform mat4 transform;
void main(){
gl_Position=transform * vec4(position, 1.0);
texCoord0 = texCoord;
vec3 n = normalize((transform*vec4(normal,0.0)).xyz);
vec3 t = normalize((transform*vec4(tangent,0.0)).xyz);
t=normalize(t-dot(t,n)*n);
vec3 btTangent=cross(t,n);
tbnMatrix=transpose(mat3(t,btTangent,n));
}
//Fragment Shader
#version 120
varying vec2 texCoord0;
varying mat3 tbnMatrix;
struct BaseLight{
vec3 color;
float intensity;
};
struct DirectionalLight{
BaseLight base;
vec3 direction;
};
uniform sampler2D diffuse;
uniform sampler2D normalMap;
uniform vec3 ambientLight;
uniform DirectionalLight directionalLight;
vec4 calcLight(BaseLight base, vec3 direction,vec3 normal){
float diffuseFactor=dot(normal,normalize(direction));
vec4 diffuseColor = vec4(0,0,0,0);
if(diffuseFactor>0){
diffuseColor=vec4(base.color,1.0)* base.intensity *diffuseFactor;
}
return diffuseColor;
}
vec4 calcDirectionalLight(DirectionalLight directionalLight ,vec3 normal){
return calcLight(directionalLight.base,directionalLight.direction,normal);
}
void main(){
vec3 normal =tbnMatrix*(255.0/128.0* texture2D(normalMap,texCoord0).xyz-255.0/256.0);
vec4 totalLight = vec4(ambientLight,0) +calcDirectionalLight(directionalLight, normal);
gl_FragColor=texture2D(diffuse,texCoord0)*totalLight;
}
"transform" matrix that I send to the shader is summarily computed this way:
viewProjection=m_perspective* glm::lookAt(CameraPosition,CameraPosition+m_forward,m_up);
glm::mat4 transform = vievProjection * object_matrix;
"object_matrix" is a matrix that I get directly from physics engine.(I think it's the matrix that defines position and rotation of the object in the world space, correct me if I'm wrong.)
And I guess that "transform" matrix is computed correctly, since all the objects are drawn in the right positions. It looks like the problem is related to normals because if I set gl_FragColor = vec4(normal, 0) the color is also changing with camera rotation.
I would greatly appreciate if anyone could point me to my mistake