Currently I am trying to implement bump mapping for my OpenGL project.
The problem is that some parts of my cube are black, as shown in this picture:
I am almost certain that I just don't understand how the shaders work, so I used the shader from the OpenGL SuperBible.
Here is my code:
Vertex Shader
#version 330
layout(location = 0) in vec4 position;
layout(location = 1) in vec3 normal;
layout(location = 2) in vec2 texCoord;
layout(location = 3) in vec3 tangent;
layout(location = 4) in vec3 bitangent;
uniform mat4 matModel;
uniform mat4 matNormal;
uniform mat4 matMVP;
uniform mat4 matMV;
uniform vec3 light_pos = vec3(0.0,0.0,100.0);
out VS_OUT {
    vec3 eyeDir;
    vec3 lightDir;
    vec2 fragTexCoord;
} vs_out;

void main()
{
    vec4 P = matMV * position;
    vec3 V = P.xyz;
    vec3 L = normalize(light_pos - P.xyz);

    vec3 N = normalize(mat3(matNormal) * normal);
    vec3 T = normalize(mat3(matNormal) * tangent);
    vec3 B = cross(N, T);

    vs_out.lightDir = normalize(vec3(dot(L, T),
                                     dot(L, B),
                                     dot(L, N)));

    V = -P.xyz;
    vs_out.eyeDir = normalize(vec3(dot(V, T),
                                   dot(V, B),
                                   dot(V, N)));

    vs_out.fragTexCoord = texCoord;
    gl_Position = matMVP * position;
}
And the fragment shader:
#version 330
uniform sampler2D diffuseTex;
uniform sampler2D heightTex;
uniform vec3 heightColor;
in vec3 fragNormal;
in vec2 fragTexCoord;
in vec3 tangent_out_normalized;
in vec3 bitangent_out;
in vec3 normal_out_normalized;
in VS_OUT {
    vec3 eyeDir;
    vec3 lightDir;
    vec2 fragTexCoord;
} fs_in;

out vec4 outputColor;

void main()
{
    vec3 V = normalize(fs_in.eyeDir);
    vec3 L = normalize(fs_in.lightDir);
    vec3 N = normalize(texture(heightTex, fs_in.fragTexCoord).rgb * 2.0 - vec3(1.0));
    vec3 R = reflect(-L, N);

    vec3 diffuse_albedo = texture(diffuseTex, fs_in.fragTexCoord).rgb;
    vec3 diffuse = max(dot(N, L), 0.0) * diffuse_albedo;

    vec3 specular_albedo = vec3(1.0);
    vec3 specular = max(pow(dot(R, V), 25.0), 0.0) * specular_albedo;

    outputColor = vec4(diffuse + specular, 1.0);
}
What am I missing?
It seems very wrong that you don't use fragNormal anywhere. You should use it to rotate the texture normal. To see why this matters: if the bump map is flat, you should still get the usual surface lighting.
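One way to do that rotation is a minimal sketch like the following, reusing the tangent/bitangent/normal inputs that are already declared, but unused, in your fragment shader (I'm assuming they carry the interpolated per-vertex TBN vectors):

mat3 TBN = mat3(normalize(tangent_out_normalized),
                normalize(bitangent_out),
                normalize(normal_out_normalized));
// Rotate the sampled tangent-space normal into the same space as the lighting vectors.
vec3 N = normalize(TBN * (texture(heightTex, fs_in.fragTexCoord).rgb * 2.0 - 1.0));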
The next strange thing is the decode step, where you multiply your bump normal by 2 and subtract (1,1,1). The normal should never flip, and I suspect that is what is happening in your case: when it flips, you suddenly go from lit to shadowed, and that could cause the black areas.
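A quick way to test that suspicion (just a sketch, assuming heightTex actually stores a tangent-space normal map, whose z component should always be positive):

vec3 n = texture(heightTex, fs_in.fragTexCoord).rgb * 2.0 - 1.0;
n.z = abs(n.z);        // a tangent-space normal must face the surface's front side
vec3 N = normalize(n);

If the black areas disappear, the decoded normal was flipping. And if heightTex really is a height map rather than a normal map, you would need to derive normals from it instead of decoding it directly.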
I am working on a shader where the fragment shader should work in tangent space. It works just as expected for both the ambient and the diffuse light, but the specular light is just plain weird: nearby fragments can receive a lot of light or none at all for no obvious reason.
The vertex shader is:
#version 330 core
layout (location = 0) in vec3 inVertex;
layout (location = 1) in vec3 inNormal;
layout (location = 2) in vec2 inTexture;
layout (location = 3) in vec3 inTangent;
layout (location = 4) in vec3 inBitangent;
out vec3 FragmentPosition;
out vec2 TextureCoordinate;
out vec3 TangentLightDirection;
out vec3 TangentViewPosition;
out vec3 TangentFragmentPosition;
void main()
{
    FragmentPosition = vec3(inVertex);
    vec3 normal = normalize(inNormal);
    gl_Position = vec4(inVertex, 1);
    TextureCoordinate = inTexture;

    vec3 tangent = normalize(inTangent);
    vec3 biTangent = normalize(inBitangent);
    mat3 toTangentSpaceTransformation = transpose(mat3(tangent, biTangent, normal));

    TangentFragmentPosition = toTangentSpaceTransformation * FragmentPosition;
    TangentLightPosition = toTangentSpaceTransformation * vec3(0, 1, 1);
    TangentFragmentPosition = toTangentSpaceTransformation * vec3(0, 0, 3);
}
And the fragment shader is:
#version 330 core
out vec4 FragColor;
in vec3 FragmentPosition;
in vec2 TextureCoordinate;
in vec3 TangentLightDirection;
in vec3 TangentViewPosition;
in vec3 TangentFragmentPosition;
uniform sampler2D Texture;
uniform sampler2D normalTexture;
void main() {
    vec3 normal = vec3(0, 0, 1);
    float shininess = 4;
    vec3 phongVector = vec3(0.3, 0.7, 1);
    vec4 color = texture(Texture, TextureCoordinate);
    vec4 ambientLightColor = vec4(1, 1, 1, 1); //vec4(normalOffset,1);

    // Calculation of ambient light
    vec4 sunLightColor = vec4(1, 1, 1, 1);
    vec3 sunLightDirection = normalize(TangentLightPosition);
    vec4 ambientLight = phongVector[0] * ambientLightColor;

    // Calculation of diffuse light
    float diffuseConst = max(dot(normal, sunLightDirection), 0.0);
    vec4 diffuseLight = phongVector[1] * diffuseConst * sunLightColor;

    // Calculation of specular light
    vec3 viewDirection = normalize(TangentViewPosition - TangentFragmentPosition);
    vec3 reflectionDirection = reflect(-sunLightDirection, normal);
    float spec = pow(max(dot(reflectionDirection, viewDirection), 0), shininess);
    vec4 specularLight = phongVector[2] * spec * sunLightColor;

    FragColor = (specularLight) * color;
}
It was a typo: TangentFragmentPosition was initialized twice, while TangentViewPosition was not initialized at all. Initializing TangentViewPosition gave the desired result.
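For reference, the end of the vertex shader presumably should read as follows (my reconstruction from the self-answer; the light and view vectors stay hard-coded exactly as in the question):

TangentFragmentPosition = toTangentSpaceTransformation * FragmentPosition;
TangentLightDirection = toTangentSpaceTransformation * vec3(0, 1, 1);
TangentViewPosition = toTangentSpaceTransformation * vec3(0, 0, 3);

(The fragment shader's normalize(TangentLightPosition) would likewise need to refer to the declared TangentLightDirection.)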
I have been trying to implement parallax-corrected local cubemaps in OpenGL for a little while now, but I haven't really managed to get anywhere. Does anybody know where to start?
Here is my current shader code:
Fragment:
#version 330
in vec2 TexCoord;
in vec3 Normal;
in vec3 Position;
in vec3 Color;
out vec4 color;
uniform samplerCube CubeMap;
uniform vec3 CameraPosition;
void main() {
    vec4 OutColor = vec4(Color, 1.0);
    vec3 normal = normalize(Normal);
    vec3 view = normalize(Position - CameraPosition);
    vec3 ReflectionVector = reflect(view, normal);
    vec4 ReflectionColor = texture(CubeMap, ReflectionVector);
    OutColor = mix(OutColor, ReflectionColor, 0.5);
    color = OutColor;
}
Vertex:
#version 330
layout (location = 0) in vec3 position;
layout (location = 2) in vec3 normal;
layout (location = 1) in vec2 texCoord;
layout (location = 3) in vec3 color;
out vec2 TexCoord;
out vec3 Normal;
out vec3 Position;
out vec3 Color;
uniform mat4 ModelMatrix;
uniform mat4 ViewMatrix;
uniform mat4 ProjectionMatrix;
void main()
{
    gl_Position = ProjectionMatrix * ViewMatrix * ModelMatrix * vec4(position, 1.0f);
    TexCoord = texCoord;
    Normal = normal;
    Position = vec4(ModelMatrix * vec4(position, 1.0)).xyz;
    Color = color;
}
Position = vec4(ModelMatrix * vec4(position,1.0)).xyz;
This puts the Position output variable into world space. Assuming the CameraPosition uniform is also in world space, then:
vec3 view = normalize(Position-CameraPosition);
view will also be in world space.
However, Normal comes directly from the vertex attribute. So unless you are updating each vertex's Normal value every time you rotate the object, that value will probably be in model space. And therefore:
vec3 ReflectionVector = reflect(view,normal);
This statement is incoherent. view is in one space, while normal is in another. And you can't really perform reasonable operations on vectors that are in different coordinate systems.
You need to make sure that Normal, Position, and CameraPosition are all in the same space. If you want that space to be world space, then you need to transform Normal into world space. Which in the general case requires computing the inverse/transpose of your model-to-world matrix.
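A minimal sketch of that fix in the vertex shader, assuming the application supplies a NormalMatrix uniform set to transpose(inverse(mat3(ModelMatrix))) (if the model matrix contains only rotations and uniform scale, mat3(ModelMatrix) itself is enough):

uniform mat3 NormalMatrix; // application sets this to transpose(inverse(mat3(ModelMatrix)))

// in main():
Normal = normalize(NormalMatrix * normal); // now in world space, matching Position

The fragment shader can then keep its normalize(Normal), and the reflect() call becomes coherent.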
I have this vertex shader:
#version 430 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 textureCoordinate;
layout (location = 2) in vec3 normal;
layout (location = 4) in vec3 tangent;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
uniform vec3 lightPosition = vec3(0.0, 1.0, 0.0);
out vec2 texCoord;
out vec3 lightDirection;
out vec3 eyeDirection;
void main() {
    vec4 P = modelViewMatrix * vec4(position, 1.0);
    vec3 N = normalize(mat3(modelViewMatrix) * normal);
    vec3 T = normalize(mat3(modelViewMatrix) * tangent);
    vec3 B = cross(N, T);
    vec3 L = lightPosition - P.xyz;
    vec3 V = -P.xyz;
    lightDirection = normalize(vec3(dot(V, T), dot(V, B), dot(V, N)));
    eyeDirection = normalize(vec3(dot(V, T), dot(V, B), dot(V, N)));
    texCoord = textureCoordinate;
    gl_Position = projectionMatrix * P;
}
And this fragment shader:
#version 430 core
in vec2 texCoord;
in vec3 lightDirection;
in vec3 eyeDirection;
layout (binding = 0) uniform sampler2D tex;
layout (binding = 1) uniform sampler2D normalMap;
out vec4 color;
void main() {
    vec3 ambient = vec3(0.1);
    vec3 V = normalize(eyeDirection);
    vec3 L = normalize(lightDirection);
    vec3 N = normalize(texture(normalMap, texCoord)).rgb * 2.0 - vec3(1.0);
    vec3 R = reflect(-L, N);
    vec3 diffuseAlbedo = texture(tex, texCoord).rgb;
    /*vec3 diffuseAlbedo = vec3(1.0);*/
    vec3 diffuse = max(dot(N, L), 0.0) * diffuseAlbedo;
    vec3 specular = vec3(0.0);
    color = vec4(ambient + diffuse + specular, 1.0);
}
As you can see, the variable lightPosition is actually used in the calculations. But when I want to get the location of it via glGetUniformLocation, I get -1.
I looked through the other questions here on SO, like these:
glGetActiveUniform reports uniform exists, but glGetUniformLocation returns -1
glGetUniformLocation return -1 on nvidia cards
glGetUniformLocation() returning -1 even though used in vertex shader
Especially the last one - but in my case, lightPosition is used to calculate lightDirection and this is then used in the fragment shader. So it should not be removed.
Any ideas on what is going on here? Or how to debug this?
Also: I set a default value for the uniform. If I remove the default value, it still gives -1.
I am running Ubuntu 15.10 with Nvidia 340.96 drivers and a GF 710M card, using OpenGL 4.4 with the core profile.
... As you can see, the variable lightPosition is actually used in the calculations. ...
You would think so, right? Actually, you're using it once in the vertex shader to assign a value to vec3 L, which is not actually used after that.
The compilation process for the shaders is very meticulous. You may have used it in a calculation in the Vertex Shader, but you didn't utilize the result L anywhere and it doesn't make any contribution to the final result calculated in the Fragment Shader, so it was "optimized away".
You must give L some actual use.
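Judging from the code shown, the lightDirection line was presumably meant to use L rather than V; that one-token fix gives L a use (so lightPosition stays active) and also repairs the diffuse lighting:

lightDirection = normalize(vec3(dot(L, T), dot(L, B), dot(L, N))); // was dot(V, ...)
eyeDirection = normalize(vec3(dot(V, T), dot(V, B), dot(V, N)));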
I am using a fragment shader and a vertex shader at present, and they work absolutely fine. But I cannot get my geometry shader working. I am absolutely new to it; below is what I have tried.
I am using VBOs, lighting, and textures along with some geometry, and it all works fine before adding the geometry shader. The only thing I have changed is the variable names, since I have to take the input into the geometry shader and pass the output on. So I have appended 1 to the end of the names of the variables that go out from the geometry shader to the fragment shader.
Also, I have added the headers starting with #, which were not there earlier. I am using GL_TRIANGLES to draw.
Vertex Shader
#version 330 compatibility

in vec4 position;
in vec4 color1;
in vec4 normal;
in vec2 texCoord;

uniform sampler2D Tex1;
uniform int use_texture;

out vec4 pcolor;
out vec3 N;
out vec3 L;
out vec3 R;
out vec3 V;

uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;

uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;

uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;

//varying vec3 v_normal; // vertex normal
out vec4 v_color;    // vertex color
out vec4 pos_in_eye; // vertex position in eye space
out vec2 FtexCoord;

void main() {
    gl_Position = local2clip * position;
    N = normalize(vec3(normal_matrix * normal)); // v_normal
    vec4 Lpos = world2eye * light_pos;  // light pos. in eye space
    vec4 Vpos = local2eye * position;   // pos_in_eye
    L = normalize(vec3(Lpos - Vpos));   // light vector
    R = normalize(reflect(-L, N));
    V = normalize(vec3(-Vpos));         // eye vector
    vec3 halfv = normalize(L + V);
    FtexCoord = texCoord;
    //pcolor = color1;
}
This is my Fragment Shader
#version 330 compatibility
uniform int use_texture;
in vec4 pcolor;
in vec3 N1;
in vec3 L1;
in vec3 R1;
in vec3 V1;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
uniform sampler2D Tex1;
in vec2 FtexCoord1;
void main() {
    vec4 ambient = light_ambient * mat_ambient;

    float NdotL;
    if (dot(N1, L1) < 0.0) NdotL = 0.0;
    else NdotL = dot(N1, L1);
    vec4 diffuse = light_diffuse * mat_diffuse * NdotL;

    float RdotV;
    RdotV = dot(R1, V1);
    if (NdotL == 0.0) RdotV = 0.0;
    if (RdotV < 0.0) RdotV = 0.0;
    vec4 specular = light_specular * mat_specular * pow(RdotV, mat_shine);

    vec4 texcolor;
    if (use_texture == 1) {
        texcolor = texture2D(Tex1, FtexCoord1);
        gl_FragColor = texcolor;
    }
    else
        gl_FragColor = (diffuse + ambient + specular);
}
And this is my Geometry Shader
#version 330
layout (triangles) in;
layout (triangles) out;
layout (max_vertices = 3) out;
out vec3 N1;
out vec3 L1;
out vec3 R1;
out vec3 V1;
in vec3 N;
in vec3 L;
in vec3 R;
in vec3 V;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec4 v_color1; // vertex color
out vec4 pos_in_eye1; //vertex position in eye space
out vec2 FtexCoord1;
in vec4 v_color; // vertex color
in vec4 pos_in_eye; //vertex position in eye space
in vec2 FtexCoord;
void main(void)
{
    int i;
    N1 = N;
    L1 = L;
    R1 = R;
    V1 = R;
    FtexCoord1 = FtexCoord;
    v_color1 = v_color;
    pos_in_eye1 = pos_in_eye;
    for (i = 0; i < gl_in.length(); i++)
    {
        gl_Position = gl_in[i].gl_Position;
        EmitVertex();
    }
    EndPrimitive();
}
I just want whatever was there earlier to be passed on from the vertex shader to the fragment shader via the geometry shader, so that I can modify it later. Currently the screen is just black.
The core of your problem is that you didn't bother to check for compilation errors when you built your Geometry Shader. I know that because I see several syntax errors for it. In particular:
in vec3 N;
in vec3 L;
in vec3 R;
in vec3 V;
in vec4 v_color; // vertex color
in vec4 pos_in_eye; //vertex position in eye space
in vec2 FtexCoord;
Geometry Shader inputs are always aggregated into arrays. Remember: a geometry shader operates on primitives, which are defined as a collection of one or more vertices. Each GS invocation therefore gets a set of per-vertex input values, one for each vertex in the primitive type defined by your layout(...) in qualifier.
Notice how you loop over the number of vertices in a primitive and use gl_in[i] to get the input value for each vertex in the primitive. That's how you need to access all of your Geometry Shader inputs. And you need to write each one to its corresponding output variable, then call EmitVertex. All in that loop.
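As a sketch of what that structure looks like (trimmed to two of your varyings for brevity; note also that triangles is not a valid output primitive type for a geometry shader, it has to be points, line_strip, or triangle_strip):

#version 330
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;

in vec3 N[];          // one array element per vertex of the input triangle
in vec2 FtexCoord[];
out vec3 N1;
out vec2 FtexCoord1;

void main(void)
{
    for (int i = 0; i < gl_in.length(); i++)
    {
        N1 = N[i];                 // copy this vertex's inputs to the outputs
        FtexCoord1 = FtexCoord[i];
        gl_Position = gl_in[i].gl_Position;
        EmitVertex();              // emit after all outputs are written
    }
    EndPrimitive();
}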
In the past few days I have been trying to implement parallax mapping in my engine, but it doesn't seem to work. I have looked at no fewer than 15 examples, and I'm still not able to get it working.
Here is an image:
As you can see, only the base color shows up; the height map has no visible effect.
Here are my shaders:
Fragment Shader
#version 330 core
uniform sampler2D DiffuseTextureSampler;
uniform sampler2D HeightTextureSampler;
vec2 scaleBias = vec2(0.5,0.5);
in vec3 EyeDirection_tangentspace;
in vec2 UV;
void main()
{
    float height = texture2D(HeightTextureSampler, vec2(UV.x, -UV.y)).r;
    // Our height map only has one color channel.
    float v = height * scaleBias.r - scaleBias.g;
    vec3 eye = EyeDirection_tangentspace;
    vec2 newCoords = UV + (eye.xy * v);
    vec3 rgb = texture2D(DiffuseTextureSampler, vec2(newCoords.x, -newCoords.y)).rgb;
    gl_FragColor = vec4(rgb, 1.0);
}
Vertex Shader
#version 330 core
// Input vertex data, different for all executions of this shader.
layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec2 vertexUV;
layout(location = 2) in vec3 vertexNormal_modelspace;
layout(location = 3) in vec3 vertexTangent_modelspace;
layout(location = 4) in vec3 vertexBitangent_modelspace;
// Output data ; will be interpolated for each fragment.
out vec2 UV;
out vec3 Position_worldspace;
out vec3 EyeDirection_cameraspace;
out vec3 LightDirection_cameraspace;
out vec3 LightDirection_tangentspace;
out vec3 EyeDirection_tangentspace;
// Values that stay constant for the whole mesh.
uniform mat4 MVP;
uniform mat4 V;
uniform mat4 M;
uniform mat3 MV3x3;
uniform vec3 LightPosition_worldspace;
void main()
{
    gl_Position = MVP * vec4(vertexPosition_modelspace, 1);
    Position_worldspace = (M * vec4(vertexPosition_modelspace, 1)).xyz;

    // Vector that goes from the vertex to the camera, in camera space.
    // In camera space, the camera is at the origin (0,0,0).
    vec3 vertexPosition_cameraspace = (V * M * vec4(vertexPosition_modelspace, 1)).xyz;
    EyeDirection_cameraspace = vec3(0,0,0) - vertexPosition_cameraspace;

    UV = vertexUV;

    vec3 vertexTangent_cameraspace = MV3x3 * vertexTangent_modelspace;
    vec3 vertexBitangent_cameraspace = MV3x3 * vertexBitangent_modelspace;
    vec3 vertexNormal_cameraspace = MV3x3 * vertexNormal_modelspace;

    mat3 TBNMatrix = transpose(mat3(vertexTangent_cameraspace, vertexBitangent_cameraspace, vertexNormal_cameraspace));
    EyeDirection_tangentspace = Position_worldspace - vertexPosition_modelspace.xyz;
    EyeDirection_tangentspace *= TBNMatrix;
}
A couple of things:
1. Set your scale to 1; there is no point in halving your height scale if you can't see the effect at all.
2. (Your current problem) You are sampling the texture with -UV.y. Negative texture coordinates don't do what you want here: depending on the wrap mode they will repeat or clamp, or give you a mirrored texture if you have tiling on.
3. (Your next problem) Normalize your eye vector before calculating the new coordinates in the fragment shader. If you don't normalize, the xy components of the vector will be huge, so your new texture coordinates will be massive offsets.
Try the shaders below. They are very simple and they work; you will have to add lighting back in after you get the parallax working.
Vertex shader
attribute vec3 tangent;
attribute vec3 binormal;
uniform vec3 CAMERA_POSITION;
varying vec3 eyeVec;
void main()
{
    gl_Position = ftransform();
    gl_TexCoord[0] = gl_TextureMatrix[0] * gl_MultiTexCoord0;
    mat3 TBNMatrix = mat3(tangent, binormal, gl_Normal);
    eyeVec = CAMERA_POSITION - gl_Vertex.xyz;
    eyeVec *= TBNMatrix;
}
Fragment shader
uniform sampler2D basetex;
uniform sampler2D heightMap;
uniform vec2 scaleBias;
varying vec3 eyeVec;

void main()
{
    float height = texture2D(heightMap, gl_TexCoord[0].st).r;
    float v = height * scaleBias.r - scaleBias.g;
    vec3 eye = normalize(eyeVec);
    // Offset the original texture coordinates along the view direction.
    vec2 newCoords = gl_TexCoord[0].st + (eye.xy * v);
    vec3 rgb = texture2D(basetex, newCoords).rgb;
    gl_FragColor = vec4(rgb, 1.0);
}
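A note on scaleBias: once the effect shows up, a small scale and bias (for example scale 0.04, bias 0.02; just a typical rule of thumb, tune per texture) is a common starting point for parallax mapping. The scale of 1 suggested in point 1 is only for making the effect unmistakably visible while debugging.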