Implement normal mapping with GLSL and raylib - C++

I'm trying to add normal mapping to my shaders, but I keep getting weird results.
I already implemented diffuse and specular lighting (both of which are working).
I first calculated the unpacked normal from the normal map: Ni = 2 * normalColor - 1,
then calculated the binormal as the cross product of the normal and the tangent.
Finally, I created the TBN matrix by transposing (tangent, binormal, normal).
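For reference, the construction described above usually looks like this in GLSL (a minimal sketch with placeholder names such as worldNormal, worldTangent and uv, independent of the shaders below):
// unpack the tangent-space normal stored in the map: [0,1] -> [-1,1]
vec3 n = normalize(texture(normalMap, uv).rgb * 2.0 - 1.0);
// binormal (bitangent) from the interpolated normal and tangent;
// a 4-component tangent usually carries the handedness in .w
vec3 N = normalize(worldNormal);
vec3 T = normalize(worldTangent.xyz);
vec3 B = cross(N, T) * worldTangent.w;
// the columns (T, B, N) map tangent space -> world space,
// so the transpose maps world space -> tangent space
mat3 TBN    = mat3(T, B, N);
mat3 invTBN = transpose(TBN);
// either bring n into world space with TBN, or bring the light and view
// vectors into tangent space with invTBN -- mixing the two spaces breaks the lighting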
Here is my code and some screenshots.
Vertex shader
#version 330
// Input vertex attributes
in vec3 vertexPosition;
in vec2 vertexTexCoord;
in vec3 vertexNormal;
in vec4 vertexColor;
in vec4 vertexTangent;
// Input uniform values
uniform mat4 mvp;
// Output vertex attributes (to fragment shader)
out vec2 fragTexCoord;
out vec4 fragColor;
out vec3 fragPosition;
out vec4 fragTangent;
out vec3 fragNormal;
// out vec3 fragBiTangent;
// NOTE: Add here your custom variables
void main()
{
// Send vertex attributes to fragment shader
fragTexCoord = vertexTexCoord;
fragColor = vertexColor;
//added
fragNormal = vertexNormal;
fragPosition = vertexPosition;
fragTangent = vertexTangent;
// Calculate final vertex position
gl_Position = mvp*vec4(vertexPosition, 1.0);
}
Fragment Shader
#version 330
// Input vertex attributes (from vertex shader)
in vec2 fragTexCoord;
in vec4 fragColor;
in vec3 fragNormal;
in vec3 fragPosition;
in vec4 fragTangent;
// Input uniform values
uniform sampler2D texture0; // diffuse texture
uniform vec4 colDiffuse;
uniform vec3 lightPos; // light position
uniform mat4 matModel; // pos, rotation and scaling of object
uniform vec3 viewPos; // eyes position
uniform sampler2D normalMap; // normal texture
// Output fragment color
out vec4 finalColor;
// NOTE: Add here your custom variables
vec3 unHomogenous(vec4 v)
{
return v.xyz/v.w;
}
void main()
{
//=====================LOAD TEXTURES=============================
// Texel color fetching from texture sampler
vec4 texelColor = texture(texture0, fragTexCoord);
// obtain normal from normal map in range [0,1]
vec3 normalColor = texture(normalMap, fragTexCoord).xyz;
//=======================PARAMETERS==============================
// calculate normal in world coordinates
mat3 matNormal = transpose(inverse(mat3(matModel))); // NOTE: expensive per fragment; better computed once on the CPU and passed as a uniform
vec3 worldNormal = normalize(matNormal * fragNormal);
// Calculate the location of this fragment (pixel) in world coordinates
vec3 worldPosition = unHomogenous(matModel * vec4(fragPosition, 1.0));
//=======================NORMAL MAPPING========================
// transform normal vector to range [-1,1]
vec3 normal = normalize(normalColor * 2.0 - 1.0);
normal = normalize(matNormal * normal);
vec3 tangent = normalize(matNormal * fragTangent.xyz);
vec3 binormal = normalize(cross(normal, tangent));
mat3 TBN = mat3(tangent, binormal, worldNormal);
TBN = transpose(TBN);
//=======================DIFFUSE LIGHT===========================
// Shading is calculated as diffuse = (LightVect dot NormalVect) * diffuse color
vec3 ambient = 0.01 * texelColor.xyz;
// find light source : L = Lightposition - surfacePosition
vec3 lightDir = normalize(lightPos - worldPosition);
lightDir *= TBN; // NOT WORKING
// diffuse the light using the dot product:
float shading = clamp(dot(worldNormal, lightDir), 0.1, 1.0);
vec3 diffuse = shading * texelColor.xyz;
//=======================SPECULAR LIGHTING=====================
//intensity between 0 and 1
float specularStrength = 1;
// //calculate the view direction vector and corresponding reflect vector along the normal axis
vec3 viewDir = normalize(viewPos - worldPosition);
viewDir *= TBN; // NOT WORKING
vec3 reflectDir = reflect(-lightDir, worldNormal);
//Note that we negate the lightDir vector.
//The reflect function expects the first vector to point from the light source towards the fragment's position,
//but the lightDir vector is currently pointing the other way
// calculate the specular component; 32 is the shininess value of the highlight
int shininess = 32;
float spec = pow(clamp(dot(viewDir, reflectDir), 0.1, 1.0), shininess);
vec3 lightColor = vec3(1.0,1.0,1.0);
vec3 specular = specularStrength * spec * texelColor.xyz;
//=======================RENDER================================
finalColor = vec4(ambient + diffuse + specular, 1.0);
}
main.cpp
#include "raylib.h"
#include "rlgl.h"
#include <math.h>
#include <raymath.h>
int main(void)
{
// Initialization
//[...]
//=====================WALL=======================//
Vector3 position = { -2.5f, 3.0f, 0.0f };
Model model = LoadModel("assets/models/wall.obj");
Texture2D texture = LoadTexture("assets/textures/cgaxis_pbr_17_stone_wall_5_diffuse.png");
// Set normal mapping
Texture2D normal_texture = LoadTexture("assets/textures/cgaxis_pbr_17_stone_wall_5_normal.png");
model.materials[0].maps[MATERIAL_MAP_NORMAL].texture = normal_texture;
//==========Generate mesh and diffuse texture=========//
GenMeshTangents(model.meshes);
model.materials[0].maps[MATERIAL_MAP_DIFFUSE].texture = texture; // Set map diffuse texture
//===================SELECT SHADERS====================//
// Shader shader = LoadShader("assets/shaders/base.vs", "assets/shaders/base.fs"); // diffuse light
// Shader shader = LoadShader("assets/shaders/specular.vs", "assets/shaders/specular.fs"); // diff + specular
Shader shader = LoadShader("assets/shaders/normal_mapping.vs", "assets/shaders/normal_mapping.fs"); // diff + spec + normal mapping
// Set shader effect to 3d model
model.materials[0].shader = shader;
//==================Light======================//
Vector3 sunPos = {0.0f, 2.0f, 0.0f };
float rotation = 90.0f;
float radius = 5.0f;
// Diffuse light
int lightPosLoc = GetShaderLocation(shader, "lightPos");
float lightPos[] = {sunPos.x, sunPos.y, sunPos.z};
SetShaderValue(shader, lightPosLoc, lightPos, SHADER_UNIFORM_VEC3);
//specular light
int specularPosLoc = GetShaderLocation(shader, "viewPos");
float specularPos[] = {camera.position.x, camera.position.y, camera.position.z};
SetShaderValue(shader, specularPosLoc, specularPos, SHADER_UNIFORM_VEC3);
//RUN & draw
//[...]
// De-Initialization
//--------------------------------------------------------------------------------------
CloseWindow(); // Close window and OpenGL context
//--------------------------------------------------------------------------------------
return 0;
}
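One raylib-specific detail worth checking (a sketch using the variables from the listing above; verify against the raylib version in use): raylib only binds a material map's texture to the sampler whose location is registered in shader.locs, and by default it looks for a sampler called texture2 for the normal map rather than normalMap, so the location usually has to be wired explicitly. viewPos should also be re-uploaded whenever the camera moves:
// tell raylib which sampler uniform receives MATERIAL_MAP_NORMAL;
// without this the normal map texture may never be bound for this shader
shader.locs[SHADER_LOC_MAP_NORMAL] = GetShaderLocation(shader, "normalMap");
// inside the update loop: the camera can move, so refresh viewPos every frame
float camPos[3] = { camera.position.x, camera.position.y, camera.position.z };
SetShaderValue(shader, GetShaderLocation(shader, "viewPos"), camPos, SHADER_UNIFORM_VEC3);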
[Screenshot: diffuse + specular]
[Screenshot: normal mapping]

Related

OpenGL GLFW Changing mesh faces individual colors

Currently I am rendering mesh triangles like this:
// draw the same polygons again
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
shader.setVec3("objectColor", obj_color);
glDrawElements(GL_TRIANGLES, static_cast<unsigned int>(indices.size()), GL_UNSIGNED_INT, 0);
The problem with this code is that I am setting the object color inside the shader for the whole mesh.
What would be a good way to render a single mesh whose faces have different colors?
For now I only know how to set vertex colors and pass them to the fragment shader.
What are the most common ways to set individual face colors?
The only idea I have is to duplicate the mesh vertices to avoid vertex color interpolation.
My current shader looks like this:
Vertex Shader:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNormal;
out vec3 FragPos;
out vec3 Normal;
out vec3 LightPos;
uniform vec3 lightPos;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main()
{
gl_Position = projection * view * model * vec4(aPos, 1.0);
FragPos = vec3(view * model * vec4(aPos, 1.0));
Normal = mat3(transpose(inverse(view * model))) * aNormal;
LightPos = vec3(vec4(lightPos, 1.0)); // Transform world-space light position to view-space light position
// FragPos = vec3(model * vec4(aPos, 1.0));
//Normal = mat3(transpose(inverse(model))) * aNormal;
// gl_Position = projection * view * vec4(FragPos, 1.0);
}
Fragment Shader:
#version 330 core
out vec4 FragColor;
in vec3 FragPos;
in vec3 Normal;
in vec3 LightPos;
// extra in variable, since we need the light position in view space we calculate this in the vertex shader
uniform vec3 lightColor;
uniform vec3 objectColor;
uniform float f;
uniform float transparency;
void main()
{
//flat shading
// vec3 x_ = dFdx(FragPos);
// vec3 y_= dFdy(FragPos);
// vec3 normal_ = cross(x_, y_);
// vec3 norm_ = normalize(normal_);
// ambient
float ambientStrength = 0.75;
vec3 ambient = ambientStrength * lightColor;
// diffuse
vec3 norm = normalize(Normal);
vec3 lightDir = normalize(LightPos - FragPos);
float diff = max(dot(norm, lightDir), 0.0);//change "norm_" to "norm" avoid the performance warning and have unwelded view
vec3 diffuse = diff * lightColor;
// specular
float specularStrength = 0.01;
vec3 viewDir = normalize(-FragPos); // the viewer is always at (0,0,0) in view-space, so viewDir is (0,0,0) - Position => -Position
vec3 reflectDir = reflect(-lightDir, norm);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), 32);
vec3 specular = specularStrength * spec * lightColor;
vec3 shading = (ambient + diffuse + specular)*objectColor;
//float f = 0.75;
float r_interpolated = shading[0] + f * (objectColor[0] - shading[0]);
float g_interpolated = shading[1] + f * (objectColor[1] - shading[1]);
float b_interpolated = shading[2] + f * (objectColor[2] - shading[2]);
vec3 result = vec3(r_interpolated,g_interpolated,b_interpolated);
FragColor = vec4(result, transparency);
}
You can use the flat Interpolation qualifier:
The value will not be interpolated. The value given to the fragment shader is the value from the Provoking Vertex for that primitive.
Vertex shader
// [...]
layout (location = 0) in vec3 aColor;
flat out vec3 vColor;
void main()
{
vColor = aColor;
// [...]
}
Fragment shader
// [...]
flat in vec3 vColor;
void main()
{
FragColor = vec4(vColor, 1.0);
}
With this implementation, the entire triangle primitive is rendered with one color. If you find an intelligent system for assigning the color attributes to the vertices, you can render all triangles with different colors, e.g. two triangles with the indices 0-1-2 and 1-2-3. The color attribute of vertex 0 defines the color of the first triangle and the color attribute of vertex 1 defines the color of the 2nd triangle.
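Note that by default the provoking vertex is the last vertex of each primitive; since OpenGL 3.2 it can be switched to the first vertex, which matches the numbering used in the example above:
// make the first vertex of each triangle the provoking vertex
glProvokingVertex(GL_FIRST_VERTEX_CONVENTION);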
An alternative way would be to create an array of colors for each triangle primitive and store this color array in a Shader Storage Buffer Object. Use gl_VertexID to address the color in the vertex shader.
layout(std430, binding = 0) buffer primitiveColors
{
vec4 colors[];
};
void main()
{
vColor = colors[gl_VertexID / 3];
// [...]
}
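As context for the snippet above (triangleCount and the buffer contents are placeholders): shader storage buffers require #version 430 or the ARB_shader_storage_buffer_object extension, and the buffer is created and bound to binding point 0 on the C++ side roughly like this:
// one RGBA color per triangle
std::vector<float> faceColors(triangleCount * 4, 1.0f);
GLuint ssbo = 0;
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
glBufferData(GL_SHADER_STORAGE_BUFFER, faceColors.size() * sizeof(float), faceColors.data(), GL_STATIC_DRAW);
// binding point 0 matches "layout(std430, binding = 0)" in the shader
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 0, ssbo);
Also note that with indexed drawing (glDrawElements), gl_VertexID is the index fetched from the element buffer, so the gl_VertexID / 3 addressing maps cleanly to triangles only for non-indexed geometry.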

Point lighting with shadow mapping and camera moving

I ran into a problem when trying to create a spotlight in my scene. The problem is that my camera moves around the scene, and because of that something is wrong with the lighting. In addition, I see only a black screen. I understand that I missed a transformation somewhere, or did an extra one, but where, I really do not know.
Below is the code for my shaders.
Fragment shader:
#version 330 core
precision mediump float; // Set the default precision to medium. We don't need as high of a
// precision in the fragment shader.
#define MAX_LAMPS_COUNT 8 // Max lamps count.
uniform vec3 u_ViewPos; // Camera position
uniform int u_LampsCount; // Lamps count
uniform int u_ShadowMapWidth = 1024; // shadow map width / default is 1024
uniform int u_ShadowMapHeight = 1024; // shadow map height / default is 1024
uniform float brightnessThreshold = 0.5; // brightness threshold variable
uniform float far_plane = 16;
varying mat4 v_MVMatrix; // Model View matrix
varying mat3 v_TBN; // Tangent Bitangent Normal matrix
varying vec4 v_Position; // Position for this fragment.
varying vec3 v_Normal; // Interpolated normal for this fragment.
varying vec2 v_Texture; // Texture coordinates.
varying float v_NormalMapping; // Is normal mapping enabled 0 - false, 1 - true
struct Lamp {
float ambientStrength;
float diffuseStrength;
float specularStrength;
float kc; // constant term
float kl; // linear term
float kq; // quadratic term
int shininess;
vec3 lampPos; // in eye space, cameraViewMatrix * lamp world coordinates
vec3 lampColor;
};
uniform samplerCube shadowMaps[MAX_LAMPS_COUNT];
uniform struct Mapping {
sampler2D ambient;
sampler2D diffuse;
sampler2D specular;
sampler2D normal;
} u_Mapping;
uniform Lamp u_Lamps[MAX_LAMPS_COUNT];
vec3 norm;
vec3 fragPos;
float shadow;
// output colors
layout(location = 0) out vec4 fragColor;
layout(location = 1) out vec4 fragBrightColor;
float calculateShadow(int textureIndex, vec3 lightPos) {
// get vector between fragment position and light position
vec3 fragToLight = fragPos - lightPos;
// use the light to fragment vector to sample from the depth map
float closestDepth = texture(shadowMaps[textureIndex], fragToLight).r;
// it is currently in linear range between [0,1]. Re-transform back to original value
closestDepth *= far_plane;
// now get current linear depth as the length between the fragment and light position
float currentDepth = length(fragToLight);
// now test for shadows
float bias = 0.05;
float shadow = currentDepth - bias > closestDepth ? 1.0 : 0.0;
//fragColor = vec4(vec3(closestDepth / far_plane), 1.0); // visualization
return shadow;
}
float calculateAttenuation(Lamp lamp) {
float distance = length(lamp.lampPos - fragPos);
return 1.0 / (
lamp.kc +
lamp.kl * distance +
lamp.kq * (distance * distance)
);
}
vec4 toVec4(vec3 v) {
return vec4(v, 1);
}
// The entry point for our fragment shader.
void main() {
// Transform the vertex into eye space.
fragPos = vec3(v_MVMatrix * v_Position);
vec3 viewDir = normalize(u_ViewPos - fragPos);
if (v_NormalMapping == 0) norm = vec3(normalize(v_MVMatrix * vec4(v_Normal, 0)));
else { // using normal map if normal mapping enabled
norm = texture2D(u_Mapping.normal, v_Texture).rgb;
norm = normalize(norm * 2.0 - 1.0); // from [0, 1] to [-1, 1]
norm = normalize(v_TBN * norm);
}
vec3 ambientResult = vec3(0, 0, 0); // result of ambient lighting for all lamps
vec3 diffuseResult = vec3(0, 0, 0); // result of diffuse lighting for all lamps
vec3 specularResult = vec3(0, 0, 0); // result of specular lighting for all lamps
for (int i = 0; i<u_LampsCount; i++) {
// attenuation
float attenuation = calculateAttenuation(u_Lamps[i]);
// ambient
vec3 ambient = u_Lamps[i].ambientStrength * u_Lamps[i].lampColor * attenuation;
// diffuse
vec3 lightDir = normalize(u_Lamps[i].lampPos - fragPos);
float diff = max(dot(norm, lightDir), 0.0);
vec3 diffuse = u_Lamps[i].diffuseStrength * diff * u_Lamps[i].lampColor * attenuation;
// specular
vec3 reflectDir = reflect(-lightDir, norm);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_Lamps[i].shininess);
vec3 specular = u_Lamps[i].specularStrength * spec * u_Lamps[i].lampColor * attenuation;
// fragment position in light space
//fragLightSpacePos = u_Lamps[i].lightSpaceMatrix * u_Lamps[i].lightModelMatrix * v_Position;
// calculate shadow
shadow = calculateShadow(i, u_Lamps[i].lampPos);
// result for this(i) lamp
ambientResult += ambient;
diffuseResult += diffuse * (1-shadow);
specularResult += specular * (1-shadow);
}
fragColor =
toVec4(ambientResult) * texture2D(u_Mapping.ambient, v_Texture) +
toVec4(diffuseResult) * texture2D(u_Mapping.diffuse, v_Texture) +
toVec4(specularResult) * texture2D(u_Mapping.specular, v_Texture);
// brightness calculation
//float brightness = dot(fragColor.rgb, vec3(0.2126, 0.7152, 0.0722));
//if (brightness > brightnessThreshold) fragBrightColor = vec4(fragColor.rgb, 1.0);
fragBrightColor = vec4(0, 0, 0, 1);
}
Vertex shader:
#version 130
uniform mat4 u_MVPMatrix; // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.
uniform float u_NormalMapping; // Normal mapping; 0 - false, 1 - true
attribute vec4 a_Position; // Per-vertex position information we will pass in.
attribute vec3 a_Normal; // Per-vertex normal information we will pass in.
attribute vec3 a_Tangent; // Per-vertex tangent information we will pass in.
attribute vec3 a_Bitangent; // Per-vertex bitangent information we will pass in.
attribute vec2 a_Texture; // Per-vertex texture information we will pass in.
varying mat4 v_MVMatrix; // This will be passed into the fragment shader.
varying mat3 v_TBN; // This will be passed into the fragment shader.
varying vec4 v_Position; // This will be passed into the fragment shader.
varying vec3 v_Normal; // This will be passed into the fragment shader.
varying vec2 v_Texture; // This will be passed into the fragment shader.
varying float v_NormalMapping; // This will be passed into the fragment shader.
void main() {
// creating TBN (tangent-bitangent-normal) matrix if normal mapping enabled
if (u_NormalMapping == 1) {
vec3 T = normalize(vec3(u_MVMatrix * vec4(a_Tangent, 0.0)));
vec3 B = normalize(vec3(u_MVMatrix * vec4(a_Bitangent, 0.0)));
vec3 N = normalize(vec3(u_MVMatrix * vec4(a_Normal, 0.0)));
mat3 TBN = mat3(T, B, N);
v_TBN = TBN;
}
// gl_Position is a special variable used to store the final position.
// Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
gl_Position = u_MVPMatrix * a_Position;
// sending all needed variables to fragment shader
v_Position = a_Position;
v_Texture = a_Texture;
v_NormalMapping = u_NormalMapping;
v_MVMatrix = u_MVMatrix;
v_Normal = a_Normal;
}
Vertex shadow shader:
#version 130
attribute vec3 a_Position;
uniform mat4 u_ModelMatrix;
void main() {
gl_Position = u_ModelMatrix * vec4(a_Position, 1.0);
}
Fragment shadow shader:
#version 330 core
in vec4 fragPos;
uniform vec3 lightPos; // cameraViewMatrix * lamp world coordinates
uniform float far_plane = 16;
void main()
{
float lightDistance = length(fragPos.xyz - lightPos);
// map to [0;1] range by dividing by far_plane
lightDistance = lightDistance / far_plane;
// write this as modified depth
gl_FragDepth = lightDistance;
}
Geometry shadow shader:
#version 330 core
layout (triangles) in;
layout (triangle_strip, max_vertices=18) out;
uniform mat4 shadowMatrices[6];
out vec4 fragPos; // FragPos from GS (output per emitvertex)
void main() {
for(int face = 0; face < 6; ++face) {
gl_Layer = face; // built-in variable that specifies to which face we render.
for(int i = 0; i < 3; ++i) // for each triangle's vertices
{
fragPos = gl_in[i].gl_Position;
gl_Position = shadowMatrices[face] * fragPos;
EmitVertex();
}
EndPrimitive();
}
}
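For context (a sketch of the usual setup, not the code from the question): the gl_Layer writes in the geometry shader only select a cube-map face if the whole cube map is attached to the framebuffer as a layered depth attachment, roughly like this:
// depth cube map, one face per gl_Layer value 0..5
GLuint depthCubemap = 0;
glGenTextures(1, &depthCubemap);
glBindTexture(GL_TEXTURE_CUBE_MAP, depthCubemap);
for (unsigned int i = 0; i < 6; ++i)
    glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, GL_DEPTH_COMPONENT,
                 1024, 1024, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
// attach the whole cube map as a layered depth attachment, no color output
GLuint depthFBO = 0;
glGenFramebuffers(1, &depthFBO);
glBindFramebuffer(GL_FRAMEBUFFER, depthFBO);
glFramebufferTexture(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, depthCubemap, 0);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);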
And here is a video demonstrating the shadow map visualization:
https://youtu.be/zaNXGG1qLaw
I understand that I missed a transformation somewhere, or did an extra one, but where, I really do not know.
The content of shadowMaps[textureIndex] is probably a depth map taken in "light space". This means it is a depth map as seen from the light source.
But
fragPos = vec3(v_MVMatrix * v_Position);
and
struct Lamp {
.....
vec3 lampPos; // in eye space, cameraViewMatrix * lamp world coordinates
.....
};
are in view space coordinates. This means that
vec3 fragToLight = fragPos - lightPos;
is a direction in view space, as seen from the camera.
If you do
float closestDepth = texture(shadowMaps[textureIndex], fragToLight).r;
then a "light space" map is accessed by a "view space" vector. The transformation from view space coordiantes to "light space" coordiantes is missing.
To solve the issue you need a matrix which transforms from world coordinates to "light space" coordinates. This is the inverse of the view-projection matrix that you used when you created the shadowMaps.
mat4 inverse_light_vp_mat[MAX_LAMPS_COUNT];
The fragment position has to be transformed to world coordinates, then it has to be transformed to "light space" coordinates, with inverse_light_vp_mat:
varying mat4 v_ModelMatrix; // Model matrix
vec4 fragLightPos = inverse_light_vp_mat[textureIndex] * v_ModelMatrix * v_Position;
fragLightPos.xyz /= fragLightPos.w;
In "light space" the light position is vec3( 0.0, 0.0, 0.0 ), because the position of the light source is the origin of the "light space". So the look up in the shadowMaps can be done directly with fragLightPos:
float closestDepth = texture(shadowMaps[textureIndex], fragLightPos.xyz).r;
The problem was solved. It was caused by the fact that I was treating the shadow map as being in camera space (view space), when it needed to be in world space. The shadow calculation itself also had to be done entirely in world space.
Fragment shader:
vec3 fragToLight = vec3(model * v_Position) - lightPosWorldSpace;
or
vec3 fragToLight = vec3(model * v_Position) - vec3(inverse(view) * lightPos); // here lightPos is a vec4
Fragment shadow shader:
float lightDistance = length(fragPos.xyz - lightPos); // lightPos is the lamp position in world space
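Putting that fix together, the world-space lookup in the main fragment shader ends up looking roughly like this (a sketch; the model matrix and lightPosWorldSpace are assumed to be supplied by the application):
float calculateShadow(int textureIndex, vec3 lightPosWorldSpace) {
    // both vectors in world space
    vec3 fragPosWorld = vec3(model * v_Position);
    vec3 fragToLight  = fragPosWorld - lightPosWorldSpace;
    // depth stored in the cube map, re-expanded from [0,1] to [0,far_plane]
    float closestDepth = texture(shadowMaps[textureIndex], fragToLight).r * far_plane;
    // current distance to the light, compared with a small bias
    float currentDepth = length(fragToLight);
    float bias = 0.05;
    return currentDepth - bias > closestDepth ? 1.0 : 0.0;
}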

Weird view/light behaviour with normal mapping

I'm trying to implement normal/steep parallax mapping, but I'm getting some weird artefacts. It looks like the view & light vectors are being distorted when they are transformed into tangent space, which is causing lighting and the uv offsetting of the parallax to be incorrect.
Vertex shader source:
// parallax.vert
#version 330
// Some drivers require the following
precision highp float;
//per vertex inputs stored on GPU memory
in vec3 in_Position;
in vec3 in_Normal;
in vec4 tangent;
//per mesh data sent at rendering time
uniform mat4 model; //object to world space transformations
uniform mat4 view; //world to eye space transformations
uniform mat4 projection; //eye space to screen space
uniform mat3 normalMat; //for transforming normals in
uniform vec4 lightPosition; //light position in world space
uniform vec3 eyePosition; //eye position in world space
//outputs to fragment shader
in vec2 in_TexCoord;
// multiply each vertex position by the MVP matrix
// and find V, L, and TBN Matrix vectors for the fragment shader
out VertexData
{
vec3 ts_L; //view vector tangent space
vec3 ts_V; //light vector tangent space
vec2 ex_TexCoord;
float dist;
} vertex;
void main(void)
{
//view space position for lighting calculations
vec3 position = ( ( view * model ) * vec4( in_Position,1.0 ) ).xyz;
//calculate view vector and light vector in view space
vec3 ex_V = -position;
vec3 ex_L = ( view * vec4( lightPosition.xyz,1.0 ) ).xyz - position;
vertex.ex_TexCoord = in_TexCoord;
//now calculate tbn matrix
//calculate biTangent, in view space
vec3 normal = normalMat*in_Normal;
vec3 tan = normalMat * tangent.xyz;
vec3 bitangent = normalize( cross( normal, tan.xyz ) * tangent.w );
//fragment stage will need this value for calculating light attenuation
vertex.dist = length( ex_L );
//calculate transpose tangent space matrix, as we are doing everything in T space in fragment stage
mat3 transTBN = transpose( mat3( tan, bitangent, normal ) );
//transform view space vertex view & light vectors into tangent space
vertex.ts_V = normalize( transTBN * ex_V );
vertex.ts_L = normalize( transTBN * ex_L );
gl_Position = projection * vec4(position,1.0);
}
fragment shader source:
// parallax.frag
#version 330
// Some drivers require the following
precision highp float;
struct lightStruct
{
vec4 ambient;
vec4 diffuse;
vec4 specular;
vec4 position;
float radius;
};
struct materialStruct
{
vec4 ambient;
vec4 diffuse;
vec4 specular;
vec4 emissive;
float shininess;
};
//uniforms sent at render time
uniform lightStruct light;
uniform materialStruct material;
uniform vec2 scaleBias;
//texture information
uniform sampler2D colourMap;
uniform sampler2D normalMap;
uniform sampler2D specularMap;
uniform sampler2D heightMap;
uniform sampler2D glossMap;
//inputs from vertex shader stage
in VertexData
{
vec3 ts_L; //view vector tangent space
vec3 ts_V; //light vector tangent space
vec2 ex_TexCoord;
float dist;
} vertex;
//final fragment colour
layout(location = 0) out vec4 out_Color;
void main(void) {
//calculate halfway vector for blinn phong
vec3 Half = normalize( vertex.ts_V + vertex.ts_L );
//and create some temporary types algorithm will need
vec2 texCoord;
vec3 diffuseTexel ;
vec3 specularTexel;
float gloss;
vec3 normal;
//first sample texel value from height map
float height = texture( heightMap, vertex.ex_TexCoord.st ).r;
//-----------------------------------------------
// steep parallax
float start = 1.0;
int numsteps = 50;
float step = 1.0/numsteps;
vec2 offset = vertex.ex_TexCoord;
vec2 delta = -vertex.ts_V.xy * scaleBias.r / (vertex.ts_V.z * numsteps);
for (int i = 0; i < numsteps; i++)
{
if(height < start)
{
start-=step;
offset+=delta;
height = texture(heightMap,offset).r;
}
else
{
texCoord = offset;
break;
}
}
//---------------------------------------------------
//normal maps are encoded with values between 0.0 and 0.5 being negative, so we need to remove the encoding.
//sample textures
diffuseTexel = texture(colourMap,texCoord.st).rgb;
specularTexel = texture(specularMap,texCoord.st).rgb;
normal = normalize(texture(normalMap,texCoord.st).rgb * 2.0 - 1.0);
gloss = texture(glossMap,texCoord.st).r;
//--------------------------------------------------
//Lighting -- Blinn/Phong Lighting with specular, normal and gloss mapping
float lambert = max( dot (normal, vertex.ts_L ), 0.0 );
vec3 litColour = vec3(0.0,0.0,0.0);
//if light is worth calculating
if (lambert > 0.0)
{
vec3 R = normalize(-reflect(vertex.ts_L,normal));
float specularPower = pow(max(dot(R,Half),0.0),material.shininess);
litColour += light.diffuse.xyz * material.diffuse.xyz * lambert;
litColour += light.specular.xyz * material.specular.xyz * specularPower * gloss;
float attenuation = 1.0 + (0.01 * vertex.dist*vertex.dist) + (0.01 * vertex.dist);
attenuation = 1.0/attenuation;
litColour *= attenuation;
}
out_Color = vec4( litColour *diffuseTexel*(specularTexel) ,1.0);
}
The normal matrix is being calculated by taking the
transpose( inverse( modelview ) );
I've probably missed something really obvious, but I can't see it, and would be grateful for some input.
Update:
I've modified the shaders to perform the lighting & offsetting based on world-space vectors, which has now produced the following results:
Lighting is now correct, and the parallax is good when viewed from close to the surface normal, but...
As the angle between view & normal increases, the parallax goes very weird.
Again, I'd be grateful for some help in this.
vec3 ex_V = -position;
Shouldn't this be:
vec3 ex_V = eyePosition - position;

Position of lights in blinn light model

I have implemented Blinn-Phong shading in my fragment shader to calculate the lighting of each fragment with multiple lights. The computation seems to be all good except for one part: my directional light is computed correctly, but my positional light is not.
Pictures say more than words, so:
[Screenshot: wrong light positions]
The shader works like this:
I update the light array with the lights' positions in world space. Then, for each object, I upload its material information and draw it using this fragment shader to get the correct color. The vertex shader's only task (regarding lighting) is to pass the normals on to the fragment shader. All my computations are done in world space.
I already checked the position of each light and they are correct, which is why I came here. I think I made a mistake when computing the cosine of the angle of incidence, but I don't know where.
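For context, uploading such a light array in world space from the application typically looks something like this (a sketch; program, the light values and nbLight are placeholders):
// one point light (position.w == 1.0) given in world space
glUseProgram(program);
glUniform4f(glGetUniformLocation(program, "lights[0].position"), 2.0f, 4.0f, 1.0f, 1.0f); // w = 0.0 would mean a directional light
glUniform3f(glGetUniformLocation(program, "lights[0].diffuse"), 1.0f, 1.0f, 1.0f);
glUniform3f(glGetUniformLocation(program, "lights[0].specular"), 1.0f, 1.0f, 1.0f);
glUniform1f(glGetUniformLocation(program, "lights[0].attenuation"), 0.1f);
glUniform1i(glGetUniformLocation(program, "nbLight"), 1);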
Here is the code of my fragment shader:
Edit: I corrected some mistakes with the kind help of Nico Schertler; here is the new code with both the fragment and vertex shaders.
Edit 2: After another round of checks, it was in fact not a misplaced light position, but an error in the computation of the dot product between the normal and the light direction.
Fragment Shader
#version 330 compatibility
struct light {
vec4 position; // position.w indicates if it's a direction or point lighting
vec3 diffuse; // diffuse color of the light
vec3 specular; // specular color of the light
float attenuation; // attenuation of the light
};
struct material {
vec3 diffuse;
vec3 specular;
vec3 ambient;
float shininess;
};
uniform material objMaterial;
uniform light lights[9];
uniform int nbLight;
uniform mat4 viewMatrix; // Matrix to view coordinate
uniform mat4 viewInvMatrix; // invert view matrix to find the position of the viewer
in vec3 worldNormal; // normal in worldspace
in vec3 position; // position in worldspace
vec3 ambient = vec3(0.2,0.2,0.2);
void main()
{
int i;
vec3 viewDirection = normalize((viewInvMatrix * vec4(0.0, 0.0, 0.0, 1.0) - vec4(position, 1.)).xyz);
vec3 lightDirection, lightPositon;
vec3 normalizeWorldNormal = normalize(worldNormal);
float attenuation;
vec3 totalLight = ambient * objMaterial.ambient;
float cosAngIncidence;
float blinnTerm;
for(i=0; i<9; i=i+1){
if(i < nbLight){
lightPositon = lights[i].position.xyz;
if (0.0 == lights[i].position.w) // directional light?
{
attenuation = 1.0; // no attenuation
lightDirection = normalize(lightPositon);
}
else // point light
{
vec3 positionToLightSource = lightPositon - position;
lightDirection = normalize(positionToLightSource);
float dis = length(positionToLightSource);
attenuation = 1.0 / (lights[i].attenuation * dis);
}
cosAngIncidence = dot(normalizeWorldNormal, lightDirection);
cosAngIncidence = clamp(cosAngIncidence, 0, 1);
vec3 halfAngle = normalize(lightDirection + viewDirection);
blinnTerm = dot(normalizeWorldNormal, halfAngle);
blinnTerm = clamp(blinnTerm, 0, 1);
blinnTerm = cosAngIncidence != 0.0 ? blinnTerm : 0.0;
blinnTerm = pow(blinnTerm, objMaterial.shininess*5);
totalLight = totalLight
+ (lights[i].diffuse * objMaterial.diffuse * attenuation * cosAngIncidence)
+ (lights[i].specular * objMaterial.specular * attenuation * blinnTerm);
}
}
gl_FragColor = vec4(totalLight.xyz, 1.0);
}
Vertex Shader
#version 330 compatibility
layout(location=0) in vec3 vx_pos;
layout(location=1) in vec3 vx_nor;
layout(location=3) in vec3 vx_col;
uniform mat4 modelMatrix; // Matrix to world coordinate
uniform mat4 normalInvTransMatrix; // Matrix for normal in world coordinate
uniform mat4 viewMatrix; // Matrix to view coordinate
uniform mat4 projectionMatrix; // Matrix to projection coordinate
out vec3 worldNormal;
out vec3 position;
void main() {
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(vx_pos,1.0);
worldNormal = vec3(normalInvTransMatrix * vec4(vx_nor, 0.));
position = vec3(modelMatrix * vec4(vx_pos,0.));
}

Add texture to a tree (represented by spheres and cylinders)

I'm rendering a tree using spheres and cylinders, and I use normal mapping to map a bark texture
onto the spheres and cylinders. Below is my vertex shader:
// Normal mapping light shader
// Vertex Shader
#version 130
// Incoming per vertex... position and normal
in vec4 vVertex;
in vec3 vNormal;
in vec4 vTexture0;
uniform mat4 mvpMatrix;
uniform mat4 mvMatrix;
uniform mat3 normalMatrix;
uniform vec3 vLightPosition;
// Color to fragment program
vec3 vEyeNormal;
smooth out vec3 vVaryingLightDir;
smooth out vec2 vTexCoords;
vec3 vTangent = vec3(1.0, 0.0, 0.0);
void main(void)
{
// Tangent Basis
vec3 b, t, v;
// Get surface normal in eye coordinates
vEyeNormal = normalMatrix * vNormal;
// Get vertex position in eye coordinates
vec4 vPosition4 = mvMatrix * vVertex;
vec3 vPosition3 = vPosition4.xyz / vPosition4.w;
// Get vector to light source
vVaryingLightDir = normalize(vLightPosition - vPosition3);
// Build Tangent Basis
t = normalize(normalMatrix * vTangent);
b = cross(vEyeNormal, t);
v.x = dot(vVaryingLightDir, t);
v.y = dot(vVaryingLightDir, b);
v.z = dot(vVaryingLightDir, vEyeNormal);
vVaryingLightDir = normalize(v);
// Pass along the texture coordinates
vTexCoords = vTexture0.st;
// Don't forget to transform the geometry!
gl_Position = mvpMatrix * vVertex;
}
And this is the fragment shader:
// Normal mapping light shader
// Fragment Shader
#version 130
uniform vec4 ambientColor;
uniform vec4 diffuseColor;
uniform sampler2D colorMap;
uniform sampler2D normalMap;
smooth in vec3 vVaryingLightDir;
smooth in vec2 vTexCoords;
out vec4 vFragColor;
void main(void)
{
vec3 vTextureNormal = texture2D(normalMap, vTexCoords).xyz;
vTextureNormal = (vTextureNormal - 0.5) * 2.0;
// Dot product gives us diffuse intensity
float diff = max(0.0, dot(normalize(vTextureNormal), normalize(vVaryingLightDir)));
// Multiply intensity by diffuse color, force alpha to 1.0
vFragColor = diff * diffuseColor;
// Add in ambient light
vFragColor += ambientColor;
vFragColor.rgb = min(vec3(1.0,1.0,1.0), vFragColor.rgb);
// Modulate in the texture
vFragColor *= texture(colorMap, vTexCoords);
}
But what I get is an ugly effect:
Can anybody tell me how I can make the bump mapping smooth?