Ray tracing in GLSL

I'm looking to simulate reflection effects using ray tracing in GLSL, but I could not find good references, examples, or tutorials on this topic. When I did find some interesting material, the method was limited to specific surfaces (e.g. spheres, cubes...), which is not my case. I also know that GLSL does not support recursive functions, but as far as I know ray tracing can be done iteratively.
My goal is to simulate the reverberation process for an acoustic sensor as follows: primary reflections by rasterization, and secondary reflections by ray tracing. When a ray hits an object's surface, the distance and normal values are measured.
My current code follows below. At this point I am able to calculate the ray parameters (world-space position and direction vector for each pixel), but I do not know how to calculate the data when a ray hits a surface.
Thanks in advance. Any help is very much welcome.
Vertex shader:
#version 130
uniform mat4 osg_ViewMatrixInverse;
out vec3 positionEyeSpace;
out vec3 normalEyeSpace;
uniform vec3 cameraPos;
// ray definition, with an origin point and a direction vector
struct Ray {
    vec3 origin;
    vec3 direction;
};

void main() {
    gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;

    // world space
    mat4 modelWorld = osg_ViewMatrixInverse * gl_ModelViewMatrix;
    vec3 positionWorldSpace = vec3(modelWorld * gl_Vertex);
    vec3 normalWorldSpace = mat3(modelWorld) * gl_Normal;

    // eye space
    positionEyeSpace = vec3(gl_ModelViewMatrix * gl_Vertex);
    normalEyeSpace = gl_NormalMatrix * gl_Normal;

    // calculate the reflection direction for an incident vector
    vec3 I = normalize(positionWorldSpace - cameraPos);
    vec3 N = normalize(normalWorldSpace);
    vec3 reflectedDirection = normalize(reflect(I, N));
}
Fragment shader:
#version 130
in vec3 positionEyeSpace;
in vec3 normalEyeSpace;
uniform float farPlane;
uniform bool drawNormal;
uniform bool drawDepth;
out vec4 out_data;
void main() {
    vec3 nNormalEyeSpace = normalize(normalEyeSpace);
    vec3 nPositionEyeSpace = normalize(-positionEyeSpace);

    // linear depth: distance from the eye, normalized by the far plane
    float linearDepth = length(positionEyeSpace) / farPlane;

    // output the normal and depth data as a matrix
    out_data = vec4(0, 0, 0, 1);
    if (linearDepth <= 1.0) {
        if (drawNormal) out_data.z = abs(dot(nPositionEyeSpace, nNormalEyeSpace));
        if (drawDepth)  out_data.y = linearDepth;
    }
    gl_FragDepth = linearDepth;
}
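Ray tracing without recursion is typically written as a bounded loop: intersect, record the hit, replace the ray with its reflection, repeat. Below is a minimal sketch of what such an iterative loop could look like against a single implicit plane, reusing the Ray struct above; the plane representation (planeNormal, planeD) and the bounce count are illustrative assumptions, not part of the scene here.

    const int MAX_BOUNCES = 2;

    // Returns the ray parameter t of the nearest hit with the plane
    // dot(N, X) + d = 0, or -1.0 if the ray misses.
    float intersectPlane(vec3 origin, vec3 dir, vec3 planeNormal, float planeD) {
        float denom = dot(planeNormal, dir);
        if (abs(denom) < 1e-6) return -1.0;   // ray parallel to the plane
        float t = -(dot(planeNormal, origin) + planeD) / denom;
        return (t > 0.0) ? t : -1.0;          // only accept hits in front of the origin
    }

    // Iterative secondary reflections: march the ray, record distance and
    // normal at each hit, then continue with the reflected direction.
    void traceSecondary(inout Ray ray, vec3 planeNormal, float planeD,
                        out float hitDistance, out vec3 hitNormal) {
        hitDistance = -1.0;
        hitNormal = vec3(0.0);
        for (int i = 0; i < MAX_BOUNCES; ++i) {
            float t = intersectPlane(ray.origin, ray.direction, planeNormal, planeD);
            if (t < 0.0) break;               // no hit: stop bouncing
            hitDistance = t;                  // distance along the ray
            hitNormal = planeNormal;          // surface normal at the hit point
            ray.origin += t * ray.direction;  // (offset slightly in practice to avoid self-hits)
            ray.direction = reflect(ray.direction, planeNormal);
        }
    }

For arbitrary meshes, the plane test would be replaced by intersections against scene geometry made available to the shader (e.g. triangles stored in a texture or buffer object), which is exactly the hard part the question is asking about.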

Related

Point lighting with shadow mapping and camera moving

I ran into a problem while trying to create a spotlight in my scene. The problem is that my camera moves around the scene, and because of this something is wrong with the lighting. In addition, I see only a black screen. I understand that I missed a transformation somewhere, or added an extra one, but where, I really do not know.
Below is the code for my shaders.
Fragment shader:
#version 330 core
precision mediump float; // Set the default precision to medium. We don't need as high of a
                         // precision in the fragment shader.
#define MAX_LAMPS_COUNT 8 // Max lamps count.
uniform vec3 u_ViewPos; // Camera position
uniform int u_LampsCount; // Lamps count
uniform int u_ShadowMapWidth = 1024; // shadow map width / default is 1024
uniform int u_ShadowMapHeight = 1024; // shadow map height / default is 1024
uniform float brightnessThreshold = 0.5; // brightness threshold variable
uniform float far_plane = 16;

varying mat4 v_MVMatrix; // Model View matrix
varying mat3 v_TBN; // Tangent Bitangent Normal matrix
varying vec4 v_Position; // Position for this fragment.
varying vec3 v_Normal; // Interpolated normal for this fragment.
varying vec2 v_Texture; // Texture coordinates.
varying float v_NormalMapping; // Is normal mapping enabled: 0 - false, 1 - true

struct Lamp {
    float ambientStrength;
    float diffuseStrength;
    float specularStrength;
    float kc; // constant term
    float kl; // linear term
    float kq; // quadratic term
    int shininess;
    vec3 lampPos; // in eye space, cameraViewMatrix * lamp world coordinates
    vec3 lampColor;
};

uniform samplerCube shadowMaps[MAX_LAMPS_COUNT];

uniform struct Mapping {
    sampler2D ambient;
    sampler2D diffuse;
    sampler2D specular;
    sampler2D normal;
} u_Mapping;

uniform Lamp u_Lamps[MAX_LAMPS_COUNT];

vec3 norm;
vec3 fragPos;
float shadow;

// output colors
layout(location = 0) out vec4 fragColor;
layout(location = 1) out vec4 fragBrightColor;

float calculateShadow(int textureIndex, vec3 lightPos) {
    // get vector between fragment position and light position
    vec3 fragToLight = fragPos - lightPos;
    // use the light to fragment vector to sample from the depth map
    float closestDepth = texture(shadowMaps[textureIndex], fragToLight).r;
    // it is currently in linear range between [0,1]. Re-transform back to original value
    closestDepth *= far_plane;
    // now get current linear depth as the length between the fragment and light position
    float currentDepth = length(fragToLight);
    // now test for shadows
    float bias = 0.05;
    float shadow = currentDepth - bias > closestDepth ? 1.0 : 0.0;
    //fragColor = vec4(vec3(closestDepth / far_plane), 1.0); // visualization
    return shadow;
}

float calculateAttenuation(Lamp lamp) {
    float distance = length(lamp.lampPos - fragPos);
    return 1.0 / (
        lamp.kc +
        lamp.kl * distance +
        lamp.kq * (distance * distance)
    );
}

vec4 toVec4(vec3 v) {
    return vec4(v, 1);
}

// The entry point for our fragment shader.
void main() {
    // Transform the vertex into eye space.
    fragPos = vec3(v_MVMatrix * v_Position);
    vec3 viewDir = normalize(u_ViewPos - fragPos);
    if (v_NormalMapping == 0) norm = vec3(normalize(v_MVMatrix * vec4(v_Normal, 0)));
    else { // using normal map if normal mapping enabled
        norm = texture2D(u_Mapping.normal, v_Texture).rgb;
        norm = normalize(norm * 2.0 - 1.0); // from [0; 1] to [-1; 1]
        norm = normalize(v_TBN * norm);
    }

    vec3 ambientResult = vec3(0, 0, 0); // result of ambient lighting for all lamps
    vec3 diffuseResult = vec3(0, 0, 0); // result of diffuse lighting for all lamps
    vec3 specularResult = vec3(0, 0, 0); // result of specular lighting for all lamps

    for (int i = 0; i < u_LampsCount; i++) {
        // attenuation
        float attenuation = calculateAttenuation(u_Lamps[i]);
        // ambient
        vec3 ambient = u_Lamps[i].ambientStrength * u_Lamps[i].lampColor * attenuation;
        // diffuse
        vec3 lightDir = normalize(u_Lamps[i].lampPos - fragPos);
        float diff = max(dot(norm, lightDir), 0.0);
        vec3 diffuse = u_Lamps[i].diffuseStrength * diff * u_Lamps[i].lampColor * attenuation;
        // specular
        vec3 reflectDir = reflect(-lightDir, norm);
        float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_Lamps[i].shininess);
        vec3 specular = u_Lamps[i].specularStrength * spec * u_Lamps[i].lampColor * attenuation;
        // fragment position in light space
        //fragLightSpacePos = u_Lamps[i].lightSpaceMatrix * u_Lamps[i].lightModelMatrix * v_Position;
        // calculate shadow
        shadow = calculateShadow(i, u_Lamps[i].lampPos);
        // result for this (i) lamp
        ambientResult += ambient;
        diffuseResult += diffuse * (1 - shadow);
        specularResult += specular * (1 - shadow);
    }

    fragColor =
        toVec4(ambientResult) * texture2D(u_Mapping.ambient, v_Texture) +
        toVec4(diffuseResult) * texture2D(u_Mapping.diffuse, v_Texture) +
        toVec4(specularResult) * texture2D(u_Mapping.specular, v_Texture);

    // brightness calculation
    //float brightness = dot(fragColor.rgb, vec3(0.2126, 0.7152, 0.0722));
    //if (brightness > brightnessThreshold) fragBrightColor = vec4(fragColor.rgb, 1.0);
    fragBrightColor = vec4(0, 0, 0, 1);
}
Vertex shader:
#version 130
uniform mat4 u_MVPMatrix; // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.
uniform float u_NormalMapping; // Normal mapping; 0 - false, 1 - true

attribute vec4 a_Position; // Per-vertex position information we will pass in.
attribute vec3 a_Normal; // Per-vertex normal information we will pass in.
attribute vec3 a_Tangent; // Per-vertex tangent information we will pass in.
attribute vec3 a_Bitangent; // Per-vertex bitangent information we will pass in.
attribute vec2 a_Texture; // Per-vertex texture information we will pass in.

varying mat4 v_MVMatrix; // This will be passed into the fragment shader.
varying mat3 v_TBN; // This will be passed into the fragment shader.
varying vec4 v_Position; // This will be passed into the fragment shader.
varying vec3 v_Normal; // This will be passed into the fragment shader.
varying vec2 v_Texture; // This will be passed into the fragment shader.
varying float v_NormalMapping; // This will be passed into the fragment shader.

void main() {
    // creating TBN (tangent-bitangent-normal) matrix if normal mapping enabled
    if (u_NormalMapping == 1) {
        vec3 T = normalize(vec3(u_MVMatrix * vec4(a_Tangent, 0.0)));
        vec3 B = normalize(vec3(u_MVMatrix * vec4(a_Bitangent, 0.0)));
        vec3 N = normalize(vec3(u_MVMatrix * vec4(a_Normal, 0.0)));
        v_TBN = mat3(T, B, N);
    }

    // gl_Position is a special variable used to store the final position.
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
    gl_Position = u_MVPMatrix * a_Position;

    // sending all needed variables to the fragment shader
    v_Position = a_Position;
    v_Texture = a_Texture;
    v_NormalMapping = u_NormalMapping;
    v_MVMatrix = u_MVMatrix;
    v_Normal = a_Normal;
}
Vertex shadow shader:
#version 130
attribute vec3 a_Position;
uniform mat4 u_ModelMatrix;

void main() {
    gl_Position = u_ModelMatrix * vec4(a_Position, 1.0);
}
Fragment shadow shader:
#version 330 core
in vec4 fragPos;
uniform vec3 lightPos; // cameraViewMatrix * lamp world coordinates
uniform float far_plane = 16;

void main()
{
    float lightDistance = length(fragPos.xyz - lightPos);
    // map to [0;1] range by dividing by far_plane
    lightDistance = lightDistance / far_plane;
    // write this as modified depth
    gl_FragDepth = lightDistance;
}
Geometry shadow shader:
#version 330 core
layout (triangles) in;
layout (triangle_strip, max_vertices = 18) out;
uniform mat4 shadowMatrices[6];
out vec4 fragPos; // FragPos from GS (output per EmitVertex)

void main() {
    for (int face = 0; face < 6; ++face) {
        gl_Layer = face; // built-in variable that specifies to which cube map face we render
        for (int i = 0; i < 3; ++i) // for each of the triangle's vertices
        {
            fragPos = gl_in[i].gl_Position;
            gl_Position = shadowMatrices[face] * fragPos;
            EmitVertex();
        }
        EndPrimitive();
    }
}
And a video demonstrating the shadow map visualization:
https://youtu.be/zaNXGG1qLaw
The content of shadowMaps[textureIndex] is probably a depth map taken in "light space", meaning a depth map as seen from the light source. But
fragPos = vec3(v_MVMatrix * v_Position);
and
struct Lamp {
.....
vec3 lampPos; // in eye space, cameraViewMatrix * lamp world coordinates
.....
};
are in view-space coordinates. As a consequence,
vec3 fragToLight = fragPos - lightPos;
is a direction in view space, as seen from the camera.
If you do
float closestDepth = texture(shadowMaps[textureIndex], fragToLight).r;
then a "light space" map is accessed by a "view space" vector. The transformation from view-space coordinates to "light space" coordinates is missing.
To solve the issue you need a matrix which transforms from world coordinates to "light space" coordinates. This is the inverse of the view-projection matrix that you used when you created the shadowMaps.
mat4 inverse_light_vp_mat[MAX_LAMPS_COUNT];
The fragment position has to be transformed to world coordinates, then it has to be transformed to "light space" coordinates, with inverse_light_vp_mat:
varying mat4 v_ModelMatrix; // Model matrix
vec4 fragLightPos = inverse_light_vp_mat[textureIndex] * v_ModelMatrix * v_Position;
fragLightPos.xyz /= fragLightPos.w;
In "light space" the light position is vec3(0.0, 0.0, 0.0), because the position of the light source is the origin of "light space". So the lookup in the shadowMaps can be done directly with fragLightPos:
float closestDepth = texture(shadowMaps[textureIndex], fragLightPos.xyz).r;
The problem has been solved. It was caused by the fact that I sampled the shadow map in camera space (view space), whereas it had to be sampled in world space. The shadow calculation itself also had to be done entirely in world space.
Fragment shader:
    vec3 fragToLight = vec3(model * v_Position) - lightPosWorldSpace;

or

    vec3 fragToLight = vec3(model * v_Position) - vec3(inverse(view) * lightPos); // lightPos is a vec4

Fragment shadow shader:

    float lightDistance = length(fragPos.xyz - lightPos); // lightPos is the lamp position in world space
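Putting the fix together, a sketch of the corrected lookup, done entirely in world space (u_ModelMatrix and lampPosWorld are assumed names for the model matrix and the lamp's world-space position, not identifiers from the original code):

    uniform mat4 u_ModelMatrix; // assumed: model matrix of the object being shaded

    float calculateShadow(int textureIndex, vec3 lampPosWorld) {
        // both operands in world space, so the cube map is sampled with a world-space direction
        vec3 fragPosWorld = vec3(u_ModelMatrix * v_Position);
        vec3 fragToLight = fragPosWorld - lampPosWorld;
        float closestDepth = texture(shadowMaps[textureIndex], fragToLight).r * far_plane;
        float currentDepth = length(fragToLight);
        float bias = 0.05;
        return currentDepth - bias > closestDepth ? 1.0 : 0.0;
    }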

How to handle lighting (ambient, diffuse, specular) for point spheres in OpenGL

Initial situation
I want to visualize simulation data in OpenGL.
My data consists of particle positions (x, y, z) where each particle has some properties (like density, temperature, ...) which will be used for coloring. Those (SPH) particles (100k to several millions), grouped together, actually represent planets, in case you wonder. I want to render those particles as small 3D spheres and add ambient, diffuse and specular lighting.
Status quo and questions
In MY case: in which coordinate frame should I do the lighting calculations? And which way is the "best" to pass the various components through the pipeline?
I saw that it is common to do it in view space, which is also very intuitive. However: the normals at the different fragment positions are calculated in the fragment shader in clip-space coordinates (see the appended fragment shader). Can I actually convert them "back" into view space to do the lighting calculations there for all the fragments? Would there be any advantage compared to doing it in clip space?
It would be easier to get the normals in view space if I used meshes for each sphere, but I think with several million particles this would decrease performance drastically, so better to do it with sphere intersection, would you agree?
PS: I don't need a model matrix since all the particles are already in place.
//VERTEX SHADER
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 2) in float density;

uniform float radius;
uniform vec3 lightPos;
uniform vec3 viewPos;

out vec4 lightDir;
out vec4 viewDir;
out vec4 viewPosition;
out vec4 posClip;
out float vertexColor;

// transformation matrices
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;

void main()
{
    lightDir = projection * view * vec4(lightPos - position, 1.0f);
    viewDir = projection * view * vec4(viewPos - position, 1.0f);
    viewPosition = projection * view * vec4(lightPos, 1.0f);
    posClip = projection * view * vec4(position, 1.0f);
    gl_Position = posClip;
    gl_PointSize = radius;
    vertexColor = density;
}
I know that perspective division happens for the gl_Position variable; does that actually happen to ALL vec4s which are passed from the vertex to the fragment shader? If not, maybe the calculations in the fragment shader are wrong?
And the fragment shader, where the normals and the diffuse/specular lighting calculations are done in clip space:
//FRAGMENT SHADER
#version 330 core
in float vertexColor;
in vec4 lightDir;
in vec4 viewDir;
in vec4 posClip;
in vec4 viewPosition;

uniform vec3 lightColor;

vec4 colormap(float x); // returns vec4(r, g, b, a)

out vec4 vFragColor;

void main(void)
{
    // AMBIENT LIGHT
    float ambientStrength = 0.0;
    vec3 ambient = ambientStrength * lightColor;

    // Normal calculation done in clip space (first from texture (gl_PointCoord 0 to 1) coords to NDC (-1 to 1))
    vec3 normal;
    normal.xy = gl_PointCoord * 2.0 - vec2(1.0); // transform from 0->1 point primitive coords to NDC -1->1
    float mag = dot(normal.xy, normal.xy); // squared length; comparing against 1.0 avoids the sqrt
    if (mag > 1.0) // discard fragments outside the sphere
        discard;
    normal.z = sqrt(1.0 - mag); // because x^2 + y^2 + z^2 = 1

    // DIFFUSE LIGHT
    float diff = max(0.0, dot(vec3(lightDir), normal));
    vec3 diffuse = diff * lightColor;

    // SPECULAR LIGHT
    float specularStrength = 0.1;
    vec3 viewDir = normalize(vec3(viewPosition) - vec3(posClip));
    vec3 reflectDir = reflect(-vec3(lightDir), normal);
    float shininess = 64;
    float spec = pow(max(dot(vec3(viewDir), vec3(reflectDir)), 0.0), shininess);
    vec3 specular = specularStrength * spec * lightColor;

    vFragColor = colormap(vertexColor / 8) * vec4(ambient + diffuse + specular, 1);
}
Now this actually "kind of" works, but I have the feeling that the sides of the sphere which do NOT face the light source are also being illuminated, which shouldn't happen. How can I fix this?
Some weird effect: at this moment the light source is actually BEHIND the left planet (it just peeks out a little bit at the top left), but still there are diffuse and specular effects going on. This side should actually be pretty dark! =(
Also, at this moment I get a glError: 1282 error in the fragment shader, and I don't know where it comes from since the shader program actually compiles and runs. Any suggestions? :)
The things that you are drawing aren't actually spheres. They just look like them from afar. This is absolutely ok if you are fine with that. If you need geometrically correct spheres (with correct sizes and with a correct projection), you need to do proper raycasting. This seems to be a comprehensive guide on this topic.
1. What coordinate system?
In the end, it is up to you. The coordinate system just needs to fulfill some requirements. It must be angle-preserving (because lighting is all about angles). And if you need distance-based attenuation, it should also be distance-preserving. The world and the view coordinate systems usually fulfill these requirements. Clip space is not suited for lighting calculations as neither angles nor distances are preserved. Furthermore, gl_PointCoord is in none of the usual coordinate systems. It is its own coordinate system and you should only use it together with other coordinate systems if you know their relation.
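As a concrete illustration, the shading math stays valid as long as every vector lives in the same angle-preserving space; a minimal view-space diffuse term might look like this (lightPosView, fragPosView, and normalView are assumed to be supplied in view space):

    vec3 L = normalize(lightPosView - fragPosView);        // light direction in view space
    float diff = max(dot(normalize(normalView), L), 0.0);  // valid: both vectors in one space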
2. Meshes or what?
Meshes are absolutely not suited to render spheres. As mentioned above, raycasting or some screen-space approximation are better choices. Here is an example shader that I used in my projects:
#version 330
out vec4 result;

in fData
{
    vec4 toPixel; //fragment coordinate in particle coordinates
    vec4 cam;     //camera position in particle coordinates
    vec4 color;   //sphere color
    float radius; //sphere radius
} frag;

uniform mat4 p; //projection matrix

void main(void)
{
    vec3 v = frag.toPixel.xyz - frag.cam.xyz;
    vec3 e = frag.cam.xyz;
    float ev = dot(e, v);
    float vv = dot(v, v);
    float ee = dot(e, e);
    float rr = frag.radius * frag.radius;

    float radicand = ev * ev - vv * (ee - rr);
    if (radicand < 0)
        discard;

    float rt = sqrt(radicand);
    float lambda = max(0, (-ev - rt) / vv); //first intersection on the ray
    float lambda2 = (-ev + rt) / vv;        //second intersection on the ray
    if (lambda2 < lambda) //if the first intersection is behind the camera
        discard;

    vec3 hit = lambda * v; //intersection point
    vec3 normal = (frag.cam.xyz + hit) / frag.radius;

    vec4 proj = p * vec4(hit, 1); //intersection point in clip space
    gl_FragDepth = ((gl_DepthRange.diff * proj.z / proj.w) + gl_DepthRange.near + gl_DepthRange.far) / 2.0;

    vec3 vNormalized = -normalize(v);
    float nDotL = dot(vNormalized, normal);
    vec3 c = frag.color.rgb * nDotL + vec3(0.5, 0.5, 0.5) * pow(nDotL, 120);
    result = vec4(c, frag.color.a);
}
3. Perspective division
Perspective division is not applied to your attributes. The GPU does perspective division on the data that you pass via gl_Position on the way to transforming it to screen space. But you will never actually see this perspective-divided position unless you do the division yourself.
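If you need the divided position per fragment, do it explicitly on the interpolated clip-space value; using the question's posClip varying:

    // perspective division is not applied to varyings automatically
    vec3 ndc = posClip.xyz / posClip.w; // now in [-1, 1] normalized device coordinates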
4. Light in the dark
This might be the result of you mixing different coordinate systems or doing the lighting calculations in clip space. Btw, the specular part is usually not multiplied by the material color. It is light that gets reflected directly at the surface; it does not penetrate the surface (which would absorb some colors depending on the material). That's why those highlights are usually white (or whatever light color you have), even on black objects.
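In code, that separation might look like this (the variable names are illustrative, not from the question):

    // modulate only the ambient and diffuse terms by the material/texture color;
    // add the specular term unmodulated so highlights keep the light's color
    vec3 shaded = (ambient + diffuse) * materialColor + specular * lightColor;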

Strange specular results from a GLSL Phong shader

I'm trying to implement Phong shading in GLSL but am having some issues with the specular component.
The green light is the specular component. The light (a point light) travels in a circle above the plane. The specular highlight always points inward toward the Y axis about which the light rotates, and fans out toward the diffuse reflection, as seen in the image. It doesn't appear to be affected at all by the positioning of the camera, and I'm not sure where I'm going wrong.
Vertex shader code:
#version 330 core
/*
 * Phong Shading with Point Light (Quadratic Attenuation)
 */

//Input vertex data
layout(location = 0) in vec3 vertexPosition_modelSpace;
layout(location = 1) in vec2 vertexUVs;
layout(location = 2) in vec3 vertexNormal_modelSpace;

//Output data; will be interpolated for each fragment
out vec2 uvCoords;
out vec3 vertexPosition_cameraSpace;
out vec3 vertexNormal_cameraSpace;

//Uniforms
uniform mat4 mvMatrix;
uniform mat4 mvpMatrix;
uniform mat3 normalTransformMatrix;

void main()
{
    vec3 normal = normalize(vertexNormal_modelSpace);
    //Set vertices in clip space
    gl_Position = mvpMatrix * vec4(vertexPosition_modelSpace, 1);
    //Set output for UVs
    uvCoords = vertexUVs;
    //Convert vertex and normal into eye space
    vertexPosition_cameraSpace = mat3(mvMatrix) * vertexPosition_modelSpace;
    vertexNormal_cameraSpace = normalize(normalTransformMatrix * normal);
}
Fragment Shader Code:
#version 330 core
in vec2 uvCoords;
in vec3 vertexPosition_cameraSpace;
in vec3 vertexNormal_cameraSpace;

//out
out vec4 fragColor;

//uniforms
uniform sampler2D diffuseTex;
uniform vec3 lightPosition_cameraSpace;

void main()
{
    const float materialAmbient = 0.025; //a touch of ambient
    const float materialDiffuse = 0.65;
    const float materialSpec = 0.35;
    const float lightPower = 2.0;
    const float specExponent = 2;

    //--------------Set colors and determine vectors needed for shading-----------------
    //reflection colors - NOTE: diffuse and ambient reflections will use the texture color
    const vec3 colorSpec = vec3(0, 1, 0); //Green spec color
    vec3 diffuseColor = texture2D(diffuseTex, uvCoords).rgb; //Get color from the texture at fragment
    const vec3 lightColor = vec3(1, 1, 1); //White light

    //Re-normalize normal vectors: after interpolation they may not be unit length any longer
    vec3 normVertexNormal_cameraSpace = normalize(vertexNormal_cameraSpace);

    //Set camera vec
    vec3 viewVec_cameraSpace = normalize(-vertexPosition_cameraSpace); //Since it's view space, camera at origin

    //Set light vec
    vec3 lightVec_cameraSpace = normalize(lightPosition_cameraSpace - vertexPosition_cameraSpace);

    //Set reflect vec
    vec3 reflectVec_cameraSpace = normalize(reflect(-lightVec_cameraSpace, normVertexNormal_cameraSpace)); //reflect function requires incident vec; from light to vertex

    //----------------Find intensity of each component---------------------
    //Determine light intensity
    float distance = abs(length(lightPosition_cameraSpace - vertexPosition_cameraSpace));
    float lightAttenuation = 1.0 / ((distance > 0) ? (distance * distance) : 1); //Quadratic
    vec3 lightIntensity = lightPower * lightAttenuation * lightColor;

    //Determine ambient component
    vec3 ambientComp = materialAmbient * diffuseColor * lightIntensity;

    //Determine diffuse component
    float lightDotNormal = max(dot(lightVec_cameraSpace, normVertexNormal_cameraSpace), 0.0);
    vec3 diffuseComp = materialDiffuse * diffuseColor * lightDotNormal * lightIntensity;

    vec3 specComp = vec3(0, 0, 0);
    //Determine spec component
    if (lightDotNormal > 0.0)
    {
        float reflectDotView = max(dot(reflectVec_cameraSpace, viewVec_cameraSpace), 0.0);
        specComp = materialSpec * colorSpec * pow(reflectDotView, specExponent) * lightIntensity;
    }

    //Add ambient + diffuse + spec
    vec3 phongFragRGB = ambientComp + diffuseComp + specComp;

    //----------------------Putting it together-----------------------
    //Out frag color
    fragColor = vec4(phongFragRGB, 1);
}
Just noting that the normalTransformMatrix seen in the Vertex shader is the inverse-transpose of the model-view matrix.
I am setting a vector from the vertex position to the light, to the camera, and the reflect vector, all in camera space. For the diffuse calculation I am taking the dot product of the light vector and the normal vector, and for the specular component I am taking the dot product of the reflection vector and the view vector. Perhaps there is some fundamental misunderstanding that I have with the algorithm?
I thought at first that the problem could be that I wasn't normalizing the normals entering the fragment shader after interpolation, but adding a line to normalize didn't affect the image. I'm not sure where to look.
I know that there a lot of phong shading questions on the site, but everyone seems to have a problem that is a bit different. If anyone can see where I am going wrong, please let me know. Any help is appreciated.
EDIT: Okay its working now! Just as jozxyqk suggested below, I needed to do a mat4*vec4 operation for my vertex position or lose the translation information. When I first made the change I was getting strange results until I realized that I was making the same mistake in my OpenGL code for the lightPosition_cameraSpace before I passed it to the shader (the mistake being that I was casting down the view matrix to a mat3 for the calculation instead of setting the light position vector as a vec4). Once I edited those lines the shader appears to be working properly! Thanks for the help, jozxqk!
I can see two parts which don't look right.
1. "vertexPosition_cameraSpace = mat3(mvMatrix) * vertexPosition_modelSpace" should be a mat4 * vec4(x, y, z, 1) multiply, otherwise it ignores the translation part of the modelview matrix.
2. "distance" uses the light position relative to the camera and not the vertex. Use lightVec_cameraSpace instead. (edit: missed the duplicated calculation)
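Applied to the question's shaders, the two fixes might look like this (a sketch reusing the question's variable names):

    // Fix 1 (vertex shader): a full mat4 * vec4 multiply keeps the translation.
    vec4 vertexPosition4 = mvMatrix * vec4(vertexPosition_modelSpace, 1.0);
    vertexPosition_cameraSpace = vertexPosition4.xyz;

    // Fix 2 (fragment shader): measure the light's distance from the vertex, not from the camera.
    float distance = length(lightPosition_cameraSpace - vertexPosition_cameraSpace);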

OpenGL Programmable Pipeline Point Lights

Since built-in uniforms such as gl_LightSource are now marked as deprecated in the latest versions of the OpenGL specification, I am currently implementing a basic lighting system (point lights right now) which receives all the light and material information through custom uniform variables.
I have implemented light attenuation and specular highlights for a point light, and it seems to be working well apart from a positioning glitch: I'm manually moving the light, altering its position along the X axis. Judging by the light it casts upon the square plane below it, however, the light source doesn't seem to move along the X axis, but rather diagonally, on both the X and Z axes (possibly Y too, though it's not entirely a positioning bug).
Here's a screenshot of what the distortion looks like (the light is at (-35, 5, 0), Suzanne is at (0, 2, 0)). It looks OK when the light is at (0, 5, 0).
According to the OpenGL specification, all the default light computations take place in eye coordinates, which is what I'm trying to emulate here (hence the multiplication of the light position by the vMatrix). I am using just the view matrix, since applying the model transformation of the vertex batch being rendered to the light doesn't really make sense.
If it matters, all the plane's normals are pointing straight up - 0, 1, 0.
(Note: I fixed the issue now, thanks to msell and myAces! The following snippets are the corrected versions. There's also an option to add spotlight parameters to the light now (d3d style ones))
Here's the code I'm using in the vertex shader:
#version 330
uniform mat4 mvpMatrix;
uniform mat4 mvMatrix;
uniform mat4 vMatrix;
uniform mat3 normalMatrix;
uniform vec3 vLightPosition;
uniform vec3 spotDirection;
uniform bool useTexture;
uniform bool fogEnabled;
uniform float minFogDistance;
uniform float maxFogDistance;

in vec4 vVertex;
in vec3 vNormal;
in vec2 vTexCoord;

smooth out vec3 vVaryingNormal;
smooth out vec3 vVaryingLightDir;
smooth out vec2 vVaryingTexCoords;
smooth out float fogFactor;
smooth out vec4 vertPos_ec;
smooth out vec4 lightPos_ec;
smooth out vec3 spotDirection_ec;

void main() {
    // Surface normal in eye coords
    vVaryingNormal = normalMatrix * vNormal;

    vec4 vPosition4 = mvMatrix * vVertex;
    vec3 vPosition3 = vPosition4.xyz / vPosition4.w;

    vec4 tLightPos4 = vMatrix * vec4(vLightPosition, 1.0);
    vec3 tLightPos = tLightPos4.xyz / tLightPos4.w;

    // Diffuse light
    // Vector to light source (do NOT normalize this!)
    vVaryingLightDir = tLightPos - vPosition3;

    if (useTexture) {
        vVaryingTexCoords = vTexCoord;
    }

    lightPos_ec = vec4(tLightPos, 1.0f);
    vertPos_ec = vec4(vPosition3, 1.0f);

    // Transform the light direction (for spotlights)
    vec4 spotDirection_ec4 = vec4(spotDirection, 1.0f);
    spotDirection_ec = spotDirection_ec4.xyz / spotDirection_ec4.w;
    spotDirection_ec = normalMatrix * spotDirection;

    // Projected vertex
    gl_Position = mvpMatrix * vVertex;

    // Fog factor
    if (fogEnabled) {
        float len = length(gl_Position);
        fogFactor = (len - minFogDistance) / (maxFogDistance - minFogDistance);
        fogFactor = clamp(fogFactor, 0, 1);
    }
}
And this is the code I'm using in the fragment shader:
#version 330
uniform vec4 globalAmbient;

// ADS shading model
uniform vec4 lightDiffuse;
uniform vec4 lightSpecular;
uniform float lightTheta;
uniform float lightPhi;
uniform float lightExponent;
uniform int shininess;
uniform vec4 matAmbient;
uniform vec4 matDiffuse;
uniform vec4 matSpecular;

// Cubic attenuation parameters
uniform float constantAt;
uniform float linearAt;
uniform float quadraticAt;
uniform float cubicAt;

// Texture stuff
uniform bool useTexture;
uniform sampler2D colorMap;

// Fog
uniform bool fogEnabled;
uniform vec4 fogColor;

smooth in vec3 vVaryingNormal;
smooth in vec3 vVaryingLightDir;
smooth in vec2 vVaryingTexCoords;
smooth in float fogFactor;
smooth in vec4 vertPos_ec;
smooth in vec4 lightPos_ec;
smooth in vec3 spotDirection_ec;

out vec4 vFragColor;

// Cubic attenuation function
float att(float d) {
    float den = constantAt + d * linearAt + d * d * quadraticAt + d * d * d * cubicAt;
    if (den == 0.0f) {
        return 1.0f;
    }
    return min(1.0f, 1.0f / den);
}

float computeIntensity(in vec3 nNormal, in vec3 nLightDir) {
    float intensity = max(0.0f, dot(nNormal, nLightDir));
    float cos_outer_cone = lightTheta;
    float cos_inner_cone = lightPhi;
    float cos_inner_minus_outer = cos_inner_cone - cos_outer_cone;

    // If we are a spotlight
    if (lightTheta > 0.0f) {
        float cos_cur = dot(normalize(spotDirection_ec), -nLightDir);
        // d3d style smooth edge
        float spotEffect = clamp((cos_cur - cos_outer_cone) /
                                 cos_inner_minus_outer, 0.0, 1.0);
        spotEffect = pow(spotEffect, lightExponent);
        intensity *= spotEffect;
    }

    float attenuation = att(length(lightPos_ec - vertPos_ec));
    intensity *= attenuation;
    return intensity;
}

/**
 * Phong per-pixel lighting shading model.
 * Implements basic texture mapping and fog.
 */
void main() {
    vec3 ct, cf;
    vec4 texel;
    float at, af;

    if (useTexture) {
        texel = texture2D(colorMap, vVaryingTexCoords);
    } else {
        texel = vec4(1.0f);
    }
    ct = texel.rgb;
    at = texel.a;

    vec3 nNormal = normalize(vVaryingNormal);
    vec3 nLightDir = normalize(vVaryingLightDir);

    float intensity = computeIntensity(nNormal, nLightDir);
    cf = matAmbient.rgb * globalAmbient.rgb + intensity * lightDiffuse.rgb * matDiffuse.rgb;
    af = matAmbient.a * globalAmbient.a + lightDiffuse.a * matDiffuse.a;

    if (intensity > 0.0f) {
        // Specular light
        // - added *after* the texture color is multiplied so that
        //   we get a truly shiny result
        vec3 vReflection = normalize(reflect(-nLightDir, nNormal));
        float spec = max(0.0, dot(nNormal, vReflection));
        float fSpec = pow(spec, shininess) * lightSpecular.a;
        cf += intensity * vec3(fSpec) * lightSpecular.rgb * matSpecular.rgb;
    }

    // Color modulation
    vFragColor = vec4(ct * cf, at * af);

    // Add the fog to the mix
    if (fogEnabled) {
        vFragColor = mix(vFragColor, fogColor, fogFactor);
    }
}
What math bug could be causing this distortion?
Edit 1:
I've updated the shader code. The attenuation is now being computed in the fragment shader, as it should have been all along. It's currently disabled, though - the bug doesn't have anything to do with the attenuation. When rendering only the attenuation factor of the light (see the last few lines of the fragment shader), the attenuation is being computed right. This means that the light position is being correctly transformed into eye coordinates, so it can't be the source of the bug.
The last few lines of the fragment shader can be used for some (slightly hackish but nevertheless insightful) debugging - it seems the intensity of the light is not being computed right per-fragment, though I have no idea why.
What's interesting is that this bug is only noticeable on (very) large quads like the floor in the images. It's not noticeable on small models.
Edit 2:
I've updated the shader code to a working version. It's all good now, and I hope it helps any future reader, since as of today I have yet to see any GLSL tutorial that implements lights with absolutely no fixed functionality and no secret implicit transforms (such as gl_LightSource[i].* and the implicit transformations to eye space).
My code is licensed under the BSD 2-clause license and can be found on GitHub!
I recently had a similar problem, where lighting worked somewhat incorrectly when using large polygons. The problem was normalizing the eye vector in the vertex shader, as interpolating normalized values produces incorrect results.
Change
vVaryingLightDir = normalize( tLightPos - vPosition3 );
to
vVaryingLightDir = tLightPos - vPosition3;
in your vertex shader. You can keep the normalization in the fragment shader.
Just because I noticed:
vec3 tLightPos = (vMatrix * vec4(vLightPosition, 1.0)).xyz;
you are simply eliminating the homogeneous coordinate here, without dividing by it first. This will cause some problems.
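The corrected form divides by w before dropping it (this matches the fixed vertex shader above):

    // divide by the homogeneous coordinate before discarding it
    vec4 tLightPos4 = vMatrix * vec4(vLightPosition, 1.0);
    vec3 tLightPos = tLightPos4.xyz / tLightPos4.w;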

OpenGL refraction: texture is repeating on ellipsoid object

I have a query regarding refraction.
I am using a texture image for the refraction (refer to test_car.png).
But somehow the texture is getting multiplied, giving a distorted image (refer to Screenshot.png).
I am using the following shader.
attribute highp vec4 vertex;
attribute mediump vec3 normal;
uniform highp mat4 matrix;
uniform highp vec3 diffuse_color;
uniform highp mat3 matrixIT;
uniform mediump mat4 matrixMV;
uniform mediump vec3 EyePosModel;
uniform mediump vec3 LightDirModel;
varying mediump vec4 color;
const mediump float cShininess = 3.0;
const mediump float cRIR = 1.015;
varying mediump vec2 RefractCoord;

vec3 SpecularColor = vec3(1.0, 1.0, 1.0);

void main(void)
{
    vec3 toLight = normalize(vec3(1.0, 1.0, 1.0));

    mediump vec3 eyeDirModel = normalize(vertex.xyz - EyePosModel);
    mediump vec3 refractDir = refract(eyeDirModel, normal, cRIR);
    refractDir = (matrix * vec4(refractDir, 0.0)).xyw;
    RefractCoord = 0.5 * (refractDir.xy / refractDir.z) + 0.5;

    vec3 normal_cal = normalize(matrixIT * normal);
    float NDotL = max(dot(normal_cal, toLight), 0.0);

    vec4 ecPosition = normalize(matrixMV * vertex);
    vec3 eyeDir = vec3(1.0, 1.0, 1.0);

    float NDotH = 0.0;
    vec3 SpecularLight = vec3(0.0, 0.0, 0.0);
    if (NDotL > 0.0)
    {
        vec3 halfVector = normalize(eyeDirModel + LightDirModel);
        float NDotH = max(dot(normal_cal, halfVector), 0.0);
        float specular = pow(NDotH, 3.0);
        SpecularLight = specular * SpecularColor;
    }

    color = vec4((NDotL * diffuse_color.xyz) + (SpecularLight.xyz), 1.0);
    gl_Position = matrix * vertex;
}
And
varying mediump vec2 RefractCoord;
uniform sampler2D sTexture;
varying mediump vec4 color;

void main(void)
{
    lowp vec3 refractColor = texture2D(sTexture, RefractCoord).rgb;
    gl_FragColor = vec4(color.xyz + refractColor, 1.0);
}
Can anyone let me know the solution to this problem?
Thanks for any help.
Sorry guys, I am not able to attach the images.
It seems that you are calculating the refraction vector incorrectly. However, the answer to your question is already in its title. If you are looking at an ellipsoid, the rays from the viewer span a cone wrapping the ellipsoid. But after refraction, the cone may be much wider, reaching beyond the edges of your image, therefore giving texture coordinates outside the 0 - 1 range and leading to the texture being wrapped. So we need to take care of that as well.
First, the refraction coordinate should be calculated in vertex shader as follows:
    vec3 eyeDirModel = normalize((-vertex * matrix).xyz); // note the .xyz: the product is a vec4
    vec3 refractDir = refract(eyeDirModel, normal, cRIR);
    RefractCoord = normalize((matrix * vec4(refractDir, 0.0)).xyz); // no dehomog!
RefractCoord now contains refracted eye-space vectors. This relies on "matrix" being the modelview matrix (that is not clear from your code, but I suspect it is). You could possibly skip the normalization if you want the shader to run faster; it shouldn't cause noticeable errors. Now a little bit of modification to your fragment shader:
    vec3 refractColor = texture2D(sTexture, normalize(RefractCoord).xy * 0.5 + 0.5).rgb;
Here, using normalize() makes sure that the texture coordinates do not cause the texture to repeat.
Note that using a 2D texture for refractions is only justified if you generate it on the fly (as e.g. Half-Life 2 does); otherwise you should probably use a cube-map texture, which does the normalization for you and gives you a color based on a 3D direction, which is what you need.
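A sketch of the cube-map variant (the samplerCube name is an assumption, and RefractCoord would be passed as the full vec3 direction instead of a vec2):

    uniform samplerCube sEnvMap;       // assumed name for the environment cube map
    varying mediump vec3 RefractCoord; // full 3D refracted direction
    varying mediump vec4 color;

    void main(void)
    {
        // a cube map is indexed by a 3D direction directly, so no manual
        // projection to 2D coordinates (and hence no wrapping) is needed
        lowp vec3 refractColor = textureCube(sEnvMap, RefractCoord).rgb;
        gl_FragColor = vec4(color.xyz + refractColor, 1.0);
    }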
Hope this helps... (and, oh yeah, I wrote this from memory; in case there are any errors, please comment).