I am looking to create a waving flag effect in a vertex shader, and here is what I have so far:
#version 330

layout(location = 0) in vec3 in_position;
layout(location = 1) in vec3 in_color;

uniform mat4 model_matrix, view_matrix, projection_matrix;
uniform vec3 culoare;
uniform float currentAngle;

out vec3 vertex_to_fragment_color;

void main(){
    vertex_to_fragment_color = culoare;
    vec4 v = vec4( in_position.x, in_position.y, in_position.z, 1.0 );
    v.y = sin( in_position.x + currentAngle );
    v.y += sin( in_position.z + currentAngle );
    v.y *= in_position.x * 0.08;
    gl_Position = projection_matrix*view_matrix*model_matrix*v;
}
currentAngle is a variable that I'm sending to the shader, and the code that updates it looks something like this:
if ( currentAngle > 360.0f ) currentAngle -= 360.0f;
if ( currentAngle < 0.0f ) currentAngle += 360.0f;
I am new to this, so I could really use some help to get this right.
GLSL's sin() and cos() take their arguments in radians, not degrees.
You can use the GLSL function radians() to convert degrees to radians.
You'll also have to subdivide your flag rectangle to get a convincing effect.
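To make that concrete, here is a minimal sketch of the same main() with the degrees-to-radians conversion applied. It reuses the names from the shader above; the phase variable and the 4.0/2.0 wave frequencies are placeholder choices for illustration, not values from the original code:

void main(){
    vertex_to_fragment_color = culoare;

    // currentAngle is uploaded in degrees, so convert it before calling sin()
    float phase = radians( currentAngle );

    vec4 v = vec4( in_position, 1.0 );

    // sum of two sine waves; scaling by x keeps the edge at x == 0 still,
    // like the side of the flag attached to the pole
    float wave = sin( in_position.x * 4.0 + phase )
               + sin( in_position.z * 2.0 + phase );
    v.y += wave * in_position.x * 0.08;

    gl_Position = projection_matrix * view_matrix * model_matrix * v;
}

Even with this change, the wave only shows up if the flag quad is subdivided into enough vertices for the sine curve to be sampled across its width.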
Related
Most of the shader code follows the LearnOpenGL tutorial. I have made sure that the G-buffer and noise data passed into the shader are correct. The result looks like some kind of dislocation, but I really can't figure out why this happens.
Screenshot: misplaced SSAO
#version 450 core
out float OUT_FragColor;
in vec2 TexCoords;
uniform sampler2D g_position;
uniform sampler2D g_normal;
uniform sampler2D noise_texture;
struct CameraInfo
{
vec4 position;
mat4 view;
mat4 projection;
};
layout(std140, binding = 0) uniform Camera
{
CameraInfo camera;
};
float radius = 0.5;
float bias = 0.025;
uniform int noise_tex_size;
uniform vec4 sample_array[64]; // sample kernel; the declaration is missing from the post, so the size and type here are assumed
void main()
{
const vec2 noise_scale = vec2(1280.0/noise_tex_size, 720.0/noise_tex_size);
vec3 frag_pos = texture(g_position, TexCoords).xyz;
vec3 normal = normalize(texture(g_normal, TexCoords).xyz);
vec3 random_vec = normalize(texture(noise_texture, TexCoords * noise_scale).xyz);
vec3 tangent = normalize(random_vec - normal * dot(random_vec, normal));
vec3 bitangent = cross(normal, tangent);
mat3 TBN = mat3(tangent, bitangent, normal);
float occlusion = 0.f;
for(int i = 0; i < sample_array.length(); ++i)
{
vec3 sample_pos = TBN * sample_array[i].xyz;
sample_pos = frag_pos + sample_pos * radius;
vec4 offset = vec4(sample_pos, 1.0);
offset = camera.projection * offset; // from view to clip-space
offset.xyz /= offset.w; // perspective divide ?????
offset.xyz = offset.xyz * 0.5 + 0.5; // transform to range 0.0 - 1.0
float sample_depth = texture(g_position, offset.xy).z;
float range_check = smoothstep(0.f, 1.f, radius / abs(frag_pos.z - sample_depth));
occlusion += (sample_depth >= sample_pos.z + bias ? 1.0 : 0.0) * range_check; // range_check fades out occluders whose depth is too far from the fragment
}
occlusion = 1.f - (occlusion / sample_array.length());
OUT_FragColor = occlusion;
}
Transform the g_position and g_normal into view space:
FragPos = (camera.view * vec4(WorldPos, 1.0)).xyz;
mat4 normal_matrix = camera.view * mat4(transpose(inverse(mat3(model))));
FragNormal = mat3(normal_matrix) * normal;
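For reference, a view-space geometry pass is commonly written along the lines of the sketch below. This is a generic LearnOpenGL-style layout reusing the names from the snippet above (camera.view, camera.projection, model, WorldPos, FragPos, FragNormal, normal); it is not a claim about what the poster's full vertex shader looks like:

// geometry-pass vertex shader (sketch): write position and normal in view space
vec4 view_pos = camera.view * vec4(WorldPos, 1.0);   // WorldPos is the model-transformed position, as above
FragPos = view_pos.xyz;

// build the normal matrix from the full model-to-view transform so the
// normal ends up in the same (view) space as FragPos
mat3 normal_matrix = transpose(inverse(mat3(camera.view * model)));
FragNormal = normalize(normal_matrix * normal);

gl_Position = camera.projection * view_pos;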
I'm porting an OpenGL application to WebAssembly using Emscripten. I've written a bunch of shaders in GLSL (330) for the native version. However, for the web version I need shaders written in GLSL ES (300 es). How would I go about converting my shaders from GLSL to GLSL ES?
Possibilities I have considered so far:
GLSL -> SPIR-V -> GLSL ES,
having a bunch of #ifdef statements in the GLSL code so that blocks of code are only compiled for GLSL ES or GLSL (a sketch of this approach follows the list),
writing custom C++ code that dynamically creates GLSL / GLSL ES code depending on what you need
simply having two nearly identical copies of all the shaders, one in GLSL and the other in GLSL ES
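As a rough illustration of the #ifdef option above: the application can keep a single shader file with no #version line and prepend the right header at load time, while the GL_ES macro (which GLSL ES predefines) guards the ES-only parts. The prepended strings and the tiny shader body below are assumptions for illustration, not code from the question:

// The loader prepends "#version 330 core\n" for the native build or
// "#version 300 es\n" for the Emscripten build; the file below is shared.
#ifdef GL_ES
precision highp float;   // GLSL ES requires a default float precision in fragment shaders
#endif

in vec3 normalViewSpace;
out vec4 FragColor;

void main()
{
    FragColor = vec4(normalize(normalViewSpace) * 0.5 + 0.5, 1.0);
}

For shaders like the ones in this question, the differences usually come down to the #version line and the precision qualifiers, so this kind of setup tends to avoid maintaining two nearly identical copies.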
Example of GLSL vertex shader:
#version 330 core
#define NR_LIGHTS 10
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 normal;
out vec3 normalViewSpace;
out vec3 posViewSpace;
out vec2 textureCoords;
out vec4 positionsLightSpace[NR_LIGHTS];
uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
uniform mat4 lightMatrices[NR_LIGHTS];
void main()
{
vec4 posViewSpaceV4;
posViewSpaceV4 = viewMatrix * modelMatrix * vec4(position, 1.0);
posViewSpace = posViewSpaceV4.xyz;
gl_Position = projectionMatrix * posViewSpaceV4;
normalViewSpace = mat3(viewMatrix) * normalMatrix * normal;
for( int i = 0; i < NR_LIGHTS; i++ )
{
    // ... (the rest of this loop, presumably filling positionsLightSpace[i], is cut off in the original post)
}
}
Example of GLSL fragment shader:
#version 330 core
#define NR_LIGHTS 10
struct Material {
vec3 ambient;
vec3 diffuse;
vec3 specular;
float shininess;
float alpha;
};
struct Light {
vec3 posViewSpace;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
vec3 directionViewSpace;
float cutOff;
float outerCutOff;
sampler2D shadowMap;
};
out vec4 FragColor;
in vec3 normalViewSpace;
in vec3 posViewSpace;
in vec4 positionsLightSpace[NR_LIGHTS];
uniform Material material;
uniform Light lights[NR_LIGHTS];
float shadowCalculation(vec4 posLightSpace, sampler2D shadowMap, Light light)
{
// perform perspective divide
vec3 projCoords = posLightSpace.xyz / posLightSpace.w; // range [-1, 1]
// transform range [0, 1]
projCoords = projCoords * 0.5 + 0.5;
float closestDepth = texture(shadowMap, projCoords.xy).r;
float currentDepth = projCoords.z;
vec3 lightDir = normalize(light.posViewSpace - posViewSpace);
float bias = max(0.00005 * (1.0 - dot(normalViewSpace, lightDir)), 0.000005); // solves shadow acne
float shadow = currentDepth - bias > closestDepth ? 1.0 : 0.0;
return shadow;
}
vec3 calcSpotLight( Light light, vec3 normal, vec3 position, float shadow) // normal and position in view space, although this function should not care about which space it's in
{
vec3 result = vec3(0.0, 0.0, 0.0);
vec3 lightDir = normalize(light.posViewSpace - position);
float theta = dot(lightDir, normalize(-light.directionViewSpace));
float epsilon = light.cutOff - light.outerCutOff;
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0, 1.0); // interpolate between inner and outer cutOff and clamp to 0 and 1
if( intensity > 0 ) // if inside spot radius
{
// attenuation
float distance = length(light.posViewSpace - position);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
if( attenuation > 0.001 )
{
// ambient
vec3 ambient = material.ambient * light.ambient;
// diffuse
vec3 norm = normalize(normalViewSpace);
float diff = max(dot(norm, lightDir), 0.0);
vec3 diffuse = diff * material.diffuse * light.diffuse;
// specular
vec3 viewDir = normalize(-position); // in view space the camera is at (0, 0, 0)
vec3 reflectDir = reflect(-lightDir, norm); // reflect function expect vector FROM light source TO position
float spec = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
vec3 specular = material.specular * spec * light.specular;
// result
result = intensity * attenuation * (ambient + (1.0 - shadow) * (diffuse + specular));
}
}
return result;
}
void main()
{
vec3 result = material.ambient * 0.08;
for( int i = 0; i < NR_LIGHTS; i++ )
{
    // ... (the per-light shading and shadow calculations are cut off in the original post)
}
// ... (the final FragColor write is also cut off)
}
I'm trying to implement Oren-Nayar lighting in the fragment shader as shown here.
However, I'm getting some strange lighting effects on the terrain as shown below.
I am currently sending the shader the 'view direction' uniform as the camera's 'front' vector. I am not sure if this is correct, as moving the camera around changes the artifacts.
Multiplying the 'front' vector by the MVP matrix gives a better result, but the artifacts are still very noticeable when viewing the terrain from some angles. It is particularly noticeable in dark areas and around the edges of the screen.
What could be causing this effect?
Artifact example
How the scene should look
Vertex Shader
#version 450
layout(location = 0) in vec3 position;
layout(location = 1) in vec3 normal;
out VS_OUT {
vec3 normal;
} vert_out;
void main() {
vert_out.normal = normal;
gl_Position = vec4(position, 1.0);
}
Tessellation Control Shader
#version 450
layout(vertices = 3) out;
in VS_OUT {
vec3 normal;
} tesc_in[];
out TESC_OUT {
vec3 normal;
} tesc_out[];
void main() {
if(gl_InvocationID == 0) {
gl_TessLevelInner[0] = 1.0;
gl_TessLevelInner[1] = 1.0;
gl_TessLevelOuter[0] = 1.0;
gl_TessLevelOuter[1] = 1.0;
gl_TessLevelOuter[2] = 1.0;
gl_TessLevelOuter[3] = 1.0;
}
tesc_out[gl_InvocationID].normal = tesc_in[gl_InvocationID].normal;
gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
}
Tessellation Evaluation Shader
#version 450
layout(triangles, equal_spacing) in;
in TESC_OUT {
vec3 normal;
} tesc_in[];
out TESE_OUT {
vec3 normal;
float height;
vec4 shadow_position;
} tesc_out;
uniform mat4 model_view;
uniform mat4 model_view_perspective;
uniform mat3 normal_matrix;
uniform mat4 depth_matrix;
vec3 lerp(vec3 v0, vec3 v1, vec3 v2) {
return (
(vec3(gl_TessCoord.x) * v0) +
(vec3(gl_TessCoord.y) * v1) +
(vec3(gl_TessCoord.z) * v2)
);
}
vec4 lerp(vec4 v0, vec4 v1, vec4 v2) {
return (
(vec4(gl_TessCoord.x) * v0) +
(vec4(gl_TessCoord.y) * v1) +
(vec4(gl_TessCoord.z) * v2)
);
}
void main() {
gl_Position = lerp(
gl_in[0].gl_Position,
gl_in[1].gl_Position,
gl_in[2].gl_Position
);
tesc_out.normal = normal_matrix * lerp(
tesc_in[0].normal,
tesc_in[1].normal,
tesc_in[2].normal
);
tesc_out.height = gl_Position.y;
tesc_out.shadow_position = depth_matrix * gl_Position;
gl_Position = model_view_perspective * gl_Position;
}
Fragment Shader
#version 450
in TESE_OUT {
vec3 normal;
float height;
vec4 shadow_position;
} frag_in;
out vec4 colour;
uniform vec3 view_direction;
uniform vec3 light_position;
#define PI 3.141592653589793
void main() {
const vec3 ambient = vec3(0.1, 0.1, 0.1);
const float roughness = 0.8;
const vec4 water = vec4(0.0, 0.0, 0.8, 1.0);
const vec4 sand = vec4(0.93, 0.87, 0.51, 1.0);
const vec4 grass = vec4(0.0, 0.8, 0.0, 1.0);
const vec4 ground = vec4(0.49, 0.27, 0.08, 1.0);
const vec4 snow = vec4(0.9, 0.9, 0.9, 1.0);
if(frag_in.height == 0.0) {
colour = water;
} else if(frag_in.height < 0.2) {
colour = sand;
} else if(frag_in.height < 0.575) {
colour = grass;
} else if(frag_in.height < 0.8) {
colour = ground;
} else {
colour = snow;
}
vec3 normal = normalize(frag_in.normal);
vec3 view_dir = normalize(view_direction);
vec3 light_dir = normalize(light_position);
float NdotL = dot(normal, light_dir);
float NdotV = dot(normal, view_dir);
float angleVN = acos(NdotV);
float angleLN = acos(NdotL);
float alpha = max(angleVN, angleLN);
float beta = min(angleVN, angleLN);
float gamma = dot(view_dir - normal * dot(view_dir, normal), light_dir - normal * dot(light_dir, normal));
float roughnessSquared = roughness * roughness;
float roughnessSquared9 = (roughnessSquared / (roughnessSquared + 0.09));
// calculate C1, C2 and C3
float C1 = 1.0 - 0.5 * (roughnessSquared / (roughnessSquared + 0.33));
float C2 = 0.45 * roughnessSquared9;
if(gamma >= 0.0) {
C2 *= sin(alpha);
} else {
C2 *= (sin(alpha) - pow((2.0 * beta) / PI, 3.0));
}
float powValue = (4.0 * alpha * beta) / (PI * PI);
float C3 = 0.125 * roughnessSquared9 * powValue * powValue;
// now calculate both main parts of the formula
float A = gamma * C2 * tan(beta);
float B = (1.0 - abs(gamma)) * C3 * tan((alpha + beta) / 2.0);
// put it all together
float L1 = max(0.0, NdotL) * (C1 + A + B);
// also calculate interreflection
float twoBetaPi = 2.0 * beta / PI;
float L2 = 0.17 * max(0.0, NdotL) * (roughnessSquared / (roughnessSquared + 0.13)) * (1.0 - gamma * twoBetaPi * twoBetaPi);
colour = vec4(colour.xyz * (L1 + L2), 1.0);
}
First, I plugged your fragment shader into my renderer with my own view/normal/light vectors and it works perfectly, so the problem has to be in the way you calculate those vectors.
Next, you say that you set view_dir to your camera's front vector. I assume you mean the camera's front vector in world space, which would be incorrect. Since you calculate the dot products with vectors in camera space, view_dir must be in camera space too; vec3(0, 0, 1) would be an easy way to check that. If it works, we have found your problem.
However, using (0,0,1) for the view direction is not strictly correct when you do perspective projection, because the direction from the fragment to the camera then depends on the location of the fragment on the screen. The correct formula then would be view_dir = normalize(-pos) where pos is the fragment's position in camera space (that is with model-view matrix applied without the projection). Further, this quantity now depends only on the fragment location on the screen, so you can calculate it as:
view_dir = normalize(vec3(-(gl_FragCoord.xy - frame_size/2) / (frame_width/2), flen))
flen is the focal length of your camera, which you can calculate as flen = cot(fovx/2).
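To make the normalize(-pos) variant concrete, here is a minimal sketch built on the shaders above; the view_position member it adds to the TESE_OUT block is an assumption for illustration, not part of the original code:

// tessellation evaluation shader: before gl_Position is multiplied by
// model_view_perspective, also pass the camera-space position down
// (this needs a new "vec3 view_position;" member in the TESE_OUT block)
tesc_out.view_position = (model_view * gl_Position).xyz;

// fragment shader: the camera sits at the origin of camera space, so the
// per-fragment view direction is just the negated, normalized position
vec3 view_dir = normalize(-frag_in.view_position);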
I know this is a long-dead thread, but I've been having the same problem (for several years) and finally found the solution...
It can be partially solved by fixing the orientation of the surface normals to match the polygon winding direction, but you can also get rid of the artifacts in the shader by changing the following two lines...
float angleVN = acos(NdotV);
float angleLN = acos(NdotL);
to this...
float angleVN = acos(clamp(NdotV, -1.0, 1.0));
float angleLN = acos(clamp(NdotL, -1.0, 1.0));
Tada!
The problem is that I pass several lights to the shader, but it shows a shadow only for the first one.
The lighting is applied even for the lights whose shadows are not visible.
I checked the shadow maps and they are being passed correctly.
The fragment shader:
#version 420 compatibility
#define MAX_LIGHTS 8
struct lightSource
{
vec4 position;
vec4 diffuse;
vec4 specular;
float constantAttenuation, linearAttenuation, quadraticAttenuation;
float spotCutoff, spotExponent, intensity;
vec3 spotDirection;
sampler2D TexShadow;
samplerCube TexShadowPoint;
};
struct material
{
vec4 ambient;
vec4 diffuse;
vec4 specular;
float shininess;
};
uniform material frontMaterial;
uniform lightSource lights[MAX_LIGHTS];
uniform int numberOfLights;
uniform vec4 scene_ambient;
uniform mat4 inversedViewMatrix;
uniform sampler2D Tex;
uniform sampler2D TexNorm;
in vec4 position; // position of the vertex (and fragment) in world space
in vec3 varyingNormalDirection; // surface normal vector in world space
in vec2 ex_UV;
in vec4 ShadowCoords[MAX_LIGHTS];
vec2 poissonDisk[16] = vec2[](
vec2( -0.94201624, -0.39906216 ), vec2( 0.94558609, -0.76890725 ),
vec2( -0.094184101, -0.92938870 ), vec2( 0.34495938, 0.29387760 ),
vec2( -0.91588581, 0.45771432 ), vec2( -0.81544232, -0.87912464 ),
vec2( -0.38277543, 0.27676845 ), vec2( 0.97484398, 0.75648379 ),
vec2( 0.44323325, -0.97511554 ), vec2( 0.53742981, -0.47373420 ),
vec2( -0.26496911, -0.41893023 ), vec2( 0.79197514, 0.19090188 ),
vec2( -0.24188840, 0.99706507 ), vec2( -0.81409955, 0.91437590 ),
vec2( 0.19984126, 0.78641367 ), vec2( 0.14383161, -0.14100790 )
);
float rand(vec2 co)
{
return fract(sin(dot(co, vec2(12.9898, 78.233)) * 43758.5453));
}
float linstep(float low, float high, float v)
{
return clamp((v-low)/(high-low), 0.0, 1.0);
}
float VSM(sampler2D depths, vec2 uv, float compare)
{
vec2 moments = texture2D(depths, uv).xy;
float p = smoothstep(compare - 0.02, compare, moments.x);
float variance = max(moments.y - moments.x*moments.x, -0.001);
float d = compare - moments.x;
float p_max;
p_max = linstep(0.6, 1.0, variance / (variance + d*d));
return clamp(max(p, p_max), 0.0, 1.0) / 2;
}
void main(void)
{
vec3 normalDirection;
//sample the normal map and covert from 0:1 range to -1:1 range
if (texture2D(TexNorm, ex_UV).rgb != vec3(0.0, 0.0, 0.0))
{
vec3 mapped_Normals = texture2D(TexNorm, ex_UV).rgb * 2.0 - 1.0;
normalDirection = normalize(mapped_Normals); //normal mapped normals
}
else
normalDirection = normalize(varyingNormalDirection);
vec4 texColor = texture2D(Tex, ex_UV);// usual processing of texture coordinates
vec3 viewDirection = normalize(vec3(inversedViewMatrix * vec4(0.0, 0.0, 0.0, 1.0) - position));
vec3 lightDirection;
float attenuation;
//initialize total lighting with ambient lighting
vec3 totalAmbientDiffuse = vec3(scene_ambient) * vec3(frontMaterial.ambient);
vec3 totalSpecular = vec3(0.0); // initialize before accumulating per-light specular
for (int index = 0; index < numberOfLights; index++) // for all light sources
{
float Visibility;
if (lights[index].position.w == 0.0) // directional light
{
attenuation = 1.0; // no attenuation
lightDirection = normalize(vec3(lights[index].position));
//int num = int(rand(position.xyz) * 16);
Visibility = VSM(lights[index].TexShadow, ShadowCoords[index].xy + poissonDisk[int(rand(position.xy + vec2(position.z,-position.z)) * 16)] /2000, ShadowCoords[index].z);
}
else // point light or spotlight (or other kind of light)
{
vec3 positionToLightSource = vec3(lights[index].position - position);
float Distance = length(positionToLightSource);
lightDirection = normalize(positionToLightSource);
attenuation = 1.0 / (lights[index].constantAttenuation + lights[index].linearAttenuation * Distance + lights[index].quadraticAttenuation * Distance * Distance);
if (lights[index].spotCutoff <= 90.0) // spotlight
{
Visibility = VSM(lights[index].TexShadow, ShadowCoords[index].xy / ShadowCoords[index].w + poissonDisk[int(rand(position.xy + vec2(position.z,-position.z)) * 16)] / 700, ShadowCoords[index].z / ShadowCoords[index].w);
float clampedCosine = max(0.0, dot(-lightDirection, normalize(lights[index].spotDirection)));
if (clampedCosine < cos(radians(lights[index].spotCutoff))) // outside of spotlight cone
{
attenuation = 0.0;
}
else
{
attenuation = attenuation * pow(clampedCosine, lights[index].spotExponent);
}
}
else // point light
{
//point light shadow calculations
}
}
vec3 diffuseReflection = attenuation * vec3(lights[index].diffuse) * vec3(frontMaterial.diffuse) * max(0.0, dot(normalDirection, lightDirection));
vec3 specularReflection;
if (dot(normalDirection, lightDirection) < 0.0) // light source on the wrong side
{
specularReflection = vec3(0.0, 0.0, 0.0); // no specular reflection
}
else // light source on the right side
{
specularReflection = attenuation * vec3(lights[index].specular) * vec3(frontMaterial.specular)
* pow(max(0.0, dot(reflect(-lightDirection, normalDirection), viewDirection)), frontMaterial.shininess);
}
totalAmbientDiffuse += diffuseReflection * lights[index].intensity * Visibility;
totalSpecular += specularReflection * lights[index].intensity * Visibility;
}
gl_FragColor = vec4(totalAmbientDiffuse, 1.0) * texColor + vec4(totalSpecular, 1.0);
}
I am working on the beginnings of omnidirectional shadow mapping in my engine. For now I am only producing one shadowmap as a test. I am getting an odd result when using my current shaders. Here is a screenshot which shows the problem:
I am using a near value of 0.5 and a far value of 5.0 in the projection matrix for the shadowmap render. As far as I can tell, any fragment whose light-space z is larger than my far plane distance is computed by my fragment shader as being in shadow.
This is my fragment shader:
in vec2 st;
uniform sampler2D colorTexture;
uniform sampler2D normalTexture;
uniform sampler2D depthTexture;
uniform sampler2D shadowmapTexture;
uniform mat4 invProj;
uniform mat4 lightProj;
uniform vec3 lightPosition;
out vec3 color;
void main () {
vec3 clipSpaceCoords;
clipSpaceCoords.xy = st.xy * 2.0 - 1.0;
clipSpaceCoords.z = texture(depthTexture, st).x * 2.0 - 1.0;
vec4 position = invProj * vec4(clipSpaceCoords,1.0);
position.xyz /= position.w;
vec4 lightSpace = lightProj * vec4(position.xyz,1.0);
lightSpace.xyz /= lightSpace.w;
lightSpace.xyz = lightSpace.xyz * 0.5 + 0.5;
float lightDepth = texture(shadowmapTexture, lightSpace.xy).x;
vec3 normal = texture(normalTexture, st).xyz; // texture() returns a vec4; take the xyz components
vec3 diffuse;
float shadowFactor = 1.0;
if(lightSpace.w > 0.0 && lightSpace.z > lightDepth+0.0042) {
shadowFactor = 0.2;
}
else {
float k = 0.00001;
vec3 distanceToLight = lightPosition - position.xyz;
float distanceLength = length(distanceToLight);
float attenuation = (1.0 / (1.0 + (0.1 * distanceLength) + k * (distanceLength * distanceLength)));
float diffuseTemp = max(dot(normalize(normal), normalize(distanceToLight)), 0.0);
diffuse = vec3(1.0, 1.0, 1.0) * attenuation * diffuseTemp;
}
vec3 gamma = vec3(1.0/2.2);
color = pow(texture(colorTexture, st).xyz*shadowFactor+diffuse, gamma);
}
How can I fix this issue (Other than increasing my far plane distance)?
One other question, as this is the first time I have attempted shadowmapping: am I doing the lighting in relation to the shadows correctly?