How can I add gradient banding and control it?

I am rendering a quad with up to 4 different vertex colours.
My vertex shader is super simple:
#version 330
layout(location=0) in vec2 inVertexPosition;
layout(location=1) in vec4 inColor;
out vec4 color;
void main()
{
gl_Position = vec4(inVertexPosition.x,-inVertexPosition.y, 0.0, 1.0);
color = inColor;
}
The fragment shader:
#version 330
layout(location=0) out vec4 frag_colour;
in vec4 color;
void main()
{
frag_colour = color;
}
also super simple. The result is a smooth gradient from corner to corner. However, I would like to produce an effect similar to the background of the text in this image:
where there is a limited palette, so there is intentional banding in the gradient. My attempt to create this same style is a combination of "Gradient with fixed number of levels" and "From RGB to HSV in OpenGL GLSL", which has given me a fragment shader like this:
#version 330
layout(location=0) out vec4 frag_colour;
in vec4 color;
uniform bool uBand = false;
uniform float uBandingSteps;
vec3 rgb2hsv(vec3 c)
{
    vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
    vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
    vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
    float d = q.x - min(q.w, q.y);
    float e = 1.0e-10;
    return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c)
{
    vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
    vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main()
{
    if(uBand)
    {
        vec3 hsv = rgb2hsv(color.rgb);
        float h = floor(hsv.x * (uBandingSteps*3+1) + 0.5) / (uBandingSteps*3+1);
        float s = floor(hsv.y * (uBandingSteps*3+1) + 0.5) / (uBandingSteps*3+1);
        float v = floor(hsv.z * (uBandingSteps*3+1) + 0.5) / (uBandingSteps*3+1);
        frag_colour = vec4(hsv2rgb(vec3(h,s,v)), color.a);
    }
    else
        frag_colour = color;
}
This works to a degree; however, for some reason I need to multiply uBandingSteps to generate the right number of colours. The above works "okay", but the problem is that the banding seems fairly arbitrary:
You can see that the banding doesn't give a bevelled sort of look; instead the first colour band is narrow, then wide, then narrow, then wide, and so on, rather than the bands growing gradually wider towards the middle colour (the widest part of the gradient) and then steadily narrowing again.
How can I modify what I have in order to produce the intended effect? (sort of bevelled, smooth, stepped gradient).

Possibly the answer is super simple, too. Just round the color channels.
e.g. if you want to allow 8 gradient steps for each color channel, then, based on the fragment shader from the question:
#version 330
layout(location=0) out vec4 frag_colour;
in vec4 color;
void main()
{
vec3 color8 = round(color.rgb * 8.0) / 8.0;
frag_colour = vec4(color8, color.a);
}
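If you also want to control the number of bands at run time, the constant can be lifted into a uniform. A minimal sketch that simply reuses the uBandingSteps uniform from the question (set with glUniform1f before drawing); this is the same rounding idea made adjustable, not the original answer's code:

#version 330
layout(location=0) out vec4 frag_colour;
in vec4 color;
uniform float uBandingSteps; // e.g. 8.0
void main()
{
    // round() snaps each channel to the nearest multiple of 1/uBandingSteps,
    // which quantizes the gradient into evenly spaced bands
    vec3 banded = round(color.rgb * uBandingSteps) / uBandingSteps;
    frag_colour = vec4(banded, color.a);
}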

Related

How can I apply antialiasing to a 2D grid in a shader?

I have the following fragment shader to draw a grid
#version 450 core
layout(location = 0) in vec2 position;
layout(location = 0) out vec4 fragColor;
layout(binding = 0) uniform ubo
{
    mat4 uCameraView;
    vec4 uGridColor;
    float uTileSize;
    float uGridBorderSize;
};
void main()
{
    vec2 uv = mod(position, uTileSize);
    vec2 border = mod(uv + (uGridBorderSize / 2.0), uTileSize);
    border -= mod(uv - (uGridBorderSize / 2.0), uTileSize);
    if (length(border) > uTileSize - uGridBorderSize)
    {
        fragColor = uGridColor;
    }
    else
    {
        fragColor = vec4(0.0);
    }
}
This works fine until I change the zoom. The issue appears when the camera gets far away and uGridBorderSize becomes smaller than a pixel on the screen; then I get this ugly effect, where lines appear and disappear as the zoom changes.
So I wonder: is it possible to apply antialiasing to these lines so they appear consistently?
In the end I found this fragment shader, which draws antialiased grid lines:
void main()
{
// https://madebyevan.com/shaders/grid/
vec2 uv = fragCoord / uTileSize;
vec2 grid = abs(fract(uv - 0.5) - 0.5) / fwidth(uv);
float line = (min(grid.x, grid.y) * uGridBorderSize) / uScaleFactor;
float color = 1.0 - min(line, 1.0);
fragColor = uGridColor * color;
}
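For reference, a self-contained version of that shader could look like the sketch below. It assumes position (the world-space input from the original shader) is used in place of fragCoord, and that uScaleFactor, the current zoom factor, is added to the uniform block; those names are assumptions, not necessarily the asker's final code:

#version 450 core
layout(location = 0) in vec2 position;
layout(location = 0) out vec4 fragColor;
layout(binding = 0) uniform ubo
{
    mat4 uCameraView;
    vec4 uGridColor;
    float uTileSize;
    float uGridBorderSize;
    float uScaleFactor; // assumed: current zoom factor
};
void main()
{
    vec2 uv = position / uTileSize;
    // fwidth(uv) is how much uv changes between neighbouring pixels, so
    // grid measures the distance to the nearest grid line in pixels
    vec2 grid = abs(fract(uv - 0.5) - 0.5) / fwidth(uv);
    // uGridBorderSize and the zoom factor scale that distance to set the line width
    float line = (min(grid.x, grid.y) * uGridBorderSize) / uScaleFactor;
    float coverage = 1.0 - min(line, 1.0); // 1.0 on a line, fading to 0.0 beside it
    fragColor = uGridColor * coverage;
}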

How to draw a smooth circle with a border in glsl?

I want to draw a smooth circle in GLSL but with a border of variable width, in a separate colour. Potentially, the interior of the circle could be transparent.
My original non-smooth shader:
#version 330
layout(location=0) out vec4 frag_colour;
in vec4 color;
uniform float radius;
uniform vec2 position;
uniform vec4 borderColor;
uniform float borderThickness;
void main()
{
    float distanceX = abs(gl_FragCoord.x - position.x);
    float distanceY = abs(gl_FragCoord.y - position.y);
    if(sqrt(distanceX * distanceX + distanceY * distanceY) > radius)
        discard;
    else if(sqrt(distanceX * distanceX + distanceY * distanceY) <= radius &&
            sqrt(distanceX * distanceX + distanceY * distanceY) >= radius-borderThickness)
        frag_colour = borderColor;
    else
        frag_colour = color;
}
This works, but is not smooth. I can draw smooth circles:
#version 330
layout(location=0) out vec4 frag_colour;
in vec4 color;
uniform float radius;
uniform vec2 position;
uniform vec4 borderColor;
uniform float borderThickness;
void main()
{
vec2 uv = gl_FragCoord.xy - position;
float d = sqrt(dot(uv,uv));
float t = 1.0 - smoothstep(radius-borderThickness,radius, d);
frag_colour = vec4(color.rgb,color.a*t);
}
But I can't work out how to add my border to the above.
You have to compute the absolute value of the difference between the radius and the distance, and interpolate between 0.0 and borderThickness:
float t = 1.0 - smoothstep(0.0, borderThickness, abs(radius-d));
If you want to fill the circle, then you need two gradients: one for the transition between the inner circle and the border, and a second one for the alpha channel on the outline. mix the colors by the former and set the alpha channel by the latter:
float t1 = 1.0 - smoothstep(radius-borderThickness, radius, d);
float t2 = 1.0 - smoothstep(radius, radius+borderThickness, d);
frag_colour = vec4(mix(color.rgb, baseColor.rgb, t1), t2);
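Putting the pieces together, a complete fragment shader might look like the following sketch. It keeps the uniforms from the question, uses the interpolated vertex colour as the fill and borderColor for the ring, and orders the mix() arguments accordingly (the snippet above writes the blend with a baseColor variable; adjust the names to your own code):

#version 330
layout(location=0) out vec4 frag_colour;
in vec4 color;
uniform float radius;
uniform vec2 position;
uniform vec4 borderColor;
uniform float borderThickness;
void main()
{
    vec2 uv = gl_FragCoord.xy - position;
    float d = length(uv);
    // t1 is 1.0 inside the inner disc and fades to 0.0 across the border ring
    float t1 = 1.0 - smoothstep(radius - borderThickness, radius, d);
    // t2 is 1.0 up to the outer edge and fades to 0.0 just beyond it
    float t2 = 1.0 - smoothstep(radius, radius + borderThickness, d);
    frag_colour = vec4(mix(borderColor.rgb, color.rgb, t1), t2);
}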

Normal mapping working incorrectly, weird half-light effect

We are trying to implement normal mapping in our 2D Game Engine and get a weird effect.
If the normal is set manually, like
vec3 Normal = vec3(0.0, 0.0, 1.0), the light works correctly, but we don't get the "deep" effect that we want to achieve with normal mapping:
But if we take the normal from the normal map texture, vec3 Normal = texture(NormalMap, TexCoord).rgb, it doesn't work at all. What should not be illuminated is illuminated and vice versa (such as the gaps between the bricks). Besides this, there is a dark area on the bottom (or top, depending on the position of the light) side of the texture.
Although the texture of the normal map itself looks fine:
This is our fragment shader:
#version 330 core
layout (location = 0) out vec4 FragColor;
in vec2 TexCoord;
in vec2 FragPos;
uniform sampler2D OurTexture;
uniform sampler2D NormalMap;
struct point_light
{
vec3 Position;
vec3 Color;
};
uniform point_light Light;
void main()
{
vec4 Color = texture(OurTexture, TexCoord);
vec3 Normal = texture(NormalMap, TexCoord).rgb;
if (Color.a < 0.1)
discard;
vec3 LightDir = vec3(Light.Position.xy - FragPos, Light.Position.z);
float D = length(LightDir);
vec3 L = normalize(LightDir);
Normal = normalize(Normal * 2.0 - 1.0);
vec3 Diffuse = Light.Color * max(dot(Normal, L), 0);
vec3 Ambient = vec3(0.3, 0.3, 0.3);
vec3 Falloff = vec3(1, 0, 0);
float Attenuation = 1.0 /(Falloff.x + Falloff.y*D + Falloff.z*D*D);
vec3 Intensity = (Ambient + Diffuse) * Attenuation;
FragColor = Color * vec4(Intensity, 1);
}
And vertex as well:
#version 330 core
layout (location = 0) in vec2 aPosition;
layout (location = 1) in vec2 aTexCoord;
uniform mat4 Transform;
uniform mat4 ViewProjection;
out vec2 FragPos;
out vec2 TexCoord;
void main()
{
gl_Position = ViewProjection * Transform * vec4(aPosition, 0.0, 1.0);
TexCoord = aTexCoord;
FragPos = vec2(Transform * vec4(aPosition, 0.0, 1.0));
}
I googled this and found some people who get the same result, but their questions remained unanswered.
Any idea what the cause is?
What texture format are you using for the normal map? SRGB, SNORM, etc? That might be the issue. Try UNORM.
Additionally, since you are not using tangent space, make sure the plane's Z axis aligns with the Z axis of the normals. Also, OpenGL reads the texture's Y coordinate in the reversed direction, so you need to flip the Y coordinate of the normals you read from the normal map. Alternatively, you can use a reversed-Y normal map (green pointing down).
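If the flipped Y direction is the problem, it can be handled with a one-line change where the normal is sampled, for example (a sketch against the fragment shader above):

vec3 Normal = texture(NormalMap, TexCoord).rgb;
Normal.g = 1.0 - Normal.g;               // flip the green channel ...
Normal = normalize(Normal * 2.0 - 1.0);  // ... so Y comes out negated after the * 2.0 - 1.0 remap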

Issue with lighting, possibly normal related?

I'm using OpenGL 3.3 and having an odd lighting issue. I'll first show two screenshots at different angles and then give the shader code.
First angle:
Second angle:
What you see here is:
A cube, with its middle on the origin;
A directional light source, coming from the yellow point through the origin;
In cyan you see the normals of the vertices.
I know the normals of the vertices are "wrong", but I was exactly trying to debug those.
What I expected was: a color on every face that varies from top to bottom, depending on the position of the "sun" and the camera.
But what I get is two parts of the cube (upper and lower) that both have varying colors, but not in the way I expected.
There is code for shadows in the shader, but I deliberately disabled them here to avoid confusion.
Vertex shader:
#version 430 core
layout(location = 0) in vec4 position;
layout(location = 1) in vec3 normal;
layout(location = 0) uniform mat4 model_matrix;
layout(location = 1) uniform mat4 view_matrix;
layout(location = 2) uniform mat4 proj_matrix;
layout(location = 3) uniform mat4 shadow_matrix;
out VS_OUT {
vec3 N;
vec3 L;
vec3 V;
vec4 shadow_coord;
} vs_out;
uniform vec4 light_pos = vec4(-20.0, 7.5, -20.0, 1.0);
void main(void) {
vec4 local_light_pos = view_matrix * light_pos;
vec4 p = view_matrix * model_matrix * position;
//normal
vs_out.N = normalize(normal);
//light vector
vs_out.L = local_light_pos.xyz - p.xyz;
//view vector
vs_out.V = -p.xyz;
//light space coordinates
vs_out.shadow_coord = shadow_matrix * position;
gl_Position = proj_matrix * p;
}
Fragment shader:
#version 430 core
out vec4 color;
in VS_OUT {
vec3 N;
vec3 L;
vec3 V;
vec4 shadow_coord;
} fs_in;
layout(binding = 0) uniform sampler2DShadow shadow_tex;
uniform vec3 light_ambient_albedo = vec3(1.0);
uniform vec3 light_diffuse_albedo = vec3(1.0);
uniform vec3 light_specular_albedo = vec3(1.0);
uniform vec3 ambient_albedo = vec3(0.0, 0.2, 0.0);
uniform vec3 diffuse_albedo = vec3(0.2, 0.7, 0.2);
uniform vec3 specular_albedo = vec3(0.0, 0.0, 0.0);
uniform float specular_power = 128.0;
vec3 rgb_to_grayscale_luminosity(vec3 color) {
float value = color.r * 0.21 + color.g * 0.71 + color.b * 0.07;
return vec3(value);
}
void main(void) {
    //normalize
    vec3 N = normalize(fs_in.N);
    vec3 L = normalize(fs_in.L);
    vec3 V = normalize(fs_in.V);
    //calculate R
    vec3 R = reflect(-L, N);
    //calculate ambient
    vec3 ambient = ambient_albedo * light_ambient_albedo;
    //calculate diffuse
    vec3 diffuse = max(dot(N, L), 0.0) * diffuse_albedo * light_diffuse_albedo;
    //calculate specular
    vec3 specular = pow(max(dot(R, V), 0.0), specular_power) * specular_albedo * light_specular_albedo;
    //apply shadow and write color
    float shadow_value = textureProj(shadow_tex, fs_in.shadow_coord);
    if (shadow_value > 0.0001 || true) {
        //no shadow
        color = vec4(ambient + diffuse + specular, 1.0);
    }
    else {
        //in shadow
        //color = vec4(rgb_to_grayscale_luminosity((ambient + diffuse) * (1 - shadow_value)), 0.5);
        //color = vec4(vec3(shadow_value), 0.5);
        color = vec4((ambient + diffuse) * (1 - shadow_value) * 0.5, 1.0);
    }
}
What could be going wrong here?
Assuming your normals only point upwards/downwards (x=0 and z=0 in the OpenGL coordinate system) what you see should be the expected behavior (no bug concerning the shaders/graphics pipeline).
During the rasterization stage in the graphics pipeline the attributes are interpolated among the vertices (barycentric coordinates).
Assuming that all normals above the plane "y=0" are
"vec3(0, 1, 0)"
and all normals below this plane are
"vec3(0, -1, 0)"
then for every pixel the interpolated normal will be
"vec3(0, *, 0)" where * is >0 above the "y=0"-plane and <0 below that plane.
In your fragment shader you normalize all normals, hence they will all again be
"vec3(0, 1, 0)" if the fragment lies above the "y=0"-plane and
"vec3(0, -1, 0)" if the fragment lies below that plane.
This results in one uniform color for all fragments above the "y=0"-plane and another for all fragments below it.
You can check this by removing the normalization of the normal within the fragment shader, or by adding a minimal offset to the x- or z-coordinate of some normals, e.g.
vec3(0.0000001, +/-1, 0)
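A quick way to see this in the fragment shader above is to output the re-normalized normal as a colour instead of the lighting result (a small debugging sketch):

// map the normal from [-1, 1] to [0, 1] so it is visible as a colour;
// with the normals described above, every fragment on one side of the
// y=0 plane comes out the same shade
color = vec4(normalize(fs_in.N) * 0.5 + 0.5, 1.0);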

Shadowmapping always produces shadows beyond far plane

I am working on the beginnings of omnidirectional shadow mapping in my engine. For now I am only producing one shadowmap as a test. I am getting an odd result when using my current shaders. Here is a screenshot which shows the problem:
I am using a near value of 0.5 and a far value of 5.0 in the projection matrix for the shadowmap render. As near as I can tell, any fragment with a light-space z larger than my far plane distance is being treated by my fragment shader as in shadow.
This is my fragment shader:
in vec2 st;
uniform sampler2D colorTexture;
uniform sampler2D normalTexture;
uniform sampler2D depthTexture;
uniform sampler2D shadowmapTexture;
uniform mat4 invProj;
uniform mat4 lightProj;
uniform vec3 lightPosition;
out vec3 color;
void main () {
    vec3 clipSpaceCoords;
    clipSpaceCoords.xy = st.xy * 2.0 - 1.0;
    clipSpaceCoords.z = texture(depthTexture, st).x * 2.0 - 1.0;
    vec4 position = invProj * vec4(clipSpaceCoords, 1.0);
    position.xyz /= position.w;
    vec4 lightSpace = lightProj * vec4(position.xyz, 1.0);
    lightSpace.xyz /= lightSpace.w;
    lightSpace.xyz = lightSpace.xyz * 0.5 + 0.5;
    float lightDepth = texture(shadowmapTexture, lightSpace.xy).x;
    vec3 normal = texture(normalTexture, st).xyz;
    vec3 diffuse;
    float shadowFactor = 1.0;
    if(lightSpace.w > 0.0 && lightSpace.z > lightDepth+0.0042) {
        shadowFactor = 0.2;
    }
    else {
        float k = 0.00001;
        vec3 distanceToLight = lightPosition - position.xyz;
        float distanceLength = length(distanceToLight);
        float attenuation = (1.0 / (1.0 + (0.1 * distanceLength) + k * (distanceLength * distanceLength)));
        float diffuseTemp = max(dot(normalize(normal), normalize(distanceToLight)), 0.0);
        diffuse = vec3(1.0, 1.0, 1.0) * attenuation * diffuseTemp;
    }
    vec3 gamma = vec3(1.0/2.2);
    color = pow(texture(colorTexture, st).xyz*shadowFactor+diffuse, gamma);
}
How can I fix this issue (other than increasing my far plane distance)?
One other question, as this is the first time I have attempted shadowmapping: am I doing the lighting in relation to the shadows correctly?
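One way to test the far-plane suspicion described in the question is to apply the shadow test only when the remapped light-space coordinates actually fall inside the shadow map's [0, 1] range (a sketch, not necessarily the final fix):

// after the perspective divide and the * 0.5 + 0.5 remap, fragments beyond
// the light's far plane end up with lightSpace.z > 1.0 and should not be shadowed
bool insideLightVolume = lightSpace.w > 0.0 &&
                         all(greaterThanEqual(lightSpace.xyz, vec3(0.0))) &&
                         all(lessThanEqual(lightSpace.xyz, vec3(1.0)));
if (insideLightVolume && lightSpace.z > lightDepth + 0.0042) {
    shadowFactor = 0.2;
}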