How will the value be passed to the fragment shader? (GLSL)

This is an extract from a geometry shader.
#version 460 core

layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;

noperspective out vec3 g_edge_distance;

in vec3 world_pos[];
in vec3 normal[];

void main()
{
    // Calculate the triangle altitudes (a, b, c, alpha and beta are
    // computed earlier in the full shader; this is only an extract)
    float ha = abs( c * sin( beta ) );
    float hb = abs( c * sin( alpha ) );
    float hc = abs( b * sin( alpha ) );

    g_edge_distance = vec3( ha, 0, 0 );
    gl_Position = gl_in[0].gl_Position;
    EmitVertex();

    g_edge_distance = vec3( 0, hb, 0 );
    gl_Position = gl_in[1].gl_Position;
    EmitVertex();

    g_edge_distance = vec3( 0, 0, hc );
    gl_Position = gl_in[2].gl_Position;
    EmitVertex();

    EndPrimitive();
}
What I want to understand is how the value of g_edge_distance will be passed to the fragment shader.

Because g_edge_distance is declared noperspective, you will get a value that is linearly interpolated in screen (window) space, i.e. without the perspective correction that is applied to ordinary (smooth) interpolated outputs.
You can read more about interpolation qualifiers on the official Khronos wiki page on type qualifiers.
To quickly see and understand the difference, have a look at the Geeks3d tutorial.
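To make this concrete, here is a minimal fragment shader sketch (not from the original post) that consumes the interpolated value to draw anti-aliased wireframe lines; line_width, line_color, surface_color and frag_color are illustrative names, not part of the question's code:
#version 460 core

// Must match the geometry shader's qualifier: the value arrives
// interpolated linearly in window space, without perspective correction.
noperspective in vec3 g_edge_distance;

// Hypothetical uniforms, for illustration only.
uniform float line_width;
uniform vec3 line_color;
uniform vec3 surface_color;

out vec4 frag_color;

void main()
{
    // Distance from this fragment to the nearest edge of the triangle.
    float d = min( g_edge_distance.x,
                   min( g_edge_distance.y, g_edge_distance.z ) );

    // Blend towards the line color when the fragment is near an edge.
    float mix_val = smoothstep( line_width - 1.0, line_width + 1.0, d );
    frag_color = vec4( mix( line_color, surface_color, mix_val ), 1.0 );
}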

Related

OpenGL Flickering Fragments when Drawing Wireframe

I've been following along with the OpenGL 4 Shading Language Cookbook and have gotten a teapot rendering with Bézier surfaces. The next step I'm attempting is to draw a wireframe over the surfaces using a geometry shader. The directions can be found here on pages 228-230. Following the code that is given, I've gotten the wireframe to display; however, I also have multiple fragments that flicker in different shades of my material color.
An image of this can be seen here.
I have narrowed down the possible issues and discovered that, for some reason, I am getting inconsistent side lengths in my triangle height calculations: if I hard-code the edge-distance values for each vertex of the triangle within the geometry shader, the teapot no longer flickers, but no wireframe is displayed either (variables ha, hb and hc in the geometry shader below).
I was wondering if anyone has run into this issue before or is aware of a workaround.
Below are some sections of my code:
Geometry Shader:
/*
 * Geometry Shader
 *
 * CSCI 499, Computer Graphics, Colorado School of Mines
 */

#version 410 core

layout( triangles ) in;
layout( triangle_strip, max_vertices = 3 ) out;

out vec3 GNormal;
out vec3 GPosition;
out vec3 ghalfwayVec;
out vec3 GLight;
noperspective out vec3 GEdgeDistance;

in vec4 TENormal[];
in vec4 TEPosition[];
in vec3 halfwayVec[];
in vec3 TELight[];

uniform mat4 ViewportMatrix;

void main() {
    // Transform each vertex into viewport space
    vec3 p0 = vec3(ViewportMatrix * (gl_in[0].gl_Position / gl_in[0].gl_Position.w));
    vec3 p1 = vec3(ViewportMatrix * (gl_in[1].gl_Position / gl_in[1].gl_Position.w));
    vec3 p2 = vec3(ViewportMatrix * (gl_in[2].gl_Position / gl_in[2].gl_Position.w));

    // Find the altitudes (ha, hb and hc)
    float a = length(p1 - p2);
    float b = length(p2 - p0);
    float c = length(p1 - p0);
    float alpha = acos( (b*b + c*c - a*a) / (2.0*b*c) );
    float beta  = acos( (a*a + c*c - b*b) / (2.0*a*c) );
    float ha = abs( c * sin( beta ) );
    float hb = abs( c * sin( alpha ) );
    float hc = abs( b * sin( alpha ) );

    // Send the triangle along with the edge distances
    GEdgeDistance = vec3( ha, 0, 0 );
    GNormal = vec3(TENormal[0]);
    GPosition = vec3(TEPosition[0]);
    gl_Position = gl_in[0].gl_Position;
    EmitVertex();

    GEdgeDistance = vec3( 0, hb, 0 );
    GNormal = vec3(TENormal[1]);
    GPosition = vec3(TEPosition[1]);
    gl_Position = gl_in[1].gl_Position;
    EmitVertex();

    GEdgeDistance = vec3( 0, 0, hc );
    GNormal = vec3(TENormal[2]);
    GPosition = vec3(TEPosition[2]);
    gl_Position = gl_in[2].gl_Position;
    EmitVertex();

    EndPrimitive();

    // NOTE: these assignments come only after EndPrimitive() -- see the
    // self-answer below.
    ghalfwayVec = halfwayVec[0];
    GLight = TELight[0];
}
Fragment Shader:
/*
 * Fragment Shader
 *
 * CSCI 441, Computer Graphics, Colorado School of Mines
 */

#version 410 core

in vec3 ghalfwayVec;
in vec3 GLight;
in vec3 GNormal;
in vec3 GPosition;
noperspective in vec3 GEdgeDistance;

layout( location = 0 ) out vec4 FragColor;

uniform vec3 mDiff, mAmb, mSpec;
uniform float shininess;

uniform light {
    vec3 lAmb, lDiff, lSpec, lPos;
};

// The mesh line settings
uniform struct LineInfo {
    float Width;
    vec4 Color;
} Line;

vec3 phongModel( vec3 pos, vec3 norm ) {
    vec3 lightVec2   = normalize(GLight);
    vec3 normalVec2  = -normalize(GNormal);
    vec3 halfwayVec2 = normalize(ghalfwayVec);

    float sDotN = max( dot(lightVec2, normalVec2), 0.0 );
    vec4 diffuse = vec4(lDiff * mDiff * sDotN, 1);
    vec4 specular = vec4(0.0);
    if( sDotN > 0.0 ) {
        specular = vec4(lSpec * mSpec * pow( max( 0.0, dot( halfwayVec2, normalVec2 ) ), shininess ), 1);
    }
    vec4 ambient = vec4(lAmb * mAmb, 1);

    vec3 fragColorOut = vec3(diffuse + specular + ambient);
    return fragColorOut;
}

void main() {
    // The shaded surface color.
    vec4 color = vec4(phongModel(GPosition, GNormal), 1.0);

    // Find the smallest distance
    float d = min( GEdgeDistance.x, GEdgeDistance.y );
    d = min( d, GEdgeDistance.z );

    // Determine the mix factor with the line color
    float mixVal = smoothstep( Line.Width - 1, Line.Width + 1, d );

    // Mix the surface color with the line color
    FragColor = vec4(mix( Line.Color, color, mixVal ));
    FragColor.a = 1;
}
I ended up stumbling across the solution to my issue. In the geometry shader, I was assigning the halfway vector and the light vector after ending the primitive, so the values of these vectors were never being correctly sent to the fragment shader. Since no data was given to the fragment shader, garbage values were used, and the Phong shading model computed the fragment color from random values. Moving the two lines that follow EndPrimitive() to the top of the main function in the geometry shader resolved the issue.
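To illustrate the rule with a stripped-down sketch (this is not the full shader from the question, just the two outputs in question):
#version 410 core

layout( triangles ) in;
layout( triangle_strip, max_vertices = 3 ) out;

in vec3 halfwayVec[];
in vec3 TELight[];

out vec3 ghalfwayVec;
out vec3 GLight;

void main() {
    // Write every output before the EmitVertex() that should capture it;
    // output values written after EndPrimitive() are never attached to
    // any vertex, which is what caused the garbage values above.
    ghalfwayVec = halfwayVec[0];
    GLight = TELight[0];

    for (int i = 0; i < 3; ++i) {
        gl_Position = gl_in[i].gl_Position;
        EmitVertex();
    }
    EndPrimitive();
}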

OpenGL 4.x ADS Phong shading, plane not fully colored, instead bullseye-like coloring

Here is a picture of the program running (the screenshot shows the plane shaded in concentric, bullseye-like rings rather than a solid color).
I can't figure out why my plane is getting this bullseye coloring. I'm pretty sure I'm doing something wrong in the shaders, but I'm not entirely sure what the problem is.
This is my fragment shader:
#version 430 core

in vec4 color;
in vec4 position;

uniform float fTime;
uniform vec3 lookat;

out vec4 fColor;

vec4 calculateMyNormal(vec4 mposition)
{
    float dfdx = 2*(mposition.x) * 4 * cos(radians((mposition.x*mposition.x) + (mposition.z*mposition.z) + fTime));
    float dfdz = 2*(mposition.z) * 4 * cos(radians((mposition.x*mposition.x) + (mposition.z*mposition.z) + fTime));
    vec3 a = vec3(1, dfdx, 0);
    vec3 b = vec3(0, dfdz, 1);
    vec3 normal = normalize(cross(a, b));
    return vec4(normal, 1.0);
}

vec4 ADSLightModel(vec4 myNormal, vec4 myPosition)
{
    const vec4 myLightPosition    = vec4( 1.0, 0.5, 0.0, 1.0 );
    const vec4 myLightAmbient     = vec4( 0.2, 0.2, 0.2, 1.0 );
    const vec4 myLightDiffuse     = vec4( 1.0, 1.0, 1.0, 1.0 );
    const vec4 myLightSpecular    = vec4( 1.0, 1.0, 1.0, 1.0 );
    const vec4 myMaterialAmbient  = vec4( 1.0, 0.5, 0.0, 1.0 );
    const vec4 myMaterialDiffuse  = vec4( 0.5, 0.1, 0.5, 1.0 );
    const vec4 myMaterialSpecular = vec4( 0.6, 0.6, 0.6, 1.0 );
    const float myMaterialShininess = 80;

    vec4 norm   = normalize( myNormal );
    vec4 lightv = normalize( myLightPosition - myPosition );
    vec4 viewv  = normalize( vec4(lookat, 1.0) - myPosition );
    vec4 refl   = reflect( vec4(lookat, 1.0) - lightv, norm );

    vec4 ambient  = myMaterialAmbient * myLightAmbient;
    vec4 diffuse  = max(0.0, dot(lightv, norm)) * myMaterialDiffuse * myLightDiffuse;
    vec4 specular = vec4( 0.0, 0.0, 0.0, 1.0 );
    if( dot(lightv, viewv) > 0 )
    {
        specular = pow(max(0.0, dot(viewv, refl)), myMaterialShininess) * myMaterialSpecular * myLightSpecular;
    }
    return clamp(ambient + diffuse + specular, 0.0, 1.0);
}

void main()
{
    vec4 norml = calculateMyNormal(position);
    fColor = ADSLightModel(norml, position);
}
The plane moves, and I do that in the vertex shader; I don't know if that might be the problem.
#version 430 core

layout (location = 0) in vec4 vPosition;

uniform float fTime;
uniform mat4 mTransform;

out vec4 color;
out vec4 position;

float calculaY(float x, float z, float time)
{
    return 0.5 * sin(time + (x*x + z*z) / 50.0);
}

void main()
{
    vec4 vNewpos = vPosition;
    vNewpos.y = calculaY(vNewpos.x, vNewpos.z, fTime);
    color = vec4(0.0, 0.0, 1.0, 1.0);
    position = vNewpos;
    gl_Position = mTransform * vNewpos;
}
The last thing I can imagine being wrong would be the normals, but I'm using my teacher's code to generate the plane, and his plane had a solid color all over, so either he did something wrong and later fixed it or, as I think, the problem is in my shaders.
Your reflection vector does not really make sense:
vec4 refl = reflect( vec4(lookat, 1.0) - lightv, norm );
There are a couple of things which should make you suspicious:
refl is not normalized. The reflect operation will preserve the length of the input vector, but the input vec4(lookat, 1.0) - lightv is not normalized.
The value of vec4(lookat, 1.0) - lightv references a point, not a direction vector, since it is the difference between a point and a direction vector.
The term vec4(lookat, 1.0) - lightv does not make sense geometrically. What you want is the reflection of the light incidence vector lightv around the normal. The viewing position is totally irrelevant for determining the direction an incident light ray will be reflected to at some surface point.
The reflection vector should just be the incident light direction reflected about the normal:
refl = reflect( -lightv, norm );
(reflect() expects the incident vector to point towards the surface point, while lightv points away from it, hence the negation; norm is the name used in your shader.)
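For reference, here is a minimal sketch of a corrected specular term under the same vec4 conventions as the shader above; phongSpecular is a hypothetical helper, not part of the original code, and lightv and viewv are assumed normalized and pointing away from the surface point:
vec4 phongSpecular(vec4 norm, vec4 lightv, vec4 viewv,
                   vec4 materialSpec, vec4 lightSpec, float shininess)
{
    // Reflect the incoming light ray (-lightv) about the surface normal.
    vec4 refl = reflect( -lightv, norm );

    // Add a highlight only when the surface actually faces the light.
    if( dot( lightv, norm ) > 0.0 )
        return pow( max( 0.0, dot( viewv, refl ) ), shininess ) * materialSpec * lightSpec;
    return vec4( 0.0, 0.0, 0.0, 1.0 );
}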

Why do all my particles share the same rotation and color? (GLSL Shaders)

As the title suggests, I am building a particle system in OpenGL, using a geometry shader to create billboards from points. Everything seems okay, but it looks like all particles share the rotation and color of the first particle. I've checked the inputs, of course.
Here are my current vertex and geometry shaders:
Vertex Shader:
#version 330 core

layout(location = 0) in vec4 in_position;
layout(location = 2) in float in_angle;
layout(location = 3) in vec4 in_color;

uniform mat4 P;
uniform mat4 V;

out VertexData
{
    vec4 color;
    float angle;
} vertex;

void main()
{
    vertex.color = in_color;
    vertex.angle = in_angle;
    gl_Position = in_position;
}
Geometry shader:
#version 330

layout (points) in;
layout (triangle_strip, max_vertices = 4) out;

uniform mat4 V;
uniform mat4 P;

in VertexData
{
    vec4 color;
    float angle;
} vertex[];

out vec4 fragColor;
out vec2 texcoord;

// Based on http://www.neilmendoza.com/glsl-rotation-about-an-arbitrary-axis/
mat4 rotationMatrix(vec3 axis, float angle)
{
    axis = normalize(axis);
    float s = sin(angle);
    float c = cos(angle);
    float oc = 1.0 - c;
    return mat4(oc * axis.x * axis.x + c,          oc * axis.x * axis.y - axis.z * s, oc * axis.z * axis.x + axis.y * s, 0.0,
                oc * axis.x * axis.y + axis.z * s, oc * axis.y * axis.y + c,          oc * axis.y * axis.z - axis.x * s, 0.0,
                oc * axis.z * axis.x - axis.y * s, oc * axis.y * axis.z + axis.x * s, oc * axis.z * axis.z + c,          0.0,
                0.0,                               0.0,                               0.0,                               1.0);
}

void main()
{
    mat4 R = rotationMatrix( vec3(0, 0, 1), vertex[0].angle );
    vec4 pos = V * vec4( gl_in[0].gl_Position.xyz, 1.0 );
    float size = gl_in[0].gl_Position.w;

    texcoord = vec2( 0.0, 0.0 );
    gl_Position = P * ( pos + vec4( texcoord, 0, 0 ) * R * size );
    fragColor = vertex[0].color;
    EmitVertex();

    texcoord = vec2( 1.0, 0.0 );
    gl_Position = P * ( pos + vec4( texcoord, 0, 0 ) * R * size );
    fragColor = vertex[0].color;
    EmitVertex();

    texcoord = vec2( 0.0, 1.0 );
    gl_Position = P * ( pos + vec4( texcoord, 0, 0 ) * R * size );
    fragColor = vertex[0].color;
    EmitVertex();

    texcoord = vec2( 1.0, 1.0 );
    gl_Position = P * ( pos + vec4( texcoord, 0, 0 ) * R * size );
    fragColor = vertex[0].color;
    EmitVertex();

    EndPrimitive();
}
Am I doing something wrong here?
Ahh, I found out what was causing my problem.
I still had two calls to glVertexAttribDivisor() left over from when I was still using glDrawArraysInstanced(). With a non-zero divisor, the angle and color attributes advance once per instance instead of once per vertex, so an ordinary (non-instanced) draw call read index 0 for every particle. Removing the leftover calls (or setting the divisor back to 0) fixed it.

Drawing a rectangle in a Fragment Shader

I'd like to know if it is possible to draw a rectangle in a fragment shader without using any if. I tried this:
void main(void)
{
    vec2 resolution = vec2( 300.0, 300.0 );
    vec2 center = resolution / 2.0;
    vec2 position = ( gl_FragCoord.xy - center ) / resolution;
    if ( ( abs(position.x) < 0.5 ) && ( abs(position.y) < 0.5 ) )
        gl_FragColor = vec4( 0.0, 0.0, 1.0, 1.0 );
}
When I test whether the pixel is in the rectangle, I must perform an if test. I think there is a solution that avoids the if; can you help me?
I'm not sure exactly what you are trying to do, but here's one idea:
precision mediump float;

uniform float time;
uniform vec2 mouse;
uniform vec2 resolution;

void main( void ) {
    vec2 position = ( gl_FragCoord.xy / resolution.xy ) + mouse / 4.0;

    //               x1   y1   x2   y2
    vec4 rect = vec4(0.2, 0.3, 0.4, 0.5);

    vec2 hv = step(rect.xy, position) * step(position, rect.zw);
    float onOff = hv.x * hv.y;

    gl_FragColor = mix(vec4(0, 0, 0, 0), vec4(1, 0, 0, 0), onOff);
}
Here's how it works: step(edge, x) returns 0.0 where x < edge and 1.0 otherwise, so each component of hv is 1.0 only where position lies between the corresponding edges of the rectangle, and the product hv.x * hv.y is therefore 1.0 exactly inside the rectangle, with no branching needed.
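Applied to the shader from the question, the same step() trick would look roughly like this (a sketch keeping the question's hard-coded 300x300 resolution; note it writes black outside the rectangle instead of leaving the fragment untouched):
precision mediump float;

void main(void)
{
    vec2 resolution = vec2( 300.0, 300.0 );
    vec2 center = resolution / 2.0;
    vec2 position = ( gl_FragCoord.xy - center ) / resolution;

    // 1.0 where abs(position) <= 0.5 on an axis, 0.0 otherwise --
    // the same test as the original if, expressed without branching.
    vec2 hv = step( abs(position), vec2(0.5) );
    float onOff = hv.x * hv.y;

    gl_FragColor = onOff * vec4( 0.0, 0.0, 1.0, 1.0 );
}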

Waving flag effect in vertex shader

I am looking to create a waving flag effect in a vertex shader, and here is what I have so far:
#version 330

layout(location = 0) in vec3 in_position;
layout(location = 1) in vec3 in_color;

uniform mat4 model_matrix, view_matrix, projection_matrix;
uniform vec3 culoare;
uniform float currentAngle;

out vec3 vertex_to_fragment_color;

void main(){
    vertex_to_fragment_color = culoare;

    vec4 v = vec4( in_position.x, in_position.y, in_position.z, 1.0 );
    v.y  = sin( in_position.x + currentAngle );
    v.y += sin( in_position.z + currentAngle );
    v.y *= in_position.x * 0.08;

    gl_Position = projection_matrix * view_matrix * model_matrix * v;
}
currentAngle is a variable that I'm sending to the shader, and the code that maintains it looks like this:
if ( currentAngle > 360.0f ) currentAngle -= 360.0f;
if ( currentAngle < 0.0f ) currentAngle += 360.0f;
I am new to this, so I could really use some help getting this right.
GLSL's sin() and cos() take their arguments in radians, not degrees.
You can use the GLSL function radians() to convert degrees to radians.
You'll also have to subdivide your flag rectangle to get a convincing effect.
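Putting those points together, a minimal sketch of the corrected vertex shader might look like this (same inputs and uniforms as the shader in the question; the 0.08 scale factor is kept from the original, and everything else about the wave shape is an illustrative choice):
#version 330

layout(location = 0) in vec3 in_position;
layout(location = 1) in vec3 in_color;

uniform mat4 model_matrix, view_matrix, projection_matrix;
uniform vec3 culoare;
uniform float currentAngle; // still in degrees, as maintained by the application

out vec3 vertex_to_fragment_color;

void main(){
    vertex_to_fragment_color = culoare;

    // Convert the 0-360 degree angle to radians before calling sin().
    float phase = radians( currentAngle );

    vec4 v = vec4( in_position, 1.0 );
    v.y  = sin( in_position.x + phase );
    v.y += sin( in_position.z + phase );

    // Scale the wave by the distance along the flag so that the edge
    // attached to the pole (x = 0) stays still.
    v.y *= in_position.x * 0.08;

    gl_Position = projection_matrix * view_matrix * model_matrix * v;
}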