I'm trying to calculate per-face normals in geometry shader, using the following pipeline
//VERTEX_SHADER
#version 330
layout(location = 0) in vec4 vertex;
out vec3 vert;
uniform mat4 projMatrix;
uniform mat4 mvMatrix;
void main()
{
vert = vertex.xyz;
gl_Position = projMatrix * mvMatrix * vertex;
}
//GEOMETRY_SHADER
#version 330
layout ( triangles ) in;
layout ( triangle_strip, max_vertices = 3 ) out;
out vec3 normal_out;
uniform mat3 normalMatrix;
void main()
{
vec3 A = gl_in[2].gl_Position.xyz - gl_in[0].gl_Position.xyz;
vec3 B = gl_in[1].gl_Position.xyz - gl_in[0].gl_Position.xyz;
normal_out = normalMatrix * normalize(cross(A,B));
gl_Position = gl_in[0].gl_Position;
EmitVertex();
gl_Position = gl_in[1].gl_Position;
EmitVertex();
gl_Position = gl_in[2].gl_Position;
EmitVertex();
EndPrimitive();
}
//FRAG_SHADER
#version 330
in vec3 normal_out;
in vec3 vert;
out vec4 fColor;
uniform vec3 lightPos;
void main()
{
highp vec3 L = normalize(lightPos - vert);
highp float NL = max(dot(normal_out, L), 0.0);
highp vec3 color = vec3(1, 1, 0.0);
fColor = vec4(color*NL, 1.0);
}
However, I end up with very weird looking faces that keeps flickering(I included a snapshot below). It occurred to me that it might be because I'm using 8 vertices to represent 1 cell(cube) instead of 24 vertices, But I'm not quite sure if that is what is causing the problem.
Left: Using Light Weighting 'NL', Right:Without
After every call to EmitVertex, the contents of all output variables are made undefined. Therefore, if you want to output the same value to multiple vertices, you must copy it to the output every time.
Also, note that each shader stage's outputs provide inputs only to the next stage. So if you have a GS, and you want to pass a value from the VS to the FS, you must have the GS explicitly pass that value through.
Related
I've been trying for some time now to debug this simple Phong-Shader and just came up short. The diffuse part is fine but the specular is not. It doesn't move with the camera.
Here's from one side:
And here from the other:
As far as I can see, I did convert the Position to Viewspace, but, apparently, I made another mistake of some kind.
Vertex Shader:
#version 330
#extension GL_ARB_explicit_attrib_location : enable
layout(location=0) in vec3 aPosition;
layout(location=1) in vec3 aNormal;
out vec3 vPosition;
out vec3 vNormal;
uniform mat4 uModel;
uniform mat4 uView;
uniform mat4 uProjection;
void main(void)
{
vPosition = vec3(uModel * vec4(aPosition,1.0f));
vNormal = vec3(uModel * vec4(aNormal, 0.0f));
gl_Position = uProjection * uView * uModel * vec4(aPosition, 1.0);
}
And my Fragment Shader
#version 330
out vec4 FragColor;
in vec3 vPosition;
in vec3 vNormal;
uniform mat4 uView;
uniform vec3 uColor;
uniform vec3 uLightpositions[10];
uniform vec3 uLightcolors[10];
uniform float uPhongSpecular;
void main(void)
{
FragColor = vec4(0.4*uColor, 1.0);//Ambient Value
for(int i = 0; i < 5; i++){
vec3 lVec = normalize(uLightpositions[i] - vPosition);
vec3 nVec = normalize(vNormal);
float diffuse = max(dot(lVec,nVec), 0);
FragColor += 0.5* vec4(uLightcolors[i] * diffuse,0.0f);
vec3 rVec = normalize(reflect(lVec,nVec));
vec3 vVec = -normalize(vec3(uView * vec4(vPosition,1.0)));
float specular = 0;
if(dot(rVec,vVec) < 0.0)
{
specular = pow(max(dot(-rVec,vVec),0),uPhongSpecular);
}
FragColor += 0.2*vec4(uLightcolors[i] * specular,0.0f);
}
}
The problem is dot(-rVec, vVec). vVec is a vector in view space, however, rVec is a vector in world space. Convert rVec from world space to view space:
vec3 rVec = normalize(reflect(lVec, nVec));
vec3 rVec = normalize(mat3(uView) * reflect(lVec, nVec));
One thing wrong in addition to Rabbid76's answer is if(dot(rVec,vVec) < 0.0) is testing if the angle between the two is greater than 90 degrees. In other words, you are testing if the specular reflection is visible behind the model. You have to flip that from less-than to greater-than >.
I am busy implementing a deferred lighting system and have gotten all the way to having the bound position, diffuse and normal textures in my fragment shader in which I am to calculate the lighting specifications for each fragment.
#version 400 core
in vec3 fs_position;
in vec3 fs_color;
in vec4 fs_attenuation;
layout (location = 0) out vec4 outColor;
uniform sampler2D diffuseSampler;
uniform sampler2D positionSampler;
uniform sampler2D normalSampler;
const float cutOffFactor = 200;
const float reflectivity = 0.15;
const float shineDamper = 1;
void main(void){
vec2 frag = gl_PointCoord.xy;
frag.x = (frag.x+1)/2f;
frag.y = ((frag.y+1)/2f);
vec4 texDiffuse = texture(diffuseSampler,frag);
vec4 texPosition = texture(positionSampler,frag);
vec4 texNormal = texture(normalSampler,frag);
vec3 p = vec3(fs_position.xyz);
vec3 ePosition = texPosition.xyz;
ePosition = ePosition*200;
vec3 eNormal = texNormal.xyz;
vec3 unitNormal = normalize(eNormal);
outColor = vec4(texNormal.xyz,1.0);
}
That is literally all that my Fragment Shader contains.
The probem lies at "vec3 p = vec3(fs_position.xyz);".
When I remove it the program renders a perfect normal map, but when I add it a blank screen in which I can rotate and eventually a certain color flickers.
fs_position has nothing to do with color and was inputted from the geometry shader (all references are correct) yet it somehow causes a massive malfunction.
Same thing happens as well with all in variables (fs_color and fs_attenuation).
Whats being rendered is a non-blended quad of equal per-vertex properties that covers the viewport that renders to a color_attachment that exists(as said without that line everything works).
(blending does nothing, and I will put additive blending on when I get a result worthy of allowing me to continue)
Any help will be appreciated, the engine and shaders have never acted this way for me before and no errors are popping up.
Extra code:
Vertex shader
#version 400 core
in vec3 position;
in vec3 color;
in vec4 attenuation;
out vec3 gs_position;
out vec3 gs_color;
out vec4 gs_attenuation;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
void main(void){
vec4 worldPosition = vec4(position,1.0);
vec4 viewPosition = viewMatrix * worldPosition;
gl_Position = projectionMatrix * viewPosition;
gs_position = viewPosition.xyz;
gs_color = color;
gs_attenuation = attenuation;
}
GeometryShader
#version 150
layout (points) in;
layout (triangle_strip,max_vertices = 4) out;
in vec3 gs_position[];
in vec3 gs_color[];
in vec4 gs_attenuation[];
out vec3 fs_position;
out vec3 fs_color;
out vec4 fs_attenuation;
void main(void){
gl_Position = vec4(-1,1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
gl_Position = vec4(-1,-1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
gl_Position = vec4(1,1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
gl_Position = vec4(1,-1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
EndPrimitive();
}
Example of light values:
Position: -1, 0.5, -1
Color: 0, 0.5 ,0
Attenuation: 1, 0.1, 0.2, 0
As for the requested screenshots, basically without referencing an in variable I get something like this:
And with it I get a black screen, which is pretty easy to visualise.
(Although when rotating the view matrix (y-axis) there is a certain point at which the quad gets colored green, although I cant get values for it)
I have this vertex shader:
#version 430 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 textureCoordinate;
layout (location = 2) in vec3 normal;
layout (location = 4) in vec3 tangent;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
uniform vec3 lightPosition = vec3(0.0, 1.0, 0.0);
out vec2 texCoord;
out vec3 lightDirection;
out vec3 eyeDirection;
void main() {
vec4 P = modelViewMatrix * vec4(position, 1.0);
vec3 N = normalize(mat3(modelViewMatrix) * normal);
vec3 T = normalize(mat3(modelViewMatrix) * tangent);
vec3 B = cross(N, T);
vec3 L = lightPosition - P.xyz;
vec3 V = -P.xyz;
lightDirection = normalize(vec3(dot(V, T), dot(V, B), dot(V, N)));
eyeDirection = normalize(vec3(dot(V, T), dot(V, B), dot(V, N)));
texCoord = textureCoordinate;
gl_Position = projectionMatrix * P;
}
And this fragment shader:
#version 430 core
in vec2 texCoord;
in vec3 lightDirection;
in vec3 eyeDirection;
layout (binding = 0) uniform sampler2D tex;
layout (binding = 1) uniform sampler2D normalMap;
out vec4 color;
void main() {
vec3 ambient = vec3(0.1);
vec3 V = normalize(eyeDirection);
vec3 L = normalize(lightDirection);
vec3 N = normalize(texture(normalMap, texCoord)).rgb * 2.0 - vec3(1.0);
vec3 R = reflect(-L, N);
vec3 diffuseAlbedo = texture(tex, texCoord).rgb;
/*vec3 diffuseAlbedo = vec3(1.0);*/
vec3 diffuse = max(dot(N, L), 0.0) * diffuseAlbedo;
vec3 specular = vec3(0.0);
color = vec4(ambient + diffuse + specular, 1.0);
}
As you can see, the variable lightPosition is actually used in the calculations. But when I want to get the location of it via glGetUniformLocation, I get -1.
I looked through the other questions here on SO, like those ones:
glGetActiveUniform reports uniform exists, but glGetUniformLocation returns -1
glGetUniformLocation return -1 on nvidia cards
glGetUniformLocation() returning -1 even though used in vertex shader
Especially the last one - but in my case, lightPosition is used to calculate lightDirection and this is then used in the fragment shader. So it should not be removed.
Any ideas on what is going on here? Or how to debug this?
Also: I set a default value for the uniform. If I remove the default value, it still gives -1.
I am running Ubuntu 15.10 with Nvidia 340.96 drivers and a GF 710M card. I am running OpenGL 4.4 with core profile.
... As you can see, the variable lightPosition is actually used in the calculations. ...
You would think, right. Actually you're using it once in the Vertex Shader to assign a value to vec3 L, which is not actually used after that.
The compilation process for the shaders is very meticulous. You may have used it in a calculation in the Vertex Shader, but you didn't utilize the result L anywhere and it doesn't make any contribution to the final result calculated in the Fragment Shader, so it was "optimized away".
You must give some use to L.
I was looking for a lot of this problem. I found this question Passing data into different shaders but this problem not mine. I get "The fragment shader uses varying "normal", but previous shader does not write to it." error message.
My vertey shader code:
#version 430
in layout(location=0) vec3 position;
in layout(location=1) vec3 normal;
out vec3 norm;
uniform mat4 transformation;
void main()
{
gl_Position = transformation * vec4(position, 1.0);
norm = (transformation * vec4(normal, 0.0)).xyz;
}
And my fragment shader code:
#version 430
in vec3 normal;
out vec4 colour;
vec3 lightPos = vec3(0,50,0);
vec3 lightColor = vec3(0.5, 0, 0);
vec3 materialColor = vec3(0, 1.0, 0);
void main() {
float cosTheta = dot(-lightPos, normalize(normal));
vec3 temp = materialColor * lightColor * cosTheta;
colour = vec4(temp, 1.0);
}
What is the main problem? I don't understand this message my vertex shader using the normal vector and it passing into fragment shader. I don't see difference between the linked code and mine. Please tell me some idea :\
If you want to use different variable names for some reason you can specify a location to match in- and output variables.
For example, in your case:
.vert:
out layout(location = 7) vec3 norm;
.frag:
in layout(location = 7)vec3 normal;
I have created an application in OpenGL that uses a Vertex Shader, Geometry Shader, and Fragment Shader.
I have a uniform variable, eyePositionWorld that I would like to use both in the Geometry Shader and the Fragment Shader.
(I am rendering the position of the verticies compared to the eyePositionWorld as the color)
Vertex Shader
#version 430
in vec4 vertexPositionModel;
in vec3 vertexColor;
in vec3 vertexNormalModel;
in mat4 modelMatrix;
uniform mat4 viewMatrix;//World To View
uniform mat4 projectionMatrix;//View to Projection
struct fData
{
vec3 fragColor;
vec3 fragPositionWorld;
vec3 fragNormalWorld;
};
out fData geomData;
void main()
{
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vertexPositionModel;
geomData.fragColor = vertexColor;
geomData.fragPositionWorld = (modelMatrix * vertexPositionModel).xyz;
geomData.fragNormalWorld = (modelMatrix * vec4(vertexNormalModel, 0.0)).xyz;
}
Geometry Shader
#version 430
layout(triangles_adjacency) in;
layout(triangle_strip, max_vertices=3) out;
struct fData
{
vec3 fragColor;
vec3 fragPositionWorld;
vec3 fragNormalWorld;
};
uniform vec3 eyePositionWorldGeomShader;
in fData geomData[];
out fData fragData;
void main() {
gl_Position = gl_in[0].gl_Position;
fragData = geomData[0];
fragData.fragColor = gl_in[0].gl_Position.xyz - eyePositionWorldGeomShader;
EmitVertex();
gl_Position = gl_in[2].gl_Position;
fragData = geomData[2];
fragData.fragColor = gl_in[2].gl_Position.xyz - eyePositionWorldGeomShader;
EmitVertex();
gl_Position = gl_in[4].gl_Position;
fragData = geomData[4];
fragData.fragColor = gl_in[4].gl_Position.xyz - eyePositionWorldGeomShader;
EmitVertex();
EndPrimitive();
}
Fragment Shader
#version 430
struct fData
{
vec3 fragColor;
vec3 fragPositionWorld;
vec3 fragNormalWorld;
};
in fData fragData;
uniform vec4 ambientLight;
uniform vec3 lightPositionWorld;
uniform vec3 eyePositionWorld;
uniform bool isLighted;
out vec4 color;
void main()
{
if (!isLighted)
{
color = vec4(fragData.fragColor, 1.0);
}
else
{
vec3 lightVectorWorld = normalize(lightPositionWorld - fragData.fragPositionWorld);
float brightness = clamp(dot(lightVectorWorld, normalize(fragData.fragNormalWorld)), 0.0, 1.0);
vec4 diffuseLight = vec4(brightness, brightness, brightness, 1.0);
vec3 reflectedLightVectorWorld = reflect(-lightVectorWorld, fragData.fragNormalWorld);
vec3 eyeVectorWorld = normalize(eyePositionWorld - fragData.fragPositionWorld);
float specularity = pow(clamp(dot(reflectedLightVectorWorld, eyeVectorWorld), 0.0, 1.0), 40) * 0.5;
vec4 specularLight = vec4(specularity, specularity, specularity, 1.0);
//Maximum Distance of All Lights
float maxDist = 55.0;
float attenuation = clamp((maxDist - length(lightPositionWorld - fragData.fragPositionWorld)) / maxDist, 0.0, 1.0);
color = (ambientLight + (diffuseLight + specularLight) * attenuation) * vec4(fragData.fragColor, 1.0);
}
}
C++ Code (the m_eyePositionUL and m_eyePositionGeomShaderUL are both just loaded with glGetUniformLocation)
glUniform3fv(m_eyePositionUL, 1, &m_camera.getPosition()[0]);
glUniform3fv(m_eyePositionGeomShaderUL, 1, &m_camera.getPosition()[0]);
How can I only upload one uniform to OpenGL and use it in both the Geometry Shader and Vertex Shader?
It's a bit surprising but OpenGL makes it easy. All that you have to do is use the same uniform name in both Shaders!
Then just upload it once under that uniform location.
Replace uniform vec3 eyePositionWorldGeomShader; with uniform vec3 eyePositionWorld; in your Geometry Shader and keep the uniform name the same in the Fragment Shader.
Then just don't upload the other Uniform so your C++ code will simply be
glUniform3fv(m_eyePositionUL, 1, &m_camera.getPosition()[0]);