GLSL for depth only with GL_DEPTH_COMPONENT texture format - opengl

My situation: I am processing depth data from an RGB-D camera with GLSL. The depth texture is set up as follows:
Internal format = GL_LUMINANCE16UI_EXT
Format = GL_LUMINANCE_INTEGER_EXT
Data type = GL_UNSIGNED_SHORT
My vertex shader is a simple pass-through:
#version 330
layout(location = 0) in vec4 in_position;

void main()
{
    gl_Position = in_position;
}
My fragment shader is:
#version 330 core
in vec2 texcoord;
out uint FragColor;
uniform usampler2D gSampler;

void main()
{
    .
    .
    .
    FragColor = ...;
}
And my geometry shader is:
#version 330 core
layout(points) in;
layout(triangle_strip, max_vertices = 4) out;
out vec2 texcoord;

void main()
{
    gl_Position = vec4(1.0, 1.0, 0.0, 1.0);
    texcoord = vec2(1.0, 1.0);
    EmitVertex();

    gl_Position = vec4(-1.0, 1.0, 0.0, 1.0);
    texcoord = vec2(0.0, 1.0);
    EmitVertex();

    gl_Position = vec4(1.0, -1.0, 0.0, 1.0);
    texcoord = vec2(1.0, 0.0);
    EmitVertex();

    gl_Position = vec4(-1.0, -1.0, 0.0, 1.0);
    texcoord = vec2(0.0, 0.0);
    EmitVertex();

    EndPrimitive();
}
This code works well on my NVIDIA GPU, but not with the Intel driver (Intel 5th Gen graphics). I therefore changed the depth texture to the following format:
Internal format = GL_DEPTH_COMPONENT16
Format = GL_DEPTH_COMPONENT
Data type = GL_UNSIGNED_SHORT
It runs without any errors when I check with glGetError and glGetShaderInfoLog, but the shaders only ever return zero. Do I have to change something in my shaders because of the new texture format?
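One likely culprit, sketched below under the assumption that the render target and host code stay the same: a GL_DEPTH_COMPONENT16 texture is a normalized fixed-point texture, so it has to be sampled through a sampler2D (with GL_TEXTURE_COMPARE_MODE set to GL_NONE), not the usampler2D used for the old integer format; sampling it with an integer sampler gives undefined results. The output variable name and the 65535 rescale here are illustrative assumptions, not part of the original code.
#version 330 core
in vec2 texcoord;
out float FragValue;            // assumed float output instead of the old uint
uniform sampler2D gSampler;     // float sampler for GL_DEPTH_COMPONENT16

void main()
{
    // texture() returns the depth as a normalized value in [0, 1].
    float depth = texture(gSampler, texcoord).r;

    // Rescale to the raw 16-bit range if integer sensor units are needed.
    FragValue = depth * 65535.0;
}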

Related

OpenGL line width geometry shader

I am trying to implement a geometry shader for line thickness using OpenGL 4.3.
I followed the accepted answer and other solutions given on Stack Overflow, but the result is wrong, as my screenshot shows. Is there a proper way to get the normal in screen space? It looks correct in the first frame, but the moment I move the mouse the camera changes and the offset direction is no longer correct. The shader's camera matrix is updated every iteration of the render loop.
GLSL Geometry shader to replace glLineWidth
Vertex shader:
#version 330 core
layout (location = 0) in vec3 aPos;
uniform mat4 projection_view_model;

void main()
{
    gl_Position = projection_view_model * vec4(aPos, 1.0);
}
Fragment shader:
#version 330 core
//resources:
//https://stackoverflow.com/questions/6017176/gllinestipple-deprecated-in-opengl-3-1
out vec4 FragColor;
uniform vec4 uniform_fragment_color;

void main()
{
    FragColor = uniform_fragment_color;
}
Geometry shader:
#version 330 core
layout (lines) in;
layout (triangle_strip, max_vertices = 4) out;

uniform float u_thickness;
uniform vec2 u_viewportSize;

in gl_PerVertex
{
    vec4 gl_Position;
    //float gl_PointSize;
    //float gl_ClipDistance[];
} gl_in[];

void main() {
    //https://stackoverflow.com/questions/54686818/glsl-geometry-shader-to-replace-gllinewidth
    vec4 p1 = gl_in[0].gl_Position;
    vec4 p2 = gl_in[1].gl_Position;

    vec2 dir = normalize((p2.xy - p1.xy) * u_viewportSize);
    vec2 offset = vec2(-dir.y, dir.x) * u_thickness * 100 / u_viewportSize;

    gl_Position = p1 + vec4(offset.xy * p1.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p1 - vec4(offset.xy * p1.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p2 + vec4(offset.xy * p2.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p2 - vec4(offset.xy * p2.w, 0.0, 0.0);
    EmitVertex();

    EndPrimitive();
}
To get the direction of the line in normalized device space, the x and y components of the clip-space coordinates must be divided by the w component (perspective divide). Change
vec2 dir = normalize((p2.xy - p1.xy) * u_viewportSize);
to
vec2 dir = normalize((p2.xy / p2.w - p1.xy / p1.w) * u_viewportSize);
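For context, here is a sketch of the geometry-shader body with that fix applied (same uniforms and layout as above; the * 100 fudge factor from the question is dropped here on the assumption that u_thickness is given in pixels):
void main() {
    vec4 p1 = gl_in[0].gl_Position;
    vec4 p2 = gl_in[1].gl_Position;

    // Line direction in normalized device coordinates, scaled by the viewport
    // so the perpendicular is correct in screen space.
    vec2 dir = normalize((p2.xy / p2.w - p1.xy / p1.w) * u_viewportSize);

    // Perpendicular offset in pixels, converted back to clip-space units.
    vec2 offset = vec2(-dir.y, dir.x) * u_thickness / u_viewportSize;

    gl_Position = p1 + vec4(offset * p1.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p1 - vec4(offset * p1.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p2 + vec4(offset * p2.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p2 - vec4(offset * p2.w, 0.0, 0.0);
    EmitVertex();

    EndPrimitive();
}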

Fragment shader input interfering with texture access

I am using a vertex, geometry and fragment shader to render a scene with shadows:
Vertex Shader:
#version 400
layout(location=0) in vec3 position;
out vec4 vShadowCoord;
uniform mat4 modelViewProjectionMatrix;
uniform mat4 shadowMatrix;

void main(void)
{
    vShadowCoord = shadowMatrix * vec4(position, 1.0);
    gl_Position = modelViewProjectionMatrix * vec4(position, 1.0);
}
Geometry Shader:
#version 400
layout(triangles_adjacency) in;
layout(triangle_strip, max_vertices = 3) out;
in vec4 vShadowCoord[];
out vec4 gShadowCoord;
uniform vec3 lightPosition;

void main()
{
    gShadowCoord = vShadowCoord[0];
    gl_Position = gl_in[0].gl_Position;
    EmitVertex();

    gShadowCoord = vShadowCoord[2];
    gl_Position = gl_in[2].gl_Position;
    EmitVertex();

    gShadowCoord = vShadowCoord[4];
    gl_Position = gl_in[4].gl_Position;
    EmitVertex();

    EndPrimitive();
}
Fragment Shader:
#version 400
in vec4 shadowCoord;
out vec4 fColor;
uniform sampler2DShadow shadowMap;

void main(void)
{
    float shadow = textureProj(shadowMap, shadowCoord);
    fColor = (shadow > 0.0) ? vec4(1.0, 1.0, 1.0, 1.0) : vec4(0.1, 0.1, 0.1, 1.0);
}
This successfully renders my scene with shadows. The cubes in my scene are lit and in shadow where I would expect them to be. The problem occurs when I try to pass one of the two colors in from the geometry shader. When I do this, my conditional statement always evaluates to false.
Geometry shader:
#version 400
layout(triangles_adjacency) in;
layout(triangle_strip, max_vertices = 3) out;
in vec4 vShadowCoord[];
out vec4 gShadowCoord; // Added
out vec4 gColorLit;    // Added
uniform vec3 lightPosition;

void main()
{
    gShadowCoord = vShadowCoord[0];
    gColorLit = vec4(1.0, 1.0, 1.0, 1.0); // Added
    gl_Position = gl_in[0].gl_Position;
    EmitVertex();

    gShadowCoord = vShadowCoord[2];
    gColorLit = vec4(1.0, 1.0, 1.0, 1.0); // Added
    gl_Position = gl_in[2].gl_Position;
    EmitVertex();

    gShadowCoord = vShadowCoord[4];
    gColorLit = vec4(1.0, 1.0, 1.0, 1.0); // Added
    gl_Position = gl_in[4].gl_Position;
    EmitVertex();

    EndPrimitive();
}
Fragment Shader:
#version 400
in vec4 shadowCoord;
in vec4 gColorLit; // Added
out vec4 fColor;
uniform sampler2DShadow shadowMap;

void main(void)
{
    float shadow = textureProj(shadowMap, shadowCoord);
    // Changed
    fColor = (shadow > 0.0) ? gColorLit : vec4(0.1, 0.1, 0.1, 1.0);
}
What could be causing this to happen?
This occurs on both Ubuntu 12.04 and Windows 7
Some of the code in my shaders might seem unnecessary, but that is because I have stripped as much as I could away while troubleshooting.
There's no reason for your second set of shaders to link successfully. Your geometry shader declares that it writes:
out vec4 gShadowCoord; // Added
But your fragment shader is expecting:
in vec4 shadowCoord;
You should have gotten a linker error.
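A minimal sketch of the fix, assuming the geometry-shader output name gShadowCoord is kept: rename the fragment-shader input so the interface matches, and sample with that variable.
// Fragment shader
#version 400
in vec4 gShadowCoord; // name now matches the geometry shader output
in vec4 gColorLit;
out vec4 fColor;
uniform sampler2DShadow shadowMap;

void main(void)
{
    float shadow = textureProj(shadowMap, gShadowCoord);
    fColor = (shadow > 0.0) ? gColorLit : vec4(0.1, 0.1, 0.1, 1.0);
}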

glsl light doesn't seem to be working

I'm working on some basic lighting in my application, and am unable to get a simple light to work (so far..).
Here's the vertex shader:
#version 150 core

uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
uniform mat4 pvmMatrix;
uniform mat3 normalMatrix;

in vec3 in_Position;
in vec2 in_Texture;
in vec3 in_Normal;

out vec2 textureCoord;
out vec4 pass_Color;

uniform LightSources {
    vec4 ambient = vec4(0.5, 0.5, 0.5, 1.0);
    vec4 diffuse = vec4(0.5, 0.5, 0.5, 1.0);
    vec4 specular = vec4(0.5, 0.5, 0.5, 1.0);
    vec4 position = vec4(1.5, 7, 0.5, 1.0);
    vec4 direction = vec4(0.0, -1.0, 0.0, 1.0);
} lightSources;

struct Material {
    vec4 ambient;
    vec4 diffuse;
    vec4 specular;
    float shininess;
};

Material mymaterial = Material(
    vec4(1.0, 0.8, 0.8, 1.0),
    vec4(1.0, 0.8, 0.8, 1.0),
    vec4(1.0, 0.8, 0.8, 1.0),
    0.995
);

void main() {
    gl_Position = pvmMatrix * vec4(in_Position, 1.0);
    textureCoord = in_Texture;

    vec3 normalDirection = normalize(normalMatrix * in_Normal);
    vec3 lightDirection = normalize(vec3(lightSources.direction));
    vec3 diffuseReflection = vec3(lightSources.diffuse) * vec3(mymaterial.diffuse) * max(0.0, dot(normalDirection, lightDirection));

    /*
    float bug = 0.0;
    bvec3 result = equal( diffuseReflection, vec3(0.0, 0.0, 0.0) );
    if(result[0] && result[1] && result[2]) bug = 1.0;
    diffuseReflection.x += bug;
    */

    pass_Color = vec4(diffuseReflection, 1.0);
}
And here's the fragment shader:
#version 150 core

uniform sampler2D texture;

in vec4 pass_Color;
in vec2 textureCoord;

void main() {
    vec4 out_Color = texture2D(texture, textureCoord);
    gl_FragColor = pass_Color;
    //gl_FragColor = out_Color;
}
I'm rendering a textured wolf to the screen as a test. If I change the fragment shader to use out_Color, I see the wolf rendered properly. If I use pass_Color, I see nothing on the screen.
This is what the screen looks like when I use out_Color in the fragment shader:
I know the diffuseReflection vector is full of zeros, because of what happens when I uncomment this code in the vertex shader:
...
/*
float bug = 0.0;
bvec3 result = equal( diffuseReflection, vec3(0.0, 0.0, 0.0) );
if(result[0] && result[1] && result[2]) bug = 1.0;
diffuseReflection.x += bug;
*/
...
This will make the x component of the diffuseReflection vector 1.0, which turns the wolf red.
Does anyone see anything obvious I'm doing wrong here?
As suggested in the comments, try debugging incrementally. I see a number of ways your shader could be wrong. Maybe your normalMatrix isn't being passed properly? Maybe your in_Normal isn't mapped to the appropriate input? Maybe when you cast lightSources.direction to a vec3, the compiler does something funky? Maybe your shader isn't even running at all, but you think it is? Maybe you have geometry or tessellation stages in the pipeline and the data isn't passed through correctly?
No one really has a chance of answering this correctly. As far as I can tell it looks fine, but any of the factors above (and probably more) could be the cause.
To debug this, you need to break it down. As suggested in the comments, try rendering the normals. Then, you should try rendering the light direction. Then render your n dot l term. Then multiply by your material parameters, then by your texture. Somewhere along the way you'll figure out the problem. As an additional tip, change your clear color to something other than black so that any black-rendered objects stand out.
It's worth noting that this advice (break the problem down) applies to all debugging, not just shaders. As I see it, you haven't done that here.
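As a concrete starting point for the first step (rendering the normals), a minimal debug sketch, assuming the vertex shader above is otherwise unchanged:
// In the vertex shader: visualize the transformed normal as a color.
// Components are remapped from [-1, 1] to [0, 1] so negative values remain visible.
vec3 normalDirection = normalize(normalMatrix * in_Normal);
pass_Color = vec4(normalDirection * 0.5 + 0.5, 1.0);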

Phong lighting model is not actually lighting anything

I currently have an assignment to implement the Phong lighting model in OpenGL / GLSL. The two shaders I am working with are below. The problem is that in the fragment shader, if I do not add vColor to gl_FragColor, then the entire shape is black. However, if I DO add vColor, then the entire shape is that color with no lighting at all. I have been trying to solve this for a couple of hours now with no luck. What is the reason for this? Is it a problem in my shaders, or perhaps in the OpenGL code? I am using one material and one point light source, which I'll show after the shaders.
Edit: If I set gl_FragColor = vec4(N, 1.0) then the object looks like this:
vertex shader:
#version 150

in vec4 vPosition;
in vec3 vNormal;

uniform mat4 vMatrix;
uniform vec4 LightPosition;

out vec3 fNorm;
out vec3 fEye;
out vec3 fLight;

void main() {
    fNorm = vNormal;
    fEye = vPosition.xyz;
    fLight = LightPosition.xyz;

    if (LightPosition.w != 0.0) {
        fLight = LightPosition.xyz - vPosition.xyz;
    }

    gl_Position = vMatrix * vPosition;
}
fragment shader:
#version 150

in vec3 fNorm;
in vec3 fLight;
in vec3 fEye;

uniform vec4 vColor;
uniform vec4 AmbientProduct, DiffuseProduct, SpecularProduct;
uniform mat4 vMatrix;
uniform vec4 LightPosition;
uniform float Shininess;

void main() {
    vec3 N = normalize(fNorm);
    vec3 E = normalize(fEye);
    vec3 L = normalize(fLight);
    vec3 H = normalize(L + E);

    vec4 ambient = AmbientProduct;

    float Kd = max(dot(L, N), 0.0);
    vec4 diffuse = Kd * DiffuseProduct;

    float Ks = pow(max(dot(N, H), 0.0), Shininess);
    vec4 specular = Ks * SpecularProduct;
    if (dot(L, N) < 0.0)
        specular = vec4(0.0, 0.0, 0.0, 1.0);

    gl_FragColor = vColor + ambient + diffuse + specular;
}
Setting materials and light:
void init() {
    setMaterials(vec4(1.0, 0.0, 0.0, 0.0),  //ambient
                 vec4(1.0, 0.8, 0.0, 1.0),  //diffuse
                 vec4(1.0, 1.0, 1.0, 1.0),  //specular
                 100.0);                    //shine

    setLightSource(vec4(1.0, 0.0, 0.0, 1.0),  //ambient
                   vec4(1.0, 0.0, 0.0, 1.0),  //diffuse
                   vec4(1.0, 0.0, 0.0, 1.0),  //specular
                   vec4(1.0, 2.0, 3.0, 1.0)); //position

    setProducts();
    ....
}
/*
 * Sets the material properties for Phong lighting model.
 */
void setMaterials(vec4 amb, vec4 dif, vec4 spec, GLfloat s) {
    ambient = amb;
    diffuse = dif;
    specular = spec;
    shine = s;
    glUniform1f(vShininess, shine);
}
/*
 * Set light source properties.
 */
void setLightSource(vec4 amb, vec4 dif, vec4 spec, vec4 pos) {
    ambient0 = amb;
    diffuse0 = dif;
    specular0 = spec;
    light0_pos = pos;
    glUniform4fv(vLightPosition, 1, light0_pos);
}
/*
 * Find the products of materials components and light components.
 */
void setProducts() {
    vec4 ambientProduct = ambient * ambient0;
    vec4 diffuseProduct = diffuse * diffuse0;
    vec4 specularProduct = specular * specular0;
    glUniform4fv(vAmbientProduct, 1, ambientProduct);
    glUniform4fv(vDiffuseProduct, 1, diffuseProduct);
    glUniform4fv(vSpecularProduct, 1, specularProduct);
}
Your final lighting composition doesn't look right:
gl_FragColor = vColor + ambient + diffuse + specular;
It should be something like
gl_FragColor = vColor * (ambient + diffuse) + specular;
i.e. the illumination modulated by the albedo of the object.
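Applied to the fragment shader above, a corrected main() would look roughly like this (a sketch; only the final composition line changes):
void main() {
    vec3 N = normalize(fNorm);
    vec3 E = normalize(fEye);
    vec3 L = normalize(fLight);
    vec3 H = normalize(L + E);

    vec4 ambient = AmbientProduct;
    float Kd = max(dot(L, N), 0.0);
    vec4 diffuse = Kd * DiffuseProduct;
    float Ks = pow(max(dot(N, H), 0.0), Shininess);
    vec4 specular = Ks * SpecularProduct;
    if (dot(L, N) < 0.0)
        specular = vec4(0.0, 0.0, 0.0, 1.0);

    // Modulate the illumination by the object's base color (albedo)
    // instead of adding the color on top of it.
    gl_FragColor = vColor * (ambient + diffuse) + specular;
}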

Passing variables from a geometry shader to a fragment shader

I have a GLSL geometry shader that looks like the following:
#version 150

uniform mat4 p;
uniform mat4 mv;
uniform mat3 nm;

layout(points) in;
layout(triangle_strip, max_vertices = 200) out;

out vec4 test;

void main() {
    for (int i = 0; i < gl_in.length(); i++) {
        ....
        gl_Position = p * mv * gl_in[i].gl_Position;
        test = vec4(1.0, 0.0, 0.0, 0.0);
        EmitVertex();
        ....
        EndPrimitive();
    }
}
However, when I try to access "test" in my fragment shader, my application crashes. Here is my fragment shader:
#version 150

out vec4 fColor;
in vec4 test;

void main(void) {
    fColor = vec4(test.x, 1.0, 0.4, 0);
}
Can someone help me pass a variable from the geometry shader to the fragment shader? varying is deprecated in #version 150.
You need to declare test as input in your fragment shader (I wonder why the shader compiles):
in vec4 test;
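For reference, a minimal sketch of the matching interface between the two stages (in and out replace the old varying qualifier; the names and types must agree across the geometry and fragment shaders):
// Geometry shader (excerpt)
out vec4 test;
// ...
test = vec4(1.0, 0.0, 0.0, 0.0);   // written before each EmitVertex()
EmitVertex();

// Fragment shader (excerpt)
in vec4 test;                      // same name and type as the geometry output
out vec4 fColor;
void main(void) {
    fColor = vec4(test.x, 1.0, 0.4, 0.0);
}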