I just started experimenting with geometry shaders. I want to draw a triangle for every point. This is my shader:
#version 150
layout (points) in;
layout(triangles, max_vertices = 3) out;
void main(void)
{
gl_Position = gl_in[0].gl_Position + vec4(0, 0, 0, 0);
EmitVertex();
gl_Position = gl_in[0].gl_Position + vec4(0.1, 0, 0, 0);
EmitVertex();
gl_Position = gl_in[0].gl_Position + vec4(0, 0.1, 0, 0);
EmitVertex();
EndPrimitive();
}
This is the error message:
error C3008: unknown layout specifier 'triangles'
It kind of works when replacing "triangles" with "points", but obviously it's drawing points instead.
According to the standard, the only allowed primitive types for outputs are
points
line_strip
triangle_strip
So what you want is
layout(triangle_strip, max_vertices = 3) out;
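Applied to the shader above, the corrected version looks like this (only the output layout line changes; the rest is the original code):
#version 150
layout (points) in;
layout (triangle_strip, max_vertices = 3) out;
void main(void)
{
    // first corner: the input point itself
    gl_Position = gl_in[0].gl_Position;
    EmitVertex();
    // second corner: offset along x
    gl_Position = gl_in[0].gl_Position + vec4(0.1, 0, 0, 0);
    EmitVertex();
    // third corner: offset along y
    gl_Position = gl_in[0].gl_Position + vec4(0, 0.1, 0, 0);
    EmitVertex();
    EndPrimitive();
}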
I am trying to implement a geometry shader for line thickness using OpenGL 4.3.
I followed the accepted answer and other solutions given on Stack Overflow ("GLSL Geometry shader to replace glLineWidth"), but the result is wrong, as the screenshot shows. Is there a proper way to get the screen-space normal? It seems correct in the first frame, but the moment I move the mouse the camera changes and the offset direction is no longer correct. The shader is updated with the camera matrix in the render loop.
Vertex shader
#version 330 core
layout (location = 0) in vec3 aPos;
uniform mat4 projection_view_model;
void main()
{
gl_Position = projection_view_model * vec4(aPos, 1.0);
}
Fragment shader
#version 330 core
//resources:
//https://stackoverflow.com/questions/6017176/gllinestipple-deprecated-in-opengl-3-1
out vec4 FragColor;
uniform vec4 uniform_fragment_color;
void main()
{
FragColor = uniform_fragment_color;
}
Geometry shader
#version 330 core
layout (lines) in;
layout(triangle_strip, max_vertices = 4) out;
uniform float u_thickness;    // line thickness in pixels
uniform vec2 u_viewportSize;  // viewport size in pixels
in gl_PerVertex
{
vec4 gl_Position;
//float gl_PointSize;
//float gl_ClipDistance[];
} gl_in[];
void main() {
//https://stackoverflow.com/questions/54686818/glsl-geometry-shader-to-replace-gllinewidth
vec4 p1 = gl_in[0].gl_Position;
vec4 p2 = gl_in[1].gl_Position;
vec2 dir = normalize((p2.xy - p1.xy) * u_viewportSize);
vec2 offset = vec2(-dir.y, dir.x) * u_thickness*100 / u_viewportSize;
gl_Position = p1 + vec4(offset.xy * p1.w, 0.0, 0.0);
EmitVertex();
gl_Position = p1 - vec4(offset.xy * p1.w, 0.0, 0.0);
EmitVertex();
gl_Position = p2 + vec4(offset.xy * p2.w, 0.0, 0.0);
EmitVertex();
gl_Position = p2 - vec4(offset.xy * p2.w, 0.0, 0.0);
EmitVertex();
EndPrimitive();
}
To get the direction of the line in normalized device space, the x and y components of the clip-space coordinates must be divided by the w component (perspective divide). Change
vec2 dir = normalize((p2.xy - p1.xy) * u_viewportSize);
to
vec2 dir = normalize((p2.xy / p2.w - p1.xy / p1.w) * u_viewportSize);
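With that single change applied, the geometry shader's main becomes (everything else is the code from the question):
void main() {
    vec4 p1 = gl_in[0].gl_Position;
    vec4 p2 = gl_in[1].gl_Position;
    // line direction in normalized device space (divide by w before taking the direction)
    vec2 dir = normalize((p2.xy / p2.w - p1.xy / p1.w) * u_viewportSize);
    // perpendicular of the line in window space, scaled back to clip space
    vec2 offset = vec2(-dir.y, dir.x) * u_thickness * 100 / u_viewportSize;
    gl_Position = p1 + vec4(offset.xy * p1.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p1 - vec4(offset.xy * p1.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p2 + vec4(offset.xy * p2.w, 0.0, 0.0);
    EmitVertex();
    gl_Position = p2 - vec4(offset.xy * p2.w, 0.0, 0.0);
    EmitVertex();
    EndPrimitive();
}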
My situation: I am processing some depth data from an RGBD camera in GLSL. The depth texture is set up as follows:
internal Format = GL_LUMINANCE16UI_EXT
Format = GL_LUMINANCE_INTEGER_EXT
DataType = GL_UNSIGNED_SHORT
My vertex shader is a simple pass-through:
#version 330
layout(location = 0) in vec4 in_position;
void main()
{
gl_Position = in_position;
}
My fragment shader is:
#version 330 core
in vec2 texcoord;
out uint FragColor;
uniform usampler2D gSampler;
void main()
{
.
.
.
FragColor = ...;
}
And my Geometry shader is:
#version 330 core
layout(points) in;
layout(triangle_strip, max_vertices = 4) out;
out vec2 texcoord;
void main()
{
gl_Position = vec4(1.0, 1.0, 0.0, 1.0);
texcoord = vec2(1.0, 1.0);
EmitVertex();
gl_Position = vec4(-1.0, 1.0, 0.0, 1.0);
texcoord = vec2(0.0, 1.0);
EmitVertex();
gl_Position = vec4(1.0,-1.0, 0.0, 1.0);
texcoord = vec2(1.0, 0.0);
EmitVertex();
gl_Position = vec4(-1.0,-1.0, 0.0, 1.0);
texcoord = vec2(0.0, 0.0);
EmitVertex();
EndPrimitive();
}
This code works well on my NVIDIA GPU, but not with the Intel driver (Intel 5th Gen). Therefore I changed the depth texture to the following format:
internal Format = GL_DEPTH_COMPONENT16
Format = GL_DEPTH_COMPONENT
DataType = GL_UNSIGNED_SHORT
It runs without any error when I check with glGetError and glGetShaderInfoLog, but the shaders only return zero. Do I have to change something in my shaders because of the new texture format?
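One likely mismatch, offered here as an assumption rather than a confirmed answer: GL_DEPTH_COMPONENT16 is a normalized fixed-point format, so it must be sampled with a regular sampler2D; keeping the usampler2D from the integer-format version gives undefined results. A minimal sketch of the adjusted fragment shader, assuming the depth is still wanted as a 16-bit integer on output:
#version 330 core
in vec2 texcoord;
out uint FragColor;
uniform sampler2D gSampler;   // float sampler for the normalized depth format

void main()
{
    // the depth texture now returns a float in [0, 1]
    float depth = texture(gSampler, texcoord).r;
    // rescale back to the original 16-bit range for the integer render target
    FragColor = uint(round(depth * 65535.0));
}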
I am trying to figure out how to switch outputs in the geometry shader, specifically these two outputs:
layout(points, max_vertices = 1) out; // OUTPUT 1
layout(triangle_strip, max_vertices = 4) out; // OUTPUT 2
I am rendering star clusters from a dataset of 100,000+ stars, using sprites generated in the geometry shader. However, the sprite "halos" should only appear when we move closer to the target star.
Geometry Shader
#version 440
const vec2 corners[4] = {
vec2(0.0, 1.0),
vec2(0.0, 0.0),
vec2(1.0, 1.0),
vec2(1.0, 0.0)
};
layout(points) in;
layout(points, max_vertices = 1) out;
//layout(triangle_strip, max_vertices = 4) out;
uniform vec4 campos;
float spriteSize = 0.000005; // set here for now.
out vec2 texCoord;
void main(){
float distToPoint = 1; //(1.f/((length(gl_in[0].gl_Position - campos)) ));
if(distToPoint == 1){ // dumb condition just to illustrate my point
// EMIT QUAD
for(int i=0; i<4; ++i){
vec4 pos = gl_in[0].gl_Position;
pos.xy += spriteSize *(corners[i] - vec2(0.5));
gl_Position = pos;
texCoord = corners[i];
EmitVertex();
}
EndPrimitive();
}else{
// EMIT POINT
gl_Position = gl_in[0].gl_Position;
EmitVertex();
EndPrimitive();
}
}
Fragment Shader
void main(void){
... // First I do a bunch of depth computation, irrelevant to this question
gl_FragDepth = depth;
// Here I want to switch between these two outputs!
//diffuse = texture2D(texture1, texCoord); // OUTPUT 1
diffuse = vec4(Color, 1.0); // OUTPUT 2
}
I have recently come across streams for geometry shaders, but I can't find a decent example of how this could be done in my case.
Please let me know if more code needs to be posted.
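The output primitive type and max_vertices are compile-time layout declarations, so a single geometry shader cannot switch between points and triangle_strip at run time (and transform-feedback streams only apply to points output). A common workaround, sketched below as an assumption rather than the thread's answer, is to always emit a quad and pass a flag to the fragment shader; useHalo, haloDistance and pointSize are invented names for this illustration:
#version 440
layout(points) in;
layout(triangle_strip, max_vertices = 4) out;

const vec2 corners[4] = {
    vec2(0.0, 1.0),
    vec2(0.0, 0.0),
    vec2(1.0, 1.0),
    vec2(1.0, 0.0)
};

uniform vec4 campos;
uniform float haloDistance;      // hypothetical: distance below which the halo is shown
float spriteSize = 0.000005;     // halo quad size, as in the original shader
float pointSize = 0.000001;      // hypothetical: quad small enough to read as a point

out vec2 texCoord;
flat out int useHalo;            // hypothetical flag consumed by the fragment shader

void main() {
    useHalo = (length(gl_in[0].gl_Position - campos) < haloDistance) ? 1 : 0;
    float size = (useHalo == 1) ? spriteSize : pointSize;
    for (int i = 0; i < 4; ++i) {
        vec4 pos = gl_in[0].gl_Position;
        pos.xy += size * (corners[i] - vec2(0.5));
        gl_Position = pos;
        texCoord = corners[i];
        EmitVertex();
    }
    EndPrimitive();
}
The fragment shader can then select its output with the same flag, e.g. diffuse = (useHalo == 1) ? texture(texture1, texCoord) : vec4(Color, 1.0);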
I created a basic quad drawing shader using a single point and a geometry shader.
I've read many posts and articles suggesting that I would not need to use glProgramParameteriEXT and could use the layout keyword so long as I was using a shader #version 150 or higher. Some suggested #version 400 or #version 420. My computer will not support #version 420 or higher.
If I use only layout and #version 150 or higher, nothing draws. If I remove layout (or even keep it; it does not seem to care because it will compile) and use glProgramParameteriEXT, it renders.
In code, this does nothing:
layout (points) in;
layout (triangle_strip, max_vertices=4) out;
This is the only code that works:
glProgramParameteriEXT( id, GL_GEOMETRY_INPUT_TYPE_EXT, GL_POINTS );
glProgramParameteriEXT( id, GL_GEOMETRY_OUTPUT_TYPE_EXT, GL_TRIANGLE_STRIP );
glProgramParameteriEXT( id, GL_GEOMETRY_VERTICES_OUT_EXT, 4 );
The alternative is to write a parser that extracts these parameters from the shader source.
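For comparison, this is a sketch of how a core-profile geometry shader is typically created so that only the layout qualifiers are needed; vsSrc, gsSrc and fsSrc are assumed to be const char* variables holding the shader sources, and error checking is omitted. It requires a 3.2+ core context and the shader to be created with the core GL_GEOMETRY_SHADER enum rather than the EXT path:
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
GLuint gs = glCreateShader(GL_GEOMETRY_SHADER);   // core enum, not GL_GEOMETRY_SHADER_EXT
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);

glShaderSource(vs, 1, &vsSrc, NULL);
glShaderSource(gs, 1, &gsSrc, NULL);
glShaderSource(fs, 1, &fsSrc, NULL);
glCompileShader(vs);
glCompileShader(gs);
glCompileShader(fs);

GLuint prog = glCreateProgram();
glAttachShader(prog, vs);
glAttachShader(prog, gs);
glAttachShader(prog, fs);
// no glProgramParameteriEXT calls: the input type, output type and vertex
// count are taken from the layout qualifiers in the geometry shader source
glLinkProgram(prog);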
Source for quad rendering via geometry shader:
#version 330
#ifdef VERTEX_SHADER
in vec4 aTexture0;
in vec4 aColor;
in mat4 aMatrix;
out vec4 gvTex0;
out vec4 gvColor;
out mat4 gvMatrix;
void main()
{
// Texture color
gvTex0 = aTexture0;
// Vertex color
gvColor = aColor;
// Matrix
gvMatrix = aMatrix;
}
#endif
#ifdef GEOMETRY_SHADER
layout (points) in;
layout (triangle_strip, max_vertices=4) out;
in vec4 gvTex0[1];
in vec4 gvColor[1];
in mat4 gvMatrix[1];
out vec2 vTex0;
out vec4 vColor;
void main()
{
vColor = gvColor[0];
// Top right.
//
gl_Position = gvMatrix[0] * vec4(1, 1, 0, 1);
vTex0 = vec2(gvTex0[0].z, gvTex0[0].y);
EmitVertex();
// Top left.
//
gl_Position = gvMatrix[0] * vec4(-1, 1, 0, 1);
vTex0 = vec2(gvTex0[0].x, gvTex0[0].y);
EmitVertex();
// Bottom right.
//
gl_Position = gvMatrix[0] * vec4(1, -1, 0, 1);
vTex0 = vec2(gvTex0[0].z, gvTex0[0].w);
EmitVertex();
// Bottom left.
//
gl_Position = gvMatrix[0] * vec4(-1, -1, 0, 1);
vTex0 = vec2(gvTex0[0].x, gvTex0[0].w);
EmitVertex();
EndPrimitive();
}
#endif
#ifdef FRAGMENT_SHADER
uniform sampler2D tex0;
in vec2 vTex0;
in vec4 vColor;
out vec4 vFragColor;
void main()
{
vFragColor = clamp(texture2D(tex0, vTex0) * vColor, 0.0, 1.0);
}
#endif
I am looking for suggestions as to why something like this might happen.
I'm having difficulties understanding the math between the different shader stages.
In the fragment shader rendered from the light's perspective, I basically write out the fragment depth as an RGB color:
#version 330
out vec4 shader_fragmentColor;
void main()
{
shader_fragmentColor = vec4(gl_FragCoord.z, gl_FragCoord.z, gl_FragCoord.z, 1);
//shader_fragmentColor = vec4(1, 0.5, 0.5, 1);
}
When rendering the scene using the above shader, the scene is displayed in an all-white color. I suppose that's because gl_FragCoord.z is bigger than 1; hopefully it's not maxed out at 1, but we can leave that question alone for now.
In the geometry shader rendered from the camera's perspective, I basically turn all points into quads and write out the probably "incorrect" texture position to look up in the light texture. The math here is the question. I'm also a bit unsure whether the interpolated value will be correct in the next shader stage.
#version 330
#extension GL_EXT_geometry_shader4 : enable
uniform mat4 p1_modelM;
uniform mat4 p1_cameraPV;
uniform mat4 p1_lightPV;
out vec4 shader_lightTexturePosition;
void main()
{
float s = 10.00;
vec4 llCorner = vec4(-s, -s, 0.0, 0.0);
vec4 llWorldPosition = ((p1_modelM * llCorner) + gl_in[0].gl_Position);
gl_Position = p1_cameraPV * llWorldPosition;
shader_lightTexturePosition = p1_lightPV * llWorldPosition;
EmitVertex();
vec4 rlCorner = vec4(+s, -s, 0.0, 0.0);
vec4 rlWorldPosition = ((p1_modelM * rlCorner) + gl_in[0].gl_Position);
gl_Position = p1_cameraPV * rlWorldPosition;
shader_lightTexturePosition = p1_lightPV * rlWorldPosition;
EmitVertex();
vec4 luCorner = vec4(-s, +s, 0.0, 0.0);
vec4 luWorldPosition = ((p1_modelM * luCorner) + gl_in[0].gl_Position);
gl_Position = p1_cameraPV * luWorldPosition;
shader_lightTexturePosition = p1_lightPV * luWorldPosition;
EmitVertex();
vec4 ruCorner = vec4(+s, +s, 0.0, 0.0);
vec4 ruWorldPosition = ((p1_modelM * ruCorner) + gl_in[0].gl_Position);
gl_Position = p1_cameraPV * ruWorldPosition;
shader_lightTexturePosition = p1_lightPV * ruWorldPosition;
EmitVertex();
EndPrimitive();
}
In the fragment shader from the camera's perspective, I basically look up in the light texture which color would be shown from the light's perspective and write out the same color.
#version 330
uniform sampler2D p1_lightTexture;
in vec4 shader_lightTexturePosition;
out vec4 shader_fragmentColor;
void main()
{
vec4 lightTexel = texture2D(p1_lightTexture, shader_lightTexturePosition.xy);
shader_fragmentColor = lightTexel;
/*
if(lightTexel.x < shader_lightTexturePosition.z)
shader_fragmentColor = vec4(1, 0, 0, 1);
else
shader_fragmentColor = vec4(0, 1, 0, 1);
*/
//shader_fragmentColor = vec4(1, 1, 1, 1);
}
When rendering from the camera's perspective, I see the scene drawn as it should be, but with incorrect texture coordinates applied to it, and they repeat. The repeating texture is probably caused by the texture coordinates being outside the range 0 to 1.
I've tried several things but still fail to understand what the math should be. Some of the commented-out code, and one example I'm unsure of, is:
shader_lightTexturePosition = normalize(p1_lightPV * llWorldPosition) / 2 + vec4(0.5, 0.5, 0.5, 0.5);
for the lower-left corner, with similar code for the other corners.
From the solution I expect the scene to be rendered from the camera's perspective with exactly the same colors as from the light's perspective, with perhaps some precision error.
I figured out the texture mapping bit myself; the depth value bit is still a bit strange.
Convert the screen-projected coordinates to normalized device coordinates, then add 1 and divide by 2:
vec4 textureNormalizedCoords(vec4 screenProjected)
{
vec3 normalizedDeviceCoords = (screenProjected.xyz / screenProjected.w);
return vec4( (normalizedDeviceCoords.xy + 1.0) / 2.0, screenProjected.z * 0.005, 1/screenProjected.w);
}
void main()
{
float s = 10.00;
vec4 llCorner = vec4(-s, -s, 0.0, 0.0);
vec4 llWorldPosition = ((p1_modelM * llCorner) + gl_in[0].gl_Position);
gl_Position = p1_cameraPV * llWorldPosition;
shader_lightTextureCoords = textureNormalizedCoords(p1_lightPV * llWorldPosition);
EmitVertex();
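For the depth part that still feels strange: the usual shadow-mapping convention (an assumption here, not something confirmed in this thread) is to apply the same perspective divide and 0-to-1 remapping to z as to x and y, rather than an ad-hoc scale like screenProjected.z * 0.005. A sketch of the helper written that way:
vec4 textureNormalizedCoords(vec4 screenProjected)
{
    // perspective divide: clip space -> normalized device coordinates in [-1, 1]
    vec3 ndc = screenProjected.xyz / screenProjected.w;
    // remap x and y to texture space [0, 1], and z to the same [0, 1] range the depth buffer uses
    vec3 mapped = ndc * 0.5 + 0.5;
    return vec4(mapped, 1.0);
}
With both values in [0, 1], the commented-out comparison lightTexel.x < shader_lightTexturePosition.z becomes meaningful as a shadow test, up to a small bias for precision.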