I have a vertex shader that takes in position, texture coordinates, normals and some uniforms:
#version 330 core
layout(location = 0) in vec4 position;
layout(location = 1) in vec2 texCoord;
layout(location = 2) in vec3 normal;
//MVP
uniform mat4 u_Model;
uniform mat4 u_View;
uniform mat4 u_Proj;
//Lighting
uniform mat4 u_InvTranspModel;
uniform mat4 u_LightMVP;
out DATA
{
vec2 v_TexCoord;
vec3 v_Normal;
vec3 v_FragPos;
vec4 v_LightSpacePos;
mat4 v_Proj;
} data_out[];
void main()
{
gl_Position = u_Model * position;
data_out.v_TexCoord = texCoord;
data_out.v_Normal = mat3(u_InvTranspModel) * normal;
data_out.v_Proj = u_Proj * u_View * u_Model;
//Light
data_out.v_FragPos = vec3(u_Model*position);
data_out.v_LightSpacePos = u_LightMVP * position;
}
I'm passing this into my geometry shader:
#version 330 core
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
in DATA
{
vec2 v_TexCoord;
vec3 v_Normal;
vec3 v_FragPos;
vec4 v_LightSpacePos;
mat4 v_Proj;
} data_in[];
out vec2 g_TexCoord;
out vec3 g_Normal;
out vec3 g_FragPos;
out vec4 g_LightSpacePos;
void main()
{
gl_Position = data_in[0].v_Proj * gl_in[0].gl_Position;
g_Normal = data_in[0].v_Normal;
g_FragPos = data_in[0].v_FragPos;
g_TexCoord = data_in[0].v_TexCoord;
g_LightSpacePos = data_in[0].v_LightSpacePos;
EmitVertex();
gl_Position = data_in[1].v_Proj * gl_in[1].gl_Position;
g_Normal = data_in[1].v_Normal;
g_FragPos = data_in[1].v_FragPos;
g_TexCoord = data_in[1].v_TexCoord;
g_LightSpacePos = data_in[1].v_LightSpacePos;
EmitVertex();
gl_Position = data_in[2].v_Proj * gl_in[2].gl_Position;
g_Normal = data_in[2].v_Normal;
g_FragPos = data_in[2].v_FragPos;
g_TexCoord = data_in[2].v_TexCoord;
g_LightSpacePos = data_in[2].v_LightSpacePos;
EmitVertex();
EndPrimitive();
}
However, I get these errors:
ERROR: 0:28: '.' : dot operator to an array only takes length()
ERROR: 0:28: 'assign' : cannot convert from 'attribute 2-component vector of highp float' to 'varying unknown-sized array of highp block'
ERROR: 0:29: '.' : dot operator to an array only takes length()
ERROR: 0:29: 'assign' : cannot convert from '3-component vector of highp float' to 'varying unknown-sized array of highp block'
................ Etc
I get these errors for every variable in the vertex shader that is passed to the geometry shader.
This shader works in a form without the geometry shader, and the geometry shader should currently be set up to do nothing, so what have I done wrong?
The output of the vertex shader is not an array, even if the next stage is a Geometry shader. The Vertex shader processes a single vertex, and therefore the outputs are always single values related to that vertex. The geometry shader's input interface is an array because the geometry shader takes a primitive (multiple vertices) as input rather than a single vertex. Remove [] in the vertex shader:
out DATA
{
vec2 v_TexCoord;
vec3 v_Normal;
vec3 v_FragPos;
vec4 v_LightSpacePos;
mat4 v_Proj;
} data_out; // <--- remove []
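For reference, the matching pair of interface declarations then looks like this. The block name DATA must be the same in both stages, while the instance names (data_out, data_in) are allowed to differ:
// Vertex shader: a single block instance, since each invocation processes one vertex
out DATA
{
    vec2 v_TexCoord;
    vec3 v_Normal;
    vec3 v_FragPos;
    vec4 v_LightSpacePos;
    mat4 v_Proj;
} data_out;
// Geometry shader: the same block, arrayed with one entry per vertex of the input primitive
in DATA
{
    vec2 v_TexCoord;
    vec3 v_Normal;
    vec3 v_FragPos;
    vec4 v_LightSpacePos;
    mat4 v_Proj;
} data_in[];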
I have code that uploads data to an OpenGL shader, but when I call glGetAttribLocation() for the array of data I am looking for, it always returns -1 as the location (thus not found). I have no idea how to debug this issue in the first place, since the variables are right there in the code (although the vertex shader only passes them on to the geometry shader).
Can someone help me figure out why glGetAttribLocation reports not found? Other items, like worldMatrix for example, work just fine when using glGetUniformLocation().
C++ Code trying to get the attribute id:
for (unsigned int i = 0; i < _nNumCameras; ++i) {
const auto glName = "texcoords[" + std::to_string(i) + "]";
const auto location = glGetAttribLocation(id(), glName.c_str());
if (location == -1) {
continue;
}
Vertex Shader:
#version 430
#define NUM_CAMERAS 3
uniform mat4 worldMatrix;
uniform mat4 viewProjMatrix;
layout(location = 0)in vec3 position;
layout(location = 1)in vec3 normal;
layout(location = 2)in float radius;
in vec2 texcoords[NUM_CAMERAS];
in uint cameraIds[NUM_CAMERAS];
out vec4 gl_Position;
out VS_OUT {
vec2 v_texCoords[NUM_CAMERAS];
vec3 v_normal;
uint cameraIDs[NUM_CAMERAS];
} vs_out;
void main()
{
gl_Position.xyz = position.xyz;
gl_Position.w = 1;
gl_Position = worldMatrix * gl_Position;
gl_Position = viewProjMatrix * gl_Position;
vs_out.v_texCoords = texcoords;
vs_out.cameraIDs = cameraIds;
vs_out.v_normal = normal;
}
Geometry Shader:
#version 430
#define NUM_CAMERAS 3
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
in VS_OUT {
vec2 v_texCoords[NUM_CAMERAS];
vec3 v_normal;
uint cameraIDs[NUM_CAMERAS];
} gs_in[];
out GS_OUT {
vec2 v_texcoord;
} gs_out;
flat out uint camera_id_unique;
void main() {
// Code selecting best camera texture
...
///
gl_Position = gl_in[0].gl_Position;
gs_out.v_texcoord = gs_in[0].v_texCoords[camera_id_unique];
EmitVertex();
gl_Position = gl_in[1].gl_Position;
gs_out.v_texcoord = gs_in[1].v_texCoords[camera_id_unique];
EmitVertex();
gl_Position = gl_in[2].gl_Position;
gs_out.v_texcoord = gs_in[2].v_texCoords[camera_id_unique];
EmitVertex();
EndPrimitive();
}
Fragment Shader:
#version 430
#define NUM_CAMERAS 3
uniform sampler2D colorTextures[NUM_CAMERAS];
in GS_OUT {
vec2 v_texcoord;
} fs_in;
flat in uint camera_id_unique;
out vec4 color;
void main(){
color = texture(colorTextures[camera_id_unique], fs_in.v_texcoord);
}
Arrayed program resources work in different ways depending on whether they are arrays of basic types or arrays of structs (or arrays of arrays). Resources that are arrays of basic types expose only the entire array as a single resource, whose name is "name[0]" and which has an explicit array size (if you query that property). Other arrayed resources expose separate names for each array element.
Since texcoords is an array of basic types, there is no "texcoords[2]" or "texcoords[1]"; there is only "texcoords[0]". Arrayed attributes are always assigned contiguous locations, so the locations for indices 1 and 2 are simply 1 or 2 plus the location of index 0.
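If you would rather not query at all, another option is to give the arrayed attributes explicit locations; each array element then occupies the next consecutive location. A minimal sketch, where the base location 3 is an arbitrarily chosen free slot:
#version 430
#define NUM_CAMERAS 3
layout(location = 0) in vec3 position;
layout(location = 1) in vec3 normal;
layout(location = 2) in float radius;
layout(location = 3) in vec2 texcoords[NUM_CAMERAS]; // occupies locations 3, 4 and 5
layout(location = 6) in uint cameraIds[NUM_CAMERAS]; // occupies locations 6, 7 and 8
On the C++ side you can then use those fixed locations directly, or query "texcoords[0]" once and add the array index to the returned location.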
I'm quite a rookie at GLSL and I've been struggling to find a way to color all the vertices of a cube in different colors. Each face has 4 vertices and a cube has 6 faces, so 6 * 4 = 24 vertices. But I can only draw one color per corner.
Vertex Shader:
#version 330
uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;
layout (location=0) in vec3 a_position;
layout (location=1) in vec3 a_normal;
out vec3 normal;
void main()
{
normal = a_position;
gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
Fragment Shader:
#version 330
in vec3 normal;
out vec4 fragColor;
void main() {
fragColor = vec4(normal, 1.0);
}
Result:
If you want to color each face in a different color and you want an in-shader solution, then one possibility is to color the fragments of the cube depending on which component of the vertex coordinate has the largest magnitude.
Pass the vertex coordinate to the fragment shader:
#version 330
uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;
layout (location=0) in vec3 a_position;
//layout (location=1) in vec3 a_normal;
out vec3 vertPos;
void main()
{
vertPos = a_position;
gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
Find the component of the vertex coordinate with the maximum absolute value and choose a color:
#version 330
in vec3 vertPos;
out vec4 fragColor;
void main() {
vec3 posAbs = abs(vertPos);
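// The next two lines first select the dominant axis: step(a, b) is 1.0 where b >= a, so the
// product is 1.0 only in the component with the largest absolute value. The following line then
// lights up an additional channel on the positive side of that axis, giving 6 distinct face colors.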
vec3 color = step(posAbs.yzx, posAbs) * step(posAbs.zxy, posAbs);
color += (1.0 - step(color.zxy * vertPos.zxy, vec3(0.0)));
fragColor = vec4(color, 1.0);
}
If the normal vectors are face normals, then there is an even simpler solution using the normal vectors:
Pass the normal vector to the fragment shader:
#version 330
uniform mat4 u_m_matrix;
uniform mat4 u_vp_matrix;
layout (location=0) in vec3 a_position;
layout (location=1) in vec3 a_normal;
out vec3 normal;
void main()
{
normal = a_normal;
gl_Position = u_vp_matrix * u_m_matrix * vec4(a_position, 1.0);
}
Compute the color depending on the normal vector:
#version 330
in vec3 normal;
out vec4 fragColor;
void main() {
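// abs(normal) gives each axis a primary color; max(normal.zxy, 0.0) adds a second channel on the
// positive faces only, so the six face normals map to red/yellow, green/cyan and blue/magenta.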
vec3 color = abs(normal.xyz) + max(normal.zxy, 0.0);
fragColor = vec4(color, 1.0);
}
[...] so I need 24 colors. [...]
In that case I suggest the following solution.
#version 330
in vec3 vertPos;
out vec4 fragColor;
void main() {
vec3 posAbs = abs(vertPos);
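// The two step() factors again mask the dominant axis; step(0.0, vertPos.yzx) encodes the signs of
// the swizzled coordinates, so each face is split into differently shaded regions (scaled by 0.5 to
// keep the color components in range).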
vec3 color = (step(posAbs.yzx, posAbs) * step(posAbs.zxy, posAbs) +
step(0.0, vertPos.yzx)) * 0.5;
fragColor = vec4(color, 1.0);
}
I'm trying to calculate per-face normals in a geometry shader, using the following pipeline:
//VERTEX_SHADER
#version 330
layout(location = 0) in vec4 vertex;
out vec3 vert;
uniform mat4 projMatrix;
uniform mat4 mvMatrix;
void main()
{
vert = vertex.xyz;
gl_Position = projMatrix * mvMatrix * vertex;
}
//GEOMETRY_SHADER
#version 330
layout ( triangles ) in;
layout ( triangle_strip, max_vertices = 3 ) out;
out vec3 normal_out;
uniform mat3 normalMatrix;
void main()
{
vec3 A = gl_in[2].gl_Position.xyz - gl_in[0].gl_Position.xyz;
vec3 B = gl_in[1].gl_Position.xyz - gl_in[0].gl_Position.xyz;
normal_out = normalMatrix * normalize(cross(A,B));
gl_Position = gl_in[0].gl_Position;
EmitVertex();
gl_Position = gl_in[1].gl_Position;
EmitVertex();
gl_Position = gl_in[2].gl_Position;
EmitVertex();
EndPrimitive();
}
//FRAG_SHADER
#version 330
in vec3 normal_out;
in vec3 vert;
out vec4 fColor;
uniform vec3 lightPos;
void main()
{
highp vec3 L = normalize(lightPos - vert);
highp float NL = max(dot(normal_out, L), 0.0);
highp vec3 color = vec3(1, 1, 0.0);
fColor = vec4(color*NL, 1.0);
}
However, I end up with very weird-looking faces that keep flickering (I included a snapshot below). It occurred to me that it might be because I'm using 8 vertices to represent one cell (cube) instead of 24 vertices, but I'm not quite sure if that is what is causing the problem.
Left: using light weighting 'NL'; right: without.
After every call to EmitVertex, the contents of all output variables are made undefined. Therefore, if you want to output the same value to multiple vertices, you must copy it to the output every time.
Also, note that each shader stage's outputs provide inputs only to the next stage. So if you have a GS, and you want to pass a value from the VS to the FS, you must have the GS explicitly pass that value through.
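Applied to the shaders in the question, a minimal sketch of a pass-through geometry shader that follows both rules could look like this. It keeps the question's normal computation and only adds the per-vertex copies; the pass-through output is named vert_out here (an arbitrary name), so the fragment shader's in vec3 vert; would have to be renamed to match:
#version 330
layout ( triangles ) in;
layout ( triangle_strip, max_vertices = 3 ) out;

in vec3 vert[];       // per-vertex value from the vertex shader
out vec3 vert_out;    // forwarded to the fragment shader
out vec3 normal_out;

uniform mat3 normalMatrix;

void main()
{
    vec3 A = gl_in[2].gl_Position.xyz - gl_in[0].gl_Position.xyz;
    vec3 B = gl_in[1].gl_Position.xyz - gl_in[0].gl_Position.xyz;
    vec3 N = normalMatrix * normalize(cross(A, B));

    for (int i = 0; i < 3; ++i)
    {
        gl_Position = gl_in[i].gl_Position;
        normal_out = N;        // re-written for every vertex: outputs are undefined after EmitVertex()
        vert_out = vert[i];    // explicitly passed through so the fragment shader still receives it
        EmitVertex();
    }
    EndPrimitive();
}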
I am using a FragmentShader and VertexShader at present, and they work absolutely fine. I cannot get my geometry shader working. I am absolutely new to it; below is what I have tried.
I am using VBOs, lighting and textures along with some geometry, and it all works fine without the geometry shader. The only thing I have changed is the variable names, since I had to take the input into the geometry shader and pass it back out. So I have appended 1 to the end of the variable names that go out from the geometry shader to the fragment shader.
I have also added the headers starting with # which were not there earlier. I am using GL_TRIANGLES to draw.
VertexShader
in vec4 position;
in vec4 color1;
in vec4 normal;
in vec2 texCoord;
uniform sampler2D Tex1;
uniform int use_texture;
out vec4 pcolor;
out vec3 N;
out vec3 L;
out vec3 R;
out vec3 V;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
#version 330 compatibility
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec4 v_color; // vertex color
out vec4 pos_in_eye; //vertex position in eye space
out vec2 FtexCoord;
void main(){
gl_Position = local2clip * position;
N = normalize(vec3(normal_matrix * normal)); //v_normal
vec4 Lpos = world2eye * light_pos; //light pos. in eye
vec4 Vpos = local2eye * position; //pos_in_eye
L = normalize(vec3(Lpos - Vpos)); //light_vector
R = normalize(reflect(-L, N));
V = normalize(vec3(-Vpos)); //eye vector
vec3 halfv = normalize(L+V);
FtexCoord = texCoord;
//pcolor = color1;
}
This is my FragmentShader
#version 330 compatibility
uniform int use_texture;
in vec4 pcolor;
in vec3 N1;
in vec3 L1;
in vec3 R1;
in vec3 V1;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
uniform sampler2D Tex1;
in vec2 FtexCoord1;
void main() {
vec4 ambient = light_ambient * mat_ambient;
float NdotL;
if (dot(N1,L1) <0.0) NdotL = 0.0;
else NdotL = dot(N1, L1);
vec4 diffuse = light_diffuse * mat_diffuse * NdotL;
float RdotV;
RdotV = dot(R1, V1);
if (NdotL == 0.0) RdotV = 0.0;
if (RdotV <0.0) RdotV = 0.0;
vec4 specular = light_specular * mat_specular * pow(RdotV,mat_shine);
vec4 texcolor;
if( use_texture == 1 ) {
texcolor = texture2D(Tex1, FtexCoord1);
gl_FragColor = texcolor;
}
else
gl_FragColor = (diffuse + ambient + specular);
}
This is my GeometryShader
#version 330
layout (triangles) in;
layout (triangles) out;
layout (max_vertices = 3) out;
out vec3 N1;
out vec3 L1;
out vec3 R1;
out vec3 V1;
in vec3 N;
in vec3 L;
in vec3 R;
in vec3 V;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec4 v_color1; // vertex color
out vec4 pos_in_eye1; //vertex position in eye space
out vec2 FtexCoord1;
in vec4 v_color; // vertex color
in vec4 pos_in_eye; //vertex position in eye space
in vec2 FtexCoord;
void main(void)
{
int i;
N1=N;
L1=L;
R1=R;
V1=R;
FtexCoord1=FtexCoord;
v_color1=v_color;
pos_in_eye1=pos_in_eye;
for (i = 0; i < gl_in.length(); i++)
{
gl_Position = gl_in[i].gl_Position;
EmitVertex();
}
EndPrimitive();
}
I just want whatever was there earlier to be passed from the vertex shader to the fragment shader via the geometry shader, so that I can manipulate the shader later. Currently the screen is just black.
The core of your problem is that you didn't bother to check for compilation errors when you built your Geometry Shader. I know that because I see several syntax errors for it. In particular:
in vec3 N;
in vec3 L;
in vec3 R;
in vec3 V;
in vec4 v_color; // vertex color
in vec4 pos_in_eye; //vertex position in eye space
in vec2 FtexCoord;
Geometry Shader inputs are always aggregated into arrays. Remember: a geometry shader operates on primitives, which are defined as a collection of one or more vertices. Each GS invocation therefore gets a set of per-vertex input values, one for each vertex in the primitive type defined by your layout in qualifier.
Notice how you loop over the number of vertices in a primitive and use gl_in[i] to get the input value for each vertex in the primitive. That's how you need to access all of your Geometry Shader inputs. And you need to write each one to its corresponding output variable, then call EmitVertex. All in that loop.
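Put together, a minimal sketch of a pass-through geometry shader with that structure, using the names from the question (and the valid triangle_strip output layout), would be:
#version 330
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;

in vec3 N[];
in vec3 L[];
in vec3 R[];
in vec3 V[];
in vec2 FtexCoord[];

out vec3 N1;
out vec3 L1;
out vec3 R1;
out vec3 V1;
out vec2 FtexCoord1;

void main(void)
{
    for (int i = 0; i < gl_in.length(); i++)
    {
        // copy this vertex's inputs to the outputs before emitting the vertex
        N1 = N[i];
        L1 = L[i];
        R1 = R[i];
        V1 = V[i];
        FtexCoord1 = FtexCoord[i];
        gl_Position = gl_in[i].gl_Position;
        EmitVertex();
    }
    EndPrimitive();
}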
I am using a FragmentShader and VertexShader at present, and they work absolutely fine. I cannot get my geometry shader working properly. I am absolutely new to it; below is what I have tried.
I am using VBOs, lighting and textures along with some geometry, and it all works fine without the geometry shader. The only thing I have changed is the variable names, since I had to take the input into the geometry shader and pass it back out. So I have appended 1 to the end of the variable names that go out from the geometry shader to the fragment shader.
I have also added the headers starting with # which were not there earlier. I am using GL_TRIANGLES to draw.
VertexShader
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec2 FtexCoord;
void main(){
gl_Position = local2clip * position;
N = normalize(vec3(normal_matrix * normal)); //v_normal
vec4 Lpos = world2eye * light_pos; //light pos. in eye
vec4 Vpos = local2eye * position; //pos_in_eye
L = normalize(vec3(Lpos - Vpos)); //light_vector
R = normalize(reflect(-L, N));
V = normalize(vec3(-Vpos)); //eye vector
vec3 halfv = normalize(L+V);
FtexCoord = texCoord;
//pcolor = color1;
}
This is my FragmentShader
#version 330 compatibility
uniform int use_texture;
in vec4 pcolor;
in vec3 N1;
in vec3 L1;
in vec3 R1;
in vec3 V1;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
uniform sampler2D Tex1;
in vec2 FtexCoord1;
void main() {
vec4 ambient = light_ambient * mat_ambient;
float NdotL;
if (dot(N1,L1) <0.0) NdotL = 0.0;
else NdotL = dot(N1, L1);
vec4 diffuse = light_diffuse * mat_diffuse * NdotL;
float RdotV;
RdotV = dot(R1, V1);
if (NdotL == 0.0) RdotV = 0.0;
if (RdotV <0.0) RdotV = 0.0;
vec4 specular = light_specular * mat_specular * pow(RdotV,mat_shine);
vec4 texcolor;
if( use_texture == 1 ) {
texcolor = texture2D(Tex1, FtexCoord1);
gl_FragColor = texcolor;
}
else
gl_FragColor = (diffuse + ambient + specular);
}
This is my Geometry Shader
#version 330
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;
out vec3 N1;
out vec3 L1;
out vec3 R1;
out vec3 V1;
in vec3 N[3];
in vec3 L[3];
in vec3 R[3];
in vec3 V[3];
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec2 FtexCoord1;
in vec2 FtexCoord[3];
void main(void)
{
int i;
for (i = 0; i < gl_in.length(); i++)
{
N1=N[i];
L1=L[i];
R1=R[i];
V1=R[i];
FtexCoord1=FtexCoord[i];
gl_Position = gl_in[i].gl_Position;
EmitVertex();
}
EndPrimitive();
}
I just want whatever was there earlier to be passed from the vertex shader to the fragment shader via the geometry shader, so that I can manipulate the shader later. But the lighting does not show the same effect, as shown in the pics.
There was a small bug in the code. It should be V1 = V[i]; instead of V1 = R[i];
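With that one-character fix, the pass-through loop in the geometry shader reads:
for (i = 0; i < gl_in.length(); i++)
{
    N1 = N[i];
    L1 = L[i];
    R1 = R[i];
    V1 = V[i];          // was V1 = R[i];
    FtexCoord1 = FtexCoord[i];
    gl_Position = gl_in[i].gl_Position;
    EmitVertex();
}
EndPrimitive();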