Names of vectors in different fragment and vertex shader files - OpenGL

I'm trying to draw two objects in OpenGL with two different textures, and one of them should move.
I created two shader programs and assigned each to a different index array. The shader programs appear to link correctly, but the program draws only one object (the one I use later). Is it correct to give both .frag and .vert files the same structure and the same in/out variable names, changing only the texture and removing the transformation from the static object?
// vertex shader of the moving object
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 color;
layout (location = 2) in vec2 texCoord;
out vec3 vecColor;
out vec2 TexCoord;
uniform mat4 transform;
uniform mat4 view;
uniform mat4 projection;
void main()
{
    gl_Position = projection * view * transform * vec4(position, 1.0f);
    vecColor = color;
    TexCoord = texCoord;
}
// vertex shader of the static object
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 color;
layout (location = 2) in vec2 texCoord;
out vec3 vecColor;
out vec2 TexCoord;
uniform mat4 view;
uniform mat4 projection;
void main()
{
    gl_Position = projection * view * vec4(position, 1.0f);
    vecColor = color;
    TexCoord = texCoord;
}
// code of Use()
void Use() const
{
    glUseProgram(get_programID());
}
// Drawing elements in the main event loop while()
// Draw first object
theProgram_stelaz.Use();
glBindVertexArray(VAO_stelaz);
glDrawElements(GL_TRIANGLES, _countof(indices_stelaz), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
// Draw second object
theProgram.Use();
glBindVertexArray(VAO_wings);
glDrawElements(GL_TRIANGLES, _countof(indices_wings), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
Now the program draws only the second object, from indices_wings. I want to draw both elements, and only the object from indices_wings should move.
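For reference, uniforms are per-program state in OpenGL, so each program's view and projection matrices (and the moving object's transform) must be uploaded while that program is bound. A minimal sketch of the loop under that assumption, reusing the Use() and get_programID() helpers above and assuming glm::mat4 variables named view, projection and transform:
// Draw first object: bind its program, then set that program's uniforms
theProgram_stelaz.Use();
glUniformMatrix4fv(glGetUniformLocation(theProgram_stelaz.get_programID(), "view"),
    1, GL_FALSE, glm::value_ptr(view));
glUniformMatrix4fv(glGetUniformLocation(theProgram_stelaz.get_programID(), "projection"),
    1, GL_FALSE, glm::value_ptr(projection));
glBindVertexArray(VAO_stelaz);
glDrawElements(GL_TRIANGLES, _countof(indices_stelaz), GL_UNSIGNED_INT, 0);
// Draw second object: its program has its own uniform storage, including "transform"
theProgram.Use();
glUniformMatrix4fv(glGetUniformLocation(theProgram.get_programID(), "view"),
    1, GL_FALSE, glm::value_ptr(view));
glUniformMatrix4fv(glGetUniformLocation(theProgram.get_programID(), "projection"),
    1, GL_FALSE, glm::value_ptr(projection));
glUniformMatrix4fv(glGetUniformLocation(theProgram.get_programID(), "transform"),
    1, GL_FALSE, glm::value_ptr(transform));
glBindVertexArray(VAO_wings);
glDrawElements(GL_TRIANGLES, _countof(indices_wings), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);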

Related

GLSL Error when trying to pass data to geometry shader

I have a vertex shader that takes in position, texture coordinates, normals and some uniforms:
#version 330 core
layout(location = 0) in vec4 position;
layout(location = 1) in vec2 texCoord;
layout(location = 2) in vec3 normal;
//MVP
uniform mat4 u_Model;
uniform mat4 u_View;
uniform mat4 u_Proj;
//Lighting
uniform mat4 u_InvTranspModel;
uniform mat4 u_LightMVP;
out DATA
{
    vec2 v_TexCoord;
    vec3 v_Normal;
    vec3 v_FragPos;
    vec4 v_LightSpacePos;
    mat4 v_Proj;
} data_out[];
void main()
{
    gl_Position = u_Model * position;
    data_out.v_TexCoord = texCoord;
    data_out.v_Normal = mat3(u_InvTranspModel) * normal;
    data_out.v_Proj = u_Proj * u_View * u_Model;
    //Light
    data_out.v_FragPos = vec3(u_Model * position);
    data_out.v_LightSpacePos = u_LightMVP * position;
}
This is what I'm passing into my geometry shader:
#version 330 core
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
in DATA
{
    vec2 v_TexCoord;
    vec3 v_Normal;
    vec3 v_FragPos;
    vec4 v_LightSpacePos;
    mat4 v_Proj;
} data_in[];
out vec2 g_TexCoord;
out vec3 g_Normal;
out vec3 g_FragPos;
out vec4 g_LightSpacePos;
void main()
{
    gl_Position = data_in[0].v_Proj * gl_in[0].gl_Position;
    g_Normal = data_in[0].v_Normal;
    g_FragPos = data_in[0].v_FragPos;
    g_TexCoord = data_in[0].v_TexCoord;
    g_LightSpacePos = data_in[0].v_LightSpacePos;
    EmitVertex();

    gl_Position = data_in[1].v_Proj * gl_in[1].gl_Position;
    g_Normal = data_in[1].v_Normal;
    g_FragPos = data_in[1].v_FragPos;
    g_TexCoord = data_in[1].v_TexCoord;
    g_LightSpacePos = data_in[1].v_LightSpacePos;
    EmitVertex();

    gl_Position = data_in[2].v_Proj * gl_in[2].gl_Position;
    g_Normal = data_in[2].v_Normal;
    g_FragPos = data_in[2].v_FragPos;
    g_TexCoord = data_in[2].v_TexCoord;
    g_LightSpacePos = data_in[2].v_LightSpacePos;
    EmitVertex();

    EndPrimitive();
}
However I get the errors:
ERROR: 0:28: '.' : dot operator to an array only takes length()
ERROR: 0:28: 'assign' : cannot convert from 'attribute 2-component vector of highp float' to 'varying unknown-sized array of highp block'
ERROR: 0:29: '.' : dot operator to an array only takes length()
ERROR: 0:29: 'assign' : cannot convert from '3-component vector of highp float' to 'varying unknown-sized array of highp block'
... etc.
I get these errors for every variable that the vertex shader passes to the geometry shader.
The shaders work in a form without the geometry shader, and the geometry shader should currently be set up to do nothing, so what have I done wrong?
The output of the vertex shader is not an array, even if the next stage is a geometry shader. The vertex shader processes a single vertex, so its outputs are always single values related to that vertex. The geometry shader's input interface is an array because the geometry shader takes a primitive (multiple vertices) as input rather than a single vertex. Remove the [] in the vertex shader:
out DATA
{
    vec2 v_TexCoord;
    vec3 v_Normal;
    vec3 v_FragPos;
    vec4 v_LightSpacePos;
    mat4 v_Proj;
} data_out; // <--- remove []
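With data_out declared as a single block instance, the assignments in main() (for example data_out.v_TexCoord = texCoord) compile as written, while the arrayed data_in[] block in the geometry shader stays exactly as it is, since each index there is one vertex of the input primitive.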

Why doesn't my passthrough geometry shader work?

I am making a cubesphere Earth project. I implemented vertex and fragment shaders for textures and it worked fine (with camera movement).
I wanted to add a passthrough geometry shader, but I can't get it to work.
Vertex shader
#version 450 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 textureCoords;
out vec2 textCoords;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main()
{
    gl_Position = projection * view * model * vec4(position, 1.0);
    textCoords = textureCoords;
}
Geometry shader
#version 450 core
layout (triangles) in;
layout (triangles, max_vertices = 3) out;
in vec2 textCoords[];
out vec2 TextCoords;
void main()
{
    for(int i = 0; i < gl_in.length(); i++)
    {
        gl_Position = gl_in[i].gl_Position;
        TextCoords = textCoords[i];
        EmitVertex();
    }
    EndPrimitive();
}
Fragment shader
#version 450 core
in vec2 TextCoords;
out vec4 fragmentColor;
uniform sampler2D earth;
void main()
{
    fragmentColor = texture(earth, TextCoords);
}
I used the shader class from learnopengl, and the only thing I changed in Main was the geometry shader path in the Shader constructor.
[Screenshot: rendering before adding the geometry shader]
[Screenshot: rendering after adding the geometry shader]
The issue was
layout (triangles, max_vertices = 3) out;
which should be
layout (triangle_strip, max_vertices = 3) out;
triangles is not a valid output primitive for a geometry shader: the output layout may only be points, line_strip, or triangle_strip.

Can't get OpenGL's glDrawElements to work with geometry shader

I have attached the shader below, but I can't find any info on how to use glDrawElements with a geometry shader attached to the shader program.
The program outputs a quad on the screen without the geometry shader; now I'm trying to do the same with a geometry shader attached.
//In my .cpp file
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
// Vertex shader
#version 440
layout(location = 0) in vec3 vertex_position;
layout(location = 1) in vec3 vertex_color;
uniform mat4 world_matrix;
uniform mat4 view_matrix;
uniform mat4 projection_matrix;
out vec3 color;
void main() {
    color = vertex_color;
    gl_Position = projection_matrix * view_matrix * world_matrix *
                  vec4(vertex_position, 1.0);
}
// Geometry shader
#version 440 core
layout (triangle_strip) in;
layout (triangle_strip, max_vertices = 6) out;
layout(location = 1) in vec3 vertex_color;
out vec3 color;
void main()
{
    for(int i = 0; i < gl_in.length(); i++)
    {
        // copy attributes
        gl_Position = gl_in[i].gl_Position;
        color = vertex_color;
        // done with the vertex
        EmitVertex();
    }
    EndPrimitive();
}
//Fragment shader
#version 440
in vec3 color;
out vec4 fragment_color;
void main () {
    fragment_color = vec4(color, 1.0);
}
See the Khronos Group's handy OpenGL wiki page on shader stage inputs and outputs:
Global variables declared with the in qualifier are shader stage input variables. These variables are given values by the previous stage (possibly via interpolation of values output from multiple shader executions).
Global variables declared with the out qualifier are shader stage output variables. These values are passed to the next stage of the pipeline (possibly via interpolation of values output from multiple shader executions).
Geometry Shader inputs are aggregated into arrays, one per vertex in the primitive. The length of the array depends on the input primitive type used by the GS. Each array index represents a single vertex in the input primitive.
You have a vertex shader, a geometry shader and a fragment shader. In this case the vertex shader is the first shader stage, followed by the geometry shader, and the last shader stage is the fragment shader.
So the input variables of the geometry shader have to match the output variables of the vertex shader, and the input variables of the fragment shader have to match the output variables of the geometry shader.
Further note that the possible input primitive specifiers are points, lines, lines_adjacency, triangles and triangles_adjacency.
See also Geometry Shader - Primitive in/out specification.
This means your code has to look something like this:
Vertex shader:
#version 440
layout(location = 0) in vec3 vertex_position;
layout(location = 1) in vec3 vertex_color;
uniform mat4 world_matrix;
uniform mat4 view_matrix;
uniform mat4 projection_matrix;
out vec3 vert_stage_color;
void main()
{
    vert_stage_color = vertex_color;
    gl_Position = projection_matrix * view_matrix * world_matrix * vec4(vertex_position, 1.0);
}
Geometry shader:
#version 440 core
layout (triangles) in;
layout (triangle_strip, max_vertices = 6) out;
in vec3 vert_stage_color[];
out vec3 geo_stage_color;
void main()
{
    for(int i = 0; i < gl_in.length(); i++)
    {
        // copy attributes
        gl_Position = gl_in[i].gl_Position;
        geo_stage_color = vert_stage_color[i];
        // done with the vertex
        EmitVertex();
    }
    EndPrimitive();
}
Fragment shader:
#version 440
in vec3 geo_stage_color;
out vec4 fragment_color;
void main ()
{
    fragment_color = vec4(geo_stage_color, 1.0);
}
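Note that the corrected geometry shader emits only the three vertices of the input triangle, so max_vertices = 6 is just a safe upper bound; max_vertices = 3 would also suffice for this passthrough.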

Passing array of mat4 to GLSL Shader uniform

I ran into some trouble setting up the animation shader for my OpenGL application. Basically it takes an array of 50 glm::mat4 matrices and should set them as a uniform in my GLSL shader, yet only the first value is actually sent to the shader; all the other array entries in the shader are set to 0.
I think the problem occurs when passing from C++ to GLSL:
class model {
    ...
    glm::mat4 finalBoneTransforms[50];
    ...
};

void model::draw() {
    // Set joints
    int jointLoc = glGetUniformLocation(shaderID, "jointTransforms");
    glUniformMatrix4fv(jointLoc, 50, GL_FALSE, glm::value_ptr(finalBoneTransforms[0]));
    ...
}
So how come only the first value is passed? Shouldn't OpenGL take the 50 elements stored in contiguous memory starting at the first element, which is referenced via value_ptr?
I would much prefer to use arrays instead of vectors, to make sure I don't suffer any pointer invalidation due to reallocation. Aren't elements of an array stored in contiguous memory? Are there any other obvious mistakes causing this weird behaviour?
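For what it's worth, both assumptions hold: the elements of a C++ array are contiguous, and each glm::mat4 is 16 tightly packed column-major floats, so a single glUniformMatrix4fv call can upload the whole array. A minimal sketch under those assumptions (uploadJoints is a hypothetical helper name; shaderID and finalBoneTransforms are the members from the snippet above):
void model::uploadJoints() {
    // glUniform* writes to the *currently bound* program, so bind it first;
    // forgetting this is a classic cause of uniforms silently not arriving.
    glUseProgram(shaderID);
    // "jointTransforms[0]" explicitly names the first array element; the bare
    // array name should also work, but some older drivers are picky about it.
    GLint jointLoc = glGetUniformLocation(shaderID, "jointTransforms[0]");
    // One call uploads all 50 matrices: 50 * 16 contiguous floats.
    glUniformMatrix4fv(jointLoc, 50, GL_FALSE, glm::value_ptr(finalBoneTransforms[0]));
}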
Edit: Here's the shader code:
#version 330 core
const int MAX_JOINTS = 50;
const int MAX_WEIGHTS = 4;
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNormals;
layout (location = 2) in vec2 aTexCoord;
layout (location = 3) in vec4 aBoneWeight;
layout (location = 4) in ivec4 aBoneIndex;
out vec2 texCoords;
uniform mat4 jointTransforms[MAX_JOINTS];
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main(void) {
    vec4 totalLocalPos = vec4(0.0);
    vec4 totalNormal = vec4(0.0);
    for (int i = 0; i < MAX_WEIGHTS; i++) {
        mat4 jointTransform = jointTransforms[aBoneIndex[i]];
        vec4 posePosition = jointTransform * vec4(aPos, 1.0);
        totalLocalPos += posePosition * aBoneWeight[i];
    }
    gl_Position = projection * view * totalLocalPos;
    texCoords = aTexCoord;
}

LWJGL shader normal's alias

I'm writing my app with LWJGL, and on some graphics cards (the AMD Radeon series) I cannot change the normal attribute's name in the vertex shader: if I do, something goes wrong and only one big triangle appears on the screen. Here is my code:
#version 150
//our attributes
in vec3 a_position;
in vec2 a_textureCoords;
in vec3 a_normal; //Here is an error
//send the color out to the fragment shader
out vec2 vTextureCoords;
void main(void)
{
    a_normal;
    gl_Position = vec4(a_position, 1.0);
    vTextureCoords = a_textureCoords;
}
If I change the shader to this:
#version 150
//our attributes
in vec3 a_position;
in vec2 a_textureCoords;
in vec3 normal;
//send the color out to the fragment shader
out vec2 vTextureCoords;
void main(void)
{
    normal;
    gl_Position = vec4(a_position, 1.0);
    vTextureCoords = a_textureCoords;
}
Everything works fine and the mesh appears as it should. Is that normal?
And my attribute-binding calls:
//Before
GL20.glBindAttribLocation(s_programID, 2, "a_normal");
//After
GL20.glBindAttribLocation(s_programID, 2, "normal");
Edit: the attribute locations in the shader:
// Before
a_normal        0
a_position      1
a_textureCoords 2
// After
a_position      0
a_textureCoords 1
normal          2
I'd suggest you write something like this:
#version 150
// NOTE: layout(location = ...) on attributes requires GLSL 330, or this
// extension when staying on GLSL 150:
#extension GL_ARB_explicit_attrib_location : require
#define POSITION 0
#define TEX_COORD 1
#define NORMAL 2
//our attributes
layout (location = POSITION) in vec3 a_position;
layout (location = TEX_COORD) in vec2 a_textureCoords;
layout (location = NORMAL) in vec3 a_normal;
//send the color out to the fragment shader
out vec2 vTextureCoords;
void main(void)
{
    a_normal;
    gl_Position = vec4(a_position, 1.0);
    vTextureCoords = a_textureCoords;
}
And then on java you have something like:
interface Semantic {
interface Attr {
int POSITION = 0;
int TEX_COORD = 1;
int NORMAL = 2;
}
}
constants that you will use in your glVertexAttribPointer and glEnableVertexAttribArray calls.
Otherwise, remember to call glBindAttribLocation before linking the program, or glGetAttribLocation after linking it.
PS: remember to do something useful with that a_normal, otherwise the GLSL compiler will optimize it out by removing it completely.
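To make that ordering concrete, here is a minimal sketch with C-style GL calls (LWJGL's GL20 class mirrors them one-to-one); vertexShader and fragmentShader are placeholder shader objects:
GLuint program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
// Option A: force the index, which only takes effect if done BEFORE linking
glBindAttribLocation(program, 2, "a_normal");
glLinkProgram(program);
// Option B: accept the linker's choice and query it AFTER linking
GLint normalLoc = glGetAttribLocation(program, "a_normal"); // -1 if optimized out
glEnableVertexAttribArray(normalLoc);
glVertexAttribPointer(normalLoc, 3, GL_FLOAT, GL_FALSE, 0, (void*)0); // tightly packed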