Update vertex position in vertex shader (OpenGL)

I have a single VBO with the following vertex attributes:
3 floats for x,y,z
3 floats for r,g,b
2 floats for velocity_x,velocity_y
I'd like to apply the velocity of a given vertex to its position in the vertex shader. I have written the following so far, but it doesn't seem to apply any changes:
"#version 330 core\n"
"layout (location = 0) in vec3 vertexPos;\n"
"layout (location = 1) in vec3 vertexColor;\n"
"layout (location = 2) in vec2 vertexVelocity;\n"
"vec3 newPosition;\n"
"out vec3 vertexColorRes;\n"
"uniform mat4 viewMatrix;\n"
"uniform mat4 projectionMatrix;\n"
"void main()\n"
"{\n"
" newPosition = vertexPos + vec3(vertexVelocity, 1.0);\n"
" gl_Position = projectionMatrix * viewMatrix * vec4(newPosition, 1.0);\n"
" vertexColorRes = vertexColor;\n"
"}\0"));
What seems to be the problem?

Related

Can you write OpenGL shader in different file and later link it to the program?

Can you write an OpenGL shader in a different file and later link it to the program? If so, how? Writing OpenGL shaders in strings makes my code messy.
Here is example code for shaders:
const char* vertexShaderSource =
"#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"layout (location = 1) in vec3 aColor;\n"
"\n"
"out vec3 ourColor;\n"
"uniform vec2 angleValues;\n"
"\n"
"void main()\n"
"{\n"
"gl_Position = vec4(aPos.x * angleValues.x - aPos.y * angleValues.y, aPos.y * angleValues.x + aPos.x * angleValues.y , aPos.z, 1.0);\n"
"ourColor = aColor;\n"
"}\n";
const char* fragmentShaderSource =
"#version 330 core\n"
"out vec4 FragColor;\n"
"in vec3 ourColor;\n"
"\n"
"void main()\n"
"{\n"
"FragColor = vec4(ourColor, 1.0);\n"
"}\n";
Yes, you can have files like my_shader.vs or my_fragment.fs and link them like in this Shader class
Just initialize it like this:
shader = Shader("./shaders/my_shader.vs", "./shaders/my_fragment.fs");

Qt and OpenGL: Fragment shader won't compile when using addShaderFromSourceFile instead of addShaderFromSourceCode

I am using QOpenGLWidget to render some CAD meshes. I started with an example that had the shader source code written directly into the code in the following way:
Vertex shader
static const char *vertexShaderSourceCore =
"#version 330\n"
"in vec4 vertex;\n"
"in vec3 normal;\n"
"out vec3 vert;\n"
"out vec3 vertNormal;\n"
"uniform mat4 projMatrix;\n"
"uniform mat4 mvMatrix;\n"
"uniform mat3 normalMatrix;\n"
"void main() {\n"
" vert = vertex.xyz;\n"
" vertNormal = normalMatrix * normal;\n"
" gl_Position = projMatrix * mvMatrix * vertex;\n"
"}\n";
Fragment shader
static const char *fragmentShaderSourceCore =
"#version 330\n"
"in highp vec3 vert;\n"
"in highp vec3 vertNormal;\n"
"out highp vec4 fragColor;\n"
"uniform highp vec3 lightPos;\n"
"void main() {\n"
" highp vec3 L = normalize(lightPos - vert);\n"
" highp float NL = max(dot(normalize(vertNormal), L), 0.0);\n"
" highp vec3 color = vec3(0.39, 0.4, 0.0);\n"
" highp vec3 col = clamp(color * 0.2 + color * 0.8 * NL, 0.0, 1.0);\n"
" fragColor = vec4(col, 1.0);\n"
"}\n";
I load this code into my QOpenGLShaderProgram with the function addShaderFromSourceCode. With this, everything works as expected.
Now I wanted to move the shader source code to a separate file in order to make the code cleaner. I created a .qrc file and put in the shaders in the following way:
Vertex Shader
#version 330
in vec4 vertex;
in vec3 normal;
out vec3 vert;
out vec3 vertNormal;
uniform mat4 projMatrix;
uniform mat4 mvMatrix;
uniform mat3 normalMatrix;
void main()
{
vert = vertex.xyz;
vertNormal = normalMatrix * normal;
gl_Position = projMatrix * mvMatrix * vertex;
}
Fragment shader
in highp vec3 vert;
in highp vec3 vertNormal;
out highp vec4 fragColor;
uniform highp vec3 lightPos;
void main() {
highp vec3 L = normalize(lightPos - vert);
highp float NL = max(dot(normalize(vertNormal), L), 0.0);
highp vec3 color = vec3(0.39, 0.4, 0.0);
highp vec3 col = clamp(color * 0.2 + color * 0.8 * NL, 0.0, 1.0);
fragColor = vec4(col, 1.0);
}
I then wanted to use QOpenGLShaderProgram::addShaderFromSourceFile to load the shader code from these files.
The vertex shader loads without issues, but when importing the fragment shader I get the following error message:
QOpenGLShader::compile(Fragment): ERROR: 4:1: ':' : syntax error syntax error
*** Problematic Fragment shader source code ***
#version 110
#ifdef GL_KHR_blend_equation_advanced
#extension GL_ARB_fragment_coord_conventions : enable
#extension GL_KHR_blend_equation_advanced : enable
#endif
#define lowp
#define mediump
#define highp
#line 1
:/globalCore.frag
***
which I do not understand. I don't think its a compatibility issue since I have two additional shaders if core mode is not enabled.
Can someone help me understand where this ominous "syntax error" is coming from? I don't have a colon in my shader code so I am at a loss here.

why some triangles are becoming black after I add lighting? [closed]

Closed. This question needs debugging details. It is not currently accepting answers.
Edit the question to include desired behavior, a specific problem or error, and the shortest code necessary to reproduce the problem. This will help others answer the question.
Closed 1 year ago.
Improve this question
I am trying to add specular lighting to my OpenGL ES program, which loads a 3D model. It works normally, but whenever I add lighting this happens:
some triangles are becoming black and some are staying white.
here is my Vertex and fragment shader code:
"attribute vec4 position;\n"
"attribute vec4 normal;\n"
"attribute vec4 color;\n"
"attribute vec2 texCord;\n"
"varying vec4 vcolor;\n"
"varying vec2 vtexCord;\n"
"varying vec3 s_normal;\n"
"varying vec3 toLightv;\n"
"varying vec3 toCameraV;\n"
"uniform vec3 light_pos;\n"
"uniform mat4 MVP;\n"
"uniform mat4 view;"
"uniform mat4 transform;\n"
"void main()\n"
"{\n"
"gl_Position = MVP * vec4(position.xyz, 1.0);\n"
"vcolor = color;\n"
"vtexCord = texCord;\n"
"s_normal = (transform * vec4(normal.xyz,0.0)).xyz;\n"
"toLightv = light_pos - (MVP * vec4(position.xyz, 1.0)).xyz;\n"
"toCameraV = (view * vec4(0.0,0.0,0.0,1.0)).xyz - (MVP * vec4(position.xyz, 1.0)).xyz;\n"
"}";
And here is the fragment shader:
"precision mediump float;\n"
"varying vec4 vcolor;\n"
"varying vec2 vtexCord;\n"
"varying vec3 s_normal;\n"
"varying vec3 toLightv;\n"
"varying vec3 toCameraV;\n"
"uniform sampler2D s_texr;\n"
"uniform vec3 light_col;\n"
"void main()\n"
"{\n"
// "gl_FragColor = vec4(1.0,0.0,1.0,1.0);\n"
//"gl_FragColor = vec4 (vcolor.xyz,1.0);\n"
"vec3 unitCV = normalize(toCameraV);\n"
"vec3 unitNL = normalize(s_normal);\n"
"vec3 unitLV = normalize(toLightv);\n"
"vec3 lightComing = -unitLV;\n"
"vec3 reflectedL = reflect(lightComing,unitNL);\n"
"float specularFactor = dot(reflectedL,toCameraV);\n"
"specularFactor = max(specularFactor,0.0);\n"
"float dampFactor = pow(specularFactor,1.0);\n"
"vec3 Specular= dampFactor * vec3(1.0,1.0,1.0);\n"
"float nDotl = dot(unitNL,unitLV);"
"vec3 diffuse =max(nDotl,0.1) * vec3(1.0,1.0,1.0);"
// diffuse = diffuse * (1.0 / (1.0 + (0.00000025 * distance * distance)));
"gl_FragColor =vec4(diffuse.xyz,1.0)* texture2D(s_texr, vtexCord)+vec4(Specular.xyz,1.0);"
"};"
I enabled depth testing and the problem was solved:
glEnable(GL_DEPTH_TEST);

Specular light erratic in OpenGL

I am working on a shader where the fragment shader should work in the tangent space. It works just as expected for both the ambient and diffuse light, but the specular light is just plain weird. It seems that nearby fragments can have a lot or no light with no obvious reasons.
The vertex shader is:
#version 330 core
layout (location = 0) in vec3 inVertex;
layout (location = 1) in vec3 inNormal;
layout (location = 2) in vec2 inTexture;
layout (location = 3) in vec3 inTangent;
layout (location = 4) in vec3 inBitangent;
out vec3 FragmentPosition;
out vec2 TextureCoordinate;
out vec3 TangentLightDirection;
out vec3 TangentViewPosition;
out vec3 TangentFragmentPosition;
void main()
{
FragmentPosition = vec3(inVertex);
vec3 normal = normalize(inNormal);
gl_Position = vec4( inVertex, 1 );
TextureCoordinate = inTexture;
vec3 tangent = normalize(inTangent);
vec3 biTangent = normalize(inBitangent);
mat3 toTangentSpaceTransformation = transpose(mat3(tangent,biTangent,normal));
TangentFragmentPosition = toTangentSpaceTransformation * FragmentPosition;
TangentLightPosition = toTangentSpaceTransformation * vec3(0,1,1);
TangentFragmentPosition = toTangentSpaceTransformation * vec3(0,0,3);
}
And the fragment shader is:
#version 330 core
out vec4 FragColor;
in vec3 FragmentPosition;
in vec2 TextureCoordinate;
in vec3 TangentLightDirection;
in vec3 TangentViewPosition;
in vec3 TangentFragmentPosition;
uniform sampler2D Texture;
uniform sampler2D normalTexture;
void main() {
vec3 normal = vec3(0,0,1);
float shininess = 4;
vec3 phongVector = vec3(0.3,0.7,1);
vec4 color = texture(Texture,TextureCoordinate);
vec4 ambientLightColor = vec4(1,1,1,1);//vec4(normalOffset,1);
// Calculation of ambient light
vec4 sunLightColor = vec4(1,1,1,1);
vec3 sunLightDirection = normalize(TangentLightPosition);
vec4 ambientLight = phongVector[0] * ambientLightColor;
// Calculation of diffuse light
float diffuseConst = max(dot(normal,sunLightDirection),0.0);
vec4 diffuseLight = phongVector[1] * diffuseConst * sunLightColor;
// Calculation of specular light
vec3 viewDirection = normalize(TangentViewPosition - TangentFragmentPosition);
vec3 reflectionDirection = reflect(-sunLightDirection,normal);
float spec = pow(max(dot(reflectionDirection,viewDirection),0),shininess);
vec4 specularLight = phongVector[2] * spec * sunLightColor;
FragColor = (specularLight)*color;
}
It was a typo: TangentFragmentPosition was initialized twice, while TangentViewPosition was not initialized at all. Initializing TangentViewPosition gave the desired result.

Link error adding geometry shader between vertex and fragment shader

When I try adding a geometry shader between working vertex and fragment shaders I get a link error:
Fragment shader(s) failed to link, vertex shader(s) failed to link.
ERROR: error(#280) Not all shaders have valid object code
ERROR: error(#280) Not all shaders have valid object code
All three shaders compile without errors. I guess the ins and outs don't fit the information-flow pipeline. The built-in ins and outs confuse me, so I can't spot the error.
Source for the shaders:
vertex_source =
"#version 330\n"
"in vec3 Position;\n"
"in vec2 TexCoord;\n"
"out vec3 oColor;\n"
"out vec2 oTexcoord;\n"
"void main() {\n"
" oTexcoord = TexCoord;\n"
" gl_Position = gl_ModelViewProjectionMatrix*vec4(Position, 1.0);\n"
"}\n";
geometry_source =
"#version 330\n";
"layout (triangles) in;\n";
"layout (triangle_strip, max_vertices=3) out;\n";
"in vec3 Color;\n";
"in vec2 TexCoord;\n";
"out vec3 oColor;\n";
"out vec2 oTexCoord;\n";
"void main() {\n";
" oColor = Color;\n";
" oTexCoord = TexCoord;\n";
" gl_Position = gl_in[0].gl_Position;\n";
" EmitVertex();\n";
" gl_Position = gl_in[1].gl_Position;\n";
" EmitVertex();\n";
" gl_Position = gl_in[2].gl_Position;\n";
" EmitVertex();\n";
" EndPrimitive();\n";
"}\n";
fragment_source =
"#version 330\n"
"in vec2 oTexcoord;\n"
"out vec4 oColor;\n"
"uniform sampler2D tex;\n"
"uniform sampler2D tex_norm;\n"
"uniform sampler2D tex_spec;\n"
"void main() {\n"
" vec4 lightpos = normalize(-gl_ModelViewProjectionMatrix*vec4(1.0, -1.0, -1.5, 1.0));\n"
" vec3 tmpNorm = normalize(texture2D(tex_norm, oTexcoord).rgb * 2.0 - 1.0);\n"
" float a = dot(tmpNorm, lightpos.xyz);\n"
" float difuse = max(a, 0.1);\n"
" float spec = texture2D(tex_spec, oTexcoord).r * pow(a, 2.0);\n"
" vec3 tmpcolor = difuse * texture2D(tex, oTexcoord).rgb;\n"
" oColor = vec4(tmpcolor+tmpcolor*spec, 1.0);\n"
"}\n";
What am I doing wrong in the geometry shader?
I have tried skipping the unused oColor output and changing the geometry shader inputs to arrays, like this:
#define GLSL(src) "#version 330 core\n" #src
vertex_source = GLSL(
in vec3 Position;
in vec2 TexCoord;
out vec2 oTexcoord;
void main() {
gl_Position = gl_ModelViewProjectionMatrix*vec4(Position, 1.0);
oTexcoord = TexCoord;
}
);
geometry_source = GLSL(
layout (triangles) in;
layout (triangle_strip, max_vertices=3) out;
in vec2 gsTexCoord[];
out vec2 gsoTexCoord;
void main() {
gsoTexCoord = gsTexCoord[0];
gl_Position = gl_in[0].gl_Position;
EmitVertex();
gsoTexCoord = gsTexCoord[1];
gl_Position = gl_in[1].gl_Position;
EmitVertex();
gsoTexCoord = gsTexCoord[2];
gl_Position = gl_in[2].gl_Position;
EmitVertex();
EndPrimitive();
}
);
fragment_source = GLSL(
in vec2 oTexcoord;
out vec4 oColor;
uniform sampler2D tex;
uniform sampler2D tex_norm;
uniform sampler2D tex_spec;
void main() {
vec4 lightpos = normalize(-gl_ModelViewProjectionMatrix*vec4(1.0, -1.0, -1.5, 1.0));
vec3 tmpNorm = normalize(texture2D(tex_norm, oTexcoord).rgb * 2.0 - 1.0);
float a = dot(tmpNorm, lightpos.xyz);
float difuse = max(a, 0.1);
float spec = texture2D(tex_spec, oTexcoord).r * pow(a, 2.0);
vec3 tmpcolor = difuse * texture2D(tex, oTexcoord).rgb;
oColor = vec4(tmpcolor+tmpcolor*spec, 1.0);
}
);
That gives me the following link error:
Fragment shader(s) failed to link, vertex shader(s) failed to link.
ERROR: error(#277) Symbol 'gsTexCoord[0]' usage doesn't match between two stages
ERROR: error(#277) Symbol 'oTexcoord' usage doesn't match between two stages
ERROR: error(#277) Symbol 'gsTexCoord[0]' usage doesn't match between two stages
ERROR: error(#277) Symbol 'oTexcoord' usage doesn't match between two stages
Your geometry shader inputs need to be array-valued and match the names in your vertex shader:
// existing vertex shader outputs:
out vec3 oColor;
out vec2 oTexcoord;
// wrong geometry shader inputs:
in vec3 Color;
in vec2 TexCoord;
// correct geometry shader inputs:
in vec3 oColor[];
in vec2 oTexcoord[];