OpenGL Mac OS X vertex shader not linking to fragment shader - C++

I'm having problems compiling and running a simple OpenGL application on Mac OS X 10.9. It works just fine on Windows, but on the Mac I keep getting errors while linking the vertex shader and the fragment shader into the shader program.
Here is my console output.
4.1 INTEL-8.26.34
ERROR! could not link the shader program
WARNING: Output of vertex shader 'outColor' not read by fragment shader
ERROR: Input of fragment shader 'inColor' not written by vertex shader
Program ended with exit code: 0
Here is the method I'm using to link the two together.
GLuint createShaderProgram(GLuint vertexShader, GLuint fragmentShader)
{
    // Create and link the shader program
    GLuint shaderProgram = glCreateProgram(); // create handle
    if (!shaderProgram) {
        ERROR("could not create the shader program", false);
        return NULL_HANDLE;
    }
    glAttachShader(shaderProgram, vertexShader);   // attach vertex shader
    glAttachShader(shaderProgram, fragmentShader); // attach fragment shader
    glLinkProgram(shaderProgram);

    // check to see if the linking was successful
    GLint linked;
    glGetProgramiv(shaderProgram, GL_LINK_STATUS, &linked); // get link status
    if (!linked) {
        ERROR("could not link the shader program", false);
        GLint maxLength;
        GLsizei length;
        glGetProgramiv(shaderProgram, GL_INFO_LOG_LENGTH, &maxLength);
        char* log = new char[maxLength];
        glGetProgramInfoLog(shaderProgram, maxLength, &length, log);
        printf("%s\n", log); // print the log verbatim, not as a format string
        delete[] log;
        return NULL_HANDLE;
    }
    return shaderProgram;
}
Here is my vertex shader.
#version 410 core
layout (location = 0) in vec3 inPosition;
layout (location = 3) in vec3 inColor;
layout (location = 3) smooth out vec4 outColor;
void main()
{
    gl_Position = vec4(inPosition, 1.0);
    outColor = vec4(inColor, 1.0);
}
Here is the fragment shader.
#version 410 core
layout (location = 3) smooth in vec4 inColor;
layout (location = 0) out vec4 outColor;
void main()
{
    outColor = inColor;
}
Thanks!!

The names need to match: the name of the output of the vertex shader has to match the name of the input of the fragment shader.
Vertex shader:
#version 410 core
layout (location = 0) in vec3 inPosition;
layout (location = 3) in vec3 inColor;
smooth out vec4 vertOutColor;
void main()
{
    gl_Position = vec4(inPosition, 1.0);
    vertOutColor = vec4(inColor, 1.0);
}
Fragment shader:
#version 410 core
smooth in vec4 vertOutColor;
layout (location = 0) out vec4 outColor;
void main()
{
    outColor = vertOutColor;
}

Related

GLSL error: "v_color" not declared as an output from the previous stage while trying to render with tessellation shaders

I want my renderer to be able to use tessellation shaders, but when I run it, the debugger says
%s
Link info
---------
error: "v_color" not declared as an output from the previous stage
and I do not know what it exactly means.
v_color is the fragment shader's in vec4, which comes from the vertex shader, and the vertex shader gets this value from the VBO like this:
#version 420 core
layout (location = 1) in vec4 a_color;
out vec4 v_color;
void main(void)
{
    gl_Position = //something;
    v_color = a_color;
}
and the fragment shader:
#version 420 core
out vec4 color;
in vec4 v_color;
void main(void)
{
    color = v_color;
}
and the vertex shader gets a_color from a vertex attrib pointer.
Why does it return an error?
Each shader stage passes its outputs to the inputs of the next stage. The vertex shader is followed by the tessellation control shader, which is followed by the tessellation evaluation shader and finally the fragment shader (if there is no geometry shader).
If you have a tessellation shader and you want to pass an attribute from the vertex shader to the fragment shader, then you have to pass the attribute through all shader stages:
e.g.:
Vertex shader:
#version 420 core
layout (location = 1) in vec4 a_color;
out vec4 v_color;
void main(void)
{
    v_color = a_color;
    // .....
}
Tessellation control shader:
#version 420 core
layout(vertices=3) out;
in vec4 v_color[];
out vec4 tc_color[];
void main()
{
    tc_color[gl_InvocationID] = v_color[gl_InvocationID];
    // .....
}
Tessellation evaluation shader:
#version 420 core
layout(triangles) in;
in vec4 tc_color[];
out vec4 te_color;
void main()
{
    te_color = tc_color[0] * gl_TessCoord.x +
               tc_color[1] * gl_TessCoord.y +
               tc_color[2] * gl_TessCoord.z;
    // .....
}
Fragment shader:
#version 420 core
in vec4 te_color;
out vec4 color;
void main(void)
{
    color = te_color;
}
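As a usage note, once a tessellation control shader is part of the program, the draw call has to submit patches instead of triangles. A minimal C++ sketch of the calls inside your existing render code, under that assumption (program, vao and vertexCount are illustrative names, not from the question):
glPatchParameteri(GL_PATCH_VERTICES, 3);  // 3 vertices per patch, matching layout(vertices=3) in the control shader
glUseProgram(program);                    // program with all four shader stages linked
glBindVertexArray(vao);                   // VAO that feeds a_color at attribute location 1
glDrawArrays(GL_PATCHES, 0, vertexCount); // GL_PATCHES, not GL_TRIANGLES
Also remember that with a tessellation control shader attached, gl_TessLevelInner and gl_TessLevelOuter have to be written in that shader (hidden behind the // ..... above), otherwise nothing gets tessellated.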

Can't get OpenGL's glDrawElements to work with a geometry shader

I have attached the shader, but I can't find any info on how to use glDrawElements with a geometry shader attached to the shader program.
The program outputs a quad on the screen without the geometry shader; now I'm trying to do the same with a geometry shader attached.
//In my .cpp file
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
// Vertex shader
#version 440
layout(location = 0) in vec3 vertex_position;
layout(location = 1) in vec3 vertex_color;
uniform mat4 world_matrix;
uniform mat4 view_matrix;
uniform mat4 projection_matrix;
out vec3 color;
void main() {
    color = vertex_color;
    gl_Position = projection_matrix * view_matrix * world_matrix *
                  vec4(vertex_position, 1.0);
}
// Geometry shader
#version 440 core
layout (triangle_strip) in;
layout (triangle_strip, max_vertices = 6) out;
layout(location = 1) in vec3 vertex_color;
out vec3 color;
void main()
{
    for(int i = 0; i < gl_in.length(); i++)
    {
        // copy attributes
        gl_Position = gl_in[i].gl_Position;
        color = vertex_color;
        // done with the vertex
        EmitVertex();
    }
    EndPrimitive();
}
//Fragment shader
#version 440
in vec3 color;
out vec4 fragment_color;
void main () {
    fragment_color = vec4(color, 1.0);
}
See the Khronos Group's handy OpenGL wiki on Shader stage inputs and outputs:
Global variables declared with the in qualifier are shader stage input variables. These variables are given values by the previous stage (possibly via interpolation of values output from multiple shader executions).
Global variables declared with the out qualifier are shader stage output variables. These values are passed to the next stage of the pipeline (possibly via interpolation of values output from multiple shader executions).
Geometry Shader inputs are aggregated into arrays, one per vertex in the primitive. The length of the array depends on the input primitive type used by the GS. Each array index represents a single vertex in the input primitive.
You have a vertex shader, a geometry shader and a fragment shader. In this case the vertex shader is the first shader stage, followed by the geometry shader, and the last shader stage is the fragment shader.
So the input variables of the geometry shader have to match the output variables of the vertex shader, and the input variables of the fragment shader have to match the output variables of the geometry shader.
Further note that the possible input primitive specifiers are points, lines, lines_adjacency, triangles and triangles_adjacency.
See also Geometry Shader - Primitive in/out specification.
This means your code has to look something like this:
Vertex shader:
#version 440
layout(location = 0) in vec3 vertex_position;
layout(location = 1) in vec3 vertex_color;
uniform mat4 world_matrix;
uniform mat4 view_matrix;
uniform mat4 projection_matrix;
out vec3 vert_stage_color;
void main()
{
    vert_stage_color = vertex_color;
    gl_Position = projection_matrix * view_matrix * world_matrix * vec4(vertex_position, 1.0);
}
Geometry shader:
#version 440 core
layout (triangles) in;
layout (triangle_strip, max_vertices = 6) out;
in vec3 vert_stage_color[];
out vec3 geo_stage_color;
void main()
{
    for(int i = 0; i < gl_in.length(); i++)
    {
        // copy attributes
        gl_Position = gl_in[i].gl_Position;
        geo_stage_color = vert_stage_color[i];
        // done with the vertex
        EmitVertex();
    }
    EndPrimitive();
}
Fragment shader:
#version 440
in vec3 geo_stage_color;
out vec4 fragment_color;
void main ()
{
    fragment_color = vec4(geo_stage_color, 1.0);
}
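On the C++ side the draw call itself does not need to change when this geometry shader is attached: since it declares layout (triangles) in, it simply consumes each triangle produced by glDrawElements. A minimal sketch, assuming the quad's VAO and index buffer are already set up (program, vao and indexBuffer are illustrative names):
glUseProgram(program);                               // vertex + geometry + fragment shader linked together
glBindVertexArray(vao);                              // provides vertex_position and vertex_color
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);  // 6 indices for the quad
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); // unchanged: the geometry shader receives triangles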

OpenGL error thrown by fragment shader

I'm writing a 2D game in OpenTK, using OpenGL 4.4. Using colour and texture UV coordinates and a matrix, I can successfully draw textures between vertices with this vertex shader:
public const string vertexShaderDefaultSrc =
#"
#version 330
uniform mat4 MVPMatrix;
layout (location = 0) in vec2 Position;
layout (location = 1) in vec2 Texture;
layout (location = 2) in vec4 Colour;
out vec2 InTexture;
out vec4 OutColour;
void main()
{
    gl_Position = MVPMatrix * vec4(Position, 0, 1);
    InTexture = Texture;
    OutColour = Colour;
}";
and the fragment shader:
public const string fragmentShaderDefaultSrc =
#"
#version 330
uniform sampler2D Sampler;
in vec2 InTexture;
in vec4 OutColour;
out vec4 OutFragColor;
void main()
{
    OutFragColor = texture(Sampler, InTexture) * OutColour;
    //Alpha test
    if(OutFragColor.a <= 0)
        discard;
}
";
BUT if I want to draw just a solid colour rather than a texture, I use this shader (with the same vertices, passing UV coords that won't be used):
public const string fragmentShaderSolidColourSrc =
#"
#version 330
uniform sampler2D Sampler;
in vec2 InTexture;
in vec4 OutColour;
out vec4 OutFragColor;
void main()
{
    OutFragColor = OutColour;
    //Alpha test
    if(OutFragColor.a <= 0)
        discard;
}
";
Now this works beautifully, but OpenGL reports an error, GL_INVALID_VALUE. It draws fine and everything seems to work, but ideally I would like OpenGL to be error-free in that situation, so I can catch real errors. I would appreciate any help, and can share more detail of how the shader is compiled or used if that is helpful. What I don't understand is how the default shader can work but the solid colour one doesn't.
I have tracked down the exact source of the errors in my render call (the shader builds with no problems):
GL.EnableVertexAttribArray(shader.LocationPosition);
GL.VertexAttribPointer(shader.LocationPosition, 2, VertexAttribPointerType.Float, false, Stride, 0);
//-----everything up to here is fine
//this line throws an error
GL.EnableVertexAttribArray(shader.LocationTexture);
//as does this line
GL.VertexAttribPointer(shader.LocationTexture, 2, VertexAttribPointerType.Float, false, Stride, 8);
//this is all ok
GL.EnableVertexAttribArray(shader.LocationColour);
GL.VertexAttribPointer(shader.LocationColour, 4, VertexAttribPointerType.UnsignedByte, true, Stride, 16);
//ok
GL.BindBuffer(BufferTarget.ElementArrayBuffer, indexBuffer);
GL.DrawArrays(DrawType, 0, Vertices.Length);
//ok
GL.DisableVertexAttribArray(shader.LocationPosition);
//this line throws error
GL.DisableVertexAttribArray(shader.LocationTexture);
//this is ok
GL.DisableVertexAttribArray(shader.LocationColour);
It appears to me after some tests (it would be nice to have this verified) that if a variable such as the texture coordinates is not used by the shader, the compiler optimizes it away, so a call to get its location returns -1. Simply checking whether locationTexture was -1 and, if so, skipping the enable/bind calls for it resolved my issues.
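A minimal C++ sketch of that guard, as the equivalent of the OpenTK calls above (shaderProgram, stride and the byte offset are illustrative):
// The linker may strip attributes the shaders never read, in which case
// glGetAttribLocation returns -1, and passing -1 to the calls below raises GL_INVALID_VALUE.
GLint locationTexture = glGetAttribLocation(shaderProgram, "Texture");
if (locationTexture != -1) {
    glEnableVertexAttribArray(locationTexture);
    glVertexAttribPointer(locationTexture, 2, GL_FLOAT, GL_FALSE, stride,
                          reinterpret_cast<const void*>(8)); // UV data starts 8 bytes into the vertex
}
// ... draw ...
if (locationTexture != -1) {
    glDisableVertexAttribArray(locationTexture);
}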

glGetAttribLocation() returns -1 but the variable is a uniform. How to fix it?

This is my shader 'triangles.vert':
#version 430 core
layout(location = 0) in vec4 vPosition;
layout(location = 1) in vec4 vColor;
uniform mat4 vRota;
out vec4 color;
void main()
{
    color = vColor;
    gl_Position = vRota * vPosition;
}
and this is the snippet of the C++ source:
ShaderInfo shaders[] =
{
    { GL_VERTEX_SHADER, "triangles.vert" },
    { GL_FRAGMENT_SHADER, "triangles.frag" },
    { GL_NONE, NULL }
};
GLuint program = LoadShaders(shaders);
glUseProgram(program);
//...
int vRota_loc = glGetAttribLocation(program, "vRota");
if (vRota_loc == -1)
{
cout << "No uniform match for 'vRota'" << endl;
}
It returns -1, but I don't know why.
Am I doing something wrong in the shader?
PS: LoadShaders() is a function for compiling and linking the shader program. It is given as a source file by the book I'm practicing with, so I suppose the error won't be there.
glGetAttribLocation is for vertex attributes, i.e. in variables in the vertex shader (such as vPosition). You want glGetUniformLocation.
A uniform is not an attribute. You need to use glGetUniformLocation.
A uniform is constant across all vertices of a draw call, while an attribute is specified per vertex.
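A minimal sketch of the corrected lookup and upload, assuming rotationMatrix is a column-major GLfloat[16] you already have (the variable name is illustrative):
glUseProgram(program);

// Query the uniform location, not an attribute location.
GLint vRota_loc = glGetUniformLocation(program, "vRota");
if (vRota_loc == -1)
{
    cout << "No uniform match for 'vRota'" << endl;
}
else
{
    // Upload the 4x4 matrix; GL_FALSE because it is already column-major.
    glUniformMatrix4fv(vRota_loc, 1, GL_FALSE, rotationMatrix);
}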

Shader creation OpenGL

I am using OpenGL 3 and the tutorial from BennyBox on YouTube.
Using this method:
static void CheckShaderError(GLuint shader, GLuint flag, bool isProgram, const std::string& errorMessage)
{
    GLint success = 0;
    GLchar error[1024] = {0};

    if(isProgram){
        glGetProgramiv(shader, flag, &success);
    }else{
        glGetShaderiv(shader, flag, &success);
    }

    if(success == GL_FALSE){
        if(isProgram){
            glGetProgramInfoLog(shader, sizeof(error), NULL, error);
        }else{
            glGetShaderInfoLog(shader, sizeof(error), NULL, error);
        }
        std::cerr << errorMessage << ": '" << error << "'" << std::endl;
    }
}
I should be able to load shader files (fragment and vertex shaders). It works with basic shaders, but when I try to modify them to this point:
#version 120
attribute vec3 position;
attribute vec2 texCoord;
varying vec2 texCoord;
void main(){
    gl_Position = vec4(position, 1.0);
    texCoord0 = texCoord;
}
Then I get:
Error compiling shader!: 'ERROR: 0:6: 'texCoord' : redefinition
ERROR: 0:11: 'texCoord0' : undeclared identifier
ERROR: 0:11: 'assign' : cannot convert from 'attribute 2-component vector of float' to 'float'
and the fragment shader:
#version 120
uniform sampler2D diffuse;
varying vec2 texCoord0;
void main(){
    gl_FragColor = texture2D(diffuse, texCoord0);
}
gives:
Unable to load shader: ./res/basicShader.fs
Error linking shader program: 'Attached vertex shader is not compiled.
I have the exact same code as in the video, and it runs fine using the basic coloring shader. I am on Visual Studio 2012.
There are two errors in your vertex shader.
First, you are declaring texCoord twice.
attribute vec2 texCoord;
varying vec2 texCoord;
Then you are trying to use a varying called texCoord0, without ever declaring it.
texCoord0 = texCoord;
Your vertex shader should look like this:
#version 120
attribute vec3 position;
attribute vec2 texCoord;
varying vec2 texCoord0;
void main(){
    gl_Position = vec4(position, 1.0);
    texCoord0 = texCoord;
}
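One follow-up note: since #version 120 has no layout(location = ...) qualifiers, the attribute locations have to be bound or queried on the C++ side. A minimal sketch under that assumption (program is an illustrative name; the attribute names come from the shader above):
// Either bind the locations yourself before linking the program...
glBindAttribLocation(program, 0, "position");
glBindAttribLocation(program, 1, "texCoord");
glLinkProgram(program);

// ...or query whatever locations the linker assigned afterwards.
GLint positionLoc = glGetAttribLocation(program, "position");
GLint texCoordLoc = glGetAttribLocation(program, "texCoord");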