I am clearly misunderstanding something pretty simple here to do with GLSL and all Google results point to the obvious answer that I'm not using the variable I'm trying to find and it has been optimised out - However I am using the variable in question. Consider the following very basic shaders:
Vertex shader
attribute vec2 TexCoord;
varying vec2 TexCoordA;
void main(){
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
TexCoordA = TexCoord;
}
Fragment shader
varying vec2 TexCoordA;
void main(){
gl_FragColor = vec3(TexCoordA.x, TexCoordA.y, 0);
}
They compile and link fine- no errors. However using "glGetAttribLocation" returns -1 when I try and find the location of "TexCoord". If I use TexCoordA for another purpose (such as a call to "texture2D()") then I am able to find the location of TexCoord correctly.
Why does this matter you're probably asking (because why else would you use UV coords for anything other than a texture call)? I am trying to render one pixel into a frame buffer for all the UV coordinates and then read them back again on a second pass - this is the only way I can guarantee the results I'm looking for.
TL;DR
Why does "glGetAttribLocation" return -1 for the above shaders given they compile and link without a problem?
Requested information about code surrounding the problem area as follows (I am loading about 20-25 other shaders the same way so I'm confident the problem isn't here):
Problem lines:
mPassOneProgram = LoadShader("PCT_UV_CORRECTION_PASS_1.vert", "PCT_UV_CORRECTION_PASS_1.frag");
mPassOneUVLocation = glGetAttribLocation(mPassOneProgram, "TexCoord");
Shader loader code:
GLuint LoadShader(const char *vertex_path, const char *fragment_path) {
GLuint vertShader = glCreateShader(GL_VERTEX_SHADER);
GLuint fragShader = glCreateShader(GL_FRAGMENT_SHADER);
// Read shaders
std::string vertShaderStr = readFile(vertex_path);
std::string fragShaderStr = readFile(fragment_path);
const char *vertShaderSrc = vertShaderStr.c_str();
const char *fragShaderSrc = fragShaderStr.c_str();
GLint result = GL_FALSE;
int logLength;
// Compile vertex shader
std::cout << "Compiling vertex shader." << std::endl;
glShaderSource(vertShader, 1, &vertShaderSrc, NULL);
glCompileShader(vertShader);
// Check vertex shader
glGetShaderiv(vertShader, GL_COMPILE_STATUS, &result);
glGetShaderiv(vertShader, GL_INFO_LOG_LENGTH, &logLength);
std::vector<char> vertShaderError(logLength);
glGetShaderInfoLog(vertShader, logLength, NULL, &vertShaderError[0]);
std::cout << &vertShaderError[0] << std::endl;
OutputDebugString(&vertShaderError[0]);
// Compile fragment shader
std::cout << "Compiling fragment shader." << std::endl;
glShaderSource(fragShader, 1, &fragShaderSrc, NULL);
glCompileShader(fragShader);
// Check fragment shader
glGetShaderiv(fragShader, GL_COMPILE_STATUS, &result);
glGetShaderiv(fragShader, GL_INFO_LOG_LENGTH, &logLength);
std::vector<char> fragShaderError(logLength);
glGetShaderInfoLog(fragShader, logLength, NULL, &fragShaderError[0]);
std::cout << &fragShaderError[0] << std::endl;
OutputDebugString(&vertShaderError[0]);
std::cout << "Linking program" << std::endl;
GLuint program = glCreateProgram();
glAttachShader(program, vertShader);
glAttachShader(program, fragShader);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &result);
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
std::vector<char> programError( (logLength > 1) ? logLength : 1 );
glGetProgramInfoLog(program, logLength, NULL, &programError[0]);
std::cout << &programError[0] << std::endl;
OutputDebugString(&vertShaderError[0]);
glDeleteShader(vertShader);
glDeleteShader(fragShader);
return program;
}
Managed to solve this by doing
gl_FrontColor = vec3(TexCoord.x, TexCoord.y, 0)
in the Vertex shader and
gl_FragColor = gl_Color;
in the Fragment shader.
Which is essentially the same thing and I still don't understand why it wasn't working before. I'm gonna put this one down to a bug in the compiler as nobody else seems to be able to find a problem.
glGetShaderiv(vertShader, GL_COMPILE_STATUS, &result);
...
glGetShaderiv(fragShader, GL_COMPILE_STATUS, &result);
...
glGetProgramiv(program, GL_LINK_STATUS, &result);
Each of these should be followed by a check to ensure that result is equal to GL_TRUE, otherwise the shader hasn't properly compiled. See here for a complete shader / program set of classes.
Related
I am trying to write a function for loading and compiling a shader using OpenGl, but the shader refuses to compile while giving me an empty (or random) error log.
The following is my minimal example:
vertexShader = glCreateShader( GL_VERTEX_SHADER );
std::string vertexSource = ShaderLoader::load("vertexShader.vert");
const char * vertexAdress = vertexSource.c_str();
glShaderSource( vertexShader, 1, &vertexAdress, NULL );
int shaderCompiled = GL_FALSE;
char errorLog[512];
glCompileShader(vertexShader);
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &shaderCompiled );
glGetShaderInfoLog(vertexShader, 512, nullptr, &errorLog[0]);
glDeleteShader(vertexShader);
Outputting shaderCompiled gives 0 and errorLog gives either an empty string or a few random characters.
The function ShaderLoader::load contains the following code:
std::string ShaderLoader::load(std::string source) {
std::string shader;
std::fstream readFile(source, std::ios::in);
if (readFile.is_open()){
std::stringstream buffer;
buffer << readFile.rdbuf();
shader = buffer.str() + "\0";
} else {
throw std::runtime_error( "Couldn't load shader file: " + source);
}
return shader;
}
It seems to be working as expected. The shader source code is
#version 330 core
layout (location = 0) in vec3 inVertex;
layout (location = 1) in vec3 inNormal;
out vec3 FragmentPosition;
out vec3 Normal;
uniform mat4 transform;
void main()
{
FragmentPosition = vec3(inVertex);
Normal = mat3(transpose(inverse(transform))) * inNormal;
gl_Position = transform * vec4( inVertex, 1 );
}
How can I populate errorLog with an actual error message, or even better, get the shader to compile.
First only use error logging if shaderCompiled is false so you don't get random strings,
glGetShaderiv(VS, GL_COMPILE_STATUS, &shaderCompiled);
if (!shaderCompiled)
{
//error
GLchar InfoLog[256];
glGetShaderInfoLog(vertexShader, sizeof(InfoLog), NULL, InfoLog);
Second, are you actually linking and using the shaders, the problem might not be with the compilation:
// after compilation
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glLinkProgram(shaderProgram);
glDetachShader(shaderProgram, vertexShader);
glDeleteShader(vertexShader);
glGetProgramiv(shaderProgram, GL_LINK_STATUS, &success);
I'm writing an openGL program using the wxWidgets library, I have it mostly working, but I am getting shader compilation errors due to bad characters being inserted (I think), only I can't find where the characters are or what is causing them. The error is :
error 0(1) : error C0000: syntax error, unexpected $undefined at token "<undefined>"
I'm not sure why though, since I can't see any errors when I cout the string. Here is what is being fed to glshadersource() :
#version 430 core
layout(location =0) in vec3 vpos;
out vec3 fragmentColor;
uniform mat4 MVP;
void main(void)
{
gl_Position = MVP * vec4(vpos,1);
};
Here are the shader compiler functions I use:
void GL_set::compileAndLink()
{
GLint Result = GL_FALSE;
int InfoLogLength = 0;
// Create vertex shader, attach source code, compile
vertexShader = glCreateShader(GL_VERTEX_SHADER);
const GLchar* adapter[1];
adapter[0] = readShaderCode("Vshader.glsl").c_str();
glShaderSource(vertexShader, 1, adapter, NULL);
glCompileShader(vertexShader);
// Check vertex shader for errors
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &Result);
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &InfoLogLength);
if (InfoLogLength > 0)
{
char vertexError[1000];
glGetShaderInfoLog(vertexShader, InfoLogLength, NULL, &vertexError[0]);
std::cout << &vertexError[0];
}
// Create fragment shader, attach source code, compile
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
adapter[0] = readShaderCode("Fshader.glsl").c_str();
glShaderSource(fragmentShader, 1, adapter, NULL);
glCompileShader(fragmentShader);
// Check fragment shader for errors
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &Result);
glGetShaderiv(fragmentShader, GL_INFO_LOG_LENGTH, &InfoLogLength);
if (InfoLogLength > 0)
{
char fragmentError[1000];
glGetShaderInfoLog(fragmentShader, InfoLogLength, NULL,
&fragmentError[0]);
std::cout << &fragmentError[0];
}
// Create program and attach shaders
program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glLinkProgram(program);
//check program for errors
glGetProgramiv(program, GL_LINK_STATUS, &Result);
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &InfoLogLength);
if (InfoLogLength > 0)
{
char programError[1000];
glGetProgramInfoLog(program, InfoLogLength, NULL, &programError[0]);
std::cout << &programError[0];
}
}
std::string GL_set::readShaderCode(const char* fileName)
{
printf("passing %s to readShaderCode() \n", fileName);
std::ifstream input(fileName);
if(!input.good())
{
std::cout << "failed to load file " << fileName;
exit(1);
}
std::string code = std::string(
std::istreambuf_iterator<char>(input),
std::istreambuf_iterator<char>()
);
// output the code to cout for error checking
std::cout << "Read shader input: \n" << code << "\n" << std::endl;
return code;
}
Here is the full console output:
normal display attribs are supported
calling gl_init()
gl_set construtor called
setting buffers
passing Vshader.glsl to readShaderCode()
Read shader input:
#version 430 core
layout(location =0) in vec3 vpos;
out vec3 fragmentColor;
uniform mat4 MVP;
void main(void)
{
//output position of the vertex in clip space MVP*position
gl_Position = MVP * vec4(vpos,1);
};
0(1) : error C0000: syntax error, unexpected $undefined at token "<undefined>"
passing Fshader.glsl to readShaderCode()
Read shader input:
#version 430 core
in vec3 fragmentColor;
out vec3 color;
void main()
{
color = fragmentColor;
};
0(1) : error C0000: syntax error, unexpected $undefined at token "<undefined>"
Vertex info
-----------
0(1) : error C0000: syntax error, unexpected $undefined at token "
<undefined>"
(0) : error C2003: incompatible options for link
Fragment info
-------------
0(1) : error C0000: syntax error, unexpected $undefined at token "<undefined>"
(0) : error C2003: incompatible options for link
Loading OBJ file mod.obj...
Here is the rest of the class:
#include "GL_set.h"
GL_set::GL_set()
{
std::cout << "gl_set construtor called" << std::endl;
GL_set::setBuffers();
}
void GL_set::setBuffers()
{
std::cout << "setting buffers" << std::endl;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
//create shaders and attach them to the program object
GL_set::compileAndLink();
loadOBJ("mod.obj", vdata); // use OBJ loader
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, vdata.size()*sizeof(glm::vec3), &vdata[0], GL_STATIC_DRAW);
//vertex buffer
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(
0, //index
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
0 //array buffer offset
);
}
void GL_set::draw()
{
glBindVertexArray(vao);
GLuint matrixID = glGetUniformLocation(program, "MVP");
////////////////////////////matrix operations/////////////////////////////////////////
//projection matrix 45 degree FoV, 4:3 ratio, display range 0.1 - 100
glm::mat4 projection = glm::perspective(45.0f, 4.0f/3.0f, 0.1f, 100.0f);
//camera matrix
glm::mat4 view = glm::lookAt(
glm::vec3(8, 8, 8), //camera posiiton
glm::vec3(0, 1, 0), //camera looks at this point
glm::vec3(0, 1, 0) //head up position
);
//model matrix identity matrix
glm::mat4 model = glm::mat4(1.0f);
//rotate
model = glm::rotate(model, 1.0f, glm::vec3(1,1,1));
//model-view-projection
glm::mat4 MVP = projection * view * model;
/////////////////////////////////////////////////////////
glUniformMatrix4fv(matrixID, 1, GL_FALSE, &MVP[0][0]);
glDrawArrays(GL_TRIANGLES, 0, vdata.size()*sizeof(glm::vec3));
}
I think the wxWidgets related code I have is okay, the window interface loads with a blank white screen, presumably caused by the shader errors. I can post more if needed, thanks.
The std::string returned by readShaderCode only lives for the duration of the .c_str() call. After that, the std::string implementation is allowed to free the memory, leaving your adapter[0] point to memory that has just been freed (a use-after-free).
You should assign the result of readShaderCode to a local std::string variable such that the memory is only freed at the end of the function. You can then safely store the result of .c_str() into adapter, knowing that the memory has not been freed yet.
I recently started working on Vertex and Fragment shaders in GLEW. For some reason I am getting a failed to compile shader error.
The error I am getting is as follow:
Failed to compile vertex shader: Vertex shader failed to compile with the following errors:
ERROR: 0:3 error (#12) Unexpected qualifier.
ERROR: 0:3 error(#132) Syntax error: "position" parse error
ERROR: error(#273) 2 compilation errors. No code generated.
The text document that contain the code for the shader is:
#version 330 core
layout (location = 0) in Vector4 position;
uniform Matrix pr_matrix;
uniform Matrix vw_matrix = Matrix(1, 0);
uniform Matrix ml_matrix = Matrix(1, 0);
void main()
{
gl_Position = /*pr_matrix **/ position;
}
And the code that compile the shader is:
GLuint Shader::load() {
GLuint program = glCreateProgram();
GLuint vertex = glCreateShader(GL_VERTEX_SHADER);
GLuint fragment = glCreateShader(GL_FRAGMENT_SHADER);
std::string vertexSourceString = File::read_file(mVertex);
std::string fragmentSourceString = File::read_file(mFragment);
const char* vertexSource = vertexSourceString.c_str();
const char* fragmentSource = fragmentSourceString.c_str();
glShaderSource(vertex, 1, &vertexSource, NULL);
glCompileShader(vertex);
GLint result;
glGetShaderiv(vertex, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE) {
GLint length;
glGetShaderiv(vertex, GL_INFO_LOG_LENGTH, &length);
std::vector<char> error(length);
glGetShaderInfoLog(vertex, length, &length, &error[0]);
std::cout << "Failed to compile vertex shader: " << &error[0] << std::endl;
glDeleteShader(vertex);
return 0;
}
glShaderSource(fragment, 1, &fragmentSource, NULL);
glCompileShader(fragment);
glGetShaderiv(fragment, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE) {
GLint length;
glGetShaderiv(fragment, GL_INFO_LOG_LENGTH, &length);
std::vector<char> error(length);
glGetShaderInfoLog(fragment, length, &length, &error[0]);
std::cout << "Failed to compile fragment shader: " << &error[0] << std::endl;
glDeleteShader(fragment);
return 0;
}
glAttachShader(program, vertex);
glAttachShader(program, fragment);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vertex);
glDeleteShader(fragment);
return program;
}
I think something is wrong with the vertex, if anyone can help I will appreciate it. Thanks.
The strings Vertex4 and Matrix have no meaning for GLSL. Types in GLSL are things like vec4 and mat4
After following a set of OpenGL tutorials which were great but didn't let me understand the basics, I'm trying some very basic OpenGL coding with C++.
My program is supposed to read a vertex and fragment shader and draw a triangle.
I get an error when linking the shaders (I suspect the error can be tracked down to the compiling of the shader though). I know my shaders are read by my program, but any changes to them doesn't affect my error. By running:
glGetProgramiv(ShaderProgram, GL_LINK_STATUS, &Success);
I receive the error: "Link called without any attached shader object". The programs builds, and my triangle shows, but is not affected by the shaders.
UPDATE
I no longer get the above error after fixing a mistake. I now get a complain after glCompileShader():
"Error: 0:3 'location' : syntax error parse error"
So I imagine it has to do with my shader files (will add them below). The shader files are taken from a tutorial, so I assumed they would work.
Shader files:
Vertex shader:
#version 330
layout (location = 0) in vec3 Position;
void main()
{
gl_Position = vec4(0.5*Position.x, 0.5*Position.y, Position.z, 1.0);
}
Fragment shader:
#version 330
out vec4 FragColor;
void main()
{
FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
In my main function i run:
compileShader();
Attach shaders:
static void AddShader(GLuint ShaderProgram, GLenum ShaderType, std::string filePath){
//create shader object
GLuint ShaderObj = glCreateShader(ShaderType);
//error if no shader
if (ShaderObj == 0){
fprintf(stderr, "Error creating shader type %d\n", ShaderType);
exit(0);
}
//"specify source code"
//readShaderFile returns the shader file as a string
std::string shaderFile = readShaderFile(filePath);
const char* shaderFilePointer = shaderFile.c_str();
GLint ShaderFileLength[1];
ShaderFileLength[0] = strlen(shaderFilePointer);
glShaderSource(ShaderObj, 1, &shaderFilePointer, ShaderFileLength);
//compile the shader
glCompileShader(ShaderObj);
//check if compile successful
GLint success;
glGetShaderiv(ShaderObj, GL_COMPILE_STATUS, &success);
if (!success){
GLchar InfoLog[1024];
glGetShaderInfoLog(ShaderObj, sizeof(InfoLog), NULL, InfoLog);
fprintf(stderr, "Error compiling shader type %d: '%s'\n", ShaderType, InfoLog);
exit(1);
}
glAttachShader(ShaderProgram, ShaderObj);
}
And here are the functions used:
static void compileShaders(){
//create program
GLuint ShaderProgram = glCreateProgram();
//check error
if (ShaderProgram == 0){
fprintf(stderr, "Error creating shader program!\n");
exit(1);
}
//attach compiled shaders
std::string vertexShaderFilePath = "Shaders/colShading.vert";
std::string fragShaderFilePath = "Shaders/colShading.frag";
AddShader(ShaderProgram, GL_VERTEX_SHADER, vertexShaderFilePath);
AddShader(ShaderProgram, GL_FRAGMENT_SHADER, fragShaderFilePath);
GLint Success = 0;
GLchar ErrorLog[1024] = { 0 };
//link shader to program
glLinkProgram(ShaderProgram);
//check link error
glGetProgramiv(ShaderProgram, GL_LINK_STATUS, &Success);
if (Success == 0) {
glGetProgramInfoLog(ShaderProgram, sizeof(ErrorLog), NULL, ErrorLog);
fprintf(stderr, "Error linking shader program: '%s'\n", ErrorLog);
}
//use the linked shader program
glUseProgram(ShaderProgram);
}
I doubt more of the code is needed to find the problem here, but just let me know. Thanks a lot in advance!
SOLUTION
The first answer below did the trick. I removed:
layout (location = 0)
from the vertex shader and then added:
glBindAttribLocation(ShaderProgram, 0, "Position");
before the linking.
It seems that either your hardware or your driver doesn't support the explicit shader location syntax, which require OpenGL/GLSL > 3.3.
To solve this, if samgak's answer doesn't help, you still have two options:
Explicitly set the locations before linking the shader:
This is done with the glBindAttribLocation function, and means basically the same as what you have in your shader. Example:
AddShader(ShaderProgram, GL_VERTEX_SHADER, vertexShaderFilePath);
AddShader(ShaderProgram, GL_FRAGMENT_SHADER, fragShaderFilePath);
//...
//Define the location of the attributes
glBindAttribute(ShaderProgram, 0, "Position") //Bind "in Position" to location 0
//link shader to program
glLinkProgram(ShaderProgram);
After linking, and while building your VAOs (or drawing the geometry), query the location of the attributes, and set the buffers accordingly:
You do this with the glGetAttribLocation function.
GLuint positionLoc = glGetAttribLocation(ShaderProgram, "Position");// Ask for the location of the attribute "Position
//Create buffers and transfer data...
glBindBuffer(gl.ARRAY_BUFFER, ...);
glBufferData(...);
//Turns on the vertex attribute for "Position"
glEnableVertexAttribArray(positionLoc);
//Set the pointer between buffer and attribute
glVertexAttribPointer(positionLoc,...);
For performance reasons, the first option is recommended, as it doesn't force a flush.
I'm trying to set up a simple vertex shader. When I compile it, it fails (according to GL_COMPILE_STATUS), but the info log is empty, leaving me nothing to work with from a debugging standpoint.
Here is my code:
GLuint vertexShader, fragmentShader;
char *code =
"void main(void)\
{\
vec4 a = gl_Vertex;\
a.x = a.x * 0.5;\
a.y = a.y * 0.5;\
gl_Position = gl_ModelViewProjectionMatrix * a;\
}";
int length = strlen(code);
vertexShader = glCreateShader(GL_VERTEX_SHADER);
//fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vertexShader, 1, &code, &length);
glCompileShader(GL_VERTEX_SHADER);
GLint compiled;
GLint logLength;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &compiled);
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH , &logLength);
if (logLength > 1)
{
GLchar* compiler_log = (GLchar*)malloc(logLength);
glGetShaderInfoLog(vertexShader, logLength, 0, compiler_log);
printf("%s\n", compiler_log);
free (compiler_log);
}
I'm aware that this is pretty messy, and since I'm new to shaders I'm sure there's something wrong with my shader code. However, the thing that is bugging the hell out of me is the lack of debugging information. The code above results in compiled being GL_FALSE (meaning the code failed to compile) and logLength being 0 (meaning the info log is empty). What gives? Isn't that the whole point of the info log?
glCompileShader take a shader number returned from glCreateShader not a shader type.
In this case
glCompileShader(GL_VERTEX_SHADER);
should be
glCompileShader(vertexShader);