I am trying to add shaders to a GLFW/GLEW program.
I am getting an error saying that the shaders are loaded but don't contain valid object code.
This is the code I have for loading the shaders:
class SHADER {
public:
void LoadShaders(const char *vertexFile, const char *fragmentFile);
char *vertexShader;
char *fragmentShader;
private:
int Load(const char *filename, char*&shaderSource);
fstream file;
};
int SHADER::Load(const char *filename, char *&shaderSource) {
file.open(filename, ios::in);
if(file.is_open()) {
file.tellg();
file.seekg(0,ios::end);
unsigned long len = file.tellg();
file.seekg(ios::beg);
if(len == 0 ) {
return -2;
} else {
shaderSource = new char[len + 1];
file.read(shaderSource,len);
file.close();
printf("%s\n",shaderSource);
}
} else {
return -1;
}
return 0;
}
void SHADER::LoadShaders(const char *vertexFile, const char *fragmentFile) {
int resultVertex = this->Load(vertexFile, vertexShader);
int resultFragment = this->Load(fragmentFile, fragmentShader);
if(resultVertex ==0 && resultFragment ==0) {
printf("Shaders loaded succesfully.\n");
}
if(resultVertex == -2) {
printf("VertexShader is empty!\n");
}
if(resultFragment == -2) {
printf("FragmentShader is empty!\n");
}
if(resultVertex == -1) {
printf("Unable to load VertexShader!\n");
}
if(resultFragment == -1) {
printf("Unable to load FragmentShader!\n");
}
}
This is the code for initializing the shaders:
SHADER Shaders;
GLhandleARB vertexShader, fragmentShader, shaderProgram;
Shaders.LoadShaders("vertexShader.vert","fragmentShader.frag");
const char* vertTemp = Shaders.vertexShader;
vertexShader = glCreateShaderObjectARB(GL_VERTEX_SHADER_ARB);
glShaderSourceARB(vertexShader, 1, &vertTemp, NULL);
glCompileShaderARB(vertexShader);
traceShaderInfoLog(vertexShader);
const char* fragTemp = Shaders.fragmentShader;
fragmentShader = glCreateShaderObjectARB(GL_FRAGMENT_SHADER_ARB);
glShaderSourceARB(fragmentShader, 1, &fragTemp, NULL);
glCompileShaderARB(fragmentShader);
traceShaderInfoLog(fragmentShader);
delete[] Shaders.vertexShader;
delete[] Shaders.fragmentShader;
shaderProgram = glCreateProgramObjectARB();
glAttachObjectARB(shaderProgram,vertexShader);
glAttachObjectARB(shaderProgram,fragmentShader);
glLinkProgramARB(shaderProgram);
traceProgramInfoLog(shaderProgram);
glUseProgramObjectARB(shaderProgram);
These are vertexShader.vert and fragmentShader.frag:
void main(){
gl_Position = ftransform();
}
void main(){
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
And this is the output I get when I print the log. There is a strange symbol at the end of each shader's source, and it changes into a different symbol every time I compile again:
void main(){
//gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
gl_Position = ftransform();
}┘
void main(){
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}♣
Shaders loaded succesfully.
Vertex shader was successfully compiled to run on hardware.
Fragment shader failed to compile with the following errors:
ERROR: 0:3: error(#132) Syntax error: '<' parse error
ERROR: error(#273) 1 compilation errors. No code generated
Fragment shader(s) were not successfully compiled before glLinkProgram() was called.
Link failed.
Your Load function is most likely the problem: as a quick fix, the signature should read
int SHADER::Load(const char *filename, char *&shaderSource) // Note the &
Without the reference, the pointer is passed by value: you allocate memory inside the function and assign it to that local copy, and you load the source to that address, but the new pointer value never leaves the function.
The compiler most likely receives an empty string (due to memory being zero-initialized - are you running a debug build?) and doesn't fail, because an empty shader is sometimes acceptable. But the linker notices that there isn't really any code to link!
When it fails to link, retrieve the link error log, which may have additional clues.
See glGetProgramInfoLog.
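For illustration, here is a minimal sketch of a corrected Load (my reading of the fix, not the asker's final code; it uses a local fstream instead of the member one, and it also null-terminates the buffer, which would explain the stray trailing characters in the printed source):
int SHADER::Load(const char *filename, char *&shaderSource) // pointer passed by reference
{
    fstream file(filename, ios::in);
    if (!file.is_open())
        return -1;
    file.seekg(0, ios::end);
    unsigned long len = file.tellg();
    file.seekg(0, ios::beg);
    if (len == 0)
        return -2;
    shaderSource = new char[len + 1];
    file.read(shaderSource, len);
    shaderSource[file.gcount()] = '\0'; // terminate at the number of characters actually read
    file.close();
    return 0;
}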
Related
I have an OpenGL application which I compiled in the past but now can't on the same machine. The problem seems to be that the fragment shader is not compiling properly.
I'm using:
Glew 2.1.0
Glfw 3.2.1
All the necessary context is created at the beginning of the program. Here's what my program creation function looks like:
std::string vSource, fSource;
try
{
vSource = getSource(vertexShader, "vert");
fSource = getSource(fragmentShader, "frag");
}
catch (std::runtime_error& e)
{
std::cout << e.what() << std::endl;
}
GLuint vsID, fsID;
try
{
vsID = compileShader(vSource.c_str(), GL_VERTEX_SHADER); //Source char* was checked and looking good
fsID = compileShader(fSource.c_str(), GL_FRAGMENT_SHADER);
}
catch (std::runtime_error& e)
{
std::cout << e.what() << std::endl; //incorrect glsl version 450 thrown here
}
GLuint programID;
try
{
programID = createProgram(vsID, fsID); //Debugging fails here
}
catch (std::runtime_error& e)
{
std::cout << e.what() << std::endl;
}
glDeleteShader(vsID);
glDeleteShader(fsID);
return programID;
My main:
/* ---------------------------- */
/* OPENGL CONTEXT SET WITH GLEW */
/* ---------------------------- */
static bool contextFlag = initializer::createContext(vmath::uvec2(1280, 720), "mWs", window);
std::thread* checkerThread = new std::thread(initializer::checkContext, contextFlag);
/* --------------------------------- */
/* STATIC STATE SINGLETON DEFINITION */
/* --------------------------------- */
Playing Playing::playingState; //Failing comes from here which tries to create a program
/* ---- */
/* MAIN */
/* ---- */
int main(int argc, char** argv)
{
checkerThread->join();
delete checkerThread;
Application* app = new Application();
...
return 0;
}
Here is an example of the fragment shader file:
#version 450 core
out vec4 fColor;
void main()
{
fColor = vec4(0.5, 0.4, 0.8, 1.0);
}
And these are the errors I catch:
[Engine] Glew initialized! Using version: 2.1.0
[CheckerThread] Glew state flagged as correct! Proceeding to mainthread!
Error compiling shader: ERROR: 0:1: '' : incorrect GLSL version: 450
ERROR: 0:7: 'fColor' : undeclared identifier
ERROR: 0:7: 'assign' : cannot convert from 'const 4-component vector of float' to 'float'
My specs are the following:
Intel HD 4000
Nvidia GeForce 840M
I should state that I have compiled shaders on this same machine before; I can't anymore after a disk format, even though every driver is up to date.
As stated in the comments, the problem turned out to be which graphics card the IDE was running on. Windows defaults to the integrated Intel HD 4000; setting the NVIDIA card as the one preferred by the OS fixed the problem.
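If you would rather not rely on the driver control panel, a minimal programmatic sketch is to export NVIDIA's documented Optimus hint from the executable (Windows only, and it must live in the .exe itself, not in a DLL):
#include <windows.h>

// Ask the Optimus driver to prefer the discrete NVIDIA GPU over the integrated one.
extern "C" {
    __declspec(dllexport) DWORD NvOptimusEnablement = 0x00000001;
}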
I'm having an issue with compiling GLSL code. When I try to print whether my shader was compiled correctly by using glGetShaderiv(), my program sometimes prints out the wrong result. For example, with this shader (test.vert):
#version 410
void main()
{
}
and using the following code:
#include <GL\glew.h>
#include <GLFW\glfw3.h>
#include <iostream>
#include <fstream>
#include <string>
int main() {
glfwInit();
GLFWwindow* window = glfwCreateWindow(200, 200, "OpenGL", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewInit();
std::string fileText = "";
std::string textBuffer = "";
std::ifstream fileStream{ "test.vert" };
while (fileStream.good()) {
getline(fileStream, textBuffer);
fileText += textBuffer;
}
GLuint vertShaderID = glCreateShader(GL_VERTEX_SHADER);
const char* vertShaderText = fileText.c_str();
glShaderSource(vertShaderID, 1, &vertShaderText, NULL);
glCompileShader(vertShaderID);
GLint vertCompiled;
glGetShaderiv(vertShaderID, GL_COMPILE_STATUS, &vertCompiled);
if (vertCompiled != GL_TRUE) {
std::cerr << "vert shader did not compile." << std::endl;
}
glfwTerminate();
system("PAUSE");
return 0;
}
the program reports that the shader did not compile, although I believe that it should have. I have tested many other shaders, for example by putting a random 'a' or another letter in the middle of a word in the shader code, and I still get incorrect results (that test produced no error output, even though it should have failed).
I have also tried printing out the value of 'fileText' and it was correct (the same as in test.vert). What am I doing wrong?
I'm using a 64-bit Windows system, the supported OpenGL version is 4.40.
getline clips off the \n. That means that your entire file will not have any line breaks. It's all on one line, and therefore looks like this:
#version 410 void main() { }
That's not legal GLSL.
Please stop reading files line-by-line. If you want to read an entire file, then read the entire file.
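For example, a minimal sketch (readWholeFile is an illustrative name, not something from the question) that keeps the newlines intact:
#include <fstream>
#include <sstream>
#include <string>

std::string readWholeFile(const char* path) {
    std::ifstream file(path);     // open the shader source file
    std::ostringstream contents;
    contents << file.rdbuf();     // slurp the whole stream, line breaks included
    return contents.str();
}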
I'm trying to compile simple shader on my linux machine with Radeon HD 5470 video card and fglrx AMD driver.
My vertex shader code
#version 330 core
layout(location = 0) in vec3 vertexPosition_modelspace;
void main()
{
gl_Position.xyz = vertexPosition_modelspace;
gl_Position.w = 1.0;
}
Read code from file
void Shader::load_from_file(const std::string& file)
{
std::ifstream is(file, std::ios_base::in);
if (is.is_open()) {
std::string line{""};
while(std::getline(is, line)) {
// program_code_ is a std::string member
program_code_ += "\n" + line;
}
is.close();
} else {
throw Exception("Could not open shader source code file");
}
}
Try to compile
void Shader::build_shader()
{
const GLchar* tmp = program_code_.c_str();
const GLint tmplen = program_code_.length();
std::cout << "Shader code: " << tmp << std::endl;
glShaderSource(shader_handler_, 1, &tmp, &tmplen);
CHECK_ERR();
glCompileShader(shader_handler_);
CHECK_ERR();
//...
}
And have error from glGetShaderInfoLog
Exception caught: Vertex shader failed to compile with the following errors:
ERROR: 0:1: error(#132) Syntax error: "<" parse error
ERROR: error(#273) 1 compilation errors. No code generated
But before calling glShaderSource, I print the value of the tmp pointer to stdout, and it looks like valid shader code:
Shader code:
#version 330 core
layout(location = 0) in vec3 vertexPosition_modelspace;
void main()
{
gl_Position.xyz = vertexPosition_modelspace;
gl_Position.w = 1.0;
}
My code doesn't read garbage from memory, but I can't understand what's wrong.
Also
% glxinfo | grep vertex_program
% GL_ARB_vertex_program
Reading the file line by line, and concatenating these lines, seems to be the problem.
I don't know how this introduces an error which matches the error message you got from the shader compiler, but as suggested in the comments, reading the whole file at once solves the problem.
The following lines read from a file stream named is by utilizing the function rdbuf and a stringstream (you need to #include <sstream>):
std::ostringstream contents;
contents << is.rdbuf();
program_code_ = contents.str();
For more information about this method, and a comparison to other methods, see http://insanecoding.blogspot.de/2011/11/how-to-read-in-file-in-c.html.
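Put together, a sketch of Shader::load_from_file using this approach (based on the code above, keeping the existing Exception class) could look like:
void Shader::load_from_file(const std::string& file)
{
    std::ifstream is(file, std::ios_base::in);
    if (!is.is_open()) {
        throw Exception("Could not open shader source code file");
    }
    // Read the whole stream at once instead of line by line.
    std::ostringstream contents;
    contents << is.rdbuf();
    program_code_ = contents.str();
}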
It seems I can't have my GLSL shaders compiled. Once in a while (mainly after editing a file), I get following error while compiling:
----- SRC ----- (150 B)
#version 330 core
uniform mat4 mvpMatrix;
in vec4 vertexPosition_modelspace;
void main() {
gl_Position = mvpMatrix * vertexPosition_modelspace;
}
gp!
----- END -----
SimpleTransform.vertexshader:Vertex shader failed to compile with the following errors:
ERROR: 0:10: error(#132) Syntax error: 'gp' parse error
ERROR: error(#273) 1 compilation errors. No code generated
It's quite strange since I swear the file doesn't contain that awkward gp! part. Nevertheless I investigated it with cat
#version 330 core
uniform mat4 mvpMatrix;
in vec4 vertexPosition_modelspace;
void main() {
gl_Position = mvpMatrix * vertexPosition_modelspace;
}
and less
#version 330 core
uniform mat4 mvpMatrix;
in vec4 vertexPosition_modelspace;
void main() {
gl_Position = mvpMatrix * vertexPosition_modelspace;
}
and both of them proved me right.
I wonder what's causing this strange behaviour.
Here's a link to my project. You should be able to compile it easily by entering the src directory and typing make (Linux only). It requires GLFW, GLEW, GLM and GL3.
And the code itself:
Loading shader files
GLuint shader_load(GLenum type, const char filename[]) {
if ((type != GL_VERTEX_SHADER && type != GL_FRAGMENT_SHADER) || !filename) return 0;
/* wczytywanie pliku shadera */
FILE *file = fopen(filename, "rb");
//okreslenie rozmiaru pliku
fseek(file, 0, SEEK_END);
uint32 iFileSize = ftell(file);
fseek(file, 0, SEEK_SET);
//wczytywanie
char *tmp = new char[iFileSize];
memset(tmp, 0, sizeof(tmp));
uint32 iBytes = (uint32) fread(tmp, sizeof(char), iFileSize, file);
fclose(file);
if (iBytes != iFileSize) printf("Warning: reading error possible!\n");
#ifdef _DEBUG_
printf("----- SRC ----- (%d B)\n%s\n----- END -----\n", iBytes, tmp);
#endif
/* przygotowanie shadera */
GLuint shader = glCreateShader(type);
glShaderSource(shader, 1, const_cast<const GLchar**>(&tmp), NULL);
delete[] tmp;
glCompileShader(shader); //kompilacja shadera
/* sprawdzenie statusu kompilacji */
int status = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
int logsize = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logsize);
char *log = new char[logsize];
glGetShaderInfoLog(shader, logsize, NULL, log);
printf("%s:%s", filename, log);
delete[] log;
if (status != GL_TRUE) return 0;
return shader;
}
FIRST OFF: Switch to C++ instead of C-with-a-.cpp-extension to avoid shipwrecks like this.
Analysis:
Running under valgrind shows
==15579== Invalid read of size 1
==15579== at 0x5B95C65: vfprintf (vfprintf.c:1623)
==15579== by 0x5B9E768: printf (printf.c:35)
==15579== by 0x4019C1: shader_load(unsigned int, char const*) (shaders.cpp:88)
==15579== by 0x401B30: program_create(char const*, char const*) (shaders.cpp:120)
==15579== by 0x401D65: main (in /tmp/ogl-jg-3/test)
==15579== Address 0xb3018a6 is 0 bytes after a block of size 150 alloc'd
==15579== at 0x4C2864B: operator new[](unsigned long) (vg_replace_malloc.c:305)
==15579== by 0x401961: shader_load(unsigned int, char const*) (shaders.cpp:81)
==15579== by 0x401B30: program_create(char const*, char const*) (shaders.cpp:120)
==15579== by 0x401D65: main (in /tmp/ogl-jg-3/test)
It tells you exactly that it tries to read beyond the end of the buffer tmp which is allocated in line 81. It seems you are somehow assuming it is null-terminated. Which it isn't. Add that:
//wczytywanie
char *tmp = new char[iFileSize+1];
memset(tmp, 0, (iFileSize+1)*sizeof(char));
uint32 iBytes = (uint32) fread(tmp, sizeof(char), iFileSize, file);
fclose(file);
if (iBytes != iFileSize) printf("Warning: reading error possible!\n");
#ifdef _DEBUG_
printf("----- SRC ----- (%d B)\n%s\n----- END -----\n", iBytes, tmp);
#endif
And I get semi-decent output. The GL window stays blank, though.
Update
To make it clearer what I meant by "switch to C++", here's the idea:
GLuint shader_load(GLenum type, const char filename[]) {
if ((type != GL_VERTEX_SHADER && type != GL_FRAGMENT_SHADER) || !filename) return 0;
GLuint shader = glCreateShader(type);
std::string src;
{
/* wczytywanie pliku shadera */
std::ifstream ifs(filename, std::ios::binary);
if (!std::getline(ifs, src, '\0'))
std::cerr << "Warning: reading error possible!\n";
}
#ifdef _DEBUG_
std::cout << "----- SRC ----- " << src.size() << " B \n" << src << "\n----- END -----\n";
#endif
/* przygotowanie shadera */
const GLchar* sources[] = { src.c_str() };
glShaderSource(shader, 1, sources, NULL);
glCompileShader(shader); //kompilacja shadera
I'm having a problem with my shader loading code. The bizarre thing that's confusing me is that it works maybe once in 5 times, but then only sort of works. For instance, it'll load the frag shader, but then texturing won't work properly (it'll draw a strange semblance of the texture over the geometry instead). I think the problem is with the loading code, so that's what my question is about. Can anyone spot an error I haven't found in the code below?
char* vs, * fs;
vertexShaderHandle = glCreateShader(GL_VERTEX_SHADER);
fragmentShaderHandle = glCreateShader(GL_FRAGMENT_SHADER);
long sizeOfVShaderFile = getSizeOfFile(VERTEX_SHADER_FILE_NAME);
long sizeOfFShaderFile = getSizeOfFile(FRAGMENT_SHADER_FILE_NAME);
if(sizeOfVShaderFile == -1)
{
cerr << VERTEX_SHADER_FILE_NAME<<" is null! Exiting..." << endl;
return;
}
if(sizeOfFShaderFile == -1)
{
cerr << FRAGMENT_SHADER_FILE_NAME<<" is null! Exiting..." << endl;
return;
}
vs = readFile(VERTEX_SHADER_FILE_NAME);
fs = readFile(FRAGMENT_SHADER_FILE_NAME);
const char* vv = vs, *ff = fs;
glShaderSource(vertexShaderHandle , 1, &vv, NULL);
cout << "DEBUGGING SHADERS" << endl;
cout << "VERTEX SHADER: ";
printShaderInfoLog(vertexShaderHandle);
cout << endl;
glShaderSource(fragmentShaderHandle, 1, &ff, NULL);
cout << "FRAGMENT SHADER: ";
printShaderInfoLog(fragmentShaderHandle);
cout << endl;
glCompileShader(vertexShaderHandle);
cout << "VERTEX SHADER: ";
printShaderInfoLog(vertexShaderHandle);
cout << endl;
glCompileShader(fragmentShaderHandle);
cout << "FRAGMENT SHADER: ";
printShaderInfoLog(fragmentShaderHandle);
cout << endl;
programHandle = glCreateProgram();
cout << "DEBUGGING PROGRAM" << endl;
glAttachShader(programHandle, vertexShaderHandle);
printProgramInfoLog(programHandle);
glAttachShader(programHandle, fragmentShaderHandle);
printProgramInfoLog(programHandle);
glLinkProgram(programHandle);
printProgramInfoLog(programHandle);
glUseProgram(programHandle);
printProgramInfoLog(programHandle);
delete[] vs; delete[] fs;
Here's the readFile function:
char* readFile(const char* path)
{
unsigned int fileSize = getSizeOfFile(path);
char* file_data = new char[fileSize];
ifstream input_stream;
input_stream.open(path, ios::binary);
input_stream.read(file_data, fileSize);
input_stream.close();
//this is deleted at the end of the shader code
return file_data;
}
All of the below messages are from the exact same executable (no rebuild).
Here's the first possible error message:
BallGLWidget::initializeGL called
DEBUGGING SHADERS
VERTEX SHADER:
FRAGMENT SHADER:
VERTEX SHADER: ERROR: 0:17: '<' : syntax error syntax error
FRAGMENT SHADER:
DEBUGGING PROGRAM
ERROR: One or more attached shaders not successfully compiled
ERROR: One or more attached shaders not successfully compiled
glGetError enum value: GL_NO_ERROR
Another possible error message:
BallGLWidget::initializeGL called
DEBUGGING SHADERS
VERTEX SHADER:
FRAGMENT SHADER:
VERTEX SHADER: ERROR: 0:17: 'tt' : syntax error syntax error
FRAGMENT SHADER: ERROR: 0:33: '?' : syntax error syntax error
DEBUGGING PROGRAM
ERROR: One or more attached shaders not successfully compiled
ERROR: One or more attached shaders not successfully compiled
Here's the output when it works (maybe 1 in 5 or 6 times)
BallGLWidget::initializeGL called
DEBUGGING SHADERS
VERTEX SHADER:
FRAGMENT SHADER:
VERTEX SHADER:
FRAGMENT SHADER:
DEBUGGING PROGRAM
Image format is GL_RGB
Checking textures...
glGetError enum value: GL_NO_ERROR
I seriously doubt it's the shaders themselves, since they do work sometimes... and the reported errors are garbage.
If any more information would be helpful I'll gladly provide it.
EDIT: Here's the shaders
The vertex shader:
attribute vec2 a_v_position;
attribute vec2 a_tex_position;
varying vec2 tex_coord_output;
void main()
{
tex_coord_output = a_tex_position;
gl_Position = vec4(a_v_position, 0.0, 1.0);
}
The fragment shader:
varying vec2 tex_coord_output;
uniform sampler2D ballsampler;
void main()
{
gl_FragColor = texture2D(ballsampler, tex_coord_output);
}
Your question is a duplicate of Getting garbage chars when reading GLSL files and here's my answer to it:
You're using C++, so I suggest you leverage that. Instead of reading into a self-allocated char array, I suggest you read into a std::string:
#include <string>
#include <fstream>
std::string loadFileToString(char const * const fname)
{
std::ifstream ifile(fname);
std::string filetext;
while( ifile.good() ) {
std::string line;
std::getline(ifile, line);
filetext.append(line + "\n");
}
return filetext;
}
That automatically takes care of all memory allocation and proper delimiting -- the keyword is RAII: Resource Acquisition Is Initialization. Later on you can upload the shader source with something like
void glcppShaderSource(GLuint shader, std::string const &shader_string)
{
GLchar const *shader_source = shader_string.c_str();
GLint const shader_length = shader_string.size();
glShaderSource(shader, 1, &shader_source, &shader_length);
}
void load_shader(GLuint shaderobject, char * const shadersourcefilename)
{
glcppShaderSource(shaderobject, loadFileToString(shadersourcefilename));
}
You are reading the files, but as far as I can see you are not zero-terminating the text. Try allocating filesize + 1 bytes and setting the last char to zero.
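Applied to the readFile function from the question, a minimal sketch of that fix (only the allocation size and the added terminator change) might be:
char* readFile(const char* path)
{
    unsigned int fileSize = getSizeOfFile(path);
    char* file_data = new char[fileSize + 1];   // one extra byte for the terminator
    ifstream input_stream;
    input_stream.open(path, ios::binary);
    input_stream.read(file_data, fileSize);
    input_stream.close();
    file_data[fileSize] = '\0';                 // glShaderSource with NULL lengths expects a C string
    //this is deleted at the end of the shader code
    return file_data;
}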