GLSL loading core dumped - opengl

I am compiling this project on a linux machine. After fixing all the include and lib errors, I get a core dumped error. but if I comment out the initGLSL line, it runs well. The initGLSL function is given below.
// Loads and compiles the vertex/fragment GLSL pair into the viewer's shader.
// NOTE(review): this calls into OpenGL via Shader::load — a current GL
// context must exist AND the GL entry points must be resolved (e.g. via
// glewInit() on Linux) before this runs, or the gl* calls crash.
void Viewer :: initGLSL( void )
{
shader.loadVertex( "shaders/vertex.glsl" );
shader.loadFragment( "shaders/fragment.glsl" );
}
vertex.glsl:
// Legacy (compatibility-profile) vertex shader: transforms the vertex by
// the fixed-function modelview-projection matrix, passes the vertex color
// through, and forwards object-space position/normal to the next stage.
varying vec3 position;
varying vec3 normal;
void main()
{
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
gl_FrontColor = gl_Color;
position = gl_Vertex.xyz;
normal = gl_Normal.xyz;
}
I don't know how to debug this program. How can I check if this error is because of the GLSL, or the loader function?
---------------Thanks for your comment------------
The load function:
// Thin wrappers: load and compile one shader stage from a file, storing
// the resulting shader object id in the corresponding member.
void Shader::loadVertex( const char* filename )
{
load( GL_VERTEX_SHADER, filename, vertexShader );
}
void Shader::loadFragment( const char* filename )
{
load( GL_FRAGMENT_SHADER, filename, fragmentShader );
}
void Shader::load( GLenum shaderType, const char* filename, GLuint& shader )
// read vertex shader from GLSL source file, compile, and attach to program
// NOTE(review): every gl* call here needs a current OpenGL context AND
// resolved entry points. The SIGSEGV at address 0x0 right at
// glCreateProgram() (see the gdb transcript below) means that function
// pointer was never loaded — call glewInit() (or an equivalent loader)
// after creating the context and before calling this.
{
string source;
if( !readSource( filename, source ))
{
return; // file unreadable: silently keep the previous shader, if any
}
if( program == 0 )
{
program = glCreateProgram(); // lazily create the program object once
}
if( shader != 0 )
{
// replacing a previously loaded stage: detach the old object first
// NOTE(review): the old shader object is never glDeleteShader'd — leak
glDetachShader( program, shader );
}
shader = glCreateShader( shaderType );
const char* source_c_str = source.c_str();
glShaderSource( shader, 1, &(source_c_str), NULL );
glCompileShader( shader );
GLint compileStatus;
glGetShaderiv( shader, GL_COMPILE_STATUS, &compileStatus );
if( compileStatus == GL_TRUE )
{
glAttachShader( program, shader );
linked = false; // force a re-link before the program is next used
}
else
{
// compilation failed: dump the driver's info log to stderr
GLsizei maxLength = 0;
glGetShaderiv( shader, GL_INFO_LOG_LENGTH, &maxLength );
if( maxLength > 0 )
{
GLchar* infoLog = new char[ maxLength ];
GLsizei length;
glGetShaderInfoLog( shader, maxLength, &length, infoLog );
cerr << "GLSL Error: " << infoLog << endl;
delete[] infoLog;
}
// NOTE(review): the failed shader object is not deleted and `shader`
// still holds its id, so callers cannot tell the load failed.
}
}
and when I tried debugging with gdb, I get the msg:
(gdb) p (filename)
$1 = 0x482e41 "shaders/vertex.glsl"
(gdb) n
77 if( program == 0 )
(gdb) n
79 program = glCreateProgram();
(gdb) n
Program received signal SIGSEGV, Segmentation fault.
0x0000000000000000 in ?? ()

Have you definitely created an OpenGL context before calling shader.load*?
E.g. one of these, if you're using the corresponding library:
glutInit
glutCreateWindow
glfwInit
glfwCreateWindow
SDL_Init
SDL_CreateWindow
SDL_GL_CreateContext
I don't have much experience with this, but some GL functions aren't linked at runtime by default - glCreateProgram may still be NULL after creating a context. I use GLEW to do this for me (glewInit()), but there are other ways.

Related

Emscripten fails to compile shader

I'm using emscripten with C++ 17 and WebGL 1.0 to try to draw some basic things in OpenGL. But I can't seem to get past shader compilation. I tried many different kinds of shaders to eliminate the possibility of the shader itself being the issue.
My assumption is that it is the encoding of the shader text but I can't seem to figure it out. Maybe someone already faced this and can provide a solution.
Compiled with:
-s ENVIRONMENT=web
-s TOTAL_MEMORY=2147483648
-s MODULARIZE=1
-s EXPORT_ES6=1
-s DEMANGLE_SUPPORT=1
-s USE_PTHREADS=0
-s GL_ASSERTIONS=1
-s GL_DEBUG=1
--preload-file ./assets#/
If you're trying to build a minimal program, the file names in assets are standard.vert and standard.frag
Shaders
Vertex shader:
precision mediump float;
attribute vec2 vertPosition;
attribute vec3 vertColor;
varying vec3 fragColor;
void main() {
fragColor = vertColor;
gl_Position = vec4(vertPosition, 0.0, 1.0);
}
Fragment Shader:
precision mediump float;
varying vec3 fragColor;
void main() {
gl_FragColor = vec4(fragColor, 1.0);
}
Loader
readFile
// Reads the full contents of `file` (from the beginning) into a string.
//
// Fixes vs. original:
//  - writes through &buffer[0] instead of casting away const from c_str();
//  - checks ftell() for failure (returns an empty string on error);
//  - shrinks the buffer to the byte count fread() actually returned — in
//    text mode fread can return fewer bytes than ftell reported, and the
//    original left embedded trailing '\0' bytes in the string, which a
//    GLSL compiler sees as garbage.
std::string readFile( FILE* file )
{
    fseek( file, 0L, SEEK_END );
    long buffSize = ftell( file );
    if ( buffSize < 0 )              // ftell failed (non-seekable stream)
        return std::string();
    rewind( file );
    std::string buffer( static_cast<size_t>( buffSize ), '\0' );
    size_t readCount = fread( &buffer[0], 1, static_cast<size_t>( buffSize ), file );
    buffer.resize( readCount );      // keep only what was actually read
    return buffer;
}
GLenum ErrorCheckValue = glGetError();
GLint gl_shader_result = GL_FALSE;
int InfoLogLength;
std::string vertex_src;
std::string frag_src;
FILE* fp = nullptr;
fp = fopen( "/standard.vert", "r" );
if ( !fp )
{
emscripten_console_error( "No file found for vertex shader" );
return;
}
vertex_src = readFile( fp );
emscripten_console_logf( "Vertex Shader:\n%s", vertex_src.c_str() );
fclose( fp );
fp = fopen( "/standard.frag", "r" );
if ( !fp )
{
emscripten_console_error( "No file found for fragment shader" );
return;
}
frag_src = readFile( fp );
emscripten_console_logf( "Fragment Shader:\n%s", frag_src.c_str() );
fclose( fp );
const char* vertexCode = vertex_src.c_str();
const char* fragCode = frag_src.c_str();
u32 vertexShaderId = glCreateShader( GL_VERTEX_SHADER );
glShaderSource( vertexShaderId, 1, (const GLchar* const*)vertexCode, NULL );
glCompileShader( vertexShaderId );
// check for vertex shader errors
glGetShaderiv( vertexShaderId, GL_COMPILE_STATUS, &gl_shader_result );
glGetShaderiv( vertexShaderId, GL_INFO_LOG_LENGTH, &InfoLogLength );
if ( InfoLogLength > 0 )
{
std::string msg( InfoLogLength + 1, '\0' );
glGetShaderInfoLog( vertexShaderId, InfoLogLength, NULL, (GLchar*)msg.c_str() );
emscripten_console_error( ( "WASM:: Vertex shader error: " + msg ).c_str() );
return;
}
u32 fragmentShaderId = glCreateShader( GL_FRAGMENT_SHADER );
glShaderSource( fragmentShaderId, 1, (const GLchar* const*)fragCode, NULL );
glCompileShader( fragmentShaderId );
// check for vertex shader errors
glGetShaderiv( fragmentShaderId, GL_COMPILE_STATUS, &gl_shader_result );
glGetShaderiv( fragmentShaderId, GL_INFO_LOG_LENGTH, &InfoLogLength );
if ( InfoLogLength > 0 )
{
std::string msg( InfoLogLength + 1, '\0' );
glGetShaderInfoLog( fragmentShaderId, InfoLogLength, NULL, (GLchar*)msg.c_str() );
emscripten_console_error( ( "WASM:: Fragment shader error: " + msg ).c_str() );
return;
}
u32 shaderProgramId = glCreateProgram();
glAttachShader( shaderProgramId, vertexShaderId );
glAttachShader( shaderProgramId, fragmentShaderId );
glLinkProgram( shaderProgramId );
// Check the program
glGetProgramiv( shaderProgramId, GL_LINK_STATUS, &gl_shader_result );
glGetProgramiv( shaderProgramId, GL_INFO_LOG_LENGTH, &InfoLogLength );
if ( InfoLogLength > 0 )
{
std::string msg( InfoLogLength + 1, '\0' );
glGetProgramInfoLog( shaderProgramId, InfoLogLength, NULL, (GLchar*)msg.c_str() );
emscripten_console_error( ( "WASM:: Shader compilation error: " + msg ).c_str() );
return;
}
emscripten_console_log( "WASM:: Compiled shaders" );
glDetachShader( shaderProgramId, vertexShaderId );
glDetachShader( shaderProgramId, fragmentShaderId );
glDeleteShader( vertexShaderId );
glDeleteShader( fragmentShaderId );
And ofcourse the error: glCompileShader: ERROR: 1:1: '' : syntax error
Thank you for your patience to read all this. Any comment, suggestion or solution are always better none so thank you.
I am not sure if this is due to WebGL 1.0/2.0 being OpenGL ES2/ES3, but in order for WebGL to correctly read the shader code, it needs the pointer to the char[] pointer containing the shader code.
The correction to be made for vertex and fragment shaders is:
glShaderSource( vertexShaderId, 1, (const GLchar**)&vertexCode, NULL );
glShaderSource( fragmentShaderId, 1, (const GLchar**)&fragCode, NULL );

Can't render to GtkGLArea

I try to render a triangle to a GtkGLArea but I only see the color with which I cleared the frame with glClearColor().
Please note:
I know that the triangle is so big that it would fill the whole screen, but I also tried smaller ones and it didn't work either.
I also know that I should normally not create the program before each rendering, I only did it here to keep the example short.
I'm fairly certain that the error is neither in LoadShaders nor in the shaders themselves because I've tried the exact same functions with GLFW and they've worked fine there.
Things which might cause the problem:
I'm not flushing the frame currently or swapping framebuffers because the documentation (https://developer.gnome.org/gtk3/stable/GtkGLArea.html) doesn't mention that I have to. I've tried glFlush() but it didn't help either.
I assume that the screen coordinates go from -1 to 1 on all axis like in normal OpenGL. Maybe that's wrong but I couldn't find anything in the documentation there either.
Could somebody help me?
This is how I compile it:
g++ -O3 -s -o main main.cpp -isystem include -Llibs -DNDEBUG `pkg-config --cflags gtk+-3.0` `pkg-config --libs gtk+-3.0` -lepoxy -lm
This is my code:
#include <gtk/gtk.h>
#include <epoxy/gl.h>
#include <epoxy/glx.h>
#include <iostream>
#include <vector>
// Compiles `vertex` + `fragment` GLSL sources and links them into a new
// program object. Any compile/link diagnostics are printed to stdout, but
// the program id is returned regardless — callers are not told whether
// linking actually succeeded (Result is queried but never checked).
GLuint LoadShaders(char const* vertex, char const* fragment){
// Create the shaders
GLuint VertexShaderID = glCreateShader(GL_VERTEX_SHADER);
GLuint FragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
GLint Result = GL_FALSE;
int InfoLogLength;
// Compile Vertex Shader
glShaderSource(VertexShaderID, 1, &vertex , NULL);
glCompileShader(VertexShaderID);
// Check Vertex Shader
glGetShaderiv(VertexShaderID, GL_COMPILE_STATUS, &Result);
glGetShaderiv(VertexShaderID, GL_INFO_LOG_LENGTH, &InfoLogLength);
if ( InfoLogLength > 0 ){
std::vector<char> VertexShaderErrorMessage(InfoLogLength+1);
glGetShaderInfoLog(VertexShaderID, InfoLogLength, NULL, &VertexShaderErrorMessage[0]);
printf("%s\n", &VertexShaderErrorMessage[0]);
}
// Compile Fragment Shader
glShaderSource(FragmentShaderID, 1, &fragment , NULL);
glCompileShader(FragmentShaderID);
// Check Fragment Shader
glGetShaderiv(FragmentShaderID, GL_COMPILE_STATUS, &Result);
glGetShaderiv(FragmentShaderID, GL_INFO_LOG_LENGTH, &InfoLogLength);
if ( InfoLogLength > 0 ){
std::vector<char> FragmentShaderErrorMessage(InfoLogLength+1);
glGetShaderInfoLog(FragmentShaderID, InfoLogLength, NULL, &FragmentShaderErrorMessage[0]);
printf("%s\n", &FragmentShaderErrorMessage[0]);
}
// Link the program
GLuint ProgramID = glCreateProgram();
glAttachShader(ProgramID, VertexShaderID);
glAttachShader(ProgramID, FragmentShaderID);
glLinkProgram(ProgramID);
// Check the program
glGetProgramiv(ProgramID, GL_LINK_STATUS, &Result);
glGetProgramiv(ProgramID, GL_INFO_LOG_LENGTH, &InfoLogLength);
if ( InfoLogLength > 0 ){
std::vector<char> ProgramErrorMessage(InfoLogLength+1);
glGetProgramInfoLog(ProgramID, InfoLogLength, NULL, &ProgramErrorMessage[0]);
printf("%s\n", &ProgramErrorMessage[0]);
}
// The shader objects are no longer needed once the program is linked.
glDetachShader(ProgramID, VertexShaderID);
glDetachShader(ProgramID, FragmentShaderID);
glDeleteShader(VertexShaderID);
glDeleteShader(FragmentShaderID);
return ProgramID;
}
char const* vertShader = R"GLSL(
#version 330 core
void main(){
gl_Position.z = 0.0;
gl_Position.w = 1.0;
if (0 == gl_VertexID) {
gl_Position.x = -100.0;
gl_Position.y = -100.0;
}
if (2 == gl_VertexID) {
gl_Position.x = 0.0;
gl_Position.y = 100.0;
}
if (1 == gl_VertexID) {
gl_Position.x = 100.0;
gl_Position.y = -100.0;
}
}
)GLSL";
char const* fragShader = R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main(){
color = vec4(1.0, 0.0, 0.0, 1.0);
}
)GLSL";
// GtkGLArea "render" signal handler. A program is compiled, linked, used
// and deleted on every single frame (the author notes this is deliberate,
// to keep the example short). Under a core profile glDrawArrays also
// requires a VAO to be bound — nothing binds one here, which matches the
// GL_INVALID_OPERATION (no VAO bound) diagnosis in the answer.
gboolean
render(GtkGLArea*, GdkGLContext*, gpointer) {
glClearColor(0.5, 0.5, 0.5, 0);
glClear(GL_COLOR_BUFFER_BIT);
GLuint programID;
programID = LoadShaders(vertShader, fragShader);
glUseProgram(programID);
glDrawArrays(GL_TRIANGLES, 0, 3); // geometry is generated in the vertex shader
//glFlush();
glDeleteProgram(programID);
return TRUE; // TRUE = signal handled, stop further handlers
}
// Minimal GTK3 setup: a toplevel window containing a GtkGLArea whose
// "render" signal drives the drawing above. No GL version/profile is
// requested explicitly, so the context GTK provides is whatever the
// platform defaults to.
int
main(int argc, char** argv) {
gtk_init(&argc, &argv);
auto window{gtk_window_new(GTK_WINDOW_TOPLEVEL)};
auto glWidget{gtk_gl_area_new()};
gtk_container_add(GTK_CONTAINER(window), glWidget);
g_signal_connect (glWidget, "render", G_CALLBACK(render), nullptr);
gtk_widget_show_all(window);
gtk_main();
return EXIT_SUCCESS;
}
Two things I can think of:
You aren't requesting a Core context from the OS. Looks like you have to override create-context & create + return a gdk_gl_context_set_required_version'd GdkGLContext.
When you do get a Core context up & going I'm pretty sure you still need a VAO bound even if you're generating geometry entirely within your vertex shader.
RE: missing VAOs:
With this GLFW program and the VAO creation/bind commented out:
#include <glad/glad.h>
#define GLFW_INCLUDE_NONE
#include <GLFW/glfw3.h>
#include <iostream>
// Checks compile status (shader objects) or link status (program objects);
// on failure prints the info log to stderr and terminates the process.
//
// Fix vs. original: the log buffer was declared as GLint[1 << 11] and cast
// to GLchar* — that only "worked" because sizeof() reported the byte count.
// Declare it as a character buffer of the same byte capacity instead.
void CheckStatus( GLuint obj, bool isShader )
{
    GLint status = GL_FALSE;
    GLchar log[ 1 << 13 ] = { 0 };   // 8 KiB, same capacity as GLint[1<<11]
    ( isShader ? glGetShaderiv : glGetProgramiv )( obj, isShader ? GL_COMPILE_STATUS : GL_LINK_STATUS, &status );
    ( isShader ? glGetShaderInfoLog : glGetProgramInfoLog )( obj, sizeof( log ), NULL, log );
    if( status == GL_TRUE ) return;
    std::cerr << log << "\n";
    std::exit( EXIT_FAILURE );
}
// Compiles `src` as a shader of the given `type` and attaches it to
// `program`. The shader object is flagged for deletion immediately; GL
// keeps it alive for as long as it remains attached to the program.
void AttachShader( GLuint program, GLenum type, const char* src )
{
    const GLuint stage = glCreateShader( type );
    glShaderSource( stage, 1, &src, NULL );
    glCompileShader( stage );
    CheckStatus( stage, true );   // aborts the process on compile error
    glAttachShader( program, stage );
    glDeleteShader( stage );      // deferred until the program releases it
}
const char* vert = 1 + R"GLSL(
#version 330 core
void main(){
gl_Position.z = 0.0;
gl_Position.w = 1.0;
if (0 == gl_VertexID) {
gl_Position.x = -100.0;
gl_Position.y = -100.0;
}
if (2 == gl_VertexID) {
gl_Position.x = 0.0;
gl_Position.y = 100.0;
}
if (1 == gl_VertexID) {
gl_Position.x = 100.0;
gl_Position.y = -100.0;
}
}
)GLSL";
const char* frag = 1 + R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main(){
color = vec4(1.0, 0.0, 0.0, 1.0);
}
)GLSL";
// GLFW reproduction of the missing-VAO point: requests a 3.3 core-profile
// context, loads GL via glad, and draws WITHOUT a VAO bound (the three
// commented-out lines) so that core-profile implementations reject
// glDrawArrays with GL_INVALID_OPERATION. Restoring those lines yields
// the expected red triangle.
int main( int, char** )
{
glfwSetErrorCallback( []( int, const char* desc ) { std::cerr << desc << "\n"; std::exit( EXIT_FAILURE ); } );
glfwInit();
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
GLFWwindow* window = glfwCreateWindow( 640, 480, "GLFW", NULL, NULL );
glfwMakeContextCurrent( window );
gladLoadGLLoader( (GLADloadproc)glfwGetProcAddress );
//GLuint vao = 0;
//glGenVertexArrays( 1, &vao );
//glBindVertexArray( vao );
GLuint prog = glCreateProgram();
AttachShader( prog, GL_VERTEX_SHADER, vert );
AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog, false );
while( !glfwWindowShouldClose( window ) )
{
glfwPollEvents();
int w, h;
glfwGetFramebufferSize( window, &w, &h );
glViewport( 0, 0, w, h );
glClearColor( 0.5, 0.5, 0.5, 0 );
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glUseProgram( prog );
glDrawArrays( GL_TRIANGLES, 0, 3 );
glfwSwapBuffers( window );
}
glfwTerminate();
}
Running on Linux with Mesa 13.0.6's llvmpipe backend & the MESA_DEBUG=1 envvar gives me a grey window and this message on stdout:
Mesa: User error: GL_INVALID_OPERATION in glDrawArrays(no VAO bound)
Restoring the VAO gives the expected red window.

Opengl Shader Program changes state without reason

I'm writing an After Effects plugin for the windows platform using OpenGL. After initializing and compiling my shader program I call
glGetProgramiv(inData.mProgramObjSu, GL_ATTACHED_SHADERS, &size); and I get back the value 3 (which is correct because I attached a geometry shader).
Also I call glGetAttribLocation with various attribute names and get back valid values. When my render function is called later, the shader program is completely useless: every call to glGetAttribLocation returns -1 with the same attribute strings I used earlier, and glGetProgramiv(inData.mProgramObjSu, GL_ATTACHED_SHADERS, &size); returns a size of 2. I've got a little experience in OpenGL programming, but I never had a problem like that before.
Here is the shader initialisation code:
// Shader initialisation for the After Effects plugin: compiles a vertex,
// geometry and fragment shader, links them into inData.mProgramObjSu and
// caches attribute/uniform locations. (Fragment of a larger function.)
GLint vertCompiledB = 0;
GLint geoCompiledB = 0;
GLint fragCompiledB = 0;
GLint linkedB = 0;
// Create vertex shader
inData.mVertexShaderSu = glCreateShader( GL_VERTEX_SHADER );
glShaderSource( inData.mVertexShaderSu, 1, &inVertexShader, nullptr );
glCompileShader( inData.mVertexShaderSu);
glGetShaderiv( inData.mVertexShaderSu, GL_COMPILE_STATUS, &vertCompiledB );
char str[4096];
if(!vertCompiledB)
{
glGetShaderInfoLog(inData.mVertexShaderSu, sizeof(str), NULL, str);
GL_CHECK(AESDK_OpenGL_ShaderInit_Err);
}
// Create geometry shader
inData.mGeometryShaderSu = glCreateShader( GL_GEOMETRY_SHADER );
glShaderSource( inData.mGeometryShaderSu, 1, &inGeometryShader, nullptr );
glCompileShader( inData.mGeometryShaderSu);
glGetShaderiv( inData.mGeometryShaderSu, GL_COMPILE_STATUS, &geoCompiledB );
if(!geoCompiledB)
{
glGetShaderInfoLog(inData.mGeometryShaderSu, sizeof(str), NULL, str);
GL_CHECK(AESDK_OpenGL_ShaderInit_Err);
}
// Create fragment shader
inData.mFragmentShaderSu = glCreateShader( GL_FRAGMENT_SHADER );
glShaderSource( inData.mFragmentShaderSu, 1, &inFragmentShader, nullptr );
glCompileShader( inData.mFragmentShaderSu );
glGetShaderiv( inData.mFragmentShaderSu, GL_COMPILE_STATUS,&fragCompiledB );
if(!fragCompiledB)
{
glGetShaderInfoLog( inData.mFragmentShaderSu, sizeof(str), NULL, str );
GL_CHECK(AESDK_OpenGL_ShaderInit_Err);
}
// Create a program object and attach the three compiled shaders...
inData.mProgramObjSu = glCreateProgram();
glAttachShader( inData.mProgramObjSu, inData.mVertexShaderSu );
glAttachShader( inData.mProgramObjSu, inData.mGeometryShaderSu );
glAttachShader( inData.mProgramObjSu, inData.mFragmentShaderSu );
// Link the program object
glLinkProgram( inData.mProgramObjSu );
glGetProgramiv( inData.mProgramObjSu, GL_LINK_STATUS, &linkedB );
inData.mPosAttribLoc = glGetAttribLocation(inData.mProgramObjSu, "pos");
inData.mNormAttribLoc = glGetAttribLocation(inData.mProgramObjSu, "norm");
inData.mMVPUnifLoc = glGetUniformLocation(inData.mProgramObjSu, "mvp");
inData.mNormMatUnifLoc = glGetUniformLocation(inData.mProgramObjSu, "normMat");
inData.mLineWidthUnifLoc = glGetUniformLocation(inData.mProgramObjSu, "width");
inData.mEdgeWidthRatioUnifLoc = glGetUniformLocation(inData.mProgramObjSu, "edgeWidthRatio");
inData.mStrokeOverflowUnifLoc = glGetUniformLocation(inData.mProgramObjSu, "strokeOverflow");
int length;
int size;
GLenum type;
GLchar name[40];
// NOTE(review): uniform index 1 is hard-coded here, and `size` is
// immediately overwritten by the GL_ATTACHED_SHADERS query below.
glGetActiveUniform(inData.mProgramObjSu, 1, 40, &length, &size, &type, name);
glGetProgramiv(inData.mProgramObjSu, GL_ATTACHED_SHADERS, &size);
if( !linkedB )
{
int length;
// NOTE(review): glGetShaderInfoLog is being called on a *program*
// object here — this should be glGetProgramInfoLog.
glGetShaderInfoLog( inData.mProgramObjSu, 1000, &length, str );
GL_CHECK(AESDK_OpenGL_ShaderInit_Err);
}

Reading OpenGL Shader Language from .glsl File

I am trying to read shader strings from a file; however, I ran into a problem at the glShaderSource() function line. As you know, glShaderSource() takes const char**, and I have to declare char * for reading from the file. So, I am using casting to convert types.
If I use const_cast<const char **>, the shape appears; however, it has wrong color (It should be orange not white).
If I use reinterpret_cast<const char**>, I get a Access violation reading location 0x73726576 error in running time.
So, how can I solve this problem? Thank you!
Platform: Windows 7, Visual Studio 2010
Code Lines:
File: shader.glsl
#version 330
in vec3 vp;
void main() {
gl_Position = vec4( vp, 1.0);
}
main():
/* FILE READING */
// NOTE(review): the filename literal below contains a trailing space
// ("shader.glsl ") — on most systems fopen will fail, and the NULL result
// is never checked, so fseek/ftell/fread below operate on a NULL stream.
FILE* shaderFile = fopen( "shader.glsl ", "r");
int fileSize = 0;
char* vertex_shader = NULL;
//Getting File Size
fseek( shaderFile, 0, SEEK_END );
fileSize = ftell( shaderFile );
rewind( shaderFile );
//Reading From File
// NOTE(review): the fread return value is ignored. In Windows text mode
// fread can return fewer than fileSize bytes (CRLF translation), leaving
// uninitialized garbage between the bytes read and the '\0' placed at
// index fileSize — a plausible cause of the shader misbehaving.
vertex_shader = (char*)malloc( sizeof( char) * (fileSize+1) );
fread( vertex_shader, sizeof( char ), fileSize, shaderFile );
vertex_shader[ fileSize] = '\0';
fclose( shaderFile );
//Shader definition - If I used this format, it works.
/*const char* vertex_shader = "#version 330\n"
"in vec3 vp;"
"void main(){"
"gl_Position = vec4( vp, 1.0);"
"}";*/
//If I use const char* vertex_shader above, it appears orange.
const char* fragment_shader = "#version 330\n"
"out vec4 frag_colour;"
"void main () {"
" frag_colour = vec4(0.7, 0.4, 0.2, 1.0);"
"}";
//Shader compiling
unsigned int vertexShader = glCreateShader( GL_VERTEX_SHADER );
//The shape appears but not orange
// glShaderSource needs a pointer to the char* — &vertex_shader — and the
// const_cast is only needed because vertex_shader is char*, not const char*.
glShaderSource( vertexShader, 1, const_cast<const char **>(&vertex_shader) , NULL );
//glShaderSource( vertexShader, 1, reinterpret_cast<const char**>(vertex_shader) , NULL ); //Gives error
glCompileShader( vertexShader );
unsigned int fragmentShader = glCreateShader( GL_FRAGMENT_SHADER );
glShaderSource( fragmentShader, 1, &fragment_shader, NULL);
glCompileShader( fragmentShader );
//Shader program
// NOTE(review): no GL_COMPILE_STATUS / GL_LINK_STATUS checks anywhere —
// see the answer below for how to retrieve the compile log.
unsigned int shaderProgram = glCreateProgram();
glAttachShader( shaderProgram, fragmentShader );
glAttachShader( shaderProgram, vertexShader );
glLinkProgram( shaderProgram );
//Drawing
while( !glfwWindowShouldClose( window ) )
{
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glUseProgram( shaderProgram );
glBindVertexArray( vao );
glDrawArrays( GL_TRIANGLES, 0, 3);
glfwPollEvents();
glfwSwapBuffers( window);
}
Pass the address of the array to the function:
glShaderSource( vertexShader, 1, (const GLchar**)&vertex_shader, NULL);
EDIT:
Thanks for updating the code in the question. If your program compiles, doesn't crash but the shaders still don't work, it's time to investigate if the GLSL compiler returned any errors! After each call to glCompileShader() write something like the following to display any problems that occurred during compilation:
// This checks for errors upon compiling the Vertex Shader.
// Fix vs. original: the log buffer is allocated with new[], so it must be
// released with delete[] — the original used plain `delete log`, which is
// undefined behavior for array allocations.
GLint _compiled = 0;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &_compiled);
if (!_compiled)
{
    GLint length;
    GLchar* log;
    glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &length);
    log = new GLchar[length];
    glGetShaderInfoLog(vertexShader, length, &length, log);
    std::cerr << "!!! Compile log = " << log << std::endl;
    delete[] log;   // array form — matches new GLchar[length]
    return;
}
The glShaderSource function takes a char** so take your char* and just use the & operator on it instead of trying to just cast it.

Opengl Sierpinski Shader based C code in Visual Studio2012 & 2010

I'm doing my first steps in OpenGl Shader base programming and computer graphics. I'm trying the following example, but when I try to compile the project I got the following error:
fragment shader failed to link. vertex shader failed to link.
ERROR: Not all shaders has valid object code
I tried running the program in Visual Studio 2012 and 2010. The glut library version is 3.7 and the glew library version is 1.10.0. What is the problem?
.cpp Program
#include <stdio.h>
#include <stdlib.h>
#include <glew.h>
#include <glut.h>
#include <gl.h>
#pragma comment(lib,"glew32.lib")
#define NoP 50000
GLuint InitShader(char *, char *);
void mydisplay();
struct points
{ GLfloat x,y,z;
};
void init();
void Sierpinski(points Num[]);
// Entry point: creates a GLUT window (and with it a GL context), then runs
// glewInit() — required before any GL 2.0+ entry point such as
// glCreateProgram is used — before building the scene and entering the
// event loop.
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGB | GLUT_SINGLE|GLUT_DEPTH);
glutInitWindowSize(600, 600);
glutInitWindowPosition(0,0);
glutCreateWindow("Sierpinski 3D");
glewInit();
init();
glEnable(GL_DEPTH_TEST);
glutDisplayFunc(mydisplay);
glutMainLoop();
}
// Generates the Sierpinski point cloud, builds the shader program, and
// uploads the points into a VBO wired to the "vPosition" attribute.
// NOTE(review): Num is ~600 KB (50000 x 3 floats) on the stack — fine on
// desktop defaults, but worth knowing. Also, glGetAttribLocation returns
// GLint; storing it in a GLuint silently hides a -1 "not found" result.
void init()
{
points Num[NoP];
Sierpinski(Num);
glClearColor(1.0f, 1.0f, 1.0f, 0.0f);//set the color for clearing the display
glPointSize(2); // set the point size
// Creating a program object containing shader files
GLuint program;
program = InitShader("vshader.glsl","fshader.glsl");
glUseProgram(program);
//Creating a buffer object containing Sierpinski vertex data
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(Num), Num, GL_STATIC_DRAW);
//Make a connection between data in the buffer object and "vPosition" in the vertex shader
GLuint location = glGetAttribLocation(program, "vPosition");
glEnableVertexAttribArray ( location );
glVertexAttribPointer( location, 3, GL_FLOAT, GL_FALSE,0, 0);//BUFFER_OFFSET(0));
}
// GLUT display callback: clears color + depth, draws the point cloud, and
// flushes (the window was created single-buffered, so no buffer swap).
void mydisplay()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //the clear call will affect the color buffer
glDrawArrays(GL_POINTS,0,NoP);//Rendering vertex data
glFlush(); //Empty all commands buffer, causing all issued commands to be executed as quickly as they are accepted by the actual rendering engine
}
// Reads the entire shader file into a freshly allocated, NUL-terminated
// buffer; returns NULL if the file cannot be opened. The caller owns the
// buffer (release with delete[]).
//
// Fixes vs. original:
//  - file size via fseek/ftell instead of an O(n) fgetc loop;
//  - binary mode instead of the non-standard "rt", so ftell is reliable on
//    every platform (GLSL compilers accept both CR/LF and LF endings);
//  - the buffer is terminated at the byte count fread actually returned.
static char * ReadShaderSource(char * ShaderFile)
{
    FILE * fp = fopen(ShaderFile, "rb");
    if (!fp) return NULL;
    fseek(fp, 0, SEEK_END);
    long size = ftell(fp);
    if (size < 0) { fclose(fp); return NULL; }  // ftell failed
    fseek(fp, 0, SEEK_SET);
    char * buf = new char[size + 1];
    size_t got = fread(buf, 1, (size_t)size, fp);
    buf[got] = 0;  // string is NUL terminated at what was actually read
    fclose(fp);
    return buf;
}
// Reads, compiles and links the vertex + fragment shader files into a new
// program, printing the info log and exiting on any compile/link failure.
// NOTE(review): ReadShaderSource can return NULL (missing file) and that
// is never checked before glShaderSource; also svs/sfs are new[]'d and
// never delete[]'d, and the "/n" in two printf literals below should
// almost certainly be "\n".
GLuint InitShader(char * vShaderFile, char * fShaderFile)
{
char * svs, * sfs;
GLuint program, VertexShader, FragmentShader;
program = glCreateProgram();
VertexShader = glCreateShader(GL_VERTEX_SHADER);
svs=ReadShaderSource(vShaderFile);
glShaderSource(VertexShader,1,(const GLchar **)&svs,NULL);
glCompileShader(VertexShader);
// reading GLSL compiler error messages for vertex shader
GLint compiled;
glGetShaderiv(VertexShader, GL_COMPILE_STATUS, &compiled);
if(!compiled)
{ printf("/n failed to compile");
GLint logSize;
glGetShaderiv(VertexShader, GL_INFO_LOG_LENGTH,&logSize);
char * logMsg = new char[logSize];
glGetShaderInfoLog(VertexShader, logSize, NULL, logMsg);
printf("\n %s",logMsg);
delete [] logMsg;
getchar();
exit(EXIT_FAILURE);
}
FragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
sfs = ReadShaderSource(fShaderFile);
glShaderSource(FragmentShader, 1, (const GLchar **)&sfs, NULL);
glCompileShader(FragmentShader);
// reading GLSL compiler error messages for fragment shader
glGetShaderiv(FragmentShader, GL_COMPILE_STATUS, &compiled);
if(!compiled)
{ printf("\n failed to compile");
GLint logSize2;
glGetShaderiv(FragmentShader, GL_INFO_LOG_LENGTH,&logSize2);
char * logMsg2 = new char[logSize2];
glGetShaderInfoLog(FragmentShader, logSize2, NULL, logMsg2);
printf("\n %s",logMsg2);
delete [] logMsg2;
getchar();
exit(EXIT_FAILURE);
}
glAttachShader(program,VertexShader);
glAttachShader(program, FragmentShader);
glLinkProgram(program);
// reading GLSL linker error messages for program object
GLint linked;
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if(!linked)
{ printf("/n failed to link");
GLint logSize;
glGetProgramiv(program, GL_INFO_LOG_LENGTH,&logSize);
char * logMsg = new char[logSize];
glGetProgramInfoLog(program, logSize, NULL, logMsg);
printf("\n %s",logMsg);
delete [] logMsg;
getchar();
exit(EXIT_FAILURE);
}
glUseProgram(program);
return program;
}
// Fills Num[0..NoP) with points of a 3-D Sierpinski gasket via the chaos
// game: start at the origin and repeatedly jump halfway towards one of the
// tetrahedron's four corners, chosen uniformly at random.
void Sierpinski(points Num[])
{
    const points corner[4] = { {-1.0,-1.0, -1.0}, {1.0,-1.0,-1.0},
                               {0.0,1.0,-1.0},    {0.0,0.0,1.0} };
    Num[0].x = 0;
    Num[0].y = 0;
    Num[0].z = 0;
    for (int i = 1; i < NoP; i++)
    {
        const points& c    = corner[rand() % 4];
        const points& prev = Num[i - 1];
        Num[i].x = (c.x + prev.x) / 2;
        Num[i].y = (c.y + prev.y) / 2;
        Num[i].z = (c.z + prev.z) / 2;
    }
}
vshader.glsl
#version 130
in vec4 vPosition;
out vec4 color;
void main()
{
gl_Position = vPosition;
color = vPosition;
}
fshader.glsl
#version 130
in vec4 color;
void main()
{
gl_FragColor = vec4((1.0 + color.xyz)/2.0,1.0);
}
Seems to work fine:
#include <GL/glew.h>
#include <GL/glut.h>
#include <iostream>
// Queries the compile status (shader objects) or link status (program
// objects) of `obj`; on failure dumps the info log to stderr and exits.
void CheckStatus( GLuint obj )
{
    const bool isShader  = glIsShader( obj );
    const bool isProgram = glIsProgram( obj );
    GLint ok = GL_FALSE;
    if( isShader )  glGetShaderiv( obj, GL_COMPILE_STATUS, &ok );
    if( isProgram ) glGetProgramiv( obj, GL_LINK_STATUS, &ok );
    if( ok == GL_TRUE )
        return;
    GLchar log[ 1 << 17 ] = { 0 };  // 128 KiB is plenty for any driver log
    if( isShader )  glGetShaderInfoLog( obj, sizeof(log), NULL, log );
    if( isProgram ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
    std::cerr << log << std::endl;
    exit( -1 );
}
// Builds one shader stage from `src`, verifies it compiled, and hands it
// to `program`; the local reference is released right away so the program
// holds the only remaining reference to the shader object.
void AttachShader( GLuint program, GLenum type, const char* src )
{
    GLuint obj = glCreateShader( type );
    const char* sources[1] = { src };
    glShaderSource( obj, 1, sources, NULL );
    glCompileShader( obj );
    CheckStatus( obj );             // exits the process on compile error
    glAttachShader( program, obj );
    glDeleteShader( obj );
}
// Assembles a program from up to three optional stages (pass NULL to skip
// one), links it, and aborts via CheckStatus on any link error.
GLuint LoadProgram( const char* vert, const char* geom, const char* frag )
{
    GLuint prog = glCreateProgram();
    const struct { GLenum type; const char* src; } stages[] = {
        { GL_VERTEX_SHADER,   vert },
        { GL_GEOMETRY_SHADER, geom },
        { GL_FRAGMENT_SHADER, frag },
    };
    for( const auto& stage : stages )
        if( stage.src )
            AttachShader( prog, stage.type, stage.src );
    glLinkProgram( prog );
    CheckStatus( prog );
    return prog;
}
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
130,
in vec4 vPosition;
out vec4 color;
void main()
{
gl_Position = vPosition;
color = vPosition;
}
);
const char* frag = GLSL
(
130,
precision mediump float;
in vec4 color;
void main()
{
gl_FragColor = vec4((1.0 + color.xyz)/2.0,1.0);
}
);
#define NoP 50000
struct points
{
GLfloat x,y,z;
};
// Chaos-game generator for the Sierpinski tetrahedron: each new point is
// the midpoint between the previous point and a randomly picked corner.
void Sierpinski(points Num[])
{
    points apex[4] = {{-1.0,-1.0, -1.0},{1.0,-1.0,-1.0},{0.0,1.0,-1.0},{0.0,0.0,1.0}};
    Num[0].x = Num[0].y = Num[0].z = 0;
    for (int i = 1; i < NoP; ++i)
    {
        int pick = rand() % 4;
        Num[i].x = (apex[pick].x + Num[i-1].x) / 2;
        Num[i].y = (apex[pick].y + Num[i-1].y) / 2;
        Num[i].z = (apex[pick].z + Num[i-1].z) / 2;
    }
}
// Builds the scene once: generates the point cloud, compiles/links the
// embedded shaders, and uploads the points into a VBO bound to "vPosition".
// NOTE(review): Num is ~600 KB (50000 x 3 floats) of stack — acceptable on
// desktop defaults.
void init()
{
points Num[NoP];
Sierpinski(Num);
GLuint program = LoadProgram( vert, NULL, frag );
glUseProgram(program);
//Creating a buffer object containing Sierpinski vertex data
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(Num), Num, GL_STATIC_DRAW);
//Make a connection between data in the buffer object and "vPosition" in the vertex shader
GLuint location = glGetAttribLocation(program, "vPosition");
glEnableVertexAttribArray ( location );
glVertexAttribPointer( location, 3, GL_FLOAT, GL_FALSE,0, 0);//BUFFER_OFFSET(0));
}
// GLUT display callback: clears, draws the point cloud, and swaps buffers
// (this version uses GLUT_DOUBLE, unlike the question's single-buffered one).
void mydisplay()
{
glClearColor(1.0f, 1.0f, 1.0f, 0.0f);//set the color for clearing the display
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //the clear call will affect the color buffer
glPointSize(2); // set the point size
glDrawArrays(GL_POINTS,0,NoP);//Rendering vertex data
glutSwapBuffers();
}
// Entry point: creates a double-buffered GLUT window, initializes GLEW
// (must happen after context creation and before any GL 2.0+ call), then
// builds the scene and enters the event loop.
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE|GLUT_DEPTH);
glutInitWindowSize(600, 600);
glutInitWindowPosition(0,0);
glutCreateWindow("Sierpinski 3D");
glewInit();
init();
glEnable(GL_DEPTH_TEST);
glutDisplayFunc(mydisplay);
glutMainLoop();
}
Make sure you use FreeGLUT, Nate's GLUT is way, way old.