OpenGL basic triangle render with C++ not working

I have a basic OpenGL program using GLFW and GLEW to render a triangle.
I'm on a Mac, using Xcode, with GLFW 3.3 and GLEW 2.1.
I was running into a problem where glfwCreateWindow() would return null whenever I set the OpenGL profile to the core profile (which, from what I understand, macOS requires), and I worked around it by setting the forward-compatibility hint to true.
Can somebody explain whether this solution is right or not?
The other issue is that the window now gets created without problems, but I'm not seeing anything on the screen.
This is the code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
static unsigned int CompileShader(unsigned int type, const std::string& source)
{
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
// Compiles the actual shader
glCompileShader(id);
// Error handling of the compilation
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE)
{
int length;
// Gets the length of the message
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
// This is used to allocate memory on the stack dynamically
char* message = (char*)alloca(length * sizeof(char));
// Gets the log
glGetShaderInfoLog(id, length, &length, message);
std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader)
{
unsigned int program = glCreateProgram();
// Creating the two shaders
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
// Attaches the two shaders to the program and validate everything
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program); // Links the compiled shaders into the program's executable
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
// to initialize GLEW (statically linked), go to the C++ preprocessor settings and add GLEW_STATIC
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
/* Create a window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
// Glew init HAS to be put after making the context
if (glewInit() != GLEW_OK)
std::cout << "GlewInit Error" << std::endl;
std::cout << "VERSION:" << std::endl;
std::cout << glGetString(GL_VERSION) << std::endl;
std::cout << "GLSL Version: " << (char *)glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
// Defining the position of the vertices
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
std::string vertexShader =
"#version 330 core\n"
"\n"
// here we are saying that the attribute at position 0 (see glVertexAttribPointer)
// which is the position itself, is an input value put into a vec4 because
// gl_Position needs a vec4
"layout(location = 0) in vec4 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\n";
std::string fragmentShader =
"#version 330 core\n"
"\n"
"layout(location = 0) out vec4 color;\n"
"\n"
"void main()\n"
"{\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
// the second parameter is the starting index and the third is the number of vertices to draw
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}

Vertex Array Objects (VAOs) aren't optional in Core contexts like they are in Compatibility. You have to have one bound to draw anything.
Create one and bind it before setting up your vertex layout and drawing:
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
...
glEnableVertexAttribArray( ... );
glVertexAttribPointer( ... );
...
glDrawArrays( ... );
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdarg>
#include <cstdlib>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
std::exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const char* vert = 1 + R"GLSL(
#version 330 core
// here we are saying that the attribute at position 0 (see glVertexAttribPointer)
// which is the position itself, is an input value put into a vec4 because
// gl_Position needs a vec4
layout(location = 0) in vec4 position;
void main()
{
gl_Position = position;
}
)GLSL";
const char* frag = 1 + R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main()
{
color = vec4(1.0, 0.0, 0.0, 1.0);
}
)GLSL";
int main( void )
{
if( !glfwInit() )
return -1;
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
GLFWwindow* window = glfwCreateWindow( 640, 480, "Hello World", NULL, NULL );
if( !window )
{
glfwTerminate();
return -1;
}
glfwMakeContextCurrent( window );
// Glew init HAS to be put after making the context
if( glewInit() != GLEW_OK )
std::cout << "GlewInit Error" << std::endl;
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
// Defining the position of the vertices
float positions[ 6 ] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers( 1, &buffer );
glBindBuffer( GL_ARRAY_BUFFER, buffer );
glBufferData( GL_ARRAY_BUFFER, 6 * sizeof( float ), positions, GL_STATIC_DRAW );
glEnableVertexAttribArray( 0 );
glVertexAttribPointer( 0, 2, GL_FLOAT, GL_FALSE, sizeof( float ) * 2, 0 );
GLuint shader = Program::Load( vert, GL_VERTEX_SHADER, frag, GL_FRAGMENT_SHADER, NULL );
glUseProgram( shader );
while( !glfwWindowShouldClose( window ) )
{
glClear( GL_COLOR_BUFFER_BIT );
glDrawArrays( GL_TRIANGLES, 0, 3 );
glfwSwapBuffers( window );
glfwPollEvents();
}
glDeleteProgram( shader );
glfwTerminate();
return 0;
}

Related

Drawing a dot with OpenGL

My program is supposed to open a window and draw a single blue dot in the center but fails to draw the dot.
This is the full code I'm working with:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#define numVAOs 1
GLuint renderingProgram;
GLuint vao[numVAOs];
GLuint createShaderProgram()
{
const char* vshaderSource = "#version 320 \n"
"void main(void) \n"
"{gl_Position = vec4(0.0,0.0,0.0,1.0);}";
const char* fshaderSource = "#version 320 \n"
"out vec4 color; \n"
"void main(void) \n"
"{ color = vec4(0.0,0.0,1.0,1.0);}";
GLuint vShader = glCreateShader(GL_VERTEX_SHADER);
GLuint fShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vShader, 1, &vshaderSource, NULL);
glShaderSource(fShader, 1, &fshaderSource, NULL);
glCompileShader(vShader);
glCompileShader(fShader);
GLuint vfProgram = glCreateProgram();
glAttachShader(vfProgram, vShader);
glAttachShader(vfProgram, fShader);
glLinkProgram(vfProgram);
return vfProgram;
}
void init(GLFWwindow* window)
{
renderingProgram = createShaderProgram();
glGenVertexArrays(numVAOs, vao);
glBindVertexArray(vao[0]);
}
void display(GLFWwindow* window, double currentTime)
{
glUseProgram(renderingProgram);
glDrawArrays(GL_POINT, 0, 1);
}
int main(void)
{
if (!glfwInit())
{
return -1;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
GLFWwindow* window = glfwCreateWindow(600, 600, "Chapter2-program1", NULL, NULL);
glfwMakeContextCurrent(window);
//Window or OpenGL context creation failed
if (!window)
{
glfwTerminate();
return -1;
}
if (glewInit() != GLEW_OK)
{
std::cout << "Error!" << std::endl;
exit(EXIT_FAILURE);
}
glfwSwapInterval(1);
init(window);
/* Rendering loop- until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
display(window, glfwGetTime());
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
exit(EXIT_SUCCESS);
return 0;
}
A few issues:
#version 320 doesn't exist; GLSL went from version 150 in OpenGL 3.2 to version 330 in OpenGL 3.3.
Drop down to #version 150 and make sure to check the shader compilation and link logs (a bare-bones check is sketched right after this list).
GL_POINT is not a valid value for glDrawArrays()'s mode argument; you're thinking of GL_POINTS.
Make sure to clear the color/depth buffers with glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT).
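In isolation, the compile-status check mentioned above boils down to the pattern below; the CheckStatus() helper in the full listing that follows does the same thing for both shader compilation and program linking. Here 'shader' stands for the id returned by glCreateShader():
GLint ok = GL_FALSE;
glGetShaderiv( shader, GL_COMPILE_STATUS, &ok );
if( ok != GL_TRUE )
{
    GLchar log[ 4096 ] = { 0 };
    glGetShaderInfoLog( shader, sizeof( log ), NULL, log );
    std::cerr << log << std::endl; // the driver's compile error message
}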
All together:
// g++ main.cpp `pkg-config glfw3 glew --cflags --libs`
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#define numVAOs 1
GLuint renderingProgram;
GLuint vao[numVAOs];
void CheckStatus( GLuint obj, bool isShader )
{
GLint status = GL_FALSE, log[ 1 << 11 ] = { 0 };
( isShader ? glGetShaderiv : glGetProgramiv )( obj, isShader ? GL_COMPILE_STATUS : GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
( isShader ? glGetShaderInfoLog : glGetProgramInfoLog )( obj, sizeof( log ), NULL, (GLchar*)log );
std::cerr << (GLchar*)log << "\n";
std::exit( EXIT_FAILURE );
}
void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader, true );
glAttachShader( program, shader );
glDeleteShader( shader );
}
const char* const vert = R"GLSL(
#version 150
void main(void)
{
gl_Position = vec4(0.0,0.0,0.0,1.0);
}
)GLSL";
const char* const frag = R"GLSL(
#version 150
out vec4 color;
void main(void)
{
color = vec4(0.0,0.0,1.0,1.0);
}
)GLSL";
int main(void)
{
if (!glfwInit())
{
return -1;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
GLFWwindow* window = glfwCreateWindow(600, 600, "Chapter2-program1", NULL, NULL);
//Window or OpenGL context creation failed
if (!window)
{
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
{
std::cout << "Error!" << std::endl;
exit(EXIT_FAILURE);
}
glfwSwapInterval(1);
GLuint renderingProgram = glCreateProgram();
AttachShader( renderingProgram, GL_VERTEX_SHADER, vert );
AttachShader( renderingProgram, GL_FRAGMENT_SHADER, frag );
glLinkProgram( renderingProgram );
CheckStatus( renderingProgram, false );
glGenVertexArrays(numVAOs, vao);
glBindVertexArray(vao[0]);
/* Rendering loop- until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glUseProgram(renderingProgram);
glDrawArrays(GL_POINTS, 0, 1);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
exit(EXIT_SUCCESS);
return 0;
}

Printing a red triangle in OpenGL

I have the following code and am trying to draw a red triangle using OpenGL, GLEW, etc. The program builds and shows a triangle; however, it only shows the triangle when I drag the window, and the triangle shows up white, not red. I'm not sure how to fix this issue. Below is a copy of my code (I am running it in Xcode).
#include <iostream>
// GLEW
#define GLEW_STATIC
#include <GL/glew.h>
// GLFW
#include <GLFW/glfw3.h>
static unsigned int CompileShader(unsigned int type, const std::string& source)
{
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result != GL_FALSE)
{
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char* message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << "shader" << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader)
{
unsigned int program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
std::cout << "Error!" << std::endl;
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);
std::string vertexShader =
"#version 330 core\n"
"\n"
"layout(location = 0) in vec4 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\n";
std::string fragmentShader =
"#version 330 core\n"
"\n"
"layout(location = 0) out vec4 color;\n"
"\n"
"void main()\n"
"{\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
glBindBuffer(GL_ARRAY_BUFFER, 0);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
I am using Xcode and am having 2 issues:
1. the triangle is printing out white
2. the triangle only shows up when I drag the window
I have tried debugging but am not able to figure these issues out.
You need to go whole-hog on Core contexts on macOS if you want anything past GL 2.1:
Request a versioned Core context via GLFW_OPENGL_PROFILE, GLFW_CONTEXT_VERSION_MAJOR, and GLFW_CONTEXT_VERSION_MINOR; it should meet or exceed the #version you're specifying in your shaders
For macOS in particular you need a forward-compatible context; set GLFW_OPENGL_FORWARD_COMPAT to GL_TRUE
Core contexts require a vertex array object (VAO) to draw anything; for simple stuff you can get away with creating a single one and leaving it bound
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <vector>
#include <iostream>
#include <cstdarg>
#include <cstdlib>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
std::exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const char* vert = 1 + R"GLSL(
#version 330 core
layout(location = 0) in vec4 position;
void main()
{
gl_Position = position;
}
)GLSL";
const char* frag = 1 + R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main()
{
color = vec4(1.0, 0.0, 0.0, 1.0);
}
)GLSL";
int main(void)
{
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE );
GLFWwindow* window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
std::cout << "Error!" << std::endl;
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
float positions[6] =
{
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);
GLuint shader = Program::Load( vert, GL_VERTEX_SHADER, frag, GL_FRAGMENT_SHADER, NULL );
glUseProgram(shader);
glBindBuffer(GL_ARRAY_BUFFER, 0);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
Your problem may be in the vertex shader: with "layout(location = 0) in vec4 position;\n"
you declare a vec4, but you are sending a vec2 from the glVertexAttribPointer() call.
Try sending a vec4, or try this in the vertex shader:
std::string vertexShader =
"#version 330 core\n"
"\n"
"layout(location = 0) in vec2 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = vec4(position, 0, 1);\n"
"}\n";

OpenGL program to draw a triangle gives a yellow screen

I am trying to draw a triangle with OpenGL.
My code compiles fine and does not produce any errors, but it still does not do what it's supposed to.
I have a vertex shader and a fragment shader and both of them are in the same program.
My code is below:
#include "../../include/sb7.h"
GLuint compile_shaders(void)
{
GLuint vertex_shader;
GLuint fragment_shader;
GLuint program;
// Source code for Vertex Shader
static const GLchar * vertex_shader_source[] =
{
"#version 450 core \n"
" \n"
"void main(void) \n"
"{ \n"
" //Declare a hardcoded array of positions \n"
" const vec4 vertices[3] = vec4[3](vec4(0.25, -0.25, 0.5, 1.0), \n"
" vec4(-0.25, -0.25, 0.5, 1.0), \n"
" vec4(0.25, 0.25, 0.5, 1.0)); \n"
" //Index into our array using gl_VertexID \n"
" gl_Position = vertices[gl_VertexID]; \n"
"} \n"
};
// Source code for Fragment Shader
static const GLchar * fragment_shader_source[] =
{
"#version 450 core \n"
" \n"
"out vec4 color; \n"
" \n"
"void main(void) \n"
"{ \n"
" color = vec4(0.0, 0.8, 1.0, 1.0); \n"
"} \n"
};
// Create and compiler Vertex Shader
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, vertex_shader_source, NULL);
glCompileShader(vertex_shader);
// Create and compiler Fragment Shader
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, fragment_shader_source, NULL);
glCompileShader(fragment_shader);
// Create program, attach shaders to it, and link it
program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
glLinkProgram(program);
// Delete shaders as program has them now
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
return program;
};
class my_application : public sb7::application
{
public:
void startup()
{
rendering_program = compile_shaders();
glCreateVertexArrays(1, &vertex_array_object);
glBindVertexArray(vertex_array_object);
}
void shutdown()
{
glDeleteVertexArrays(1, &vertex_array_object);
glDeleteProgram(rendering_program);
glDeleteVertexArrays(1, &vertex_array_object);
}
// Our rendering function
void render(double currentTime)
{
// Sets Colour
static const GLfloat color[] = { 0.0f, 0.2f, 0.0f, 1.0f };
glClearBufferfv(GL_COLOR, 0, color);
// Use program object we created for rendering
glUseProgram(rendering_program);
// Draw one triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
}
private:
GLuint rendering_program;
GLuint vertex_array_object;
};
I am following the OpenGL Superbible, 7th edition.
Earlier, the same program (with small changes) was also not drawing a point, even after I changed the point size, and the output was the same full-yellow screen I am getting for this.
I have seen similar problems that arise when drawing triangles, but they are all different from what I'm doing.
How do I fix this?
Workin' fine here:
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdarg>
#include <cstdlib>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const char* vert = R"GLSL(
#version 450 core
void main(void)
{
//Declare a hardcoded array of positions
const vec4 vertices[3] = vec4[3]
(
vec4(0.25, -0.25, 0.5, 1.0),
vec4(-0.25, -0.25, 0.5, 1.0),
vec4(0.25, 0.25, 0.5, 1.0)
);
//Index into our array using gl_VertexID
gl_Position = vertices[gl_VertexID];
}
)GLSL";
const char* frag = R"GLSL(
#version 450 core
out vec4 color;
void main(void)
{
color = vec4(0.0, 0.8, 1.0, 1.0);
}
)GLSL";
int main( int argc, char** argv )
{
glfwInit();
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 4 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 5 );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
glfwWindowHint( GLFW_OPENGL_DEBUG_CONTEXT, GL_TRUE );
GLFWwindow* window = glfwCreateWindow( 640, 480, "51307782", NULL, NULL );
glfwMakeContextCurrent( window );
glewInit();
glfwSwapInterval( 1 );
GLuint rendering_program = Program::Load
(
vert, GL_VERTEX_SHADER,
frag, GL_FRAGMENT_SHADER,
NULL
);
GLuint vertex_array_object = 0;
glCreateVertexArrays( 1, &vertex_array_object );
glBindVertexArray( vertex_array_object );
while( !glfwWindowShouldClose( window ) )
{
glfwPollEvents();
static const GLfloat color[] = { 0.0f, 0.2f, 0.0f, 1.0f };
glClearBufferfv( GL_COLOR, 0, color );
glUseProgram( rendering_program );
glDrawArrays( GL_TRIANGLES, 0, 3 );
glfwSwapBuffers( window );
}
glDeleteVertexArrays( 1, &vertex_array_object );
glDeleteProgram( rendering_program );
glfwDestroyWindow( window );
glfwTerminate();
}

OpenGL, a very basic shader not working

I am learning OpenGL through TheCherno Project.
The code below is from there, and I have reproduced it faithfully for my learning purposes. I have also included both the vertex and fragment shaders.
My issue is that in my square output I don't see the color at all, and I am guessing it is using the default shader.
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
std::cout << "ERROR GLEW" << std::endl;
std::cout << glGetString(GL_VERSION) << std::endl;
float positions[] = {
-0.5f,-0.5f, //0
0.5f, -0.5f, //1
0.5f, 0.5f, //2
-0.5f, 0.5f //3
};
unsigned int indices[] = {
0, 1, 2,
2, 3, 0
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * 2 * sizeof(float), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
unsigned int ibo;
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 6 * sizeof(unsigned int), indices, GL_STATIC_DRAW);
ShaderSourceProgram shader = ParseShader("res/shaders/Basic.shader");
//std::cout << "VERTEX" << std::endl;
//std::cout << shader.VertexShader << std::endl;
unsigned int shaderProgram = CreateShader(shader.VertexShader, shader.FragmentShader);
glUseProgram(shaderProgram);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shaderProgram);
glfwTerminate();
return 0;
}
The shaders:
#shader vertex
#version 330 core
layout(location = 0)in vec4 position;
void main()
{
gl_Position = position;
};
#shader fragment
#version 330 core
out vec4 color;
void main()
{
color = vec4(0.0,1.0,0.0,1.0);
};
EDIT: Please click here for the entire project (github)
As @Ripi2 pointed out, your glBufferData() calls, while not exactly wrong (they over-request buffer space), aren't exactly correct either. Since you're using raw arrays you can use a straight sizeof(<arrayname>) for the size parameter:
glBufferData( GL_ARRAY_BUFFER, sizeof( positions ), positions, GL_STATIC_DRAW );
...
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( indices ), indices, GL_STATIC_DRAW );
I suspect your (unknown) shader loader might be wonky since my personal one worked fine.
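The OP's ParseShader() isn't shown in the question; given the #shader vertex / #shader fragment markers in Basic.shader, a minimal sketch of such a loader (the struct and function names mirror the question's usage, the implementation itself is an assumption) could look like this:
#include <fstream>
#include <sstream>
#include <string>
struct ShaderSourceProgram
{
    std::string VertexShader;
    std::string FragmentShader;
};
static ShaderSourceProgram ParseShader(const std::string& filepath)
{
    std::ifstream stream(filepath);
    std::string line;
    std::stringstream ss[2]; // [0] collects the vertex source, [1] the fragment source
    int index = -1;          // which shader section we are currently inside
    while (std::getline(stream, line))
    {
        if (line.find("#shader") != std::string::npos)
        {
            if (line.find("vertex") != std::string::npos) index = 0;
            else if (line.find("fragment") != std::string::npos) index = 1;
        }
        else if (index != -1)
        {
            ss[index] << line << '\n';
        }
    }
    return { ss[0].str(), ss[1].str() };
}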
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdlib>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
int main( void )
{
GLFWwindow* window;
/* Initialize the library */
if( !glfwInit() )
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow( 640, 480, "Hello World", NULL, NULL );
if( !window )
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent( window );
if( glewInit() != GLEW_OK )
std::cout << "ERROR GLEW" << std::endl;
std::cout << glGetString( GL_VERSION ) << std::endl;
float positions[] = {
-0.5f,-0.5f, //0
0.5f, -0.5f, //1
0.5f, 0.5f, //2
-0.5f, 0.5f //3
};
unsigned int indices[] = {
0, 1, 2,
2, 3, 0
};
unsigned int buffer;
glGenBuffers( 1, &buffer );
glBindBuffer( GL_ARRAY_BUFFER, buffer );
glBufferData( GL_ARRAY_BUFFER, sizeof( positions ), positions, GL_STATIC_DRAW );
glVertexAttribPointer( 0, 2, GL_FLOAT, GL_FALSE, sizeof( float ) * 2, 0 );
glEnableVertexAttribArray( 0 );
unsigned int ibo;
glGenBuffers( 1, &ibo );
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, ibo );
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( indices ), indices, GL_STATIC_DRAW );
const char* vert = GLSL
(
330 core,
layout(location = 0)in vec4 position;
void main()
{
gl_Position = position;
}
);
const char* frag = GLSL
(
330 core,
out vec4 color;
void main()
{
color = vec4(0.0,1.0,0.0,1.0);
}
);
unsigned int shaderProgram = Program::Load
(
vert, GL_VERTEX_SHADER,
frag, GL_FRAGMENT_SHADER,
NULL
);
glUseProgram( shaderProgram );
/* Loop until the user closes the window */
while( !glfwWindowShouldClose( window ) )
{
/* Render here */
glClear( GL_COLOR_BUFFER_BIT );
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr );
/* Swap front and back buffers */
glfwSwapBuffers( window );
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram( shaderProgram );
glfwTerminate();
return 0;
}
glUseProgram() needs to be called after the shaders are compiled and linked. Here's a piece of sample code:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <iostream>
using namespace std;
int main()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", NULL, NULL);
if (window == NULL)
{
cout << "Failed to create GLFW window" << endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLAD
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
cout << "Failed to initialize GLAD" << endl;
return -1;
}
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glViewport(0, 0, 800, 600);
float TriangleVertices[] = {
-0.3f, -0.3f, 0.0f,
0.0f, 0.3f, 0.0f,
0.3f, -0.3f, 0.0f
};
unsigned int VBO, VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
//BindVertexArray for modifying it
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(TriangleVertices), TriangleVertices, GL_STATIC_DRAW);
glBindVertexArray(VAO);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
const char* vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos, 1.0);\n"
"}\0";
unsigned int vertexShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
const char* fragmentShaderSource = "#version 330 core\n"
"out vec4 FragColor; \n"
"void main()\n"
"{\n"
"FragColor = vec4(1.0f, 0.7f, 0.5f, 1.0f); \n"
"} \n";
unsigned int fragmentShader;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
unsigned int shaderProgram;
shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
glUseProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
while (!glfwWindowShouldClose(window))
{
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
//// draw triangle using the data from the VAO
//glUseProgram(shaderProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(shaderProgram);
glfwTerminate();
return 0;
}

Can't draw triangle using OpenGL

In this code I want to draw a simple triangle on a blue background using OpenGL. However, when I compile and run the code, only a window with the blue background appears (without the white triangle that is supposed to be drawn). I am using Xcode.
My code:
#include <iostream>
#include <string>
#include <GLUT/glut.h>
#include <OpenGL/gl3.h>
#include <fstream>
using namespace std;
// VAO & VBO objects
GLuint VBO;
GLuint VAO;
void display();
// vertex Data (position)
float vertex[] = {-1.0, 0.0 , 1.0,
0.0, 1.0 , 0.0,
0.0, 0.0 , 0.0 };
GLuint Program;
GLuint Vshader;
GLuint Fshader;
// main program
int main (int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE);
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutCreateWindow("My First GLUT/OpenGL Window");
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
// main display function
void display()
{
// reading the vertex shader
fstream VertexS;
VertexS.open("/Users/hawbashali/Desktop/Project X/BwlbWl/BwlbWl/vertexShader");
if(VertexS.good() == false)
cout << "Error openning the file \n";
if(VertexS.bad() == true)
cout << "Read/writing error on i/o operation \n";
if (VertexS.fail() == true)
cout <<"Logical error on i/o operation \n";
VertexS.seekg(0,ios::end);
int size = (int)VertexS.tellg();
VertexS.clear();
VertexS.seekg(0,ios::beg);
char* vBuffer = new (nothrow) char[size];
VertexS.read(vBuffer,size);
VertexS.close();
// reading fragment shader
fstream FragS;
FragS.open("/Users/hawbashali/Desktop/Project X/BwlbWl/BwlbWl/fragmentShader");
if(FragS.good() == false)
cout << "Error openning the file \n";
if(FragS.bad() == true)
cout << "Read/writing error on i/o operation \n";
if (FragS.fail() == true)
cout <<"Logical error on i/o operation \n";
FragS.seekg(0,ios::end);
int size2 = (int)FragS.tellg();
FragS.clear();
FragS.seekg(0,ios::beg);
char* fBuffer = new (nothrow) char[size2];
FragS.read(fBuffer,size2);
FragS.close();
// creating shaders
Vshader = glCreateShader(GL_VERTEX_SHADER);
Fshader = glCreateShader(GL_FRAGMENT_SHADER);
GLint x = size;
GLint y = size2;
glShaderSource(Vshader, 1,(const char**)&vBuffer, &x);
glShaderSource(Fshader, 1, (const char**)&fBuffer, &y);
glCompileShader(Vshader);
glCompileShader(Fshader);
Program = glCreateProgram();
glAttachShader(Program, Vshader);
glAttachShader(Program, Fshader);
glLinkProgram(Program);
glUseProgram(Program);
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER,9 *sizeof(vertex),vertex, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3,GL_FLOAT, GL_TRUE, 0, 0);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
glutSwapBuffers();
glDeleteShader(Vshader);
glDeleteShader(Fshader);
delete [] vBuffer;
delete [] fBuffer;
};
Vertex shader:
#version 320 core
layout(location = 0) in vec4 vPosition;
void
main()
{
gl_Position = vPosition;
}
Fragment shader:
#version 320 core
out vec4 fColor;
void
main()
{
fColor = vec4(0.0, 0.0, 1.0, 1.0);
}
glClearColor(0, 0, 1,1);
...
fColor = vec4(0.0, 0.0, 1.0, 1.0);
You're trying to draw a blue triangle on top of a blue background. You'll have to dial up your contrast pretty high to see that :)
Make one of them a different color, like red.
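For example, keep the blue clear color and make the fragment output red (this is exactly what the full listing below does):
out vec4 fColor;
void main()
{
    fColor = vec4(1.0, 0.0, 0.0, 1.0); // red stands out against the blue clear color
}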
You're also:
Missing a VAO
Not requesting a Core context
Using #version 320 core instead of #version 150 core
Try this:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE, len = 10;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
std::vector< char > log( len, 'X' );
if( glIsShader(obj) ) glGetShaderInfoLog( obj, len, NULL, &log[0] );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, len, NULL, &log[0] );
std::cerr << &log[0] << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
150 core,
layout(location = 0) in vec4 vPosition;
void
main()
{
gl_Position = vPosition;
}
);
const char* frag = GLSL
(
150 core,
out vec4 fColor;
void
main()
{
fColor = vec4(1.0, 0.0, 0.0, 1.0);
}
);
// VAO & VBO objects
GLuint VAO;
GLuint VBO;
GLuint prog;
void init()
{
// vertex Data (position)
float vertex[] = { -1.0, -1.0 , 0.0,
1.0, -1.0 , 0.0,
0.0, 1.0 , 0.0 };
glGenVertexArrays( 1, &VAO );
glBindVertexArray( VAO );
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
prog = Program::Load( vert, NULL, frag );
glUseProgram( prog );
glEnableVertexAttribArray(0);
glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, 0, 0 );
}
void display()
{
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutInitContextVersion(3, 2);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow("My First GLUT/OpenGL Window");
glewExperimental = GL_TRUE;
if( GLEW_OK != glewInit() )
return -1;
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
EDIT: Ported to GL 2.1:
#include <GL/glew.h>
#include <GL/glut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE, len = 10;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
std::vector< char > log( len, 'X' );
if( glIsShader(obj) ) glGetShaderInfoLog( obj, len, NULL, &log[0] );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, len, NULL, &log[0] );
std::cerr << &log[0] << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
120,
attribute vec4 vPosition;
void main()
{
gl_Position = vPosition;
}
);
const char* frag = GLSL
(
120,
void main()
{
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
);
// VBO objects
GLuint VBO;
GLuint prog;
void init()
{
// vertex Data (position)
float vertex[] = { -1.0, -1.0 , 0.0,
1.0, -1.0 , 0.0,
0.0, 1.0 , 0.0 };
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
prog = Program::Load( vert, NULL, frag );
glUseProgram( prog );
int posLoc = glGetAttribLocation( prog, "vPosition" );
glEnableVertexAttribArray( posLoc );
glVertexAttribPointer( posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0 );
}
void display()
{
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutCreateWindow("My First GLUT/OpenGL Window");
if( GLEW_OK != glewInit() )
return -1;
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
You have provided an invalid buffer size in the following line:
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(vertex), vertex, GL_STATIC_DRAW);
sizeof(vertex) returns the total size of the array (e.g. 36 bytes), not the size of the underlying type, so there is no need to multiply by 9. If you still want to use multiplication, try the following:
9 * sizeof(float)
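In other words, the call becomes:
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
// or, equivalently:
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), vertex, GL_STATIC_DRAW);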
I also have to write this down: you should separate your initialization and draw cycle, like in genpfault's answer.