In this code I want to draw a simple triangle on a blue background using OpenGL. However, when I compile and run the code, only a window with the blue background appears (without the white triangle that is supposed to be drawn). I am using Xcode.
My code:
#include <iostream>
#include <string>
#include <GLUT/glut.h>
#include <OpenGL/gl3.h>
#include <fstream>
using namespace std;
// VAO & VBO objects
GLuint VBO;
GLuint VAO;
void display();
// vertex Data (position)
float vertex[] = { -1.0, 0.0, 1.0,
                    0.0, 1.0, 0.0,
                    0.0, 0.0, 0.0 };
GLuint Program;
GLuint Vshader;
GLuint Fshader;
// main program
int main (int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE);
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutCreateWindow("My First GLUT/OpenGL Window");
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
// main display function
void display()
{
// reading the vertex shader
fstream VertexS;
VertexS.open("/Users/hawbashali/Desktop/Project X/BwlbWl/BwlbWl/vertexShader");
if(VertexS.good() == false)
cout << "Error openning the file \n";
if(VertexS.bad() == true)
cout << "Read/writing error on i/o operation \n";
if (VertexS.fail() == true)
cout <<"Logical error on i/o operation \n";
VertexS.seekg(0,ios::end);
int size = (int)VertexS.tellg();
VertexS.clear();
VertexS.seekg(0,ios::beg);
char* vBuffer = new (nothrow) char[size];
VertexS.read(vBuffer,size);
VertexS.close();
// reading fragment shader
fstream FragS;
FragS.open("/Users/hawbashali/Desktop/Project X/BwlbWl/BwlbWl/fragmentShader");
if(FragS.good() == false)
cout << "Error openning the file \n";
if(FragS.bad() == true)
cout << "Read/writing error on i/o operation \n";
if (FragS.fail() == true)
cout <<"Logical error on i/o operation \n";
FragS.seekg(0,ios::end);
int size2 = (int)FragS.tellg();
FragS.clear();
FragS.seekg(0,ios::beg);
char* fBuffer = new (nothrow) char[size2];
FragS.read(fBuffer,size2);
FragS.close();
// creating shaders
Vshader = glCreateShader(GL_VERTEX_SHADER);
Fshader = glCreateShader(GL_FRAGMENT_SHADER);
GLint x = size;
GLint y = size2;
glShaderSource(Vshader, 1,(const char**)&vBuffer, &x);
glShaderSource(Fshader, 1, (const char**)&fBuffer, &y);
glCompileShader(Vshader);
glCompileShader(Fshader);
Program = glCreateProgram();
glAttachShader(Program, Vshader);
glAttachShader(Program, Fshader);
glLinkProgram(Program);
glUseProgram(Program);
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER,9 *sizeof(vertex),vertex, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3,GL_FLOAT, GL_TRUE, 0, 0);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
glutSwapBuffers();
glDeleteShader(Vshader);
glDeleteShader(Fshader);
delete [] vBuffer;
delete [] fBuffer;
};
Vertex shader:
#version 320 core
layout(location = 0) in vec4 vPosition;
void
main()
{
gl_Position = vPosition;
}
Fragment shader:
#version 320 core
out vec4 fColor;
void
main()
{
fColor = vec4(0.0, 0.0, 1.0, 1.0);
}
glClearColor(0, 0, 1,1);
...
fColor = vec4(0.0, 0.0, 1.0, 1.0);
You're trying to draw a blue triangle on top of a blue background. You'll have to dial up your contrast pretty high to see that :)
Make one of them a different color, like red.
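For example, change the fragment shader's output so the triangle no longer matches the clear color:
fColor = vec4(1.0, 0.0, 0.0, 1.0); // red triangle, now visible against the blue background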
You're also:
Missing a VAO
Not requesting a Core context
Using #version 320 core instead of #version 150 core
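As an aside, if you stay on Apple's GLUT framework instead of switching to freeglut, the Core context is requested through the display-mode flags rather than glutInitContextVersion(); a rough sketch, assuming Apple's GLUT headers (which provide GLUT_3_2_CORE_PROFILE):
#include <GLUT/glut.h>
...
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_3_2_CORE_PROFILE); // ask for a 3.2+ Core context on macOS
glutCreateWindow("My First GLUT/OpenGL Window");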
Try this:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE, len = 10;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
std::vector< char > log( len, 'X' );
if( glIsShader(obj) ) glGetShaderInfoLog( obj, len, NULL, &log[0] );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, len, NULL, &log[0] );
std::cerr << &log[0] << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
150 core,
layout(location = 0) in vec4 vPosition;
void
main()
{
gl_Position = vPosition;
}
);
const char* frag = GLSL
(
150 core,
out vec4 fColor;
void
main()
{
fColor = vec4(1.0, 0.0, 0.0, 1.0);
}
);
// VAO & VBO objects
GLuint VAO;
GLuint VBO;
GLuint prog;
void init()
{
// vertex Data (position)
float vertex[] = { -1.0, -1.0 , 0.0,
1.0, -1.0 , 0.0,
0.0, 1.0 , 0.0 };
glGenVertexArrays( 1, &VAO );
glBindVertexArray( VAO );
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(vertex), vertex, GL_STATIC_DRAW);
prog = Program::Load( vert, NULL, frag );
glUseProgram( prog );
glEnableVertexAttribArray(0);
glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, 0, 0 );
}
void display()
{
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutInitContextVersion(3, 2);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow("My First GLUT/OpenGL Window");
glewExperimental = GL_TRUE;
if( GLEW_OK != glewInit() )
return -1;
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
EDIT: Ported to GL 2.1:
#include <GL/glew.h>
#include <GL/glut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE, len = 10;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
std::vector< char > log( len, 'X' );
if( glIsShader(obj) ) glGetShaderInfoLog( obj, len, NULL, &log[0] );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, len, NULL, &log[0] );
std::cerr << &log[0] << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
120,
attribute vec4 vPosition;
void main()
{
gl_Position = vPosition;
}
);
const char* frag = GLSL
(
120,
void main()
{
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
);
// VBO objects
GLuint VBO;
GLuint prog;
void init()
{
// vertex Data (position)
float vertex[] = { -1.0, -1.0 , 0.0,
1.0, -1.0 , 0.0,
0.0, 1.0 , 0.0 };
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(vertex), vertex, GL_STATIC_DRAW);
prog = Program::Load( vert, NULL, frag );
glUseProgram( prog );
int posLoc = glGetAttribLocation( prog, "vPosition" );
glEnableVertexAttribArray( posLoc );
glVertexAttribPointer( posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0 );
}
void display()
{
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutCreateWindow("My First GLUT/OpenGL Window");
if( GLEW_OK != glewInit() )
return -1;
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
You have provided an invalid buffer size in the following line:
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(vertex), vertex, GL_STATIC_DRAW);
sizeof(vertex) returns the total size of the array (e.g. 36 bytes), not the size of the underlying type, so there is no need to multiply by 9. If you still want to use multiplication, use the following:
9 * sizeof(float)
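In other words, either of the following gives the intended 36-byte buffer for the nine-float vertex array from the question:
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);     // sizeof on the array itself is already 9 * sizeof(float)
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), vertex, GL_STATIC_DRAW);  // or spell the element count out explicitly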
One more thing worth writing down: you should separate your initialization and draw cycle, as in genpfault's answer.
Related
I have a basic OpenGL program using GLFW and GLEW to render a triangle.
I'm using a Mac with Xcode with GLFW 3.3 and GLEW 2.1.
I was encountering a problem where glfwCreateWindow() would return null if I set the OpenGL profile to core profile (which, as far as I understand, the Mac requires), and I solved it by setting the forward-compatibility hint to true.
Can somebody explain to me if this solution is right or not?
One other thing: the window now gets created without problems, but I'm not seeing anything on the screen.
This is the code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
static unsigned int CompileShader(unsigned int type, const std::string& source)
{
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
// Compiles the actual shader
glCompileShader(id);
// Error handling of the compilation
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE)
{
int length;
// Gets the length of the message
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
// This is used to allocate memory on the stack dynamically
char* message = (char*)alloca(length * sizeof(char));
// Gets the log
glGetShaderInfoLog(id, length, &length, message);
std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader)
{
unsigned int program = glCreateProgram();
// Creating the two shader
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
// Attaches the two shaders to the program and validate everything
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program); // This links the shader executable to the actual processor
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
// to initialize glew go to c++ preprocessor and add GLEW_STATIC
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
/* Create a window and its OpenGl context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
// Glew init HAS to be put after making the context
if (glewInit() != GLEW_OK)
std::cout << "GlewInit Error" << std::endl;
std::cout << "VERSION:" << std::endl;
std::cout << glGetString(GL_VERSION) << std::endl;
std::cout << "GL Version: " << (char *)glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
// Defining the position of the vertices
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
std::string vertexShader =
"#version 330 core\n"
"\n"
// here we are saying that the attribute at the position 0 (see attribPointer)
// which is the position itself is a input value put in a vec4 because
// glPosition needs a vec4
"layout(location = 0) in vec4 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\n";
std::string fragmentShader =
"#version 330 core\n"
"\n"
"layout(location = 0) out vec4 color;\n"
"\n"
"void main()\n"
"{\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
// second parameter is the starting index and the third is the number of indexes
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
Vertex Array Objects (VAOs) aren't optional in Core contexts like they are in Compatibility. You have to have one bound to draw anything.
Create one and bind it before setting up your vertex layout and drawing:
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
...
glEnableVertexAttribArray( ... );
glVertexAttribPointer( ... );
...
glDrawArrays( ... );
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
std::exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const char* vert = 1 + R"GLSL(
#version 330 core
// here we are saying that the attribute at the position 0 (see attribPointer)
// which is the position itself is a input value put in a vec4 because
// glPosition needs a vec4
layout(location = 0) in vec4 position;
void main()
{
gl_Position = position;
};
)GLSL";
const char* frag = 1 + R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main()
{
color = vec4(1.0, 0.0, 0.0, 1.0);
};
)GLSL";
int main( void )
{
if( !glfwInit() )
return -1;
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
GLFWwindow* window = glfwCreateWindow( 640, 480, "Hello World", NULL, NULL );
if( !window )
{
glfwTerminate();
return -1;
}
glfwMakeContextCurrent( window );
// Glew init HAS to be put after making the context
if( glewInit() != GLEW_OK )
std::cout << "GlewInit Error" << std::endl;
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
// Defining the position of the vertices
float positions[ 6 ] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers( 1, &buffer );
glBindBuffer( GL_ARRAY_BUFFER, buffer );
glBufferData( GL_ARRAY_BUFFER, 6 * sizeof( float ), positions, GL_STATIC_DRAW );
glEnableVertexAttribArray( 0 );
glVertexAttribPointer( 0, 2, GL_FLOAT, GL_FALSE, sizeof( float ) * 2, 0 );
GLuint shader = Program::Load( vert, GL_VERTEX_SHADER, frag, GL_FRAGMENT_SHADER, NULL );
glUseProgram( shader );
while( !glfwWindowShouldClose( window ) )
{
glClear( GL_COLOR_BUFFER_BIT );
glDrawArrays( GL_TRIANGLES, 0, 3 );
glfwSwapBuffers( window );
glfwPollEvents();
}
glDeleteProgram( shader );
glfwTerminate();
return 0;
}
I am trying to pass a distance as a vertex attribute from my C++ code into the shader, which draws a line between two vertices.
Currently one vertex has distance 0 (the start) and the other has distance 1.0. I would expect this value to be interpolated just like the colors, but it does not seem to work: I would expect half of the line to be red and the other half green, yet the whole line turns out green.
Fragment shader code is below; dist comes into the vertex shader as layout(location = 2) in float in_dist and is passed out as out float dist (no operations performed).
in float dist;
Fragment getFragment()
{
Fragment frag;
frag.depth = vs_positionScreenSpace.w;
frag.gPosition = vs_gPosition;
if(dist > 0.5){
frag.color = vec4(1.0,0.0,0.0,1.0);
}else{
frag.color = vec4(0.0,1.0,0.0,1.0);
}
frag.gNormal = vec4(0.0, 0.0, 1.0, 0.0);
return frag;
}
Vertex shader
layout(location = 0) in vec3 in_point_position;
layout(location = 1) in vec4 in_color;
layout(location = 2) in float in_dist;
out float dist;
void main() {
//somestuff....
dist = in_dist;
}
Workin' fine here:
MCVE:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <vector>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
std::exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const char* vert = R"GLSL(
#version 330 core
layout( location = 0 ) in vec4 inPos;
layout( location = 1 ) in float inDist;
out float dist;
void main()
{
dist = inDist;
gl_Position = inPos;
}
)GLSL";
const char* frag = R"GLSL(
#version 330 core
in float dist;
out vec4 outColor;
void main()
{
if( dist > 0.5 )
{
outColor = vec4( 1.0, 0.0, 0.0, 1.0 );
}
else
{
outColor = vec4( 0.0, 1.0, 0.0, 1.0 );
}
}
)GLSL";
int main( int argc, char** argv )
{
glfwInit();
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
GLFWwindow* window = glfwCreateWindow( 640, 480, "GLFW", NULL, NULL );
glfwMakeContextCurrent( window );
glewInit();
glfwSwapInterval( 1 );
GLuint prog = Program::Load( vert, GL_VERTEX_SHADER, frag, GL_FRAGMENT_SHADER, NULL );
glUseProgram( prog );
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
GLuint vbo0 = 0;
glGenBuffers( 1, &vbo0 );
glBindBuffer( GL_ARRAY_BUFFER, vbo0 );
std::vector<float> data0 =
{
-0.5f, -0.5f,
0.5f, 0.5f,
};
glBufferData( GL_ARRAY_BUFFER, data0.size() * sizeof( std::vector<float>::value_type ), data0.data(), GL_STATIC_DRAW );
glVertexAttribPointer( 0 , 2, GL_FLOAT, GL_FALSE, 0, 0 );
glEnableVertexAttribArray( 0 );
GLuint vbo1 = 0;
glGenBuffers( 1, &vbo1 );
glBindBuffer( GL_ARRAY_BUFFER, vbo1 );
std::vector<float> data1 =
{
0.0f, 1.0f,
};
glBufferData( GL_ARRAY_BUFFER, data1.size() * sizeof( std::vector<float>::value_type ), data1.data(), GL_STATIC_DRAW );
glVertexAttribPointer( 1 , 1, GL_FLOAT, GL_FALSE, 0, 0 );
glEnableVertexAttribArray( 1 );
while( !glfwWindowShouldClose( window ) )
{
glfwPollEvents();
int w, h;
glfwGetFramebufferSize( window, &w, &h );
glViewport( 0, 0, w, h );
glClear( GL_COLOR_BUFFER_BIT );
glUseProgram( prog );
glBindVertexArray( vao );
glDrawArrays( GL_LINES, 0, 2 );
glfwSwapBuffers( window );
}
glfwDestroyWindow( window );
glfwTerminate();
}
Solved: the issue turned out not to be in the shader code, but in how the vertex attribute was defined at glVertexAttribPointer.
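The question doesn't show that call, but for a tightly packed per-vertex float feeding layout(location = 2) in float in_dist, the setup would look roughly like this (a hypothetical sketch; the buffer name and layout are assumptions, not the asker's actual code):
glBindBuffer(GL_ARRAY_BUFFER, distVBO);                        // hypothetical VBO holding { 0.0f, 1.0f }
glVertexAttribPointer(2, 1, GL_FLOAT, GL_FALSE, 0, (void*)0);  // 1 float per vertex, tightly packed
glEnableVertexAttribArray(2);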
I have VS 2013 Community Edition. Using NuGet, I installed the nupengl.core package.
The only thing I added to my project (besides the nupengl.core package) is opengl32.lib (under C/C++ -> Linker -> Input -> Additional Dependencies). I started with a very simple example, but it renders only one point (the shader program should render three points). The strange thing is that if I introduce an error into the shader code, say "avoid main" instead of "void main", it still compiles without signaling errors (when I ask GLEW for errors through the usual functions, it says everything is OK). This example works in PyOpenGL, for example, but not in VS 2013. Can somebody tell me what is wrong with my code?
The code is:
#include <GL\glew.h>
#include <GL\freeglut.h>
#include <iostream>
using namespace std;
static const GLchar * vertex_shader_source[] = {
"#version 440 core \n ",
"void main(void){ \n",
"vec4 ves[3]=vec4[3] (vec4(0.0,-0.5,0.5,1.0),
vec4(0.0,0.5,0.5,1.0),vec4(-0.5,-0.5,0.6,1.0)) ; \n"
" gl_Position=ves[gl_VertexID] ; \n",
"} \n"
};
static const GLchar * fragment_shader_source[] = {
"#version 440 core \n ",
"out vec4 color ; \n",
"void main(void){ \n",
" color=vec4(0.0,0.1,0.0,1.0) ; \n",
"} \n"
};
static GLfloat g_nearPlane = 1;
static GLfloat g_farPlane = 1000;
void reshape(GLint width, GLint height){
static int g_Width = width;
static int g_Height = height;
glViewport(0, 0, g_Width, g_Height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(65.0, (float)g_Width / g_Height, g_nearPlane, g_farPlane);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
GLuint gf;
GLuint program;
void display(void){
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glPointSize(10.0f);
const static GLfloat red[] = { 1.0f, 0.0f, 0.0f, 1.0f };
glClearBufferfv(GL_COLOR, 0, red);
glDrawArrays(GL_POINTS, 0, 3);
glutSwapBuffers();
glutPostRedisplay();
}
void init(void){
GLuint vertex_shader, fragment_shader ;
glEnable(GL_DEPTH_TEST);
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, vertex_shader_source, NULL);
glCompileShader(vertex_shader);
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, fragment_shader_source, NULL);
glCompileShader(fragment_shader);
program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
glLinkProgram(program);
glUseProgram(program);
glGenVertexArrays(1, &gf);
glBindVertexArray(gf);
glPointSize(10.0f);
}
int main(int argc, char **argv){
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
glutInitContextVersion(4, 4);
glutInitContextFlags(GLUT_FORWARD_COMPATIBLE);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutSetOption(
GLUT_ACTION_ON_WINDOW_CLOSE,
GLUT_ACTION_GLUTMAINLOOP_RETURNS
);
glutInitWindowSize(250, 250);
glutInitWindowPosition(100, 100);
glutCreateWindow("shader doesn't work");
glewExperimental = GL_TRUE;
glewInit();
glutDisplayFunc(display);
glutReshapeFunc(reshape);
init();
glDrawArrays(GL_POINTS, 0, 3);
glutMainLoop();
return 0; /* ANSI C requires main to return int. */
}
Workin' fine here:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
440 core,
void main(void)
{
vec4 ves[3] = vec4[3]
(
vec4( 0.0, -0.5, 0.5, 1.0 ),
vec4( 0.0, 0.5, 0.5, 1.0 ),
vec4( -0.5, -0.5, 0.6, 1.0 )
);
gl_Position = ves[ gl_VertexID ];
}
);
const char* frag = GLSL
(
440 core,
out vec4 color;
void main(void)
{
color = vec4( 0.0, 1.0, 0.0, 1.0 );
}
);
GLuint gf;
GLuint program;
void init()
{
glEnable(GL_DEPTH_TEST);
program = Program::Load
(
vert, GL_VERTEX_SHADER,
frag, GL_FRAGMENT_SHADER,
NULL
);
glUseProgram(program);
glGenVertexArrays(1, &gf);
glBindVertexArray(gf);
glPointSize(10.0f);
}
void display()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
const static GLfloat red[] = { 1.0f, 0.0f, 0.0f, 1.0f };
glClearBufferfv(GL_COLOR, 0, red);
glPointSize(10.0f);
glDrawArrays(GL_POINTS, 0, 3);
glutSwapBuffers();
}
int main( int argc, char** argv )
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
glutInitContextVersion(4, 4);
glutInitContextFlags(GLUT_FORWARD_COMPATIBLE);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutSetOption
(
GLUT_ACTION_ON_WINDOW_CLOSE,
GLUT_ACTION_GLUTMAINLOOP_RETURNS
);
glutInitWindowSize(250, 250);
glutInitWindowPosition(100, 100);
glutCreateWindow("shader doesn't work");
glewExperimental = GL_TRUE;
glewInit();
glutDisplayFunc(display);
init();
glutMainLoop();
return 0;
}
FYI, none of the old fixed-function matrix stuff you're doing in reshape() and display() will work in a Core context.
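In a Core context the reshape callback can only set the viewport; the projection has to be computed on the CPU and handed to the shader as a uniform. A minimal sketch of what reshape() shrinks to:
void reshape(GLint width, GLint height)
{
    glViewport(0, 0, width, height);
    // glMatrixMode()/glLoadIdentity()/gluPerspective() don't exist in Core;
    // build the perspective matrix yourself and upload it with glUniformMatrix4fv() instead
}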
I have spent a good amount of time with the fixed pipeline of OpenGL, and I have recently begun learning the programmable pipeline. I know my painter and shader classes are not the issue, because they work with the fixed-function pipeline. I can't seem to get glDrawArrays to work for the life of me.
I am not sure if my error is in how I set up the vertex buffer object, in my shader, or elsewhere. I have also debugged my code and set breakpoints throughout the display function, and it never seems to get past glDrawArrays() (i.e. it hits a breakpoint at glDrawArrays, but doesn't hit any after it; not sure why).
All that gets output is a white screen, nothing else.
Here's my code:
float vertices[] = { 0.75, 0.75, 0.0, 1.0,
0.75, -0.75, 0.0, 1.0,
-0.75, -0.75, 0.0, 1.0 };
GLuint vertexBufferObject;
GLuint positionLocation;
GLuint vaoObject;
void initVertexBuffer(GLuint& vertexBufferObject, float* vertexData, unsigned int size, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, size, vertexData, GL_DRAW_TYPE);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
void main(int argc, char* argv[])
{
painter.initEngine(argc, argv, 500, 500, 0, 0, "2D3D");
painter.initGlutFuncs(display, resize, Input::MouseButtonClick, Input::MouseDrag, keyboard);
defaultShader.init("default.vert", "default.frag");
defaultShader.link();
initVertexBuffer(vertexBufferObject, vertices, sizeof(vertices), GL_STATIC_DRAW);
glGenVertexArrays(1, &vaoObject);
glBindVertexArray(vaoObject);
positionLocation = glGetAttribLocation(defaultShader.id(), "position");
painter.startMainLoop();
}
void display()
{
painter.clearDisplay();
defaultShader.bind();
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glEnableVertexAttribArray(positionLocation);
glVertexAttribPointer(positionLocation, 4, GL_FLOAT, GL_FALSE, 0, 0);
std::cout << Framework::glErrorCheck() << std::endl;
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(positionLocation);
glBindBuffer(GL_ARRAY_BUFFER, 0);
defaultShader.unbind();
painter.flushAndSwapBuffers();
}
Vertex Shader:
#version 140
in vec4 position;
void main()
{
gl_Position = position;
}
Fragment Shader:
#version 140
out vec4 outColor;
void main()
{
outColor = vec4(1.0, 0.0, 1.0, 1.0);
}
Edit: Code updated with Joey Dewd, keltar, and genpfault's suggestions.
I'm no longer hanging at glDrawArrays; instead of a white screen I'm now getting a black screen. This leads me to think that my buffer is somehow still not set up correctly, or possibly that I am missing something else needed for the vertex array object initialization (vaoObject)?
void initVertexBuffer(GLuint vertexBufferObject, float* vertexData, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertexData), vertexData, GL_DRAW_TYPE);
^^^^^^^^^^^^^^^^^^ nnnnope
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
sizeof(vertexData) in this context is not what you seem to hope it is.
It'll probably be 4 or 8 depending on your 64-bit-edness, i.e. the size of a pointer-to-float, not sizeof(vertices).
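A quick illustration of the array-to-pointer decay (sizes assume a 64-bit build with 4-byte floats):
#include <iostream>

float vertices[12];

void f(float* vertexData)
{
    std::cout << sizeof(vertexData) << "\n";  // 8: size of the pointer the array decayed to
}

int main()
{
    std::cout << sizeof(vertices) << "\n";    // 48: 12 floats * 4 bytes
    f(vertices);
}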
You need to pass in a separate size argument:
void initVertexBuffer(GLuint& vertexBufferObject, float* vertexData, unsigned int size, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, size, vertexData, GL_DRAW_TYPE);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
...
void main(int argc, char* argv[])
{
...
initVertexBuffer(vertexBufferObject, vertices, sizeof(vertices), GL_STATIC_DRAW);
...
}
Or a contiguous (important!) standard container like std::vector:
template< typename Container >
void initVertexBuffer(GLuint& vertexBufferObject, const Container& vertexData, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, vertexData.size() * sizeof( typename Container::value_type ), &vertexData[0], GL_DRAW_TYPE);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
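Which would be used like this (same triangle data as above, just held in a std::vector):
std::vector<float> vertices = {  0.75f,  0.75f, 0.0f, 1.0f,
                                 0.75f, -0.75f, 0.0f, 1.0f,
                                -0.75f, -0.75f, 0.0f, 1.0f };
initVertexBuffer(vertexBufferObject, vertices, GL_STATIC_DRAW);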
Full example:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <iostream>
#include <vector>
using namespace std;
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
void initVertexBuffer(GLuint& vertexBufferObject, float* vertexData, unsigned int size, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, size, vertexData, GL_DRAW_TYPE);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
GLuint prog;
GLuint vaoObject;
void init()
{
const char* vert = GLSL
(
140,
in vec4 position;
void main()
{
gl_Position = position;
}
);
const char* frag = GLSL
(
140,
out vec4 outColor;
void main()
{
outColor = vec4(1.0, 0.0, 1.0, 1.0);
}
);
prog = Program::Load( vert, NULL, frag );
glUseProgram( prog );
glGenVertexArrays(1, &vaoObject);
glBindVertexArray(vaoObject);
float vertices[] =
{
0.75, 0.75, 0.0, 1.0,
0.75, -0.75, 0.0, 1.0,
-0.75, -0.75, 0.0, 1.0
};
GLuint vertexBufferObject;
initVertexBuffer(vertexBufferObject, vertices, sizeof(vertices), GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
GLuint positionLocation = glGetAttribLocation(prog, "position");
glEnableVertexAttribArray(positionLocation);
glVertexAttribPointer(positionLocation, 4, GL_FLOAT, GL_FALSE, 0, 0);
}
void display()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram( prog );
glBindVertexArray(vaoObject);
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
int main(int argc, char **argv)
{
glutInit( &argc, argv );
glutInitContextVersion( 3, 1 );
glutInitContextProfile( GLUT_COMPATIBILITY_PROFILE );
glutInitDisplayMode( GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE );
glutInitWindowSize( 600, 600 );
glutCreateWindow( "GLUT" );
glewExperimental = GL_TRUE;
glewInit();
init();
glutDisplayFunc( display );
glutMainLoop();
return 0;
}
glGenBuffers(1, &vertexBufferObject);
This saves the id of the vertex buffer in a local parameter. That value is no longer available in the draw call and does not modify the global vertexBufferObject (the buffer still exists; you have just lost its id and can't use it anymore, or even destroy it).
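Taking the handle by reference (as genpfault's answer above already does) makes glGenBuffers() write into the caller's global instead of a doomed local copy:
// before: void initVertexBuffer(GLuint  vertexBufferObject, ...)  -> id written into a local copy, then lost
// after:  void initVertexBuffer(GLuint& vertexBufferObject, ...)  -> id written into the caller's handle
void initVertexBuffer(GLuint& vertexBufferObject, float* vertexData, unsigned int size, GLenum usage)
{
    glGenBuffers(1, &vertexBufferObject);
    ...
}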
I'm having trouble drawing a couple of triangles in colour. The shaders load fine, and I have a slight feeling that it's the way I've laid out my data, but in general I have no clue why it won't draw them in colour.
main.cpp :
//g++ main.cpp -lGL -lGLEW -lGLU -lglut LoadShaders.cpp -o run
#include<GL/glew.h>
#include<GL/freeglut.h>
#include<GL/gl.h>
#include<iostream>
#include"LoadShaders.h"
#include"vgl.h"
using namespace std;
enum VAO_IDs {Triangles, NumVAOs };
enum Attrib_IDs {vPosition = 0, cPosition = 1};
GLuint VAOs;
GLuint Buffers;
const GLuint NumVertices = 6;
void init(void)
{
glGenVertexArrays(1, &VAOs);
glBindVertexArray(VAOs);
GLfloat vertices[NumVertices] [2] = {
{-0.90, -0.90},
{0.85, -0.90 },
{-0.90, 0.85 },
{0.90, -0.85 },
{0.90, 0.90 },
{-0.85, 0.90 }
};
glGenBuffers(1, &Buffers);
glBindBuffer(GL_ARRAY_BUFFER, Buffers);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
GLfloat colours[NumVertices] [3] = {
{0.583f, 0.568f, 1.000f},
{0.100f, 0.435f, 0.235f},
{0.456f, 0.345f, 0.654f},
{0.345f, 0.222f, 0.564f},
{0.109f, 0.538f, 1.000f},
{0.057f, 0.453f, 0.777f},
};
GLuint colourBuffer;
glGenBuffers(1, &colourBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colourBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(colours), colours, GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(0);
ShaderInfo shaders[]={
{GL_VERTEX_SHADER, "triangles.vert"},
{GL_FRAGMENT_SHADER, "triangles.frag"},
{GL_NONE}
};
GLuint program = LoadShaders(shaders);
glUseProgram(program);
}
void display(void)
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAOs);
glDrawArrays(GL_TRIANGLES, 0, NumVertices);
glFlush();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA);
glutInitWindowSize(512,512);
glutInitContextVersion( 4, 2 );
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow(argv[0]);
if (glewInit()) {
cerr << "Unable to initialize GLEW"<<endl;
exit(EXIT_FAILURE);
}
init();
glutDisplayFunc(display);
glutMainLoop();
}
triangles.vert:
#version 420 core
layout(location = 0) in vec2 vPosition;
layout(location = 1) in vec3 cPosition;
void main()
{
gl_Position = vPosition;
fragmentColor = cPosition;
}
triangles.frag:
#version 420 core
in vec3 fragmentColor;
out vec3 fColor;
void main()
{
fColor = fragmentColor;
}
Two problems in your vertex shader:
Missing a declaration for fragmentColor.
Invalid implicit conversion: gl_Position is a vec4, not a vec2. You have to expand out vPosition manually using a vec4 constructor: vec4( vPosition, 0.0, 1.0 ).
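Applying both fixes, the asker's vertex shader becomes (same inputs, only the two corrections):
#version 420 core
layout(location = 0) in vec2 vPosition;
layout(location = 1) in vec3 cPosition;
out vec3 fragmentColor;                        // the missing declaration
void main()
{
    gl_Position = vec4( vPosition, 0.0, 1.0 ); // expand the vec2 to a vec4
    fragmentColor = cPosition;
}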
All together:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
420 core,
layout(location = 0) in vec2 vPosition;
layout(location = 1) in vec3 cPosition;
out vec3 fragmentColor;
void main()
{
gl_Position = vec4( vPosition, 0.0, 1.0 );
fragmentColor = cPosition;
}
);
const char* frag = GLSL
(
420 core,
in vec3 fragmentColor;
out vec3 fColor;
void main()
{
fColor = fragmentColor;
}
);
GLuint VAO;
const GLuint NumVertices = 6;
void init()
{
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
GLfloat vertices[NumVertices] [2] =
{
{-0.90, -0.90},
{0.85, -0.90 },
{-0.90, 0.85 },
{0.90, -0.85 },
{0.90, 0.90 },
{-0.85, 0.90 },
};
GLuint Buffer;
glGenBuffers(1, &Buffer);
glBindBuffer(GL_ARRAY_BUFFER, Buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
GLfloat colours[NumVertices] [3] =
{
{0.583f, 0.568f, 1.000f},
{0.100f, 0.435f, 0.235f},
{0.456f, 0.345f, 0.654f},
{0.345f, 0.222f, 0.564f},
{0.109f, 0.538f, 1.000f},
{0.057f, 0.453f, 0.777f},
};
GLuint colourBuffer;
glGenBuffers(1, &colourBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colourBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(colours), colours, GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(0);
GLuint program = Program::Load( vert, NULL, frag );
glUseProgram(program);
}
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, NumVertices);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(512, 512);
glutInitContextVersion(4, 2);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow(argv[0]);
glewExperimental = GL_TRUE;
if( GLEW_OK != glewInit() )
{
return 1;
}
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}