I am learning OpenGL by following TheCherno's project series.
The code below is from that series, reproduced faithfully for learning purposes; I have included both the vertex and fragment shaders as well.
My issue is that the square is drawn but shows no color at all, so I am guessing it is falling back to the default shader.
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
std::cout << "ERROR GLEW" << std::endl;
std::cout << glGetString(GL_VERSION) << std::endl;
float positions[] = {
-0.5f,-0.5f, //0
0.5f, -0.5f, //1
0.5f, 0.5f, //2
-0.5f, 0.5f //3
};
unsigned int indices[] = {
0, 1, 2,
2, 3, 0
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * 2 * sizeof(float), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
unsigned int ibo;
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 6 * sizeof(unsigned int), indices, GL_STATIC_DRAW);
ShaderSourceProgram shader = ParseShader("res/shaders/Basic.shader");
//std::cout << "VERTEX" << std::endl;
//std::cout << shader.VertexShader << std::endl;
unsigned int shaderProgram = CreateShader(shader.VertexShader, shader.FragmentShader);
glUseProgram(shaderProgram);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shaderProgram);
glfwTerminate();
return 0;
}
The shaders:
#shader vertex
#version 330 core
layout(location = 0)in vec4 position;
void main()
{
gl_Position = position;
};
#shader fragment
#version 330 core
out vec4 color;
void main()
{
color = vec4(0.0,1.0,0.0,1.0);
};
EDIT: Please click here for the entire project (github)
As @Ripi2 pointed out, your glBufferData() calls aren't quite right: the vertex-buffer call asks GL to copy 6 * 2 = 12 floats out of positions, which only holds 8 floats, so it reads past the end of the array (the index-buffer call happens to request exactly the right size). Since you're using raw arrays, a straight sizeof(<arrayname>) gives the correct size in both cases:
glBufferData( GL_ARRAY_BUFFER, sizeof( positions ), positions, GL_STATIC_DRAW );
...
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( indices ), indices, GL_STATIC_DRAW );
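As a side note, sizeof(<arrayname>) only works while the name still refers to a true array; once it decays to a pointer (for example, when passed to a function) you get the pointer size instead. A tiny standalone sketch, no GL needed:
#include <iostream>
// 'data' is only a pointer here, so sizeof(data) is the pointer size (typically 8 bytes),
// not the 32 bytes the array actually occupies
void PrintSize( const float* data )
{
    std::cout << sizeof( data ) << "\n";
}
int main()
{
    float positions[] = { -0.5f, -0.5f, 0.5f, -0.5f, 0.5f, 0.5f, -0.5f, 0.5f };
    std::cout << sizeof( positions ) << "\n"; // 8 floats -> 32 bytes, what glBufferData() wants
    PrintSize( positions );                   // array has decayed -> pointer size
}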
I suspect your (not shown) ParseShader() loader might be wonky, since my own loader works fine with your shaders.
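For reference, a minimal #shader-splitting loader for the Basic.shader format shown in the question might look roughly like the sketch below. This is only an illustration: it assumes ShaderSourceProgram is simply a struct holding the two source strings, which may not match your actual code.
#include <fstream>
#include <sstream>
#include <string>
struct ShaderSourceProgram          // assumed layout, matching how the question uses it
{
    std::string VertexShader;
    std::string FragmentShader;
};
static ShaderSourceProgram ParseShader( const std::string& filepath )
{
    std::ifstream stream( filepath ); // a bad path fails silently and yields empty sources
    std::string line;
    std::stringstream ss[ 2 ];        // [0] = vertex, [1] = fragment
    int index = -1;                   // -1 until a "#shader" marker has been seen
    while( std::getline( stream, line ) )
    {
        if( line.find( "#shader" ) != std::string::npos )
        {
            if( line.find( "vertex" ) != std::string::npos )        index = 0;
            else if( line.find( "fragment" ) != std::string::npos ) index = 1;
        }
        else if( index != -1 )
        {
            ss[ index ] << line << '\n';
        }
    }
    return { ss[ 0 ].str(), ss[ 1 ].str() };
}
If the working directory isn't what you expect, "res/shaders/Basic.shader" won't be found, the sources come back empty, and the program renders exactly as if no real shader were bound; printing the parsed strings (as in your commented-out lines) is a quick way to rule that out.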
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
int main( void )
{
GLFWwindow* window;
/* Initialize the library */
if( !glfwInit() )
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow( 640, 480, "Hello World", NULL, NULL );
if( !window )
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent( window );
if( glewInit() != GLEW_OK )
std::cout << "ERROR GLEW" << std::endl;
std::cout << glGetString( GL_VERSION ) << std::endl;
float positions[] = {
-0.5f,-0.5f, //0
0.5f, -0.5f, //1
0.5f, 0.5f, //2
-0.5f, 0.5f //3
};
unsigned int indices[] = {
0, 1, 2,
2, 3, 0
};
unsigned int buffer;
glGenBuffers( 1, &buffer );
glBindBuffer( GL_ARRAY_BUFFER, buffer );
glBufferData( GL_ARRAY_BUFFER, sizeof( positions ), positions, GL_STATIC_DRAW );
glVertexAttribPointer( 0, 2, GL_FLOAT, GL_FALSE, sizeof( float ) * 2, 0 );
glEnableVertexAttribArray( 0 );
unsigned int ibo;
glGenBuffers( 1, &ibo );
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, ibo );
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( indices ), indices, GL_STATIC_DRAW );
const char* vert = GLSL
(
330 core,
layout(location = 0)in vec4 position;
void main()
{
gl_Position = position;
};
);
const char* frag = GLSL
(
330 core,
out vec4 color;
void main()
{
color = vec4(0.0,1.0,0.0,1.0);
};
);
unsigned int shaderProgram = Program::Load
(
vert, GL_VERTEX_SHADER,
frag, GL_FRAGMENT_SHADER,
NULL
);
glUseProgram( shaderProgram );
/* Loop until the user closes the window */
while( !glfwWindowShouldClose( window ) )
{
/* Render here */
glClear( GL_COLOR_BUFFER_BIT );
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr );
/* Swap front and back buffers */
glfwSwapBuffers( window );
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram( shaderProgram );
glfwTerminate();
return 0;
}
glUseProgram() needs to be called after the shaders are compiled and linked. Here's a piece of sample code:
int main()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", NULL, NULL);
if (window == NULL)
{
cout << "Failed to create GLFW window" << endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLAD
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
cout << "Failed to initialize GLAD" << endl;
return -1;
}
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glViewport(0, 0, 800, 600);
float TriangleVertices[] = {
-0.3f, -0.3f, 0.0f,
0.0f, 0.3f, 0.0f,
0.3f, -0.3f, 0.0f
};
unsigned int VBO, VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
//BindVertexArray for modifying it
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(TriangleVertices), TriangleVertices, GL_STATIC_DRAW);
glBindVertexArray(VAO);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
const char* vertexShaderSource = "\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos, 1.0);\n"
"}\0";
unsigned int vertexShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
const char* fragmentShaderSource = "\n"
"out vec4 FragColor; \n"
"void main()\n"
"{\n"
"FragColor = vec4(1.0f, 0.7f, 0.5f, 1.0f); \n"
"} \n";
unsigned int fragmentShader;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
unsigned int shaderProgram;
shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
glUseProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
while (!glfwWindowShouldClose(window))
{
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
//// draw triangle using the data from the VAO
//glUseProgram(shaderProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(shaderProgram);
glfwTerminate();
return 0;
}
When I use OpenGL and run my code to make a triangle, the function glGenVertexArrays() gives me EXC_BAD_ACCESS.
Code:
#include <iostream>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <cmath>
// Window specs
const int HEIGHT = 800;
const int WIDTH = 600;
// Shader source code
const char* vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fragmentShaderSource = "#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(0.8f, 0.3f, 0.02f, 1.0f);\n"
"}\n\0";
int main(void)
{
if (!glfwInit())
{
std::cout<<"GLFW failed to Initialize!"<<std::endl;
return -1;
}
GLfloat vertices[] =
{
-0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.0f, 0.5f * float(sqrt(3)) * 2 / 3, 0.0f
};
GLFWwindow* window = glfwCreateWindow(WIDTH, HEIGHT, "Open GL", NULL, NULL);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);
glfwMakeContextCurrent(window);
if(!window)
{
std::cout<<"Something went Wrong when Creating a Window!\nShutting down ..."<<std::endl;
glfwTerminate();
return -1;
}
//
gladLoadGL();
glViewport(0, 0, WIDTH, HEIGHT);
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
//
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
GLuint VAO;
GLuint VBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
while(!glfwWindowShouldClose(window))
{
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(shaderProgram);
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
I used GLAD to write the code. Does anybody have any idea how to fix this or what I did wrong?
To recreate it, use Xcode 13.2.1, link GLAD (generated for OpenGL 4.1) and GLFW so the project matches the "File Layout" screenshot, then paste the code into main and run.
A few things:
glfwWindowHint() only affects the next glfwCreateWindow() call so make sure to call them before you create a window. The default hint settings will generally give you a version 2.1 Compatibility context on macOS which won't be able to handle #version 330 core GLSL code.
GLFW_CONTEXT_VERSION_MAJOR was used twice; you need a GLFW_CONTEXT_VERSION_MINOR too.
macOS needs GLFW_OPENGL_FORWARD_COMPAT set to GLFW_TRUE for Core contexts.
Check for shader compilation/link errors.
Your GLAD might have been out of sync for the GL version you were targeting; see the generator link in the comments for the settings I used.
GLFW has a perfectly usable GLADloadproc in glfwGetProcAddress(); might as well switch from gladLoadGL() to gladLoadGLLoader().
Supplying a glfwSetErrorCallback() can get you some early/immediate error reporting from GLFW. It's how I discovered the missing GLFW_CONTEXT_VERSION_MINOR hint. No reason not to use it.
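The window-creation half of those fixes, distilled (the same calls appear in context in the full listing below):
// report GLFW errors as soon as they happen
glfwSetErrorCallback( []( int, const char* desc )
{
    std::cerr << desc << "\n";
    std::exit( EXIT_FAILURE );
} );
if( !glfwInit() )
    return -1;
// hints only affect the *next* glfwCreateWindow() call, so set them first
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );          // was missing entirely
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE );  // required for Core contexts on macOS
GLFWwindow* window = glfwCreateWindow( 800, 600, "Open GL", NULL, NULL );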
All together:
// g++ -Iinclude main.cpp src/glad.c `pkg-config --cflags --libs glfw3`
// Commandline:
// --profile="compatibility" --api="gl=3.3" --generator="c" --spec="gl" --no-loader --extensions=""
// Online:
// https://glad.dav1d.de/#profile=compatibility&language=c&specification=gl&api=gl%3D3.3
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cmath>
void CheckStatus( GLuint obj, bool isShader )
{
GLint status = GL_FALSE, log[ 1 << 11 ] = { 0 };
( isShader ? glGetShaderiv : glGetProgramiv )( obj, isShader ? GL_COMPILE_STATUS : GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
( isShader ? glGetShaderInfoLog : glGetProgramInfoLog )( obj, sizeof( log ), NULL, (GLchar*)log );
std::cerr << (GLchar*)log << "\n";
std::exit( EXIT_FAILURE );
}
void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader, true );
glAttachShader( program, shader );
glDeleteShader( shader );
}
const char* const vert = 1 + R"GLSL(
#version 330 core
layout (location = 0) in vec3 aPos;
void main()
{
gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);
};
)GLSL";
const char* const frag = 1 + R"GLSL(
#version 330 core
out vec4 FragColor;
void main()
{
FragColor = vec4(0.8f, 0.3f, 0.02f, 1.0f);
}
)GLSL";
int main(void)
{
glfwSetErrorCallback( []( int, const char* desc )
{
std::cerr << desc << "\n";
std::exit( EXIT_FAILURE );
} );
if (!glfwInit())
{
std::cout<<"GLFW failed to Initialize!"<<std::endl;
return -1;
}
GLfloat vertices[] =
{
-0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.0f, 0.5f * float(sqrt(3)) * 2 / 3, 0.0f
};
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);
GLFWwindow* window = glfwCreateWindow(800, 600, "Open GL", NULL, NULL);
if(!window)
{
std::cout<<"Something went Wrong when Creating a Window!\nShutting down ..."<<std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
gladLoadGLLoader( (GLADloadproc)glfwGetProcAddress );
glViewport(0, 0, 800, 600);
GLuint prog = glCreateProgram();
AttachShader( prog, GL_VERTEX_SHADER, vert );
AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog, false );
glUseProgram( prog );
GLuint VAO;
GLuint VBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
while(!glfwWindowShouldClose(window))
{
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(prog);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(prog);
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
Note that with a 3.3 Core profile on macOS you may also need to call glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); (pointed out by GM in the comments and by genpfault).
I can't get an OpenGL triangle to appear.
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
const char* vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fragmentShaderSource = "#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
"}\n\0";
void framebuffer_size_callback(GLFWwindow* window, int width, int height)
{
glViewport(0, 0, width, height);
}
void processInput(GLFWwindow *window) {
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS) { glfwSetWindowShouldClose(window, true); }
}
int main()
{
glewExperimental = GL_TRUE;
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR,3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR,3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window{ glfwCreateWindow(800,600,"Opengl",NULL,NULL) };
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
glfwMakeContextCurrent(window);
GLenum err = glewInit();
if (glewInit() != GLEW_OK) { std::cout << glewGetErrorString(err); return -1; }
int vertexShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
int fragmentShader;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader,1,&fragmentShaderSource,NULL);
glCompileShader(fragmentShader);
int shaderProgram = glCreateProgram();
glAttachShader(shaderProgram,vertexShader);
glAttachShader(shaderProgram,fragmentShader);
glLinkProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
float triangle[]{ -0.5f,-0.5f,0.0f,
0.5f,-0.5f,0.0,
0.0f,-0.5f,0.0f };
unsigned int VAO,buffer;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3*sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER,0);
glBindVertexArray(0);
if (window == NULL) {
std::cout << "Window is null";
glfwTerminate();
return -1;
}
while (!glfwWindowShouldClose(window))
{
processInput(window);
glClearColor(0.2f, 0.2f, 0.2f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES,0,3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
All your triangle vertices are collinear (they all have y = -0.5), resulting in a degenerate, zero-area triangle.
Bumping the y-coordinate of the 3rd point up to 0.5 works for me:
float triangle[]
{
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f,
};
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
const char* vertexShaderSource =
"#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
"   gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fragmentShaderSource =
"#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
"   FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
"}\n\0";
void framebuffer_size_callback( GLFWwindow* window, int width, int height )
{
glViewport( 0, 0, width, height );
}
void processInput( GLFWwindow* window )
{
if( glfwGetKey( window, GLFW_KEY_ESCAPE ) == GLFW_PRESS )
{
glfwSetWindowShouldClose( window, true );
}
}
int main()
{
glewExperimental = GL_TRUE;
glfwInit();
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
GLFWwindow* window{ glfwCreateWindow( 800, 600, "Opengl", NULL, NULL ) };
glfwSetFramebufferSizeCallback( window, framebuffer_size_callback );
glfwMakeContextCurrent( window );
GLenum err = glewInit();
if( err != GLEW_OK )
{
std::cout << glewGetErrorString( err );
return -1;
}
int vertexShader;
vertexShader = glCreateShader( GL_VERTEX_SHADER );
glShaderSource( vertexShader, 1, &vertexShaderSource, NULL );
glCompileShader( vertexShader );
int fragmentShader;
fragmentShader = glCreateShader( GL_FRAGMENT_SHADER );
glShaderSource( fragmentShader, 1, &fragmentShaderSource, NULL );
glCompileShader( fragmentShader );
int shaderProgram = glCreateProgram();
glAttachShader( shaderProgram, vertexShader );
glAttachShader( shaderProgram, fragmentShader );
glLinkProgram( shaderProgram );
glDeleteShader( vertexShader );
glDeleteShader( fragmentShader );
float triangle[]{
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f,
};
unsigned int VAO, buffer;
glGenVertexArrays( 1, &VAO );
glBindVertexArray( VAO );
glGenBuffers( 1, &buffer );
glBindBuffer( GL_ARRAY_BUFFER, buffer );
glBufferData(
GL_ARRAY_BUFFER, sizeof( triangle ), triangle, GL_STATIC_DRAW );
glVertexAttribPointer(
0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof( float ), (void*)0 );
glEnableVertexAttribArray( 0 );
glBindBuffer( GL_ARRAY_BUFFER, 0 );
glBindVertexArray( 0 );
if( window == NULL )
{
std::cout << "Window is null";
glfwTerminate();
return -1;
}
while( !glfwWindowShouldClose( window ) )
{
processInput( window );
glClearColor( 0.2f, 0.2f, 0.2f, 1.0f );
glClear( GL_COLOR_BUFFER_BIT );
glUseProgram( shaderProgram );
glBindVertexArray( VAO );
glDrawArrays( GL_TRIANGLES, 0, 3 );
glfwSwapBuffers( window );
glfwPollEvents();
}
glfwTerminate();
return 0;
}
I have the following code and am trying to draw a red triangle using OpenGL, GLEW, etc. The program builds and shows a triangle, however the triangle only shows up when I drag the window, and it comes out white rather than red. I'm not sure how to fix this issue. Below is a copy of my code (I am running it in Xcode).
#include <iostream>
// GLEW
#define GLEW_STATIC
#include <GL/glew.h>
// GLFW
#include <GLFW/glfw3.h>
static unsigned int CompileShader(unsigned int type, const std::string& source)
{
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result != GL_FALSE)
{
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char* message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << "shader" << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader)
{
unsigned int program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
std::cout << "Error!" << std::endl;
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);
std::string vertexShader =
"#version 330 core\n"
"\n"
"layout(location = 0) in vec4 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\n";
std::string fragmentShader =
"#version 330 core\n"
"\n"
"layout(location = 0) out vec4 color;\n"
"\n"
"void main()\n"
"{\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
glBindBuffer(GL_ARRAY_BUFFER, 0);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
I am using Xcode and am having 2 issues:
1. the triangle is drawn white instead of red
2. the triangle only shows up when I drag the window
I have tried debugging but am not able to figure these issues out.
You need to go whole-hog on Core contexts on macOS if you want anything past GL 2.1:
Request a versioned Core context via GLFW_OPENGL_PROFILE, GLFW_CONTEXT_VERSION_MAJOR, and GLFW_CONTEXT_VERSION_MINOR; it should meet or exceed the #version you're specifying in your shaders
For macOS in particular you need a forward-compatible context; set GLFW_OPENGL_FORWARD_COMPAT to GL_TRUE
Core contexts require a vertex array object (VAO) to draw anything; for simple stuff you can get away with creating a single one and leaving it bound
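Distilled, that amounts to a few window hints plus one VAO; the full listing below shows the same calls in context:
/* request a versioned, forward-compatible Core context before creating the window */
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE );
GLFWwindow* window = glfwCreateWindow( 640, 480, "Hello World", NULL, NULL );
...
/* Core profile: create a single VAO once and leave it bound */
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );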
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <vector>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
std::exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const char* vert = 1 + R"GLSL(
#version 330 core
layout(location = 0) in vec4 position;
void main()
{
gl_Position = position;
};
)GLSL";
const char* frag = 1 + R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main()
{
color = vec4(1.0, 0.0, 0.0, 1.0);
}
)GLSL";
int main(void)
{
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE );
GLFWwindow* window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
std::cout << "Error!" << std::endl;
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
float positions[6] =
{
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);
GLuint shader = Program::Load( vert, GL_VERTEX_SHADER, frag, GL_FRAGMENT_SHADER, NULL );
glUseProgram(shader);
glBindBuffer(GL_ARRAY_BUFFER, 0);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
Your problem may be in the vertex shader: with "layout(location = 0) in vec4 position;\n" you declare a vec4 attribute, but you are only supplying 2 components per vertex via glVertexAttribPointer().
Try supplying four components per vertex, or declare a vec2 in the vertex shader like this:
std::string vertexShader =
"#version 330 core\n"
"\n"
"layout(location = 0) in vec2 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = vec4(position, 0, 1);\n"
"}\n";
I have a basic OpenGL program using GLFW and GLEW to render a triangle.
I'm using a Mac with Xcode with GLFW 3.3 and GLEW 2.1.
I was encountering a problem where glfwCreateWindow() would return null if I set the OpenGL profile to the Core profile (which, as far as I understand, the Mac requires), and I solved it by setting forward compatibility to true.
Can somebody explain to me if this solution is right or not?
One other thing: the window now gets created without problems, but I'm not seeing anything on the screen.
This is the code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
static unsigned int CompileShader(unsigned int type, const std::string& source)
{
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
// Compiles the actual shader
glCompileShader(id);
// Error handeling of the compilation
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE)
{
int length;
// Gets the length of the message
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
// This is used to allocate memory on the stack dynamically
char* message = (char*)alloca(length * sizeof(char));
// Gets the log
glGetShaderInfoLog(id, length, &length, message);
std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader)
{
unsigned int program = glCreateProgram();
// Creating the two shader
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
// Attaches the two shaders to the program and validate everything
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program); // This links the shader executable to the actual processor
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
// to initialize glew go to c++ preprocessor and add GLEW_STATIC
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
/* Create a window and its OpenGl context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
// Glew init HAS to be put after making the context
if (glewInit() != GLEW_OK)
std::cout << "GlewInit Error" << std::endl;
std::cout << "VERSION:" << std::endl;
std::cout << glGetString(GL_VERSION) << std::endl;
std::cout << "GL Version: " << (char *)glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
// Defining the position of the vertices
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
std::string vertexShader =
"#version 330 core\n"
"\n"
// here we are saying that the attribute at the position 0 (see attribPointer)
// which is the position itself is a input value put in a vec4 because
// glPosition needs a vec4
"layout(location = 0) in vec4 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\n";
std::string fragmentShader =
"#version 330 core\n"
"\n"
"layout(location = 0) out vec4 color;\n"
"\n"
"void main()\n"
"{\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
// second parameter is the starting index and the third is the number of indexes
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
Vertex Array Objects (VAOs) aren't optional in Core contexts like they are in Compatibility. You have to have one bound to draw anything.
Create one and bind it before setting up your vertex layout and drawing:
GLuint vao = 0;
glGenVertexArrays( 1, &vao ); // (glCreateVertexArrays() is a GL 4.5 DSA call, not available on macOS)
glBindVertexArray( vao );
...
glEnableVertexAttribArray( ... );
glVertexAttribPointer( ... );
...
glDrawArrays( ... );
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cstdarg>
struct Program
{
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
std::exit( EXIT_FAILURE );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
const char* vert = 1 + R"GLSL(
#version 330 core
// here we are saying that the attribute at the position 0 (see attribPointer)
// which is the position itself is a input value put in a vec4 because
// glPosition needs a vec4
layout(location = 0) in vec4 position;
void main()
{
gl_Position = position;
};
)GLSL";
const char* frag = 1 + R"GLSL(
#version 330 core
layout(location = 0) out vec4 color;
void main()
{
color = vec4(1.0, 0.0, 0.0, 1.0);
};
)GLSL";
int main( void )
{
if( !glfwInit() )
return -1;
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 3 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 3 );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
GLFWwindow* window = glfwCreateWindow( 640, 480, "Hello World", NULL, NULL );
if( !window )
{
glfwTerminate();
return -1;
}
glfwMakeContextCurrent( window );
// Glew init HAS to be put after making the context
if( glewInit() != GLEW_OK )
std::cout << "GlewInit Error" << std::endl;
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
// Defining the position of the vertices
float positions[ 6 ] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers( 1, &buffer );
glBindBuffer( GL_ARRAY_BUFFER, buffer );
glBufferData( GL_ARRAY_BUFFER, 6 * sizeof( float ), positions, GL_STATIC_DRAW );
glEnableVertexAttribArray( 0 );
glVertexAttribPointer( 0, 2, GL_FLOAT, GL_FALSE, sizeof( float ) * 2, 0 );
GLuint shader = Program::Load( vert, GL_VERTEX_SHADER, frag, GL_FRAGMENT_SHADER, NULL );
glUseProgram( shader );
while( !glfwWindowShouldClose( window ) )
{
glClear( GL_COLOR_BUFFER_BIT );
glDrawArrays( GL_TRIANGLES, 0, 3 );
glfwSwapBuffers( window );
glfwPollEvents();
}
glDeleteProgram( shader );
glfwTerminate();
return 0;
}
I am running a simple program to draw a red triangle.
However, it only shows a black window.
I suspect it might be a problem with the shader, because if I remove the shader, it perfectly draws a white triangle.
Here's my code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
static unsigned int CompileShader(unsigned int type, const std::string& source){
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
int res;
glGetShaderiv(id, GL_COMPILE_STATUS, &res);
if(res == GL_FALSE){
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char* message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << " shader!" << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader){
unsigned int program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER , vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER , fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
glfwWindowHint( GLFW_CONTEXT_VERSION_MAJOR, 4 );
glfwWindowHint( GLFW_CONTEXT_VERSION_MINOR, 1 );
glfwWindowHint( GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE );
glfwWindowHint( GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE );
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "I Rock", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if(glewInit() != GLEW_OK)
std::cout<< "Error!" << std::endl;
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6*sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(float), 0);
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vertexShader = GLSL
(
410 core,
layout( location = 0 ) in vec4 position;
void main()
{
gl_Position = position;
}
);
const char* fragmentShader = GLSL
(
410 core,
out vec4 color;
void main()
{
color = vec4( 1.0, 0.0, 0.0, 1.0 );
}
);
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
These are my specifications:
GLFW version : 3.2.1 Cocoa NSGL chdir menubar retina dynamic
GLEW_VERSION : 2.1.0
GL_VERSION : 4.1 ATI-1.60.26
GL_VENDOR : ATI Technologies Inc.
GL_RENDERER : AMD Radeon R9 M290 OpenGL Engine
GL_SHADING_LANGUAGE_VERSION : 4.10
You need to generate & bind a Vertex Array Object (VAO) before enabling vertex attributes, setting vertex attrib pointers, and drawing:
...
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6*sizeof(float), positions, GL_STATIC_DRAW);
// generate & bind VAO
GLuint vao = 0;
glGenVertexArrays( 1, &vao );
glBindVertexArray( vao );
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(float), 0);
...
VAOs aren't optional in Core contexts like they are in Compatibility.
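If you're ever unsure whether a VAO is actually bound at draw time, you can query it; in a Core context, issuing a draw call with VAO 0 bound raises GL_INVALID_OPERATION and draws nothing. A quick check (hypothetical variable name):
GLint boundVao = 0;
glGetIntegerv( GL_VERTEX_ARRAY_BINDING, &boundVao );
std::cout << "VAO bound at draw time: " << boundVao << std::endl; // should be non-zero in Core
// glGetError() right after glDrawArrays() would report GL_INVALID_OPERATION if it was 0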