glDrawArrays unexpected behavior - c++

I have spent a good amount of time with the fixed pipeline of openGL, and I have recently began learning the programmable pipeline. I know my painter, and shader classes are not the issue because they work with fixed function pipeline stuff. I can't seem to get glDrawArrays to work for my life.
I am not sure if my error is in how I set up the vertex buffer object, in my shader, or elsewhere. I have also debugged my code and set breakpoints throughout the display function, and it seems to never get past glDrawArrays() (i.e. it hits a breakpoint at glDrawArrays, but doesn't hit any after; I'm not sure why).
What gets outputted is just a white screen, nothing else.
Here's my code:
// One triangle in clip space, 4 components (x, y, z, w) per vertex.
float vertices[] = { 0.75, 0.75, 0.0, 1.0,
0.75, -0.75, 0.0, 1.0,
-0.75, -0.75, 0.0, 1.0 };
// Buffer/VAO ids filled in by main().
GLuint vertexBufferObject;
// NOTE(review): glGetAttribLocation returns GLint (-1 on failure); storing
// the result in a GLuint hides a failed lookup.
GLuint positionLocation;
GLuint vaoObject;
// Creates a VBO and uploads `size` bytes from vertexData into it.
// vertexBufferObject is an out-parameter (by reference) receiving the new
// buffer id; GL_DRAW_TYPE is the usage hint, e.g. GL_STATIC_DRAW.
// The GL_ARRAY_BUFFER binding is reset to 0 before returning.
void initVertexBuffer(GLuint& vertexBufferObject, float* vertexData, unsigned int size, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
// `size` is a byte count — callers pass sizeof(array) at the call site,
// where the array type (and hence its size) is still known.
glBufferData(GL_ARRAY_BUFFER, size, vertexData, GL_DRAW_TYPE);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// Program entry point for the question's code.
// NOTE(review): `void main` is non-standard C++ — main must return int.
void main(int argc, char* argv[])
{
painter.initEngine(argc, argv, 500, 500, 0, 0, "2D3D");
painter.initGlutFuncs(display, resize, Input::MouseButtonClick, Input::MouseDrag, keyboard);
defaultShader.init("default.vert", "default.frag");
defaultShader.link();
initVertexBuffer(vertexBufferObject, vertices, sizeof(vertices), GL_STATIC_DRAW);
// The VAO is generated and bound, but no attribute state is recorded into
// it here — display() re-binds the VBO and re-sets the pointer every frame.
glGenVertexArrays(1, &vaoObject);
glBindVertexArray(vaoObject);
// NOTE(review): return value is GLint; -1 would wrap to a huge GLuint here
// if "position" were not an active attribute — TODO confirm it is.
positionLocation = glGetAttribLocation(defaultShader.id(), "position");
painter.startMainLoop();
}
// Per-frame draw callback: bind shader and VBO, point attribute
// `positionLocation` at the buffer (4 floats per vertex, tightly packed),
// draw one triangle, then unwind all state.
void display()
{
painter.clearDisplay();
defaultShader.bind();
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glEnableVertexAttribArray(positionLocation);
// Sources from the currently bound GL_ARRAY_BUFFER; offset 0, stride 0
// (tightly packed).
glVertexAttribPointer(positionLocation, 4, GL_FLOAT, GL_FALSE, 0, 0);
std::cout << Framework::glErrorCheck() << std::endl;
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(positionLocation);
glBindBuffer(GL_ARRAY_BUFFER, 0);
defaultShader.unbind();
painter.flushAndSwapBuffers();
}
Vertex Shader:
// Pass-through vertex shader: forwards the attribute as the clip-space position.
#version 140
in vec4 position;
void main()
{
gl_Position = position;
}
Fragment Shader:
// Constant-color fragment shader: every fragment is opaque magenta.
#version 140
out vec4 outColor;
void main()
{
outColor = vec4(1.0, 0.0, 1.0, 1.0);
}
Edit: Code updated with Joey Dewd, keltar, and genpfault's suggestions.
I'm no longer hanging at glDrawArrays, i.e. instead of a white screen I'm getting a black screen. This is leading me to think that my buffer is somehow still not setup correctly. Or possibly, I am missing something else needed for the vertex array buffer initialization (vaoObject)?

void initVertexBuffer(GLuint vertexBufferObject, float* vertexData, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertexData), vertexData, GL_DRAW_TYPE);
^^^^^^^^^^^^^^^^^^ nnnnope
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
sizeof(vertexData) in this context is not what you seem to hope it is.
It'll probably be 4 or 8 depending on your 64-bit-edness. I.e., the sizeof a pointer-to-float. Not sizeof(vertices).
You need to pass in a separate size argument:
// Corrected version: the byte count is passed in explicitly, and the buffer
// id is returned through a GLuint& out-parameter.
void initVertexBuffer(GLuint& vertexBufferObject, float* vertexData, unsigned int size, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
// `size` is in bytes; callers use sizeof(array) at the call site, where the
// array type is still known (sizeof a float* parameter would be only 4/8).
glBufferData(GL_ARRAY_BUFFER, size, vertexData, GL_DRAW_TYPE);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
...
void main(int argc, char* argv[])
{
...
initVertexBuffer(vertexBufferObject, vertices, sizeof(vertices), GL_STATIC_DRAW);
...
}
Or a contiguous (important!) standard container like std::vector:
// Uploads the contents of any contiguous standard container (std::vector,
// std::array, std::string, ...) into a freshly generated GL_ARRAY_BUFFER.
// vertexBufferObject receives the new buffer id; GL_DRAW_TYPE is the usage
// hint (e.g. GL_STATIC_DRAW). The GL_ARRAY_BUFFER binding is reset to 0.
template< typename Container >
void initVertexBuffer(GLuint& vertexBufferObject, const Container& vertexData, GLenum GL_DRAW_TYPE)
{
    glGenBuffers(1, &vertexBufferObject);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
    // .data() is well-defined even for an empty container, unlike
    // &vertexData[0], which is undefined behavior when size() == 0.
    glBufferData(GL_ARRAY_BUFFER, vertexData.size() * sizeof( typename Container::value_type ), vertexData.data(), GL_DRAW_TYPE);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
}
Full example:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <iostream>
#include <vector>
using namespace std;
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
// Create a VBO, upload `size` bytes of vertexData, and return the buffer id
// through the reference parameter; restores GL_ARRAY_BUFFER binding to 0.
void initVertexBuffer(GLuint& vertexBufferObject, float* vertexData, unsigned int size, GLenum GL_DRAW_TYPE)
{
glGenBuffers(1, &vertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, size, vertexData, GL_DRAW_TYPE);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
// Linked shader program id and the VAO holding the triangle's vertex state.
GLuint prog;
GLuint vaoObject;
// One-time GL setup: build the shader program, create the VAO, upload the
// triangle, and record the attribute binding into the VAO.
void init()
{
const char* vert = GLSL
(
140,
in vec4 position;
void main()
{
gl_Position = position;
}
);
const char* frag = GLSL
(
140,
out vec4 outColor;
void main()
{
outColor = vec4(1.0, 0.0, 1.0, 1.0);
}
);
prog = Program::Load( vert, NULL, frag );
glUseProgram( prog );
// The VAO is bound BEFORE the attribute calls below, so the enable/pointer
// state is captured in vaoObject for display() to re-bind.
glGenVertexArrays(1, &vaoObject);
glBindVertexArray(vaoObject);
// One triangle, 4 components (x, y, z, w) per vertex.
float vertices[] =
{
0.75, 0.75, 0.0, 1.0,
0.75, -0.75, 0.0, 1.0,
-0.75, -0.75, 0.0, 1.0
};
GLuint vertexBufferObject;
initVertexBuffer(vertexBufferObject, vertices, sizeof(vertices), GL_STATIC_DRAW);
// Re-bind: initVertexBuffer resets GL_ARRAY_BUFFER to 0, and
// glVertexAttribPointer sources from the buffer bound at call time.
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
GLuint positionLocation = glGetAttribLocation(prog, "position");
glEnableVertexAttribArray(positionLocation);
glVertexAttribPointer(positionLocation, 4, GL_FLOAT, GL_FALSE, 0, 0);
}
// Per-frame draw: all vertex state comes from the VAO prepared in init().
void display()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram( prog );
glBindVertexArray(vaoObject);
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
int main(int argc, char **argv)
{
glutInit( &argc, argv );
glutInitContextVersion( 3, 1 );
glutInitContextProfile( GLUT_COMPATIBILITY_PROFILE );
glutInitDisplayMode( GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE );
glutInitWindowSize( 600, 600 );
glutCreateWindow( "GLUT" );
glewExperimental = GL_TRUE;
glewInit();
init();
glutDisplayFunc( display );
glutMainLoop();
return 0;
}

glGenBuffers(1, &vertexBufferObject);
Saves the id of the vertex buffer in a local variable. This value is no longer available in the draw call, and the call does not modify the global vertexBufferObject (the buffer still exists — you have just lost its id, so you can't use it anymore, or even destroy it).

Related

OpenGL: glGenVertexArrays() gives EXC_BAD_ACCESS

When I use OpenGL and run my code to make a triangle, the function glGenVertexArrays() gives me an EXC_BAD_ACCESS
Error
Code:
#include <iostream>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <cmath>
// Window specs
// NOTE(review): the names look swapped — these are passed as
// glfwCreateWindow(WIDTH, HEIGHT, ...) below, giving a 600x800 window.
const int HEIGHT = 800;
const int WIDTH = 600;
// Shader source code
// Vertex shader: passes attribute 0 straight through as the position.
const char* vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
// Fragment shader: constant orange output color.
const char* fragmentShaderSource = "#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(0.8f, 0.3f, 0.02f, 1.0f);\n"
"}\n\0";
// The asker's original GLFW/GLAD program; the review notes below mark the
// defects the accepted answer identifies.
int main(void)
{
if (!glfwInit())
{
std::cout<<"GLFW failed to Initialize!"<<std::endl;
return -1;
}
GLfloat vertices[] =
{
-0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.0f, 0.5f * float(sqrt(3)) * 2 / 3, 0.0f
};
// NOTE(review): the window is created BEFORE the hints below, so none of
// them apply — glfwWindowHint only affects subsequent glfwCreateWindow calls.
GLFWwindow* window = glfwCreateWindow(WIDTH, HEIGHT, "Open GL", NULL, NULL);
// NOTE(review): GLFW_CONTEXT_VERSION_MAJOR is set twice; the minor version
// hint (GLFW_CONTEXT_VERSION_MINOR) is never set.
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);
// NOTE(review): window is used here before the NULL check below.
glfwMakeContextCurrent(window);
if(!window)
{
std::cout<<"Something went Wrong when Creating a Window!\nShutting down ..."<<std::endl;
glfwTerminate();
return -1;
}
//
gladLoadGL();
glViewport(0, 0, WIDTH, HEIGHT);
// NOTE(review): compile status is never checked for either shader.
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
//
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
GLuint VAO;
GLuint VBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
// Attribute state below is recorded into VAO (bound first).
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
while(!glfwWindowShouldClose(window))
{
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(shaderProgram);
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
I used GLAD to write the code. Does anybody have any idea how to fix this, or know what I did wrong?
To recreate it use Xcode 13.2.1 then link Glad using Opengl 4.1 and GLFW and make it look something like this
File Layout
Then paste the code into main and run.
A few things:
glfwWindowHint() only affects the next glfwCreateWindow() call so make sure to call them before you create a window. The default hint settings will generally give you a version 2.1 Compatibility context on macOS which won't be able to handle #version 330 core GLSL code.
GLFW_CONTEXT_VERSION_MAJOR was used twice; you need a GLFW_CONTEXT_VERSION_MINOR too.
macOS needs GLFW_OPENGL_FORWARD_COMPAT set to GLFW_TRUE for Core contexts.
Check for shader compilation/link errors.
Your GLAD might have been out of sync for the GL version you were targeting; see the generator link in the comments for the settings I used.
GLFW has a perfectly usable GLADloadproc in glfwGetProcAddress(); might as well switch from gladLoadGL() to gladLoadGLLoader().
Supplying a glfwSetErrorCallback() can get you some early/immediate error reporting from GLFW. It's how I discovered the missing GLFW_CONTEXT_VERSION_MINOR hint. No reason not to use it.
All together:
// g++ -Iinclude main.cpp src/glad.c `pkg-config --cflags --libs glfw3`
// Commandline:
// --profile="compatibility" --api="gl=3.3" --generator="c" --spec="gl" --no-loader --extensions=""
// Online:
// https://glad.dav1d.de/#profile=compatibility&language=c&specification=gl&api=gl%3D3.3
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <cmath>
// Verify the GL_COMPILE_STATUS (shader) or GL_LINK_STATUS (program) of obj;
// on failure print the info log to stderr and abort the process.
void CheckStatus( GLuint obj, bool isShader )
{
    GLint status = GL_FALSE;
    // Use a real character buffer. The original declared the log as a GLint
    // array and cast it to GLchar*; 1 << 13 chars occupies the same 8 KiB the
    // GLint[1 << 11] array did, so the maxLength passed below is unchanged.
    GLchar log[ 1 << 13 ] = { 0 };
    ( isShader ? glGetShaderiv : glGetProgramiv )( obj, isShader ? GL_COMPILE_STATUS : GL_LINK_STATUS, &status );
    if( status == GL_TRUE ) return;
    ( isShader ? glGetShaderInfoLog : glGetProgramInfoLog )( obj, sizeof( log ), NULL, log );
    std::cerr << log << "\n";
    std::exit( EXIT_FAILURE );
}
// Compile `src` as a shader of `type`, abort on compile error, attach it to
// `program`, and flag it for deletion (freed when the program is deleted).
void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader, true );
glAttachShader( program, shader );
glDeleteShader( shader );
}
const char* const vert = 1 + R"GLSL(
#version 330 core
layout (location = 0) in vec3 aPos;
void main()
{
gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);
};
)GLSL";
// Fragment shader source: constant orange color. The "1 +" skips the raw
// string's leading newline so "#version" lands on line 1.
const char* const frag = 1 + R"GLSL(
#version 330 core
out vec4 FragColor;
void main()
{
FragColor = vec4(0.8f, 0.3f, 0.02f, 1.0f);
}
)GLSL";
// Corrected GLFW/GLAD program: hints before window creation, minor version
// hint present, forward-compat for macOS, and checked shader compilation.
int main(void)
{
// Early/immediate error reporting from GLFW (e.g. rejected hints).
glfwSetErrorCallback( []( int, const char* desc )
{
std::cerr << desc << "\n";
std::exit( EXIT_FAILURE );
} );
if (!glfwInit())
{
std::cout<<"GLFW failed to Initialize!"<<std::endl;
return -1;
}
GLfloat vertices[] =
{
-0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f,
0.0f, 0.5f * float(sqrt(3)) * 2 / 3, 0.0f
};
// All hints set BEFORE glfwCreateWindow so they actually apply.
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Required on macOS for Core-profile contexts.
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);
GLFWwindow* window = glfwCreateWindow(800, 600, "Open GL", NULL, NULL);
if(!window)
{
std::cout<<"Something went Wrong when Creating a Window!\nShutting down ..."<<std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Load GL entry points through GLFW's own loader.
gladLoadGLLoader( (GLADloadproc)glfwGetProcAddress );
glViewport(0, 0, 800, 600);
GLuint prog = glCreateProgram();
AttachShader( prog, GL_VERTEX_SHADER, vert );
AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog, false );
glUseProgram( prog );
GLuint VAO;
GLuint VBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
// Attribute state below is recorded into VAO (bound first).
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
while(!glfwWindowShouldClose(window))
{
glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(prog);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(prog);
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
Note that with a 3.3 core profile on MacOS you may also need to call glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
by GM at comment Section and genpfault

OpenGL, a very basic shader not working

I am learning OpenGL through TheCherno Project.
The code below is from there and I faithfully reproduce it for my learning purpose. I have also added both vertex and fragment shaders as well.
My issue here is in my square output I don't see the color at all and I am guessing it is using the default shader.
// The asker's GLFW/GLEW program (from TheCherno tutorial); review notes mark
// the issues discussed in the answers.
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
std::cout << "ERROR GLEW" << std::endl;
std::cout << glGetString(GL_VERSION) << std::endl;
float positions[] = {
-0.5f,-0.5f, //0
0.5f, -0.5f, //1
0.5f, 0.5f, //2
-0.5f, 0.5f //3
};
unsigned int indices[] = {
0, 1, 2,
2, 3, 0
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
// NOTE(review): 6 * 2 floats (48 bytes) is larger than positions itself
// (4 vertices * 2 floats = 32 bytes) — this reads past the end of the
// array; sizeof(positions) is the correct size (see the answer below).
glBufferData(GL_ARRAY_BUFFER, 6 * 2 * sizeof(float), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
unsigned int ibo;
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
// This size happens to match: indices holds exactly 6 unsigned ints.
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 6 * sizeof(unsigned int), indices, GL_STATIC_DRAW);
// NOTE(review): ParseShader/CreateShader are project code not shown here;
// the answer suspects the loader since the inlined shaders work.
ShaderSourceProgram shader = ParseShader("res/shaders/Basic.shader");
//std::cout << "VERTEX" << std::endl;
//std::cout << shader.VertexShader << std::endl;
unsigned int shaderProgram = CreateShader(shader.VertexShader, shader.FragmentShader);
glUseProgram(shaderProgram);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shaderProgram);
glfwTerminate();
return 0;
}
The shaders:
#shader vertex
#version 330 core
// Pass the attribute straight through as the clip-space position.
layout(location = 0)in vec4 position;
void main()
{
gl_Position = position;
};
#shader fragment
#version 330 core
// Constant green output color.
out vec4 color;
void main()
{
color = vec4(0.0,1.0,0.0,1.0);
};
EDIT: Please click here for the entire project (github)
As #Ripi2 pointed out your glBufferData() calls, while not exactly wrong (they overrequest buffer space), aren't exactly correct either. Since you're using raw arrays you can use a straight sizeof(<arrayname>) for the size parameter:
glBufferData( GL_ARRAY_BUFFER, sizeof( positions ), positions, GL_STATIC_DRAW );
...
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( indices ), indices, GL_STATIC_DRAW );
I suspect your (unknown) shader loader might be wonky since my personal one worked fine.
All together:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <iostream>
#include <cstdarg>
// GLSL program loader (variadic version): Load takes (source, type) pairs,
// terminated by NULL, e.g. Load(vs, GL_VERTEX_SHADER, fs, GL_FRAGMENT_SHADER, NULL).
struct Program
{
// Compile/attach each (source, type) pair, link, and return the program id.
static GLuint Load( const char* shader, ... )
{
GLuint prog = glCreateProgram();
va_list args;
va_start( args, shader );
while( shader )
{
const GLenum type = va_arg( args, GLenum );
AttachShader( prog, type, shader );
shader = va_arg( args, const char* );
}
va_end( args );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
// Print the info log and exit unless the object compiled/linked OK.
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( EXIT_FAILURE );
}
// Compile src as the given stage, attach it, and flag it for deletion.
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
// Fixed version of the asker's program: same structure, but the shaders are
// inlined via the GLSL macro, compiled with error checking, and the buffer
// upload sizes match the arrays exactly.
int main( void )
{
GLFWwindow* window;
/* Initialize the library */
if( !glfwInit() )
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow( 640, 480, "Hello World", NULL, NULL );
if( !window )
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent( window );
if( glewInit() != GLEW_OK )
std::cout << "ERROR GLEW" << std::endl;
std::cout << glGetString( GL_VERSION ) << std::endl;
// Quad as two triangles via the index buffer below.
float positions[] = {
-0.5f,-0.5f, //0
0.5f, -0.5f, //1
0.5f, 0.5f, //2
-0.5f, 0.5f //3
};
unsigned int indices[] = {
0, 1, 2,
2, 3, 0
};
unsigned int buffer;
glGenBuffers( 1, &buffer );
glBindBuffer( GL_ARRAY_BUFFER, buffer );
glBufferData( GL_ARRAY_BUFFER, 6 * 2 * sizeof( float ), positions, GL_STATIC_DRAW );
glVertexAttribPointer( 0, 2, GL_FLOAT, GL_FALSE, sizeof( float ) * 2, 0 );
glEnableVertexAttribArray( 0 );
unsigned int ibo;
glGenBuffers( 1, &ibo );
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, ibo );
glBufferData( GL_ELEMENT_ARRAY_BUFFER, 6 * sizeof( unsigned int ), indices, GL_STATIC_DRAW );
const char* vert = GLSL
(
330 core,
layout(location = 0)in vec4 position;
void main()
{
gl_Position = position;
};
);
const char* frag = GLSL
(
330 core,
out vec4 color;
void main()
{
color = vec4(0.0,1.0,0.0,1.0);
};
);
unsigned int shaderProgram = Program::Load
(
vert, GL_VERTEX_SHADER,
frag, GL_FRAGMENT_SHADER,
NULL
);
glUseProgram( shaderProgram );
/* Loop until the user closes the window */
while( !glfwWindowShouldClose( window ) )
{
/* Render here */
glClear( GL_COLOR_BUFFER_BIT );
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr );
/* Swap front and back buffers */
glfwSwapBuffers( window );
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram( shaderProgram );
glfwTerminate();
return 0;
}
glUseProgram() needs to be called after shaders are compiled and linked. Here's a piece of a sample code:
// Sample demonstrating that glUseProgram() must come after compile+link;
// the commented-out per-frame glUseProgram shows it only needs to be current
// at draw time.
int main()
{
glfwInit();
// NOTE(review): only the MAJOR version hint is set; MINOR is never given.
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", NULL, NULL);
if (window == NULL)
{
cout << "Failed to create GLFW window" << endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLAD
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
cout << "Failed to initialize GLAD" << endl;
return -1;
}
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glViewport(0, 0, 800, 600);
float TriangleVertices[] = {
-0.3f, -0.3f, 0.0f,
0.0f, 0.3f, 0.0f,
0.3f, -0.3f, 0.0f
};
unsigned int VBO, VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
//BindVertexArray for modifying it
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(TriangleVertices), TriangleVertices, GL_STATIC_DRAW);
// VAO is bound before glVertexAttribPointer, so the attribute state below
// is recorded into it.
glBindVertexArray(VAO);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// NOTE(review): neither shader source starts with a #version directive —
// presumably relies on the implicit default (GLSL 1.10), where
// layout(location=...) may not compile; TODO confirm on the target driver.
const char* vertexShaderSource = "\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos, 1.0);\n"
"}\0";
unsigned int vertexShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
const char* fragmentShaderSource = "\n"
"out vec4 FragColor; \n"
"void main()\n"
"{\n"
"FragColor = vec4(1.0f, 0.7f, 0.5f, 1.0f); \n"
"} \n";
unsigned int fragmentShader;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
unsigned int shaderProgram;
shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
// The answer's point: glUseProgram after linking is sufficient — it stays
// current for every draw in the loop below.
glUseProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
while (!glfwWindowShouldClose(window))
{
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
//// draw triangle using the data from the VAO
//glUseProgram(shaderProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(shaderProgram);
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}

openGL not rendering colour

I'm having trouble drawing a couple triangles in colour. The shaders load fine, and I have a slight feeling that it's the way I've laid out my data, but in general, I have no clue why it won't draw it in colour.
main.cpp :
//g++ main.cpp -lGL -lGLEW -lGLU -lglut LoadShaders.cpp -o run
#include<GL/glew.h>
#include<GL/freeglut.h>
#include<GL/gl.h>
#include<iostream>
#include"LoadShaders.h"
#include"vgl.h"
using namespace std;
enum VAO_IDs {Triangles, NumVAOs };
enum Attrib_IDs {vPosition = 0, cPosition = 1};
GLuint VAOs;
GLuint Buffers;
const GLuint NumVertices = 6;
void init(void)
{
glGenVertexArrays(1, &VAOs);
glBindVertexArray(VAOs);
GLfloat vertices[NumVertices] [2] = {
{-0.90, -0.90},
{0.85, -0.90 },
{-0.90, 0.85 },
{0.90, -0.85 },
{0.90, 0.90 },
{-0.85, 0.90 }
};
glGenBuffers(1, &Buffers);
glBindBuffer(GL_ARRAY_BUFFER, Buffers);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
GLfloat colours[NumVertices] [3] = {
{0.583f, 0.568f, 1.000f},
{0.100f, 0.435f, 0.235f},
{0.456f, 0.345f, 0.654f},
{0.345f, 0.222f, 0.564f},
{0.109f, 0.538f, 1.000f},
{0.057f, 0.453f, 0.777f},
};
GLuint colourBuffer;
glGenBuffers(1, &colourBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colourBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(colours), colours, GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(0);
ShaderInfo shaders[]={
{GL_VERTEX_SHADER, "triangles.vert"},
{GL_FRAGMENT_SHADER, "triangles.frag"},
{GL_NONE}
};
GLuint program = LoadShaders(shaders);
glUseProgram(program);
}
void display(void)
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAOs);
glDrawArrays(GL_TRIANGLES, 0, NumVertices);
glFlush();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA);
glutInitWindowSize(512,512);
glutInitContextVersion( 4, 2 );
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow(argv[0]);
if (glewInit()) {
cerr << "Unable to initialize GLEW"<<endl;
exit(EXIT_FAILURE);
}
init();
glutDisplayFunc(display);
glutMainLoop();
}
triangles.vert:
#version 420 core
layout(location = 0) in vec2 vPosition;
layout(location = 1) in vec3 cPosition;
void main()
{
gl_Position = vPosition;
fragmentColor = cPosition;
}
triangles.frag:
#version 420 core
in vec3 fragmentColor;
out vec3 fColor;
void main()
{
fColor = fragmentColor;
}
Two problems in your vertex shader:
Missing a declaration for fragmentColor.
Invalid implicit conversion: gl_Position is a vec4, not a vec2. You have to expand out vPosition manually using a vec4 constructor: vec4( vPosition, 0.0, 1.0 ).
All together:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
GLchar log[ 1 << 15 ] = { 0 };
if( glIsShader(obj) ) glGetShaderInfoLog( obj, sizeof(log), NULL, log );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, sizeof(log), NULL, log );
std::cerr << log << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
#define GLSL(version, shader) "#version " #version "\n" #shader
const char* vert = GLSL
(
420 core,
layout(location = 0) in vec2 vPosition;
layout(location = 1) in vec3 cPosition;
out vec3 fragmentColor;
void main()
{
gl_Position = vec4( vPosition, 0.0, 1.0 );
fragmentColor = cPosition;
}
);
const char* frag = GLSL
(
420 core,
in vec3 fragmentColor;
out vec3 fColor;
void main()
{
fColor = fragmentColor;
}
);
GLuint VAO;
const GLuint NumVertices = 6;
void init()
{
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
GLfloat vertices[NumVertices] [2] =
{
{-0.90, -0.90},
{0.85, -0.90 },
{-0.90, 0.85 },
{0.90, -0.85 },
{0.90, 0.90 },
{-0.85, 0.90 },
};
GLuint Buffer;
glGenBuffers(1, &Buffer);
glBindBuffer(GL_ARRAY_BUFFER, Buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
GLfloat colours[NumVertices] [3] =
{
{0.583f, 0.568f, 1.000f},
{0.100f, 0.435f, 0.235f},
{0.456f, 0.345f, 0.654f},
{0.345f, 0.222f, 0.564f},
{0.109f, 0.538f, 1.000f},
{0.057f, 0.453f, 0.777f},
};
GLuint colourBuffer;
glGenBuffers(1, &colourBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colourBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(colours), colours, GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(0);
GLuint program = Program::Load( vert, NULL, frag );
glUseProgram(program);
}
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, NumVertices);
glutSwapBuffers();
}
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(512, 512);
glutInitContextVersion(4, 2);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow(argv[0]);
glewExperimental = GL_TRUE;
if( GLEW_OK != glewInit() )
{
return 1;
}
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}

How to use two shaders to render two objects in OpenGL

I'm trying to use two different shader programs in order to render two different objects.
I have two vertex shaders with the same input parameters, and two fragment shaders, where one of them is set to color the fragments red.
When trying to get the attribute location of color in the loadPlainShader, I get a GL_INVALID_INDEX as a return.
Why can't OpenGL find the color attribute the second time, even though the vertex shaders are practically alike?
I have included my code, and it is also available here:
https://www.dropbox.com/sh/8o98ovh8pzubxiw/XGbO-IGQsW/stackoverflow
What I am trying
From main I'm trying to first load the two shader programs:
int main(int argc, char* argv[]) {
... GLUT and GLEW initialization ...
loadPlainShader();
loadShader();
loadGeometry();
glutMainLoop();
}
loadPlainShader()
// Builds the "plain" shader program and caches its uniform/attribute
// locations in globals for loadPlainBufferData and the draw code.
void loadPlainShader(){
plainShader = InitShader("plain-shader.vert", "plain-shader.frag", "fragColor");
plainProjUniform = glGetUniformLocation(plainShader, "projection");
plainModelViewUniform = glGetUniformLocation(plainShader, "modelView");
// NOTE(review): this is the lookup the question says returns
// GL_INVALID_INDEX — "color" is not an active attribute of plainShader.
plainColorAttribute = glGetAttribLocation(plainShader, "color");
plainPositionAttribute = glGetAttribLocation(plainShader, "position");
}
loadShader()
// Builds the color shader program and caches its uniform/attribute
// locations in globals for loadBufferData and the draw code.
void loadShader(){
shaderProgram = InitShader("color-shader.vert", "color-shader.frag", "fragColor");
projectionUniform = glGetUniformLocation(shaderProgram, "projection");
modelViewUniform = glGetUniformLocation(shaderProgram, "modelView");
colorAttribute = glGetAttribLocation(shaderProgram, "color");
positionAttribute = glGetAttribLocation(shaderProgram, "position");
}
initShader()
// Compile vShaderFile + fShaderFile into a linked program, bind fragment
// output 0 to outputAttributeName, make the program current, and return it.
// Any read/compile/link failure prints a diagnostic and exits the process.
GLuint InitShader(const char* vShaderFile, const char* fShaderFile, const char* outputAttributeName) {
    struct Shader {
        const char* filename;
        GLenum type;
        GLchar* source;
    } shaders[2] = {
        { vShaderFile, GL_VERTEX_SHADER, NULL },
        { fShaderFile, GL_FRAGMENT_SHADER, NULL }
    };
    GLuint program = glCreateProgram();
    for ( int i = 0; i < 2; ++i ) {
        Shader& s = shaders[i];
        s.source = readShaderSource( s.filename );
        if ( shaders[i].source == NULL ) {
            std::cerr << "Failed to read " << s.filename << std::endl;
            exit( EXIT_FAILURE );
        }
        GLuint shader = glCreateShader( s.type );
        glShaderSource( shader, 1, (const GLchar**) &s.source, NULL );
        glCompileShader( shader );
        GLint compiled;
        glGetShaderiv( shader, GL_COMPILE_STATUS, &compiled );
        if ( !compiled ) {
            std::cerr << s.filename << " failed to compile:" << std::endl;
            GLint logSize;
            glGetShaderiv( shader, GL_INFO_LOG_LENGTH, &logSize );
            char* logMsg = new char[logSize];
            glGetShaderInfoLog( shader, logSize, NULL, logMsg );
            std::cerr << logMsg << std::endl;
            delete [] logMsg;
            exit( EXIT_FAILURE );
        }
        delete [] s.source;
        glAttachShader( program, shader );
        // Flag the shader for deletion now that it is attached; GL frees it
        // together with the program, so repeated InitShader calls no longer
        // leak shader objects.
        glDeleteShader( shader );
    }
    /* Link output */
    glBindFragDataLocation(program, 0, outputAttributeName);
    /* link and error check */
    glLinkProgram(program);
    GLint linked;
    glGetProgramiv( program, GL_LINK_STATUS, &linked );
    if ( !linked ) {
        std::cerr << "Shader program failed to link" << std::endl;
        GLint logSize;
        glGetProgramiv( program, GL_INFO_LOG_LENGTH, &logSize);
        char* logMsg = new char[logSize];
        glGetProgramInfoLog( program, logSize, NULL, logMsg );
        std::cerr << logMsg << std::endl;
        delete [] logMsg;
        exit( EXIT_FAILURE );
    }
    /* use program object */
    glUseProgram(program);
    return program;
}
loadGeometry()
// Uploads the rectangle and the triangle into their own vertex array
// objects and stores the resulting VAO names in the
// rectangleVertexArrayBuffer / triangleVertexArrayBuffer globals that
// display() binds each frame.
void loadGeometry() {
    vec3 color(1.0f, 1.0f, 0.0f);  // yellow, shared by all rectangle corners
    // NOTE: the unused local `colorR` was removed; the triangle spells out
    // its per-vertex colors (red/green/blue) directly below.
    Vertex rectangleData[rectangleSize] = {
        { vec2(-5.0, -5.0 ), color },
        { vec2(-5.0, 5.0 ), color },
        { vec2( 8.0, 5.0 ), color },
        { vec2( 8.0, -5.0 ), color }
    };
    Vertex triangleData[triangleSize] = {
        { vec2(2.0, 2.0 ), vec3(1.0f, 0.0f, 0.0f)},
        { vec2(5.0, 2.0 ), vec3(0.0f, 1.0f, 0.0f) },
        { vec2( 3.5, 5.0 ), vec3(0.0f, 0.0f, 1.0f) }
    };
    rectangleVertexArrayBuffer = loadBufferData(rectangleData, rectangleSize);
    triangleVertexArrayBuffer = loadPlainBufferData(triangleData, triangleSize);
}
loadBufferData
// Creates a VAO + VBO for `vertexCount` interleaved Vertex records and wires
// the color-shader's attributes (positionAttribute / colorAttribute) to it.
// Returns the VAO name; the VBO name is intentionally not kept, so the
// buffer cannot be deleted independently later.
GLuint loadBufferData(Vertex* vertices, int vertexCount) {
GLuint vertexArrayObject;
glGenVertexArrays(1, &vertexArrayObject);
// Bind first: all attribute state recorded below is captured by this VAO.
glBindVertexArray(vertexArrayObject);
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, vertexCount * sizeof(Vertex), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(positionAttribute);
glEnableVertexAttribArray(colorAttribute);
// Interleaved layout: vec2 position at offset 0, vec3 color right after it.
glVertexAttribPointer(positionAttribute, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)0);
glVertexAttribPointer(colorAttribute , 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)sizeof(vec2));
return vertexArrayObject;
}
loadPlainBufferData
// Same as loadBufferData() but wires the *plain* shader's attribute
// locations (plainPositionAttribute / plainColorAttribute) instead.
// NOTE(review): the two functions are otherwise duplicates; they could share
// a helper taking the two attribute locations as parameters.
GLuint loadPlainBufferData(Vertex* vertices, int vertexCount) {
GLuint vertexArrayObject;
glGenVertexArrays(1, &vertexArrayObject);
// Bind first so the attribute setup below is recorded into this VAO.
glBindVertexArray(vertexArrayObject);
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, vertexCount * sizeof(Vertex), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(plainPositionAttribute);
glEnableVertexAttribArray(plainColorAttribute);
// Interleaved layout: vec2 position at offset 0, vec3 color right after it.
glVertexAttribPointer(plainPositionAttribute, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)0);
glVertexAttribPointer(plainColorAttribute , 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)sizeof(vec2));
return vertexArrayObject;
}
My display function
// Per-frame draw: clears, renders the rotated rectangle with the color
// shader, then the translated triangle with the plain shader.
void display() {
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
mat4 projection = Ortho2D(-15.0f, 15.0f, -10.0f, 15.0f);
// GL_TRUE transpose: presumably Angel's mat4 is row-major -- TODO confirm.
// Uniform uploads only affect the *current* program (shaderProgram here).
glUniformMatrix4fv(projectionUniform, 1, GL_TRUE, projection);
mat4 modelViewRec;
mat4 modelViewTri;
modelViewRec = RotateZ(45) * modelViewRec;
// render rectangle
glUniformMatrix4fv(modelViewUniform, 1, GL_TRUE, modelViewRec);
glBindVertexArray(rectangleVertexArrayBuffer);
glDrawArrays(GL_TRIANGLE_FAN, 0, rectangleSize);
modelViewTri = Translate(6,7,0) * modelViewTri;
glUseProgram(plainShader);
// render triangle
// NOTE(review): only plainShader's modelView is uploaded here; if the plain
// vertex shader also declares a `projection` uniform (the shaders are said
// to be identical), it is never set for plainShader, so the triangle is
// transformed by a zero/undefined projection -- likely why it doesn't show.
// Upload plainShader's projection uniform after glUseProgram(plainShader).
glUniformMatrix4fv(plainModelViewUniform, 1, GL_TRUE, modelViewTri);
glBindVertexArray(triangleVertexArrayBuffer);
glDrawArrays(GL_TRIANGLES, 0, triangleSize);
glutSwapBuffers();
Angel::CheckError();
}
color-shader vertex shader and plain-shader vertex shader (identical)
// Shared vertex shader: lifts the 2D position to clip space through
// modelView then projection, and forwards the per-vertex color.
#version 150
uniform mat4 projection;
uniform mat4 modelView;
in vec2 position;
in vec3 color;
out vec4 colorV;
void main (void)
{
colorV = vec4(color, 1.0);
gl_Position = projection * modelView * vec4(position, 0.0, 1.0);
}
Plainshader fragment shader
// Plain fragment shader: the interpolated colorV input is declared but
// deliberately ignored; every fragment is painted solid red.
#version 150
in vec4 colorV;
out vec4 fragColor;
void main(void)
{
fragColor = vec4(1.0,0.0,0.0,1.0);
}
color-shader fragment shader
// Color fragment shader: passes the interpolated vertex color through.
#version 150
in vec4 colorV;
out vec4 fragColor;
void main(void)
{
fragColor = colorV;
}

Can't draw triangle using Opengl

In this code I want to draw a simple triangle on a blue background using OpenGL. However, when I compile and run the code, only a window with the blue background appears (without the white triangle that is supposed to be drawn). I am using Xcode.
my code
#include <iostream>
#include <string>
#include <GLUT/glut.h>
#include <OpenGL/gl3.h>
#include <fstream>
using namespace std;
// VAO & VBO objects
GLuint VBO;
GLuint VAO;
void display();
// vertex Data (position)
// Interpreted by the attribute setup in display() (3 floats per vertex) as
// the xyz positions (-1, 0, 1), (0, 1, 0), (0, 0, 0).
float vertex[] = {-1.0, 0.0 , 1.0,
0.0, 1.0 , 0.0,
0.0, 0.0 , 0.0 };
// Shader / program object names, filled in by display().
GLuint Program;
GLuint Vshader;
GLuint Fshader;
// main program
// Program entry point: creates the GLUT window and enters the event loop.
// NOTE(review): only GLUT_DOUBLE is requested -- no explicit RGBA flag and,
// more importantly, no core-profile context request
// (glutInitContextVersion / glutInitContextProfile equivalents), even though
// the shaders target modern GLSL. On macOS this presumably leaves the app on
// the legacy 2.1 context -- confirm the intended context version.
int main (int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE);
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutCreateWindow("My First GLUT/OpenGL Window");
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
// main display function
void display()
{
// reading the vertex shader
fstream VertexS;
VertexS.open("/Users/hawbashali/Desktop/Project X/BwlbWl/BwlbWl/vertexShader");
if(VertexS.good() == false)
cout << "Error openning the file \n";
if(VertexS.bad() == true)
cout << "Read/writing error on i/o operation \n";
if (VertexS.fail() == true)
cout <<"Logical error on i/o operation \n";
VertexS.seekg(0,ios::end);
int size = (int)VertexS.tellg();
VertexS.clear();
VertexS.seekg(0,ios::beg);
char* vBuffer = new (nothrow) char[size];
VertexS.read(vBuffer,size);
VertexS.close();
// reading fragment shader
fstream FragS;
FragS.open("/Users/hawbashali/Desktop/Project X/BwlbWl/BwlbWl/fragmentShader");
if(FragS.good() == false)
cout << "Error openning the file \n";
if(FragS.bad() == true)
cout << "Read/writing error on i/o operation \n";
if (FragS.fail() == true)
cout <<"Logical error on i/o operation \n";
FragS.seekg(0,ios::end);
int size2 = (int)FragS.tellg();
FragS.clear();
FragS.seekg(0,ios::beg);
char* fBuffer = new (nothrow) char[size2];
FragS.read(fBuffer,size2);
FragS.close();
// creating shaders
Vshader = glCreateShader(GL_VERTEX_SHADER);
Fshader = glCreateShader(GL_FRAGMENT_SHADER);
GLint x = size;
GLint y = size2;
glShaderSource(Vshader, 1,(const char**)&vBuffer, &x);
glShaderSource(Fshader, 1, (const char**)&fBuffer, &y);
glCompileShader(Vshader);
glCompileShader(Fshader);
Program = glCreateProgram();
glAttachShader(Program, Vshader);
glAttachShader(Program, Fshader);
glLinkProgram(Program);
glUseProgram(Program);
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER,9 *sizeof(vertex),vertex, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3,GL_FLOAT, GL_TRUE, 0, 0);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
glutSwapBuffers();
glDeleteShader(Vshader);
glDeleteShader(Fshader);
delete [] vBuffer;
delete [] fBuffer;
};
Vertex shader:
// Pass-through vertex shader.
// NOTE(review): "#version 320 core" is not a valid desktop GLSL version
// string (320 exists only as "320 es"); the answer below recommends
// "#version 150 core" for a 3.2 core context.
#version 320 core
layout(location = 0) in vec4 vPosition;
void
main()
{
gl_Position = vPosition;
}
Fragment Shader
// Solid-color fragment shader.
// NOTE(review): outputs the same blue as the glClearColor in display(), so
// the triangle is invisible against the background; "#version 320 core" is
// also not a valid desktop GLSL version (see the answer below).
#version 320 core
out vec4 fColor;
void
main()
{
fColor = vec4(0.0, 0.0, 1.0, 1.0);
}
glClearColor(0, 0, 1,1);
...
fColor = vec4(0.0, 0.0, 1.0, 1.0);
You're trying to draw a blue triangle on top of a blue background. You'll have to dial up your contrast pretty high to see that :)
Make one of them a different color, like red.
You're also:
Missing a VAO
Not requesting a Core context
Using #version 320 core instead of #version 150 core
Try this:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE, len = 10;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
std::vector< char > log( len, 'X' );
if( glIsShader(obj) ) glGetShaderInfoLog( obj, len, NULL, &log[0] );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, len, NULL, &log[0] );
std::cerr << &log[0] << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
// Stringifies an inline GLSL source; `version` becomes the #version
// directive on its own line. (Comments must stay *outside* the macro
// arguments: the preprocessor strips them before stringification.)
#define GLSL(version, shader) "#version " #version "\n" #shader
// Vertex shader: pass-through position at attribute location 0.
const char* vert = GLSL
(
150 core,
layout(location = 0) in vec4 vPosition;
void
main()
{
gl_Position = vPosition;
}
);
// Fragment shader: solid red, so the triangle contrasts with the blue clear.
const char* frag = GLSL
(
150 core,
out vec4 fColor;
void
main()
{
fColor = vec4(1.0, 0.0, 0.0, 1.0);
}
);
// VAO & VBO objects
GLuint VAO;
GLuint VBO;
GLuint prog;
// One-time GL setup: create the VAO/VBO, upload the triangle, build the
// shader program and wire attribute 0. Called once after context creation.
void init()
{
    // vertex Data (position)
    float vertex[] = { -1.0, -1.0 , 0.0,
                        1.0, -1.0 , 0.0,
                        0.0,  1.0 , 0.0 };
    glGenVertexArrays( 1, &VAO );
    glBindVertexArray( VAO );
    glGenBuffers(1,&VBO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    // BUG FIX: sizeof(vertex) is already the byte size of the whole 9-float
    // array; the previous `9 * sizeof(vertex)` made GL read 8 arrays' worth
    // of memory past the end of `vertex`.
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
    prog = Program::Load( vert, NULL, frag );
    glUseProgram( prog );
    glEnableVertexAttribArray(0);
    glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, 0, 0 );
}
// Per-frame draw: clear to blue and draw the red triangle.
// All bindings (VAO, program, attributes) were established once in init().
void display()
{
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
// Entry point: request a 3.2 core context, init GLEW, then run the loop.
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
// Explicit core-profile request -- required for the #version 150 shaders.
glutInitContextVersion(3, 2);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow("My First GLUT/OpenGL Window");
// Needed on core contexts so GLEW resolves the modern entry points.
glewExperimental = GL_TRUE;
if( GLEW_OK != glewInit() )
return -1;
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
EDIT: Ported to GL 2.1:
#include <GL/glew.h>
#include <GL/glut.h>
#include <vector>
#include <iostream>
struct Program
{
static GLuint Load( const char* vert, const char* geom, const char* frag )
{
GLuint prog = glCreateProgram();
if( vert ) AttachShader( prog, GL_VERTEX_SHADER, vert );
if( geom ) AttachShader( prog, GL_GEOMETRY_SHADER, geom );
if( frag ) AttachShader( prog, GL_FRAGMENT_SHADER, frag );
glLinkProgram( prog );
CheckStatus( prog );
return prog;
}
private:
static void CheckStatus( GLuint obj )
{
GLint status = GL_FALSE, len = 10;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_LINK_STATUS, &status );
if( status == GL_TRUE ) return;
if( glIsShader(obj) ) glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
if( glIsProgram(obj) ) glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
std::vector< char > log( len, 'X' );
if( glIsShader(obj) ) glGetShaderInfoLog( obj, len, NULL, &log[0] );
if( glIsProgram(obj) ) glGetProgramInfoLog( obj, len, NULL, &log[0] );
std::cerr << &log[0] << std::endl;
exit( -1 );
}
static void AttachShader( GLuint program, GLenum type, const char* src )
{
GLuint shader = glCreateShader( type );
glShaderSource( shader, 1, &src, NULL );
glCompileShader( shader );
CheckStatus( shader );
glAttachShader( program, shader );
glDeleteShader( shader );
}
};
// Stringifies an inline GLSL source; `version` becomes the #version
// directive. (Comments must stay outside the macro arguments: the
// preprocessor strips them before stringification.)
#define GLSL(version, shader) "#version " #version "\n" #shader
// GLSL 1.20 vertex shader: legacy `attribute` qualifier, no layout().
const char* vert = GLSL
(
120,
attribute vec4 vPosition;
void main()
{
gl_Position = vPosition;
}
);
// GLSL 1.20 fragment shader: writes the built-in gl_FragColor (red).
const char* frag = GLSL
(
120,
void main()
{
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
);
// VBO objects
GLuint VBO;
GLuint prog;
// One-time GL 2.1 setup: upload the triangle, build the shader program and
// wire the position attribute looked up by name (no VAO needed in 2.1).
void init()
{
    // vertex Data (position)
    float vertex[] = { -1.0, -1.0 , 0.0,
                        1.0, -1.0 , 0.0,
                        0.0,  1.0 , 0.0 };
    glGenBuffers(1,&VBO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    // BUG FIX: sizeof(vertex) is already the byte size of the whole 9-float
    // array; the previous `9 * sizeof(vertex)` made GL read far past the end
    // of `vertex`.
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
    prog = Program::Load( vert, NULL, frag );
    glUseProgram( prog );
    // GLSL 1.20 has no layout(location); query the attribute by name.
    int posLoc = glGetAttribLocation( prog, "vPosition" );
    glEnableVertexAttribArray( posLoc );
    glVertexAttribPointer( posLoc, 3, GL_FLOAT, GL_FALSE, 0, 0 );
}
// Per-frame draw: clear to blue and draw the red triangle.
// Buffer/program/attribute state was set up once in init().
void display()
{
glClearColor(0, 0, 1,1);
glClear(GL_COLOR_BUFFER_BIT);
// Drawing the triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
// Entry point for the GL 2.1 port: default (compatibility) context is fine,
// so no context version/profile request is needed here.
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
glutInitWindowSize(1000, 400);
glutInitWindowPosition(100, 100);
glutCreateWindow("My First GLUT/OpenGL Window");
if( GLEW_OK != glewInit() )
return -1;
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
You have provided invalid buffer size in the following line:
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(vertex), vertex, GL_STATIC_DRAW);
Because `vertex` is an array, `sizeof(vertex)` already returns the total size of the array in bytes (e.g. 36), not the size of the underlying element type — so there is no need to multiply by 9. If you still want to use a multiplication, try the following:
9 * sizeof(float)
One more note worth writing down: you should separate your initialization from your draw cycle, as shown in genpfault's answer.