To understand how to use two buffers and bind them to a VAO, I am trying to display a colored triangle. I am getting an error like this:
First-chance exception at 0x00000000 in ConsoleApplication12.exe: 0xC0000005: Access violation executing location 0x00000000.
When I debug, the error is raised by this line:
glCreateVertexArrays(1, &vao);
I did initialize vao, but I do not understand why the error occurs. Please help.
Here is the code:
#include <stdlib.h>
#include<iostream>
#include <GL/glew.h>
#include <GL/freeglut.h>
#include "glm/glm.hpp"
#define BUFFER_OFFSET(offset) ((GLvoid *) offset)
GLuint program;
static int timeFor =0;
GLuint vPos;
GLuint buffer[2];
GLuint vao = 0;
void init()
{
static const GLfloat positions[] = { 0.1f, 0.2f, 0.3f, 0.0f};
static const GLfloat colors[] = { 1.0f, 0.0f, 0.0f, 1.0f};
glCreateVertexArrays(1, &vao);
glCreateBuffers(2, &buffer[0]);
glNamedBufferStorage(buffer[0], sizeof(positions), positions, 0);
glVertexArrayVertexBuffer(vao, 0, buffer[0], 0, sizeof(glm::vec4));
glVertexArrayAttribFormat(vao, 0, 4, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 0, 0);
glNamedBufferStorage(buffer[1], sizeof(colors), colors, 0);
glVertexArrayVertexBuffer(vao, 1, buffer[1], 0, sizeof(glm::vec4));
glVertexArrayAttribFormat(vao, 1, 4, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 1, 1);
const char* vSource = {
"#version 330\n"
"layout (location = 0) in vec4 offset;"
"layout (location = 1) in vec4 color;"
"out vec4 fColor;"
"void main() {"
"const vec4 vertices[3] = vec4[3](vec4( 0.25, -0.25, 0.5, 1.0), vec4( -0.25, -0.25, 0.5, 1.0), vec4( 0.25, 0.25, 0.5, 1.0));"
"gl_Position = vertices[gl_VertexID] + offset;"
"fColor = color;"
"}"
};
const char* fSource = {
"#version 330\n"
"in vec4 fColor;"
"out vec4 fragColor;"
"void main() {"
"fragColor = fColor;"
"}"
};
GLuint vShader, fShader;
vShader = glCreateShader(GL_VERTEX_SHADER);
fShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vShader, 1, &vSource, NULL);
glShaderSource(fShader, 1, &fSource, NULL);
glCompileShader(vShader);
glCompileShader(fShader);
program = glCreateProgram();
glAttachShader(program, vShader);
glAttachShader(program, fShader);
glLinkProgram(program);
glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
}
void reshape(int width, int height)
{
glViewport(0, 0, width, height);
}
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
void main(int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
glutCreateWindow(argv[0]);
glewInit();
init();
glutDisplayFunc(display);
glutReshapeFunc(reshape);
glutMainLoop();
}
I checked my OpenGL version: it is 4.4, which does not support glCreateVertexArrays (it is a 4.5 direct-state-access function). The function pointer GLEW returns is therefore null, which is why the call crashes at address 0x00000000. glGenVertexArrays must be used instead.
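For reference, the other direct-state-access calls in init() (glCreateBuffers, glNamedBufferStorage, glVertexArrayVertexBuffer, glVertexArrayAttribFormat, glVertexArrayAttribBinding) are also OpenGL 4.5 functions, so they would fail the same way on a 4.4 context. A minimal sketch of the same setup using the pre-4.5 bind-to-edit API, assuming the same positions/colors arrays and attribute locations as above:
// Create the VAO and buffers with glGen*, bind them, and describe the
// attributes while they are bound (no DSA needed).
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(2, buffer);
// Attribute 0: offset, read as 4 floats per vertex
glBindBuffer(GL_ARRAY_BUFFER, buffer[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(positions), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(0);
// Attribute 1: color, read as 4 floats per vertex
glBindBuffer(GL_ARRAY_BUFFER, buffer[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(1);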
I am trying to get a square on the screen. This code should draw a square, but it omits the last vertex and only draws a triangle.
Where have I gone wrong?
#include <glad/gl.h>
#define GLFW_INCLUDE_NONE
#include <GLFW/glfw3.h>
#include "linmath.h"
#include <stdlib.h>
#include <stdio.h>
static const struct
{
float x, y;
float r, g, b;
} vertices[4] =
{
{ -0.5f, -0.5f, 1.f, 0.f, 0.f },
{ 0.5f, -0.5f, 0.f, 1.f, 0.f },
{ -0.5f, 0.5f, 0.f, 0.f, 1.f },
{ 0.5f, 0.5f, 0.f, 1.f, 1.f}
};
static const char* vertex_shader_text =
"#version 110\n"
"uniform mat4 MVP;\n"
"attribute vec3 vCol;\n"
"attribute vec2 vPos;\n"
"varying vec3 color;\n"
"void main()\n"
"{\n"
" gl_Position = MVP * vec4(vPos, 0.0, 1.0);\n"
" color = vCol;\n"
"}\n";
static const char* fragment_shader_text =
"#version 110\n"
"varying vec3 color;\n"
"void main()\n"
"{\n"
" gl_FragColor = vec4(color, 1.0);\n"
"}\n";
static void error_callback(int error, const char* description)
{
fprintf(stderr, "Error: %s\n", description);
}
static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GLFW_TRUE);
}
int main(void)
{
GLFWwindow* window;
GLuint vertex_buffer,edge_buffer, vertex_shader, fragment_shader, program;
GLint mvp_location, vpos_location, vcol_location;
glfwSetErrorCallback(error_callback);
if (!glfwInit())
exit(EXIT_FAILURE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
window = glfwCreateWindow(640, 480, "Simple example", NULL, NULL);
if (!window)
{
glfwTerminate();
exit(EXIT_FAILURE);
}
glfwSetKeyCallback(window, key_callback);
glfwMakeContextCurrent(window);
gladLoadGL(glfwGetProcAddress);
glfwSwapInterval(1);
// NOTE: OpenGL error checks have been omitted for brevity
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_text, NULL);
glCompileShader(vertex_shader);
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_text, NULL);
glCompileShader(fragment_shader);
program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
glLinkProgram(program);
mvp_location = glGetUniformLocation(program, "MVP");
vpos_location = glGetAttribLocation(program, "vPos");
vcol_location = glGetAttribLocation(program, "vCol");
glEnableVertexAttribArray(vpos_location);
glVertexAttribPointer(vpos_location, 2, GL_FLOAT, GL_FALSE,
sizeof(vertices[0]), (void*) 0);
glEnableVertexAttribArray(vcol_location);
glVertexAttribPointer(vcol_location, 3, GL_FLOAT, GL_FALSE,
sizeof(vertices[0]), (void*) (sizeof(float) * 2));
while (!glfwWindowShouldClose(window))
{
float ratio;
int width, height;
mat4x4 m, p, mvp;
glfwGetFramebufferSize(window, &width, &height);
ratio = width / (float) height;
glViewport(0, 0, width, height);
glClear(GL_COLOR_BUFFER_BIT);
mat4x4_identity(m);
mat4x4_rotate_Z(m, m, (float) glfwGetTime());
mat4x4_ortho(p, -ratio, ratio, -1.f, 1.f, 1.f, -1.f);
mat4x4_mul(mvp, p, m);
glUseProgram(program);
glUniformMatrix4fv(mvp_location, 1, GL_FALSE, (const GLfloat*) mvp);
glDrawArrays(GL_POLYGON, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
exit(EXIT_SUCCESS);
}
I only have 3 of the 4 vertices rendering, thus resulting in a triangle instead of a square.
You're telling OpenGL to render 3 vertices instead of 4 with glDrawArrays(GL_POLYGON, 0, 3);
Change it to glDrawArrays(GL_POLYGON, 0, 4);
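For reference, the four vertices here are laid out in strip order (bottom-left, bottom-right, top-left, top-right), so a triangle strip over all four vertices also covers the square; this is only a sketch of an alternative, and unlike GL_POLYGON it remains available in core-profile contexts:
// The vertices are already in triangle-strip order, so triangles (0,1,2)
// and (1,2,3) tile the whole square.
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);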
I'm trying to use glVertexAttribFormat and glVertexAttribBinding to create two triangles, but it doesn't work. I followed the description of how to do this in the question here (Render one VAO containing two VBOs). I don't really know what to try. I am new to OpenGL, and all descriptions of glVertexAttribFormat appear to assume you already know OpenGL.
This is my code:
#include <glad/glad.h>
#include <glfw/glfw3.h>
#include <iostream>
void adjustViewportToWindowSize(GLFWwindow* window, int width, int height);
void checkEsc(GLFWwindow* window);
int main(void)
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(800, 600, "Tab name", NULL, NULL);
if (window == NULL)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
std::cout << "Failed to initialize GLAD" << std::endl;
return -1;
}
glViewport(0, 0, 800, 600); //size of GL rendering window.
glfwSetFramebufferSizeCallback(window, adjustViewportToWindowSize);
const char* vertexShaderSource = "#version 430 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fragmentShaderSource = "#version 430 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
"FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
"}\0";
unsigned int vertexShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
unsigned int fragmentShader;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
unsigned int shaderProgram;
shaderProgram = glCreateProgram();
//Attaching shaders to program
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
//Can delete shader objects after they are linked
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
float triangleVertices1[] = {
-0.50f, 0.0f, 0.0f,
-0.25f, 0.5f, 0.0f,
0.00f, 0.0f, 0.0f
};
float triangleVertices2[] = {
0.0f, 0.0f, 0.0f,
0.25f, 0.5f, 0.0f,
0.5f, 0.0f, 0.0f
};
unsigned int aVBO[2], VAO2;
glGenVertexArrays(1, &VAO2);
glBindVertexArray(VAO2);
glVertexAttribFormat(0, 3, GL_FLOAT, GL_FALSE, 0); //format setup without a buffer
glVertexAttribBinding(0, 0);
glBindVertexArray(0);
//Bind Buffers to data next
glGenBuffers(2, aVBO);
glBindBuffer(GL_ARRAY_BUFFER, aVBO[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangleVertices1), triangleVertices1, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, aVBO[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangleVertices2), triangleVertices2, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
while (!glfwWindowShouldClose(window))
{
checkEsc(window);
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
glBindVertexArray(VAO2);
glBindVertexBuffer(0, aVBO[0], 0, 3*sizeof(float));
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexBuffer(0, aVBO[1], 0, 3*sizeof(float));
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
glfwSwapBuffers(window);
glfwPollEvents();
}
//Clear up
glDeleteVertexArrays(1, &VAO2);
glDeleteBuffers(1, aVBO);
glDeleteProgram(shaderProgram);
glfwTerminate();
return 0;
}
void adjustViewportToWindowSize(GLFWwindow* window, int width, int height)
{
glViewport(0, 0, width, height);
}
void checkEsc(GLFWwindow *window)
{
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
glfwSetWindowShouldClose(window, true);
}
The code can also be accessed on GitHub: https://github.com/Ritzerk/OpenGLSelfStudy
In addition to setting up the buffer bindings, you also have to enable the vertex attribute so the shader input receives data. To do so, call glEnableVertexAttribArray during VAO setup:
glBindVertexArray(VAO2);
glVertexAttribFormat(0, 3, GL_FLOAT, GL_FALSE, 0);
glVertexAttribBinding(0, 0);
//Enable input in shader
glEnableVertexAttribArray(0);
glBindVertexArray(0);
I have this piece of code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <iostream>
void keyCallback(GLFWwindow* window, int key, int scancode, int action, int mods) {
if(key == GLFW_KEY_ESCAPE && action == GLFW_PRESS) {
glfwSetWindowShouldClose(window, GLFW_TRUE);
}
}
int main() {
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 5);
auto window = glfwCreateWindow(800, 600, "title", NULL, NULL);
glfwMakeContextCurrent(window);
glfwSetKeyCallback(window, keyCallback);
glewExperimental = true;
glewInit();
glViewport(0, 0, 800, 600);
static const char* vsSource[] = {
"#version 450 core\n"
"layout (location = 0) in vec3 position;"
"uniform mat4 mvp;"
"out vec3 pos;"
"void main() {"
"gl_Position = mvp * vec4(position, 1.0);"
"pos = position;"
"}"
};
static const char* fsSource[] = {
"#version 450 core\n"
"in vec3 pos;"
"out vec4 color;"
"void main() {"
"color = vec4(pos, 1.0) * 0.5 + 0.5;"
"}"
};
static const GLfloat clearColor[] = { 0.1f, 0.1f, 0.1f, 1.0f };
static const GLfloat vertices[] = {
-0.25f, -0.25f, -0.25f,
-0.25f, 0.25f, -0.25f,
0.25f, -0.25f, -0.25f,
0.25f, 0.25f, -0.25f,
0.25f, -0.25f, 0.25f,
0.25f, 0.25f, 0.25f,
-0.25f, -0.25f, 0.25f,
-0.25f, 0.25f, 0.25f,
};
static const GLushort indices[] = {
0, 1, 2,
2, 1, 3,
2, 3, 4,
4, 3, 5,
4, 5, 6,
6, 5, 7,
6, 7, 0,
0, 7, 1,
6, 0, 2,
2, 4, 6,
7, 5, 3,
7, 3, 1
};
auto vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, vsSource, NULL);
glCompileShader(vs);
auto fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, fsSource, NULL);
glCompileShader(fs);
auto program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
int logLength = 1024;
char* log = new char[logLength];
glGetProgramInfoLog(program, logLength, &logLength, log);
std::cout << log << std::endl;
glUseProgram(program);
auto mvpLocation = glGetUniformLocation(program, "mvp");
glm::mat4 projection = glm::perspective(50.0f, 800.0f/600.0f, 0.1f, 1000.0f);
glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3(0.0, 0.0, -10.0));
glm::mat4 mvp;
GLuint vao, vbo, ibo;
glCreateVertexArrays(1, &vao);
glBindVertexArray(vao);
glCreateBuffers(1, &vbo);
glNamedBufferStorage(vbo, sizeof(vertices), vertices, GL_MAP_READ_BIT);
glVertexArrayVertexBuffer(vao, 0, vbo, 0, 2 * sizeof(GLfloat));
glVertexArrayAttribFormat(vao, 0, 3, GL_FLOAT, GL_FALSE, NULL);
glVertexArrayAttribBinding(vao, 0, 0);
glEnableVertexArrayAttrib(vao, 0);
glCreateBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glNamedBufferStorage(ibo, sizeof(indices), indices, GL_MAP_READ_BIT);
float timer = 0.0f;
static const GLfloat depthClear = 1.0f;
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
glEnable(GL_CULL_FACE);
glfwSwapInterval(1);
while(!glfwWindowShouldClose(window)) {
glClearBufferfv(GL_COLOR, 0, clearColor);
glClearBufferfv(GL_DEPTH, 0, &depthClear);
mvp = projection * model * glm::rotate(glm::mat4(), timer / 1000.0f, glm::vec3(0.0f, 1.0f, 1.0f));
glUniformMatrix4fv(mvpLocation, 1, GL_FALSE, glm::value_ptr(mvp));
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_SHORT, 0);
glfwPollEvents();
glfwSwapBuffers(window);
timer += 1.0f;
}
glfwTerminate();
return 0;
}
I compile it with:
clang++ -std=c++11 main.cpp -o cube -lGLEW -lglfw3 -lGL -gsplit-dwarf
And this is how it is displayed when I compile and run it (expecting a rotating cube):
My question is: what happened here? Both the indices and vertices are copied from a similar, working example - i.e. they are the same.
I spent the last hour looking for any significant difference and the reason for the problem, but could not find it. The example code (from OpenGL SB7) compiles, runs, and displays a cube. It uses older OpenGL APIs (such as glBufferData where I use glNamedBufferStorage).
I tried different sets of vertices and indices (from different examples on the web) and all of them are displayed like this. So I think it's not the data's fault, but something in my code causes this. Unfortunately, I can't find what or where.
glVertexArrayVertexBuffer(vao, 0, vbo, 0, 2 * sizeof(GLfloat));
^ wat
The stride is wrong for the vertices. It should be 3 floats:
glVertexArrayVertexBuffer(vao, 0, vbo, 0, 3 * sizeof(GLfloat));
I'm guessing the pre-geometry-transplant code was using 2D geometry.
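Putting the fix in context, here is a sketch of the vertex buffer and attribute setup from the question, with the stride derived from the data layout (each vertex in vertices is three tightly packed GLfloats):
// Each vertex is x, y, z: 3 GLfloats, so consecutive vertices are
// 3 * sizeof(GLfloat) = 12 bytes apart in the buffer.
glVertexArrayVertexBuffer(vao, 0, vbo, 0, 3 * sizeof(GLfloat));
// The attribute format (3 floats, relative offset 0) already matches this.
glVertexArrayAttribFormat(vao, 0, 3, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 0, 0);
glEnableVertexArrayAttrib(vao, 0);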
I have written a simple OpenGL program to draw a triangle on the screen. I have debugged it with glGetError() and no errors are reported, but when I run it, only a black screen comes up.
Here is my code. I am using GLFW for window creation.
#include<glew.h>
#include<glfw3.h>
#include<stdio.h>
int main(int argc, char ** argv)
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
glewInit();
float vertices[] = {
0.0f, 0.5f,
0.5f, -0.5f,
-0.5f, -0.5f
};
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
const GLchar * vs =
"#version 150\n"
"in vec2 position;\n"
"void main() {\n"
"vec4 gl_Position = vec4( position , 0.0 , 1.0 );\n"
"}";
const GLchar * fs =
"#version 150\n"
"out vec4 out_color; \n"
"void main() { \n"
"out_color = vec4(1.0, 1.0, 1.0, 1.0);\n"
"}";
GLuint vsh = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vsh, 1, &vs, NULL);
glCompileShader(vsh);
GLint status;
glGetShaderiv(vsh, GL_COMPILE_STATUS, &status);
if (status == GL_TRUE) printf("Vertex Shader Compiled success\n");
GLuint fsh = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fsh, 1, &fs, NULL);
glCompileShader(fsh);
glGetShaderiv(fsh, GL_COMPILE_STATUS, &status);
if (status == GL_TRUE) printf("Fragment Shader Compiled success\n");
GLuint sp = glCreateProgram();
glAttachShader(sp, vsh);
glAttachShader(sp, fsh);
glBindFragDataLocation(sp, 0, "out_color");
glBindAttribLocation(sp,1,"position");
glLinkProgram(sp);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(1);
glUseProgram(sp);
while (!glfwWindowShouldClose(window))
{
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
Update
I have narrowed down the issue to just one line of code
GLint pos = glGetAttribLocation(sp, "position"); // sp is the shader program
The problem is that it returns -1. I have read in tutorials that if you don't use a variable, the compiler optimizes it out. I do use position in the shader, so why is it getting thrown away? Below is my vertex shader.
const GLchar * vs =
"#version 150\n"
"in vec2 position;\n"
"void main() {\n"
"vec4 gl_Position = vec4( position , 0.0 , 1.0 );\n"
"}";
Just add these lines after
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
In other words, use location 0 instead of 1.
Hope this helps.
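A minimal sketch of how that fits into the original code, assuming the attribute stays at location 0 both in the pointer setup and in the glBindAttribLocation call made before linking:
// Describe the "position" attribute at location 0 while the VBO is bound.
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
// Keep the bound location consistent with the pointer setup; this must
// happen before glLinkProgram(sp).
glBindAttribLocation(sp, 0, "position");
glLinkProgram(sp);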
I am trying to make a basic triangle in OpenGL ES 2, but whatever I do, it doesn't seem to work. All I get is a white triangle. I think there may be a problem when linking the program (which is why I have disabled the early return inside the link-status check "if"; otherwise it won't run at all). Also, when I run it from the console, the output shows "error: 1281". My code:
int loadShader(int type, char * shaderCode)
{
int length = strlen(shaderCode);
int shader = glCreateShader(type);
glShaderSource(shader, 1, (const char **) &shaderCode, NULL);
glCompileShader(shader);
return shader;
}
// Initializes the application data
int Init(void)
{
// Vertex shaders
char * vertexShaderCode =
"attribute vec4 vPosition; \n"
"void main() { \n"
" gl_Position = vPosition; \n"
"} \n";
char * fragmentShaderCode =
"precision mediump float; \n"
"void main() { \n"
" gl_FragColor = vec4(0.5, 0.5, 0.5, 1.0);\n"
"} \n";
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexShaderCode);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
GLint linked;
program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glBindAttribLocation(program, 0, "vPosition");
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if(!linked)
{
GLint infoLen = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLen);
if(infoLen > 1)
{
char* infoLog = (char *) malloc(sizeof(char) * infoLen);
glGetProgramInfoLog(program, infoLen, NULL, infoLog);
free(infoLog);
}
glDeleteProgram(program);
//return 0;
}
// Basic GL setup
glClearColor (0.0, 0.0, 0.0, 1.0);
return GL_TRUE;
}
// Main-loop workhorse function for displaying the object
void Display(void)
{
// Clear the screen
glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearColor(0.0, 0.0, 0.0, 1.0);
GLfloat vertices[] = {
-0.5f, 0.0f, -1.0f,
0.0f, 1.0f, -1.0f,
0.5f, 0.0f, -1.0f,
};
GLfloat colors[] = {
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
};
glUseProgram(program);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertices);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, colors);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
Enable the vertex attrib array first and then pass the pointers:
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertices);
glDrawArrays(GL_TRIANGLES, 0, 3);
Also, you are sending colors through attrib array 1, but there is no such attribute in the shader.
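One way to resolve the attrib 1 mismatch is to declare a matching color attribute in the shaders (the names aColor and vColor below are only illustrative; they are not in the original code), or simply drop the two color calls. A sketch under that assumption:
// Vertex shader: add a per-vertex color input and pass it through.
char * vertexShaderCode =
    "attribute vec4 vPosition; \n"
    "attribute vec4 aColor;    \n"
    "varying vec4 vColor;      \n"
    "void main() {             \n"
    "    vColor = aColor;      \n"
    "    gl_Position = vPosition; \n"
    "}                         \n";
// Fragment shader: use the interpolated color instead of a constant.
char * fragmentShaderCode =
    "precision mediump float;  \n"
    "varying vec4 vColor;      \n"
    "void main() {             \n"
    "    gl_FragColor = vColor; \n"
    "}                         \n";
// In Init(), bind both attribute names before linking:
glBindAttribLocation(program, 0, "vPosition");
glBindAttribLocation(program, 1, "aColor");
// In Display(), enable each array before supplying its pointer:
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertices);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, colors);
glDrawArrays(GL_TRIANGLES, 0, 3);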