Here is my code:
#include <iostream>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
const char* vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fragmentShaderSource = "#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(0.8f, 0.3f, 0.02f, 1.0f);\n"
"}\n\0";
int main()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window;
int windowWidth, windowHeight, resAnswer, aspectRatFirst, aspectRatSecond, aspectRatDeformation;
do {
std::cout << "Choose the preset for window resolution:" << std::endl;
std::cout << " 1. 800x800 (1:1)" << std::endl;
std::cout << " 2. 1024x768 (16:9)" << std::endl;
std::cout << "Answer: "; std::cin >> resAnswer;
} while (resAnswer != 1 && resAnswer != 2);
switch (resAnswer)
{
case 1:
windowWidth = 800;
windowHeight = 800;
aspectRatFirst = 1;
aspectRatSecond = 1;
break;
case 2:
windowWidth = 1024;
windowHeight = 768;
aspectRatFirst = 16;
aspectRatSecond = 9;
break;
}
aspectRatDeformation = aspectRatSecond / aspectRatFirst;
int shapeAnswer;
do {
std::cout << std::endl;
std::cout << "Choose your shape:" << std::endl;
std::cout << " 1. Triangle" << std::endl;
std::cout << " 2. Square" << std::endl;
std::cout << "Answer: "; std::cin >> shapeAnswer;
} while (shapeAnswer != 1 && shapeAnswer != 2);
GLfloat preVertices[]; // line 64
if (shapeAnswer == 1) {
preVertices = {
-0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f, //B
0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f, //C
0.0f, 0.5f * float(sqrt(3)) * 2 / 3, 0.0f //A
};
}
else if (shapeAnswer == 2) {
preVertices = {
-0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f, //B
0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f, //C
0.0f, 0.5f * float(sqrt(3)) * 2 / 3, 0.0f //A
};
}
GLfloat vertices[] = preVertices; // line 81
window = glfwCreateWindow(windowWidth, windowHeight, "OpenGLCourse", NULL, NULL);
if (window == NULL) {
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
gladLoadGL();
glViewport(0, 0, windowWidth, windowHeight);
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
GLuint VBO;
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW); // line 119
glClearColor(0.07f, 0.13f, 0.17f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glfwSwapBuffers(window);
while (!glfwWindowShouldClose(window)) {
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
My problem is between lines 64-81 and line 119.
What I am trying to do is create an array, vertices, that holds the coordinates of the chosen shape's vertices. The problem is that the triangle has 3 vertices while the square has 4, but an array can only have one fixed number of elements. The second array, preVertices, only exists because I tried a lot of things to work around this.
The code wasn't working mainly because at line 119 I need the size of vertices, and vertices seems to be undefined (I think that's because it was originally defined inside an if statement, or it was at first; as I said, I changed a lot of things while trying to solve this). One solution would be to make two different arrays and use an if statement at line 119 to pick the one I need, but I was wondering whether there is a solution with only one array: if there were 100 different shapes, making 100 different arrays would be time-consuming and would also use more RAM than needed. Am I right?
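For example, would something along these lines be the right direction? (Just a sketch of the idea, assuming a std::vector is acceptable here; I have not tested it, and it would also need #include <vector> and #include <cmath>.)
std::vector<GLfloat> vertexData;
if (shapeAnswer == 1) {
    vertexData = {
        -0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f, //B
        0.5f, -0.5f * float(sqrt(3)) / 3, 0.0f, //C
        0.0f, 0.5f * float(sqrt(3)) * 2 / 3, 0.0f //A
    };
}
else if (shapeAnswer == 2) {
    vertexData = { /* the four square vertices would go here */ };
}
// ...
// line 119 would then use the vector's element count instead of sizeof(vertices):
glBufferData(GL_ARRAY_BUFFER, vertexData.size() * sizeof(GLfloat), vertexData.data(), GL_STATIC_DRAW);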
Related
This is a follow-up question to OpenGL and loading/reading data in AoSoA (hybrid SoA) format.
I am trying to use a shader storage buffer object (SSBO) to store vertex data which is represented in AoSoA format. I am having trouble drawing the vertices, which obviously means that I am doing something wrong somewhere. The problem is that I can't seem to figure out what or where. The answer to the initial question above seems to indicate that I should not be using vertex attribute arrays, so the question then becomes, how do I render this SSBO, given the code I am about to present?
VertexData structure
constexpr auto VECTOR_WIDTH = 4;
constexpr auto VERTEX_COUNT = 16;
struct VertexData
{
std::array<float, VECTOR_WIDTH> px;
std::array<float, VECTOR_WIDTH> py;
};
// Later stored in a std::vector
std::vector<VertexData> vertices(VERTEX_COUNT / VECTOR_WIDTH);
Vertex shader (should this really be a compute shader?)
struct Vertex4
{
float px[4]; // position x
float py[4]; // position y
};
layout(std430, binding=0) buffer VertexData
{
Vertex4 vertices[];
};
void main()
{
int dataIx = gl_VertexID / 4;
int vertexIx = gl_VertexID % 4;
vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);
}
Assign vertexPosition index
// Do I need this? Where do I use it? glEnableVertexAttribArray(position_attrib_index)?
const GLuint position_attrib_index = 0;
glBindAttribLocation(program, position_attrib_index, "vertexPosition");
SSBO setup
const GLuint ssbo_binding_point = 0;
GLuint ssbo{};
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
//glBufferStorage(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_MAP_WRITE_BIT);
glBufferData(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_STATIC_DRAW);
const auto block_index = glGetProgramResourceIndex(program, GL_SHADER_STORAGE_BLOCK, "VertexData");
glShaderStorageBlockBinding(program, block_index, ssbo_binding_point);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, ssbo_binding_point, ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
Render loop
while (!glfwWindowShouldClose(window)) {
process_input(window);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
// ???
glfwSwapBuffers(window);
glfwPollEvents();
}
I just can't seem to figure out how this is supposed to work. Grabbing at straws, I also tried creating a VAO with a later call to glDrawArrays(GL_POINTS, 0, VERTEX_COUNT), but it didn't work either:
GLuint vao{};
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glEnableVertexAttribArray(position_attrib_index);
glVertexAttribPointer(position_attrib_index, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
It seems to me that I should be using position_attrib_index (which should point to vertexPosition) for something, the question is for what?
Complete example code
requires OpenGL 4.3, GLEW and GLFW
build command example: g++ -std=c++17 main.cpp -lGLEW -lglfw -lGL -o ssbo
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <array>
#include <iostream>
#include <vector>
void process_input(GLFWwindow *window)
{
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS) {
glfwSetWindowShouldClose(window, true);
}
}
void glfw_error_callback(int error_code, const char *description)
{
std::cerr << "GLFW Error: [" << error_code << "] " << description << '\n';
}
void framebuffer_size_callback(GLFWwindow *window, int width, int height)
{
glViewport(0, 0, width, height);
}
auto create_glfw_window()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
return glfwCreateWindow(800, 600, "OpenGL and AoSoA layout", nullptr, nullptr);
}
void set_callbacks(GLFWwindow *window)
{
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
glfwSetErrorCallback(glfw_error_callback);
}
void print_versions()
{
std::cout << "Using GLFW " << glfwGetVersionString() << '\n';
std::cout << "Using GLEW " << glewGetString(GLEW_VERSION) << '\n';
}
bool init_loader()
{
GLenum err = glewInit();
if (GLEW_OK != err) {
std::cerr << "GLEW error: " << glewGetErrorString(err);
}
return err == GLEW_OK;
}
void GLAPIENTRY MessageCallback(
GLenum source,
GLenum type,
GLuint id,
GLenum severity,
GLsizei length,
const GLchar* message,
const void* userParam = nullptr)
{
std::cerr << "[GL DEBUG] " << (type == GL_DEBUG_TYPE_ERROR ? "Error: " : "") << message << '\n';
}
constexpr auto VECTOR_WIDTH = 4;
constexpr auto VERTEX_COUNT = 16;
struct VertexData
{
std::array<float, VECTOR_WIDTH> px;
std::array<float, VECTOR_WIDTH> py;
};
static const char* vertex_shader_source =
"#version 430\n"
"struct Vertex4\n"
"{\n"
" float px[4]; // position x\n"
" float py[4]; // position y\n"
"};\n"
"layout(std430, binding=0) buffer VertexData\n"
"{\n"
" Vertex4 vertices[];\n"
"};\n"
"void main()\n"
"{\n"
" int dataIx = gl_VertexID / 4;\n"
" int vertexIx = gl_VertexID % 4;\n"
" vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);\n"
"}\n";
static const char* fragment_shader_source =
"#version 430\n"
"out vec4 out_color;\n"
"void main()\n"
"{\n"
" out_color = vec4(1.0, 0.5, 0.5, 0.25);\n"
"}\n";
int main(int argc, char *argv[])
{
glewExperimental = GL_TRUE;
auto window = create_glfw_window();
if (window == nullptr) {
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
set_callbacks(window);
init_loader();
print_versions();
glEnable(GL_DEBUG_OUTPUT);
glDebugMessageCallback(MessageCallback, nullptr);
std::vector<VertexData> vertices(VERTEX_COUNT / VECTOR_WIDTH);
vertices[0] = {
{-0.75f, 0.75f, 0.75f, -0.75f},
{-0.75f, -0.75f, 0.75f, 0.75f}
};
vertices[1] = {
{-0.50f, 0.50f, 0.50f, -0.50f},
{-0.50f, -0.50f, 0.50f, 0.50f},
};
vertices[2] = {
{-0.25f, 0.25f, 0.25f, -0.25f},
{-0.25f, -0.25f, 0.25f, 0.25f},
};
vertices[3] = {
{-0.05f, 0.05f, 0.05f, -0.05f},
{-0.05f, -0.05f, 0.05f, 0.05f},
};
auto vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, nullptr);
glCompileShader(vertex_shader);
auto fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, nullptr);
glCompileShader(fragment_shader);
auto program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
const GLuint position_attrib_index = 0;
glBindAttribLocation(program, position_attrib_index, "vertexPosition");
glLinkProgram(program);
//glUseProgram(program);
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
//
// SSBO
//
const GLuint ssbo_binding_point = 0;
GLuint ssbo{};
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
//glBufferStorage(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_MAP_WRITE_BIT);
glBufferData(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_STATIC_DRAW);
const auto block_index = glGetProgramResourceIndex(program, GL_SHADER_STORAGE_BLOCK, "VertexData");
glShaderStorageBlockBinding(program, block_index, ssbo_binding_point);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, ssbo_binding_point, ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
//
// VAO
//
//GLuint vao{};
//glGenVertexArrays(1, &vao);
//glBindVertexArray(vao);
//glEnableVertexAttribArray(position_attrib_index);
//glVertexAttribPointer(position_attrib_index, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glClearColor(0.15f, 0.15f, 0.2f, 1.0f);
glPointSize(10.0f);
while (!glfwWindowShouldClose(window)) {
process_input(window);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
//glDrawArrays(GL_POINTS, 0, VERTEX_COUNT);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
This is the correct way to issue a draw with the data you have:
glBindVertexArray(vao);
glDrawArrays(GL_POINTS, 0, VERTEX_COUNT);
However, your issue is that your vertex shader does not write to gl_Position, so nothing well-defined gets rasterized (whatever happens is undefined behavior). You should set the position of the vertices in the shader as follows:
//...
out gl_PerVertex {
vec4 gl_Position;
};
void main()
{
int dataIx = gl_VertexID / 4;
int vertexIx = gl_VertexID % 4;
vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);
gl_Position = vec4(vertexPosition, 0, 1);
}
You can get rid of "Assign vertexPosition index", and your VAO doesn't need to have any attributes.
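Putting the pieces together, a minimal sketch of the relevant setup and render loop (reusing the vao, program, window and VERTEX_COUNT names from the example above; the VAO has no attributes and is only there because a core-profile context requires some VAO to be bound when drawing):
GLuint vao{};
glGenVertexArrays(1, &vao); // attribute-less VAO, still required in core profile
glBindVertexArray(vao);
while (!glfwWindowShouldClose(window)) {
    process_input(window);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(program);
    glDrawArrays(GL_POINTS, 0, VERTEX_COUNT); // gl_VertexID indexes into the SSBO
    glfwSwapBuffers(window);
    glfwPollEvents();
}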
I am experiencing a black screen in my project using OpenGL and C++. I am in need of assistance as to where I went wrong rendering a red triangle to the screen.
I have tried checking for errors in the vertex and fragment shader.
#include <GL\glew.h>
#include <GL\GL.h>
#include <GLFW\glfw3.h>
#include <iostream>
#include <string>
using namespace std;
unsigned int CreateShader(int type, const string shaderSource) {
unsigned int shader = glCreateShader(type);
const char *src = shaderSource.c_str();
glShaderSource(shader, 1, &src, NULL);
glCompileShader(shader);
int result;
glGetShaderiv(shader, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE) {
int length;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length);
char *message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(shader, length, &length, message);
cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex shader " : "fragment shader ") << endl;
cout << message << endl;
}
return shader;
}
unsigned int CreateProgram(const string vertexShader, const string fragmentShader) {
unsigned int program = glCreateProgram();
unsigned int vs = CreateShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CreateShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glDeleteShader(vs);
glDeleteShader(fs);
glLinkProgram(program);
glValidateProgram(program);
return program;
}
int main() {
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow *window = glfwCreateWindow(800, 600, "window", NULL, NULL);
glfwMakeContextCurrent(window);
glewInit();
if (glewInit() != GLEW_OK) {
cout << "Glew initialization Failed! " << endl;
glfwTerminate();
return -1;
}
float positions[] = {
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f,
};
cout << "Made By Rial Seebran" << endl;
cout << glfwGetVersionString() << endl;
unsigned int VAO;
unsigned int VBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(positions), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
const string vertexShader =
"#version 330 core\n"
"layout (location = 0) in vec3 position;\n"
"void main() {\n"
"gl_Position = vec4(position, 0.0f);\n"
"}\n";
const string fragmentShader =
"#version 330 core\n"
"layout (location = 0) out vec4 color;\n"
"void main() {\n"
"color = vec4(1.0f, 0.0f, 0.0f, 1.0f);\n"
"}\n";
unsigned int program = CreateProgram(vertexShader, fragmentShader);
while (!glfwWindowShouldClose(window)) {
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
glBindVertexArray(VAO);
glDrawArrays(GL_ARRAY_BUFFER, 0, 3);
glBindVertexArray(0);
glfwPollEvents();
glfwSwapBuffers(window);
}
glfwTerminate();
return 0;
}
I'm expecting a red triangle drawn at the coordinates specified with a blueish background.
The first parameter to glDrawArrays has to be the primitive type.
It has to be GL_TRIANGLES rather than GL_ARRAY_BUFFER:
glDrawArrays(GL_ARRAY_BUFFER, 0, 3);
glDrawArrays(GL_TRIANGLES, 0, 3);
The OpenGL enumerator constant GL_ARRAY_BUFFER is not an accepted value for this parameter and will cause a GL_INVALID_ENUM error (the error can be retrieved with glGetError).
When a clip-space coordinate is transformed to normalized device space, its x, y and z components are divided by the w component.
This means the w component of gl_Position has to be set to 1.0 rather than 0.0:
gl_Position = vec4(position, 0.0f);
gl_Position = vec4(position, 1.0);
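As a concrete example (using the first vertex from positions): with gl_Position = vec4(-0.5, -0.5, 0.0, 1.0), the resulting normalized device coordinates are (-0.5/1.0, -0.5/1.0, 0.0/1.0) = (-0.5, -0.5, 0.0), which lies inside the visible [-1, 1] range; with w = 0.0 every component would be divided by zero, which is undefined.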
GLEW can enable additional extensions via glewExperimental = GL_TRUE;. See the GLEW documentation, which says:
GLEW obtains information on the supported extensions from the graphics driver. Experimental or pre-release drivers, however, might not report every available extension through the standard mechanism, in which case GLEW will report it unsupported. To circumvent this situation, the glewExperimental global switch can be turned on by setting it to GL_TRUE before calling glewInit(), which ensures that all extensions with valid entry points will be exposed.
glewExperimental = GL_TRUE;
if ( glewInit() != GLEW_OK ) {
cout << "Glew initialization Failed! " << endl;
glfwTerminate();
return -1;
}
I am trying to draw a red triangle to the screen in OpenGL. glClearColor(x, x, x, 1) works fine and changes the background color. However no triangle appears and no errors show up.
#include <GL\glew.h>
#include <GL\GL.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <string>
using namespace std;
const unsigned int SCR_WIDTH = 800;
const unsigned int SCR_HEIGHT = 600;
static unsigned int CompileShader(unsigned int type, const string &source) {
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE) {
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char *message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex shader " : "fragment shader ") << endl;
cout << message << endl;
}
return id;
}
static unsigned int CreateShader(const string &vertexShader, const string &fragmentShader) {
unsigned int program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main() {
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow *window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "window", NULL, NULL);
if (window == NULL) {
cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
cout << glfwGetVersionString() << endl;
glfwMakeContextCurrent(window);
glewInit();
if (glewInit()) {
cout << "Glew initialization failed! " << endl;
return -1;
}
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(positions), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
const string vertexShader =
"#version 330 core\n"
"\n"
"layout (location = 0) in vec4 position;\n"
"\n"
"void main() {\n"
" gl_Position = position;\n"
"}\n";
const string fragmentShader =
"#version 330 core\n"
"\n"
"layout (location = 0) out vec4 color;\n"
"\n"
"void main() \n"
"{\n"
"\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
while (!glfwWindowShouldClose(window)) {
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.0f, 0.0f, 0.0f, 1);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
You need to use a Vertex Array Object. So, change your program to:
(...)
unsigned int vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(positions), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
glBindVertexArray(0);
(...)
And then, when rendering:
(...)
glBindVertexArray(vao);
while (!glfwWindowShouldClose(window)) {
(...)
You are missing a Vertex Array Object, which holds those vertex-attribute settings. Also, your vertex buffer is set up to supply vec2 positions but the vertex shader expects a vec4.
Correct shader:
const string vertexShader =
"#version 330 core\n"
"\n"
"layout (location = 0) in vec2 position;\n"
"\n"
"void main() {\n"
" gl_Position = vec4(position,0.0f,1.0f);\n"
"}\n";
Creating and binding the VAO:
unsigned int buffer;
unsigned int VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glGenBuffers(1, &buffer);
You also initialized GLEW twice; not sure if that matters.
glewExperimental = GL_TRUE; // Recommended for compatibility
if (glewInit()!= GLEW_OK) {
cout << "Glew initialization failed! " << endl;
return -1;
}
I use OpenGL 3.2, GLFW and GLEW. I am trying to render a simple triangle using a VAO and a simple shader on OS X (10.8.2), but nothing shows, only a white screen. The shaders compile OK, GLEW inits OK, glGetString(GL_VERSION) shows 3.2, and I tried putting glGetError after every line; it didn't report any errors. I don't know what I am doing wrong. Here's the code:
#include "include/GL/glew.h"
#include "include/GL/glfw.h"
#include <cstdlib>
#include <iostream>
GLuint program;
char *textFileRead(char *fn) {
FILE *fp;
char *content = NULL;
int count=0;
if (fn != NULL) {
fp = fopen(fn,"rt");
if (fp != NULL) {
fseek(fp, 0, SEEK_END);
count = ftell(fp);
rewind(fp);
if (count > 0) {
content = (char *)malloc(sizeof(char) * (count+1));
count = fread(content,sizeof(char),count,fp);
content[count] = '\0';
}
fclose(fp);
}
}
return content;
}
void checkCompilationStatus(GLuint s) {
GLint status = 0;
glGetShaderiv(s, GL_COMPILE_STATUS, &status);
if (status == 0) {
int infologLength = 0;
int charsWritten = 0;
glGetShaderiv(s, GL_INFO_LOG_LENGTH, &infologLength);
if (infologLength > 0)
{
GLchar* infoLog = (GLchar *)malloc(infologLength);
if (infoLog == NULL)
{
printf( "ERROR: Could not allocate InfoLog buffer");
exit(1);
}
glGetShaderInfoLog(s, infologLength, &charsWritten, infoLog);
printf( "Shader InfoLog:\n%s", infoLog );
free(infoLog);
}
}
}
void setShaders() {
GLuint v, f;
char *vs = NULL,*fs = NULL;
v = glCreateShader(GL_VERTEX_SHADER);
f = glCreateShader(GL_FRAGMENT_SHADER);
vs = textFileRead("minimal.vert");
fs = textFileRead("minimal.frag");
const char * vv = vs;
const char * ff = fs;
glShaderSource(v, 1, &vv,NULL);
glShaderSource(f, 1, &ff,NULL);
free(vs);free(fs);
glCompileShader(v);
checkCompilationStatus(v);
glCompileShader(f);
checkCompilationStatus(f);
program = glCreateProgram();
glAttachShader(program,v);
glAttachShader(program,f);
GLuint error;
glLinkProgram(program);
glUseProgram(program);
}
int main(int argc, char* argv[]) {
glfwInit();
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwOpenWindow(800, 600, 8, 8, 8, 8, 24, 8, GLFW_WINDOW);
glViewport(0, 0, 800, 600);
glfwSetWindowTitle("Triangle");
glewExperimental = GL_TRUE;
GLenum result = glewInit();
if (result != GLEW_OK) {
std::cout << "Error: " << glewGetErrorString(result) << std::endl;
}
std::cout << "VENDOR: " << glGetString(GL_VENDOR) << std::endl;
std::cout << "RENDERER: " << glGetString(GL_RENDERER) << std::endl;
std::cout << "VERSION: " << glGetString(GL_VERSION) << std::endl;
std::cout << "GLSL: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
setShaders();
GLfloat vertices[] = {
1.0f, 1.0f, 0.f,
-1.f, -1.f, 0.f,
1.f, -1.f, 0.f
};
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
GLuint pos = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(pos);
glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, 0);
glClearColor(1.0, 1.0, 1.0, 1.0);
while (glfwGetWindowParam(GLFW_OPENED)) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers();
glfwSleep(0.001);
}
}
And here are the shaders, vertex shader:
#version 150
in vec3 position;
void main()
{
gl_Position = vec4(position, 0);
}
fragment shader:
#version 150
out vec4 out_color;
void main()
{
out_color = vec4(1.0f, 0.0f, 0.0f, 1.0f);
}
The w parameter in your vertex shader should be set to 1, not 0.
gl_Position = vec4(position, 1)
For more information see the section titled "Normalized Coordinates" under "Rasterization Overview" on this page
... The X, Y, and Z of each vertex's position
is divided by W to get normalized device coordinates...
So your coordinates were being divided by 0. A number divided by 0 is undefined.
I have a program like below:
#include <iostream>
#include <fstream>
#include <sstream>
#include <gl/glew.h>
#include <gl/freeglut.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
using namespace std;
#define WINDOW_WIDTH 640
#define WINDOW_HEIGHT 480
GLuint prog, verShader, fragShader;
GLint attrPos, attrNor;
GLint uniModelView, uniProjection, uniModelViewTransposeInverse;
glm::mat4 modelMatrix, viewMatrix, projectionMatrix, MV, MVP;
glm::mat3 MVI;
const string loadFile(string name)
{
ifstream in(name.c_str(), ios::in);
if (in.fail()) {
cout << "File: '" << name << "' could not exist!!" << endl;
return "";
}
stringstream data;
data << in.rdbuf();
in.close();
return data.str();
}
GLuint createShader(GLenum type, const string name)
{
GLint isCompileOk;
GLuint shader;
string shaderText, shaderType;
const char *shaderSource;
switch(type)
{
case GL_VERTEX_SHADER:
shaderType = "GL_VERTEX_SHADER";
break;
case GL_FRAGMENT_SHADER:
shaderType = "GL_FRAGMENT_SHADER";
break;
}
shaderText = loadFile(name);
shaderSource = shaderText.c_str();
shader = glCreateShader(type);
glShaderSource(shader, 1, &shaderSource, NULL);
glCompileShader(shader);
glGetShaderiv(shader, GL_COMPILE_STATUS, &isCompileOk);
if (isCompileOk == GL_FALSE) {
char *shaderErr;
int errLength, errRetrieve;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &errLength);
shaderErr = new char [errLength + 1];
glGetShaderInfoLog(shader, errLength, &errRetrieve, shaderErr);
cout << "Compile '" << shaderType << "' error:" << endl;
cout << shaderErr << endl;
delete [] shaderErr;
return 0;
} else {
cout << "Compile '" << shaderType << "' ok!" << endl;
}
return shader;
}
bool makeShader()
{
GLint isLinkOk;
verShader = createShader(GL_VERTEX_SHADER, "vert.glsl");
fragShader = createShader(GL_FRAGMENT_SHADER, "frag.glsl");
prog = glCreateProgram();
glAttachShader(prog, verShader);
glAttachShader(prog, fragShader);
glLinkProgram(prog);
glGetProgramiv(prog, GL_LINK_STATUS, &isLinkOk);
if (isLinkOk == GL_FALSE) {
char *progErr;
int errLenght, errRetrieve;
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &errLenght);
progErr = new char[errLenght + 1];
glGetProgramInfoLog(prog, errLenght, &errRetrieve, progErr);
cout << "Link program error:" << endl;
cout << progErr << endl;
delete [] progErr;
return false;
} else {
cout << "Link program Ok!" << endl;
}
attrPos = glGetAttribLocation(prog, "position");
uniModelView = glGetUniformLocation(prog, "ModelViewMatrix");
uniProjection = glGetUniformLocation(prog, "ProjectionMatrix");
uniModelViewTransposeInverse = glGetUniformLocation(prog, "ModelViewTransposeInverseMatrix");
return true;
}
float vertexs[] = {
-1.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
1.0f, -1.0f, 1.0f,
-1.0f, -1.0f, 1.0f
};
unsigned short indicates[] = {
0, 1, 2,
2, 3, 0
};
GLuint vao, vbo, ibo;
void display()
{
MV = viewMatrix * modelMatrix;
MVP = projectionMatrix * MV;
MVI = glm::transpose(glm::inverse(glm::mat3(MV)));
glUseProgram(prog);
glUniformMatrix4fv(uniModelView, 1, GL_FALSE, glm::value_ptr(MV));
glUniformMatrix3fv(uniModelViewTransposeInverse, 1, GL_FALSE, glm::value_ptr(MVI));
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArray(vao);
glDrawElements(GL_TRIANGLES, sizeof(indicates)/sizeof(unsigned short), GL_UNSIGNED_SHORT, 0);
glutSwapBuffers();
}
void reshape(int w, int h)
{
glViewport(0, 0, w, h);
projectionMatrix = glm::perspective(45.0f, float(w)/h, 0.1f, 1000.0f);
glUseProgram(prog);
glUniformMatrix4fv(uniProjection, 1, GL_FALSE, glm::value_ptr(projectionMatrix));
}
void idle()
{
glutPostRedisplay();
}
void Init()
{
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClearDepth(1.0);
glDepthFunc(GL_LEQUAL);
glEnable(GL_DEPTH_TEST);
modelMatrix = glm::mat4(1.0f);
viewMatrix = glm::lookAt(
glm::vec3(0.0f, 5.0f, 10.0f),
glm::vec3(0.0f, 0.0f, 0.0f),
glm::vec3(0.0f, 1.0f, 0.0f)
);
projectionMatrix = glm::mat4(1.0f);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertexs), vertexs, GL_STATIC_DRAW);
glVertexAttribPointer(attrPos, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 3, 0);
glEnableVertexAttribArray(attrPos);
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indicates), indicates, GL_STATIC_DRAW);
glBindVertexArray(0);
}
int main(int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH);
glutInitWindowSize(WINDOW_WIDTH, WINDOW_HEIGHT);
glutCreateWindow("Rectangle!");
if (glewInit() != GLEW_OK) {
cout << "glewInit() fail!" << endl;
return -1;
}
if (!makeShader()) {
cout << "makeShader() error!!" << endl;
return -1;
}
Init();
reshape(WINDOW_WIDTH, WINDOW_HEIGHT);
glutDisplayFunc(display);
glutReshapeFunc(reshape);
glutIdleFunc(idle);
glutMainLoop();
return 0;
}
When I add the VAO, freeglut crashes in glutMainLoop(). If I remove the glGenVertexArrays call (and, of course, everything related to the VAO), it runs OK, so my guess is that the problem is in freeglut with glGenVertexArrays.
Question: What is my problem?
PS: My graphics card supports OpenGL 2.1; I use Visual Studio 2008.
Update: With the VAO, the above program works well but crashes when I close the freeglut window.
Update (shaders): My shaders are very simple:
Vertex shader:
#version 120
attribute vec3 position;
// attribute vec3 normal;
uniform mat4 ModelViewMatrix;
uniform mat3 ModelViewTransposeInverseMatrix;
uniform mat4 ProjectionMatrix;
void main(void)
{
gl_Position = ProjectionMatrix * ModelViewMatrix * vec4(position, 1.0);
}
Fragment shader:
#version 120
void main()
{
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
glVertexAttribPointer(attrPos, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 3, 0);
Your vertex data is tightly packed. Use a stride of 0 instead of sizeof(float) * 3.
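In other words, the call would become (a sketch of the same call with only the stride changed):
glVertexAttribPointer(attrPos, 3, GL_FLOAT, GL_FALSE, 0, 0);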
My graphics card supports OpenGL 2.1
Also, VAOs only became core in 3.0. You should check for ARB_vertex_array_object support before using that functionality.
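With GLEW, a minimal sketch of that check could look like this (assuming glewInit() has already succeeded; GLEW exposes one boolean flag per extension):
if (!GLEW_ARB_vertex_array_object) {
    // No VAO support on this context: skip glGenVertexArrays/glBindVertexArray
    // and simply bind the VBO and set the attribute pointers before each draw instead.
    cout << "ARB_vertex_array_object is not supported" << endl;
}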