OpenGL shaders not drawing a thing :/ - c++

As the title says, the shaders aren't doing anything: they should draw a point, but it isn't appearing on the screen. I have been checking for solutions, but none of them seem to work. Also, GLFW and GLEW are initializing okay and the red clear color does appear.
#include <GL/glew.h>
#define GLFW_DLL
#include <GLFW/glfw3.h>
#include <stdio.h>
#include <iostream>
#include "jelly/lua_manager.h"
#include "jelly/keysManager.h"
jelly::keys_buttons::KeysManager km;
GLuint vertex_array_obj;
GLuint program;
GLuint startRender(GLFWwindow* window)
{
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
std::cout << "ASD" << std::endl;
static const GLchar * vertexShader_src[] =
{
"#version 430 core \n"
" \n"
"void main(void) \n"
"{ \n"
" gl_Position = vec4(0, 0.5, 0.0, 1); \n"
"} \n"
};
static const GLchar * fragmentShader_src[] =
{
"#version 430 core \n"
" \n"
"out vec4 color; \n"
" \n"
"void main(void) \n"
"{ \n"
" color = vec4(0.0, 0.8, 1.0, 1.0); \n"
"} \n"
};
glShaderSource(vertexShader, 1, vertexShader_src, NULL);
glCompileShader(vertexShader);
glShaderSource(fragmentShader, 1, fragmentShader_src, NULL);
glCompileShader(fragmentShader);
GLuint tprogram = glCreateProgram();
glAttachShader(tprogram, vertexShader);
glAttachShader(tprogram, fragmentShader);
glLinkProgram(tprogram);
glValidateProgram(tprogram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
glGenVertexArrays(1, &vertex_array_obj);
glBindVertexArray(vertex_array_obj);
return tprogram;
}
void render(GLFWwindow* window)
{
glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_POINTS, 0, 1);
}
void mouseCallback(GLFWwindow* window, int button, int action, int mods)
{
km.mouseClick(button, action, mods);
}
void keyCallback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
km.keyPressed(key, action, mods);
}
int main()
{
jelly::lua::LuaManager lm;
// 0 = Build | 1 = Release | 2 = Alpha | 3 = Beta
int buildType = 0;
std::string title = "Relieved";
if (buildType != 1)
{
switch (buildType) {
case 0 :
title += " | Build Version";
break;
case 2 :
title += " | Alpha Version";
break;
case 3 :
title += " | Beta Version";
break;
default :
break;
}
}
GLFWwindow* window;
if (!glfwInit()) {
glfwTerminate();
return -1;
}
window = glfwCreateWindow(640, 400, title.c_str(), NULL, NULL);
if (!window) {
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
{
fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
}
glLoadIdentity();
program = startRender(window);
glUseProgram(program);
glfwSetKeyCallback(window, keyCallback);
glfwSetMouseButtonCallback(window, mouseCallback);
while(!glfwWindowShouldClose(window))
{
render(window);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &vertex_array_obj);
glDeleteProgram(program);
glDeleteVertexArrays(1, &vertex_array_obj);
glfwTerminate();
return 0;
}

You are creating your vertex array
glGenVertexArrays(1, &vertex_array_obj);
glBindVertexArray(vertex_array_obj);
but there is no data in it. You need to add some data so that the shaders are actually run (even if the position is hardcoded in the vertex shader, as it is in your case).
So, add some vertices to your vertex array like this:
std::vector<float> v({0.0f,0.0f,0.0f});
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, v.size() * sizeof(float), &v[0], GL_STATIC_DRAW); // v is a std::vector<float>; v.size() * sizeof(float) is the buffer size in bytes, and &v[0] is a pointer to the data
glEnableVertexAttribArray(0); //layout 0. In this example, position
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glVertexAttribPointer(
0, // layout in shader
3, // number of elements
GL_FLOAT, // type
GL_FALSE, // normalized?
0,
reinterpret_cast<void*>(0)
);
A good way to send data is to pack everything into a single std::vector and just arrange the layouts accordingly:
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, v.size() * sizeof(float), &v[0], GL_STATIC_DRAW); // v is a std::vector<float>; v.size() * sizeof(float) is the buffer size in bytes, and &v[0] is a pointer to the data
const int s[] = { 3, 3, 2 }; // components per attribute: in my case 3 floats for position, 3 for normals and 2 for texture coordinates
size_t accum = 0;
for (int z = 0; z < 3; ++z){
glEnableVertexAttribArray(z);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glVertexAttribPointer(
z, // layout in shader
s[z], // number of elements
GL_FLOAT, // type
GL_FALSE, // normalized?
8 * sizeof(float), // stride (3+3+2)
reinterpret_cast<void*>(accum * sizeof(float)) // byte offset accumulated from the preceding attributes [3, 3, 2]
);
accum += s[z];
}
Read about vertex arrays here:
https://stackoverflow.com/a/17517103/5729376

Related

Modern OpenGL 3.3 with glfw window not displaying anything

I just get an empty window without any C++ or OpenGL errors, and the VAO method doesn't work either. I'm also on an 8-year-old laptop, so that may be part of the cause.
Here's the code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
// shader stuff (do not understand lol)
static unsigned int compile_shader(unsigned int type, const std::string& source) {
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
// ERRORS
int resoult;
glGetShaderiv(id, GL_COMPILE_STATUS, &resoult);
if (resoult == GL_FALSE) {
int lenght;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &lenght);
char* message = (char*)_malloca(lenght * sizeof(char));
glGetShaderInfoLog(id, lenght, &lenght, message);
std::cout << "ERROR !!! Failded to compile shader !!! ERROR" << (type ==
GL_VERTEX_SHADER ? "vertex" : "fragment") << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int create_shader(const std::string& vertex_shader, const std::string& fragment_shader) {
unsigned int program = glCreateProgram();
unsigned int vs = compile_shader(GL_VERTEX_SHADER, vertex_shader);
unsigned int fs = compile_shader(GL_FRAGMENT_SHADER, fragment_shader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(800, 600, "test", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
std::cout << glGetString(GL_VERSION) << std::endl;
// Initializing GLEW
if (glewInit() != GLEW_OK)
std::cout << "Error with glew :((((((" << std::endl;
// buffer for the triangle
float positions[6] = {
-0.5f, -0.5f,
0.0f, -0.5f,
0.5f, -0.5f
};
unsigned int vao;
glGenVertexArrays(1, &vao);
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, (void*)0);
std::string vertex =
"#version 330 core \n"
"layout(location = 0 ) in vec4 position;\n"
"\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\n";
std::string fragment =
"#version 330 core \n"
"layout(location = 0 ) out vec4 color;\n"
"\n"
"void main()\n"
"{\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = create_shader(vertex, fragment);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClear(GL_COLOR_BUFFER_BIT);
// making a triangle
glUseProgram(shader);
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glDeleteProgram(shader);
glfwTerminate();
return 0;
}

Rendering a single triangle using TriangleStrip renders a rectangle

I am trying to render a triangle with a triangle strip, but the weird thing is that I am getting a rectangle.
Here is my client application code. I hardcode the points and the indices, create a VBO and an index buffer, and just call glDrawElements.
#include <string>
#include <fstream>
#include <sstream>
#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
using namespace std;
#define PI 3.14159265359
static string ParseShader(string filepath) {
ifstream stream(filepath);
string line;
stringstream stringStream;
while (getline(stream, line))
{
stringStream << line << '\n';
}
return stringStream.str();
}
static unsigned int CompileShader(unsigned int type, const string& source) {
unsigned int id = glCreateShader(type);
const char* src = source.c_str(); // this returns a pointer to data inside the string, the first character
glShaderSource(id, 1, &src, nullptr); // shader id, count of source codes, a pointer to the array that holds the strings
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE) {
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char* message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
cout << type << endl;
cout << message << endl;
glDeleteShader(id);
return 0;
}
return id;
}
// takes the shader codes as a string parameters
static unsigned int CreateShader(const string& vertexShader, const string& fragmentShader)
{
GLuint program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program); // validate if the program is valid and can be run in the current state of opengl
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
GLFWwindow* window;
float Angle = 40;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
// call glewInit after creating the context...
GLenum err = glewInit();
if (GLEW_OK != err)
{
/* Problem: glewInit failed, something is seriously wrong. */
fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
}
const int vertexCoordinateCount = 2;
const int coordinateCount = 6;
const int indexCount = 3;
GLfloat coordinates[coordinateCount] = {
500.0f, 0.0f,
-250.0f, 250.f,
-250.f, -250.f,
};
GLuint indices[indexCount] = { 0, 1, 2 };
GLuint position_buffer;
glGenBuffers(1, &position_buffer);
glBindBuffer(GL_ARRAY_BUFFER, position_buffer);
glBufferData(GL_ARRAY_BUFFER, coordinateCount * sizeof(float), coordinates, GL_STATIC_DRAW);
glVertexAttribPointer(0, vertexCoordinateCount, GL_FLOAT, GL_FALSE, sizeof(float) * vertexCoordinateCount, 0);
glEnableVertexAttribArray(0);
GLuint index_buffer;
glGenBuffers(1, &index_buffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indexCount * sizeof(GLuint), indices, GL_STATIC_DRAW);
string vertexSource = ParseShader("vertex.shader");
string fragmentSource = ParseShader("fragment.shader");
unsigned int program = CreateShader(vertexSource, fragmentSource);
glUseProgram(program);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
// Render here
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDrawElements(GL_TRIANGLE_STRIP, indexCount, GL_UNSIGNED_INT, nullptr);
//Swap front and back buffers
glfwSwapBuffers(window);
// Poll for and process events
glfwPollEvents();
}
glDeleteProgram(program);
glfwTerminate();
return 0;
}
My vertex shader:
#version 330 core
layout(location = 0) in vec3 aPos;
void main()
{
gl_Position = vec4(aPos, 1.0);
}
And my fragment shader:
#version 330 core
out vec4 FragColor;
void main()
{
FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);
}
Edit: Adding a screenshot of the render result.
You do not transform the vertex coordinates. Therefore the vertices must be specified in Normalized Device Space, in the range [-1.0, 1.0], e.g.:
GLfloat coordinates[coordinateCount] = {
1.0f, 1.0f,
-1.0f, 1.0f,
1.0f, -1.0f,
};
You're actually drawing a triangle, but you only see an inner part of it. The rest is clipped.
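Alternatively, as a hedged sketch (halfExtent is an assumed scale factor chosen to match the question's largest coordinate, not something OpenGL requires), you could scale the original coordinates down into that [-1.0, 1.0] range on the CPU before uploading them:
// Sketch: map the original pixel-like coordinates into normalized device
// coordinates by dividing by an assumed half-extent of 500 units. Only the
// scaling is new; the buffers, shaders and draw call stay as in the question.
const float halfExtent = 500.0f;
GLfloat coordinates[coordinateCount] = {
     500.0f / halfExtent,    0.0f / halfExtent,   //  1.0,  0.0
    -250.0f / halfExtent,  250.0f / halfExtent,   // -0.5,  0.5
    -250.0f / halfExtent, -250.0f / halfExtent,   // -0.5, -0.5
};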

How do I draw vertices that are stored in a SSBO?

This question follows on from OpenGL and loading/reading data in AoSoA (hybrid SoA) format.
I am trying to use a shader storage buffer object (SSBO) to store vertex data which is represented in AoSoA format. I am having trouble drawing the vertices, which obviously means that I am doing something wrong somewhere. The problem is that I can't seem to figure out what or where. The answer to the initial question above seems to indicate that I should not be using vertex attribute arrays, so the question then becomes, how do I render this SSBO, given the code I am about to present?
VertexData structure
constexpr auto VECTOR_WIDTH = 4;
constexpr auto VERTEX_COUNT = 16;
struct VertexData
{
std::array<float, VECTOR_WIDTH> px;
std::array<float, VECTOR_WIDTH> py;
};
// Later stored in a std::vector
std::vector<VertexData> vertices(VERTEX_COUNT / VECTOR_WIDTH);
Vertex shader (should this really be a compute shader?)
struct Vertex4
{
float px[4]; // position x
float py[4]; // position y
};
layout(std430, binding=0) buffer VertexData
{
Vertex4 vertices[];
};
void main()
{
int dataIx = gl_VertexID / 4;
int vertexIx = gl_VertexID % 4;
vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);
}
Assign vertexPosition index
// Do I need this? Where do I use it? glEnableVertexAttribArray(position_attrib_index)?
const GLuint position_attrib_index = 0;
glBindAttribLocation(program, position_attrib_index, "vertexPosition");
SSBO setup
const GLuint ssbo_binding_point = 0;
GLuint ssbo{};
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
//glBufferStorage(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_MAP_WRITE_BIT);
glBufferData(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_STATIC_DRAW);
const auto block_index = glGetProgramResourceIndex(program, GL_SHADER_STORAGE_BLOCK, "VertexData");
glShaderStorageBlockBinding(program, block_index, ssbo_binding_point);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, ssbo_binding_point, ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
Render loop
while (!glfwWindowShouldClose(window)) {
process_input(window);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
// ???
glfwSwapBuffers(window);
glfwPollEvents();
}
I just can't seem to figure out how this is supposed to work. Grabbing at straws, I also tried creating a VAO with a later call to glDrawArrays(GL_POINTS, 0, VERTEX_COUNT), but it didn't work either:
GLuint vao{};
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glEnableVertexAttribArray(position_attrib_index);
glVertexAttribPointer(position_attrib_index, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
It seems to me that I should be using position_attrib_index (which should point to vertexPosition) for something, the question is for what?
Complete example code
requires OpenGL 4.3, GLEW and GLFW
build command example: g++ -std=c++17 main.cpp -lGLEW -lglfw -lGL -o ssbo
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <array>
#include <iostream>
#include <vector>
void process_input(GLFWwindow *window)
{
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS) {
glfwSetWindowShouldClose(window, true);
}
}
void glfw_error_callback(int error_code, const char *description)
{
std::cerr << "GLFW Error: [" << error_code << "] " << description << '\n';
}
void framebuffer_size_callback(GLFWwindow *window, int width, int height)
{
glViewport(0, 0, width, height);
}
auto create_glfw_window()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
return glfwCreateWindow(800, 600, "OpenGL and AoSoA layout", nullptr, nullptr);
}
void set_callbacks(GLFWwindow *window)
{
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
glfwSetErrorCallback(glfw_error_callback);
}
void print_versions()
{
std::cout << "Using GLFW " << glfwGetVersionString() << '\n';
std::cout << "Using GLEW " << glewGetString(GLEW_VERSION) << '\n';
}
bool init_loader()
{
GLenum err = glewInit();
if (GLEW_OK != err) {
std::cerr << "GLEW error: " << glewGetErrorString(err);
}
return err == GLEW_OK;
}
void GLAPIENTRY MessageCallback(
GLenum source,
GLenum type,
GLuint id,
GLenum severity,
GLsizei length,
const GLchar* message,
const void* userParam = nullptr)
{
std::cerr << "[GL DEBUG] " << (type == GL_DEBUG_TYPE_ERROR ? "Error: " : "") << message << '\n';
}
constexpr auto VECTOR_WIDTH = 4;
constexpr auto VERTEX_COUNT = 16;
struct VertexData
{
std::array<float, VECTOR_WIDTH> px;
std::array<float, VECTOR_WIDTH> py;
};
static const char* vertex_shader_source =
"#version 430\n"
"struct Vertex4\n"
"{\n"
" float px[4]; // position x\n"
" float py[4]; // position y\n"
"};\n"
"layout(std430, binding=0) buffer VertexData\n"
"{\n"
" Vertex4 vertices[];\n"
"};\n"
"void main()\n"
"{\n"
" int dataIx = gl_VertexID / 4;\n"
" int vertexIx = gl_VertexID % 4;\n"
" vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);\n"
"}\n";
static const char* fragment_shader_source =
"#version 430\n"
"out vec4 out_color;\n"
"void main()\n"
"{\n"
" out_color = vec4(1.0, 0.5, 0.5, 0.25);\n"
"}\n";
int main(int argc, char *argv[])
{
glewExperimental = GL_TRUE;
auto window = create_glfw_window();
if (window == nullptr) {
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
set_callbacks(window);
init_loader();
print_versions();
glEnable(GL_DEBUG_OUTPUT);
glDebugMessageCallback(MessageCallback, nullptr);
std::vector<VertexData> vertices(VERTEX_COUNT / VECTOR_WIDTH);
vertices[0] = {
{-0.75f, 0.75f, 0.75f, -0.75f},
{-0.75f, -0.75f, 0.75f, 0.75f}
};
vertices[1] = {
{-0.50f, 0.50f, 0.50f, -0.50f},
{-0.50f, -0.50f, 0.50f, 0.50f},
};
vertices[2] = {
{-0.25f, 0.25f, 0.25f, -0.25f},
{-0.25f, -0.25f, 0.25f, 0.25f},
};
vertices[3] = {
{-0.05f, 0.05f, 0.05f, -0.05f},
{-0.05f, -0.05f, 0.05f, 0.05f},
};
auto vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, nullptr);
glCompileShader(vertex_shader);
auto fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, nullptr);
glCompileShader(fragment_shader);
auto program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
const GLuint position_attrib_index = 0;
glBindAttribLocation(program, position_attrib_index, "vertexPosition");
glLinkProgram(program);
//glUseProgram(program);
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
//
// SSBO
//
const GLuint ssbo_binding_point = 0;
GLuint ssbo{};
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
//glBufferStorage(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_MAP_WRITE_BIT);
glBufferData(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_STATIC_DRAW);
const auto block_index = glGetProgramResourceIndex(program, GL_SHADER_STORAGE_BLOCK, "VertexData");
glShaderStorageBlockBinding(program, block_index, ssbo_binding_point);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, ssbo_binding_point, ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
//
// VAO
//
//GLuint vao{};
//glGenVertexArrays(1, &vao);
//glBindVertexArray(vao);
//glEnableVertexAttribArray(position_attrib_index);
//glVertexAttribPointer(position_attrib_index, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glClearColor(0.15f, 0.15f, 0.2f, 1.0f);
glPointSize(10.0f);
while (!glfwWindowShouldClose(window)) {
process_input(window);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
//glDrawArrays(GL_POINTS, 0, VERTEX_COUNT);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
This is the correct way to issue a draw with the data you have:
glBindVertexArray(vao);
glDrawArrays(GL_POINTS, 0, VERTEX_COUNT);
However, your issue is that your vertex shader does not write to gl_Position, so nothing well-defined gets rasterized (you get whatever the undefined value happens to produce). You should set the position of the vertices in the shader as follows:
//...
out gl_PerVertex {
vec4 gl_Position;
};
void main()
{
int dataIx = gl_VertexID / 4;
int vertexIx = gl_VertexID % 4;
vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);
gl_Position = vec4(vertexPosition, 0, 1);
}
You can get rid of "Assign vertexPosition index", and your VAO doesn't need to have any attributes.
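A minimal sketch of that attribute-less setup, reusing the names from the question's code (program, VERTEX_COUNT):
// The VAO only has to exist and be bound; no buffers or attribute pointers
// are attached to it, because the vertex shader fetches its data from the
// SSBO using gl_VertexID.
GLuint vao{};
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);

// ... then, inside the render loop ...
glUseProgram(program);
glBindVertexArray(vao);
glDrawArrays(GL_POINTS, 0, VERTEX_COUNT);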

Black screen output in opengl 3.3

I am trying to draw a red triangle to the screen in OpenGL. glClearColor(x, x, x, 1) works fine and changes the background color. However, no triangle appears and no errors show up.
#include <GL\glew.h>
#include <GL\GL.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <string>
using namespace std;
const unsigned int SCR_WIDTH = 800;
const unsigned int SCR_HEIGHT = 600;
static unsigned int CompileShader(unsigned int type, const string &source) {
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE) {
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char *message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex shader " : "fragment shader ") << endl;
cout << message << endl;
}
return id;
}
static unsigned int CreateShader(const string &vertexShader, const string &fragmentShader) {
unsigned int program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main() {
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow *window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "window", NULL, NULL);
if (window == NULL) {
cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
cout << glfwGetVersionString() << endl;
glfwMakeContextCurrent(window);
glewInit();
if (glewInit()) {
cout << "Glew initialization failed! " << endl;
return -1;
}
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(positions), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
const string vertexShader =
"#version 330 core\n"
"\n"
"layout (location = 0) in vec4 position;\n"
"\n"
"void main() {\n"
" gl_Position = position;\n"
"}\n";
const string fragmentShader =
"#version 330 core\n"
"\n"
"layout (location = 0) out vec4 color;\n"
"\n"
"void main() \n"
"{\n"
"\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
while (!glfwWindowShouldClose(window)) {
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.0f, 0.0f, 0.0f, 1);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
You need to use a Vertex Array Object. So, change your program to:
(...)
unsigned int vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(positions), positions, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
glBindVertexArray(0);
(...)
And, then when rendering:
(...)
glBindVertexArray(vao);
while (!glfwWindowShouldClose(window)) {
(...)
You are missing a vertex array object, which holds those vertex attribute settings. Also, your vertex buffer supplies vec2 positions, but the vertex shader expects a vec4.
Correct shader:
const string vertexShader =
"#version 330 core\n"
"\n"
"layout (location = 0) in vec2 position;\n"
"\n"
"void main() {\n"
" gl_Position = vec4(position,0.0f,1.0f);\n"
"}\n";
Creating and binding the VAO:
unsigned int buffer;
unsigned int VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glGenBuffers(1, &buffer);
You initialized GLEW twice; not sure if that matters.
glewExperimental = GL_TRUE;//Recommended for compatibility
if (glewInit()!= GLEW_OK) {
cout << "Glew initialization failed! " << endl;
return -1;
}

Rendering triangle using Vertex Array Object shows nothing (OpenGL)

I use OpenGL 3.2, GLFW and GLEW. I am trying to render a simple triangle using a VAO and a simple shader on OS X (10.8.2), but nothing shows, only a white screen. The shaders compile OK, GLEW inits OK, glGetString(GL_VERSION) shows 3.2, and I tried putting glGetError after every line; it didn't report any errors. I don't know what I'm doing wrong. Here's the code:
#include "include/GL/glew.h"
#include "include/GL/glfw.h"
#include <cstdlib>
#include <iostream>
GLuint program;
char *textFileRead(char *fn) {
FILE *fp;
char *content = NULL;
int count=0;
if (fn != NULL) {
fp = fopen(fn,"rt");
if (fp != NULL) {
fseek(fp, 0, SEEK_END);
count = ftell(fp);
rewind(fp);
if (count > 0) {
content = (char *)malloc(sizeof(char) * (count+1));
count = fread(content,sizeof(char),count,fp);
content[count] = '\0';
}
fclose(fp);
}
}
return content;
}
void checkCompilationStatus(GLuint s) {
GLint status = 0;
glGetShaderiv(s, GL_COMPILE_STATUS, &status);
if (status == 0) {
int infologLength = 0;
int charsWritten = 0;
glGetShaderiv(s, GL_INFO_LOG_LENGTH, &infologLength);
if (infologLength > 0)
{
GLchar* infoLog = (GLchar *)malloc(infologLength);
if (infoLog == NULL)
{
printf( "ERROR: Could not allocate InfoLog buffer");
exit(1);
}
glGetShaderInfoLog(s, infologLength, &charsWritten, infoLog);
printf( "Shader InfoLog:\n%s", infoLog );
free(infoLog);
}
}
}
void setShaders() {
GLuint v, f;
char *vs = NULL,*fs = NULL;
v = glCreateShader(GL_VERTEX_SHADER);
f = glCreateShader(GL_FRAGMENT_SHADER);
vs = textFileRead("minimal.vert");
fs = textFileRead("minimal.frag");
const char * vv = vs;
const char * ff = fs;
glShaderSource(v, 1, &vv,NULL);
glShaderSource(f, 1, &ff,NULL);
free(vs);free(fs);
glCompileShader(v);
checkCompilationStatus(v);
glCompileShader(f);
checkCompilationStatus(f);
program = glCreateProgram();
glAttachShader(program,v);
glAttachShader(program,f);
GLuint error;
glLinkProgram(program);
glUseProgram(program);
}
int main(int argc, char* argv[]) {
glfwInit();
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwOpenWindow(800, 600, 8, 8, 8, 8, 24, 8, GLFW_WINDOW);
glViewport(0, 0, 800, 600);
glfwSetWindowTitle("Triangle");
glewExperimental = GL_TRUE;
GLenum result = glewInit();
if (result != GLEW_OK) {
std::cout << "Error: " << glewGetErrorString(result) << std::endl;
}
std::cout << "VENDOR: " << glGetString(GL_VENDOR) << std::endl;
std::cout << "RENDERER: " << glGetString(GL_RENDERER) << std::endl;
std::cout << "VERSION: " << glGetString(GL_VERSION) << std::endl;
std::cout << "GLSL: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
setShaders();
GLfloat vertices[] = {
1.0f, 1.0f, 0.f,
-1.f, -1.f, 0.f,
1.f, -1.f, 0.f
};
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
GLuint pos = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(pos);
glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, 0);
glClearColor(1.0, 1.0, 1.0, 1.0);
while (glfwGetWindowParam(GLFW_OPENED)) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers();
glfwSleep(0.001);
}
}
And here are the shaders, vertex shader:
#version 150
in vec3 position;
void main()
{
gl_Position = vec4(position, 0);
}
fragment shader:
#version 150
out vec4 out_color;
void main()
{
out_color = vec4(1.0f, 0.0f, 0.0f, 1.0f);
}
The w parameter in your vertex shader should be set to 1, not 0.
gl_Position = vec4(position, 1)
For more information see the section titled "Normalized Coordinates" under "Rasterization Overview" on this page
... The X, Y, and Z of each vertex's position is divided by W to get normalized device coordinates...
So your coordinates were being divided by 0. A number divided by 0 is undefined.
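Put together, the corrected minimal.vert would look like this (shown here as a C++ string literal only for illustration; the original code loads it from a file):
static const char* corrected_vertex_shader =
    "#version 150\n"
    "in vec3 position;\n"
    "void main()\n"
    "{\n"
    "    gl_Position = vec4(position, 1.0); // w = 1, so the divide by w leaves x, y, z unchanged\n"
    "}\n";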