Rendering a triangle using a Vertex Array Object shows nothing (OpenGL) - c++

I use OpenGL 3.2, GLFW and GLEW. I'm trying to render a simple triangle using a VAO and a simple shader on OS X (10.8.2), but nothing shows up, only a white screen. The shaders compile fine, GLEW initializes fine, glGetString(GL_VERSION) reports 3.2, and I tried putting glGetError after every line; it didn't report any errors. I don't know what I'm doing wrong. Here's the code:
#include "include/GL/glew.h"
#include "include/GL/glfw.h"
#include <cstdlib>
#include <iostream>
GLuint program;
char *textFileRead(char *fn) {
FILE *fp;
char *content = NULL;
int count=0;
if (fn != NULL) {
fp = fopen(fn,"rt");
if (fp != NULL) {
fseek(fp, 0, SEEK_END);
count = ftell(fp);
rewind(fp);
if (count > 0) {
content = (char *)malloc(sizeof(char) * (count+1));
count = fread(content,sizeof(char),count,fp);
content[count] = '\0';
}
fclose(fp);
}
}
return content;
}
void checkCompilationStatus(GLuint s) {
GLint status = 0;
glGetShaderiv(s, GL_COMPILE_STATUS, &status);
if (status == 0) {
int infologLength = 0;
int charsWritten = 0;
glGetShaderiv(s, GL_INFO_LOG_LENGTH, &infologLength);
if (infologLength > 0)
{
GLchar* infoLog = (GLchar *)malloc(infologLength);
if (infoLog == NULL)
{
printf( "ERROR: Could not allocate InfoLog buffer");
exit(1);
}
glGetShaderInfoLog(s, infologLength, &charsWritten, infoLog);
printf( "Shader InfoLog:\n%s", infoLog );
free(infoLog);
}
}
}
void setShaders() {
GLuint v, f;
char *vs = NULL,*fs = NULL;
v = glCreateShader(GL_VERTEX_SHADER);
f = glCreateShader(GL_FRAGMENT_SHADER);
vs = textFileRead("minimal.vert");
fs = textFileRead("minimal.frag");
const char * vv = vs;
const char * ff = fs;
glShaderSource(v, 1, &vv,NULL);
glShaderSource(f, 1, &ff,NULL);
free(vs);free(fs);
glCompileShader(v);
checkCompilationStatus(v);
glCompileShader(f);
checkCompilationStatus(f);
program = glCreateProgram();
glAttachShader(program,v);
glAttachShader(program,f);
GLuint error;
glLinkProgram(program);
glUseProgram(program);
}
int main(int argc, char* argv[]) {
glfwInit();
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwOpenWindow(800, 600, 8, 8, 8, 8, 24, 8, GLFW_WINDOW);
glViewport(0, 0, 800, 600);
glfwSetWindowTitle("Triangle");
glewExperimental = GL_TRUE;
GLenum result = glewInit();
if (result != GLEW_OK) {
std::cout << "Error: " << glewGetErrorString(result) << std::endl;
}
std::cout << "VENDOR: " << glGetString(GL_VENDOR) << std::endl;
std::cout << "RENDERER: " << glGetString(GL_RENDERER) << std::endl;
std::cout << "VERSION: " << glGetString(GL_VERSION) << std::endl;
std::cout << "GLSL: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
setShaders();
GLfloat vertices[] = {
1.0f, 1.0f, 0.f,
-1.f, -1.f, 0.f,
1.f, -1.f, 0.f
};
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
GLuint pos = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(pos);
glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, 0);
glClearColor(1.0, 1.0, 1.0, 1.0);
while (glfwGetWindowParam(GLFW_OPENED)) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers();
glfwSleep(0.001);
}
}
And here are the shaders. Vertex shader:
#version 150
in vec3 position;
void main()
{
gl_Position = vec4(position, 0);
}
fragment shader:
#version 150
out vec4 out_color;
void main()
{
out_color = vec4(1.0f, 0.0f, 0.0f, 1.0f);
}

The w parameter in your vertex shader should be set to 1, not 0.
gl_Position = vec4(position, 1);
For more information, see the section titled "Normalized Coordinates" under "Rasterization Overview" on this page:
... The X, Y, and Z of each vertex's position is divided by W to get normalized device coordinates ...
So your coordinates were being divided by 0, and division by 0 gives an undefined result.
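For clarity, here is the corrected vertex shader (minimal.vert) with the fix applied:

#version 150
in vec3 position;
void main()
{
    // w = 1.0, so the perspective divide leaves x, y and z unchanged
    gl_Position = vec4(position, 1.0);
}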

Related

Creating a triangle with OpenGL

I am having trouble with my C++ code for an OpenGL program that is supposed to create a 2D triangle, and I keep receiving this error message:
Error! Fragment shader failed to compile.
ERROR: 0:1: '' : syntax error: #version directive must occur in a shader before anything else.
Error! Shader Program Linker Failure.
I have tried putting the code from lines 13-27 before the const char* APP_TITLE line (line 8) but that doesn't seem to make a difference.
What can I do to generate this 2D triangle?
#include <iostream>
#include <sstream>
#define GLEW_STATIC
#include "GL/glew.h"
#include "GLFW/glfw3.h"
const char* APP_TITLE = "Texturing a Pyramid";
const int gwindowWidth = 800;
const int gwindowHeight = 600;
GLFWwindow* gWindow = NULL;
const GLchar* vertexShaderSrc =
"#version 330 core\n"
"layout (location = 0) in vec3 pos;"
"void main()"
"{"
" gl_Position = vec4(pos.x, pos.y, pos.z, 1.0);"
"}";
const GLchar* fragmentShaderSrc =
"version 330 core\n"
"out vec4 frag_color;"
"void main()"
"{"
" frag_color = vec4(0.35f, 0.96f, 0.3f, 1.0);"
"}";
void glfw_onKey(GLFWwindow* window, int key, int scancode, int action, int mode);
void showFPS(GLFWwindow* window);
bool initOpenGL();
int main()
{
if (!initOpenGL())
{
std::cerr << "GLFW intialization failed." << std::endl;
return false;
}
GLfloat vertices[] = {
0.0f, 0.5f, 0.0f, // Top Vertex
0.5f, -0.5f, 0.0f, // Right Vertex
-0.5f, -0.5f, 0.0f // Left Vertex
};
GLuint vbo, vao;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(0);
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource (vs, 1, &vertexShaderSrc, NULL);
glCompileShader(vs);
GLint result;
GLchar infoLog[512];
glGetShaderiv(vs, GL_COMPILE_STATUS, &result);
if (!result)
{
glGetShaderInfoLog(vs, sizeof(infoLog), NULL, infoLog);
std::cout << "Error! Vertex shader failed to compile." << infoLog << std::endl;
}
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource (fs, 1, &fragmentShaderSrc, NULL);
glCompileShader(fs);
glGetShaderiv(fs, GL_COMPILE_STATUS, &result);
if (!result)
{
glGetShaderInfoLog(fs, sizeof(infoLog), NULL, infoLog);
std::cout << "Error! Fragment shader failed to compile." << infoLog << std::endl;
}
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vs);
glAttachShader(shaderProgram, fs);
glLinkProgram(shaderProgram);
glGetProgramiv(shaderProgram, GL_LINK_STATUS, &result);
if (!result)
{
glGetProgramInfoLog(shaderProgram, sizeof(infoLog), NULL, infoLog);
std::cout << "Error! Shader Program Linker Failure" << std::endl;
}
glDeleteShader(vs);
glDeleteShader(fs);
// Main loop
while (!glfwWindowShouldClose(gWindow))
{
showFPS(gWindow);
glfwPollEvents();
glfwSwapBuffers(gWindow);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
glfwSetKeyCallback(gWindow, glfw_onKey);
}
glDeleteProgram(shaderProgram);
glDeleteVertexArrays(1, &vao);
glDeleteBuffers(1, &vbo);
glfwTerminate();
return 0;
}
bool initOpenGL()
{
if (!glfwInit())
{
std::cerr << "GLFW initialization failed." << std::endl;
return -1;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
GLFWwindow* gWindow = glfwCreateWindow(gwindowWidth, gwindowHeight, APP_TITLE, NULL, NULL);
if (gWindow == NULL)
{
std::cerr << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(gWindow);
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK)
{
std::cerr << "GLEW initialization failed." << std::endl;
return false;
}
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
return true;
}
void glfw_onKey(GLFWwindow* window, int key, int scancode, int action, int mode)
{
if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GL_TRUE);
}
void showFPS(GLFWwindow* window)
{
static double previousSeconds = 0.0;
static int frameCount = 0;
double elapsedSeconds;
double currentSeconds = glfwGetTime(); // returns # of seconds since GLFW started, as a double
elapsedSeconds = currentSeconds - previousSeconds;
// limit text update 4 times per second
if (elapsedSeconds > 0.25)
{
previousSeconds = currentSeconds;
double fps = (double)frameCount / elapsedSeconds;
double msPerFrame = 1000.0 / fps;
std::ostringstream outs;
outs.precision(3);
outs << std::fixed
<< APP_TITLE << " "
<< "FPS: " << fps << " "
<< "FrameTime: " << msPerFrame << " (ms)";
glfwSetWindowTitle(window, outs.str().c_str());
frameCount = 0;
}
frameCount++;
}
fragmentShaderSrc is missing a # (U+0023 NUMBER SIGN) in front of version:
"version 330 core\n"
^ note lack of #
Should be:
"#version 330 core\n"
^ note the #
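Spelled out, the corrected string literal from the question looks like this:

const GLchar* fragmentShaderSrc =
    "#version 330 core\n"   // the '#' makes this a valid preprocessor directive
    "out vec4 frag_color;"
    "void main()"
    "{"
    "    frag_color = vec4(0.35f, 0.96f, 0.3f, 1.0);"
    "}";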

How do I draw vertices that are stored in a SSBO?

This is a follow-up to the question "OpenGL and loading/reading data in AoSoA (hybrid SoA) format".
I am trying to use a shader storage buffer object (SSBO) to store vertex data which is represented in AoSoA format. I am having trouble drawing the vertices, which obviously means that I am doing something wrong somewhere. The problem is that I can't seem to figure out what or where. The answer to the initial question above seems to indicate that I should not be using vertex attribute arrays, so the question then becomes, how do I render this SSBO, given the code I am about to present?
VertexData structure
constexpr auto VECTOR_WIDTH = 4;
constexpr auto VERTEX_COUNT = 16;
struct VertexData
{
std::array<float, VECTOR_WIDTH> px;
std::array<float, VECTOR_WIDTH> py;
};
// Later stored in a std::vector
std::vector<VertexData> vertices(VERTEX_COUNT / VECTOR_WIDTH);
Vertex shader (should this really be a compute shader?)
struct Vertex4
{
float px[4]; // position x
float py[4]; // position y
};
layout(std430, binding=0) buffer VertexData
{
Vertex4 vertices[];
};
void main()
{
int dataIx = gl_VertexID / 4;
int vertexIx = gl_VertexID % 4;
vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);
}
Assign vertexPosition index
// Do I need this? Where do I use it? glEnableVertexAttribArray(position_attrib_index)?
const GLuint position_attrib_index = 0;
glBindAttribLocation(program, position_attrib_index, "vertexPosition");
SSBO setup
const GLuint ssbo_binding_point = 0;
GLuint ssbo{};
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
//glBufferStorage(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_MAP_WRITE_BIT);
glBufferData(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_STATIC_DRAW);
const auto block_index = glGetProgramResourceIndex(program, GL_SHADER_STORAGE_BLOCK, "VertexData");
glShaderStorageBlockBinding(program, block_index, ssbo_binding_point);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, ssbo_binding_point, ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
Render loop
while (!glfwWindowShouldClose(window)) {
process_input(window);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
// ???
glfwSwapBuffers(window);
glfwPollEvents();
}
I just can't seem to figure out how this is supposed to work. Grabbing at straws, I also tried creating a VAO with a later call to glDrawArrays(GL_POINTS, 0, VERTEX_COUNT), but it didn't work either:
GLuint vao{};
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glEnableVertexAttribArray(position_attrib_index);
glVertexAttribPointer(position_attrib_index, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
It seems to me that I should be using position_attrib_index (which should point to vertexPosition) for something; the question is, for what?
Complete example code
requires OpenGL 4.3, GLEW and GLFW
build command example: g++ -std=c++17 main.cpp -lGLEW -lglfw -lGL -o ssbo
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <array>
#include <iostream>
#include <vector>
void process_input(GLFWwindow *window)
{
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS) {
glfwSetWindowShouldClose(window, true);
}
}
void glfw_error_callback(int error_code, const char *description)
{
std::cerr << "GLFW Error: [" << error_code << "] " << description << '\n';
}
void framebuffer_size_callback(GLFWwindow *window, int width, int height)
{
glViewport(0, 0, width, height);
}
auto create_glfw_window()
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
return glfwCreateWindow(800, 600, "OpenGL and AoSoA layout", nullptr, nullptr);
}
void set_callbacks(GLFWwindow *window)
{
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
glfwSetErrorCallback(glfw_error_callback);
}
void print_versions()
{
std::cout << "Using GLFW " << glfwGetVersionString() << '\n';
std::cout << "Using GLEW " << glewGetString(GLEW_VERSION) << '\n';
}
bool init_loader()
{
GLenum err = glewInit();
if (GLEW_OK != err) {
std::cerr << "GLEW error: " << glewGetErrorString(err);
}
return err == GLEW_OK;
}
void GLAPIENTRY MessageCallback(
GLenum source,
GLenum type,
GLuint id,
GLenum severity,
GLsizei length,
const GLchar* message,
const void* userParam = nullptr)
{
std::cerr << "[GL DEBUG] " << (type == GL_DEBUG_TYPE_ERROR ? "Error: " : "") << message << '\n';
}
constexpr auto VECTOR_WIDTH = 4;
constexpr auto VERTEX_COUNT = 16;
struct VertexData
{
std::array<float, VECTOR_WIDTH> px;
std::array<float, VECTOR_WIDTH> py;
};
static const char* vertex_shader_source =
"#version 430\n"
"struct Vertex4\n"
"{\n"
" float px[4]; // position x\n"
" float py[4]; // position y\n"
"};\n"
"layout(std430, binding=0) buffer VertexData\n"
"{\n"
" Vertex4 vertices[];\n"
"};\n"
"void main()\n"
"{\n"
" int dataIx = gl_VertexID / 4;\n"
" int vertexIx = gl_VertexID % 4;\n"
" vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);\n"
"}\n";
static const char* fragment_shader_source =
"#version 430\n"
"out vec4 out_color;\n"
"void main()\n"
"{\n"
" out_color = vec4(1.0, 0.5, 0.5, 0.25);\n"
"}\n";
int main(int argc, char *argv[])
{
glewExperimental = GL_TRUE;
auto window = create_glfw_window();
if (window == nullptr) {
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
set_callbacks(window);
init_loader();
print_versions();
glEnable(GL_DEBUG_OUTPUT);
glDebugMessageCallback(MessageCallback, nullptr);
std::vector<VertexData> vertices(VERTEX_COUNT / VECTOR_WIDTH);
vertices[0] = {
{-0.75f, 0.75f, 0.75f, -0.75f},
{-0.75f, -0.75f, 0.75f, 0.75f}
};
vertices[1] = {
{-0.50f, 0.50f, 0.50f, -0.50f},
{-0.50f, -0.50f, 0.50f, 0.50f},
};
vertices[2] = {
{-0.25f, 0.25f, 0.25f, -0.25f},
{-0.25f, -0.25f, 0.25f, 0.25f},
};
vertices[3] = {
{-0.05f, 0.05f, 0.05f, -0.05f},
{-0.05f, -0.05f, 0.05f, 0.05f},
};
auto vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, nullptr);
glCompileShader(vertex_shader);
auto fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, nullptr);
glCompileShader(fragment_shader);
auto program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
const GLuint position_attrib_index = 0;
glBindAttribLocation(program, position_attrib_index, "vertexPosition");
glLinkProgram(program);
//glUseProgram(program);
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
//
// SSBO
//
const GLuint ssbo_binding_point = 0;
GLuint ssbo{};
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
//glBufferStorage(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_MAP_WRITE_BIT);
glBufferData(GL_SHADER_STORAGE_BUFFER, vertices.size() * sizeof(VertexData), vertices.data(), GL_STATIC_DRAW);
const auto block_index = glGetProgramResourceIndex(program, GL_SHADER_STORAGE_BLOCK, "VertexData");
glShaderStorageBlockBinding(program, block_index, ssbo_binding_point);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, ssbo_binding_point, ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
//
// VAO
//
//GLuint vao{};
//glGenVertexArrays(1, &vao);
//glBindVertexArray(vao);
//glEnableVertexAttribArray(position_attrib_index);
//glVertexAttribPointer(position_attrib_index, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glClearColor(0.15f, 0.15f, 0.2f, 1.0f);
glPointSize(10.0f);
while (!glfwWindowShouldClose(window)) {
process_input(window);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
//glDrawArrays(GL_POINTS, 0, VERTEX_COUNT);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
This is the correct way to issue a draw with the data you have:
glBindVertexArray(vao);
glDrawArrays(GL_POINTS, 0, VERTEX_COUNT);
However, your issue is that your vertex shader does not write to gl_Position, so nothing gets rasterized (or rather, you get whatever undefined behavior happens to occur). You should set the position of the vertices in the shader as follows:
//...
out gl_PerVertex {
vec4 gl_Position;
};
void main()
{
int dataIx = gl_VertexID / 4;
int vertexIx = gl_VertexID % 4;
vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);
gl_Position = vec4(vertexPosition, 0, 1);
}
You can get rid of "Assign vertexPosition index", and your VAO doesn't need to have any attributes.
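Applied to the vertex_shader_source string from the question, the change amounts to adding the gl_Position write (shown here together with the optional gl_PerVertex redeclaration from the answer); a sketch:

static const char* vertex_shader_source =
    "#version 430\n"
    "struct Vertex4\n"
    "{\n"
    "    float px[4]; // position x\n"
    "    float py[4]; // position y\n"
    "};\n"
    "layout(std430, binding=0) buffer VertexData\n"
    "{\n"
    "    Vertex4 vertices[];\n"
    "};\n"
    "out gl_PerVertex { vec4 gl_Position; };\n"
    "void main()\n"
    "{\n"
    "    int dataIx = gl_VertexID / 4;\n"
    "    int vertexIx = gl_VertexID % 4;\n"
    "    vec2 vertexPosition = vec2(vertices[dataIx].px[vertexIx], vertices[dataIx].py[vertexIx]);\n"
    "    gl_Position = vec4(vertexPosition, 0.0, 1.0); // without this write nothing is rasterized\n"
    "}\n";

The commented-out VAO creation and the glDrawArrays(GL_POINTS, 0, VERTEX_COUNT) call in the complete example then need to be re-enabled, as shown at the start of this answer.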

C++ / OpenGL 3.3+: No vertices are being rendered

I guess this is the millionth question of the same type. I am using the OpenGL 3.3 Core Profile with C++ and am trying to render a triangle.
I have already read the following two pages, including typing AND copy-pasting the code that is being discussed. Below I posted the significant bits. I already had a triangle being rendered, but obviously I changed some minor detail and messed it up. GLFW and GLEW are being initialized, and clearing with glClearColor works just fine.
Frameworks in use: GLFW for windowing, GLEW and GLM.
Question: What is the error in my code and why is nothing being rendered?
Expectation: A white triangle should be visible.
Result: Nothing is being rendered; the window is filled with the glClearColor.
Game.cpp
const float vertex_data[9] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
void Game::init()
{
shader = ShaderProgram();
shader.attachShader(readTextFromFile("data/shaders/main.vs"), GL_VERTEX_SHADER);
shader.attachShader(readTextFromFile("data/shaders/main.fs"), GL_FRAGMENT_SHADER);
shader.linkProgram();
mesh = Mesh(vertex_data);
}
void Game::render()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
shader.bind();
{
mesh.render();
}
shader.unbind();
}
Mesh.cpp
uint32_t vao;
uint32_t vertex_buffer;
Mesh::Mesh(const float vertex_data[])
{
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_data), vertex_data, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*) 0);
}
void Mesh::render()
{
glBindVertexArray(vao);
glEnableVertexAttribArray(0);
{
// Is this actually necessary for every draw-call?
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
glDisableVertexAttribArray(0);
}
ShaderProgram.cpp
uint32 id = 0;
bool linked = false;
uint32 vertex_shader = 0;
uint32 fragment_shader = 0;
ShaderProgram::~ShaderProgram()
{
unbind();
if (vertex_shader > 0)
{
glDetachShader(id, vertex_shader);
glDeleteShader(vertex_shader);
}
if (fragment_shader > 0)
{
glDetachShader(id, fragment_shader);
glDeleteShader(fragment_shader);
}
if (id > 0 && linked)
{
glDeleteProgram(id);
}
}
void ShaderProgram::attachShader(std::string source, int32 type)
{
assert(type == GL_VERTEX_SHADER || type == GL_FRAGMENT_SHADER);
assert(id == 0);
const char* code = source.c_str();
switch (type)
{
case GL_VERTEX_SHADER:
assert(vertex_shader == 0);
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &code, NULL);
glCompileShader(vertex_shader);
int32 vresult;
glGetShaderiv(vertex_shader, GL_COMPILE_STATUS, &vresult);
if (vresult != GL_TRUE)
{
int32 infolength;
glGetShaderiv(vertex_shader, GL_INFO_LOG_LENGTH, &infolength);
GLchar* infolog = new GLchar[infolength + 1];
glGetShaderInfoLog(vertex_shader, infolength + 1, NULL, infolog);
std::stringstream ss;
ss << "Shader compilation failed for Vertex Shader: " << infolog << std::endl;
std::cout << ss.str() << std::endl;
throw std::runtime_error(ss.str());
}
break;
case GL_FRAGMENT_SHADER:
assert(fragment_shader == 0);
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &code, NULL);
glCompileShader(fragment_shader);
int32 fresult;
glGetShaderiv(fragment_shader, GL_COMPILE_STATUS, &fresult);
if (fresult != GL_TRUE)
{
int32 infolength;
glGetShaderiv(fragment_shader, GL_INFO_LOG_LENGTH, &infolength);
int32 infosize = infolength + 1;
GLchar* infolog = new GLchar[infosize];
glGetShaderInfoLog(fragment_shader, infosize, NULL, infolog);
std::stringstream ss;
ss << "Shader compilation failed for Fragment Shader: " << infolog << std::endl;
std::cout << ss.str() << std::endl;
throw std::runtime_error(ss.str());
}
break;
default:
throw std::invalid_argument("Unknown Shader-Type specified");
}
}
void ShaderProgram::linkProgram()
{
assert(id == 0);
assert(vertex_shader > 0);
assert(fragment_shader > 0);
id = glCreateProgram();
glAttachShader(id, vertex_shader);
glAttachShader(id, fragment_shader);
glLinkProgram(id);
int32 result;
glGetProgramiv(id, GL_LINK_STATUS, &result);
if (result != GL_TRUE)
{
int32 infolength;
glGetProgramiv(id, GL_INFO_LOG_LENGTH, &infolength);
int32 infosize = infolength + 1;
GLchar* infolog = new GLchar[infosize];
glGetProgramInfoLog(id, infosize, NULL, infolog);
std::stringstream ss;
ss << "Shader Program Linking failed: " << infolog << std::endl;
throw std::runtime_error(ss.str());
}
linked = true;
}
void ShaderProgram::bind()
{
assert(id > 0);
assert(linked);
glUseProgram(id);
}
void ShaderProgram::unbind()
{
int32 current;
glGetIntegerv(GL_CURRENT_PROGRAM, &current);
if (current == id)
{
glUseProgram(0);
}
}
bool ShaderProgram::isLinked()
{
return linked;
}
Vertex Shader: "main.vs"
#version 330
layout(location = 0) in vec3 VertexPosition;
void main()
{
gl_Position = vec4(VertexPosition.xyz, 1.0);
}
Fragment Shader "main.fs":
#version 330
out vec4 FinalColor;
void main()
{
FinalColor = vec4(1.0, 1.0, 1.0, 1.0);
}
This line has an error:
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_data), vertex_data, GL_STATIC_DRAW);
The second parameter expects the size of the data in bytes, but because an array function parameter decays to a pointer, sizeof(vertex_data) is just the size of a pointer. To fix it, pass the size explicitly, for example vertex count * 3 * sizeof(float) for three floats per vertex.
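A minimal sketch of the fix, assuming the Mesh constructor is changed to take the vertex count explicitly (the extra parameter is not in the original code):

// Mesh.cpp -- the array parameter decays to a pointer, so the byte size
// must be computed from an explicitly passed count.
Mesh::Mesh(const float vertex_data[], size_t vertex_count)
{
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);
    glGenBuffers(1, &vertex_buffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
    // three floats (x, y, z) per vertex
    glBufferData(GL_ARRAY_BUFFER, vertex_count * 3 * sizeof(float), vertex_data, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*) 0);
}

Game::init() would then construct the mesh as mesh = Mesh(vertex_data, 3);, and the declaration in the Mesh header has to change accordingly.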

OpenGL shaders not drawing a thing :/

Well, as the title says, the shaders aren't doing a thing: they should draw a point, but it isn't appearing on the screen. I have been checking for solutions, but they don't appear to work. Also, GLFW and GLEW are initialising okay and the red clear color is appearing.
#include <GL/glew.h>
#define GLFW_DLL
#include <GLFW/glfw3.h>
#include <stdio.h>
#include <iostream>
#include "jelly/lua_manager.h"
#include "jelly/keysManager.h"
jelly::keys_buttons::KeysManager km;
GLuint vertex_array_obj;
GLuint program;
GLuint startRender(GLFWwindow* window)
{
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
std::cout << "ASD" << std::endl;
static const GLchar * vertexShader_src[] =
{
"#version 430 core \n"
" \n"
"void main(void) \n"
"{ \n"
" gl_Position = vec4(0, 0.5, 0.0, 1); \n"
"} \n"
};
static const GLchar * fragmentShader_src[] =
{
"#version 430 core \n"
" \n"
"out vec4 color; \n"
" \n"
"void main(void) \n"
"{ \n"
" color = vec4(0.0, 0.8, 1.0, 1.0); \n"
"} \n"
};
glShaderSource(vertexShader, 1, vertexShader_src, NULL);
glCompileShader(vertexShader);
glShaderSource(fragmentShader, 1, fragmentShader_src, NULL);
glCompileShader(fragmentShader);
GLuint tprogram = glCreateProgram();
glAttachShader(tprogram, vertexShader);
glAttachShader(tprogram, fragmentShader);
glLinkProgram(tprogram);
glValidateProgram(tprogram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
glGenVertexArrays(1, &vertex_array_obj);
glBindVertexArray(vertex_array_obj);
return tprogram;
}
void render(GLFWwindow* window)
{
glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_POINTS, 0, 1);
}
void mouseCallback(GLFWwindow* window, int button, int action, int mods)
{
km.mouseClick(button, action, mods);
}
void keyCallback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
km.keyPressed(key, action, mods);
}
int main()
{
jelly::lua::LuaManager lm;
// 0 = Build | 1 = Release | 2 = Alpha | 3 = Beta
int buildType = 0;
std::string title = "Relieved";
if (buildType != 1)
{
switch (buildType) {
case 0 :
title += " | Build Version";
break;
case 2 :
title += " | Alpha Version";
break;
case 3 :
title += " | Beta Version";
break;
default :
break;
}
}
GLFWwindow* window;
if (!glfwInit()) {
glfwTerminate();
return -1;
}
window = glfwCreateWindow(640, 400, title.c_str(), NULL, NULL);
if (!window) {
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
{
fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
}
glLoadIdentity();
program = startRender(window);
glUseProgram(program);
glfwSetKeyCallback(window, keyCallback);
glfwSetMouseButtonCallback(window, mouseCallback);
while(!glfwWindowShouldClose(window))
{
render(window);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &vertex_array_obj);
glDeleteProgram(program);
glDeleteVertexArrays(1, &vertex_array_obj);
glfwTerminate();
return 0;
}
You are creating your vertex array
glGenVertexArrays(1, &vertex_array_obj);
glBindVertexArray(vertex_array_obj);
but there is no data in it. You need to add some data so that the vertex shader is actually invoked (even if the position value is hardcoded in the vertex shader, as it is in your case).
So, add some vertices to your vertex array like this:
std::vector<float> v({0.0f,0.0f,0.0f});
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, v.size() * sizeof(float), &v[0], GL_STATIC_DRAW); // v is a std::vector<float>; its size times sizeof(float) is the byte size of the data, and &v[0] gives a pointer to the data
glEnableVertexAttribArray(0); //layout 0. In this example, position
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glVertexAttribPointer(
0, // layout in shader
3, // number of elements
GL_FLOAT, // type
GL_FALSE, // normalized?
0,
reinterpret_cast<void*>(0)
);
A good way to send data is to pack everything into one std::vector and just arrange the attribute layouts:
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, v.size() * sizeof(float), &v[0], GL_STATIC_DRAW); // v is a std::vector<float>; its size times sizeof(float) is the byte size of the data, and &v[0] gives a pointer to the data
const int s[] = { 3, 3, 2 }; //size of data, in my case 3 floats for position, 3 for normals and 2 for texture coordinates
size_t accum = 0;
for (int z = 0; z < 3; ++z){
glEnableVertexAttribArray(z);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glVertexAttribPointer(
z, // layout in shader
s[z], // number of elements
GL_FLOAT, // type
GL_FALSE, // normalized?
8 * sizeof(float), // stride (3+3+2)
reinterpret_cast<void*>(accum*sizeof(float)) // byte offset accumulated from the previous attributes (3, 3, 2)
);
accum += s[z];
}
Read about vertex arrays here:
https://stackoverflow.com/a/17517103/5729376

SDL2 with OpenGL 4.4: Triangle Not Rendering Properly

I'm using OpenGL 4.4 with SDL2. I am trying to render a simple triangle with the vertices (-1, -1, 0), (1, -1, 0), (0, 1, 0). However, even though I think I'm doing everything correctly, nothing is drawn.
I extracted and reorganized the relevant code from my project:
#include <cerrno>
#include <cstring>
#include <exception>
#include <fstream>
#include <iostream>
#include <string>
#include <GL/glew.h>
#include <GL/glu.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_opengl.h>
void init();
void cleanUp();
std::string loadShader(std::string filepath);
void checkShaderSuccess(GLuint shader);
SDL_Window* win;
SDL_GLContext glContext;
GLuint program, vertShader, fragShader, vao, vbo;
class GenError: public std::exception {
public:
GenError():
exception(), msg("") {}
GenError(const std::string& m):
exception(), msg(m) {}
virtual ~GenError() throw() {}
virtual const char* what() const throw() {
return msg.c_str();
}
private:
std::string msg;
};
int main() {
init();
program = glCreateProgram();
if (program == 0) {
throw GenError("Shader creation failed: "
"Could not find valid memory location in "
"constructor");
}
vertShader = glCreateShader(GL_VERTEX_SHADER);
fragShader = glCreateShader(GL_FRAGMENT_SHADER);
if (vertShader == 0 || fragShader == 0) {
std::string m;
m += "Shader creation failed: "
"Could not find valid memory location when "
"adding shader: ";
m += (char *)gluErrorString(glGetError());
throw GenError(m);
}
std::cout << "Creating vertex shader..." << std::endl;
std::string data = loadShader("./shaders/basicVertex.vs");
const GLchar* data_c = data.c_str();
glShaderSource(vertShader, 1, &data_c, NULL);
glCompileShader(vertShader);
checkShaderSuccess(vertShader);
glAttachShader(program, vertShader);
std::cout << "Vertex shader created" << std::endl;
std::cout << "Creating fragment shader..." << std::endl;
data = loadShader("./shaders/basicFragment.fs");
data_c = data.c_str();
glShaderSource(fragShader, 1, &data_c, NULL);
glCompileShader(fragShader);
checkShaderSuccess(fragShader);
glAttachShader(program, fragShader);
std::cout << "Fragment shader created" << std::endl;
glLinkProgram(program);
GLint success;
glGetProgramiv(program, GL_LINK_STATUS, &success);
if (success == GL_FALSE) {
GLint logLen = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLen);
GLchar programLog[logLen];
glGetProgramInfoLog(program, logLen, &logLen, programLog);
std::string m;
m += "Failed to link program: ";
m += (char *)gluErrorString(glGetError());
m += ": ";
m += (char *)programLog;
throw GenError(m);
}
glValidateProgram(program);
glGetProgramiv(program, GL_VALIDATE_STATUS, &success);
if (success == GL_FALSE) {
GLint logLen = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLen);
GLchar programLog[logLen];
glGetProgramInfoLog(program, logLen, &logLen, programLog);
std::string m;
m += "Failed to validate program: ";
m += (char *)gluErrorString(glGetError());
m += ": ";
m += (char *)programLog;
throw GenError(m);
}
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
const GLfloat verts[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER,
sizeof(verts),
verts,
GL_STATIC_DRAW );
SDL_Event ev;
bool running = true;
while (true) {
while (SDL_PollEvent(&ev)) {
if (ev.type == SDL_WINDOWEVENT &&
ev.window.event == SDL_WINDOWEVENT_CLOSE) {
std::cout << "Closing window..." << std::endl;
running = false;
break;
}
}
if (!running) break;
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0,
3,
GL_FLOAT,
GL_FALSE,
3*sizeof(GLfloat),
(GLvoid*)0 );
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
SDL_GL_SwapWindow(win);
}
std::cout << "Window closed" << std::endl;
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
glDeleteProgram(program);
glDeleteShader(vertShader);
glDeleteShader(fragShader);
cleanUp();
return 0;
}
void init() {
std::cout << "Initializing..." << std::endl;
if (SDL_Init(SDL_INIT_VIDEO) != 0) {
std::string m;
m.append("Error initializing SDL2: ");
m.append(SDL_GetError());
throw GenError(m);
}
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 4);
win = SDL_CreateWindow("Triangle Test",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
800, 600,
SDL_WINDOW_OPENGL );
if (win == NULL) {
throw GenError(SDL_GetError());
}
glContext = SDL_GL_CreateContext(win);
if (glContext == NULL) {
std::string m;
m.append("Error associating window with OpenGL: SDL Error: ");
m.append(SDL_GetError());
throw GenError(m);
}
glewExperimental = GL_TRUE;
GLenum glewErr = glewInit();
if (glewErr != GLEW_OK) {
std::string m;
m.append("Error initializing OpenGL GLEW extension: ");
m.append((const char*)glewGetErrorString(glewErr));
throw GenError(m);
} else {
/* GLEW does not play nice with OpenGL 4.4.
* GLEW thinks OpenGL 4.4 is "pretentious" and
* "entitled". GLEW likes to throw an invalid
* enumerant error the next time glGetError is
* called after GLEW's initialization.
* glGetError must be invoked to discard this
* faulty error. GLEW makes my code look sloppy.
* We do not like GLEW. We tolerate GLEW.
*/
GLenum junk = glGetError();
}
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glFrontFace(GL_CW);
glCullFace(GL_BACK);
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
glEnable(GL_FRAMEBUFFER_SRGB);
if(SDL_GL_SetSwapInterval(1) < 0) {
std::cerr << "Warning: Unable to set VSync! "
<< "SDL Error: "
<< SDL_GetError() << std::endl;
}
GLenum error = glGetError();
if (error != GL_NO_ERROR) {
std::string m;
m.append("Error initializing OpenGL: OpenGL Error: ");
m.append(reinterpret_cast<const char*>(gluErrorString(error)));
throw GenError(m);
}
std::cout << "Initialized" << std::endl;
}
void cleanUp() {
std::cout << "Cleaning up..." << std::endl;
SDL_GL_DeleteContext(glContext);
SDL_DestroyWindow(win);
SDL_Quit();
std::cout << "Cleaned" << std::endl;
}
std::string loadShader(std::string filepath) {
std::ifstream shaderFile(filepath.c_str());
if (!shaderFile.is_open()) {
std::cerr << "Could not load shader: "
<< "Error opening "
<< filepath
<< ": " << std::strerror(errno)
<< std::endl;
return std::string("");
}
std::string content, line;
while (std::getline(shaderFile, line)) {
content += line + '\n';
}
shaderFile.close();
return content;
}
void checkShaderSuccess(GLuint shader) {
GLint success;
glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
if (success == GL_FALSE) {
GLint logLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLen);
GLchar shaderLog[logLen];
glGetShaderInfoLog(shader, logLen, &logLen, shaderLog);
std::string m;
m += "Shader compilation failed: ";
m += (char *)gluErrorString(glGetError());
m += ": ";
m += (char *)shaderLog;
glDeleteShader(shader);
throw GenError(m);
}
}
...without error catching (for faster skimming):
#include <cerrno>
#include <cstring>
#include <exception>
#include <fstream>
#include <iostream>
#include <string>
#include <GL/glew.h>
#include <GL/glu.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_opengl.h>
void init();
void cleanUp();
std::string loadShader(std::string filepath);
SDL_Window* win;
SDL_GLContext glContext;
GLuint program, vertShader, fragShader, vao, vbo;
int main() {
init();
program = glCreateProgram();
vertShader = glCreateShader(GL_VERTEX_SHADER);
fragShader = glCreateShader(GL_FRAGMENT_SHADER);
std::cout << "Creating vertex shader..." << std::endl;
std::string data = loadShader("./shaders/basicVertex.vs");
const GLchar* data_c = data.c_str();
glShaderSource(vertShader, 1, &data_c, NULL);
glCompileShader(vertShader);
glAttachShader(program, vertShader);
std::cout << "Vertex shader created" << std::endl;
std::cout << "Creating fragment shader..." << std::endl;
data = loadShader("./shaders/basicFragment.fs");
data_c = data.c_str();
glShaderSource(fragShader, 1, &data_c, NULL);
glCompileShader(fragShader);
glAttachShader(program, fragShader);
std::cout << "Fragment shader created" << std::endl;
glLinkProgram(program);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
const GLfloat verts[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER,
sizeof(verts),
verts,
GL_STATIC_DRAW );
SDL_Event ev;
bool running = true;
while (true) {
while (SDL_PollEvent(&ev)) {
if (ev.type == SDL_WINDOWEVENT &&
ev.window.event == SDL_WINDOWEVENT_CLOSE) {
std::cout << "Closing window..." << std::endl;
running = false;
break;
}
}
if (!running) break;
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0,
3,
GL_FLOAT,
GL_FALSE,
3*sizeof(GLfloat),
(GLvoid*)0 );
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
SDL_GL_SwapWindow(win);
}
std::cout << "Window closed" << std::endl;
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
glDeleteProgram(program);
glDeleteShader(vertShader);
glDeleteShader(fragShader);
cleanUp();
return 0;
}
void init() {
std::cout << "Initializing..." << std::endl;
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 4);
win = SDL_CreateWindow("Triangle Test",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
800, 600,
SDL_WINDOW_OPENGL );
glContext = SDL_GL_CreateContext(win);
glewExperimental = GL_TRUE;
GLenum glewErr = glewInit();
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glFrontFace(GL_CW);
glCullFace(GL_BACK);
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
glEnable(GL_FRAMEBUFFER_SRGB);
std::cout << "Initialized" << std::endl;
}
void cleanUp() {
std::cout << "Cleaning up..." << std::endl;
SDL_GL_DeleteContext(glContext);
SDL_DestroyWindow(win);
SDL_Quit();
std::cout << "Cleaned" << std::endl;
}
std::string loadShader(std::string filepath) {
std::ifstream shaderFile(filepath.c_str());
std::string content, line;
while (std::getline(shaderFile, line)) {
content += line + '\n';
}
shaderFile.close();
return content;
}
...my vertex shader (GLSL):
#version 440
layout (location = 0) in vec3 position;
void main() {
gl_Position = vec4(0.5 * position, 1.0);
}
...and my fragment shader:
#version 440
out vec4 fragColor;
void main() {
fragColor = vec4(0.0, 1.0, 1.0, 1.0);
}
Now oddly enough, when I change line 148 in my C++ code (with error catching) from this...
3*sizeof(GLfloat),
...to this (in other words, changing the stride)...
3*sizeof(GLdouble),
...compiling and running produces a triangle with the vertices (-1, -1, 0), (0, 0, 0), (0, 1, 0). The second vertex is apparently getting obscured. Instead of an isosceles triangle, I get a scalene triangle.
I would like to 1) figure out how to fix my program so that it displays a triangle with the specified vertices, and 2) understand what I did wrong initially that caused my such an odd result when modifying the aforementioned line of code.
I have been tinkering with this for almost a week. Any insight is appreciated. Thanks!
Your code has a problem with the winding order of the polygons. You specify clockwise winding for the front faces, and enable culling of the back faces:
glFrontFace(GL_CW);
glCullFace(GL_BACK);
But the triangle has counter-clockwise winding order:
const GLfloat verts[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f
};
This means that the triangle will be eliminated by culling.
Using counter-clockwise winding is mostly standard in OpenGL, and is also the default. So the best option is to simply remove this line of code:
glFrontFace(GL_CW);
This will leave the value at GL_CCW, which matches your geometry.
Disabling backface culling is always one of the first things you should do when polygons don't show up. Having the wrong winding order is one of the most common causes of things not rendering, and it's very easy to triage: simply disable culling and check whether that makes the geometry show up.
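As a concrete sketch of both the triage and the fix, in the init() function from the question:

// Triage: temporarily disable culling; if the triangle shows up,
// the winding order was the culprit.
// glDisable(GL_CULL_FACE);

// Fix: drop the glFrontFace(GL_CW) call and keep the default GL_CCW,
// which matches the counter-clockwise vertex data.
glCullFace(GL_BACK);
glEnable(GL_CULL_FACE);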