I am following a tutorial on OpenGL in C++ and for some reason the exact same code doesn't work for me.
It is supposed to display a triangle, but it doesn't show anything. Just a blank screen.
Here is the code:
#include <iostream>
#include <string>
#define GLEW_STATIC
#include "GL/glew.h"
#include "GLFW/glfw3.h"
static unsigned int compileShader(unsigned int type, const std::string& shader)
{
unsigned int id = glCreateShader(type);
const char* src = shader.c_str();
glShaderSource(id, 1, &src, NULL);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (!result) {
char log[512];
glGetShaderInfoLog(id, 512, NULL, log);
std::cout << "Compiling shader error:\n" << log << std::endl;
}
return id;
}
static unsigned int createShader(const std::string& vertexShader, const std::string& fragmentShader)
{
unsigned int program = glCreateProgram();
unsigned int vs = compileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = compileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
int result;
glGetProgramiv(program, GL_LINK_STATUS, &result);
if (!result) {
char log[512];
glGetProgramInfoLog(program, 512, NULL, log);
std::cout << "linking shader error:\n" << log << std::endl;
}
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
void keyCallBack(GLFWwindow* window, int key, int scancode, int action, int mode)
{
if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GL_TRUE);
}
int main()
{
int width = 0, height = 0;
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(500, 500, "Test", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
glewInit();
glfwGetFramebufferSize(window, &width, &height);
glViewport(0, 0, width, height);
glfwSetKeyCallback(window, keyCallBack);
float triangle[] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
GLuint buffer = 0;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
std::string vertexShader = "#version 330 core\n"
"layout (location = 0) in vec4 position;\n"
"void main()\n"
"{\n"
"gl_Position = position;\n"
"}\n";
std::string fragmentShader = "#version 330 core\n"
"out vec4 color;\n"
"void main()\n"
"{\n"
"color = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
"}\n";
unsigned int program = createShader(vertexShader, fragmentShader);
glUseProgram(program);
while(!glfwWindowShouldClose(window)) {
glfwPollEvents();
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
}
glfwTerminate();
return 0;
}
I really don't understand why it is not working. The triangle's positions look correct, and the glVertexAttribPointer call also looks correct. I can't find any errors in the shader sources, and I get zero errors when compiling and linking them.
The issue is caused by GLFW_OPENGL_CORE_PROFILE. If you use a core profile OpenGL context, then you have to use a named Vertex Array Object (VAO), because the default VAO (0) is not valid there.
Either use a compatibility profile:
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_COMPAT_PROFILE);
or create a VAO before specifying the array of vertex attribute data:
GLuint VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
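For reference, a minimal sketch of how this slots into the setup code from the question (same data and names as above, just with the VAO generated and bound before the buffer and attribute calls, so that state is recorded in it):
GLuint VAO = 0;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
// the existing buffer/attribute setup now stores its state in VAO
GLuint buffer = 0;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);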
I am trying to draw a triangle with GLFW and Glad, but the screen won't display anything (I can only clear the screen).
I am able to open a window and clear the screen to a dark blue color (or any other color),
but I cannot draw a triangle on the screen, and I do not know why.
I thought maybe my GPU didn't have a default shader.
#include <iostream>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <string>
#include <vector>
void Log(const char* str) {
std::cout << str << std::endl;
}
void error_callback(int error, const char* description)
{
std::cout << "Error Code [" << error << "] Error:\n" << description << std::endl;
}
class Shader {
public:
Shader(const std::string& vertexSrc, const std::string& fragSrc) {
// Create an empty vertex shader handle
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
const GLchar* source = vertexSrc.c_str();
glShaderSource(vertexShader, 1, &source, 0);
glCompileShader(vertexShader);
GLint isCompiled = 0;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &isCompiled);
if (isCompiled == GL_FALSE)
{
GLint maxLength = 0;
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);
std::vector<GLchar> infoLog(maxLength);
glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &infoLog[0]);
glDeleteShader(vertexShader);
return;
}
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
source = fragSrc.c_str();
glShaderSource(fragmentShader, 1, &source, 0);
glCompileShader(fragmentShader);
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &isCompiled);
if (isCompiled == GL_FALSE)
{
GLint maxLength = 0;
glGetShaderiv(fragmentShader, GL_INFO_LOG_LENGTH, &maxLength);
std::vector<GLchar> infoLog(maxLength);
glGetShaderInfoLog(fragmentShader, maxLength, &maxLength, &infoLog[0]);
glDeleteShader(fragmentShader);
glDeleteShader(vertexShader);
return;
}
m_RendererID = glCreateProgram();
glAttachShader(m_RendererID, vertexShader);
glAttachShader(m_RendererID, fragmentShader);
glLinkProgram(m_RendererID);
// Note the different functions here: glGetProgram* instead of glGetShader*.
GLint isLinked = 0;
glGetProgramiv(m_RendererID, GL_LINK_STATUS, (int*)&isLinked);
if (isLinked == GL_FALSE)
{
GLint maxLength = 0;
glGetProgramiv(m_RendererID, GL_INFO_LOG_LENGTH, &maxLength);
std::vector<GLchar> infoLog(maxLength);
glGetProgramInfoLog(m_RendererID, maxLength, &maxLength, &infoLog[0]);
glDeleteProgram(m_RendererID);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
return;
}
Log("Shader compiled Successfully at:");
std::cout << m_RendererID << std::endl;
glDetachShader(m_RendererID, vertexShader);
glDetachShader(m_RendererID, fragmentShader);
}
~Shader() {
glDeleteProgram(m_RendererID);
}
void Bind() const {
glUseProgram(m_RendererID);
}
void Unbind() const {
glUseProgram(0);
}
private:
uint32_t m_RendererID = 0;
};
int main() {
Log("Initializing...");
glfwSetErrorCallback(error_callback);
if (!glfwInit())
{
// Initialization failed
Log("Initialization Failed");
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
GLFWwindow* window = glfwCreateWindow(1080, 720, "GLFW_GLAD TEST", NULL, NULL);
if (!window)
{
glfwTerminate();
Log("Window or OpenGL Context Failed");
}
glfwMakeContextCurrent(window);
gladLoadGL();
glfwSwapInterval(1);
int width, height;
glfwGetFramebufferSize(window, &width, &height);
glViewport(0, 0, width, height);
//APP CODE
unsigned int m_VertexArray;
unsigned int m_VertexBuffer;
unsigned int m_IndexBuffer;
float vertices[9] = {
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f
};
// Vertex Array
glGenVertexArrays(1, &m_VertexArray);
glBindVertexArray(m_VertexArray);
// Vertex Buffer
glGenBuffers(1, &m_VertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, m_VertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glDisableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), nullptr);
// Index Buffer
glGenBuffers(1, &m_IndexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_IndexBuffer);
unsigned int indices[3] = {
0, 1, 2,
};
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
std::string vertexSrc = R"(
#version 330
layout(location = 0) in vec3 a_Pos;
void main()
{
gl_Position = vec4(a_Pos, 1);
}
)";
std::string fragmentSrc = R"(
#version 330
layout(location = 0) out vec4 FragColor;
void main()
{
FragColor = vec4(0.8f, 0.5f, 0.2f, 1.0f);
}
)";
Shader* shader = new Shader(vertexSrc, fragmentSrc);
shader->Bind();
while (!glfwWindowShouldClose(window))
{
glViewport(0, 0, width, height);
glClearColor(0.1f, .1f, .14f, 1);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(m_VertexArray);
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, nullptr);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
return 0;
}
This line here is definitely wrong:
glDisableVertexAttribArray(0);
You meant glEnableVertexAttribArray here, so that the attribute pointer you are setting up is actually used. For disabled attribute arrays, all shader invocations get a constant value instead.
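As a purely illustrative side note (not something this code needs), the constant value used for a disabled attribute array can be set with glVertexAttrib4f:
glDisableVertexAttribArray(0);                 // attribute 0 no longer reads per-vertex data
glVertexAttrib4f(0, 0.0f, 0.0f, 0.0f, 1.0f);   // every shader invocation now sees this value
glEnableVertexAttribArray(0);                  // back to per-vertex data from the bound buffer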
Also, your shaders use #version 330 and actually require GLSL 3.30 syntax; however:
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
you request only a GL 2.0 context, and GL 2.0 only mandates support for GLSL 1.10. Whether this works at all is completely up to your driver: you might get a >= 3.3 compatibility profile context, but you can't count on it.
You should create a 3.3 context explicitly, and also explicitly request a core profile.
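Putting both points together, here is a rough sketch of the corrected setup, reusing the variable names from the question (a suggestion, not the only way to do it):
// request a 3.3 core profile context before glfwCreateWindow
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

// during VAO setup: enable the attribute array instead of disabling it
glBindBuffer(GL_ARRAY_BUFFER, m_VertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);   // was glDisableVertexAttribArray(0)
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), nullptr);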
I am on a Mac using Xcode 10.1, since I can't get a newer version because my Mac is on 10.13.6. I am new to C++ and OpenGL, so I followed a tutorial on how to do this, but it was made on Windows 10, so maybe that is the problem? It uses GLFW and GLEW, both installed with Homebrew.
Here is my code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <string>
static unsigned int CompileShader(unsigned int type, const std::string& source)
{
unsigned int id = glCreateShader(type);
const char* src = source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if(result == GL_FALSE)
{
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char* message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
std::cout << "Failed to compile " << (type == GL_VERTEX_SHADER ? "vertex" : "fragment") << " shader!" << std::endl;
std::cout << message << std::endl;
glDeleteShader(id);
return 0;
}
return id;
}
static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader)
{
unsigned int program = glCreateProgram();
unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glValidateProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);
return program;
}
int main(void)
{
GLFWwindow* window;
if (!glfwInit())
return -1;
window = glfwCreateWindow(640, 480, "Window", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
if(glewInit() != GLEW_OK)
std::cout << "Error!" << std::endl;
std::cout << glGetString(GL_VERSION) << std::endl;
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f
};
unsigned int buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
std::string vertexShader =
"#version 330 core\n"
"\n"
"layout(location = 0) in vec4 position;\n"
"\n"
"void main()\n"
"{\n"
" glPosition = position;\n"
"}\n";
std::string fragmentShader =
"#version 450 core\n"
"\n"
"layout(location = 0) out vec4 colour;\n"
"\n"
"void main()\n"
"{\n"
" colour = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
while (!glfwWindowShouldClose(window))
{
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
When I run this code the console writes this:
2.1 NVIDIA-10.4.14 310.90.30.05b27
Failed to compile vertex shader!
ERROR: 0:1: '' : version '330' is not supported
ERROR: 0:1: '' : syntax error: #version
ERROR: 0:3: 'layout' : syntax error: syntax error
Failed to compile fragment shader!
ERROR: 0:1: '' : version '450' is not supported
ERROR: 0:1: '' : syntax error: #version
ERROR: 0:3: 'layout' : syntax error: syntax error
Program ended with exit code: 0
How can I fix this?
I'm on macOS 10.13.6, following The Cherno's lesson, and ran into the same trouble too.
I fixed it in two steps:
Add these lines of code before glfwCreateWindow:
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#ifdef __APPLE__
cout << "I'm apple machine" << endl;
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif
Add a VAO, as Ali ASkari mentions above.
Finally:
GLFWwindow *window;
if (!glfwInit()) {
cout << "glfw init err" << endl;
return -1;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#ifdef __APPLE__
cout << "I'm apple machine" << endl;
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif
unsigned int width = 800;
unsigned int height = 600;
window = glfwCreateWindow(width, height, "Learngl", NULL, NULL);
if (!window) {
cout << " can't create window!!!" << endl;
glfwTerminate();
return -1;
}
unsigned int major = glfwGetWindowAttrib(window, GLFW_CONTEXT_VERSION_MAJOR);
unsigned int minor = glfwGetWindowAttrib(window, GLFW_CONTEXT_VERSION_MINOR);
cout << "opengl context version: " << major << "." << minor << endl;
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK) {
cout << "Error ! " << endl;
}
cout << glGetString(GL_VERSION) << endl;
float positions[6] = {
-0.5f, -0.5f,
0.0f, 0.5f,
0.5f, -0.5f,
};
unsigned int VBO, VAO;
glGenBuffers(1, &VBO);
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, 6*sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float)*2, 0);
string vertexShader = "#version 330 core\n"
"layout (location = 0) in vec4 position;\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\0";
string fragmentShader = "#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(1.0f, 0.0, 0.0, 0.5f);\n"
"}\n\0";
unsigned int shader = CreateShader(vertexShader, fragmentShader);
glUseProgram(shader);
while (!glfwWindowShouldClose(window)) {
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(shader);
glfwTerminate();
Your C++ code follows the style of the older OpenGL versions (3.2 and older) and is not compatible with your shaders. You have two options: either downgrade your shaders to the old version, or update the C++ code. A correct example for OpenGL 3.3 looks like this:
#include <iostream>
// GLEW
#define GLEW_STATIC
#include <GL/glew.h>
// GLFW
#include <GLFW/glfw3.h>
// Function prototypes
void key_callback(GLFWwindow* window, int key, int scancode, int action, int mode);
// Window dimensions
const GLuint WIDTH = 800, HEIGHT = 600;
// Shaders
const GLchar* vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 position;\n"
"void main()\n"
"{\n"
"gl_Position = vec4(position.x, position.y, position.z, 1.0);\n"
"}\0";
const GLchar* fragmentShaderSource = "#version 330 core\n"
"out vec4 color;\n"
"void main()\n"
"{\n"
"color = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
"}\n\0";
// The MAIN function, from here we start the application and run the game loop
int main()
{
std::cout << "Starting GLFW context, OpenGL 3.3" << std::endl;
// Init GLFW
glfwInit();
// Set all the required options for GLFW
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
// Create a GLFWwindow object that we can use for GLFW's functions
GLFWwindow* window = glfwCreateWindow(WIDTH, HEIGHT, "LearnOpenGL", nullptr, nullptr);
glfwMakeContextCurrent(window);
// Set the required callback functions
glfwSetKeyCallback(window, key_callback);
// Set this to true so GLEW knows to use a modern approach to retrieving function pointers and extensions
glewExperimental = GL_TRUE;
// Initialize GLEW to setup the OpenGL Function pointers
glewInit();
// Define the viewport dimensions
int width, height;
glfwGetFramebufferSize(window, &width, &height);
glViewport(0, 0, width, height);
// Build and compile our shader program
// Vertex shader
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
// Check for compile time errors
GLint success;
GLchar infoLog[512];
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
if (!success)
{
glGetShaderInfoLog(vertexShader, 512, NULL, infoLog);
std::cout << "ERROR::SHADER::VERTEX::COMPILATION_FAILED\n" << infoLog << std::endl;
}
// Fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
// Check for compile time errors
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
if (!success)
{
glGetShaderInfoLog(fragmentShader, 512, NULL, infoLog);
std::cout << "ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n" << infoLog << std::endl;
}
// Link shaders
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
// Check for linking errors
glGetProgramiv(shaderProgram, GL_LINK_STATUS, &success);
if (!success) {
glGetProgramInfoLog(shaderProgram, 512, NULL, infoLog);
std::cout << "ERROR::SHADER::PROGRAM::LINKING_FAILED\n" << infoLog << std::endl;
}
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
// Set up vertex data (and buffer(s)) and attribute pointers
//GLfloat vertices[] = {
// // First triangle
// 0.5f, 0.5f, // Top Right
// 0.5f, -0.5f, // Bottom Right
// -0.5f, 0.5f, // Top Left
// // Second triangle
// 0.5f, -0.5f, // Bottom Right
// -0.5f, -0.5f, // Bottom Left
// -0.5f, 0.5f // Top Left
//};
GLfloat vertices[] = {
0.5f, 0.5f, 0.0f, // Top Right
0.5f, -0.5f, 0.0f, // Bottom Right
-0.5f, -0.5f, 0.0f, // Bottom Left
-0.5f, 0.5f, 0.0f // Top Left
};
GLuint indices[] = { // Note that we start from 0!
0, 1, 3, // First Triangle
1, 2, 3 // Second Triangle
};
GLuint VBO, VAO, EBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
// Bind the Vertex Array Object first, then bind and set vertex buffer(s) and attribute pointer(s).
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0); // Note that this is allowed, the call to glVertexAttribPointer registered VBO as the currently bound vertex buffer object so afterwards we can safely unbind
glBindVertexArray(0); // Unbind VAO (it's always a good thing to unbind any buffer/array to prevent strange bugs), remember: do NOT unbind the EBO, keep it bound to this VAO
// Uncommenting this call will result in wireframe polygons.
//glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
// Game loop
while (!glfwWindowShouldClose(window))
{
// Check if any events have been activated (key pressed, mouse moved etc.) and call corresponding response functions
glfwPollEvents();
// Render
// Clear the colorbuffer
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// Draw our first triangle
glUseProgram(shaderProgram);
glBindVertexArray(VAO);
//glDrawArrays(GL_TRIANGLES, 0, 6);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
// Swap the screen buffers
glfwSwapBuffers(window);
}
// Properly de-allocate all resources once they've outlived their purpose
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
// Terminate GLFW, clearing any resources allocated by GLFW.
glfwTerminate();
return 0;
}
// Is called whenever a key is pressed/released via GLFW
void key_callback(GLFWwindow* window, int key, int scancode, int action, int mode)
{
if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GL_TRUE);
}
A big difference between the old and new versions is that the new versions require the use of a VAO.
I ran into this same problem when trying to run my code on macOS after developing on my Windows machine. I added the following logic to make it run on macOS.
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
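As a quick sanity check, the version string that the question's code already prints should then report 3.3 or newer rather than 2.1 (the exact string depends on the driver):
std::cout << glGetString(GL_VERSION) << std::endl;  // expect 3.3 or newer here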
I'm trying to use glVertexAttribFormat and glVertexAttribBinding to create two triangles, but it doesn't work. I followed the description of how to do this in the question here (Render one VAO containing two VBOs). I don't really know what to try. I am new to OpenGL, and all descriptions of glVertexAttribFormat appear to assume you already know OpenGL.
This is my code:
#include <glad/glad.h>
#include <glfw/glfw3.h>
#include <iostream>
void adjustViewportToWindowSize(GLFWwindow* window, int width, int height);
void checkEsc(GLFWwindow* window);
int main(void)
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(800, 600, "Tab name", NULL, NULL);
if (window == NULL)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
std::cout << "Failed to initialize GLAD" << std::endl;
return -1;
}
glViewport(0, 0, 800, 600); //size of GL rendering window.
glfwSetFramebufferSizeCallback(window, adjustViewportToWindowSize);
const char* vertexShaderSource = "#version 430 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fragmentShaderSource = "#version 430 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
"FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
"}\0";
unsigned int vertexShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
unsigned int fragmentShader;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
unsigned int shaderProgram;
shaderProgram = glCreateProgram();
//Attaching shaders to program
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
//Can delete shader objects after they are linked
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
float triangleVertices1[] = {
-0.50f, 0.0f, 0.0f,
-0.25f, 0.5f, 0.0f,
0.00f, 0.0f, 0.0f
};
float triangleVertices2[] = {
0.0f, 0.0f, 0.0f,
0.25f, 0.5f, 0.0f,
0.5f, 0.0f, 0.0f
};
unsigned int aVBO[2], VAO2;
glGenVertexArrays(1, &VAO2);
glBindVertexArray(VAO2);
glVertexAttribFormat(0, 3, GL_FLOAT, GL_FALSE, 0); //format setup without a buffer
glVertexAttribBinding(0, 0);
glBindVertexArray(0);
//Bind Buffers to data next
glGenBuffers(2, aVBO);
glBindBuffer(GL_ARRAY_BUFFER, aVBO[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangleVertices1), triangleVertices1, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, aVBO[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangleVertices2), triangleVertices2, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
while (!glfwWindowShouldClose(window))
{
checkEsc(window);
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
glBindVertexArray(VAO2);
glBindVertexBuffer(0, aVBO[0], 0, 3*sizeof(float));
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexBuffer(0, aVBO[1], 0, 3*sizeof(float));
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
glfwSwapBuffers(window);
glfwPollEvents();
}
//Clear up
glDeleteVertexArrays(1, &VAO2);
glDeleteBuffers(1, aVBO);
glDeleteProgram(shaderProgram);
glfwTerminate();
return 0;
}
void adjustViewportToWindowSize(GLFWwindow* window, int width, int height)
{
glViewport(0, 0, width, height);
}
void checkEsc(GLFWwindow *window)
{
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
glfwSetWindowShouldClose(window, true);
}
Can also be accessed on github: https://github.com/Ritzerk/OpenGLSelfStudy
In addition to setting up the buffer bindings, you also have to enable the vertex attribute array so the attribute actually reaches the shader. To do so, call glEnableVertexAttribArray during VAO setup:
glBindVertexArray(VAO2);
glVertexAttribFormat(0, 3, GL_FLOAT, GL_FALSE, 0);
glVertexAttribBinding(0, 0);
//Enable input in shader
glEnableVertexAttribArray(0);
glBindVertexArray(0);
I do not understand why this is not working with glDrawArrays.
If you see anything out of place or missing, I really need to know.
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <string>
static int CompileShader(unsigned int type, const std::string& Source) {
unsigned int id = glCreateShader(type);
const char* src = Source.c_str();
glShaderSource(id, 1, &src, nullptr);
glCompileShader(id);
int result;
glGetShaderiv(id, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE) {
int length;
glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
char* message = (char*)alloca(length * sizeof(char));
glGetShaderInfoLog(id, length, &length, message);
std::cout << message ;
}
return id;
}
static unsigned int CreateShader(const std::string& Vertexshader, const std::string& Fragmentshader) {
unsigned int program = glCreateProgram();
unsigned int vertex = CompileShader(GL_VERTEX_SHADER, Vertexshader);
unsigned int fragment = CompileShader(GL_FRAGMENT_SHADER, Fragmentshader);
glAttachShader(program, vertex);
glAttachShader(program, fragment);
glLinkProgram(program);
glValidateProgram(program);
return program;
}
int main(void)
{
GLFWwindow* window;
/* Initialize the library */
if (!glfwInit())
return -1;
/* Create a windowed mode window and its OpenGL context */
window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
if (!window)
{
glfwTerminate();
return -1;
}
/* Make the window's context current */
glfwMakeContextCurrent(window);
if (GLEW_OK == glewInit())
{
}
float vertices[6] = {
-0.5, -0.5,
0.0, 0.5,
0.5, 0.5
};
unsigned int buffer1;
glGenBuffers(1, &buffer1);
glBindBuffer(GL_ARRAY_BUFFER, buffer1);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 6, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
std::string Vertexshader =
"#version 330 core\n"
"\n"
"layout (location = 0)in vec4 position;"
"\n"
"void main()\n"
"{\n"
" gl_Position = position;\n"
"}\n";
std::string Fragmentshader =
"#version 330 core\n"
"\n"
"layout (location = 0)out vec4 color;"
"\n"
"void main()\n"
"{\n"
" color = vec4(1.0, 0.0, 0.0, 1.0);\n"
"}\n";
unsigned int shader = CreateShader(Vertexshader, Fragmentshader);
glUseProgram(shader);
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(window))
{
/* Render here */
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shader);
glDrawArrays(GL_TRIANGLES, 0, 3);
/* Swap front and back buffers */
glfwSwapBuffers(window);
/* Poll for and process events */
glfwPollEvents();
}
glfwTerminate();
return 0;
}
The 2nd parameter of glVertexAttribPointer is the tuple size of a single vertex coordinate, rather than the number of floats in the whole array of vertices.
Each of the vertices in the array consists of 2 components, hence the size argument has to be 2 rather than 6:
glVertexAttribPointer(0, 6, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
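For comparison only (the question's data really is 2D), this is how the size and stride arguments would change if each vertex had 3 components instead:
// 2 floats per vertex (this question): size = 2, stride = 2 * sizeof(float)
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
// 3 floats per vertex would instead be: size = 3, stride = 3 * sizeof(float)
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 3, 0);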
I tried to code a small OpenGL app, but nothing is displayed in the window.
I cannot find any mistakes.
What did I do wrong?
All error checks pass, so no errors occur.
I generated the OpenGL 3.3 header with this tool: https://bitbucket.org/alfonse/glloadgen/wiki/Home
#include "OpenGL.hpp"
#include <iostream>
#include <GLFW/glfw3.h>
#include "World.hpp"
int main(void)
{
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow * window = glfwCreateWindow(800, 600, "Project X", nullptr, nullptr);
glfwMakeContextCurrent(window);
world = new World();
do
{
world->Render();
glfwSwapBuffers(window);
glfwPollEvents();
}
while(glfwWindowShouldClose(window) == false);
delete world;
glfwTerminate();
return 0;
}
#include "World.hpp"
World::World()
{
glClearColor(0.5f, 0.5f, 0.5f, 1);
GLuint vertex_shader = glCreateShader(GL_VERTEX_SHADER);
const GLchar * vertex_source = "#version 330\n"
"layout(location = 0)in vec3 in_vertex_position;\n"
"void main()\n"
"{\n"
"gl_Position = vec4(in_vertex_position, 1);\n"
"}\n";
glShaderSource(vertex_shader, 1, &vertex_source, nullptr);
glCompileShader(vertex_shader);
GLint vertex_result;
glGetShaderiv(vertex_shader, GL_COMPILE_STATUS, &vertex_result);
if(vertex_result != GL_TRUE)
{
std::cerr<<"Could not compile vertex shader !"<<std::endl;
}
GLuint fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
const GLchar * fragment_source = "#version 330\n"
"out vec4 out_fragment_color;\n"
"void main()\n"
"{\n"
"out_fragment_color = vec4(0.3, 1.0, 1.0, 1.0);\n"
"}\n";
glShaderSource(fragment_shader, 1, &fragment_source, nullptr);
glCompileShader(fragment_shader);
GLint fragment_result;
glGetShaderiv(fragment_shader, GL_COMPILE_STATUS, &fragment_result);
if(fragment_result != GL_TRUE)
{
std::cerr<<"Could not compile fragment shader !"<<std::endl;
}
program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
glLinkProgram(program);
GLint result;
glGetProgramiv(program, GL_LINK_STATUS, &result);
if(result != GL_TRUE)
{
std::cerr<<"Could not link program !"<<std::endl;
}
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
GLfloat vertices_positions[] = {0, 0, 0, 1, 0, 0, 1, 1, 0};
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(GL_FLOAT), vertices_positions, GL_STATIC_DRAW);
}
World::~World()
{
}
void World::Render()
{
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
}